gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/* See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Esri Inc. licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.esri.gpt.catalog.publication;
import com.esri.gpt.catalog.arcims.ImsServiceException;
import com.esri.gpt.catalog.context.CatalogIndexException;
import com.esri.gpt.control.webharvest.IterationContext;
import com.esri.gpt.framework.resource.api.Native;
import com.esri.gpt.framework.resource.api.Publishable;
import com.esri.gpt.framework.resource.query.Criteria;
import com.esri.gpt.framework.resource.query.Query;
import com.esri.gpt.framework.util.Val;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Super-class for a processor that interacts with a resource for the express
* purpose of publishing metadata to the catalog.
*/
public abstract class ResourceProcessor {

/** class variables ========================================================= */

/** Logger */
private static final Logger LOGGER = Logger.getLogger(ResourceProcessor.class.getName());

/** instance variables ====================================================== */

/** The processing context supplied at construction time. */
private ProcessingContext context;

/** The publication method recorded on published documents (defaults to "upload"). */
private String publicationMethod = "upload";

/** Existing catalog source URIs for the parent resource, loaded on demand. */
private Map<String,String> sourceURIs;

/** constructors ============================================================ */

/**
 * Constructs with a supplied processing context.
 * @param context the resource processing context
 */
public ResourceProcessor(ProcessingContext context) {
  this.context = context;
}

/** properties ============================================================== */

/**
 * Gets the processing context.
 * @return the processing context
 */
public ProcessingContext getContext() {
  return this.context;
}

/**
 * Gets the publication method.
 * @return the publication method
 */
public String getPublicationMethod() {
  return this.publicationMethod;
}

/**
 * Sets the publication method.
 * <br/>The supplied value is trimmed; a null becomes an empty string.
 * @param method the publication method
 */
public void setPublicationMethod(String method) {
  this.publicationMethod = Val.chkStr(method);
}

/** methods ================================================================= */

/**
 * Collects document source URIs associated with a parent resource (SQL LIKE).
 * @param pattern the source URI pattern of the parent resource
 * @param pattern2 optional secondary source URI pattern of the parent resource
 * @throws SQLException if an exception occurs while communicating with the database
 */
protected void collectExistingSourceURIs(String pattern, String pattern2)
  throws SQLException {
  CatalogDao dao = new CatalogDao(this.getContext().getRequestContext());
  this.sourceURIs = dao.querySourceURIs(pattern,pattern2);
}

/**
 * Deletes catalog documents that are no longer referenced by the parent resource.
 * <br/>Deletion only occurs if at least one URI was processed during this request;
 * this guards against wiping documents when a run produced no records at all.
 * @throws SQLException if an exception occurs while communicating with the database
 * @throws ImsServiceException if an exception occurs during delete
 * @throws CatalogIndexException if an exception occurs during delete
 * @throws IOException if accessing index fails
 */
protected void deleteUnreferencedSourceURIs()
  throws SQLException, ImsServiceException, CatalogIndexException, IOException {
  List<ProcessedRecord> records = this.getContext().getProcessedRecords();
  if ((this.sourceURIs != null) && !this.sourceURIs.isEmpty() && !records.isEmpty()) {
    // remove every URI touched during this request; whatever remains is unreferenced
    for (ProcessedRecord record: records) {
      this.sourceURIs.remove(record.getSourceUri());
    }
    if (!this.sourceURIs.isEmpty()) {
      CatalogDao dao = new CatalogDao(this.getContext().getRequestContext());
      dao.deleteSourceURIs(this.getContext(),this.sourceURIs);
    }
  }
}

/**
 * Invokes processing against the resource.
 * @throws Exception if an exception occurs
 */
public abstract void process() throws Exception;

/**
 * Creates iteration query.
 * Query is being used during synchronization.
 * @param context iteration context
 * @param criteria query criteria or <code>null</code> if no criteria
 * @return query
 */
public abstract Query createQuery(IterationContext context, Criteria criteria);

/**
 * Gets native resource.
 * Native resource is a publishable resource created just for repository definition.
 * Each native resource is {@link Publishable} and each repository has to be able to
 * provide one.
 * @param context iteration context
 * @return native resource.
 */
public abstract Native getNativeResource(IterationContext context);

/**
 * Publishes metadata associated with a resource.
 * @param resourceUrl the URL for the resource being published
 * @param resourceXml the resource XML
 * @throws Exception if an exception occurs
 */
public void publishMetadata(String resourceUrl, String resourceXml)
  throws Exception {
  this.publishMetadata(resourceUrl,resourceXml,null);
}

/**
 * Publishes metadata associated with a resource.
 * <br/>When the context is in validate-only mode the document is verified but
 * not uploaded. A ProcessedRecord is always appended to the context, carrying
 * the outcome (VALIDATED/CREATED/REPLACED/UNCHNAGED/FAILED).
 * @param resourceUrl the URL for the resource being published
 * @param resourceXml the resource XML
 * @param sourceUri a URI identifying the source (falls back to resourceUrl when empty)
 * @throws Exception if an exception occurs and the context represents a single source
 */
public void publishMetadata(String resourceUrl, String resourceXml, String sourceUri)
  throws Exception {
  ProcessingContext context = this.getContext();
  ProcessedRecord processedRecord = new ProcessedRecord();
  // prefer the explicit source URI, otherwise fall back to the resource URL
  if ((sourceUri != null) && (sourceUri.length() > 0)) {
    processedRecord.setSourceUri(sourceUri);
  } else if ((resourceUrl != null) && (resourceUrl.length() > 0)) {
    processedRecord.setSourceUri(resourceUrl);
  }
  context.getProcessedRecords().add(processedRecord);

  // handle validation only requests
  if (context.getValidateOnly()) {
    try {
      ValidationRequest request = new ValidationRequest(
          context.getRequestContext(),resourceUrl,resourceXml);
      request.verify();
      context.incrementNumberValidated();
      processedRecord.setStatusType(ProcessedRecord.StatusType.VALIDATED);
    } catch (Exception e) {
      this.handleFailure(context,processedRecord,resourceXml,e);
    }

  // handle publication requests
  } else {
    try {
      PublicationRecord template = context.getTemplate();
      UploadRequest request = new UploadRequest(
          context.getRequestContext(),context.getPublisher(),resourceUrl,resourceXml);
      PublicationRecord publicationRecord = request.getPublicationRecord();
      if ((sourceUri != null) && (sourceUri.length() > 0)) {
        publicationRecord.setSourceUri(sourceUri);
      }
      if (template != null) {
        publicationRecord.setAutoApprove(template.getAutoApprove());
        publicationRecord.setUpdateOnlyIfXmlHasChanged(template.getUpdateOnlyIfXmlHasChanged());
      }
      if ((this.getPublicationMethod() != null) && (this.getPublicationMethod().length() > 0)) {
        publicationRecord.setPublicationMethod(this.getPublicationMethod());
      }
      request.publish();
      context.incrementNumberValidated();
      if (request.getPublicationRecord().getWasDocumentUnchanged()) {
        context.incrementNumberUnchanged();
        // NOTE: "UNCHNAGED" is the constant's actual (misspelled) name in ProcessedRecord
        processedRecord.setStatusType(ProcessedRecord.StatusType.UNCHNAGED);
      } else if (request.getPublicationRecord().getWasDocumentReplaced()) {
        context.incrementNumberReplaced();
        processedRecord.setStatusType(ProcessedRecord.StatusType.REPLACED);
      } else {
        context.incrementNumberCreated();
        processedRecord.setStatusType(ProcessedRecord.StatusType.CREATED);
      }
    } catch (Exception e) {
      this.handleFailure(context,processedRecord,resourceXml,e);
    }
  }
}

/**
 * Records a failed validation/publication attempt.
 * <br/>Updates the failure counters and the record status, then either rethrows
 * (single source requests propagate the error to the caller) or logs the error
 * and lets the batch continue.
 * @param context the processing context
 * @param processedRecord the record associated with the failed attempt
 * @param resourceXml the resource XML (logged for diagnostics)
 * @param e the failure
 * @throws Exception the supplied exception, when the context represents a single source
 */
private void handleFailure(ProcessingContext context, ProcessedRecord processedRecord,
    String resourceXml, Exception e) throws Exception {
  context.incrementNumberFailed();
  context.setLastException(e);
  processedRecord.setStatusType(ProcessedRecord.StatusType.FAILED);
  processedRecord.setException(e,this.getContext().getMessageBroker());
  if (context.getWasSingleSource()) {
    throw e;
  } else {
    LOGGER.log(Level.FINER,"Error\n"+processedRecord.getSourceUri()+"\n"+resourceXml,e);
  }
}

}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jp.dartsclone.details;
import java.util.ArrayList;
/**
*
* @author
*/
/**
 * Builds a DAWG (directed acyclic word graph) from keys inserted in
 * lexicographic byte order; scratch structure for double-array trie
 * construction.
 *
 * Units are packed ints. For a leaf (label == 0) the unit is
 * (value &lt;&lt; 1) | hasSibling; otherwise it is
 * (child &lt;&lt; 2) | (isState ? 2 : 0) | hasSibling.
 */
class DawgBuilder {
  /** Returns the id of the root unit (always 0). */
  int root() {
    return 0;
  }
  /** Returns the child id packed into the unit's high bits. */
  int child(int id) {
    // return _units.get(id).child();
    return _units.get(id) >>> 2;
  }
  /** Returns the next sibling id, or 0 when the hasSibling bit is clear. */
  int sibling(int id) {
    // return _units.get(id).hasSibling() ? (id + 1) : 0;
    return ((_units.get(id) & 1) == 1) ? (id + 1) : 0;
  }
  /** Returns the value stored in a leaf unit. */
  int value(int id) {
    // return _units.get(id).value();
    return _units.get(id) >>> 1;
  }
  /** A unit is a leaf when its label is the 0 terminator. */
  boolean isLeaf(int id) {
    return label(id) == 0;
  }
  byte label(int id) {
    return _labels.get(id);
  }
  /** True when the unit is shared by more than one key path. */
  boolean isIntersection(int id) {
    return _isIntersections.get(id);
  }
  /** Dense 0-based index of an intersection unit (rank over the bit vector). */
  int intersectionId(int id) {
    return _isIntersections.rank(id) - 1;
  }
  int numIntersections() {
    return _isIntersections.numOnes();
  }
  int size() {
    return _units.size();
  }
  /** Initializes the builder: root node/unit plus an empty hash table. */
  void init() {
    _table.resize(INITIAL_TABLE_SIZE, 0);
    appendNode();
    appendUnit();
    _numStates = 1;
    // sentinel label so the root never matches a real key byte
    _nodes.get(0).label = (byte)0xFF;
    _nodeStack.add(0);
  }
  /** Flushes all pending nodes, freezes unit 0 and releases scratch storage. */
  void finish() {
    flush(0);
    _units.set(0, _nodes.get(0).unit());
    _labels.set(0, _nodes.get(0).label);
    _nodes.clear();
    _table.clear();
    _nodeStack.clear();
    _recycleBin.clear();
    _isIntersections.build();
  }
  /**
   * Inserts a key with a non-negative value.
   * Keys must be inserted in strictly increasing (unsigned) byte order and
   * must not contain a 0 byte (0 is the internal terminator).
   * @throws IllegalArgumentException on negative value, empty key,
   *         embedded null byte, or out-of-order key
   */
  void insert(byte[] key, int value) {
    if (value < 0) {
      throw new IllegalArgumentException(
          "failed to insert key: negative value");
    }
    if (key.length == 0) {
      // fixed message typo: was "failed to inset key"
      throw new IllegalArgumentException(
          "failed to insert key: zero-length key");
    }
    int id = 0;
    int keyPos = 0;
    // Walk the path shared with the previously inserted key.
    for (; keyPos <= key.length; ++keyPos) {
      int childId = _nodes.get(id).child;
      if (childId == 0) {
        break;
      }
      byte keyLabel = keyPos < key.length ? key[keyPos] : 0;
      if (keyPos < key.length && keyLabel == 0) {
        throw new IllegalArgumentException(
            "failed to insert key: invalid null character");
      }
      // compare labels as unsigned bytes
      byte unitLabel = _nodes.get(childId).label;
      if ((keyLabel & 0xFF) < (unitLabel & 0xFF)) {
        throw new IllegalArgumentException(
            "failed to insert key: wrong key order");
      } else if ((keyLabel & 0xFF) > (unitLabel & 0xFF)) {
        // diverged: the previous key's subtree is complete, flush it
        _nodes.get(childId).hasSibling = true;
        flush(childId);
        break;
      }
      id = childId;
    }
    if (keyPos > key.length) {
      // the whole key (including terminator) matched an existing path
      return;
    }
    // Append the new suffix plus the implicit 0 terminator node.
    for (; keyPos <= key.length; ++keyPos) {
      byte keyLabel = (keyPos < key.length) ? key[keyPos] : 0;
      int childId = appendNode();
      DawgNode node = _nodes.get(id);
      DawgNode child = _nodes.get(childId);
      if (node.child == 0) {
        child.isState = true;
      }
      child.sibling = node.child;
      child.label = keyLabel;
      node.child = childId;
      _nodeStack.add(childId);
      id = childId;
    }
    _nodes.get(id).setValue(value);
  }
  /** Resets the builder to its pre-init state. */
  void clear() {
    _nodes.clear();
    _units.clear();
    _labels.clear();
    _isIntersections.clear();
    _table.clear();
    _nodeStack.clear();
    _recycleBin.clear();
    _numStates = 0;
  }
  /** Mutable construction-time node; packed into an int unit when flushed. */
  static class DawgNode {
    int child;        // child node id, or the value for a terminator node
    int sibling;      // next sibling node id (0 = none)
    byte label;       // edge label byte; 0 marks the key terminator
    boolean isState;  // true when this node starts a sibling list
    boolean hasSibling;
    void reset() {
      child = 0;
      sibling = 0;
      label = (byte)0;
      isState = false;
      hasSibling = false;
    }
    // terminator nodes reuse 'child' to hold the key's value
    int getValue() {
      return child;
    }
    void setValue(int value) {
      child = value;
    }
    /** Packs this node into its int unit representation. */
    int unit() {
      if (label == 0) {
        return (child << 1) | (hasSibling ? 1 : 0);
      }
      return (child << 2) | (isState ? 2 : 0) | (hasSibling ? 1 : 0);
    }
  }
  /**
   * Converts every pending node above 'id' on the stack into units,
   * merging sibling lists that are identical to already-registered states.
   */
  private void flush(int id) {
    while (_nodeStack.get(_nodeStack.size() - 1) != id) {
      int nodeId = _nodeStack.get(_nodeStack.size() - 1);
      _nodeStack.deleteLast();
      // grow the hash table before it gets past ~75% full
      if (_numStates >= _table.size() - (_table.size() >>> 2)) {
        expandTable();
      }
      int numSiblings = 0;
      for (int i = nodeId; i != 0; i = _nodes.get(i).sibling) {
        ++numSiblings;
      }
      // findNode returns {matchId, hashId}; matchId != 0 means an identical
      // sibling list already exists and can be shared
      int[] matchHashId = findNode(nodeId);
      int matchId = matchHashId[0];
      int hashId = matchHashId[1];
      if (matchId != 0) {
        _isIntersections.set(matchId, true);
      } else {
        // register a fresh run of units, written back-to-front
        int unitId = 0;
        for (int i = 0; i < numSiblings; ++i) {
          unitId = appendUnit();
        }
        for (int i = nodeId; i != 0; i = _nodes.get(i).sibling) {
          _units.set(unitId, _nodes.get(i).unit());
          _labels.set(unitId, _nodes.get(i).label);
          --unitId;
        }
        matchId = unitId + 1;
        _table.set(hashId, matchId);
        ++_numStates;
      }
      // recycle the now-flushed nodes
      for (int i = nodeId, next; i != 0; i = next) {
        next = _nodes.get(i).sibling;
        freeNode(i);
      }
      _nodes.get(_nodeStack.get(_nodeStack.size() - 1)).child = matchId;
    }
    _nodeStack.deleteLast();
  }
  /** Doubles the hash table and re-registers every state-head unit. */
  private void expandTable() {
    int tableSize = _table.size() << 1;
    _table.clear();
    _table.resize(tableSize, 0);
    for (int id = 1; id < _units.size(); ++id) {
      // if (_labels.get(i) == 0 || _units.get(id).isState)) {
      if (_labels.get(id) == 0 || (_units.get(id) & 2) == 2) {
        int[] ret = findUnit(id);
        int hashId = ret[1];
        _table.set(hashId, id);
      }
    }
  }
  /**
   * Linear-probes for a free slot for unit 'id'.
   * @return {0, hashId}; slot 0 of the result is unused but kept so the
   *         shape matches findNode
   */
  private int[] findUnit(int id) {
    int[] ret = new int[2];
    int hashId = hashUnit(id) % _table.size();
    for ( ; ; hashId = (hashId + 1) % _table.size()) {
      // Remainder adjustment: Java's % can yield a negative remainder.
      if (hashId < 0) {
        hashId += _table.size();
      }
      int unitId = _table.get(hashId);
      if (unitId == 0) {
        break;
      }
      // there must not be the same unit.
    }
    ret[1] = hashId;
    return ret;
  }
  /**
   * Looks up a registered state equal to nodeId's sibling list.
   * @return {matchId (0 if absent), hashId of the probed slot}
   */
  private int[] findNode(int nodeId) {
    int[] ret = new int[2];
    int hashId = hashNode(nodeId) % _table.size();
    for ( ; ; hashId = (hashId + 1) % _table.size()) {
      // Remainder adjustment: Java's % can yield a negative remainder.
      if (hashId < 0) {
        hashId += _table.size();
      }
      int unitId = _table.get(hashId);
      if (unitId == 0) {
        break;
      }
      if (areEqual(nodeId, unitId)) {
        ret[0] = unitId;
        ret[1] = hashId;
        return ret;
      }
    }
    ret[1] = hashId;
    return ret;
  }
  /** Compares a node's sibling list with a registered unit run. */
  private boolean areEqual(int nodeId, int unitId) {
    // first make sure the sibling counts match
    for (int i = _nodes.get(nodeId).sibling; i != 0;
        i = _nodes.get(i).sibling) {
      // if (_units.get(unitId).hasSibling() == false) {
      if ((_units.get(unitId) & 1) != 1) {
        return false;
      }
      ++unitId;
    }
    // if (_units.get(unitId).hasSibling() == true) {
    if ((_units.get(unitId) & 1) == 1) {
      return false;
    }
    // then compare unit/label pairs back-to-front
    for (int i = nodeId; i != 0; i = _nodes.get(i).sibling, --unitId) {
      // if (_nodes.get(i) != _units.get(unitId).unit() ||
      if (_nodes.get(i).unit() != _units.get(unitId) ||
          _nodes.get(i).label != _labels.get(unitId)) {
        return false;
      }
    }
    return true;
  }
  /** Hashes a registered unit run starting at 'id'. */
  private int hashUnit(int id) {
    int hashValue = 0;
    for ( ; id != 0; ++id) {
      // int unit = _units.get(id).unit();
      int unit = _units.get(id);
      byte label = _labels.get(id);
      hashValue ^= hash(((label & 0xFF) << 24) ^ unit);
      // if (_units.get(id).hasSibling() == false) {
      if ((_units.get(id) & 1) != 1) {
        break;
      }
    }
    return hashValue;
  }
  /** Hashes a node's sibling list; must agree with hashUnit for equal states. */
  private int hashNode(int id) {
    int hashValue = 0;
    for ( ; id != 0; id = _nodes.get(id).sibling) {
      int unit = _nodes.get(id).unit();
      byte label = _nodes.get(id).label;
      hashValue ^= hash(((label & 0xFF) << 24) ^ unit);
    }
    return hashValue;
  }
  /** Appends one empty unit/label/intersection-bit triple; returns its id. */
  private int appendUnit() {
    _isIntersections.append();
    _units.add(0);
    _labels.add((byte)0);
    return _isIntersections.size() - 1;
  }
  /** Allocates a node, reusing recycled ids when possible; returns its id. */
  private int appendNode() {
    int id;
    if (_recycleBin.empty()) {
      id = _nodes.size();
      _nodes.add(new DawgNode());
    } else {
      id = _recycleBin.get(_recycleBin.size() - 1);
      _nodes.get(id).reset();
      _recycleBin.deleteLast();
    }
    return id;
  }
  private void freeNode(int id) {
    _recycleBin.add(id);
  }
  /** Thomas Wang style 32-bit integer mix. */
  private static int hash(int key) {
    key = ~key + (key << 15); // key = (key << 15) - key - 1;
    key = key ^ (key >>> 12);
    key = key + (key << 2);
    key = key ^ (key >>> 4);
    key = key * 2057; // key = (key + (key << 3)) + (key << 11);
    key = key ^ (key >>> 16);
    return key;
  }
  private static final int INITIAL_TABLE_SIZE = 1 << 10;
  private ArrayList<DawgNode> _nodes = new ArrayList<DawgNode>();
  private AutoIntPool _units = new AutoIntPool();
  private AutoBytePool _labels = new AutoBytePool();
  private BitVector _isIntersections = new BitVector();
  private AutoIntPool _table = new AutoIntPool();
  private AutoIntPool _nodeStack = new AutoIntPool();
  private AutoIntPool _recycleBin = new AutoIntPool();
  private int _numStates;
}
| |
package com.google.android.gms.internal;
import android.os.Binder;
import android.os.IBinder;
import android.os.IInterface;
import android.os.Parcel;
import com.google.android.gms.common.api.Status;
import com.google.android.gms.common.api.StatusCreator;
// NOTE(review): decompiled AIDL-style stub/proxy pair for the Binder interface
// "com.google.android.gms.location.internal.ICopresenceCallbacks".
// Code is left byte-for-byte as decompiled; comments only.
public abstract interface gj extends IInterface
{
  // Callback carrying only a Status (wired to transaction code 3 below).
  public abstract void I(Status paramStatus);
  // Callback carrying an int plus a gh payload (transaction code 4).
  public abstract void a(int paramInt, gh paramgh);
  // Callback carrying an int plus a boolean flag (transaction code 6).
  public abstract void a(int paramInt, boolean paramBoolean);
  // Callback carrying an int plus a gh payload (transaction code 5).
  public abstract void b(int paramInt, gh paramgh);
  // Local stub: dispatches incoming Binder transactions to the methods above.
  public static abstract class a extends Binder
    implements gj
  {
    // asInterface equivalent: returns the local implementation when the binder
    // lives in this process, otherwise wraps it in the remote proxy below.
    public static gj J(IBinder paramIBinder)
    {
      if (paramIBinder == null)
        return null;
      IInterface localIInterface = paramIBinder.queryLocalInterface("com.google.android.gms.location.internal.ICopresenceCallbacks");
      if ((localIInterface != null) && ((localIInterface instanceof gj)))
        return (gj)localIInterface;
      return new a(paramIBinder);
    }
    public IBinder asBinder()
    {
      return this;
    }
    // Unmarshals each transaction's arguments from paramParcel1 and invokes the
    // matching callback; a nonzero preceding int marks a non-null parcelable.
    public boolean onTransact(int paramInt1, Parcel paramParcel1, Parcel paramParcel2, int paramInt2)
    {
      switch (paramInt1)
      {
      default:
        break;
      case 1598968902:
        // INTERFACE_TRANSACTION: reply with the interface descriptor.
        paramParcel2.writeString("com.google.android.gms.location.internal.ICopresenceCallbacks");
        return true;
      case 3:
        paramParcel1.enforceInterface("com.google.android.gms.location.internal.ICopresenceCallbacks");
        Status localStatus;
        if (paramParcel1.readInt() != 0)
          localStatus = Status.CREATOR.createFromParcel(paramParcel1);
        else
          localStatus = null;
        I(localStatus);
        paramParcel2.writeNoException();
        return true;
      case 4:
        paramParcel1.enforceInterface("com.google.android.gms.location.internal.ICopresenceCallbacks");
        int k = paramParcel1.readInt();
        gh localgh2;
        if (paramParcel1.readInt() != 0)
          localgh2 = gh.CREATOR.ah(paramParcel1);
        else
          localgh2 = null;
        a(k, localgh2);
        paramParcel2.writeNoException();
        return true;
      case 5:
        paramParcel1.enforceInterface("com.google.android.gms.location.internal.ICopresenceCallbacks");
        int j = paramParcel1.readInt();
        gh localgh1;
        if (paramParcel1.readInt() != 0)
          localgh1 = gh.CREATOR.ah(paramParcel1);
        else
          localgh1 = null;
        b(j, localgh1);
        paramParcel2.writeNoException();
        return true;
      case 6:
        paramParcel1.enforceInterface("com.google.android.gms.location.internal.ICopresenceCallbacks");
        int i = paramParcel1.readInt();
        boolean bool;
        if (paramParcel1.readInt() != 0)
          bool = true;
        else
          bool = false;
        a(i, bool);
        paramParcel2.writeNoException();
        return true;
      }
      return super.onTransact(paramInt1, paramParcel1, paramParcel2, paramInt2);
    }
    // Remote proxy: marshals each call into a Parcel and transacts on the
    // wrapped IBinder. Parcels are recycled in finally even on failure.
    static class a
      implements gj
    {
      private IBinder dU;
      a(IBinder paramIBinder)
      {
        this.dU = paramIBinder;
      }
      // Transaction 3: Status callback.
      public void I(Status paramStatus)
      {
        Parcel localParcel1 = Parcel.obtain();
        Parcel localParcel2 = Parcel.obtain();
        try
        {
          localParcel1.writeInterfaceToken("com.google.android.gms.location.internal.ICopresenceCallbacks");
          if (paramStatus != null)
          {
            // presence flag 1 followed by the flattened parcelable
            localParcel1.writeInt(1);
            paramStatus.writeToParcel(localParcel1, 0);
          }
          else
          {
            localParcel1.writeInt(0);
          }
          this.dU.transact(3, localParcel1, localParcel2, 0);
          localParcel2.readException();
          return;
        }
        finally
        {
          localParcel2.recycle();
          localParcel1.recycle();
        }
      }
      // Transaction 4: int + gh callback.
      public void a(int paramInt, gh paramgh)
      {
        Parcel localParcel1 = Parcel.obtain();
        Parcel localParcel2 = Parcel.obtain();
        try
        {
          localParcel1.writeInterfaceToken("com.google.android.gms.location.internal.ICopresenceCallbacks");
          localParcel1.writeInt(paramInt);
          if (paramgh != null)
          {
            localParcel1.writeInt(1);
            paramgh.writeToParcel(localParcel1, 0);
          }
          else
          {
            localParcel1.writeInt(0);
          }
          this.dU.transact(4, localParcel1, localParcel2, 0);
          localParcel2.readException();
          return;
        }
        finally
        {
          localParcel2.recycle();
          localParcel1.recycle();
        }
      }
      // Transaction 6: int + boolean callback. Decompiler artifact: the boolean
      // is written as int i (1 for true); the while(true)/trailing "int i = 0"
      // shape is how the decompiler rendered the original conditional.
      public void a(int paramInt, boolean paramBoolean)
      {
        Parcel localParcel1 = Parcel.obtain();
        Parcel localParcel2 = Parcel.obtain();
        while (true)
        {
          try
          {
            localParcel1.writeInterfaceToken("com.google.android.gms.location.internal.ICopresenceCallbacks");
            localParcel1.writeInt(paramInt);
            if (paramBoolean)
            {
              i = 1;
              localParcel1.writeInt(i);
              this.dU.transact(6, localParcel1, localParcel2, 0);
              localParcel2.readException();
              return;
            }
          }
          finally
          {
            localParcel2.recycle();
            localParcel1.recycle();
          }
          int i = 0;
        }
      }
      public IBinder asBinder()
      {
        return this.dU;
      }
      // Transaction 5: int + gh callback.
      public void b(int paramInt, gh paramgh)
      {
        Parcel localParcel1 = Parcel.obtain();
        Parcel localParcel2 = Parcel.obtain();
        try
        {
          localParcel1.writeInterfaceToken("com.google.android.gms.location.internal.ICopresenceCallbacks");
          localParcel1.writeInt(paramInt);
          if (paramgh != null)
          {
            localParcel1.writeInt(1);
            paramgh.writeToParcel(localParcel1, 0);
          }
          else
          {
            localParcel1.writeInt(0);
          }
          this.dU.transact(5, localParcel1, localParcel2, 0);
          localParcel2.readException();
          return;
        }
        finally
        {
          localParcel2.recycle();
          localParcel1.recycle();
        }
      }
    }
  }
}
/* Location: /Users/vikas/Documents/Mhacks_Real_app/classes-dex2jar.jar
* Qualified Name: com.google.android.gms.internal.gj
* JD-Core Version: 0.6.2
*/
| |
/*
* Copyright 2001-2009 James House
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
*/
/*
* Previously Copyright (c) 2001-2004 James House
*/
package org.quartz;
import java.util.Date;
/**
* <p>
* Describes the settings and capabilities of a given <code>{@link Scheduler}</code>
* instance.
* </p>
*
* @author James House
*/
public class SchedulerMetaData implements java.io.Serializable {

    /*
     * Data members.
     * Field names and order are preserved: this class is Serializable.
     */

    private String schedName;
    private String schedInst;
    private Class schedClass;
    private boolean isRemote;
    private boolean started;
    private boolean isInStandbyMode;
    private boolean shutdown;
    private Date startTime;
    private int numJobsExec;
    private Class jsClass;
    private boolean jsPersistent;
    private Class tpClass;
    private int tpSize;
    private String version;

    /**
     * Captures a point-in-time snapshot of a scheduler's settings and state.
     */
    public SchedulerMetaData(String schedName, String schedInst,
            Class schedClass, boolean isRemote, boolean started,
            boolean isInStandbyMode, boolean shutdown, Date startTime, int numJobsExec,
            Class jsClass, boolean jsPersistent, Class tpClass, int tpSize,
            String version) {
        this.schedName = schedName;
        this.schedInst = schedInst;
        this.schedClass = schedClass;
        this.isRemote = isRemote;
        this.started = started;
        this.isInStandbyMode = isInStandbyMode;
        this.shutdown = shutdown;
        this.startTime = startTime;
        this.numJobsExec = numJobsExec;
        this.jsClass = jsClass;
        this.jsPersistent = jsPersistent;
        this.tpClass = tpClass;
        this.tpSize = tpSize;
        this.version = version;
    }

    /**
     * Returns the name of the <code>Scheduler</code>.
     */
    public String getSchedulerName() {
        return this.schedName;
    }

    /**
     * Returns the instance Id of the <code>Scheduler</code>.
     */
    public String getSchedulerInstanceId() {
        return this.schedInst;
    }

    /**
     * Returns the class-name of the <code>Scheduler</code> instance.
     */
    public Class getSchedulerClass() {
        return this.schedClass;
    }

    /**
     * Returns the <code>Date</code> at which the Scheduler started running,
     * or null if the scheduler has not been started.
     *
     * @deprecated - Please use <code>{@link #getRunningSince}</code>.
     */
    public Date runningSince() {
        return this.startTime;
    }

    /**
     * Returns the <code>Date</code> at which the Scheduler started running,
     * or null if the scheduler has not been started.
     */
    public Date getRunningSince() {
        return this.startTime;
    }

    /**
     * Returns the number of jobs executed since the <code>Scheduler</code>
     * started.
     *
     * @deprecated - Please use <code>{@link #getNumberOfJobsExecuted()}</code>.
     */
    public int numJobsExecuted() {
        return this.numJobsExec;
    }

    /**
     * Returns the number of jobs executed since the <code>Scheduler</code>
     * started.
     */
    public int getNumberOfJobsExecuted() {
        return this.numJobsExec;
    }

    /**
     * Returns whether the <code>Scheduler</code> is being used remotely
     * (via RMI).
     */
    public boolean isSchedulerRemote() {
        return this.isRemote;
    }

    /**
     * Returns whether the scheduler has been started.
     * Note: may return <code>true</code> even while
     * <code>{@link #isInStandbyMode()}</code> also returns <code>true</code>.
     */
    public boolean isStarted() {
        return this.started;
    }

    /**
     * Reports whether the <code>Scheduler</code> is in standby mode.
     */
    public boolean isInStandbyMode() {
        return this.isInStandbyMode;
    }

    /**
     * Reports whether the <code>Scheduler</code> is paused.
     *
     * @deprecated Please use <code>{@link #isInStandbyMode()}</code>.
     *
     * @see #isInStandbyMode()
     */
    public boolean isPaused() {
        return isInStandbyMode();
    }

    /**
     * Reports whether the <code>Scheduler</code> has been shutdown.
     */
    public boolean isShutdown() {
        return this.shutdown;
    }

    /**
     * Returns the class-name of the <code>JobStore</code> instance in use.
     */
    public Class getJobStoreClass() {
        return this.jsClass;
    }

    /**
     * Returns whether the <code>Scheduler</code>'s <code>JobStore</code>
     * supports persistence.
     */
    public boolean jobStoreSupportsPersistence() {
        return this.jsPersistent;
    }

    /**
     * Returns the class-name of the <code>ThreadPool</code> instance in use.
     */
    public Class getThreadPoolClass() {
        return this.tpClass;
    }

    /**
     * Returns the number of threads in the <code>Scheduler</code>'s
     * <code>ThreadPool</code>.
     */
    public int getThreadPoolSize() {
        return this.tpSize;
    }

    /**
     * Returns the version of Quartz that is running.
     */
    public String getVersion() {
        return this.version;
    }

    /**
     * Returns the human-readable summary, falling back to a fixed string
     * when the summary cannot be produced.
     */
    public String toString() {
        try {
            return getSummary();
        } catch (SchedulerException se) {
            return "SchedulerMetaData: undeterminable.";
        }
    }

    /**
     * Returns a formatted (human readable) String describing all of the
     * <code>Scheduler</code>'s meta-data values: name, instance id, class,
     * locality, running state, job count, thread pool and job store.
     */
    public String getSummary() throws SchedulerException {
        StringBuffer str = new StringBuffer("Quartz Scheduler (v");
        str.append(getVersion()).append(") '").append(getSchedulerName());
        str.append("' with instanceId '").append(getSchedulerInstanceId()).append("'\n");
        str.append(" Scheduler class: '").append(getSchedulerClass().getName()).append("'");
        str.append(isSchedulerRemote() ? " - access via RMI." : " - running locally.");
        str.append("\n");
        if (isShutdown()) {
            str.append(" Scheduler has been SHUTDOWN.");
        } else {
            if (runningSince() == null) {
                str.append("NOT STARTED.");
            } else {
                str.append(" Running since: ").append(getRunningSince());
            }
            str.append("\n");
            str.append(isInStandbyMode()
                    ? " Currently in standby mode."
                    : " Not currently in standby mode.");
        }
        str.append("\n");
        str.append(" Number of jobs executed: ").append(getNumberOfJobsExecuted()).append("\n");
        str.append(" Using thread pool '").append(getThreadPoolClass().getName());
        str.append("' - with ").append(getThreadPoolSize()).append(" threads.").append("\n");
        str.append(" Using job-store '").append(getJobStoreClass().getName()).append("' - which ");
        str.append(jobStoreSupportsPersistence()
                ? "supports persistence."
                : "does not support persistence.");
        str.append("\n");
        return str.toString();
    }
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.formatting;
import com.intellij.diagnostic.LogMessageEx;
import com.intellij.lang.LanguageFormatting;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiFile;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.psi.formatter.FormattingDocumentModelImpl;
import com.intellij.psi.formatter.ReadOnlyBlockInformationProvider;
import com.intellij.psi.impl.DebugUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.Stack;
import gnu.trove.THashMap;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Allows to build {@link AbstractBlockWrapper formatting block wrappers} for the target {@link Block formatting blocks}.
* The main idea of block wrapping is to associate information about {@link WhiteSpace white space before block} with the block itself.
*/
class InitialInfoBuilder {
private static final Logger LOG = Logger.getInstance("#com.intellij.formatting.InitialInfoBuilder");
private final Map<AbstractBlockWrapper, Block> myResult = new THashMap<AbstractBlockWrapper, Block>();
private final FormattingDocumentModel myModel;
private final FormatTextRanges myAffectedRanges;
private final int myPositionOfInterest;
@NotNull
private final FormattingProgressCallback myProgressCallback;
private final FormatterTagHandler myFormatterTagHandler;
private final CommonCodeStyleSettings.IndentOptions myOptions;
private final Stack<State> myStates = new Stack<State>();
private WhiteSpace myCurrentWhiteSpace;
private CompositeBlockWrapper myRootBlockWrapper;
private LeafBlockWrapper myPreviousBlock;
private LeafBlockWrapper myFirstTokenBlock;
private LeafBlockWrapper myLastTokenBlock;
private SpacingImpl myCurrentSpaceProperty;
private ReadOnlyBlockInformationProvider myReadOnlyBlockInformationProvider;
private boolean myInsideFormatRestrictingTag;
private static final boolean INLINE_TABS_ENABLED = "true".equalsIgnoreCase(System.getProperty("inline.tabs.enabled"));
private Set<Alignment> myAlignmentsInsideRangeToModify = ContainerUtil.newHashSet();
private boolean myCollectAlignmentsInsideFormattingRange = false;
// NOTE(review): assignment order matters — myAffectedRanges must be set before
// the WhiteSpace is built because getStartOffset(rootBlock) reads it.
private InitialInfoBuilder(final Block rootBlock,
                           final FormattingDocumentModel model,
                           @Nullable final FormatTextRanges affectedRanges,
                           @NotNull CodeStyleSettings settings,
                           final CommonCodeStyleSettings.IndentOptions options,
                           final int positionOfInterest,
                           @NotNull FormattingProgressCallback progressCallback)
{
  myModel = model;
  myAffectedRanges = affectedRanges;
  myProgressCallback = progressCallback;
  // leading whitespace starts at the earliest offset covered by the root
  // block or any affected range
  myCurrentWhiteSpace = new WhiteSpace(getStartOffset(rootBlock), true);
  myOptions = options;
  myPositionOfInterest = positionOfInterest;
  myInsideFormatRestrictingTag = false;
  myFormatterTagHandler = new FormatterTagHandler(settings);
}
protected static InitialInfoBuilder prepareToBuildBlocksSequentially(Block root,
FormattingDocumentModel model,
@Nullable final FormatTextRanges affectedRanges,
@NotNull CodeStyleSettings settings,
final CommonCodeStyleSettings.IndentOptions options,
int interestingOffset,
@NotNull FormattingProgressCallback progressCallback)
{
InitialInfoBuilder builder = new InitialInfoBuilder(root, model, affectedRanges, settings, options, interestingOffset, progressCallback);
builder.buildFrom(root, 0, null, null, null, true);
return builder;
}
private int getStartOffset(@NotNull Block rootBlock) {
int minOffset = rootBlock.getTextRange().getStartOffset();
if (myAffectedRanges != null) {
for (FormatTextRanges.FormatTextRange range : myAffectedRanges.getRanges()) {
if (range.getStartOffset() < minOffset) minOffset = range.getStartOffset();
}
}
return minOffset;
}
int getEndOffset() {
int maxDocOffset = myModel.getTextLength();
int maxOffset = myRootBlockWrapper != null ? myRootBlockWrapper.getEndOffset() : 0;
if (myAffectedRanges != null) {
for (FormatTextRanges.FormatTextRange range : myAffectedRanges.getRanges()) {
if (range.getTextRange().getEndOffset() > maxOffset) maxOffset = range.getTextRange().getEndOffset();
}
}
return maxOffset < maxDocOffset ? maxOffset : maxDocOffset;
}
/**
* Asks current builder to wrap one more remaining {@link Block code block} (if any).
*
* @return <code>true</code> if all blocks are wrapped; <code>false</code> otherwise
*/
public boolean iteration() {
if (myStates.isEmpty()) {
return true;
}
State state = myStates.peek();
doIteration(state);
return myStates.isEmpty();
}
/**
* Wraps given root block and all of its descendants and returns root block wrapper.
* <p/>
* This method performs necessary infrastructure actions and delegates actual processing to
* {@link #buildCompositeBlock(Block, CompositeBlockWrapper, int, WrapImpl, boolean)} and
* {@link #processSimpleBlock(Block, CompositeBlockWrapper, boolean, int, Block)}.
*
* @param rootBlock block to wrap
* @param index index of the current block at its parent block. <code>-1</code> may be used here if we don't
* have information about parent block
* @param parent parent block wrapper. <code>null</code> may be used here we no parent block wrapper exists
* @param currentWrapParent parent wrap if any; <code>null</code> otherwise
* @param parentBlock parent block of the block to wrap
* @param rootBlockIsRightBlock flag that shows if target block is the right-most block
* @return wrapper for the given <code>'rootBlock'</code>
*/
private AbstractBlockWrapper buildFrom(final Block rootBlock,
final int index,
@Nullable final CompositeBlockWrapper parent,
@Nullable WrapImpl currentWrapParent,
@Nullable final Block parentBlock,
boolean rootBlockIsRightBlock)
{
final WrapImpl wrap = (WrapImpl)rootBlock.getWrap();
if (wrap != null) {
wrap.registerParent(currentWrapParent);
currentWrapParent = wrap;
}
TextRange textRange = rootBlock.getTextRange();
final int blockStartOffset = textRange.getStartOffset();
if (parent != null) {
if (textRange.getStartOffset() < parent.getStartOffset()) {
assertInvalidRanges(
textRange.getStartOffset(),
parent.getStartOffset(),
myModel,
"child block start is less than parent block start"
);
}
if (textRange.getEndOffset() > parent.getEndOffset()) {
assertInvalidRanges(
textRange.getEndOffset(),
parent.getEndOffset(),
myModel,
"child block end is after parent block end"
);
}
}
myCurrentWhiteSpace.append(blockStartOffset, myModel, myOptions);
if (myCollectAlignmentsInsideFormattingRange && rootBlock.getAlignment() != null
&& isAffectedByFormatting(rootBlock) && !myInsideFormatRestrictingTag)
{
myAlignmentsInsideRangeToModify.add(rootBlock.getAlignment());
}
ReadOnlyBlockInformationProvider previousProvider = myReadOnlyBlockInformationProvider;
try {
if (rootBlock instanceof ReadOnlyBlockInformationProvider) {
myReadOnlyBlockInformationProvider = (ReadOnlyBlockInformationProvider)rootBlock;
}
if (!myCollectAlignmentsInsideFormattingRange && !isInsideFormattingRanges(rootBlock, rootBlockIsRightBlock)) {
return processSimpleBlock(rootBlock, parent, true, index, parentBlock);
}
final List<Block> subBlocks = rootBlock.getSubBlocks();
if (subBlocks.isEmpty() || myReadOnlyBlockInformationProvider != null
&& myReadOnlyBlockInformationProvider.isReadOnly(rootBlock)) {
final AbstractBlockWrapper wrapper = processSimpleBlock(rootBlock, parent, false, index, parentBlock);
if (!subBlocks.isEmpty()) {
wrapper.setIndent((IndentImpl)subBlocks.get(0).getIndent());
}
return wrapper;
}
return buildCompositeBlock(rootBlock, parent, index, currentWrapParent, rootBlockIsRightBlock);
}
finally {
myReadOnlyBlockInformationProvider = previousProvider;
}
}
private CompositeBlockWrapper buildCompositeBlock(final Block rootBlock,
@Nullable final CompositeBlockWrapper parent,
final int index,
@Nullable final WrapImpl currentWrapParent,
boolean rootBlockIsRightBlock)
{
final CompositeBlockWrapper wrappedRootBlock = new CompositeBlockWrapper(rootBlock, myCurrentWhiteSpace, parent);
if (index == 0) {
wrappedRootBlock.arrangeParentTextRange();
}
if (myRootBlockWrapper == null) {
myRootBlockWrapper = wrappedRootBlock;
myRootBlockWrapper.setIndent((IndentImpl)Indent.getNoneIndent());
}
boolean blocksMayBeOfInterest = false;
if (myPositionOfInterest != -1) {
myResult.put(wrappedRootBlock, rootBlock);
blocksMayBeOfInterest = true;
}
final boolean blocksAreReadOnly = rootBlock instanceof ReadOnlyBlockContainer || blocksMayBeOfInterest;
State state = new State(rootBlock, wrappedRootBlock, currentWrapParent, blocksAreReadOnly, rootBlockIsRightBlock);
myStates.push(state);
return wrappedRootBlock;
}
private void doIteration(@NotNull State state) {
List<Block> subBlocks = state.parentBlock.getSubBlocks();
final int subBlocksCount = subBlocks.size();
int childBlockIndex = state.getIndexOfChildBlockToProcess();
final Block block = subBlocks.get(childBlockIndex);
if (state.previousBlock != null || (myCurrentWhiteSpace != null && myCurrentWhiteSpace.isIsFirstWhiteSpace())) {
myCurrentSpaceProperty = (SpacingImpl)state.parentBlock.getSpacing(state.previousBlock, block);
}
boolean childBlockIsRightBlock = false;
if (childBlockIndex == subBlocksCount - 1 && state.parentBlockIsRightBlock) {
childBlockIsRightBlock = true;
}
final AbstractBlockWrapper wrapper = buildFrom(
block, childBlockIndex, state.wrappedBlock, state.parentBlockWrap, state.parentBlock, childBlockIsRightBlock
);
if (wrapper.getIndent() == null) {
wrapper.setIndent((IndentImpl)block.getIndent());
}
if (!state.readOnly) {
try {
subBlocks.set(childBlockIndex, null); // to prevent extra strong refs during model building
} catch (Throwable ex) {
// read-only blocks
}
}
if (state.childBlockProcessed(block, wrapper)) {
while (!myStates.isEmpty() && myStates.peek().isProcessed()) {
myStates.pop();
}
}
}
private void setDefaultIndents(final List<AbstractBlockWrapper> list) {
if (!list.isEmpty()) {
for (AbstractBlockWrapper wrapper : list) {
if (wrapper.getIndent() == null) {
wrapper.setIndent((IndentImpl)Indent.getContinuationWithoutFirstIndent(myOptions.USE_RELATIVE_INDENTS));
}
}
}
}
private AbstractBlockWrapper processSimpleBlock(final Block rootBlock,
@Nullable final CompositeBlockWrapper parent,
final boolean readOnly,
final int index,
@Nullable Block parentBlock)
{
LeafBlockWrapper result = doProcessSimpleBlock(rootBlock, parent, readOnly, index, parentBlock);
myProgressCallback.afterWrappingBlock(result);
return result;
}
private LeafBlockWrapper doProcessSimpleBlock(final Block rootBlock,
@Nullable final CompositeBlockWrapper parent,
final boolean readOnly,
final int index,
@Nullable Block parentBlock)
{
if (!INLINE_TABS_ENABLED && !myCurrentWhiteSpace.containsLineFeeds()) {
myCurrentWhiteSpace.setForceSkipTabulationsUsage(true);
}
final LeafBlockWrapper info =
new LeafBlockWrapper(rootBlock, parent, myCurrentWhiteSpace, myModel, myOptions, myPreviousBlock, readOnly);
if (index == 0) {
info.arrangeParentTextRange();
}
switch (myFormatterTagHandler.getFormatterTag(rootBlock)) {
case ON:
myInsideFormatRestrictingTag = false;
break;
case OFF:
myInsideFormatRestrictingTag = true;
break;
case NONE:
break;
}
TextRange textRange = rootBlock.getTextRange();
if (textRange.getLength() == 0) {
assertInvalidRanges(
textRange.getStartOffset(),
textRange.getEndOffset(),
myModel,
"empty block"
);
}
if (myPreviousBlock != null) {
myPreviousBlock.setNextBlock(info);
}
if (myFirstTokenBlock == null) {
myFirstTokenBlock = info;
}
myLastTokenBlock = info;
if (currentWhiteSpaceIsReadOnly()) {
myCurrentWhiteSpace.setReadOnly(true);
}
if (myCurrentSpaceProperty != null) {
myCurrentWhiteSpace.setIsSafe(myCurrentSpaceProperty.isSafe());
myCurrentWhiteSpace.setKeepFirstColumn(myCurrentSpaceProperty.shouldKeepFirstColumn());
}
if (info.isEndOfCodeBlock()) {
myCurrentWhiteSpace.setBeforeCodeBlockEnd(true);
}
info.setSpaceProperty(myCurrentSpaceProperty);
myCurrentWhiteSpace = new WhiteSpace(textRange.getEndOffset(), false);
if (myInsideFormatRestrictingTag) myCurrentWhiteSpace.setReadOnly(true);
myPreviousBlock = info;
if (myPositionOfInterest != -1 && (textRange.contains(myPositionOfInterest) || textRange.getEndOffset() == myPositionOfInterest)) {
myResult.put(info, rootBlock);
if (parent != null) myResult.put(parent, parentBlock);
}
return info;
}
private boolean currentWhiteSpaceIsReadOnly() {
if (myCurrentSpaceProperty != null && myCurrentSpaceProperty.isReadOnly()) {
return true;
}
else {
if (myAffectedRanges == null) return false;
return myAffectedRanges.isWhitespaceReadOnly(myCurrentWhiteSpace.getTextRange());
}
}
private boolean isAffectedByFormatting(final Block block) {
if (myAffectedRanges == null) return true;
List<FormatTextRanges.FormatTextRange> allRanges = myAffectedRanges.getRanges();
Document document = myModel.getDocument();
int docLength = document.getTextLength();
for (FormatTextRanges.FormatTextRange range : allRanges) {
int startOffset = range.getStartOffset();
if (startOffset >= docLength) continue;
int lineNumber = document.getLineNumber(startOffset);
int lineEndOffset = document.getLineEndOffset(lineNumber);
int blockStartOffset = block.getTextRange().getStartOffset();
if (blockStartOffset >= startOffset && blockStartOffset < lineEndOffset) {
return true;
}
}
return false;
}
private boolean isInsideFormattingRanges(final Block block, boolean rootIsRightBlock) {
if (myAffectedRanges == null) return true;
return !myAffectedRanges.isReadOnly(block.getTextRange(), rootIsRightBlock);
}
public Map<AbstractBlockWrapper, Block> getBlockToInfoMap() {
return myResult;
}
public CompositeBlockWrapper getRootBlockWrapper() {
return myRootBlockWrapper;
}
public LeafBlockWrapper getFirstTokenBlock() {
return myFirstTokenBlock;
}
public LeafBlockWrapper getLastTokenBlock() {
return myLastTokenBlock;
}
public static void assertInvalidRanges(final int startOffset, final int newEndOffset, FormattingDocumentModel model, String message) {
@NonNls final StringBuilder buffer = new StringBuilder();
buffer.append("Invalid formatting blocks:").append(message).append("\n");
buffer.append("Start offset:");
buffer.append(startOffset);
buffer.append(" end offset:");
buffer.append(newEndOffset);
buffer.append("\n");
int minOffset = Math.max(Math.min(startOffset, newEndOffset) - 20, 0);
int maxOffset = Math.min(Math.max(startOffset, newEndOffset) + 20, model.getTextLength());
buffer.append("Affected text fragment:[").append(minOffset).append(",").append(maxOffset).append("] - '")
.append(model.getText(new TextRange(minOffset, maxOffset))).append("'\n");
final StringBuilder messageBuffer = new StringBuilder();
messageBuffer.append("Invalid ranges during formatting");
if (model instanceof FormattingDocumentModelImpl) {
messageBuffer.append(" in ").append(((FormattingDocumentModelImpl)model).getFile().getLanguage());
}
buffer.append("File text:(").append(model.getTextLength()).append(")\n'");
buffer.append(model.getText(new TextRange(0, model.getTextLength())).toString());
buffer.append("'\n");
buffer.append("model (").append(model.getClass()).append("): ").append(model);
Throwable currentThrowable = new Throwable();
if (model instanceof FormattingDocumentModelImpl) {
final FormattingDocumentModelImpl modelImpl = (FormattingDocumentModelImpl)model;
buffer.append("Psi Tree:\n");
final PsiFile file = modelImpl.getFile();
final List<PsiFile> roots = file.getViewProvider().getAllFiles();
for (PsiFile root : roots) {
buffer.append("Root ");
DebugUtil.treeToBuffer(buffer, root.getNode(), 0, false, true, true, true);
}
buffer.append('\n');
currentThrowable = makeLanguageStackTrace(currentThrowable, file);
}
LogMessageEx.error(LOG, messageBuffer.toString(), currentThrowable, buffer.toString());
}
private static Throwable makeLanguageStackTrace(@NotNull Throwable currentThrowable, @NotNull PsiFile file) {
Throwable langThrowable = new Throwable();
FormattingModelBuilder builder = LanguageFormatting.INSTANCE.forContext(file);
if (builder == null) return currentThrowable;
Class builderClass = builder.getClass();
Class declaringClass = builderClass.getDeclaringClass();
String guessedFileName = (declaringClass == null ? builderClass.getSimpleName() : declaringClass.getSimpleName()) + ".java";
StackTraceElement ste = new StackTraceElement(builder.getClass().getName(), "createModel", guessedFileName, 1);
StackTraceElement[] originalStackTrace = currentThrowable.getStackTrace();
StackTraceElement[] modifiedStackTrace = new StackTraceElement[originalStackTrace.length + 1];
System.arraycopy(originalStackTrace, 0, modifiedStackTrace, 1, originalStackTrace.length);
modifiedStackTrace[0] = ste;
langThrowable.setStackTrace(modifiedStackTrace);
return langThrowable;
}
public Set<Alignment> getAlignmentsInsideRangeToModify() {
return myAlignmentsInsideRangeToModify;
}
public void setCollectAlignmentsInsideFormattingRange(boolean value) {
myCollectAlignmentsInsideFormattingRange = value;
}
/**
* We want to wrap {@link Block code blocks} sequentially, hence, need to store a processing state and continue from the point
* where we stopped the processing last time.
* <p/>
* Current class defines common contract for the state required for such a processing.
*/
private class State {
public final Block parentBlock;
public final WrapImpl parentBlockWrap;
public final CompositeBlockWrapper wrappedBlock;
public final boolean readOnly;
public final boolean parentBlockIsRightBlock;
public Block previousBlock;
private final List<AbstractBlockWrapper> myWrappedChildren = new ArrayList<AbstractBlockWrapper>();
State(@NotNull Block parentBlock, @NotNull CompositeBlockWrapper wrappedBlock, @Nullable WrapImpl parentBlockWrap,
boolean readOnly, boolean parentBlockIsRightBlock)
{
this.parentBlock = parentBlock;
this.wrappedBlock = wrappedBlock;
this.parentBlockWrap = parentBlockWrap;
this.readOnly = readOnly;
this.parentBlockIsRightBlock = parentBlockIsRightBlock;
}
/**
* @return index of the first non-processed {@link Block#getSubBlocks() child block} of the {@link #parentBlock target block}
*/
public int getIndexOfChildBlockToProcess() {
return myWrappedChildren.size();
}
/**
* Notifies current state that child block is processed.
*
* @return <code>true</code> if all child blocks of the block denoted by the current state are processed;
* <code>false</code> otherwise
*/
public boolean childBlockProcessed(@NotNull Block child, @NotNull AbstractBlockWrapper wrappedChild) {
myWrappedChildren.add(wrappedChild);
previousBlock = child;
int subBlocksNumber = parentBlock.getSubBlocks().size();
if (myWrappedChildren.size() > subBlocksNumber) {
return true;
}
else if (myWrappedChildren.size() == subBlocksNumber) {
setDefaultIndents(myWrappedChildren);
wrappedBlock.setChildren(myWrappedChildren);
return true;
}
return false;
}
/**
* @return <code>true</code> if current state is processed (basically, if all {@link Block#getSubBlocks() child blocks})
* of the {@link #parentBlock target block} are processed; <code>false</code> otherwise
*/
public boolean isProcessed() {
return myWrappedChildren.size() == parentBlock.getSubBlocks().size();
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.discovery.zen.ping.multicast;
import org.apache.lucene.util.Constants;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.io.stream.*;
import org.elasticsearch.common.network.MulticastChannel;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.discovery.zen.ping.PingContextProvider;
import org.elasticsearch.discovery.zen.ping.ZenPing;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;
import java.io.IOException;
import java.net.SocketAddress;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import static org.elasticsearch.cluster.node.DiscoveryNode.readNode;
import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap;
/**
*
*/
public class MulticastZenPing extends AbstractLifecycleComponent<ZenPing> implements ZenPing {
    public static final String ACTION_NAME = "internal:discovery/zen/multicast";

    // Magic prefix prepended to internal node-to-node ping datagrams so the receiver can tell
    // them apart from external pings, which are xContent documents (see Receiver.onMessage).
    private static final byte[] INTERNAL_HEADER = new byte[]{1, 9, 8, 4};

    // Multicast socket configuration, resolved from component settings in the constructor.
    private final String address;
    private final int port;
    private final String group;
    private final int bufferSize;
    private final int ttl;

    private final ThreadPool threadPool;
    private final TransportService transportService;
    private final ClusterName clusterName;
    private final NetworkService networkService;
    private final Version version;

    // Supplies the current view of the cluster (local node, master, ...); must be set before start.
    private volatile PingContextProvider contextProvider;

    // When false this node neither sends pings nor answers internal node ping requests.
    private final boolean pingEnabled;

    // Null when multicast failed to start or the component is stopped (see doStart/doStop).
    private volatile MulticastChannel multicastChannel;

    // One id per ping round, mapping to the responses collected so far for that round.
    private final AtomicInteger pingIdGenerator = new AtomicInteger();
    private final Map<Integer, PingCollection> receivedResponses = newConcurrentMap();
    /**
     * Creates a multicast ping with empty settings and a default {@link NetworkService}.
     */
    public MulticastZenPing(ThreadPool threadPool, TransportService transportService, ClusterName clusterName, Version version) {
        this(EMPTY_SETTINGS, threadPool, transportService, clusterName, new NetworkService(EMPTY_SETTINGS), version);
    }
    /**
     * Creates a multicast ping, reading the socket configuration from the component settings
     * and registering the transport handler that receives unicast ping responses.
     */
    public MulticastZenPing(Settings settings, ThreadPool threadPool, TransportService transportService, ClusterName clusterName, NetworkService networkService, Version version) {
        super(settings);
        this.threadPool = threadPool;
        this.transportService = transportService;
        this.clusterName = clusterName;
        this.networkService = networkService;
        this.version = version;

        // Defaults: port 54328, group 224.2.2.4, 2 KB receive buffer, TTL 3, pinging enabled.
        this.address = componentSettings.get("address");
        this.port = componentSettings.getAsInt("port", 54328);
        this.group = componentSettings.get("group", "224.2.2.4");
        this.bufferSize = componentSettings.getAsInt("buffer_size", 2048);
        this.ttl = componentSettings.getAsInt("ttl", 3);

        this.pingEnabled = componentSettings.getAsBoolean("ping.enabled", true);

        logger.debug("using group [{}], with port [{}], ttl [{}], and address [{}]", group, port, ttl, address);

        // Responses to our multicast pings come back as unicast transport requests on this action.
        this.transportService.registerHandler(ACTION_NAME, new MulticastPingResponseRequestHandler());
    }
    @Override
    public void setPingContextProvider(PingContextProvider nodesProvider) {
        // The provider may only be injected before the component is started.
        if (lifecycle.started()) {
            throw new ElasticsearchIllegalStateException("Can't set nodes provider when started");
        }
        this.contextProvider = nodesProvider;
    }
    @Override
    protected void doStart() throws ElasticsearchException {
        try {
            // we know OSX has bugs in the JVM when creating multiple instances of multicast sockets
            // causing for "socket close" exceptions when receive and/or crashes
            boolean shared = componentSettings.getAsBoolean("shared", Constants.MAC_OS_X);
            multicastChannel = MulticastChannel.getChannel(nodeName(), shared,
                    new MulticastChannel.Config(port, group, bufferSize, ttl, networkService.resolvePublishHostAddress(address)),
                    new Receiver());
        } catch (Throwable t) {
            // Failing to open the socket is not fatal: multicastChannel stays null and
            // ping()/handleNodePingRequest() short-circuit, effectively disabling multicast discovery.
            String msg = "multicast failed to start [{}], disabling. Consider using IPv4 only (by defining env. variable `ES_USE_IPV4`)";
            if (logger.isDebugEnabled()) {
                logger.debug(msg, t, ExceptionsHelper.detailedMessage(t));
            } else {
                logger.info(msg, ExceptionsHelper.detailedMessage(t));
            }
        }
    }
@Override
protected void doStop() throws ElasticsearchException {
if (multicastChannel != null) {
multicastChannel.close();
multicastChannel = null;
}
}
    @Override
    protected void doClose() throws ElasticsearchException {
        // Nothing to do: the only resource (the multicast channel) is released in doStop().
    }
    /**
     * Sends an asynchronous ping round and blocks until the listener is notified with the
     * collected responses.
     *
     * @param timeout how long the {@link #ping(PingListener, TimeValue)} round runs
     * @return the collected responses, or {@link PingResponse#EMPTY} when the ping could not be
     *         scheduled or this thread was interrupted while waiting
     */
    public PingResponse[] pingAndWait(TimeValue timeout) {
        final AtomicReference<PingResponse[]> response = new AtomicReference<>();
        final CountDownLatch latch = new CountDownLatch(1);
        try {
            ping(new PingListener() {
                @Override
                public void onPing(PingResponse[] pings) {
                    response.set(pings);
                    latch.countDown();
                }
            }, timeout);
        } catch (EsRejectedExecutionException ex) {
            // The thread pool rejected the work (e.g. during shutdown); nothing will ever
            // count the latch down, so bail out immediately.
            logger.debug("Ping execution rejected", ex);
            return PingResponse.EMPTY;
        }
        try {
            latch.await();
            return response.get();
        } catch (InterruptedException e) {
            // Restore the interrupt flag for callers further up the stack.
            Thread.currentThread().interrupt();
            return PingResponse.EMPTY;
        }
    }
    /**
     * Runs one ping round: sends a multicast ping immediately, again at timeout/2 and at
     * timeout (3 requests total), then finalizes at 1.25 * timeout — or earlier, once every
     * node that had responded by the last ping has responded again (see
     * {@link FinalizingPingCollection}). The listener is always notified exactly once.
     */
    @Override
    public void ping(final PingListener listener, final TimeValue timeout) {
        if (!pingEnabled || multicastChannel == null) {
            // Multicast is disabled or failed to start: report an empty result asynchronously
            // so the listener contract (callback, not inline return) still holds.
            threadPool.generic().execute(new Runnable() {
                @Override
                public void run() {
                    listener.onPing(PingResponse.EMPTY);
                }
            });
            return;
        }
        final int id = pingIdGenerator.incrementAndGet();
        try {
            receivedResponses.put(id, new PingCollection());
            sendPingRequest(id);
            // try and send another ping request halfway through (just in case someone woke up during it...)
            // this can be a good trade-off to nailing the initial lookup or un-delivered messages
            threadPool.schedule(TimeValue.timeValueMillis(timeout.millis() / 2), ThreadPool.Names.GENERIC, new AbstractRunnable() {
                @Override
                public void onFailure(Throwable t) {
                    logger.warn("[{}] failed to send second ping request", t, id);
                    finalizePingCycle(id, listener);
                }

                @Override
                public void doRun() {
                    sendPingRequest(id);
                    threadPool.schedule(TimeValue.timeValueMillis(timeout.millis() / 2), ThreadPool.Names.GENERIC, new AbstractRunnable() {
                        @Override
                        public void onFailure(Throwable t) {
                            logger.warn("[{}] failed to send third ping request", t, id);
                            finalizePingCycle(id, listener);
                        }

                        @Override
                        public void doRun() {
                            // make one last ping, but finalize as soon as all nodes have responded or a timeout has past
                            PingCollection collection = receivedResponses.get(id);
                            FinalizingPingCollection finalizingPingCollection = new FinalizingPingCollection(id, collection, collection.size(), listener);
                            receivedResponses.put(id, finalizingPingCollection);
                            logger.trace("[{}] sending last pings", id);
                            sendPingRequest(id);
                            threadPool.schedule(TimeValue.timeValueMillis(timeout.millis() / 4), ThreadPool.Names.GENERIC, new AbstractRunnable() {
                                @Override
                                public void onFailure(Throwable t) {
                                    logger.warn("[{}] failed to finalize ping", t, id);
                                }

                                @Override
                                protected void doRun() throws Exception {
                                    finalizePingCycle(id, listener);
                                }
                            });
                        }
                    });
                }
            });
        } catch (Exception e) {
            // Scheduling failed; finalize now so the listener is still notified.
            logger.warn("failed to ping", e);
            finalizePingCycle(id, listener);
        }
    }
/**
* takes all pings collected for a given id and pass them to the given listener.
* this method is safe to call multiple times as is guaranteed to only finalize once.
*/
private void finalizePingCycle(int id, final PingListener listener) {
PingCollection responses = receivedResponses.remove(id);
if (responses != null) {
listener.onPing(responses.toArray());
}
}
    /**
     * Serializes and multicasts one internal ping request.
     * <p/>
     * Wire format (after the {@link #INTERNAL_HEADER} magic bytes): version, ping id,
     * cluster name, local node — this order must match {@code Receiver.onMessage}.
     */
    private void sendPingRequest(int id) {
        try {
            BytesStreamOutput bStream = new BytesStreamOutput();
            StreamOutput out = new HandlesStreamOutput(bStream);
            out.writeBytes(INTERNAL_HEADER);
            // TODO: change to min_required version!
            Version.writeVersion(version, out);
            out.writeInt(id);
            clusterName.writeTo(out);
            contextProvider.nodes().localNode().writeTo(out);
            out.close();
            multicastChannel.send(bStream.bytes());
            if (logger.isTraceEnabled()) {
                logger.trace("[{}] sending ping request", id);
            }
        } catch (Exception e) {
            // Send failures are expected while stopping; otherwise log (best-effort protocol,
            // the round still finalizes via the scheduled tasks in ping()).
            if (lifecycle.stoppedOrClosed()) {
                return;
            }
            if (logger.isDebugEnabled()) {
                logger.debug("failed to send multicast ping request", e);
            } else {
                logger.warn("failed to send multicast ping request: {}", ExceptionsHelper.detailedMessage(e));
            }
        }
    }
    /**
     * A {@link PingCollection} decorator installed for the last leg of a ping round: it counts
     * newly added pings and finalizes the round early once as many nodes have responded as had
     * responded before the last ping was sent ({@code expectedResponses}).
     * <p/>
     * All mutators are {@code synchronized} because pings arrive from network threads.
     */
    class FinalizingPingCollection extends PingCollection {
        final private PingCollection internalCollection;
        final private int expectedResponses;
        final private AtomicInteger responseCount;
        final private PingListener listener;
        final private int id;

        public FinalizingPingCollection(int id, PingCollection internalCollection, int expectedResponses, PingListener listener) {
            this.id = id;
            this.internalCollection = internalCollection;
            this.expectedResponses = expectedResponses;
            this.responseCount = new AtomicInteger();
            this.listener = listener;
        }

        @Override
        public synchronized boolean addPing(PingResponse ping) {
            if (internalCollection.addPing(ping)) {
                // Only count pings that were actually new/updated in the backing collection.
                if (responseCount.incrementAndGet() >= expectedResponses) {
                    logger.trace("[{}] all nodes responded", id);
                    finish();
                }
                return true;
            }
            return false;
        }

        @Override
        public synchronized void addPings(PingResponse[] pings) {
            // Bulk adds do not advance responseCount; presumably only addPing drives early
            // finalization — NOTE(review): confirm against callers.
            internalCollection.addPings(pings);
        }

        @Override
        public synchronized PingResponse[] toArray() {
            return internalCollection.toArray();
        }

        void finish() {
            // spawn another thread as we may be running on a network thread
            threadPool.generic().execute(new AbstractRunnable() {
                @Override
                public void onFailure(Throwable t) {
                    logger.error("failed to call ping listener", t);
                }

                @Override
                protected void doRun() throws Exception {
                    finalizePingCycle(id, listener);
                }
            });
        }
    }
class MulticastPingResponseRequestHandler extends BaseTransportRequestHandler<MulticastPingResponse> {
@Override
public MulticastPingResponse newInstance() {
return new MulticastPingResponse();
}
@Override
public void messageReceived(MulticastPingResponse request, TransportChannel channel) throws Exception {
if (logger.isTraceEnabled()) {
logger.trace("[{}] received {}", request.id, request.pingResponse);
}
PingCollection responses = receivedResponses.get(request.id);
if (responses == null) {
logger.warn("received ping response {} with no matching id [{}]", request.pingResponse, request.id);
} else {
responses.addPing(request.pingResponse);
}
channel.sendResponse(TransportResponse.Empty.INSTANCE);
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
}
    /**
     * Transport message carrying one ping response back to the pinging node.
     * {@code readFrom} and {@code writeTo} must serialize the fields in the same order.
     */
    static class MulticastPingResponse extends TransportRequest {

        // Id of the ping round this response belongs to.
        int id;

        PingResponse pingResponse;

        MulticastPingResponse() {
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            id = in.readInt();
            pingResponse = PingResponse.readPingResponse(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeInt(id);
            pingResponse.writeTo(out);
        }
    }
private class Receiver implements MulticastChannel.Listener {
        /**
         * Handles one multicast datagram. Internal pings (recognized by the
         * {@link #INTERNAL_HEADER} magic prefix) are decoded with the binary wire format
         * written by {@code sendPingRequest}; anything else is treated as an external
         * xContent ping document.
         */
        @Override
        public void onMessage(BytesReference data, SocketAddress address) {
            int id = -1;
            DiscoveryNode requestingNodeX = null;
            ClusterName clusterName = null;

            Map<String, Object> externalPingData = null;
            XContentType xContentType = null;

            try {
                boolean internal = false;
                if (data.length() > 4) {
                    // Compare the leading bytes against the internal magic header.
                    int counter = 0;
                    for (; counter < INTERNAL_HEADER.length; counter++) {
                        if (data.get(counter) != INTERNAL_HEADER[counter]) {
                            break;
                        }
                    }
                    if (counter == INTERNAL_HEADER.length) {
                        internal = true;
                    }
                }
                if (internal) {
                    // Read order must mirror sendPingRequest: version, id, cluster name, node.
                    StreamInput input = CachedStreamInput.cachedHandles(new BytesStreamInput(new BytesArray(data.toBytes(), INTERNAL_HEADER.length, data.length() - INTERNAL_HEADER.length)));
                    Version version = Version.readVersion(input);
                    input.setVersion(version);
                    id = input.readInt();
                    clusterName = ClusterName.readClusterName(input);
                    requestingNodeX = readNode(input);
                } else {
                    xContentType = XContentFactory.xContentType(data);
                    if (xContentType != null) {
                        // an external ping
                        externalPingData = XContentFactory.xContent(xContentType)
                                .createParser(data)
                                .mapAndClose();
                    } else {
                        throw new ElasticsearchIllegalStateException("failed multicast message, probably message from previous version");
                    }
                }
                if (externalPingData != null) {
                    handleExternalPingRequest(externalPingData, xContentType, address);
                } else {
                    handleNodePingRequest(id, requestingNodeX, clusterName);
                }
            } catch (Exception e) {
                // Demote to debug when we are shutting down or the pool rejected the work;
                // otherwise warn, since a garbled datagram may indicate mixed-version nodes.
                if (!lifecycle.started() || (e instanceof EsRejectedExecutionException)) {
                    logger.debug("failed to read requesting data from {}", e, address);
                } else {
                    logger.warn("failed to read requesting data from {}", e, address);
                }
            }
        }
        /**
         * Answers an external (non-node, e.g. monitoring tool) ping document received over the
         * multicast channel: validates the request's cluster name and multicasts back a
         * "response" document describing this node, in the same xContent type as the request.
         */
        @SuppressWarnings("unchecked")
        private void handleExternalPingRequest(Map<String, Object> externalPingData, XContentType contentType, SocketAddress remoteAddress) {
            if (externalPingData.containsKey("response")) {
                // ignoring responses sent over the multicast channel
                logger.trace("got an external ping response (ignoring) from {}, content {}", remoteAddress, externalPingData);
                return;
            }

            if (multicastChannel == null) {
                logger.debug("can't send ping response, no socket, from {}, content {}", remoteAddress, externalPingData);
                return;
            }

            Map<String, Object> request = (Map<String, Object>) externalPingData.get("request");
            if (request == null) {
                logger.warn("malformed external ping request, no 'request' element from {}, content {}", remoteAddress, externalPingData);
                return;
            }

            // Accept both the snake_case and camelCase spellings of the cluster name key.
            final String requestClusterName = request.containsKey("cluster_name") ? request.get("cluster_name").toString() : request.containsKey("clusterName") ? request.get("clusterName").toString() : null;
            if (requestClusterName == null) {
                logger.warn("malformed external ping request, missing 'cluster_name' element within request, from {}, content {}", remoteAddress, externalPingData);
                return;
            }

            if (!requestClusterName.equals(clusterName.value())) {
                logger.trace("got request for cluster_name {}, but our cluster_name is {}, from {}, content {}",
                        requestClusterName, clusterName.value(), remoteAddress, externalPingData);
                return;
            }
            if (logger.isTraceEnabled()) {
                logger.trace("got external ping request from {}, content {}", remoteAddress, externalPingData);
            }

            try {
                DiscoveryNode localNode = contextProvider.nodes().localNode();

                XContentBuilder builder = XContentFactory.contentBuilder(contentType);
                builder.startObject().startObject("response");
                builder.field("cluster_name", clusterName.value());
                builder.startObject("version").field("number", version.number()).field("snapshot_build", version.snapshot).endObject();
                builder.field("transport_address", localNode.address().toString());

                // Node service attributes are written directly under "response";
                // the local node's own attributes go under a nested "attributes" object.
                if (contextProvider.nodeService() != null) {
                    for (Map.Entry<String, String> attr : contextProvider.nodeService().attributes().entrySet()) {
                        builder.field(attr.getKey(), attr.getValue());
                    }
                }

                builder.startObject("attributes");
                for (Map.Entry<String, String> attr : localNode.attributes().entrySet()) {
                    builder.field(attr.getKey(), attr.getValue());
                }
                builder.endObject();

                builder.endObject().endObject();
                multicastChannel.send(builder.bytes());
                if (logger.isTraceEnabled()) {
                    logger.trace("sending external ping response {}", builder.string());
                }
            } catch (Exception e) {
                logger.warn("failed to send external multicast response", e);
            }
        }
/**
 * Handles an internal node ping request: validates that the request targets this
 * cluster and is not our own ping, then replies with a {@code MulticastPingResponse}
 * over the transport layer, connecting to the requesting node first if needed.
 *
 * @param id ping round ID to echo back in the response.
 * @param requestingNodeX node that sent the ping request.
 * @param requestClusterName cluster name carried by the request; must match ours.
 */
private void handleNodePingRequest(int id, DiscoveryNode requestingNodeX, ClusterName requestClusterName) {
    // Ignore pings when pinging is disabled or the multicast channel is gone (e.g. closed).
    if (!pingEnabled || multicastChannel == null) {
        return;
    }
    final DiscoveryNodes discoveryNodes = contextProvider.nodes();
    final DiscoveryNode requestingNode = requestingNodeX;
    if (requestingNode.id().equals(discoveryNodes.localNodeId())) {
        // that's me, ignore
        return;
    }
    if (!requestClusterName.equals(clusterName)) {
        if (logger.isTraceEnabled()) {
            logger.trace("[{}] received ping_request from [{}], but wrong cluster_name [{}], expected [{}], ignoring",
                    id, requestingNode, requestClusterName.value(), clusterName.value());
        }
        return;
    }
    // don't connect between two client nodes, no need for that...
    if (!discoveryNodes.localNode().shouldConnectTo(requestingNode)) {
        if (logger.isTraceEnabled()) {
            // FIX: message has only two placeholders; dropped the stray third
            // argument (requestClusterName) that was previously passed.
            logger.trace("[{}] received ping_request from [{}], both are client nodes, ignoring", id, requestingNode);
        }
        return;
    }
    final MulticastPingResponse multicastPingResponse = new MulticastPingResponse();
    multicastPingResponse.id = id;
    multicastPingResponse.pingResponse = new PingResponse(discoveryNodes.localNode(), discoveryNodes.masterNode(), clusterName, contextProvider.nodeHasJoinedClusterOnce());
    if (logger.isTraceEnabled()) {
        logger.trace("[{}] received ping_request from [{}], sending {}", id, requestingNode, multicastPingResponse.pingResponse);
    }
    if (!transportService.nodeConnected(requestingNode)) {
        // do the connect and send on a thread pool
        threadPool.generic().execute(new Runnable() {
            @Override
            public void run() {
                // connect to the node if possible
                try {
                    transportService.connectToNode(requestingNode);
                    transportService.sendRequest(requestingNode, ACTION_NAME, multicastPingResponse, new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
                        @Override
                        public void handleException(TransportException exp) {
                            // FIX: guard with lifecycle.started() for consistency with the
                            // already-connected branch below — failures during shutdown are expected.
                            if (lifecycle.started()) {
                                logger.warn("failed to receive confirmation on sent ping response to [{}]", exp, requestingNode);
                            }
                        }
                    });
                } catch (Exception e) {
                    if (lifecycle.started()) {
                        logger.warn("failed to connect to requesting node {}", e, requestingNode);
                    }
                }
            }
        });
    } else {
        transportService.sendRequest(requestingNode, ACTION_NAME, multicastPingResponse, new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
            @Override
            public void handleException(TransportException exp) {
                if (lifecycle.started()) {
                    logger.warn("failed to receive confirmation on sent ping response to [{}]", exp, requestingNode);
                }
            }
        });
    }
}
}
}
| |
/*
* WSO2 API Manager - Admin
* This document specifies a **RESTful API** for WSO2 **API Manager** - Admin Portal. Please see [full swagger definition](https://raw.githubusercontent.com/wso2/carbon-apimgt/v6.1.66/components/apimgt/org.wso2.carbon.apimgt.rest.api.admin/src/main/resources/admin-api.yaml) of the API which is written using [swagger 2.0](http://swagger.io/) specification.
*
* OpenAPI spec version: 0.11.0
* Contact: architecture@wso2.com
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package org.wso2.carbon.apimgt.samples.utils.admin.rest.client.api;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.ApiCallback;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.ApiClient;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.ApiException;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.ApiResponse;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.Configuration;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.Pair;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.ProgressRequestBody;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.ProgressResponseBody;
import com.google.gson.reflect.TypeToken;
import java.io.IOException;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.model.AdvancedThrottlePolicy;
import org.wso2.carbon.apimgt.samples.utils.admin.rest.client.model.Error;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Client for the single-resource operations of the Admin REST API's Advanced
 * Throttling Policy endpoint ({@code /throttling/policies/advanced/{policyId}}):
 * GET, PUT and DELETE, each in synchronous, with-HTTP-info and asynchronous forms.
 *
 * NOTE: auto-generated by swagger-codegen; do not edit manually.
 */
public class AdvancedPolicyIndividualApi {
    /** Underlying client used to build, sign and execute HTTP calls. */
    private ApiClient apiClient;

    /** Creates an instance backed by the globally configured default {@link ApiClient}. */
    public AdvancedPolicyIndividualApi() {
        this(Configuration.getDefaultApiClient());
    }

    /** Creates an instance backed by the given {@link ApiClient}. */
    public AdvancedPolicyIndividualApi(ApiClient apiClient) {
        this.apiClient = apiClient;
    }

    /** @return the {@link ApiClient} this API instance delegates to. */
    public ApiClient getApiClient() {
        return apiClient;
    }

    /** @param apiClient the {@link ApiClient} this API instance should delegate to. */
    public void setApiClient(ApiClient apiClient) {
        this.apiClient = apiClient;
    }

    /* Build call for throttlingPoliciesAdvancedPolicyIdDelete */
    private com.squareup.okhttp.Call throttlingPoliciesAdvancedPolicyIdDeleteCall(String policyId, String ifMatch, String ifUnmodifiedSince, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        Object localVarPostBody = null;

        // create path and map variables
        String localVarPath = "/throttling/policies/advanced/{policyId}".replaceAll("\\{format\\}","json")
        .replaceAll("\\{" + "policyId" + "\\}", apiClient.escapeString(policyId.toString()));

        List<Pair> localVarQueryParams = new ArrayList<Pair>();

        Map<String, String> localVarHeaderParams = new HashMap<String, String>();
        // Conditional-request headers are forwarded only when supplied by the caller.
        if (ifMatch != null)
        localVarHeaderParams.put("If-Match", apiClient.parameterToString(ifMatch));
        if (ifUnmodifiedSince != null)
        localVarHeaderParams.put("If-Unmodified-Since", apiClient.parameterToString(ifUnmodifiedSince));

        Map<String, Object> localVarFormParams = new HashMap<String, Object>();

        final String[] localVarAccepts = {
            "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);

        final String[] localVarContentTypes = {
            "application/json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        localVarHeaderParams.put("Content-Type", localVarContentType);

        // NOTE(review): each call with a progress listener appends a new network
        // interceptor to the shared OkHttp client and never removes it, so
        // interceptors accumulate across calls — known swagger-codegen behavior;
        // confirm acceptable for this sample utility before reuse in production.
        if(progressListener != null) {
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    return originalResponse.newBuilder()
                    .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                    .build();
                }
            });
        }

        String[] localVarAuthNames = new String[] { };
        return apiClient.buildCall(localVarPath, "DELETE", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }

    @SuppressWarnings("rawtypes")
    private com.squareup.okhttp.Call throttlingPoliciesAdvancedPolicyIdDeleteValidateBeforeCall(String policyId, String ifMatch, String ifUnmodifiedSince, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // verify the required parameter 'policyId' is set
        if (policyId == null) {
            throw new ApiException("Missing the required parameter 'policyId' when calling throttlingPoliciesAdvancedPolicyIdDelete(Async)");
        }

        com.squareup.okhttp.Call call = throttlingPoliciesAdvancedPolicyIdDeleteCall(policyId, ifMatch, ifUnmodifiedSince, progressListener, progressRequestListener);
        return call;
    }

    /**
     * Delete an Advanced Throttling Policy
     * Deletes an Advanced level throttling policy.
     * @param policyId Throttle policy UUID (required)
     * @param ifMatch Validator for conditional requests; based on ETag (Will be supported in future). (optional)
     * @param ifUnmodifiedSince Validator for conditional requests; based on Last Modified header (Will be supported in future). (optional)
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public void throttlingPoliciesAdvancedPolicyIdDelete(String policyId, String ifMatch, String ifUnmodifiedSince) throws ApiException {
        throttlingPoliciesAdvancedPolicyIdDeleteWithHttpInfo(policyId, ifMatch, ifUnmodifiedSince);
    }

    /**
     * Delete an Advanced Throttling Policy
     * Deletes an Advanced level throttling policy.
     * @param policyId Throttle policy UUID (required)
     * @param ifMatch Validator for conditional requests; based on ETag (Will be supported in future). (optional)
     * @param ifUnmodifiedSince Validator for conditional requests; based on Last Modified header (Will be supported in future). (optional)
     * @return ApiResponse&lt;Void&gt;
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ApiResponse<Void> throttlingPoliciesAdvancedPolicyIdDeleteWithHttpInfo(String policyId, String ifMatch, String ifUnmodifiedSince) throws ApiException {
        com.squareup.okhttp.Call call = throttlingPoliciesAdvancedPolicyIdDeleteValidateBeforeCall(policyId, ifMatch, ifUnmodifiedSince, null, null);
        return apiClient.execute(call);
    }

    /**
     * Delete an Advanced Throttling Policy (asynchronously)
     * Deletes an Advanced level throttling policy.
     * @param policyId Throttle policy UUID (required)
     * @param ifMatch Validator for conditional requests; based on ETag (Will be supported in future). (optional)
     * @param ifUnmodifiedSince Validator for conditional requests; based on Last Modified header (Will be supported in future). (optional)
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call throttlingPoliciesAdvancedPolicyIdDeleteAsync(String policyId, String ifMatch, String ifUnmodifiedSince, final ApiCallback<Void> callback) throws ApiException {
        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;

        // Wire the generic ApiCallback into OkHttp upload/download progress hooks.
        if (callback != null) {
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };

            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }

        com.squareup.okhttp.Call call = throttlingPoliciesAdvancedPolicyIdDeleteValidateBeforeCall(policyId, ifMatch, ifUnmodifiedSince, progressListener, progressRequestListener);
        apiClient.executeAsync(call, callback);
        return call;
    }

    /* Build call for throttlingPoliciesAdvancedPolicyIdGet */
    private com.squareup.okhttp.Call throttlingPoliciesAdvancedPolicyIdGetCall(String policyId, String ifNoneMatch, String ifModifiedSince, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        Object localVarPostBody = null;

        // create path and map variables
        String localVarPath = "/throttling/policies/advanced/{policyId}".replaceAll("\\{format\\}","json")
        .replaceAll("\\{" + "policyId" + "\\}", apiClient.escapeString(policyId.toString()));

        List<Pair> localVarQueryParams = new ArrayList<Pair>();

        Map<String, String> localVarHeaderParams = new HashMap<String, String>();
        // Conditional-request headers are forwarded only when supplied by the caller.
        if (ifNoneMatch != null)
        localVarHeaderParams.put("If-None-Match", apiClient.parameterToString(ifNoneMatch));
        if (ifModifiedSince != null)
        localVarHeaderParams.put("If-Modified-Since", apiClient.parameterToString(ifModifiedSince));

        Map<String, Object> localVarFormParams = new HashMap<String, Object>();

        final String[] localVarAccepts = {
            "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);

        final String[] localVarContentTypes = {
            "application/json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        localVarHeaderParams.put("Content-Type", localVarContentType);

        // NOTE(review): interceptors added here accumulate on the shared OkHttp
        // client across calls (see DELETE builder above) — generated-code caveat.
        if(progressListener != null) {
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    return originalResponse.newBuilder()
                    .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                    .build();
                }
            });
        }

        String[] localVarAuthNames = new String[] { };
        return apiClient.buildCall(localVarPath, "GET", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }

    @SuppressWarnings("rawtypes")
    private com.squareup.okhttp.Call throttlingPoliciesAdvancedPolicyIdGetValidateBeforeCall(String policyId, String ifNoneMatch, String ifModifiedSince, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // verify the required parameter 'policyId' is set
        if (policyId == null) {
            throw new ApiException("Missing the required parameter 'policyId' when calling throttlingPoliciesAdvancedPolicyIdGet(Async)");
        }

        com.squareup.okhttp.Call call = throttlingPoliciesAdvancedPolicyIdGetCall(policyId, ifNoneMatch, ifModifiedSince, progressListener, progressRequestListener);
        return call;
    }

    /**
     * Get an Advanced Policy
     * Retrieves an Advanced Policy.
     * @param policyId Throttle policy UUID (required)
     * @param ifNoneMatch Validator for conditional requests; based on the ETag of the formerly retrieved variant of the resource (Will be supported in future). (optional)
     * @param ifModifiedSince Validator for conditional requests; based on Last Modified header of the formerly retrieved variant of the resource (Will be supported in future). (optional)
     * @return AdvancedThrottlePolicy
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public AdvancedThrottlePolicy throttlingPoliciesAdvancedPolicyIdGet(String policyId, String ifNoneMatch, String ifModifiedSince) throws ApiException {
        ApiResponse<AdvancedThrottlePolicy> resp = throttlingPoliciesAdvancedPolicyIdGetWithHttpInfo(policyId, ifNoneMatch, ifModifiedSince);
        return resp.getData();
    }

    /**
     * Get an Advanced Policy
     * Retrieves an Advanced Policy.
     * @param policyId Throttle policy UUID (required)
     * @param ifNoneMatch Validator for conditional requests; based on the ETag of the formerly retrieved variant of the resource (Will be supported in future). (optional)
     * @param ifModifiedSince Validator for conditional requests; based on Last Modified header of the formerly retrieved variant of the resource (Will be supported in future). (optional)
     * @return ApiResponse&lt;AdvancedThrottlePolicy&gt;
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ApiResponse<AdvancedThrottlePolicy> throttlingPoliciesAdvancedPolicyIdGetWithHttpInfo(String policyId, String ifNoneMatch, String ifModifiedSince) throws ApiException {
        com.squareup.okhttp.Call call = throttlingPoliciesAdvancedPolicyIdGetValidateBeforeCall(policyId, ifNoneMatch, ifModifiedSince, null, null);
        Type localVarReturnType = new TypeToken<AdvancedThrottlePolicy>(){}.getType();
        return apiClient.execute(call, localVarReturnType);
    }

    /**
     * Get an Advanced Policy (asynchronously)
     * Retrieves an Advanced Policy.
     * @param policyId Throttle policy UUID (required)
     * @param ifNoneMatch Validator for conditional requests; based on the ETag of the formerly retrieved variant of the resource (Will be supported in future). (optional)
     * @param ifModifiedSince Validator for conditional requests; based on Last Modified header of the formerly retrieved variant of the resource (Will be supported in future). (optional)
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call throttlingPoliciesAdvancedPolicyIdGetAsync(String policyId, String ifNoneMatch, String ifModifiedSince, final ApiCallback<AdvancedThrottlePolicy> callback) throws ApiException {
        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;

        // Wire the generic ApiCallback into OkHttp upload/download progress hooks.
        if (callback != null) {
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };

            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }

        com.squareup.okhttp.Call call = throttlingPoliciesAdvancedPolicyIdGetValidateBeforeCall(policyId, ifNoneMatch, ifModifiedSince, progressListener, progressRequestListener);
        Type localVarReturnType = new TypeToken<AdvancedThrottlePolicy>(){}.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }

    /* Build call for throttlingPoliciesAdvancedPolicyIdPut */
    private com.squareup.okhttp.Call throttlingPoliciesAdvancedPolicyIdPutCall(String policyId, AdvancedThrottlePolicy body, String contentType, String ifMatch, String ifUnmodifiedSince, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        Object localVarPostBody = body;

        // create path and map variables
        String localVarPath = "/throttling/policies/advanced/{policyId}".replaceAll("\\{format\\}","json")
        .replaceAll("\\{" + "policyId" + "\\}", apiClient.escapeString(policyId.toString()));

        List<Pair> localVarQueryParams = new ArrayList<Pair>();

        Map<String, String> localVarHeaderParams = new HashMap<String, String>();
        // NOTE(review): the caller-supplied Content-Type is overwritten below by
        // the negotiated localVarContentType — generated-code quirk; confirm intended.
        if (contentType != null)
        localVarHeaderParams.put("Content-Type", apiClient.parameterToString(contentType));
        if (ifMatch != null)
        localVarHeaderParams.put("If-Match", apiClient.parameterToString(ifMatch));
        if (ifUnmodifiedSince != null)
        localVarHeaderParams.put("If-Unmodified-Since", apiClient.parameterToString(ifUnmodifiedSince));

        Map<String, Object> localVarFormParams = new HashMap<String, Object>();

        final String[] localVarAccepts = {
            "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);

        final String[] localVarContentTypes = {
            "application/json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        localVarHeaderParams.put("Content-Type", localVarContentType);

        // NOTE(review): interceptors added here accumulate on the shared OkHttp
        // client across calls (see DELETE builder above) — generated-code caveat.
        if(progressListener != null) {
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    return originalResponse.newBuilder()
                    .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                    .build();
                }
            });
        }

        String[] localVarAuthNames = new String[] { };
        return apiClient.buildCall(localVarPath, "PUT", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }

    @SuppressWarnings("rawtypes")
    private com.squareup.okhttp.Call throttlingPoliciesAdvancedPolicyIdPutValidateBeforeCall(String policyId, AdvancedThrottlePolicy body, String contentType, String ifMatch, String ifUnmodifiedSince, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // verify the required parameter 'policyId' is set
        if (policyId == null) {
            throw new ApiException("Missing the required parameter 'policyId' when calling throttlingPoliciesAdvancedPolicyIdPut(Async)");
        }

        // verify the required parameter 'body' is set
        if (body == null) {
            throw new ApiException("Missing the required parameter 'body' when calling throttlingPoliciesAdvancedPolicyIdPut(Async)");
        }

        // verify the required parameter 'contentType' is set
        if (contentType == null) {
            throw new ApiException("Missing the required parameter 'contentType' when calling throttlingPoliciesAdvancedPolicyIdPut(Async)");
        }

        com.squareup.okhttp.Call call = throttlingPoliciesAdvancedPolicyIdPutCall(policyId, body, contentType, ifMatch, ifUnmodifiedSince, progressListener, progressRequestListener);
        return call;
    }

    /**
     * Update an Advanced Throttling Policy
     * Updates an existing Advanced level throttling policy.
     * @param policyId Throttle policy UUID (required)
     * @param body Policy object that needs to be modified (required)
     * @param contentType Media type of the entity in the body. Default is application/json. (required)
     * @param ifMatch Validator for conditional requests; based on ETag (Will be supported in future). (optional)
     * @param ifUnmodifiedSince Validator for conditional requests; based on Last Modified header (Will be supported in future). (optional)
     * @return AdvancedThrottlePolicy
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public AdvancedThrottlePolicy throttlingPoliciesAdvancedPolicyIdPut(String policyId, AdvancedThrottlePolicy body, String contentType, String ifMatch, String ifUnmodifiedSince) throws ApiException {
        ApiResponse<AdvancedThrottlePolicy> resp = throttlingPoliciesAdvancedPolicyIdPutWithHttpInfo(policyId, body, contentType, ifMatch, ifUnmodifiedSince);
        return resp.getData();
    }

    /**
     * Update an Advanced Throttling Policy
     * Updates an existing Advanced level throttling policy.
     * @param policyId Throttle policy UUID (required)
     * @param body Policy object that needs to be modified (required)
     * @param contentType Media type of the entity in the body. Default is application/json. (required)
     * @param ifMatch Validator for conditional requests; based on ETag (Will be supported in future). (optional)
     * @param ifUnmodifiedSince Validator for conditional requests; based on Last Modified header (Will be supported in future). (optional)
     * @return ApiResponse&lt;AdvancedThrottlePolicy&gt;
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ApiResponse<AdvancedThrottlePolicy> throttlingPoliciesAdvancedPolicyIdPutWithHttpInfo(String policyId, AdvancedThrottlePolicy body, String contentType, String ifMatch, String ifUnmodifiedSince) throws ApiException {
        com.squareup.okhttp.Call call = throttlingPoliciesAdvancedPolicyIdPutValidateBeforeCall(policyId, body, contentType, ifMatch, ifUnmodifiedSince, null, null);
        Type localVarReturnType = new TypeToken<AdvancedThrottlePolicy>(){}.getType();
        return apiClient.execute(call, localVarReturnType);
    }

    /**
     * Update an Advanced Throttling Policy (asynchronously)
     * Updates an existing Advanced level throttling policy.
     * @param policyId Throttle policy UUID (required)
     * @param body Policy object that needs to be modified (required)
     * @param contentType Media type of the entity in the body. Default is application/json. (required)
     * @param ifMatch Validator for conditional requests; based on ETag (Will be supported in future). (optional)
     * @param ifUnmodifiedSince Validator for conditional requests; based on Last Modified header (Will be supported in future). (optional)
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call throttlingPoliciesAdvancedPolicyIdPutAsync(String policyId, AdvancedThrottlePolicy body, String contentType, String ifMatch, String ifUnmodifiedSince, final ApiCallback<AdvancedThrottlePolicy> callback) throws ApiException {
        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;

        // Wire the generic ApiCallback into OkHttp upload/download progress hooks.
        if (callback != null) {
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };

            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }

        com.squareup.okhttp.Call call = throttlingPoliciesAdvancedPolicyIdPutValidateBeforeCall(policyId, body, contentType, ifMatch, ifUnmodifiedSince, progressListener, progressRequestListener);
        Type localVarReturnType = new TypeToken<AdvancedThrottlePolicy>(){}.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import org.apache.ignite.internal.util.typedef.F;
import org.jetbrains.annotations.Nullable;
/**
* Cache change requests to execute when receive {@link DynamicCacheChangeBatch} event.
*/
public class ExchangeActions {
    /** Requests to start caches on server nodes, keyed by cache name. */
    private Map<String, ActionData> cachesToStart;

    /** Requests to start caches on client nodes only, keyed by cache name. */
    private Map<String, ActionData> clientCachesToStart;

    /** Requests to stop caches, keyed by cache name. */
    private Map<String, ActionData> cachesToStop;

    /** Requests to close caches, keyed by cache name. */
    private Map<String, ActionData> cachesToClose;

    /** Requests to reset lost partitions, keyed by cache name. */
    private Map<String, ActionData> cachesToResetLostParts;

    /** New cluster state, if a state change was requested. */
    private ClusterState newState;

    /**
     * @return {@code True} if server nodes should not participate in exchange.
     */
    boolean clientOnlyExchange() {
        return F.isEmpty(cachesToStart) &&
            F.isEmpty(cachesToStop) &&
            F.isEmpty(cachesToResetLostParts);
    }

    /**
     * @param nodeId Local node ID.
     * @return Close cache requests initiated by the given node (never {@code null}).
     */
    List<DynamicCacheChangeRequest> closeRequests(UUID nodeId) {
        List<DynamicCacheChangeRequest> res = null;

        if (cachesToClose != null) {
            for (ActionData req : cachesToClose.values()) {
                if (nodeId.equals(req.req.initiatingNodeId())) {
                    // Lazily allocate only when at least one request matches.
                    if (res == null)
                        res = new ArrayList<>(cachesToClose.size());

                    res.add(req.req);
                }
            }
        }

        return res != null ? res : Collections.<DynamicCacheChangeRequest>emptyList();
    }

    /**
     * @return New caches start requests (never {@code null}).
     */
    Collection<ActionData> cacheStartRequests() {
        return cachesToStart != null ? cachesToStart.values() : Collections.<ActionData>emptyList();
    }

    /**
     * @return Start cache requests for both server and client caches (never {@code null}).
     */
    Collection<ActionData> newAndClientCachesStartRequests() {
        if (cachesToStart != null || clientCachesToStart != null) {
            List<ActionData> res = new ArrayList<>();

            if (cachesToStart != null)
                res.addAll(cachesToStart.values());

            if (clientCachesToStart != null)
                res.addAll(clientCachesToStart.values());

            return res;
        }

        return Collections.emptyList();
    }

    /**
     * @return Stop cache requests (never {@code null}).
     */
    Collection<ActionData> cacheStopRequests() {
        return cachesToStop != null ? cachesToStop.values() : Collections.<ActionData>emptyList();
    }

    /**
     * Completes cache start futures for all pending requests of every action kind.
     *
     * @param ctx Context.
     */
    public void completeRequestFutures(GridCacheSharedContext ctx) {
        completeRequestFutures(cachesToStart, ctx);
        completeRequestFutures(cachesToStop, ctx);
        completeRequestFutures(cachesToClose, ctx);
        completeRequestFutures(clientCachesToStart, ctx);
        completeRequestFutures(cachesToResetLostParts, ctx);
    }

    /**
     * @param map Actions map (may be {@code null}).
     * @param ctx Context.
     */
    private void completeRequestFutures(Map<String, ActionData> map, GridCacheSharedContext ctx) {
        if (map != null) {
            for (ActionData req : map.values())
                ctx.cache().completeCacheStartFuture(req.req, null);
        }
    }

    /**
     * @return {@code True} if have cache stop requests.
     */
    public boolean hasStop() {
        return !F.isEmpty(cachesToStop);
    }

    /**
     * @return Caches to reset lost partitions for (never {@code null}; defensive copy).
     */
    public Set<String> cachesToResetLostPartitions() {
        // Simplified: HashSet construction never yields null, so the old
        // post-construction null check was redundant.
        return cachesToResetLostParts != null
            ? new HashSet<>(cachesToResetLostParts.keySet())
            : Collections.<String>emptySet();
    }

    /**
     * @param cacheId Cache ID.
     * @return {@code True} if cache stop was requested.
     */
    public boolean cacheStopped(int cacheId) {
        if (cachesToStop != null) {
            for (ActionData cache : cachesToStop.values()) {
                if (cache.desc.cacheId() == cacheId)
                    return true;
            }
        }

        return false;
    }

    /**
     * @param cacheId Cache ID.
     * @return {@code True} if cache start was requested.
     */
    public boolean cacheStarted(int cacheId) {
        if (cachesToStart != null) {
            for (ActionData cache : cachesToStart.values()) {
                if (cache.desc.cacheId() == cacheId)
                    return true;
            }
        }

        return false;
    }

    /**
     * @param nodeId Local node ID.
     * @return {@code True} if a client cache start was initiated by the given node.
     */
    public boolean clientCacheStarted(UUID nodeId) {
        if (clientCachesToStart != null) {
            for (ActionData cache : clientCachesToStart.values()) {
                if (nodeId.equals(cache.req.initiatingNodeId()))
                    return true;
            }
        }

        return false;
    }

    /**
     * @param state New cluster state.
     */
    void newClusterState(ClusterState state) {
        assert state != null;

        newState = state;
    }

    /**
     * @return New cluster state if state change was requested, {@code null} otherwise.
     */
    @Nullable public ClusterState newClusterState() {
        return newState;
    }

    /**
     * Adds a request/descriptor pair to the given map, lazily creating it.
     * A cache name must not be added twice to the same map.
     *
     * @param map Actions map (may be {@code null}).
     * @param req Request.
     * @param desc Cache descriptor.
     * @return Actions map.
     */
    private Map<String, ActionData> add(Map<String, ActionData> map,
        DynamicCacheChangeRequest req,
        DynamicCacheDescriptor desc) {
        assert req != null;
        assert desc != null;

        if (map == null)
            map = new HashMap<>();

        ActionData old = map.put(req.cacheName(), new ActionData(req, desc));

        assert old == null : old;

        return map;
    }

    /**
     * @param req Request.
     * @param desc Cache descriptor.
     */
    void addCacheToStart(DynamicCacheChangeRequest req, DynamicCacheDescriptor desc) {
        assert req.start() : req;

        cachesToStart = add(cachesToStart, req, desc);
    }

    /**
     * @param req Request.
     * @param desc Cache descriptor.
     */
    void addClientCacheToStart(DynamicCacheChangeRequest req, DynamicCacheDescriptor desc) {
        assert req.start() : req;

        clientCachesToStart = add(clientCachesToStart, req, desc);
    }

    /**
     * @param req Request.
     * @param desc Cache descriptor.
     */
    void addCacheToStop(DynamicCacheChangeRequest req, DynamicCacheDescriptor desc) {
        assert req.stop() : req;

        cachesToStop = add(cachesToStop, req, desc);
    }

    /**
     * @param req Request.
     * @param desc Cache descriptor.
     */
    void addCacheToClose(DynamicCacheChangeRequest req, DynamicCacheDescriptor desc) {
        assert req.close() : req;

        cachesToClose = add(cachesToClose, req, desc);
    }

    /**
     * @param req Request.
     * @param desc Cache descriptor.
     */
    void addCacheToResetLostPartitions(DynamicCacheChangeRequest req, DynamicCacheDescriptor desc) {
        assert req.resetLostPartitions() : req;

        cachesToResetLostParts = add(cachesToResetLostParts, req, desc);
    }

    /**
     * @return {@code True} if there are no cache change actions.
     */
    public boolean empty() {
        return F.isEmpty(cachesToStart) &&
            F.isEmpty(clientCachesToStart) &&
            F.isEmpty(cachesToStop) &&
            F.isEmpty(cachesToClose) &&
            F.isEmpty(cachesToResetLostParts);
    }

    /**
     * Immutable pairing of a cache change request with its cache descriptor.
     */
    static class ActionData {
        /** Cache change request. Made final: set once in constructor, never mutated. */
        private final DynamicCacheChangeRequest req;

        /** Cache descriptor. Made final: set once in constructor, never mutated. */
        private final DynamicCacheDescriptor desc;

        /**
         * @param req Request.
         * @param desc Cache descriptor.
         */
        ActionData(DynamicCacheChangeRequest req, DynamicCacheDescriptor desc) {
            assert req != null;
            assert desc != null;

            this.req = req;
            this.desc = desc;
        }

        /**
         * @return Request.
         */
        public DynamicCacheChangeRequest request() {
            return req;
        }

        /**
         * @return Cache descriptor.
         */
        public DynamicCacheDescriptor descriptor() {
            return desc;
        }
    }
}
| |
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.ipc.invalidation.ticl.android2.channel;
import com.google.android.gcm.GCMRegistrar;
import com.google.ipc.invalidation.common.GcmSharedConstants;
import com.google.ipc.invalidation.external.client.SystemResources.Logger;
import com.google.ipc.invalidation.external.client.android.service.AndroidLogger;
import com.google.ipc.invalidation.ticl.android2.AndroidTiclManifest;
import com.google.ipc.invalidation.ticl.android2.ProtocolIntents;
import com.google.ipc.invalidation.ticl.android2.channel.AndroidChannelConstants.AuthTokenConstants;
import com.google.ipc.invalidation.ticl.android2.channel.AndroidChannelConstants.HttpConstants;
import com.google.ipc.invalidation.ticl.android2.channel.AndroidChannelPreferences.GcmChannelType;
import com.google.ipc.invalidation.ticl.proto.AndroidService.AndroidNetworkSendRequest;
import com.google.ipc.invalidation.ticl.proto.ChannelCommon.NetworkEndpointId;
import com.google.ipc.invalidation.ticl.proto.CommonProtos;
import com.google.ipc.invalidation.util.Preconditions;
import com.google.ipc.invalidation.util.ProtoWrapper.ValidationException;
import android.app.IntentService;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.os.Build;
import android.util.Base64;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.ProtocolException;
import java.net.URL;
import java.util.Arrays;
/**
* Service that sends messages to the data center using HTTP POSTs authenticated as a Google
* account.
* <p>
* Messages are sent as byte-serialized {@code ClientToServerMessage} protocol buffers.
* Additionally, the POST requests echo the latest value of the echo token received on C2DM
* messages from the data center.
*
*/
public class AndroidMessageSenderService extends IntentService {
/* This class is public so that it can be instantiated by the Android runtime. */
/**
* A prefix on the "auth token type" that indicates we're using an OAuth2 token to authenticate.
*/
private static final String OAUTH2_TOKEN_TYPE_PREFIX = "oauth2:";
/** An override of the URL, for testing. */
private static String channelUrlForTest = null;
private final Logger logger = AndroidLogger.forTag("MsgSenderSvc");
/** The last message sent, for tests. */
public static byte[] lastTiclMessageForTest = null;
/** Creates the sender service. */
public AndroidMessageSenderService() {
    super("AndroidNetworkService");
    // Request intent redelivery so a send request is not lost if the process is killed
    // while the intent is being handled.
    setIntentRedelivery(true);
}
@Override
public void onCreate() {
    super.onCreate();
    // HTTP connection reuse was buggy pre-Froyo, so disable it on those platforms.
    // NOTE: this is a process-wide JVM property; it affects every HttpURLConnection.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.FROYO) {
        System.setProperty("http.keepAlive", "false");
    }
}
@Override
protected void onHandleIntent(Intent intent) {
    if (intent == null) {
        return;
    }

    // Dispatch on the extras present in the intent; checked in priority order.
    if (intent.hasExtra(ProtocolIntents.OUTBOUND_MESSAGE_KEY)) {
        // Request from the Ticl service to send a message.
        handleOutboundMessage(intent.getByteArrayExtra(ProtocolIntents.OUTBOUND_MESSAGE_KEY));
        return;
    }
    if (intent.hasExtra(AndroidChannelConstants.AuthTokenConstants.EXTRA_AUTH_TOKEN)) {
        // Reply from the app with an auth token and a message to send.
        handleAuthTokenResponse(intent);
        return;
    }
    if (intent.hasExtra(AndroidChannelConstants.MESSAGE_SENDER_SVC_GCM_REGID_CHANGE)) {
        handleGcmRegIdChange();
        return;
    }
    logger.warning("Ignoring intent: %s", intent);
}
/**
 * Handles a request to send a message to the data center. Parses and validates the
 * serialized request, then asks the application for an auth token to use on the HTTP
 * request to the data center.
 */
private void handleOutboundMessage(byte[] sendRequestBytes) {
    // Parse and validate the send request; drop it with a warning if malformed.
    AndroidNetworkSendRequest sendRequest;
    try {
        sendRequest = AndroidNetworkSendRequest.parseFrom(sendRequestBytes);
    } catch (ValidationException exception) {
        logger.warning("Invalid AndroidNetworkSendRequest from %s: %s", sendRequestBytes, exception);
        return;
    }

    // Kick off the auth-token round trip with the application; the message rides along.
    requestAuthTokenForMessage(sendRequest.getMessage().getByteArray(), null);
}
/**
 * Requests an auth token from the application to use to send {@code message} to the data
 * center.
 * <p>
 * If not {@code null}, {@code invalidAuthToken} is an auth token that was previously
 * found to be invalid. The intent sent to the application to request the new token will include
 * the invalid token so that the application can invalidate it in the {@code AccountManager}.
 *
 * @param message serialized message to deliver once a token is available
 * @param invalidAuthToken previously-rejected token to invalidate, or {@code null}
 */
private void requestAuthTokenForMessage(byte[] message, String invalidAuthToken) {
    /*
     * Send an intent requesting an auth token. This intent will contain a pending intent
     * that the recipient can use to send back the token (by attaching the token as a string
     * extra). That pending intent will also contain the message that we were just asked to send,
     * so that it will be echoed back to us with the token. This avoids our having to persist
     * the message while waiting for the token.
     */
    // This is the intent that the application will send back to us (the pending intent allows
    // it to send the intent). It contains the stored message. We require that it be delivered to
    // this class only, as a security check.
    Intent tokenResponseIntent = new Intent(this, getClass());
    tokenResponseIntent.putExtra(AuthTokenConstants.EXTRA_STORED_MESSAGE, message);
    // If we have an invalid auth token, set a bit in the intent that the application will send
    // back to us. This will let us know that it is a retry; if sending subsequently fails again,
    // we will not do any further retries.
    tokenResponseIntent.putExtra(AuthTokenConstants.EXTRA_IS_RETRY, invalidAuthToken != null);
    // The pending intent allows the application to send us the tokenResponseIntent.
    // Arrays.hashCode(message) is used as the request code so distinct messages get distinct
    // pending intents rather than overwriting each other.
    PendingIntent pendingIntent = PendingIntent.getService(
        this, Arrays.hashCode(message), tokenResponseIntent, PendingIntent.FLAG_ONE_SHOT);
    // We send the pending intent as an extra in a normal intent to the application. The
    // invalidation listener service must handle AUTH_TOKEN_REQUEST intents.
    Intent requestTokenIntent = new Intent(AuthTokenConstants.ACTION_REQUEST_AUTH_TOKEN);
    requestTokenIntent.putExtra(AuthTokenConstants.EXTRA_PENDING_INTENT, pendingIntent);
    if (invalidAuthToken != null) {
        requestTokenIntent.putExtra(AuthTokenConstants.EXTRA_INVALIDATE_AUTH_TOKEN, invalidAuthToken);
    }
    // Target the intent at the application's registered listener service.
    String simpleListenerClass =
        new AndroidTiclManifest(getApplicationContext()).getListenerServiceClass();
    requestTokenIntent.setClassName(getApplicationContext(), simpleListenerClass);
    try {
        startService(requestTokenIntent);
    } catch (SecurityException exception) {
        // Best-effort: if the listener service is not startable, log and drop the request.
        logger.warning("unable to request auth token: %s", exception);
    }
}
/**
 * Handles an intent received from the application that contains both a message to send and
 * an auth token and type to use when sending it. This is called when the reply to the intent
 * sent in {@link #requestAuthTokenForMessage(byte[], String)} is received.
 */
private void handleAuthTokenResponse(Intent intent) {
    // All four extras must be present for the response to be usable.
    boolean hasAllFields = intent.hasExtra(AuthTokenConstants.EXTRA_STORED_MESSAGE)
        && intent.hasExtra(AuthTokenConstants.EXTRA_AUTH_TOKEN)
        && intent.hasExtra(AuthTokenConstants.EXTRA_AUTH_TOKEN_TYPE)
        && intent.hasExtra(AuthTokenConstants.EXTRA_IS_RETRY);
    if (!hasAllFields) {
        logger.warning(
            "auth-token-response intent missing fields: %s, %s", intent, intent.getExtras());
        return;
    }

    byte[] storedMessage = intent.getByteArrayExtra(AuthTokenConstants.EXTRA_STORED_MESSAGE);
    String authToken = intent.getStringExtra(AuthTokenConstants.EXTRA_AUTH_TOKEN);
    String authTokenType = intent.getStringExtra(AuthTokenConstants.EXTRA_AUTH_TOKEN_TYPE);
    boolean isRetryForInvalidAuthToken =
        intent.getBooleanExtra(AuthTokenConstants.EXTRA_IS_RETRY, false);

    deliverOutboundMessage(storedMessage, authToken, authTokenType, isRetryForInvalidAuthToken);
}
/**
 * Sends {@code outgoingMessage} to the data center as a serialized ClientToServerMessage using an
 * HTTP POST.
 * <p>
 * If the HTTP POST fails due to an authentication failure and this is not a retry for an invalid
 * auth token ({@code isRetryForInvalidAuthToken} is {@code false}), then it will call
 * {@link #requestAuthTokenForMessage(byte[], String)} with {@code authToken} to invalidate the
 * token and retry.
 *
 * @param outgoingMessage serialized message to POST
 * @param authToken the auth token to use in the HTTP POST
 * @param authTokenType the type of the auth token
 * @param isRetryForInvalidAuthToken whether this delivery is already a retry after an auth failure
 */
private void deliverOutboundMessage(byte[] outgoingMessage, String authToken,
    String authTokenType, boolean isRetryForInvalidAuthToken) {
    NetworkEndpointId networkEndpointId = getNetworkEndpointId(this, logger);
    if (networkEndpointId == null) {
        // No GCM registration; buffer the message to send when we become registered.
        logger.info("Buffering message to the data center: no GCM registration id");
        AndroidChannelPreferences.bufferMessage(this, outgoingMessage);
        return;
    }
    logger.fine("Delivering outbound message: %s bytes", outgoingMessage.length);
    lastTiclMessageForTest = outgoingMessage;
    URL url = null;
    HttpURLConnection urlConnection = null;
    try {
        // Open the connection.
        boolean isOAuth2Token = authTokenType.startsWith(OAUTH2_TOKEN_TYPE_PREFIX);
        url = buildUrl(isOAuth2Token ? null : authTokenType, networkEndpointId);
        urlConnection = createUrlConnectionForPost(this, url, authToken, isOAuth2Token);
        // We are seeing EOFException errors when reusing connections. Request that the connection
        // is closed on response to work around this issue. Client-to-server messages are batched
        // and infrequent so there isn't much benefit in connection reuse here.
        urlConnection.setRequestProperty("Connection", "close");
        urlConnection.setFixedLengthStreamingMode(outgoingMessage.length);
        urlConnection.connect();

        // Write the outgoing message.
        urlConnection.getOutputStream().write(outgoingMessage);

        // BUG FIX: fetch the response code BEFORE reading the body. For error responses
        // (e.g. 401), getInputStream() throws IOException, which previously jumped to the
        // generic IOException handler and made the HTTP_UNAUTHORIZED retry path unreachable.
        final int responseCode = urlConnection.getResponseCode();

        // Consume all of the response. We do not do anything with the response (except log it for
        // non-200 response codes), and do not expect any, but certain versions of the Apache HTTP
        // library have a bug that causes connections to leak when the response is not fully
        // consumed; out of sheer paranoia, we do the same thing here. For error codes the body
        // must be read from the error stream, which may be null when there is no body.
        InputStream responseStream = (responseCode >= HttpURLConnection.HTTP_BAD_REQUEST)
            ? urlConnection.getErrorStream() : urlConnection.getInputStream();
        String response = (responseStream != null) ? readCompleteStream(responseStream) : "";

        // Retry authorization failures and log other non-200 response codes.
        switch (responseCode) {
            case HttpURLConnection.HTTP_OK:
            case HttpURLConnection.HTTP_NO_CONTENT:
                break;
            case HttpURLConnection.HTTP_UNAUTHORIZED:
                if (!isRetryForInvalidAuthToken) {
                    // If we had an auth failure and this is not a retry of an auth failure, then
                    // ask the application to invalidate authToken and give us a new one with
                    // which to retry. We check that this attempt was not a retry to avoid
                    // infinite loops if authorization always fails.
                    requestAuthTokenForMessage(outgoingMessage, authToken);
                }
                break;
            default:
                logger.warning("Unexpected response code %s for HTTP POST to %s; response = %s",
                    responseCode, url, response);
        }
    } catch (MalformedURLException exception) {
        logger.warning("Malformed URL: %s", exception);
    } catch (IOException exception) {
        logger.warning("IOException sending message (%s): %s", url, exception);
    } catch (RuntimeException exception) {
        // URL#openConnection occasionally throws a NullPointerException due to an inability to get
        // the class loader for the current thread. There may be other unknown bugs in the network
        // libraries so we just eat runtime exception here.
        logger.warning(
            "RuntimeException creating HTTP connection or sending message (%s): %s", url, exception);
    } finally {
        if (urlConnection != null) {
            urlConnection.disconnect();
        }
    }
}
/**
 * Handles a change in the GCM registration id by sending the buffered client message (if any)
 * to the data center.
 */
private void handleGcmRegIdChange() {
    // NOTE(review): takeBufferedMessage presumably also clears the buffered message so it is
    // sent at most once — confirm against AndroidChannelPreferences.
    byte[] bufferedMessage = AndroidChannelPreferences.takeBufferedMessage(this);
    if (bufferedMessage != null) {
        // Rejoin the start of the code path that handles sending outbound messages.
        requestAuthTokenForMessage(bufferedMessage, null);
    }
}
/**
 * Returns a URL to use to send a message to the data center.
 *
 * @param gaiaServiceId Gaia service for which the request will be authenticated (when using a
 *      GoogleLogin token), or {@code null} when using an OAuth2 token.
 * @param networkEndpointId network id of the client
 */
private static URL buildUrl(String gaiaServiceId, NetworkEndpointId networkEndpointId)
    throws MalformedURLException {
    // Base URL targets the inbound request service, with the test override taking precedence.
    String baseUrl = (channelUrlForTest != null) ? channelUrlForTest : HttpConstants.CHANNEL_URL;
    StringBuilder target = new StringBuilder(baseUrl);
    target.append(HttpConstants.REQUEST_URL);
    // TODO: We should be sending a ClientGatewayMessage in the request body
    // instead of appending the client's network endpoint id to the request URL. Once we do that,
    // we should use a UriBuilder to build up a structured Uri object instead of the brittle
    // string concatenation we're doing below.
    target.append(base64Encode(networkEndpointId.toByteArray()));
    // Add query parameter indicating the service to authenticate against.
    if (gaiaServiceId != null) {
        target.append('?').append(HttpConstants.SERVICE_PARAMETER).append('=').append(gaiaServiceId);
    }
    return new URL(target.toString());
}
/**
 * Returns an {@link HttpURLConnection} to use to POST a message to the data center. Sets
 * the content-type and user-agent headers; also sets the echo token header if we have an
 * echo token.
 *
 * @param context Android context
 * @param url URL to which to post
 * @param authToken auth token to provide in the request header
 * @param isOAuth2Token whether the token is an OAuth2 token (vs. a GoogleLogin token)
 */
public static HttpURLConnection createUrlConnectionForPost(
    Context context, URL url, String authToken, boolean isOAuth2Token) throws IOException {
    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    try {
        connection.setRequestMethod("POST");
    } catch (ProtocolException exception) {
        // POST is always a legal method; this should be impossible.
        throw new RuntimeException("Cannot set request method to POST", exception);
    }
    connection.setDoOutput(true);

    // The header format depends on the token flavor.
    String authHeaderValue =
        isOAuth2Token ? ("Bearer " + authToken) : ("GoogleLogin auth=" + authToken);
    connection.setRequestProperty("Authorization", authHeaderValue);

    connection.setRequestProperty("Content-Type", HttpConstants.PROTO_CONTENT_TYPE);
    connection.setRequestProperty(
        "User-Agent", context.getApplicationInfo().className + "(" + Build.VERSION.RELEASE + ")");

    // If we have a token to echo to the server, echo it.
    String echoToken = AndroidChannelPreferences.getEchoToken(context);
    if (echoToken != null) {
        connection.setRequestProperty(HttpConstants.ECHO_HEADER, echoToken);
    }
    return connection;
}
/**
 * Reads and returns all data from {@code in} as a single string, with line terminators
 * dropped. The caller owns {@code in} and is responsible for closing it.
 * <p>
 * NOTE(review): the reader uses the platform default charset — presumably the server
 * responds in a compatible encoding; confirm and pass an explicit charset if not.
 */
private static String readCompleteStream(InputStream in) throws IOException {
    // StringBuilder instead of StringBuffer: this is single-threaded, so the
    // synchronization of StringBuffer is pure overhead.
    StringBuilder buffer = new StringBuilder();
    BufferedReader reader = new BufferedReader(new InputStreamReader(in));
    String line;
    while ((line = reader.readLine()) != null) {
        buffer.append(line);
    }
    return buffer.toString();
}
/** Returns a base-64 encoded version of {@code bytes} (URL-safe alphabet, no line wrap, no padding). */
private static String base64Encode(byte[] bytes) {
    return Base64.encodeToString(bytes, Base64.URL_SAFE | Base64.NO_WRAP | Base64.NO_PADDING);
}
/**
 * Returns the network id for this channel, or {@code null} if one cannot be determined.
 *
 * @param context Android context used to read channel preferences and the package name
 * @param logger logger used for warnings when no registration id is available
 */
public static NetworkEndpointId getNetworkEndpointId(Context context, Logger logger) {
    String registrationId;
    String clientKey;
    // Select the registration token to use, depending on which GCM channel is configured.
    if (AndroidChannelPreferences.getGcmChannelType(context) == GcmChannelType.UPDATED) {
        registrationId = AndroidChannelPreferences.getRegistrationToken(context);
        clientKey = GcmSharedConstants.ANDROID_ENDPOINT_ID_CLIENT_KEY;
    } else {
        // No client key when using old style registration id.
        clientKey = "";
        try {
            registrationId = GCMRegistrar.getRegistrationId(context);
        } catch (RuntimeException exception) {
            // GCMRegistrar#getRegistrationId occasionally throws a runtime exception. Catching the
            // exception rather than crashing.
            logger.warning("Unable to get GCM registration id: %s", exception);
            registrationId = null;
        }
    }
    if ((registrationId == null) || registrationId.isEmpty()) {
        // No registration with GCM; we cannot compute a network id. The GCM documentation says the
        // string is never null, but we'll be paranoid.
        logger.warning(
            "No GCM registration id; cannot determine our network endpoint id: %s", registrationId);
        return null;
    }
    return CommonProtos.newAndroidEndpointId(registrationId, clientKey,
        context.getPackageName(), AndroidChannelConstants.CHANNEL_VERSION);
}
/** Sets the channel url to {@code url}, for tests. {@code url} must be non-null. */
public static void setChannelUrlForTest(String url) {
    channelUrlForTest = Preconditions.checkNotNull(url);
}
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.javaFX.packaging;
import java.io.File;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.intellij.testFramework.UsefulTestCase;
/**
* User: anna
* Date: 3/28/13
*/
public abstract class JavaFxAntTaskTest extends UsefulTestCase{
private static final String PRELOADER_CLASS = "preloaderClass";
private static final String TITLE = "title";
private static final String PRELOADER_JAR = "preloaderJar";
private static final String SIGNED = "signed";
/** Generated tasks for an artifact with no extra options: no info, preloader, or signing tags. */
public void testJarDeployNoInfo() throws Exception {
    doTest("<fx:fileset id=\"all_but_jarDeployNoInfo\" dir=\"temp\" includes=\"**/*.jar\">\n" +
           "<exclude name=\"jarDeployNoInfo.jar\">\n" +
           "</exclude>\n" +
           "</fx:fileset>\n" +
           "<fx:fileset id=\"all_jarDeployNoInfo\" dir=\"temp\" includes=\"**/*.jar\">\n" +
           "</fx:fileset>\n" +
           "<fx:application id=\"jarDeployNoInfo_id\" name=\"jarDeployNoInfo\" mainClass=\"Main\">\n" +
           "</fx:application>\n" +
           "<fx:jar destfile=\"temp/jarDeployNoInfo.jar\">\n" +
           "<fx:application refid=\"jarDeployNoInfo_id\">\n" +
           "</fx:application>\n" +
           "<fileset dir=\"temp\" excludes=\"**/*.jar\">\n" +
           "</fileset>\n" +
           "<fx:resources>\n" +
           "<fx:fileset refid=\"all_but_jarDeployNoInfo\">\n" +
           "</fx:fileset>\n" +
           "</fx:resources>\n" +
           "</fx:jar>\n" +
           "<fx:deploy width=\"800\" height=\"400\" updatemode=\"background\" outdir=\"temp/deploy\" outfile=\"jarDeployNoInfo\">\n" +
           "<fx:application refid=\"jarDeployNoInfo_id\">\n" +
           "</fx:application>\n" +
           "<fx:resources>\n" +
           "<fx:fileset refid=\"all_jarDeployNoInfo\">\n" +
           "</fx:fileset>\n" +
           "</fx:resources>\n" +
           "</fx:deploy>\n", Collections.<String, String>emptyMap());
}
/** The TITLE option must produce an {@code <fx:info title=...>} tag inside the deploy task. */
public void testJarDeployTitle() throws Exception {
    doTest("<fx:fileset id=\"all_but_jarDeployTitle\" dir=\"temp\" includes=\"**/*.jar\">\n" +
           "<exclude name=\"jarDeployTitle.jar\">\n" +
           "</exclude>\n" +
           "</fx:fileset>\n" +
           "<fx:fileset id=\"all_jarDeployTitle\" dir=\"temp\" includes=\"**/*.jar\">\n" +
           "</fx:fileset>\n" +
           "<fx:application id=\"jarDeployTitle_id\" name=\"jarDeployTitle\" mainClass=\"Main\">\n" +
           "</fx:application>\n" +
           "<fx:jar destfile=\"temp/jarDeployTitle.jar\">\n" +
           "<fx:application refid=\"jarDeployTitle_id\">\n" +
           "</fx:application>\n" +
           "<fileset dir=\"temp\" excludes=\"**/*.jar\">\n" +
           "</fileset>\n" +
           "<fx:resources>\n" +
           "<fx:fileset refid=\"all_but_jarDeployTitle\">\n" +
           "</fx:fileset>\n" +
           "</fx:resources>\n" +
           "</fx:jar>\n" +
           "<fx:deploy width=\"800\" height=\"400\" updatemode=\"background\" outdir=\"temp/deploy\" outfile=\"jarDeployTitle\">\n" +
           "<fx:application refid=\"jarDeployTitle_id\">\n" +
           "</fx:application>\n" +
           "<fx:info title=\"My App\">\n" +
           "</fx:info>\n" +
           "<fx:resources>\n" +
           "<fx:fileset refid=\"all_jarDeployTitle\">\n" +
           "</fx:fileset>\n" +
           "</fx:resources>\n" +
           "</fx:deploy>\n", Collections.singletonMap(TITLE, "My App"));
}
/** The SIGNED option must produce an {@code <fx:permissions elevated="true">} tag. */
public void testJarDeploySigned() throws Exception {
    doTest("<fx:fileset id=\"all_but_jarDeploySigned\" dir=\"temp\" includes=\"**/*.jar\">\n" +
           "<exclude name=\"jarDeploySigned.jar\">\n" +
           "</exclude>\n" +
           "</fx:fileset>\n" +
           "<fx:fileset id=\"all_jarDeploySigned\" dir=\"temp\" includes=\"**/*.jar\">\n" +
           "</fx:fileset>\n" +
           "<fx:application id=\"jarDeploySigned_id\" name=\"jarDeploySigned\" mainClass=\"Main\">\n" +
           "</fx:application>\n" +
           // This expectation already uses File.separator, so doTest's separator rewrite is a no-op here.
           "<fx:jar destfile=\"temp" + File.separator + "jarDeploySigned.jar\">\n" +
           "<fx:application refid=\"jarDeploySigned_id\">\n" +
           "</fx:application>\n" +
           "<fileset dir=\"temp\" excludes=\"**/*.jar\">\n" +
           "</fileset>\n" +
           "<fx:resources>\n" +
           "<fx:fileset refid=\"all_but_jarDeploySigned\">\n" +
           "</fx:fileset>\n" +
           "</fx:resources>\n" +
           "</fx:jar>\n" +
           "<fx:deploy width=\"800\" height=\"400\" updatemode=\"background\" outdir=\"temp" + File.separator + "deploy\" outfile=\"jarDeploySigned\">\n" +
           "<fx:permissions elevated=\"true\">\n" +
           "</fx:permissions>\n" +
           "<fx:application refid=\"jarDeploySigned_id\">\n" +
           "</fx:application>\n" +
           "<fx:resources>\n" +
           "<fx:fileset refid=\"all_jarDeploySigned\">\n" +
           "</fx:fileset>\n" +
           "</fx:resources>\n" +
           "</fx:deploy>\n", Collections.singletonMap(SIGNED, "true"));
}
/**
 * Preloader options must add the preloader fileset, exclude the preloader jar from the main
 * filesets, and set preloaderClass on the application tag.
 */
public void testJarDeployPreloader() throws Exception {
    final HashMap<String, String> options = new HashMap<String, String>();
    options.put(PRELOADER_CLASS, "MyPreloader");
    options.put(PRELOADER_JAR, "preloader.jar");
    doTest("<fx:fileset id=\"jarDeployPreloader_preloader_files\" requiredFor=\"preloader\" dir=\"temp\" includes=\"preloader.jar\">\n" +
           "</fx:fileset>\n" +
           "<fx:fileset id=\"all_but_preloader_jarDeployPreloader\" dir=\"temp\" excludes=\"preloader.jar\" includes=\"**/*.jar\">\n" +
           "</fx:fileset>\n" +
           "<fx:fileset id=\"all_but_jarDeployPreloader\" dir=\"temp\" includes=\"**/*.jar\">\n" +
           "<exclude name=\"jarDeployPreloader.jar\">\n" +
           "</exclude>\n" +
           "<exclude name=\"preloader.jar\">\n" +
           "</exclude>\n" +
           "</fx:fileset>\n" +
           "<fx:fileset id=\"all_jarDeployPreloader\" dir=\"temp\" includes=\"**/*.jar\">\n" +
           "</fx:fileset>\n" +
           "<fx:application id=\"jarDeployPreloader_id\" name=\"jarDeployPreloader\" mainClass=\"Main\" preloaderClass=\"MyPreloader\">\n" +
           "</fx:application>\n" +
           "<fx:jar destfile=\"temp/jarDeployPreloader.jar\">\n" +
           "<fx:application refid=\"jarDeployPreloader_id\">\n" +
           "</fx:application>\n" +
           "<fileset dir=\"temp\" excludes=\"**/*.jar\">\n" +
           "</fileset>\n" +
           "<fx:resources>\n" +
           "<fx:fileset refid=\"jarDeployPreloader_preloader_files\">\n" +
           "</fx:fileset>\n" +
           "<fx:fileset refid=\"all_but_jarDeployPreloader\">\n" +
           "</fx:fileset>\n" +
           "</fx:resources>\n" +
           "</fx:jar>\n" +
           "<fx:deploy width=\"800\" height=\"400\" updatemode=\"background\" outdir=\"temp/deploy\" outfile=\"jarDeployPreloader\">\n" +
           "<fx:application refid=\"jarDeployPreloader_id\">\n" +
           "</fx:application>\n" +
           "<fx:resources>\n" +
           "<fx:fileset refid=\"jarDeployPreloader_preloader_files\">\n" +
           "</fx:fileset>\n" +
           "<fx:fileset refid=\"all_but_preloader_jarDeployPreloader\">\n" +
           "</fx:fileset>\n" +
           "</fx:resources>\n" +
           "</fx:deploy>\n", options);
}
/**
 * Builds a mock packager named after the current test, applies the recognized {@code options},
 * generates the jar+deploy ant tags, and compares the generated XML with {@code expected}
 * (with '/' path separators normalized to the platform separator).
 */
private void doTest(final String expected, Map<String, String> options) {
    final String artifactName = getTestName(true);
    final String artifactFileName = artifactName + ".jar";
    final MockJavaFxPackager packager =
        new MockJavaFxPackager(artifactName + File.separator + artifactFileName);
    configureFromOptions(packager, options);

    final List<JavaFxAntGenerator.SimpleTag> tags = JavaFxAntGenerator
        .createJarAndDeployTasks(packager, artifactFileName, artifactName, "temp");

    final StringBuilder generatedXml = new StringBuilder();
    for (JavaFxAntGenerator.SimpleTag tag : tags) {
        tag.generate(generatedXml);
    }

    // Expected text is written with '/' separators; rewrite to the platform separator so the
    // test passes on Windows as well.
    final String platformExpected = expected
        .replaceAll("temp/deploy", "temp\\" + File.separator + "deploy")
        .replaceAll("temp/" + artifactFileName, "temp\\" + File.separator + artifactFileName);
    assertEquals(platformExpected, generatedXml.toString());
}

/** Applies the recognized option keys (title, preloader, signing) to the mock packager. */
private static void configureFromOptions(MockJavaFxPackager packager, Map<String, String> options) {
    final String title = options.get(TITLE);
    if (title != null) {
        packager.setTitle(title);
    }
    final String preloaderClass = options.get(PRELOADER_CLASS);
    if (preloaderClass != null) {
        packager.setPreloaderClass(preloaderClass);
    }
    final String preloaderJar = options.get(PRELOADER_JAR);
    if (preloaderJar != null) {
        packager.setPreloaderJar(preloaderJar);
    }
    if (options.containsKey(SIGNED)) {
        packager.setSigned(true);
    }
}
/**
 * Test double for {@link AbstractJavaFxPackager}: returns canned values for the artifact
 * configuration so the generated ant XML is fully determined by the setters called in a test.
 */
private static class MockJavaFxPackager extends AbstractJavaFxPackager {
    // Path of the artifact jar; its parent directory is reported as the output path.
    private String myOutputPath;
    private String myTitle;
    private String myVendor;
    private String myDescription;
    private String myHtmlParams;
    private String myParams;
    private String myPreloaderClass;
    private String myPreloaderJar;
    // Defaults to false; no test currently enables css-to-binary conversion.
    private boolean myConvertCss2Bin;
    private boolean mySigned;
    private MockJavaFxPackager(String outputPath) {
        myOutputPath = outputPath;
    }
    private void setTitle(String title) {
        myTitle = title;
    }
    private void setVendor(String vendor) {
        myVendor = vendor;
    }
    private void setDescription(String description) {
        myDescription = description;
    }
    private void setHtmlParams(String htmlParams) {
        myHtmlParams = htmlParams;
    }
    private void setParams(String params) {
        myParams = params;
    }
    private void setPreloaderClass(String preloaderClass) {
        myPreloaderClass = preloaderClass;
    }
    private void setPreloaderJar(String preloaderJar) {
        myPreloaderJar = preloaderJar;
    }
    public void setSigned(boolean signed) {
        mySigned = signed;
    }
    @Override
    protected String getArtifactName() {
        return getArtifactRootName();
    }
    @Override
    protected String getArtifactOutputPath() {
        return new File(myOutputPath).getParent();
    }
    @Override
    protected String getArtifactOutputFilePath() {
        return myOutputPath;
    }
    @Override
    protected String getAppClass() {
        return "Main";
    }
    @Override
    protected String getTitle() {
        return myTitle;
    }
    @Override
    protected String getVendor() {
        return myVendor;
    }
    @Override
    protected String getDescription() {
        return myDescription;
    }
    @Override
    protected String getWidth() {
        return "800";
    }
    @Override
    protected String getHeight() {
        return "400";
    }
    @Override
    protected String getHtmlParamFile() {
        return myHtmlParams;
    }
    @Override
    protected String getParamFile() {
        return myParams;
    }
    @Override
    protected String getUpdateMode() {
        return JavaFxPackagerConstants.UPDATE_MODE_BACKGROUND;
    }
    @Override
    protected JavaFxPackagerConstants.NativeBundles getNativeBundle() {
        return JavaFxPackagerConstants.NativeBundles.none;
    }
    // Errors are ignored: the tests only inspect the generated ant tags.
    @Override
    protected void registerJavaFxPackagerError(String message) {
    }
    @Override
    public String getKeypass() {
        return null;
    }
    @Override
    public String getStorepass() {
        return null;
    }
    @Override
    public String getKeystore() {
        return null;
    }
    @Override
    public String getAlias() {
        return null;
    }
    @Override
    public boolean isSelfSigning() {
        return true;
    }
    @Override
    public boolean isEnabledSigning() {
        return mySigned;
    }
    @Override
    public String getPreloaderClass() {
        return myPreloaderClass;
    }
    @Override
    public String getPreloaderJar() {
        return myPreloaderJar;
    }
    @Override
    public boolean convertCss2Bin() {
        return myConvertCss2Bin;
    }
}
}
| |
/**
* Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.provider.description.interestrate;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.apache.commons.lang.ObjectUtils;
import com.opengamma.analytics.financial.forex.method.FXMatrix;
import com.opengamma.analytics.financial.instrument.index.IborIndex;
import com.opengamma.analytics.financial.instrument.index.IndexON;
import com.opengamma.analytics.financial.model.interestrate.curve.YieldAndDiscountCurve;
import com.opengamma.analytics.financial.provider.sensitivity.multicurve.ForwardSensitivity;
import com.opengamma.analytics.math.curve.DoublesCurve;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.money.Currency;
import com.opengamma.util.tuple.DoublesPair;
/**
* Class describing a "market" with discounting and forward curves.
* The forward rate are computed directly.
*/
public class MulticurveProviderForward implements MulticurveProviderInterface {
/**
* A map with one (discounting) curve by currency.
*/
private final Map<Currency, YieldAndDiscountCurve> _discountingCurves;
/**
* A map with one (forward) curve by ON index.
*/
private final Map<IndexON, YieldAndDiscountCurve> _forwardONCurves;
/**
* A map with one (forward) curve by Ibor/OIS index.
*/
// TODO: Replace the DoublesCurve by a more flexible object, similar to yieldAndDiscountCurve
private final Map<IborIndex, DoublesCurve> _forwardIborCurves;
/**
* The matrix containing the exchange rates.
*/
private final FXMatrix _fxMatrix;
/**
* Map of all curves used in the provider.
*/
private Map<String, Object> _allCurves;
/**
 * Constructor with empty maps for discounting, forward and price index, and an empty FX matrix.
 */
public MulticurveProviderForward() {
    // TODO: Do we need a LinkedHashMap or a more efficient Map could be used?
    _discountingCurves = new HashMap<>();
    _forwardIborCurves = new HashMap<>();
    _forwardONCurves = new LinkedHashMap<>();
    _fxMatrix = new FXMatrix();
    setAllCurves();
}
/**
 * Constructor with empty curve maps and the given FX matrix (stored by reference, not copied).
 * @param fxMatrix The FXMatrix.
 */
public MulticurveProviderForward(final FXMatrix fxMatrix) {
    _discountingCurves = new HashMap<>();
    _forwardIborCurves = new HashMap<>();
    _forwardONCurves = new LinkedHashMap<>();
    _fxMatrix = fxMatrix;
    setAllCurves();
}
/**
 * Constructor from an existing market. The given market maps are used for the new market (the same maps are used, not copied),
 * so later external mutation of those maps is visible to this provider.
 * @param discountingCurves A map with one (discounting) curve by currency.
 * @param forwardIborCurves A map with one (forward) curve by Ibor index.
 * @param forwardONCurves A map with one (forward) curve by ON index.
 * @param fxMatrix The FXMatrix.
 */
public MulticurveProviderForward(final Map<Currency, YieldAndDiscountCurve> discountingCurves, final Map<IborIndex, DoublesCurve> forwardIborCurves,
    final Map<IndexON, YieldAndDiscountCurve> forwardONCurves, final FXMatrix fxMatrix) {
    _discountingCurves = discountingCurves;
    _forwardIborCurves = forwardIborCurves;
    _forwardONCurves = forwardONCurves;
    _fxMatrix = fxMatrix;
    setAllCurves();
}
/**
 * Constructor from an existing market. The given market maps are used for the new market (the same maps are used, not copied);
 * use {@link #copy()} for an independent snapshot of the maps.
 * @param market The existing market.
 */
public MulticurveProviderForward(final MulticurveProviderForward market) {
    _discountingCurves = market._discountingCurves;
    _forwardIborCurves = market._forwardIborCurves;
    _forwardONCurves = market._forwardONCurves;
    _fxMatrix = market._fxMatrix;
    setAllCurves();
}
@Override
public MulticurveProviderInterface getMulticurveProvider() {
  // This provider is itself the multicurve provider.
  return this;
}
@Override
public MulticurveProviderForward copy() {
  // Shallow-copy each curve map (the curve objects themselves are shared) and
  // duplicate the FX matrix, so the copy's maps can be mutated independently.
  return new MulticurveProviderForward(
      new LinkedHashMap<>(_discountingCurves),
      new LinkedHashMap<>(_forwardIborCurves),
      new LinkedHashMap<>(_forwardONCurves),
      new FXMatrix(_fxMatrix));
}
/**
 * Rebuilds the aggregated name-to-curve map from the discounting, ibor-forward and
 * ON-forward maps. A curve appearing in several maps under the same name is stored once.
 */
private void setAllCurves() {
  _allCurves = new LinkedHashMap<>();
  for (final YieldAndDiscountCurve discounting : _discountingCurves.values()) {
    _allCurves.put(discounting.getName(), discounting);
  }
  for (final DoublesCurve iborForward : _forwardIborCurves.values()) {
    _allCurves.put(iborForward.getName(), iborForward);
  }
  for (final YieldAndDiscountCurve onForward : _forwardONCurves.values()) {
    _allCurves.put(onForward.getName(), onForward);
  }
}
@Override
public double[] parameterSensitivity(final String name, final List<DoublesPair> pointSensitivity) {
  // Sensitivity of a value to the parameters of a yield curve: chain-rule the point
  // (time, dValue/dRate) sensitivities through the curve's rate-to-parameter Jacobian.
  final Object curveObject = _allCurves.get(name);
  ArgumentChecker.isTrue(curveObject instanceof YieldAndDiscountCurve, "Curve not a YieldAndDiscountCurve, can not compute sensitivity");
  final YieldAndDiscountCurve curve = (YieldAndDiscountCurve) curveObject;
  final int nbParameters = curve.getNumberOfParameters();
  final double[] result = new double[nbParameters];
  // A null or empty point-sensitivity list yields an all-zero result of the right length.
  if (pointSensitivity != null && pointSensitivity.size() > 0) {
    for (final DoublesPair timeAndS : pointSensitivity) {
      final double[] sensi1Point = curve.getInterestRateParameterSensitivity(timeAndS.getFirst());
      for (int loopparam = 0; loopparam < nbParameters; loopparam++) {
        result[loopparam] += timeAndS.getSecond() * sensi1Point[loopparam];
      }
    }
  }
  return result;
}
/**
 * Computes the sensitivity of a value to the parameters of the named curve from a list of
 * forward-rate point sensitivities. Yield curves propagate the sensitivity through the
 * pseudo-discount factors; plain DoublesCurve forward curves use the y-value parameter
 * sensitivity at the period start.
 */
@Override
public double[] parameterForwardSensitivity(final String name, final List<ForwardSensitivity> pointSensitivity) {
  final Object curveObject = _allCurves.get(name);
  if (curveObject instanceof YieldAndDiscountCurve) {
    final YieldAndDiscountCurve curve = (YieldAndDiscountCurve) curveObject;
    final int nbParameters = curve.getNumberOfParameters();
    final double[] result = new double[nbParameters];
    if (pointSensitivity != null && pointSensitivity.size() > 0) {
      for (final ForwardSensitivity timeAndS : pointSensitivity) {
        final double startTime = timeAndS.getStartTime();
        final double endTime = timeAndS.getEndTime();
        final double accrualFactor = timeAndS.getAccrualFactor();
        final double forwardBar = timeAndS.getValue();
        // Implementation note: only the sensitivity to the forward is available. The sensitivity to the pseudo-discount factors need to be computed.
        final double dfForwardStart = curve.getDiscountFactor(startTime);
        final double dfForwardEnd = curve.getDiscountFactor(endTime);
        final double dFwddyStart = -startTime * dfForwardStart / (dfForwardEnd * accrualFactor);
        final double dFwddyEnd = endTime * dfForwardStart / (dfForwardEnd * accrualFactor);
        final double[] sensiPtStart = curve.getInterestRateParameterSensitivity(startTime);
        final double[] sensiPtEnd = curve.getInterestRateParameterSensitivity(endTime);
        for (int loopparam = 0; loopparam < nbParameters; loopparam++) {
          result[loopparam] += dFwddyStart * sensiPtStart[loopparam] * forwardBar;
          result[loopparam] += dFwddyEnd * sensiPtEnd[loopparam] * forwardBar;
        }
      }
    }
    return result;
  }
  // FIX: original message read "can not computed sensitivity".
  ArgumentChecker.isTrue(curveObject instanceof DoublesCurve, "Curve not a DoublesCurve, cannot compute sensitivity");
  final DoublesCurve curve = (DoublesCurve) curveObject;
  final int nbParameters = curve.size();
  final double[] result = new double[nbParameters];
  if (pointSensitivity != null && pointSensitivity.size() > 0) {
    for (final ForwardSensitivity timeAndS : pointSensitivity) {
      final Double[] sensiPtStart = curve.getYValueParameterSensitivity(timeAndS.getStartTime());
      // Implementation note: the forward rate are indexed by the start date.
      for (int loopparam = 0; loopparam < nbParameters; loopparam++) {
        result[loopparam] += timeAndS.getValue() * sensiPtStart[loopparam];
      }
    }
  }
  return result;
}
@Override
public Integer getNumberOfParameters(final String name) {
  // Yield curves expose their parameter count directly; a plain DoublesCurve uses its size.
  final Object curveObject = _allCurves.get(name);
  if (curveObject instanceof YieldAndDiscountCurve) {
    return ((YieldAndDiscountCurve) curveObject).getNumberOfParameters();
  }
  ArgumentChecker.isTrue(curveObject instanceof DoublesCurve, "Curve not a DoublesCurve; cannot get number of parameters");
  return ((DoublesCurve) curveObject).size();
}
@Override
public List<String> getUnderlyingCurvesNames(final String name) {
  final Object curveObject = _allCurves.get(name);
  if (curveObject instanceof YieldAndDiscountCurve) {
    return ((YieldAndDiscountCurve) curveObject).getUnderlyingCurvesNames();
  }
  //REVIEW emcleod 7-8-2013 What is the purpose of this?
  ArgumentChecker.isTrue(curveObject instanceof DoublesCurve, "Curve not a DoublesCurve");
  // A DoublesCurve has no underlying curves, so it reports itself as its only underlying.
  final List<String> list = new ArrayList<>();
  list.add(name);
  return list;
}
@Override
public double getDiscountFactor(final Currency ccy, final Double time) {
  // Guard clause: fail fast when no discounting curve is registered for the currency.
  if (!_discountingCurves.containsKey(ccy)) {
    throw new IllegalArgumentException("Currency discounting curve not found: " + ccy);
  }
  return _discountingCurves.get(ccy).getDiscountFactor(time);
}
@Override
public String getName(final Currency ccy) {
  // Guard clause: no curve registered for this currency is an error.
  if (!_discountingCurves.containsKey(ccy)) {
    throw new IllegalArgumentException("Currency discounting curve not found: " + ccy);
  }
  return _discountingCurves.get(ccy).getName();
}
@Override
public Set<Currency> getCurrencies() {
  // Returns the live key set of the discounting map (not a defensive copy).
  return _discountingCurves.keySet();
}
@Override
public double getInvestmentFactor(final IborIndex index, final double startTime, final double endTime, final double accrualFactor) {
  if (!_forwardIborCurves.containsKey(index)) {
    throw new IllegalArgumentException("Forward curve not found: " + index);
  }
  // Investment factor 1 + delta * F, with F read at the period start.
  // NOTE(review): endTime is unused — the forward curve here is indexed by start time only.
  final double forwardRate = _forwardIborCurves.get(index).getYValue(startTime);
  return 1 + accrualFactor * forwardRate;
}
@Override
public double getSimplyCompoundForwardRate(final IborIndex index, final double startTime, final double endTime, final double accrualFactor) {
  if (!_forwardIborCurves.containsKey(index)) {
    throw new IllegalArgumentException("Forward curve not found: " + index);
  }
  // The forward curve stores the rate directly, indexed by the period start time.
  return _forwardIborCurves.get(index).getYValue(startTime);
}
@Override
public double getSimplyCompoundForwardRate(final IborIndex index, final double startTime, final double endTime) {
  if (!_forwardIborCurves.containsKey(index)) {
    throw new IllegalArgumentException("Forward curve not found: " + index);
  }
  // Rate read directly off the forward curve at the period start.
  return _forwardIborCurves.get(index).getYValue(startTime);
}
@Override
public double getAnnuallyCompoundForwardRate(final IborIndex index, final double startTime, final double endTime, final double accrualFactor) {
  if (!_forwardIborCurves.containsKey(index)) {
    throw new IllegalArgumentException("Forward curve not found: " + index);
  }
  // NOTE(review): returns the same value as the simply-compounded variant — no
  // compounding conversion is applied here; confirm this is intentional.
  return _forwardIborCurves.get(index).getYValue(startTime);
}
@Override
public double getAnnuallyCompoundForwardRate(final IborIndex index, final double startTime, final double endTime) {
  if (!_forwardIborCurves.containsKey(index)) {
    throw new IllegalArgumentException("Forward curve not found: " + index);
  }
  // NOTE(review): identical to the simply-compounded variant — confirm intentional.
  return _forwardIborCurves.get(index).getYValue(startTime);
}
@Override
public String getName(final IborIndex index) {
  // Guard clause: unknown index is an error.
  if (!_forwardIborCurves.containsKey(index)) {
    throw new IllegalArgumentException("Forward curve not found: " + index);
  }
  return _forwardIborCurves.get(index).getName();
}
@Override
public Set<IborIndex> getIndexesIbor() {
  // Returns the live key set of the ibor-forward map (not a defensive copy).
  return _forwardIborCurves.keySet();
}
@Override
public double getInvestmentFactor(final IndexON index, final double startTime, final double endTime, final double accrualFactor) {
  if (!_forwardONCurves.containsKey(index)) {
    throw new IllegalArgumentException("Forward ON curve not found: " + index);
  }
  // Investment factor over [start, end] as the ratio of pseudo-discount factors.
  final YieldAndDiscountCurve curve = _forwardONCurves.get(index);
  return curve.getDiscountFactor(startTime) / curve.getDiscountFactor(endTime);
}
@Override
public double getSimplyCompoundForwardRate(final IndexON index, final double startTime, final double endTime, final double accrualFactor) {
  if (!_forwardONCurves.containsKey(index)) {
    throw new IllegalArgumentException("Forward ON curve not found: " + index);
  }
  // Simply-compounded rate: (P(start)/P(end) - 1) / delta.
  final YieldAndDiscountCurve curve = _forwardONCurves.get(index);
  return (curve.getDiscountFactor(startTime) / curve.getDiscountFactor(endTime) - 1) / accrualFactor;
}
/**
 * Returns the simply-compounded forward rate over [startTime, endTime] implied by the ON
 * pseudo-discount factors, using (endTime - startTime) as the accrual factor.
 * @throws IllegalArgumentException if the times are equal or the index has no curve.
 */
@Override
public double getSimplyCompoundForwardRate(final IndexON index, final double startTime, final double endTime) {
  // FIX: message previously read "sart time".
  ArgumentChecker.isFalse(startTime == endTime, "start time should be different from end time");
  if (_forwardONCurves.containsKey(index)) {
    return (_forwardONCurves.get(index).getDiscountFactor(startTime) / _forwardONCurves.get(index).getDiscountFactor(endTime) - 1) / (endTime - startTime);
  }
  throw new IllegalArgumentException("Forward ON curve not found: " + index);
}
/**
 * Returns the annually-compounded forward rate over [startTime, endTime] implied by the ON
 * pseudo-discount factors: (P(start)/P(end))^(1/delta) - 1.
 * @throws IllegalArgumentException if the accrual factor is zero or the index has no curve.
 */
@Override
public double getAnnuallyCompoundForwardRate(final IndexON index, final double startTime, final double endTime, final double accrualFactor) {
  // FIX: the check is for zero, but the message said "null".
  ArgumentChecker.isFalse(accrualFactor == 0.0, "The accrual factor can't be zero");
  if (_forwardONCurves.containsKey(index)) {
    return (Math.pow(_forwardONCurves.get(index).getDiscountFactor(startTime) / _forwardONCurves.get(index).getDiscountFactor(endTime), 1 / accrualFactor) - 1);
  }
  throw new IllegalArgumentException("Forward curve not found: " + index);
}
@Override
public double getAnnuallyCompoundForwardRate(final IndexON index, final double startTime, final double endTime) {
  ArgumentChecker.isFalse(startTime == endTime, "Start time should be different from end time");
  // Delegates with the accrual factor taken as the period length.
  return getAnnuallyCompoundForwardRate(index, startTime, endTime, endTime - startTime);
}
@Override
public String getName(final IndexON index) {
  // Guard clause: unknown ON index is an error.
  if (!_forwardONCurves.containsKey(index)) {
    throw new IllegalArgumentException("Forward curve not found: " + index);
  }
  return _forwardONCurves.get(index).getName();
}
@Override
public Set<IndexON> getIndexesON() {
  // Returns the live key set of the ON-forward map (not a defensive copy).
  return _forwardONCurves.keySet();
}
/**
 * Sets the discounting curve for a given currency. Fails if the currency already has one;
 * use replaceCurve to overwrite.
 * @param ccy The currency, not null.
 * @param curve The yield curve used for discounting, not null.
 */
public void setCurve(final Currency ccy, final YieldAndDiscountCurve curve) {
  ArgumentChecker.notNull(ccy, "currency");
  ArgumentChecker.notNull(curve, "curve");
  final boolean alreadyPresent = _discountingCurves.containsKey(ccy);
  if (alreadyPresent) {
    throw new IllegalArgumentException("Currency discounting curve already set: " + ccy.toString());
  }
  _discountingCurves.put(ccy, curve);
  setAllCurves();
}
/**
 * Sets the curve associated to an ON index. Fails if the index already has a curve.
 * @param index The index, not null.
 * @param curve The curve, not null.
 */
public void setCurve(final IndexON index, final YieldAndDiscountCurve curve) {
  ArgumentChecker.notNull(index, "index");
  ArgumentChecker.notNull(curve, "curve");
  final boolean alreadyPresent = _forwardONCurves.containsKey(index);
  if (alreadyPresent) {
    throw new IllegalArgumentException("ON index forward curve already set: " + index.toString());
  }
  _forwardONCurves.put(index, curve);
  setAllCurves();
}
/**
 * Sets the curve associated to an Ibor index. Fails if the index already has a curve.
 * @param index The index, not null.
 * @param curve The curve, not null.
 */
public void setCurve(final IborIndex index, final DoublesCurve curve) {
  ArgumentChecker.notNull(index, "index");
  ArgumentChecker.notNull(curve, "curve");
  final boolean alreadyPresent = _forwardIborCurves.containsKey(index);
  if (alreadyPresent) {
    throw new IllegalArgumentException("Ibor index forward curve already set: " + index.toString());
  }
  _forwardIborCurves.put(index, curve);
  setAllCurves();
}
/**
 * Set all the curves contains in another bundle. If a currency or index is already present in the map, the associated curve is changed.
 * @param other The other bundle, not null.
 */
// TODO: REVIEW: Should we check that the curve are already present?
public void setAll(final MulticurveProviderForward other) {
  ArgumentChecker.notNull(other, "Market bundle");
  // putAll silently overwrites existing entries — unlike setCurve, which throws.
  _discountingCurves.putAll(other._discountingCurves);
  _forwardIborCurves.putAll(other._forwardIborCurves);
  _forwardONCurves.putAll(other._forwardONCurves);
  setAllCurves();
}
@Override
public Set<String> getAllNames() {
  // Alias for getAllCurveNames().
  return getAllCurveNames();
}
@Override
public Set<String> getAllCurveNames() {
  // Names are returned sorted and unmodifiable; the snapshot is independent of _allCurves.
  return Collections.unmodifiableSortedSet(new TreeSet<>(_allCurves.keySet()));
}
/**
 * Gets the discounting curve associated in a given currency in the market.
 * @param ccy The currency.
 * @return The curve.
 * @throws IllegalArgumentException if the currency has no discounting curve.
 */
public YieldAndDiscountCurve getCurve(final Currency ccy) {
  if (!_discountingCurves.containsKey(ccy)) {
    throw new IllegalArgumentException("Currency discounting curve not found: " + ccy);
  }
  return _discountingCurves.get(ccy);
}
/**
 * Gets the forward curve associated to a given Ibor index in the market.
 * @param index The Ibor index.
 * @return The curve.
 * @throws IllegalArgumentException if the index has no forward curve.
 */
public DoublesCurve getCurve(final IborIndex index) {
  if (!_forwardIborCurves.containsKey(index)) {
    throw new IllegalArgumentException("Forward curve not found: " + index);
  }
  return _forwardIborCurves.get(index);
}
/**
 * Gets the forward curve associated to a given ON index in the market.
 * @param index The ON index.
 * @return The curve.
 * @throws IllegalArgumentException if the index has no forward curve.
 */
public YieldAndDiscountCurve getCurve(final IndexON index) {
  if (!_forwardONCurves.containsKey(index)) {
    throw new IllegalArgumentException("Forward curve not found: " + index);
  }
  return _forwardONCurves.get(index);
}
/**
 * Replaces the discounting curve for a given currency. The currency must already have a
 * curve; use setCurve to add a new one.
 * @param ccy The currency, not null.
 * @param curve The yield curve used for discounting, not null.
 * @throws IllegalArgumentException if curve name NOT already present
 */
public void replaceCurve(final Currency ccy, final YieldAndDiscountCurve curve) {
  ArgumentChecker.notNull(ccy, "Currency");
  ArgumentChecker.notNull(curve, "curve");
  final boolean present = _discountingCurves.containsKey(ccy);
  if (!present) {
    throw new IllegalArgumentException("Currency discounting curve not in set: " + ccy);
  }
  _discountingCurves.put(ccy, curve);
  setAllCurves();
}
/**
 * Replaces the forward curve for a given index. The index must already have a curve;
 * use setCurve to add a new one.
 * @param index The index, not null.
 * @param curve The yield curve used for forward, not null.
 * @throws IllegalArgumentException if curve name NOT already present
 */
public void replaceCurve(final IborIndex index, final DoublesCurve curve) {
  ArgumentChecker.notNull(index, "Index");
  ArgumentChecker.notNull(curve, "curve");
  final boolean present = _forwardIborCurves.containsKey(index);
  if (!present) {
    throw new IllegalArgumentException("Forward curve not in set: " + index);
  }
  _forwardIborCurves.put(index, curve);
  setAllCurves();
}
@Override
public double getFxRate(final Currency ccy1, final Currency ccy2) {
  // Delegates to the FX matrix.
  return _fxMatrix.getFxRate(ccy1, ccy2);
}
/**
 * Gets the underlying FXMatrix containing the exchange rates.
 * @return The matrix (the internal instance, not a copy).
 */
@Override
public FXMatrix getFxRates() {
  return _fxMatrix;
}
/**
 * Returns an unmodifiable view of the currency to discounting curves map.
 * Note: this is a view, not a copy — later changes to the provider are visible through it.
 * @return The discounting curve map
 */
public Map<Currency, YieldAndDiscountCurve> getDiscountingCurves() {
  return Collections.unmodifiableMap(_discountingCurves);
}
/**
 * Returns an unmodifiable view of the ibor index to forward curves map.
 * Note: this is a view, not a copy — later changes to the provider are visible through it.
 * @return The forward ibor curve map
 */
public Map<IborIndex, DoublesCurve> getForwardIborCurves() {
  return Collections.unmodifiableMap(_forwardIborCurves);
}
/**
 * Returns an unmodifiable view of the overnight index to forward curves map.
 * Note: this is a view, not a copy — later changes to the provider are visible through it.
 * @return The forward overnight curve map
 */
public Map<IndexON, YieldAndDiscountCurve> getForwardONCurves() {
  return Collections.unmodifiableMap(_forwardONCurves);
}
/**
 * Replaces a discounting curve for a currency, leaving this provider untouched.
 * @param ccy The currency
 * @param replacement The replacement curve
 * @return A new provider with the supplied discounting curve
 */
public MulticurveProviderForward withDiscountFactor(final Currency ccy, final YieldAndDiscountCurve replacement) {
  // REVIEW: Is this too slow for the pricing of cash-flows?
  final Map<Currency, YieldAndDiscountCurve> decoratedDiscounting = new LinkedHashMap<>(_discountingCurves);
  decoratedDiscounting.put(ccy, replacement); //TODO think about ccy not existing in current map
  return new MulticurveProviderForward(decoratedDiscounting, _forwardIborCurves, _forwardONCurves, _fxMatrix);
}
/**
 * Replaces an ibor curve for an index, leaving this provider untouched.
 * @param index The index
 * @param replacement The replacement curve
 * @return A new provider with the supplied ibor curve
 */
public MulticurveProviderForward withForward(final IborIndex index, final DoublesCurve replacement) {
  final Map<IborIndex, DoublesCurve> decoratedForwards = new LinkedHashMap<>(_forwardIborCurves);
  decoratedForwards.put(index, replacement);
  return new MulticurveProviderForward(_discountingCurves, decoratedForwards, _forwardONCurves, _fxMatrix);
}
/**
 * Replaces an overnight curve for an index, leaving this provider untouched.
 * @param index The index
 * @param replacement The replacement curve
 * @return A new provider with the supplied overnight curve
 */
public MulticurveProviderForward withForward(final IndexON index, final YieldAndDiscountCurve replacement) {
  final Map<IndexON, YieldAndDiscountCurve> decoratedForwards = new LinkedHashMap<>(_forwardONCurves);
  decoratedForwards.put(index, replacement);
  return new MulticurveProviderForward(_discountingCurves, _forwardIborCurves, decoratedForwards, _fxMatrix);
}
@Override
public int hashCode() {
  // Combines the same four fields compared in equals(), keeping the contract consistent.
  final int prime = 31;
  int result = 1;
  result = prime * result + _discountingCurves.hashCode();
  result = prime * result + _forwardIborCurves.hashCode();
  result = prime * result + _forwardONCurves.hashCode();
  result = prime * result + _fxMatrix.hashCode();
  return result;
}
@Override
public boolean equals(final Object obj) {
  if (this == obj) {
    return true;
  }
  if (!(obj instanceof MulticurveProviderForward)) {
    return false;
  }
  // Equal iff all three curve maps and the FX matrix are equal.
  final MulticurveProviderForward other = (MulticurveProviderForward) obj;
  return ObjectUtils.equals(_discountingCurves, other._discountingCurves)
      && ObjectUtils.equals(_forwardIborCurves, other._forwardIborCurves)
      && ObjectUtils.equals(_forwardONCurves, other._forwardONCurves)
      && ObjectUtils.equals(_fxMatrix, other._fxMatrix);
}
}
| |
package com.fsck.k9.mail.store.imap;
import java.io.IOException;
import java.nio.charset.CharacterCodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import android.net.ConnectivityManager;
import android.util.Log;
import com.fsck.k9.mail.AuthType;
import com.fsck.k9.mail.ConnectionSecurity;
import com.fsck.k9.mail.Flag;
import com.fsck.k9.mail.K9MailLib;
import com.fsck.k9.mail.MessagingException;
import com.fsck.k9.mail.NetworkType;
import com.fsck.k9.mail.PushReceiver;
import com.fsck.k9.mail.Pusher;
import com.fsck.k9.mail.ServerSettings;
import com.fsck.k9.mail.ssl.TrustedSocketFactory;
import com.fsck.k9.mail.store.RemoteStore;
import com.fsck.k9.mail.store.StoreConfig;
import static com.fsck.k9.mail.K9MailLib.LOG_TAG;
/**
* <pre>
* TODO Need to start keeping track of UIDVALIDITY
* TODO Need a default response handler for things like folder updates
* </pre>
*/
public class ImapStore extends RemoteStore {
    private Set<Flag> permanentFlagsIndex = EnumSet.noneOf(Flag.class);
    private ConnectivityManager connectivityManager;

    private String host;
    private int port;
    private String username;
    private String password;
    private String clientCertificateAlias;
    private ConnectionSecurity connectionSecurity;
    private AuthType authType;
    private String pathPrefix;
    // Lazily computed from pathPrefix + pathDelimiter; reset to null when the delimiter changes.
    private String combinedPrefix = null;
    private String pathDelimiter = null;
    // Pool of idle connections, managed by getConnection()/releaseConnection().
    private final LinkedList<ImapConnection> connections = new LinkedList<>();
    private FolderNameCodec folderNameCodec;

    /**
     * Cache of ImapFolder objects. ImapFolders are attached to a given folder on the server
     * and as long as their associated connection remains open they are reusable between
     * requests. This cache lets us make sure we always reuse, if possible, for a given
     * folder name.
     */
    private final Map<String, ImapFolder> folderCache = new HashMap<>();

    public static ImapStoreSettings decodeUri(String uri) {
        return ImapStoreUriDecoder.decode(uri);
    }

    public static String createUri(ServerSettings server) {
        return ImapStoreUriCreator.create(server);
    }

    /**
     * Creates a store from the URI held by the supplied config.
     *
     * @throws MessagingException if the store URI cannot be decoded or lacks credentials.
     */
    public ImapStore(StoreConfig storeConfig, TrustedSocketFactory trustedSocketFactory,
            ConnectivityManager connectivityManager) throws MessagingException {
        super(storeConfig, trustedSocketFactory);

        ImapStoreSettings settings;
        try {
            settings = decodeUri(storeConfig.getStoreUri());
        } catch (IllegalArgumentException e) {
            throw new MessagingException("Error while decoding store URI", e);
        }

        host = settings.host;
        port = settings.port;
        connectionSecurity = settings.connectionSecurity;
        this.connectivityManager = connectivityManager;
        authType = settings.authenticationType;
        username = settings.username;
        password = settings.password;
        if (username == null) {
            throw new MessagingException("username not set");
        }
        if (password == null) {
            throw new MessagingException("password not set");
        }
        clientCertificateAlias = settings.clientCertificateAlias;

        // Make extra sure pathPrefix is null if "auto-detect namespace" is configured
        pathPrefix = (settings.autoDetectNamespace) ? null : settings.pathPrefix;

        folderNameCodec = FolderNameCodec.newInstance();
    }

    /** Returns the cached folder for the name, creating and caching it on first use. */
    @Override
    public ImapFolder getFolder(String name) {
        ImapFolder folder;
        synchronized (folderCache) {
            folder = folderCache.get(name);
            if (folder == null) {
                folder = new ImapFolder(this, name);
                folderCache.put(name, folder);
            }
        }
        return folder;
    }

    /**
     * Returns the namespace prefix to prepend to folder names in IMAP commands,
     * recomputing it lazily from pathPrefix and pathDelimiter.
     */
    String getCombinedPrefix() {
        if (combinedPrefix == null) {
            if (pathPrefix != null) {
                String tmpPrefix = pathPrefix.trim();
                String tmpDelim = (pathDelimiter != null ? pathDelimiter.trim() : "");
                if (tmpPrefix.endsWith(tmpDelim)) {
                    combinedPrefix = tmpPrefix;
                } else if (tmpPrefix.length() > 0) {
                    combinedPrefix = tmpPrefix + tmpDelim;
                } else {
                    combinedPrefix = "";
                }
            } else {
                combinedPrefix = "";
            }
        }
        return combinedPrefix;
    }

    @Override
    public List<ImapFolder> getPersonalNamespaces(boolean forceListAll) throws MessagingException {
        ImapConnection connection = getConnection();
        try {
            Set<String> folderNames = listFolders(connection, false);

            if (forceListAll || !mStoreConfig.subscribedFoldersOnly()) {
                return getFolders(folderNames);
            }

            // Keep only folders the user is subscribed to.
            Set<String> subscribedFolders = listFolders(connection, true);
            folderNames.retainAll(subscribedFolders);

            return getFolders(folderNames);
        } catch (IOException | MessagingException ioe) {
            connection.close();
            throw new MessagingException("Unable to get folder list.", ioe);
        } finally {
            releaseConnection(connection);
        }
    }

    /**
     * Lists folder names under the combined prefix via LIST (or LSUB for subscribed only).
     * Folders with undecodable names or the \NoSelect attribute are skipped; the local
     * inbox name is always included.
     */
    private Set<String> listFolders(ImapConnection connection, boolean subscribedOnly) throws IOException,
            MessagingException {
        String commandResponse = subscribedOnly ? "LSUB" : "LIST";

        List<ImapResponse> responses =
                connection.executeSimpleCommand(String.format("%s \"\" %s", commandResponse,
                        ImapUtility.encodeString(getCombinedPrefix() + "*")));

        List<ListResponse> listResponses = (subscribedOnly) ?
                ListResponse.parseLsub(responses) : ListResponse.parseList(responses);

        Set<String> folderNames = new HashSet<>(listResponses.size());
        for (ListResponse listResponse : listResponses) {
            boolean includeFolder = true;

            String decodedFolderName;
            try {
                decodedFolderName = folderNameCodec.decode(listResponse.getName());
            } catch (CharacterCodingException e) {
                Log.w(LOG_TAG, "Folder name not correctly encoded with the UTF-7 variant " +
                        "as defined by RFC 3501: " + listResponse.getName(), e);

                //TODO: Use the raw name returned by the server for all commands that require
                // a folder name. Use the decoded name only for showing it to the user.

                // We currently just skip folders with malformed names.
                continue;
            }

            String folder = decodedFolderName;

            if (pathDelimiter == null) {
                pathDelimiter = listResponse.getHierarchyDelimiter();
                // The combined prefix depends on the delimiter, so force a recompute.
                combinedPrefix = null;
            }

            if (folder.equalsIgnoreCase(mStoreConfig.getInboxFolderName())) {
                continue;
            } else if (folder.equals(mStoreConfig.getOutboxFolderName())) {
                /*
                 * There is a folder on the server with the same name as our local
                 * outbox. Until we have a good plan to deal with this situation
                 * we simply ignore the folder on the server.
                 */
                continue;
            } else {
                int prefixLength = getCombinedPrefix().length();
                if (prefixLength > 0) {
                    // Strip prefix from the folder name
                    if (folder.length() >= prefixLength) {
                        folder = folder.substring(prefixLength);
                    }

                    if (!decodedFolderName.equalsIgnoreCase(getCombinedPrefix() + folder)) {
                        includeFolder = false;
                    }
                }
            }

            if (listResponse.hasAttribute("\\NoSelect")) {
                includeFolder = false;
            }

            if (includeFolder) {
                folderNames.add(folder);
            }
        }

        folderNames.add(mStoreConfig.getInboxFolderName());

        return folderNames;
    }

    /**
     * Attempts special-folder auto-configuration via RFC 6154 SPECIAL-USE, if the server
     * advertises the capability. Updates the store config with any detected folders.
     */
    void autoconfigureFolders(final ImapConnection connection) throws IOException, MessagingException {
        if (!connection.hasCapability(Capabilities.SPECIAL_USE)) {
            if (K9MailLib.isDebug()) {
                Log.d(LOG_TAG, "No detected folder auto-configuration methods.");
            }
            return;
        }

        if (K9MailLib.isDebug()) {
            Log.d(LOG_TAG, "Folder auto-configuration: Using RFC6154/SPECIAL-USE.");
        }

        String command = String.format("LIST (SPECIAL-USE) \"\" %s", ImapUtility.encodeString(getCombinedPrefix() + "*"));
        List<ImapResponse> responses = connection.executeSimpleCommand(command);

        List<ListResponse> listResponses = ListResponse.parseList(responses);

        for (ListResponse listResponse : listResponses) {
            String decodedFolderName;
            try {
                decodedFolderName = folderNameCodec.decode(listResponse.getName());
            } catch (CharacterCodingException e) {
                Log.w(LOG_TAG, "Folder name not correctly encoded with the UTF-7 variant " +
                        "as defined by RFC 3501: " + listResponse.getName(), e);
                // We currently just skip folders with malformed names.
                continue;
            }

            if (pathDelimiter == null) {
                pathDelimiter = listResponse.getHierarchyDelimiter();
                combinedPrefix = null;
            }

            if (listResponse.hasAttribute("\\Archive") || listResponse.hasAttribute("\\All")) {
                mStoreConfig.setArchiveFolderName(decodedFolderName);
                if (K9MailLib.isDebug()) {
                    Log.d(LOG_TAG, "Folder auto-configuration detected Archive folder: " + decodedFolderName);
                }
            } else if (listResponse.hasAttribute("\\Drafts")) {
                mStoreConfig.setDraftsFolderName(decodedFolderName);
                if (K9MailLib.isDebug()) {
                    Log.d(LOG_TAG, "Folder auto-configuration detected Drafts folder: " + decodedFolderName);
                }
            } else if (listResponse.hasAttribute("\\Sent")) {
                mStoreConfig.setSentFolderName(decodedFolderName);
                if (K9MailLib.isDebug()) {
                    Log.d(LOG_TAG, "Folder auto-configuration detected Sent folder: " + decodedFolderName);
                }
            } else if (listResponse.hasAttribute("\\Junk")) {
                mStoreConfig.setSpamFolderName(decodedFolderName);
                if (K9MailLib.isDebug()) {
                    Log.d(LOG_TAG, "Folder auto-configuration detected Spam folder: " + decodedFolderName);
                }
            } else if (listResponse.hasAttribute("\\Trash")) {
                mStoreConfig.setTrashFolderName(decodedFolderName);
                if (K9MailLib.isDebug()) {
                    Log.d(LOG_TAG, "Folder auto-configuration detected Trash folder: " + decodedFolderName);
                }
            }
        }
    }

    @Override
    public void checkSettings() throws MessagingException {
        try {
            ImapConnection connection = createImapConnection();
            connection.open();
            try {
                autoconfigureFolders(connection);
            } finally {
                // FIX: previously the connection was only closed on the success path,
                // leaking it when open()/autoconfigureFolders() threw MessagingException.
                connection.close();
            }
        } catch (IOException ioe) {
            throw new MessagingException("Unable to connect", ioe);
        }
    }

    /**
     * Returns a usable connection: a pooled one that still answers NOOP, or a fresh one.
     * Dead pooled connections are closed and discarded.
     */
    ImapConnection getConnection() throws MessagingException {
        ImapConnection connection;
        while ((connection = pollConnection()) != null) {
            try {
                connection.executeSimpleCommand(Commands.NOOP);
                break;
            } catch (IOException ioe) {
                connection.close();
            }
        }

        if (connection == null) {
            connection = createImapConnection();
        }

        return connection;
    }

    private ImapConnection pollConnection() {
        synchronized (connections) {
            return connections.poll();
        }
    }

    /** Returns a still-open connection to the pool; dropped silently otherwise. */
    void releaseConnection(ImapConnection connection) {
        if (connection != null && connection.isConnected()) {
            synchronized (connections) {
                connections.offer(connection);
            }
        }
    }

    ImapConnection createImapConnection() {
        return new ImapConnection(new StoreImapSettings(), mTrustedSocketFactory, connectivityManager);
    }

    FolderNameCodec getFolderNameCodec() {
        return folderNameCodec;
    }

    private List<ImapFolder> getFolders(Collection<String> folderNames) {
        List<ImapFolder> folders = new ArrayList<>(folderNames.size());

        for (String folderName : folderNames) {
            ImapFolder imapFolder = getFolder(folderName);
            folders.add(imapFolder);
        }

        return folders;
    }

    @Override
    public boolean isMoveCapable() {
        return true;
    }

    @Override
    public boolean isCopyCapable() {
        return true;
    }

    @Override
    public boolean isPushCapable() {
        return true;
    }

    @Override
    public boolean isExpungeCapable() {
        return true;
    }

    StoreConfig getStoreConfig() {
        return mStoreConfig;
    }

    Set<Flag> getPermanentFlagsIndex() {
        return permanentFlagsIndex;
    }

    @Override
    public Pusher getPusher(PushReceiver receiver) {
        return new ImapPusher(this, receiver);
    }

    /** Adapter exposing this store's settings to ImapConnection via the ImapSettings interface. */
    private class StoreImapSettings implements ImapSettings {
        @Override
        public String getHost() {
            return host;
        }

        @Override
        public int getPort() {
            return port;
        }

        @Override
        public ConnectionSecurity getConnectionSecurity() {
            return connectionSecurity;
        }

        @Override
        public AuthType getAuthType() {
            return authType;
        }

        @Override
        public String getUsername() {
            return username;
        }

        @Override
        public String getPassword() {
            return password;
        }

        @Override
        public String getClientCertificateAlias() {
            return clientCertificateAlias;
        }

        @Override
        public boolean useCompression(final NetworkType type) {
            return mStoreConfig.useCompression(type);
        }

        @Override
        public String getPathPrefix() {
            return pathPrefix;
        }

        @Override
        public void setPathPrefix(String prefix) {
            pathPrefix = prefix;
        }

        @Override
        public String getPathDelimiter() {
            return pathDelimiter;
        }

        @Override
        public void setPathDelimiter(String delimiter) {
            pathDelimiter = delimiter;
        }

        @Override
        public String getCombinedPrefix() {
            return combinedPrefix;
        }

        @Override
        public void setCombinedPrefix(String prefix) {
            combinedPrefix = prefix;
        }
    }
}
| |
package net.strongdesign.stg;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import net.strongdesign.desij.CLW;
import net.strongdesign.desij.DesiJException;
import net.strongdesign.util.FileSupport;
import net.strongdesign.util.ParsingException;
public abstract class STGCreator {
/**
 * Creates one of the predefined example/benchmark STGs.
 * The parameter has the form {@code <model>:<args>} where {@code <model>} is one of
 * art, seq, par, multipar, multiseq, seqpartree, parseqtree, merge, mix, multimix.
 * "art" takes two comma-separated numbers (pipelines, length); the "multi*" and
 * "*tree" models take a tree height; the remaining models take an identifier prefix.
 *
 * @param parameter model name plus model-specific arguments, colon-separated
 * @return the generated STG
 * @throws STGException propagated from STG construction
 * @throws DesiJException (unchecked) if the model name is unknown or the
 *     arguments are missing or malformed
 */
public static STG getPredefinedSTG(String parameter) throws STGException {
    String[] modelParam = parameter.split(":");
    if (modelParam[0].equals("art")) {
        String[] values;
        if (modelParam.length != 2 || (values = modelParam[1].split(",")).length != 2)
            throw new DesiJException(
                "Model art expects number of pipelines and their length as parameters." +
                " Given was: " + parameter);
        try {
            Integer nroPipelines = Integer.parseInt(values[0]);
            Integer length = Integer.parseInt(values[1]);
            return getArtJorPipeline(nroPipelines, length);
        }
        catch (NumberFormatException e) {
            throw new DesiJException("Could not parse art parameters. Expected two numbers. " + parameter);
        }
    }
    else if (modelParam[0].equals("seq")) {
        if (modelParam.length != 2)
            throw new DesiJException(
                "Model seq expects identification string." +
                " Given was: " + parameter);
        return getSequencer(modelParam[1], 0);
    }
    else if (modelParam[0].equals("par")) {
        if (modelParam.length != 2)
            throw new DesiJException(
                "Model par expects identification string." +
                " Given was: " + parameter);
        return getParalleliser(modelParam[1], 0);
    }
    else if (modelParam[0].equals("multipar")) {
        // Fixed: the message claimed an identification string, but a tree
        // height (number) is expected; parse errors are now reported uniformly
        // instead of escaping as raw NumberFormatExceptions.
        if (modelParam.length != 2)
            throw new DesiJException(
                "Model multipar expects the tree height as parameter." +
                " Given was: " + parameter);
        return getParTree(parseIntParameter("multipar", modelParam[1]));
    }
    else if (modelParam[0].equals("multiseq")) {
        if (modelParam.length != 2)
            throw new DesiJException(
                "Model multiseq expects the tree height as parameter." +
                " Given was: " + parameter);
        return getSeqTree(parseIntParameter("multiseq", modelParam[1]));
    }
    else if (modelParam[0].equals("seqpartree")) {
        if (modelParam.length != 2)
            throw new DesiJException(
                "Model seqpartree expects the tree height as parameter." +
                " Given was: " + parameter);
        return getParSeqTree(parseIntParameter("seqpartree", modelParam[1]), true);
    }
    else if (modelParam[0].equals("parseqtree")) {
        if (modelParam.length != 2)
            throw new DesiJException(
                "Model parseqtree expects the tree height as parameter." +
                " Given was: " + parameter);
        return getParSeqTree(parseIntParameter("parseqtree", modelParam[1]), false);
    }
    else if (modelParam[0].equals("merge")) {
        if (modelParam.length != 2)
            throw new DesiJException(
                "Model merger expects identification string." +
                " Given was: " + parameter);
        return getMerger(modelParam[1], 0);
    }
    else if (modelParam[0].equals("mix")) {
        if (modelParam.length != 2)
            throw new DesiJException(
                "Model mixer expects identification string." +
                " Given was: " + parameter);
        return getMixer(modelParam[1], 0);
    }
    else if (modelParam[0].equals("multimix")) {
        if (modelParam.length != 2)
            throw new DesiJException(
                "Model multimixer expects the tree height as parameter." +
                " Given was: " + parameter);
        return getMixTree(parseIntParameter("multimix", modelParam[1]));
    }
    else
        throw new DesiJException("Unknown model name: " + modelParam[0] );
}

/**
 * Parses a numeric model parameter, converting a NumberFormatException into
 * the DesiJException used for all other argument errors of getPredefinedSTG.
 *
 * @param model the model name, for the error message
 * @param value the raw parameter text
 * @return the parsed integer
 */
private static int parseIntParameter(String model, String value) {
    try {
        return Integer.parseInt(value);
    }
    catch (NumberFormatException e) {
        throw new DesiJException(
            "Could not parse parameter of model " + model +
            ". Expected a number. Given was: " + value);
    }
}
/**
* Builds the "art" benchmark: nroPipelines cyclic pipelines of the given
* length whose start transitions are coupled pairwise through sincr signals.
* All signals are declared as outputs.
*
* @param nroPipelines number of pipelines (at least 2)
* @param length number of stages per pipeline (at least 2)
* @return the constructed STG
* @throws STGException propagated from STG construction
*/
public static STG getArtJorPipeline(int nroPipelines, int length) throws STGException {
if (nroPipelines < 2 || length < 2)
throw new IllegalArgumentException("Need at least 2 pipelines with minimum length 2.");
STG stg = new STG();
int signal = -1;
// one signal per pipeline stage: x_<pipeline>_<pos>
for (int pipeline = 1; pipeline <= nroPipelines; ++pipeline) {
for (int pos = 1; pos <= length; ++pos) {
stg.setSignalName(++signal, "x_"+pipeline+"_"+pos);
}
}
// one synchronisation signal between each pair of adjacent pipelines
for (int pipeline = 2; pipeline <= nroPipelines; ++pipeline) {
stg.setSignalName(++signal, "sincr_"+pipeline);
}
stg.setSignature(stg.getSignals(), Signature.OUTPUT);
// Create the x_i_1+ start transition of every pipeline and couple adjacent
// pipelines via a sincr+ / sincr- transition pair; the place from sincr-
// back to the start is initially marked.
Transition pipeConUp = null;
Transition pipeConDown = null;
List<Transition> pipeStarts = new LinkedList<Transition>();
for (int pipeline = 1; pipeline <= nroPipelines; ++pipeline) {
Transition pipeStart = stg.addTransition(
new SignalEdge(
stg.getSignalNumber("x_"+pipeline+"_1"),
EdgeDirection.UP));
pipeStarts.add(pipeStart);
if (pipeConDown != null) {
connect(pipeStart, pipeConDown, stg);
connect(pipeConUp, pipeStart, stg);
}
if (pipeline < nroPipelines) {
pipeConUp = stg.addTransition(
new SignalEdge(
stg.getSignalNumber("sincr_"+ (pipeline + 1)),
EdgeDirection.UP));
pipeConDown = stg.addTransition(
new SignalEdge(
stg.getSignalNumber("sincr_"+ (pipeline + 1)),
EdgeDirection.DOWN));
connect(pipeStart, pipeConUp, stg);
connect(pipeConDown, pipeStart, stg).setMarking(1);
}
}
// Unfold each pipeline into a cycle: x_i_2+ .. x_i_len+ followed by
// x_i_1- .. x_i_len-, closed back to the start with a marked place.
int pipeline = 0;
for (Transition start : pipeStarts) {
++pipeline;
Transition lastTransition = start;
for (int p = 2; p<=length; ++p) {
Transition newTrans = stg.addTransition(
new SignalEdge(
stg.getSignalNumber("x_" + pipeline + "_" + p),
EdgeDirection.UP));
connect(lastTransition, newTrans, stg);
lastTransition = newTrans;
}
for (int p = 1; p<=length; ++p) {
Transition newTrans = stg.addTransition(
new SignalEdge(
stg.getSignalNumber("x_" + pipeline + "_" + p),
EdgeDirection.DOWN));
connect(lastTransition, newTrans, stg);
lastTransition = newTrans;
}
connect(lastTransition, start, stg).setMarking(1);
}
return stg;
}
/**
 * Links two transitions through a fresh, initially unmarked place named "p",
 * with arc weight 1 on both sides: source -> place -> target.
 *
 * @param source the transition producing into the new place
 * @param target the transition consuming from the new place
 * @param stg the net the place is added to
 * @return the newly created place (e.g. so callers can set an initial marking)
 */
protected static Place connect(Transition source, Transition target, STG stg) {
    final Place between = stg.addPlace("p", 0);
    source.setChildValue(between, 1);
    target.setParentValue(between, 1);
    return between;
}
/**
* Generate a 4-phase handshake sequencer.<br>
* The up channel is (in,out), the down channels are (a_in,a_out) and (b_in,b_out).
* The real identifier are prefixed with id.
* The whole behaviour is a single cycle: the a handshake runs to completion,
* then the b handshake, then the up handshake completes. With the
* HANDSHAKE_COMPONENT_CSC option an internal csc signal is inserted to
* disambiguate state coding.
*
* @param id The channel name prefix
* @param signal The first signal which is used.
* @return the constructed sequencer STG
* @throws STGException
*/
public static STG getSequencer(String id, int signal) throws STGException {
STG stg = new STG();
// Define the signal names
final String _in = id + "in";
final String _out = id + "out";
final String _a_in = id + "a_in";
final String _a_out = id + "a_out";
final String _b_in = id + "b_in";
final String _b_out = id + "b_out";
final String _csc = id + "csc";
// Associate them to signal numbers and define the signatures
final int in = signal++;
stg.setSignalName(in, _in);
stg.setSignature(in, Signature.INPUT);
final int out = signal++;
stg.setSignalName(out, _out);
stg.setSignature(out, Signature.OUTPUT);
final int a_in = signal++;
stg.setSignalName(a_in, _a_in);
stg.setSignature(a_in, Signature.INPUT);
final int a_out = signal++;
stg.setSignalName(a_out, _a_out);
stg.setSignature(a_out, Signature.OUTPUT);
final int b_in = signal++;
stg.setSignalName(b_in, _b_in);
stg.setSignature(b_in, Signature.INPUT);
final int b_out = signal++;
stg.setSignalName(b_out, _b_out);
stg.setSignature(b_out, Signature.OUTPUT);
// csc is only registered when the option is enabled; the number is
// reserved either way so the numbering stays stable.
final int csc = signal++;
if (CLW.instance.HANDSHAKE_COMPONENT_CSC.isEnabled()) {
stg.setSignalName(csc, _csc);
stg.setSignature(csc, Signature.INTERNAL);
}
// Intermediate transitions
Transition t1 = null, t2 = null;
// Should be clear from here, essentially seq is a simple cycle and we only have to remember the first transition
// start sequencer with in+
Transition first = stg.addTransition(new SignalEdge(in, EdgeDirection.UP));
// this triggers the a handshake
t1 = stg.addTransition(new SignalEdge(a_out, EdgeDirection.UP));
connect(first, t1, stg);
t2 = stg.addTransition(new SignalEdge(a_in, EdgeDirection.UP));
connect(t1, t2, stg);
t1 = t2;
if (CLW.instance.HANDSHAKE_COMPONENT_CSC.isEnabled()) {
t2 = stg.addTransition(new SignalEdge(csc, EdgeDirection.UP));
connect(t1, t2, stg);
t1 = t2;
}
t2 = stg.addTransition(new SignalEdge(a_out, EdgeDirection.DOWN));
connect(t1, t2, stg);
t1 = t2;
t2 = stg.addTransition(new SignalEdge(a_in, EdgeDirection.DOWN));
connect(t1, t2, stg);
t1 = t2;
// this triggers the b handshake
t2 = stg.addTransition(new SignalEdge(b_out, EdgeDirection.UP));
connect(t1, t2, stg);
t1 = t2;
t2 = stg.addTransition(new SignalEdge(b_in, EdgeDirection.UP));
connect(t1, t2, stg);
t1 = t2;
t2 = stg.addTransition(new SignalEdge(out, EdgeDirection.UP));
connect(t1, t2, stg);
t1 = t2;
t2 = stg.addTransition(new SignalEdge(in, EdgeDirection.DOWN));
connect(t1, t2, stg);
t1 = t2;
if (CLW.instance.HANDSHAKE_COMPONENT_CSC.isEnabled()) {
t2 = stg.addTransition(new SignalEdge(csc, EdgeDirection.DOWN));
connect(t1, t2, stg);
t1 = t2;
}
// complete up handshake
t2 = stg.addTransition(new SignalEdge(b_out, EdgeDirection.DOWN));
connect(t1, t2, stg);
t1 = t2;
t2 = stg.addTransition(new SignalEdge(b_in, EdgeDirection.DOWN));
connect(t1, t2, stg);
t1 = t2;
t2 = stg.addTransition(new SignalEdge(out, EdgeDirection.DOWN));
connect(t1, t2, stg);
// close the cycle
connect(t2, first, stg).setMarking(1);
// clean up
stg.clearUndoStack();
return stg;
}
/**
* Generate a 4-phase handshake paralleliser.<br>
* The up channel is (in,out), the down channels are (a_in,a_out) and (b_in,b_out).
* The real identifier are prefixed with id.
* Unlike the sequencer, the a and b handshakes are started concurrently after
* in+ and both must complete before out+ fires. With the
* HANDSHAKE_COMPONENT_CSC option internal csc_a/csc_b signals are inserted.
*
* @param id The channel name prefix
* @param signal The first signal which is used.
* @return the constructed paralleliser STG
* @throws STGException
*/
public static STG getParalleliser(String id, int signal) throws STGException {
STG stg = new STG();
final String _in = id + "in";
final String _out = id + "out";
final String _a_in = id + "a_in";
final String _a_out = id + "a_out";
final String _b_in = id + "b_in";
final String _b_out = id + "b_out";
final String _csc_a = id + "csc_a";
final String _csc_b = id + "csc_b";
final int in = signal++;
stg.setSignalName(in, _in);
stg.setSignature(in, Signature.INPUT);
final int out = signal++;
stg.setSignalName(out, _out);
stg.setSignature(out, Signature.OUTPUT);
final int a_in = signal++;
stg.setSignalName(a_in, _a_in);
stg.setSignature(a_in, Signature.INPUT);
final int a_out = signal++;
stg.setSignalName(a_out, _a_out);
stg.setSignature(a_out, Signature.OUTPUT);
final int b_in = signal++;
stg.setSignalName(b_in, _b_in);
stg.setSignature(b_in, Signature.INPUT);
final int b_out = signal++;
stg.setSignalName(b_out, _b_out);
stg.setSignature(b_out, Signature.OUTPUT);
// csc numbers are reserved unconditionally so the numbering stays stable
final int csc_a = signal++;
final int csc_b = signal++;
if (CLW.instance.HANDSHAKE_COMPONENT_CSC.isEnabled()) {
stg.setSignalName(csc_a, _csc_a);
stg.setSignature(csc_a, Signature.INTERNAL);
stg.setSignalName(csc_b, _csc_b);
stg.setSignature(csc_b, Signature.INTERNAL);
}
// intermediate transitions
Transition t1 = null, t2 = null, t3 = null;
// start of paralleleiser
Transition first = stg.addTransition(new SignalEdge(in, EdgeDirection.UP));
// part of up handshake, needed later explicitely for synchronisation
t3 = stg.addTransition(new SignalEdge(out, EdgeDirection.UP));
// in+ triggers the a handshake
t1 = stg.addTransition(new SignalEdge(a_out, EdgeDirection.UP));
connect(first, t1, stg);
t2 = stg.addTransition(new SignalEdge(a_in, EdgeDirection.UP));
connect(t1, t2, stg);
t1 = t2;
if (CLW.instance.HANDSHAKE_COMPONENT_CSC.isEnabled()) {
t2 = stg.addTransition(new SignalEdge(csc_a, EdgeDirection.UP));
connect(t1, t2, stg);
t1 = t2;
}
t2 = stg.addTransition(new SignalEdge(a_out, EdgeDirection.DOWN));
connect(t1, t2, stg);
t1 = t2;
t2 = stg.addTransition(new SignalEdge(a_in, EdgeDirection.DOWN));
connect(t1, t2, stg);
t1 = t2;
// synchronise
connect(t2, t3, stg);
// in+ triggers also the b handshake
t2 = stg.addTransition(new SignalEdge(b_out, EdgeDirection.UP));
connect(first, t2, stg);
t1 = t2;
t2 = stg.addTransition(new SignalEdge(b_in, EdgeDirection.UP));
connect(t1, t2, stg);
t1 = t2;
if (CLW.instance.HANDSHAKE_COMPONENT_CSC.isEnabled()) {
t2 = stg.addTransition(new SignalEdge(csc_b, EdgeDirection.UP));
connect(t1, t2, stg);
t1 = t2;
}
t2 = stg.addTransition(new SignalEdge(b_out, EdgeDirection.DOWN));
connect(t1, t2, stg);
t1 = t2;
t2 = stg.addTransition(new SignalEdge(b_in, EdgeDirection.DOWN));
connect(t1, t2, stg);
// synchronise
connect(t2, t3, stg);
// complete up handshake
t1 = t3;
t2 = stg.addTransition(new SignalEdge(in, EdgeDirection.DOWN));
connect(t1, t2, stg);
t1 = t2;
if (CLW.instance.HANDSHAKE_COMPONENT_CSC.isEnabled()) {
t2 = stg.addTransition(new SignalEdge(csc_a, EdgeDirection.DOWN));
connect(t1, t2, stg);
t1 = t2;
t2 = stg.addTransition(new SignalEdge(csc_b, EdgeDirection.DOWN));
connect(t1, t2, stg);
t1 = t2;
}
t2 = stg.addTransition(new SignalEdge(out, EdgeDirection.DOWN));
connect(t1, t2, stg);
// close cycle
connect(t2, first, stg).setMarking(1);
// clean up
stg.clearUndoStack();
return stg;
}
/**
 * Generate a 4-phase handshake merge component.<br>
 * The up channel is (in,out); the down channels are (a_in,a_out) and
 * (b_in,b_out). An activation on either down channel runs the up handshake
 * and then acknowledges on the same down channel; a shared marked place makes
 * the two branches mutually exclusive. The real identifiers are prefixed with id.
 *
 * @param id the channel name prefix
 * @param signal the first signal number which is used
 * @return the constructed merge STG
 * @throws STGException propagated from STG construction
 */
public static STG getMerger(String id, int signal) throws STGException {
    STG stg = new STG();
    final String _in = id + "in";
    final String _out = id + "out";
    final String _a_in = id + "a_in";
    final String _a_out = id + "a_out";
    final String _b_in = id + "b_in";
    final String _b_out = id + "b_out";
    final int in = signal++;
    stg.setSignalName(in, _in);
    stg.setSignature(in, Signature.INPUT);
    final int out = signal++;
    stg.setSignalName(out, _out);
    stg.setSignature(out, Signature.OUTPUT);
    final int a_in = signal++;
    stg.setSignalName(a_in, _a_in);
    stg.setSignature(a_in, Signature.INPUT);
    final int a_out = signal++;
    stg.setSignalName(a_out, _a_out);
    stg.setSignature(a_out, Signature.OUTPUT);
    final int b_in = signal++;
    stg.setSignalName(b_in, _b_in);
    stg.setSignature(b_in, Signature.INPUT);
    final int b_out = signal++;
    stg.setSignalName(b_out, _b_out);
    stg.setSignature(b_out, Signature.OUTPUT);
    // Shared place guarding both branches (initially marked).
    Place p = stg.addPlace("p1", 1);
    // The two branches are identical up to the down-channel signals.
    addMergerBranch(stg, p, in, out, a_in, a_out);
    addMergerBranch(stg, p, in, out, b_in, b_out);
    // clean up
    stg.clearUndoStack();
    return stg;
}

/**
 * Adds one merger branch: the cycle
 * chIn+ -> out+ -> in+ -> out- -> in- -> chOut+ -> chIn- -> chOut-,
 * consuming the token from p at the start and returning it at the end.
 *
 * @param stg the net under construction
 * @param p the shared mutual-exclusion place
 * @param in up-channel input signal
 * @param out up-channel output signal
 * @param chIn down-channel input signal of this branch
 * @param chOut down-channel output signal of this branch
 */
private static void addMergerBranch(STG stg, Place p, int in, int out, int chIn, int chOut) throws STGException {
    Transition t1 = stg.addTransition(new SignalEdge(chIn, EdgeDirection.UP));
    p.setChildValue(t1, 1);
    Transition t2 = stg.addTransition(new SignalEdge(out, EdgeDirection.UP));
    connect(t1, t2, stg);
    t1 = t2;
    t2 = stg.addTransition(new SignalEdge(in, EdgeDirection.UP));
    connect(t1, t2, stg);
    t1 = t2;
    t2 = stg.addTransition(new SignalEdge(out, EdgeDirection.DOWN));
    connect(t1, t2, stg);
    t1 = t2;
    t2 = stg.addTransition(new SignalEdge(in, EdgeDirection.DOWN));
    connect(t1, t2, stg);
    t1 = t2;
    t2 = stg.addTransition(new SignalEdge(chOut, EdgeDirection.UP));
    connect(t1, t2, stg);
    t1 = t2;
    t2 = stg.addTransition(new SignalEdge(chIn, EdgeDirection.DOWN));
    connect(t1, t2, stg);
    t1 = t2;
    t2 = stg.addTransition(new SignalEdge(chOut, EdgeDirection.DOWN));
    connect(t1, t2, stg);
    t2.setChildValue(p, 1);
}
/**
 * Generate a 4-phase handshake mixer component.<br>
 * The up channel is (in,out); the down channels are (a_in,a_out) and
 * (b_in,b_out). Like the merger, either down channel can activate the up
 * handshake, but here the down-channel acknowledge (chOut+) fires inside the
 * up handshake rather than after it. A shared marked place makes the two
 * branches mutually exclusive. The real identifiers are prefixed with id.
 *
 * @param id the channel name prefix
 * @param signal the first signal number which is used
 * @return the constructed mixer STG
 * @throws STGException propagated from STG construction
 */
public static STG getMixer(String id, int signal) throws STGException {
    STG stg = new STG();
    final String _in = id + "in";
    final String _out = id + "out";
    final String _a_in = id + "a_in";
    final String _a_out = id + "a_out";
    final String _b_in = id + "b_in";
    final String _b_out = id + "b_out";
    final int in = signal++;
    stg.setSignalName(in, _in);
    stg.setSignature(in, Signature.INPUT);
    final int out = signal++;
    stg.setSignalName(out, _out);
    stg.setSignature(out, Signature.OUTPUT);
    final int a_in = signal++;
    stg.setSignalName(a_in, _a_in);
    stg.setSignature(a_in, Signature.INPUT);
    final int a_out = signal++;
    stg.setSignalName(a_out, _a_out);
    stg.setSignature(a_out, Signature.OUTPUT);
    final int b_in = signal++;
    stg.setSignalName(b_in, _b_in);
    stg.setSignature(b_in, Signature.INPUT);
    final int b_out = signal++;
    stg.setSignalName(b_out, _b_out);
    stg.setSignature(b_out, Signature.OUTPUT);
    // Shared place guarding both branches (initially marked).
    Place p = stg.addPlace("p1", 1);
    // The two branches are identical up to the down-channel signals.
    addMixerBranch(stg, p, in, out, a_in, a_out);
    addMixerBranch(stg, p, in, out, b_in, b_out);
    // clean up
    stg.clearUndoStack();
    return stg;
}

/**
 * Adds one mixer branch: the cycle
 * chIn+ -> out+ -> in+ -> chOut+ -> chIn- -> out- -> in- -> chOut-,
 * consuming the token from p at the start and returning it at the end.
 *
 * @param stg the net under construction
 * @param p the shared mutual-exclusion place
 * @param in up-channel input signal
 * @param out up-channel output signal
 * @param chIn down-channel input signal of this branch
 * @param chOut down-channel output signal of this branch
 */
private static void addMixerBranch(STG stg, Place p, int in, int out, int chIn, int chOut) throws STGException {
    Transition t1 = stg.addTransition(new SignalEdge(chIn, EdgeDirection.UP));
    p.setChildValue(t1, 1);
    Transition t2 = stg.addTransition(new SignalEdge(out, EdgeDirection.UP));
    connect(t1, t2, stg);
    t1 = t2;
    t2 = stg.addTransition(new SignalEdge(in, EdgeDirection.UP));
    connect(t1, t2, stg);
    t1 = t2;
    t2 = stg.addTransition(new SignalEdge(chOut, EdgeDirection.UP));
    connect(t1, t2, stg);
    t1 = t2;
    t2 = stg.addTransition(new SignalEdge(chIn, EdgeDirection.DOWN));
    connect(t1, t2, stg);
    t1 = t2;
    t2 = stg.addTransition(new SignalEdge(out, EdgeDirection.DOWN));
    connect(t1, t2, stg);
    t1 = t2;
    t2 = stg.addTransition(new SignalEdge(in, EdgeDirection.DOWN));
    connect(t1, t2, stg);
    t1 = t2;
    t2 = stg.addTransition(new SignalEdge(chOut, EdgeDirection.DOWN));
    connect(t1, t2, stg);
    t2.setChildValue(p, 1);
}
/**
* Builds a complete binary tree of parallelisers of the given height and
* returns their parallel composition. The handshake signals between parent
* and child components are hidden: turned into dummies or internal signals
* depending on the DUMMIFY_INTERNALHANDSHAKES option.
*
* @param height tree height; values below 1 are treated as 1
* @return the parallel composition of all parallelisers
* @throws STGException propagated from component construction
*/
public static STG getParTree(int height) throws STGException {
if (height<1)
height = 1;
List<String> hiddenHandshakes = new LinkedList<String>();
LinkedList<STG> stgs = new LinkedList<STG>();
stgs.add(getParalleliser("par_0_0_", 0));
if (height>1) {
hiddenHandshakes.add("par_0_0_a_out");
hiddenHandshakes.add("par_0_0_b_out");
}
// level curLevel has 2^curLevel components; each connects its up channel
// to the a (even index) or b (odd index) channel of its parent
for (int curLevel=1; curLevel < height; ++curLevel) {
for (int curPar=0; curPar < Math.pow(2,curLevel); ++curPar) {
String curPrefix = "par_" + curLevel + "_" + curPar + "_";
STG curSTG = getParalleliser(curPrefix, 0);
int parentLevel=curLevel -1 ;
int parentNr = curPar / 2;
String parentPrefix = "par_" + parentLevel + "_" + parentNr + "_" + ((curPar % 2 == 0) ? "a_" : "b_") ;
// wire this component's up channel to the parent's down channel
Map<String,String> renaming = new HashMap<String, String>();
renaming.put(curPrefix + "in", parentPrefix + "out");
renaming.put(curPrefix + "out", parentPrefix + "in");
curSTG.renameSignals(renaming);
stgs.add(curSTG);
hiddenHandshakes.add(parentPrefix + "in");
if (curLevel<height-1) {
hiddenHandshakes.add(curPrefix + "a_out");
hiddenHandshakes.add(curPrefix + "b_out");
}
}
}
STG parallelComposition = STG.parallelComposition(stgs);
if (CLW.instance.DUMMIFY_INTERNALHANDSHAKES.isEnabled())
parallelComposition.setSignature(parallelComposition.getSignalNumbers(hiddenHandshakes), Signature.DUMMY);
else
parallelComposition.setSignature(parallelComposition.getSignalNumbers(hiddenHandshakes), Signature.INTERNAL);
return parallelComposition;
}
/**
* Builds a complete binary tree of sequencers of the given height and
* returns their parallel composition; structurally identical to
* {@link #getParTree(int)} but with sequencer components. Internal
* handshakes are hidden per the DUMMIFY_INTERNALHANDSHAKES option.
*
* @param height tree height; values below 1 are treated as 1
* @return the parallel composition of all sequencers
* @throws STGException propagated from component construction
*/
public static STG getSeqTree(int height) throws STGException {
if (height<1)
height = 1;
List<String> hiddenHandshakes = new LinkedList<String>();
LinkedList<STG> stgs = new LinkedList<STG>();
stgs.add(getSequencer("seq_0_0_", 0));
if (height>1) {
hiddenHandshakes.add("seq_0_0_a_out");
hiddenHandshakes.add("seq_0_0_b_out");
}
for (int curLevel=1; curLevel < height; ++curLevel) {
for (int curPar=0; curPar < Math.pow(2,curLevel); ++curPar) {
String curPrefix = "seq_" + curLevel + "_" + curPar + "_";
STG curSTG = getSequencer(curPrefix, 0);
int parentLevel=curLevel -1 ;
int parentNr = curPar / 2;
String parentPrefix = "seq_" + parentLevel + "_" + parentNr + "_" + ((curPar % 2 == 0) ? "a_" : "b_") ;
// wire this component's up channel to the parent's down channel
Map<String,String> renaming = new HashMap<String, String>();
renaming.put(curPrefix + "in", parentPrefix + "out");
renaming.put(curPrefix + "out", parentPrefix + "in");
curSTG.renameSignals(renaming);
stgs.add(curSTG);
hiddenHandshakes.add(parentPrefix + "in");
if (curLevel<height-1) {
hiddenHandshakes.add(curPrefix + "a_out");
hiddenHandshakes.add(curPrefix + "b_out");
}
}
}
STG parallelComposition = STG.parallelComposition(stgs);
if (CLW.instance.DUMMIFY_INTERNALHANDSHAKES.isEnabled())
parallelComposition.setSignature(parallelComposition.getSignalNumbers(hiddenHandshakes), Signature.DUMMY);
else
parallelComposition.setSignature(parallelComposition.getSignalNumbers(hiddenHandshakes), Signature.INTERNAL);
return parallelComposition;
}
/**
* Builds a linear chain (despite the name, not a tree: one mixer per level)
* of mixers of the given length and returns their parallel composition.
* Each mixer's up channel is connected to its predecessor's a channel.
* Internal handshakes are hidden per the DUMMIFY_INTERNALHANDSHAKES option.
*
* @param height chain length; values below 1 are treated as 1
* @return the parallel composition of all mixers
* @throws STGException propagated from component construction
*/
public static STG getMixTree(int height) throws STGException {
if (height < 1)
height = 1;
List<String> hiddenHandshakes = new LinkedList<String>();
LinkedList<STG> stgs = new LinkedList<STG>();
stgs.add(getMixer("mix_0_", 0));
if (height > 1) {
hiddenHandshakes.add("mix_0_out");
}
for (int curLevel=1; curLevel < height; ++curLevel) {
String curPrefix = "mix_" + curLevel + "_";
STG curSTG = getMixer(curPrefix, 0);
int parentLevel = curLevel - 1 ;
String parentPrefix = "mix_" + parentLevel + "_";
// wire this mixer's a channel to the predecessor's up channel
Map<String,String> renaming = new HashMap<String, String>();
renaming.put(curPrefix + "a_in", parentPrefix + "out");
renaming.put(curPrefix + "a_out", parentPrefix + "in");
curSTG.renameSignals(renaming);
stgs.add(curSTG);
hiddenHandshakes.add(parentPrefix + "in");
if (curLevel < height-1) {
hiddenHandshakes.add(curPrefix + "out");
}
}
STG parallelComposition = STG.parallelComposition(stgs);
if (CLW.instance.DUMMIFY_INTERNALHANDSHAKES.isEnabled())
parallelComposition.setSignature(parallelComposition.getSignalNumbers(hiddenHandshakes), Signature.DUMMY);
else
parallelComposition.setSignature(parallelComposition.getSignalNumbers(hiddenHandshakes), Signature.INTERNAL);
return parallelComposition;
}
/**
 * Builds a complete binary tree of handshake components in which the levels
 * alternate between sequencers and parallelisers, and returns their parallel
 * composition. The handshake signals between parent and child components are
 * hidden: turned into dummies or internal signals depending on the
 * DUMMIFY_INTERNALHANDSHAKES option.
 * (A leftover debug println of every (level, index) pair was removed.)
 *
 * @param height tree height; values below 1 are treated as 1
 * @param startWithSeq true: the root is a sequencer; false: a paralleliser
 * @return the parallel composition of all components
 * @throws STGException propagated from component construction
 */
public static STG getParSeqTree(int height, boolean startWithSeq) throws STGException {
    if (height < 1)
        height = 1;
    List<String> hiddenHandshakes = new LinkedList<String>();
    LinkedList<STG> stgs = new LinkedList<STG>();
    if (startWithSeq) {
        stgs.add(getSequencer("seq_0_0_", 0));
        if (height > 1) {
            hiddenHandshakes.add("seq_0_0_a_out");
            hiddenHandshakes.add("seq_0_0_b_out");
        }
    }
    else {
        stgs.add(getParalleliser("par_0_0_", 0));
        if (height > 1) {
            hiddenHandshakes.add("par_0_0_a_out");
            hiddenHandshakes.add("par_0_0_b_out");
        }
    }
    // flips at every level so components alternate down the tree
    startWithSeq = !startWithSeq;
    for (int curLevel = 1; curLevel < height; ++curLevel) {
        for (int curPar = 0; curPar < Math.pow(2, curLevel); ++curPar) {
            String curPrefix = "";
            STG curSTG = null;
            if (startWithSeq) {
                curPrefix = "seq_" + curLevel + "_" + curPar + "_";
                curSTG = getSequencer(curPrefix, 0);
            }
            else {
                curPrefix = "par_" + curLevel + "_" + curPar + "_";
                curSTG = getParalleliser(curPrefix, 0);
            }
            int parentLevel = curLevel - 1;
            int parentNr = curPar / 2;
            // even children attach to the parent's a channel, odd ones to b;
            // the parent is of the opposite component kind
            String parentPrefix = "";
            if (startWithSeq) {
                parentPrefix = "par_" + parentLevel + "_" + parentNr + "_" + ((curPar % 2 == 0) ? "a_" : "b_");
            }
            else {
                parentPrefix = "seq_" + parentLevel + "_" + parentNr + "_" + ((curPar % 2 == 0) ? "a_" : "b_");
            }
            // wire this component's up channel to the parent's down channel
            Map<String, String> renaming = new HashMap<String, String>();
            renaming.put(curPrefix + "in", parentPrefix + "out");
            renaming.put(curPrefix + "out", parentPrefix + "in");
            curSTG.renameSignals(renaming);
            stgs.add(curSTG);
            hiddenHandshakes.add(parentPrefix + "in");
            if (curLevel < height - 1) {
                hiddenHandshakes.add(curPrefix + "a_out");
                hiddenHandshakes.add(curPrefix + "b_out");
            }
        }
        startWithSeq = !startWithSeq;
    }
    STG parallelComposition = STG.parallelComposition(stgs);
    if (CLW.instance.DUMMIFY_INTERNALHANDSHAKES.isEnabled())
        parallelComposition.setSignature(parallelComposition.getSignalNumbers(hiddenHandshakes), Signature.DUMMY);
    else
        parallelComposition.setSignature(parallelComposition.getSignalNumbers(hiddenHandshakes), Signature.INTERNAL);
    return parallelComposition;
}
/**
* Debug/analysis helper: builds the same alternating seq/par tree as
* {@link #getParSeqTree(int, boolean)} and writes a file named "partition"
* with one line per component, listing its output and internal signal names.
* The println is intentional debug output for this developer utility.
*/
@SuppressWarnings("unused")
private static void getParSeqTreePartition(int height, boolean startWithSeq) throws STGException, IOException {
LinkedList<STG> stgs = new LinkedList<STG>();
if (startWithSeq)
stgs.add(getSequencer("seq_0_0_", 0));
else
stgs.add(getParalleliser("par_0_0_", 0));
startWithSeq = ! startWithSeq;
for (int curLevel=1; curLevel < height; ++curLevel) {
for (int curPar=0; curPar < Math.pow(2,curLevel); ++curPar) {
System.out.println("" + curLevel + " " + curPar);
String curPrefix = "";
STG curSTG = null;
if (startWithSeq) {
curPrefix = "seq_" + curLevel + "_" + curPar + "_";
curSTG = getSequencer(curPrefix, 0);
}
else {
curPrefix = "par_" + curLevel + "_" + curPar + "_";
curSTG = getParalleliser(curPrefix, 0);
}
int parentLevel=curLevel -1 ;
int parentNr = curPar / 2;
String parentPrefix = "";
if (startWithSeq) {
parentPrefix = "par_" + parentLevel + "_" + parentNr + "_" + ((curPar % 2 == 0) ? "a_" : "b_") ;
}
else {
parentPrefix = "seq_" + parentLevel + "_" + parentNr + "_" + ((curPar % 2 == 0) ? "a_" : "b_") ;
}
Map<String,String> renaming = new HashMap<String, String>();
renaming.put(curPrefix + "in", parentPrefix + "out");
renaming.put(curPrefix + "out", parentPrefix + "in");
curSTG.renameSignals(renaming);
stgs.add(curSTG);
}
startWithSeq = ! startWithSeq;
}
// one line per component: "<outputs> <internals>", brackets/commas stripped
StringBuilder sb = new StringBuilder();
for (STG stg : stgs) {
sb.append(stg.getSignalNames((stg.getSignals(Signature.OUTPUT))).toString().replaceAll("[,\\[\\]]", ""));
sb.append(" ");
sb.append(stg.getSignalNames((stg.getSignals(Signature.INTERNAL))).toString().replaceAll("[,\\[\\]]", ""));
sb.append("\n");
}
FileSupport.saveToDisk(sb.toString(), "partition");
}
/** Developer entry point: prints the state-count estimate for seq-rooted trees of height 2 to 12. */
public static void main(String[] args) throws NumberFormatException, STGException, IOException, ParsingException {
    for (int height = 2; height <= 12; ++height) {
        System.out.println("" + height + " : " + getNroStates(height, true));
    }
}
/**
 * Recursively estimates the number of states of an alternating seq/par tree.
 * Base case (n == 1): 12 states for a sequencer leaf, 28 for a paralleliser
 * leaf. Recursive case: a sequencer doubles its child's count and adds 4;
 * a paralleliser squares it (two concurrent children) and adds 28.
 *
 * @param n tree height (>= 1)
 * @param seq whether the current level is a sequencer level
 * @return the estimated state count
 */
private static BigDecimal getNroStates(int n, boolean seq) {
    if (n == 1) {
        return seq ? new BigDecimal(12) : new BigDecimal(28);
    }
    BigDecimal sub = getNroStates(n - 1, !seq);
    if (seq) {
        return sub.multiply(new BigDecimal(2)).add(new BigDecimal(4));
    }
    return sub.multiply(sub).add(new BigDecimal(28));
}
}
| |
// Copyright 2015 The Project Buendia Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at: http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distrib-
// uted under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
// OR CONDITIONS OF ANY KIND, either express or implied. See the License for
// specific language governing permissions and limitations under the License.
package org.projectbuendia.client.ui.dialogs;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.DialogFragment;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.View;
import android.view.WindowManager.LayoutParams;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import org.joda.time.DateTime;
import org.joda.time.Days;
import org.joda.time.LocalDate;
import org.projectbuendia.client.R;
import org.projectbuendia.client.events.actions.OrderDeleteRequestedEvent;
import org.projectbuendia.client.events.actions.OrderSaveRequestedEvent;
import org.projectbuendia.client.models.Order;
import org.projectbuendia.client.utils.Utils;
import butterknife.ButterKnife;
import butterknife.InjectView;
import de.greenrobot.event.EventBus;
/** A {@link DialogFragment} for creating a new treatment order or editing an existing one. */
public class OrderDialogFragment extends DialogFragment {
// Form fields, injected by ButterKnife from order_dialog_fragment.xml.
@InjectView(R.id.order_medication) EditText mMedication;
@InjectView(R.id.order_dosage) EditText mDosage;
@InjectView(R.id.order_frequency) EditText mFrequency;
@InjectView(R.id.order_give_for_days) EditText mGiveForDays;
// Labels that react to changes in mGiveForDays (see updateLabels()).
@InjectView(R.id.order_give_for_days_label) TextView mGiveForDaysLabel;
@InjectView(R.id.order_duration_label) TextView mDurationLabel;
// Only shown when editing an existing order.
@InjectView(R.id.order_delete) Button mDelete;
// Cached in onCreate() for use by onCreateDialog().
private LayoutInflater mInflater;
/**
* Creates a dialog fragment for the given patient; pass a non-null order to
* edit it, or null to create a new one. All state is passed via arguments so
* the fragment survives recreation.
*/
public static OrderDialogFragment newInstance(String patientUuid, Order order) {
Bundle args = new Bundle();
args.putString("patientUuid", patientUuid);
args.putBoolean("new", order == null);
// This time is used as the current time for any calculations in this dialog.
// Use this value throughout instead of calling now(). This is necessary to maintain UI
// consistency (e.g. if the dialog is opened before midnight and submitted after midnight).
args.putLong("now_millis", DateTime.now().getMillis());
if (order != null) {
args.putString("uuid", order.uuid);
args.putString("instructions", order.instructions);
args.putLong("start_millis", order.start.getMillis());
// stop may be null for open-ended orders; then "stop_millis" is simply absent
if (order.stop != null) {
args.putLong("stop_millis", order.stop.getMillis());
}
}
OrderDialogFragment fragment = new OrderDialogFragment();
fragment.setArguments(args);
return fragment;
}
// Caches the inflater; the dialog view itself is built in onCreateDialog().
@Override public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mInflater = LayoutInflater.from(getActivity());
}
// By default AlertDialog dismisses on any button press; overriding the
// positive button's listener here lets onSubmit() keep the dialog open when
// validation fails.
@Override public void onResume() {
super.onResume();
// Replace the existing button listener so we can control whether the dialog is dismissed.
final AlertDialog dialog = (AlertDialog) getDialog();
dialog.getButton(DialogInterface.BUTTON_POSITIVE).setOnClickListener(
new View.OnClickListener() {
@Override
public void onClick(View view) {
onSubmit(dialog);
}
}
);
}
// Fills the form from the fragment arguments when editing an existing order.
private void populateFields(Bundle args) {
String instructions = args.getString("instructions");
mMedication.setText(Order.getMedication(instructions));
mDosage.setText(Order.getDosage(instructions));
mFrequency.setText(Order.getFrequency(instructions));
DateTime now = Utils.getDateTime(args, "now_millis");
Long stopMillis = Utils.getLong(args, "stop_millis");
if (stopMillis != null) {
// Convert the absolute stop time back into a day count relative to today.
LocalDate lastDay = new DateTime(stopMillis).toLocalDate();
int days = Days.daysBetween(now.toLocalDate(), lastDay).getDays();
if (days >= 0) {
mGiveForDays.setText("" + (days + 1)); // 1 day means stop after today
}
}
updateLabels();
}
/**
 * Validates the form; on success dismisses the dialog and posts an
 * {@link OrderSaveRequestedEvent} for the PatientChartController to handle.
 * On validation failure the dialog stays open with field errors shown.
 */
public void onSubmit(Dialog dialog) {
    String uuid = getArguments().getString("uuid");
    String patientUuid = getArguments().getString("patientUuid");
    String medication = mMedication.getText().toString().trim();
    String dosage = mDosage.getText().toString().trim();
    String frequency = mFrequency.getText().toString().trim();
    String instructions = Order.getInstructions(medication, dosage, frequency);
    String durationStr = mGiveForDays.getText().toString().trim();
    boolean valid = true;
    if (medication.isEmpty()) {
        setError(mMedication, R.string.enter_medication);
        valid = false;
    }
    Integer durationDays = null;
    if (!durationStr.isEmpty()) {
        try {
            durationDays = Integer.valueOf(durationStr);
        } catch (NumberFormatException e) {
            // Previously an unparseable value (e.g. more digits than fit in an
            // int) crashed the dialog with an uncaught NumberFormatException.
            // TODO: add a dedicated "invalid number" error string.
            setError(mGiveForDays, R.string.order_give_for_days_cannot_be_zero);
            valid = false;
        }
    }
    if (durationDays != null && durationDays == 0) {
        setError(mGiveForDays, R.string.order_give_for_days_cannot_be_zero);
        valid = false;
    }
    Utils.logUserAction("order_submitted",
        "valid", "" + valid,
        "uuid", uuid,
        "medication", medication,
        "dosage", dosage,
        "frequency", frequency,
        "instructions", instructions,
        "durationDays", "" + durationDays);
    if (!valid) {
        return;
    }
    dialog.dismiss();
    // All time arithmetic is anchored to the "now" captured when the dialog
    // was opened, for consistency across midnight (see newInstance).
    DateTime now = Utils.getDateTime(getArguments(), "now_millis");
    DateTime start = Utils.getDateTime(getArguments(), "start_millis");
    start = Utils.valueOrDefault(start, now);
    if (durationDays != null) {
        // Adjust durationDays to account for a start date in the past. Entering "2"
        // always means two more days, stopping after tomorrow, regardless of start date.
        LocalDate firstDay = start.toLocalDate();
        LocalDate lastDay = now.toLocalDate().plusDays(durationDays - 1);
        durationDays = Days.daysBetween(firstDay, lastDay).getDays() + 1;
    }
    // Post an event that triggers the PatientChartController to save the order.
    EventBus.getDefault().post(new OrderSaveRequestedEvent(
        uuid, patientUuid, instructions, start, durationDays));
}
// Closes the order dialog and asks for confirmation; only on confirmation is
// an OrderDeleteRequestedEvent posted.
public void onDelete(Dialog dialog, final String orderUuid) {
dialog.dismiss();
new AlertDialog.Builder(getActivity())
.setMessage(R.string.confirm_order_delete)
.setTitle(R.string.title_confirmation)
.setPositiveButton(R.string.delete, new DialogInterface.OnClickListener() {
@Override public void onClick(DialogInterface dialog, int i) {
EventBus.getDefault().post(new OrderDeleteRequestedEvent(orderUuid));
}
})
.setNegativeButton(R.string.cancel, null)
.create().show();
}
private void setError(EditText field, int resourceId) {
field.setError(getResources().getString(resourceId));
field.invalidate();
field.requestFocus();
}
@Override public @NonNull Dialog onCreateDialog(Bundle savedInstanceState) {
View fragment = mInflater.inflate(R.layout.order_dialog_fragment, null);
ButterKnife.inject(this, fragment);
mGiveForDays.addTextChangedListener(new DurationDaysWatcher());
Bundle args = getArguments();
boolean newOrder = args.getBoolean("new");
String title = getString(newOrder ? R.string.title_new_order : R.string.title_edit_order);
final String orderUuid = args.getString("uuid");
populateFields(args);
final Dialog dialog = new AlertDialog.Builder(getActivity())
.setCancelable(false) // Disable auto-cancel.
.setTitle(title)
// The positive button uses dialog, so we have to set it below, after dialog is assigned.
.setNegativeButton(R.string.cancel, null)
.setView(fragment)
.create();
((AlertDialog) dialog).setButton(Dialog.BUTTON_POSITIVE,
getResources().getString(R.string.ok), new DialogInterface.OnClickListener() {
@Override public void onClick(DialogInterface dialogInterface, int i) {
onSubmit(dialog);
}
});
mDelete.setOnClickListener(new View.OnClickListener() {
@Override public void onClick(View view) {
onDelete(dialog, orderUuid);
}
});
// Hide or show the "Stop" and "Delete" buttons appropriately.
Long stopMillis = Utils.getLong(args, "stop_millis");
Long nowMillis = Utils.getLong(args, "now_millis");
Utils.showIf(mDelete, !newOrder);
// Open the keyboard, ready to type into the medication field.
dialog.getWindow().setSoftInputMode(LayoutParams.SOFT_INPUT_STATE_VISIBLE);
mMedication.requestFocus();
return dialog;
}
/** Updates the various labels in the form that react to changes in input fields. */
void updateLabels() {
DateTime now = Utils.getDateTime(getArguments(), "now_millis");
String text = mGiveForDays.getText().toString().trim();
int days = text.isEmpty() ? 0 : Integer.parseInt(text);
LocalDate lastDay = now.toLocalDate().plusDays(days - 1);
mGiveForDaysLabel.setText(
days == 0 ? R.string.order_give_for_days :
days == 1 ? R.string.order_give_for_day :
R.string.order_give_for_days);
mDurationLabel.setText(getResources().getString(
days == 0 ? R.string.order_duration_unspecified :
days == 1 ? R.string.order_duration_stop_after_today :
days == 2 ? R.string.order_duration_stop_after_tomorrow :
R.string.order_duration_stop_after_date
).replace("%s", Utils.toShortString(lastDay)));
}
class DurationDaysWatcher implements TextWatcher {
@Override public void beforeTextChanged(CharSequence c, int x, int y, int z) {
}
@Override public void onTextChanged(CharSequence c, int x, int y, int z) {
}
@Override public void afterTextChanged(Editable editable) {
updateLabels();
}
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.intention.impl;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.daemon.impl.HighlightInfo;
import com.intellij.codeInsight.daemon.impl.ShowIntentionsPass;
import com.intellij.codeInsight.hint.*;
import com.intellij.codeInsight.intention.HighPriorityAction;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.codeInsight.intention.impl.config.IntentionActionWrapper;
import com.intellij.codeInsight.intention.impl.config.IntentionManagerSettings;
import com.intellij.codeInsight.intention.impl.config.IntentionSettingsConfigurable;
import com.intellij.codeInsight.unwrap.ScopeHighlighter;
import com.intellij.codeInspection.SuppressIntentionActionFromFix;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.VisualPosition;
import com.intellij.openapi.editor.actions.EditorActionUtil;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.event.EditorFactoryAdapter;
import com.intellij.openapi.editor.event.EditorFactoryEvent;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.options.ShowSettingsUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.*;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Disposer;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.refactoring.BaseRefactoringIntentionAction;
import com.intellij.ui.HintHint;
import com.intellij.ui.LightweightHint;
import com.intellij.ui.PopupMenuListenerAdapter;
import com.intellij.ui.RowIcon;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.Alarm;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.ThreeState;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.EmptyIcon;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.event.PopupMenuEvent;
import javax.swing.event.PopupMenuListener;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.Collections;
import java.util.List;
/**
 * The editor "light bulb": a small icon hint shown near the caret when intention
 * actions or quick fixes are available, together with the list popup that presents
 * those actions. All mutating entry points assert the Swing dispatch thread.
 *
 * @author max
 * @author Mike
 * @author Valentin
 * @author Eugene Belyaev
 * @author Konstantin Bulenkov
 * @author and me too (Chinee?)
 */
public class IntentionHintComponent implements Disposable, ScrollAwareHint {
  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.intention.impl.IntentionHintComponent.ListPopupRunnable");
  // Same dimensions as the arrow icon, so the row icon keeps its size when inactive.
  private static final Icon ourInactiveArrowIcon = new EmptyIcon(AllIcons.General.ArrowDown.getIconWidth(), AllIcons.General.ArrowDown.getIconHeight());
  private static final int NORMAL_BORDER_SIZE = 6;
  private static final int SMALL_BORDER_SIZE = 4;
  private static final Border INACTIVE_BORDER = BorderFactory.createEmptyBorder(NORMAL_BORDER_SIZE, NORMAL_BORDER_SIZE, NORMAL_BORDER_SIZE, NORMAL_BORDER_SIZE);
  private static final Border INACTIVE_BORDER_SMALL = BorderFactory.createEmptyBorder(SMALL_BORDER_SIZE, SMALL_BORDER_SIZE, SMALL_BORDER_SIZE, SMALL_BORDER_SIZE);
  // Active borders are created on demand because their color tracks the current editor color scheme.
  private static Border createActiveBorder() {
    return BorderFactory.createCompoundBorder(BorderFactory.createLineBorder(getBorderColor(), 1), BorderFactory.createEmptyBorder(NORMAL_BORDER_SIZE - 1, NORMAL_BORDER_SIZE-1, NORMAL_BORDER_SIZE-1, NORMAL_BORDER_SIZE-1));
  }
  private static Border createActiveBorderSmall() {
    return BorderFactory.createCompoundBorder(BorderFactory.createLineBorder(getBorderColor(), 1), BorderFactory.createEmptyBorder(SMALL_BORDER_SIZE-1, SMALL_BORDER_SIZE-1, SMALL_BORDER_SIZE-1, SMALL_BORDER_SIZE-1));
  }
  private static Color getBorderColor() {
    return EditorColorsManager.getInstance().getGlobalScheme().getColor(EditorColors.SELECTED_TEARLINE_COLOR);
  }
  /** Returns whether the light-bulb panel itself is currently visible. */
  public boolean isVisible() {
    return myPanel.isVisible();
  }
  private final Editor myEditor;
  // Shared alarm used to postpone showing the hint by DELAY ms (see MyComponentHint.show).
  private static final Alarm myAlarm = new Alarm();
  private final RowIcon myHighlightedIcon;
  private final JLabel myIconLabel;
  private final RowIcon myInactiveIcon;
  private static final int DELAY = 500;
  private final MyComponentHint myComponentHint;
  private volatile boolean myPopupShown;
  private boolean myDisposed;
  private volatile ListPopup myPopup;
  private final PsiFile myFile;
  private final JPanel myPanel = new JPanel();
  private PopupMenuListener myOuterComboboxPopupListener;
  /** Shows the hint at the default position computed from the caret. EDT only. */
  @NotNull
  public static IntentionHintComponent showIntentionHint(@NotNull Project project,
                                                         @NotNull PsiFile file,
                                                         @NotNull Editor editor,
                                                         @NotNull ShowIntentionsPass.IntentionsInfo intentions,
                                                         boolean showExpanded) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    final Point position = getHintPosition(editor);
    return showIntentionHint(project, file, editor, intentions, showExpanded, position);
  }
  /**
   * Shows the hint at the given position; when {@code showExpanded} is true the
   * action list popup is opened as well (later, via invokeLater, once the editor is showing).
   */
  @NotNull
  public static IntentionHintComponent showIntentionHint(@NotNull final Project project,
                                                         @NotNull PsiFile file,
                                                         @NotNull final Editor editor,
                                                         @NotNull ShowIntentionsPass.IntentionsInfo intentions,
                                                         boolean showExpanded,
                                                         @NotNull Point position) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    final IntentionHintComponent component = new IntentionHintComponent(project, file, editor, intentions);
    component.showIntentionHintImpl(!showExpanded, position);
    Disposer.register(project, component);
    if (showExpanded) {
      ApplicationManager.getApplication().invokeLater(new Runnable() {
        @Override
        public void run() {
          if (!editor.isDisposed() && editor.getComponent().isShowing()) {
            component.showPopup(false);
          }
        }
      }, project.getDisposed());
    }
    return component;
  }
  @TestOnly
  public boolean isDisposed() {
    return myDisposed;
  }
  /** Hides the hint and panel and detaches the combobox popup listener, if any. EDT only. */
  @Override
  public void dispose() {
    ApplicationManager.getApplication().assertIsDispatchThread();
    myDisposed = true;
    myComponentHint.hide();
    myPanel.hide();
    if (myOuterComboboxPopupListener != null) {
      final Container ancestor = SwingUtilities.getAncestorOfClass(JComboBox.class, myEditor.getContentComponent());
      if (ancestor != null) {
        ((JComboBox)ancestor).removePopupMenuListener(myOuterComboboxPopupListener);
      }
      myOuterComboboxPopupListener = null;
    }
  }
  @Override
  public void editorScrolled() {
    closePopup();
  }
  public boolean isForEditor(@NotNull Editor editor) {
    return editor == myEditor;
  }
  /** Outcome of {@link #updateActions}: what the caller must do with the popup. */
  public enum PopupUpdateResult {
    NOTHING_CHANGED, // intentions did not change
    CHANGED_INVISIBLE, // intentions changed but the popup has not been shown yet, so can recreate list silently
    HIDE_AND_RECREATE // ahh, has to close already shown popup, recreate and re-show again
  }
  /**
   * Pushes a fresh intentions list into the existing popup's step and reports
   * whether the popup can be kept, silently refreshed, or must be recreated.
   */
  @NotNull
  public PopupUpdateResult updateActions(@NotNull ShowIntentionsPass.IntentionsInfo intentions) {
    if (myPopup.isDisposed() || !myFile.isValid()) {
      return PopupUpdateResult.HIDE_AND_RECREATE;
    }
    IntentionListStep step = (IntentionListStep)myPopup.getListStep();
    if (!step.wrapAndUpdateActions(intentions, true)) {
      return PopupUpdateResult.NOTHING_CHANGED;
    }
    if (!myPopupShown) {
      return PopupUpdateResult.CHANGED_INVISIBLE;
    }
    return PopupUpdateResult.HIDE_AND_RECREATE;
  }
  /** Returns the action at {@code index} in the popup list, or null if unavailable. */
  @Nullable
  @TestOnly
  public IntentionAction getAction(int index) {
    if (myPopup == null || myPopup.isDisposed()) {
      return null;
    }
    ListPopupStep listStep = myPopup.getListStep();
    List<IntentionActionWithTextCaching> values = listStep.getValues();
    if (values.size() <= index) {
      return null;
    }
    return values.get(index).getAction();
  }
  /** Rebuilds the popup from its current step (a shown popup cannot be reused). EDT only. */
  public void recreate() {
    ApplicationManager.getApplication().assertIsDispatchThread();
    ListPopupStep step = myPopup.getListStep();
    recreateMyPopup(step);
  }
  // Shows the bulb as a question hint; clicking/invoking it opens the popup.
  private void showIntentionHintImpl(final boolean delay, @NotNull Point position) {
    final int offset = myEditor.getCaretModel().getOffset();
    myComponentHint.setShouldDelay(delay);
    HintManagerImpl hintManager = HintManagerImpl.getInstanceImpl();
    QuestionAction action = new PriorityQuestionAction() {
      @Override
      public boolean execute() {
        showPopup(false);
        return true;
      }
      @Override
      public int getPriority() {
        return -10;
      }
    };
    if (hintManager.canShowQuestionAction(action)) {
      hintManager.showQuestionHint(myEditor, position, offset, offset, myComponentHint, action, HintManager.ABOVE);
    }
  }
  // Computes where the bulb should appear, in layered-pane coordinates.
  @NotNull
  private static Point getHintPosition(Editor editor) {
    if (ApplicationManager.getApplication().isUnitTestMode()) return new Point();
    final int offset = editor.getCaretModel().getOffset();
    final VisualPosition pos = editor.offsetToVisualPosition(offset);
    int line = pos.line;
    final Point position = editor.visualPositionToXY(new VisualPosition(line, 0));
    LOG.assertTrue(editor.getComponent().isDisplayable());
    JComponent convertComponent = editor.getContentComponent();
    Point realPoint;
    final boolean oneLineEditor = editor.isOneLineMode();
    if (oneLineEditor) {
      // place bulb at the corner of the surrounding component
      final JComponent contentComponent = editor.getContentComponent();
      Container ancestorOfClass = SwingUtilities.getAncestorOfClass(JComboBox.class, contentComponent);
      if (ancestorOfClass != null) {
        convertComponent = (JComponent) ancestorOfClass;
      } else {
        ancestorOfClass = SwingUtilities.getAncestorOfClass(JTextField.class, contentComponent);
        if (ancestorOfClass != null) {
          convertComponent = (JComponent) ancestorOfClass;
        }
      }
      realPoint = new Point(- (AllIcons.Actions.RealIntentionBulb.getIconWidth() / 2) - 4, - (AllIcons.Actions.RealIntentionBulb
        .getIconHeight() / 2));
    } else {
      // try to place bulb on the same line
      int yShift = -(NORMAL_BORDER_SIZE + AllIcons.Actions.RealIntentionBulb.getIconHeight());
      if (canPlaceBulbOnTheSameLine(editor)) {
        final int borderHeight = NORMAL_BORDER_SIZE;
        yShift = -(borderHeight + (AllIcons.Actions.RealIntentionBulb.getIconHeight() - editor.getLineHeight()) /2 + 3);
      }
      final int xShift = AllIcons.Actions.RealIntentionBulb.getIconWidth();
      Rectangle visibleArea = editor.getScrollingModel().getVisibleArea();
      realPoint = new Point(Math.max(0,visibleArea.x - xShift), position.y + yShift);
    }
    Point location = SwingUtilities.convertPoint(convertComponent, realPoint, editor.getComponent().getRootPane().getLayeredPane());
    return new Point(location.x, location.y);
  }
  // True when the bulb fits in the indent space left of the line's first non-space character.
  private static boolean canPlaceBulbOnTheSameLine(Editor editor) {
    if (ApplicationManager.getApplication().isUnitTestMode() || editor.isOneLineMode()) return false;
    final int offset = editor.getCaretModel().getOffset();
    final VisualPosition pos = editor.offsetToVisualPosition(offset);
    int line = pos.line;
    final int firstNonSpaceColumnOnTheLine = EditorActionUtil.findFirstNonSpaceColumnOnTheLine(editor, line);
    if (firstNonSpaceColumnOnTheLine == -1) return false;
    final Point point = editor.visualPositionToXY(new VisualPosition(line, firstNonSpaceColumnOnTheLine));
    return point.x > AllIcons.Actions.RealIntentionBulb.getIconWidth() + (editor.isOneLineMode() ? SMALL_BORDER_SIZE : NORMAL_BORDER_SIZE) * 2;
  }
  // Builds the icon panel (bulb + arrow), the mouse handlers, and the initial popup. EDT only.
  private IntentionHintComponent(@NotNull Project project,
                                 @NotNull PsiFile file,
                                 @NotNull final Editor editor,
                                 @NotNull ShowIntentionsPass.IntentionsInfo intentions) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    myFile = file;
    myEditor = editor;
    myPanel.setLayout(new BorderLayout());
    myPanel.setOpaque(false);
    // Bulb flavor: refactoring bulb > quick-fix bulb > plain intention bulb.
    boolean showRefactoringsBulb = ContainerUtil.exists(intentions.inspectionFixesToShow, new Condition<HighlightInfo.IntentionActionDescriptor>() {
      @Override
      public boolean value(HighlightInfo.IntentionActionDescriptor descriptor) {
        return descriptor.getAction() instanceof BaseRefactoringIntentionAction;
      }
    });
    boolean showFix = !showRefactoringsBulb && ContainerUtil.exists(intentions.errorFixesToShow, new Condition<HighlightInfo.IntentionActionDescriptor>() {
      @Override
      public boolean value(HighlightInfo.IntentionActionDescriptor descriptor) {
        return IntentionManagerSettings.getInstance().isShowLightBulb(descriptor.getAction());
      }
    });
    Icon smartTagIcon = showRefactoringsBulb ? AllIcons.Actions.RefactoringBulb : showFix ? AllIcons.Actions.QuickfixBulb : AllIcons.Actions.IntentionBulb;
    myHighlightedIcon = new RowIcon(2);
    myHighlightedIcon.setIcon(smartTagIcon, 0);
    myHighlightedIcon.setIcon(AllIcons.General.ArrowDown, 1);
    myInactiveIcon = new RowIcon(2);
    myInactiveIcon.setIcon(smartTagIcon, 0);
    myInactiveIcon.setIcon(ourInactiveArrowIcon, 1);
    myIconLabel = new JLabel(myInactiveIcon);
    myIconLabel.setOpaque(false);
    myPanel.add(myIconLabel, BorderLayout.CENTER);
    myPanel.setBorder(editor.isOneLineMode() ? INACTIVE_BORDER_SMALL : INACTIVE_BORDER);
    myIconLabel.addMouseListener(new MouseAdapter() {
      @Override
      public void mousePressed(@NotNull MouseEvent e) {
        if (!e.isPopupTrigger() && e.getButton() == MouseEvent.BUTTON1) {
          showPopup(true);
        }
      }
      @Override
      public void mouseEntered(@NotNull MouseEvent e) {
        onMouseEnter(editor.isOneLineMode());
      }
      @Override
      public void mouseExited(@NotNull MouseEvent e) {
        onMouseExit(editor.isOneLineMode());
      }
    });
    myComponentHint = new MyComponentHint(myPanel);
    ListPopupStep step = new IntentionListStep(this, intentions, myEditor, myFile, project);
    recreateMyPopup(step);
    // dispose myself when editor closed
    EditorFactory.getInstance().addEditorFactoryListener(new EditorFactoryAdapter() {
      @Override
      public void editorReleased(@NotNull EditorFactoryEvent event) {
        if (event.getEditor() == myEditor) {
          hide();
        }
      }
    }, this);
  }
  /** Disposes this component; {@link #dispose()} hides the hint and panel. */
  public void hide() {
    Disposer.dispose(this);
  }
  // De-highlight the bulb unless the popup window is still up.
  private void onMouseExit(final boolean small) {
    Window ancestor = SwingUtilities.getWindowAncestor(myPopup.getContent());
    if (ancestor == null) {
      myIconLabel.setIcon(myInactiveIcon);
      myPanel.setBorder(small ? INACTIVE_BORDER_SMALL : INACTIVE_BORDER);
    }
  }
  // Highlight the bulb and show the keyboard-shortcut tooltip.
  private void onMouseEnter(final boolean small) {
    myIconLabel.setIcon(myHighlightedIcon);
    myPanel.setBorder(small ? createActiveBorderSmall() : createActiveBorder());
    String acceleratorsText = KeymapUtil.getFirstKeyboardShortcutText(
      ActionManager.getInstance().getAction(IdeActions.ACTION_SHOW_INTENTION_ACTIONS));
    if (!acceleratorsText.isEmpty()) {
      myIconLabel.setToolTipText(CodeInsightBundle.message("lightbulb.tooltip", acceleratorsText));
    }
  }
  @TestOnly
  public LightweightHint getComponentHint() {
    return myComponentHint;
  }
  private void closePopup() {
    ApplicationManager.getApplication().assertIsDispatchThread();
    myPopup.cancel();
    myPopupShown = false;
  }
  // Shows the action list popup either below the bulb (mouse click) or at the best editor position.
  private void showPopup(boolean mouseClick) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    if (myPopup == null || myPopup.isDisposed()) return;
    if (mouseClick && myPanel.isShowing()) {
      final RelativePoint swCorner = RelativePoint.getSouthWestOf(myPanel);
      final int yOffset = canPlaceBulbOnTheSameLine(myEditor) ? 0 : myEditor.getLineHeight() - (myEditor.isOneLineMode() ? SMALL_BORDER_SIZE : NORMAL_BORDER_SIZE);
      myPopup.show(new RelativePoint(swCorner.getComponent(), new Point(swCorner.getPoint().x, swCorner.getPoint().y + yOffset)));
    }
    else {
      myPopup.showInBestPositionFor(myEditor);
    }
    myPopupShown = true;
  }
  // Disposes any existing popup and builds a new one from the given step, wiring
  // selection-driven scope highlighting (including injected-fragment editors).
  private void recreateMyPopup(@NotNull ListPopupStep step) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    if (myPopup != null) {
      Disposer.dispose(myPopup);
    }
    myPopup = JBPopupFactory.getInstance().createListPopup(step);
    boolean committed = PsiDocumentManager.getInstance(myFile.getProject()).isCommitted(myEditor.getDocument());
    final PsiFile injectedFile = committed ? InjectedLanguageUtil.findInjectedPsiNoCommit(myFile, myEditor.getCaretModel().getOffset()) : null;
    final Editor injectedEditor = InjectedLanguageUtil.getInjectedEditorForInjectedFile(myEditor, injectedFile);
    final ScopeHighlighter highlighter = new ScopeHighlighter(myEditor);
    final ScopeHighlighter injectionHighlighter = new ScopeHighlighter(injectedEditor);
    myPopup.addListener(new JBPopupListener.Adapter() {
      @Override
      public void onClosed(LightweightWindowEvent event) {
        highlighter.dropHighlight();
        injectionHighlighter.dropHighlight();
        myPopupShown = false;
      }
    });
    myPopup.addListSelectionListener(new ListSelectionListener() {
      @Override
      public void valueChanged(@NotNull ListSelectionEvent e) {
        final Object source = e.getSource();
        highlighter.dropHighlight();
        injectionHighlighter.dropHighlight();
        if (source instanceof DataProvider) {
          final Object selectedItem = PlatformDataKeys.SELECTED_ITEM.getData((DataProvider)source);
          if (selectedItem instanceof IntentionActionWithTextCaching) {
            final IntentionAction action = ((IntentionActionWithTextCaching)selectedItem).getAction();
            if (action instanceof SuppressIntentionActionFromFix) {
              // Highlight the container the suppress fix would apply to, in whichever
              // editor (host or injected) it belongs to.
              if (injectedFile != null && ((SuppressIntentionActionFromFix)action).isShouldBeAppliedToInjectionHost() == ThreeState.NO) {
                final PsiElement at = injectedFile.findElementAt(injectedEditor.getCaretModel().getOffset());
                final PsiElement container = ((SuppressIntentionActionFromFix)action).getContainer(at);
                if (container != null) {
                  injectionHighlighter.highlight(container, Collections.singletonList(container));
                }
              }
              else {
                final PsiElement at = myFile.findElementAt(myEditor.getCaretModel().getOffset());
                final PsiElement container = ((SuppressIntentionActionFromFix)action).getContainer(at);
                if (container != null) {
                  highlighter.highlight(container, Collections.singletonList(container));
                }
              }
            }
          }
        }
      }
    });
    if (myEditor.isOneLineMode()) {
      // hide popup on combobox popup show
      final Container ancestor = SwingUtilities.getAncestorOfClass(JComboBox.class, myEditor.getContentComponent());
      if (ancestor != null) {
        final JComboBox comboBox = (JComboBox)ancestor;
        myOuterComboboxPopupListener = new PopupMenuListenerAdapter() {
          @Override
          public void popupMenuWillBecomeVisible(PopupMenuEvent e) {
            hide();
          }
        };
        comboBox.addPopupMenuListener(myOuterComboboxPopupListener);
      }
    }
    Disposer.register(this, myPopup);
    Disposer.register(myPopup, new Disposable() {
      @Override
      public void dispose() {
        // NOTE(review): appears to exist only to assert popup disposal happens on the EDT.
        ApplicationManager.getApplication().assertIsDispatchThread();
      }
    });
  }
  // Called when the popup's root step was canceled; rebuilds unless stale or disposed.
  void canceled(@NotNull ListPopupStep intentionListStep) {
    if (myPopup.getListStep() != intentionListStep || myDisposed) {
      return;
    }
    // Root canceled. Create new popup. This one cannot be reused.
    recreateMyPopup(intentionListStep);
  }
  /** A LightweightHint that can postpone showing itself by {@link #DELAY} ms. */
  private static class MyComponentHint extends LightweightHint {
    private boolean myVisible;
    private boolean myShouldDelay;
    private MyComponentHint(JComponent component) {
      super(component);
    }
    @Override
    public void show(@NotNull final JComponent parentComponent,
                     final int x,
                     final int y,
                     final JComponent focusBackComponent,
                     @NotNull HintHint hintHint) {
      myVisible = true;
      if (myShouldDelay) {
        myAlarm.cancelAllRequests();
        myAlarm.addRequest(new Runnable() {
          @Override
          public void run() {
            showImpl(parentComponent, x, y, focusBackComponent);
          }
        }, DELAY);
      }
      else {
        showImpl(parentComponent, x, y, focusBackComponent);
      }
    }
    private void showImpl(JComponent parentComponent, int x, int y, JComponent focusBackComponent) {
      if (!parentComponent.isShowing()) return;
      super.show(parentComponent, x, y, focusBackComponent, new HintHint(parentComponent, new Point(x, y)));
    }
    @Override
    public void hide() {
      super.hide();
      myVisible = false;
      myAlarm.cancelAllRequests();
    }
    @Override
    public boolean isVisible() {
      // myVisible covers the window between show() being called and the delayed showImpl() firing.
      return myVisible || super.isVisible();
    }
    private void setShouldDelay(boolean shouldDelay) {
      myShouldDelay = shouldDelay;
    }
  }
  /** Toggles a single intention on/off in the intention settings. */
  public static class EnableDisableIntentionAction extends AbstractEditIntentionSettingsAction {
    private final IntentionManagerSettings mySettings = IntentionManagerSettings.getInstance();
    private final IntentionAction myAction;
    public EnableDisableIntentionAction(IntentionAction action) {
      super(action);
      myAction = action;
      // needed for checking errors in user written actions
      //noinspection ConstantConditions
      LOG.assertTrue(myFamilyName != null, "action "+action.getClass()+" family returned null");
    }
    @Override
    @NotNull
    public String getText() {
      return mySettings.isEnabled(myAction) ?
             CodeInsightBundle.message("disable.intention.action", myFamilyName) :
             CodeInsightBundle.message("enable.intention.action", myFamilyName);
    }
    @Override
    public void invoke(@NotNull Project project, Editor editor, PsiFile file) throws IncorrectOperationException {
      mySettings.setEnabled(myAction, !mySettings.isEnabled(myAction));
    }
    @Override
    public String toString() {
      return getText();
    }
  }
  /** Opens the intention settings dialog pre-selected on this action's family. */
  public static class EditIntentionSettingsAction extends AbstractEditIntentionSettingsAction implements HighPriorityAction {
    public EditIntentionSettingsAction(IntentionAction action) {
      super(action);
    }
    @NotNull
    @Override
    public String getText() {
      return "Edit intention settings";
    }
    @Override
    public void invoke(@NotNull Project project, Editor editor, PsiFile file) throws IncorrectOperationException {
      final IntentionSettingsConfigurable configurable = new IntentionSettingsConfigurable();
      ShowSettingsUtil.getInstance().editConfigurable(project, configurable, new Runnable() {
        @Override
        public void run() {
          SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
              configurable.selectIntention(myFamilyName);
            }
          });
        }
      });
    }
  }
  // Base for the two settings-related actions above; never run in a write action.
  private abstract static class AbstractEditIntentionSettingsAction implements IntentionAction {
    final String myFamilyName;
    // NOTE(review): disabled when the wrapped action's family name equals its full
    // family name — presumably to hide the entry for top-level (non-nested) families; confirm.
    private final boolean myDisabled;
    private AbstractEditIntentionSettingsAction(IntentionAction action) {
      myFamilyName = action.getFamilyName();
      myDisabled = action instanceof IntentionActionWrapper &&
                   Comparing.equal(action.getFamilyName(), ((IntentionActionWrapper)action).getFullFamilyName());
    }
    @NotNull
    @Override
    public String getFamilyName() {
      return getText();
    }
    @Override
    public boolean isAvailable(@NotNull Project project, Editor editor, PsiFile file) {
      return !myDisabled;
    }
    @Override
    public boolean startInWriteAction() {
      return false;
    }
  }
}
| |
/*
* Copyright 2012-2022 The Feign Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package feign;
import static feign.assertj.FeignAssertions.assertThat;
import static java.util.Arrays.asList;
import static org.assertj.core.data.MapEntry.entry;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import feign.Request.HttpMethod;
import feign.template.UriUtils;
public class RequestTemplateTest {
@Rule
public final ExpectedException thrown = ExpectedException.none();
/**
* Avoid depending on guava solely for map literals.
*/
private static <K, V> Map<K, V> mapOf(K key, V val) {
Map<K, V> result = new LinkedHashMap<>();
result.put(key, val);
return result;
}
private static <K, V> Map<K, V> mapOf(K k1, V v1, K k2, V v2) {
Map<K, V> result = mapOf(k1, v1);
result.put(k2, v2);
return result;
}
private static <K, V> Map<K, V> mapOf(K k1, V v1, K k2, V v2, K k3, V v3) {
Map<K, V> result = mapOf(k1, v1, k2, v2);
result.put(k3, v3);
return result;
}
private static String expand(String template, Map<String, Object> variables) {
RequestTemplate requestTemplate = new RequestTemplate();
requestTemplate.uri(template);
return requestTemplate.resolve(variables).url();
}
  // Each substituted value must come back pct-encoded via UriUtils.encode.
  @Test
  public void expandUrlEncoded() {
    for (String val : Arrays.asList("apples", "sp ace", "unic???de", "qu?stion")) {
      assertThat(expand("/users/{user}", mapOf("user", val)))
          .isEqualTo("/users/" + UriUtils.encode(val, Util.UTF_8));
    }
  }
  @Test
  public void expandMultipleParams() {
    assertThat(expand("/users/{user}/{repo}", mapOf("user", "unic???de", "repo", "foo")))
        .isEqualTo("/users/unic%3F%3F%3Fde/foo");
  }
  // Hyphens are legal inside template variable names.
  @Test
  public void expandParamKeyHyphen() {
    assertThat(expand("/{user-dir}", mapOf("user-dir", "foo")))
        .isEqualTo("/foo");
  }
  // An unresolved variable expands to empty rather than failing.
  @Test
  public void expandMissingParamProceeds() {
    assertThat(expand("/{user-dir}", mapOf("user_dir", "foo")))
        .isEqualTo("/");
  }
  // Slashes inside a path variable's value must not be pct-encoded.
  @Test
  public void resolveTemplateWithParameterizedPathSkipsEncodingSlash() {
    RequestTemplate template = new RequestTemplate().method(HttpMethod.GET)
        .uri("{zoneId}");
    template = template.resolve(mapOf("zoneId", "/hostedzone/Z1PA6795UKMFR9"));
    assertThat(template)
        .hasUrl("/hostedzone/Z1PA6795UKMFR9");
  }
  // A binary body must not interfere with URI resolution.
  @Test
  public void resolveTemplateWithBinaryBody() {
    RequestTemplate template = new RequestTemplate().method(HttpMethod.GET)
        .uri("{zoneId}")
        .body(new byte[] {7, 3, -3, -7}, null);
    template = template.resolve(mapOf("zoneId", "/hostedzone/Z1PA6795UKMFR9"));
    assertThat(template)
        .hasUrl("/hostedzone/Z1PA6795UKMFR9");
  }
  // target() prepends the absolute base to an already-set relative uri.
  @Test
  public void canInsertAbsoluteHref() {
    RequestTemplate template = new RequestTemplate().method(HttpMethod.GET)
        .uri("/hostedzone/Z1PA6795UKMFR9");
    template.target("https://route53.amazonaws.com/2012-12-12");
    assertThat(template)
        .hasUrl("https://route53.amazonaws.com/2012-12-12/hostedzone/Z1PA6795UKMFR9");
  }
  @Test
  public void resolveTemplateWithRelativeUriWithQuery() {
    RequestTemplate template = new RequestTemplate()
        .method(HttpMethod.GET)
        .uri("/wsdl/testcase?wsdl")
        .target("https://api.example.com");
    assertThat(template).hasUrl("https://api.example.com/wsdl/testcase?wsdl");
  }
  // Literal query params in the uri coexist with templated ones added via query().
  @Test
  public void resolveTemplateWithBaseAndParameterizedQuery() {
    RequestTemplate template = new RequestTemplate().method(HttpMethod.GET)
        .uri("/?Action=DescribeRegions").query("RegionName.1", "{region}");
    template = template.resolve(mapOf("region", "eu-west-1"));
    assertThat(template)
        .hasQueries(
            entry("Action", Collections.singletonList("DescribeRegions")),
            entry("RegionName.1", Collections.singletonList("eu-west-1")));
  }
  // An iterable value expands into repeated query values for the same key.
  @Test
  public void resolveTemplateWithBaseAndParameterizedIterableQuery() {
    RequestTemplate template = new RequestTemplate().method(HttpMethod.GET)
        .uri("/?Query=one").query("Queries", "{queries}");
    template = template.resolve(mapOf("queries", Arrays.asList("us-east-1", "eu-west-1")));
    assertThat(template)
        .hasQueries(
            entry("Query", Collections.singletonList("one")),
            entry("Queries", asList("us-east-1", "eu-west-1")));
  }
  // A per-query CollectionFormat overrides the template-level default (EXPLODED vs CSV).
  @Test
  public void resolveTemplateWithMixedCollectionFormatsByQuery() {
    RequestTemplate template = new RequestTemplate()
        .method(HttpMethod.GET)
        .collectionFormat(CollectionFormat.EXPLODED)
        .uri("/api/collections")
        .query("keys", "{keys}") // default collection format
        .query("values[]", Collections.singletonList("{values[]}"), CollectionFormat.CSV);
    template = template.resolve(mapOf("keys", Arrays.asList("one", "two"),
        "values[]", Arrays.asList("1", "2")));
    assertThat(template.url())
        .isEqualToIgnoringCase("/api/collections?keys=one&keys=two&values%5B%5D=1%2C2");
  }
@Test
public void resolveTemplateWithHeaderSubstitutions() {
RequestTemplate template = new RequestTemplate().method(HttpMethod.GET)
.header("Auth-Token", "{authToken}");
template = template.resolve(mapOf("authToken", "1234"));
assertThat(template)
.hasHeaders(entry("Auth-Token", Collections.singletonList("1234")));
}
@Test
public void resolveTemplateWithHeaderSubstitutionsNotAtStart() {
RequestTemplate template = new RequestTemplate().method(HttpMethod.GET)
.header("Authorization", "Bearer {token}");
template = template.resolve(mapOf("token", "1234"));
assertThat(template)
.hasHeaders(entry("Authorization", Collections.singletonList("Bearer 1234")));
}
@Test
public void resolveTemplateWithHeaderWithEscapedCurlyBrace() {
RequestTemplate template = new RequestTemplate().method(HttpMethod.GET)
.header("Encoded", "{{{{dont_expand_me}}");
template.resolve(mapOf("dont_expand_me", "1234"));
assertThat(template)
.hasHeaders(entry("Encoded", Collections.singletonList("{{{{dont_expand_me}}")));
}
/**
 * Expansion must leave the literal parts of a vendor media type, including the
 * "+" sign, intact ("this ensures we don't mess up vnd types").
 */
@Test
public void resolveTemplateWithHeaderIncludingSpecialCharacters() {
    RequestTemplate template = new RequestTemplate()
        .method(HttpMethod.GET)
        .header("Accept", "application/vnd.github.v3+{type}");
    RequestTemplate resolved = template.resolve(mapOf("type", "json"));
    assertThat(resolved)
        .hasHeaders(entry("Accept", Collections.singletonList("application/vnd.github.v3+json")));
  }
@Test
public void resolveTemplateWithHeaderEmptyResult() {
    // A variable that resolves to the empty string removes the header entirely.
    RequestTemplate template = new RequestTemplate()
        .method(HttpMethod.GET)
        .header("Encoded", "{var}");
    RequestTemplate resolved = template.resolve(mapOf("var", ""));
    assertThat(resolved).hasNoHeader("Encoded");
  }
@Test
public void resolveTemplateWithMixedRequestLineParams() {
    // Path and query variables are resolved together from a single map.
    RequestTemplate template = new RequestTemplate()
        .method(HttpMethod.GET)
        .uri("/domains/{domainId}/records")
        .query("name", "{name}")
        .query("type", "{type}");
    RequestTemplate resolved =
        template.resolve(mapOf("domainId", 1001, "name", "denominator.io", "type", "CNAME"));
    assertThat(resolved)
        .hasQueries(
            entry("name", Collections.singletonList("denominator.io")),
            entry("type", Collections.singletonList("CNAME")));
  }
@Test
public void insertHasQueryParams() {
    // target() prepends the base URL's path and merges its query parameters with ours.
    RequestTemplate template = new RequestTemplate()
        .method(HttpMethod.GET)
        .uri("/domains/1001/records")
        .query("name", "denominator.io")
        .query("type", "CNAME");
    template.target("https://host/v1.0/1234?provider=foo");
    assertThat(template)
        .hasPath("https://host/v1.0/1234/domains/1001/records")
        .hasQueries(
            entry("name", Collections.singletonList("denominator.io")),
            entry("type", Collections.singletonList("CNAME")),
            entry("provider", Collections.singletonList("foo")));
  }
@Test
public void resolveTemplateWithBodyTemplateSetsBodyAndContentLength() {
    // Resolving a body template decodes the %7B/%7D braces, expands the variables,
    // and sets Content-Length to the resulting byte count.
    RequestTemplate template = new RequestTemplate()
        .method(HttpMethod.POST)
        .bodyTemplate(
            "%7B\"customer_name\": \"{customer_name}\", \"user_name\": \"{user_name}\", " +
                "\"password\": \"{password}\"%7D",
            Util.UTF_8);
    RequestTemplate resolved = template.resolve(
        mapOf(
            "customer_name", "netflix",
            "user_name", "denominator",
            "password", "password"));
    assertThat(resolved)
        .hasBody(
            "{\"customer_name\": \"netflix\", \"user_name\": \"denominator\", \"password\": \"password\"}")
        .hasHeaders(
            entry("Content-Length",
                Collections.singletonList(String.valueOf(resolved.body().length))));
  }
@Test
public void resolveTemplateWithBodyTemplateDoesNotDoubleDecode() {
    // Percent-sequences inside a resolved value ("+", "%25") must not be decoded a
    // second time when the body template itself is decoded.
    RequestTemplate template = new RequestTemplate()
        .method(HttpMethod.POST)
        .bodyTemplate(
            "%7B\"customer_name\": \"{customer_name}\", \"user_name\": \"{user_name}\", \"password\": \"{password}\"%7D",
            Util.UTF_8);
    RequestTemplate resolved = template.resolve(
        mapOf(
            "customer_name", "netflix",
            "user_name", "denominator",
            "password", "abc+123%25d8"));
    assertThat(resolved)
        .hasBody(
            "{\"customer_name\": \"netflix\", \"user_name\": \"denominator\", \"password\": \"abc+123%25d8\"}");
  }
@Test
public void skipUnresolvedQueries() {
    // Query parameters whose variables have no binding are dropped from the request.
    RequestTemplate template = new RequestTemplate()
        .method(HttpMethod.GET)
        .uri("/domains/{domainId}/records")
        .query("optional", "{optional}")
        .query("name", "{nameVariable}");
    RequestTemplate resolved =
        template.resolve(mapOf("domainId", 1001, "nameVariable", "denominator.io"));
    assertThat(resolved)
        .hasQueries(entry("name", Collections.singletonList("denominator.io")));
  }
@Test
public void allQueriesUnresolvable() {
    // When every query variable is unbound, the query string disappears entirely.
    RequestTemplate template = new RequestTemplate()
        .method(HttpMethod.GET)
        .uri("/domains/{domainId}/records")
        .query("optional", "{optional}")
        .query("optional2", "{optional2}");
    RequestTemplate resolved = template.resolve(mapOf("domainId", 1001));
    assertThat(resolved)
        .hasUrl("/domains/1001/records")
        .hasQueries();
  }
@Test
public void spaceEncodingInUrlParam() {
    // Spaces are percent-encoded (%20) in both the path and the query string.
    RequestTemplate template = new RequestTemplate()
        .method(HttpMethod.GET)
        .uri("/api/{value1}?key={value2}");
    RequestTemplate resolved = template.resolve(mapOf("value1", "ABC 123", "value2", "XYZ 123"));
    assertThat(resolved.request().url()).isEqualTo("/api/ABC%20123?key=XYZ%20123");
  }
@Test
public void useCaseInsensitiveHeaderFieldNames() {
    // HTTP header names are case-insensitive, so "TEST" and "tEST" share one entry.
    final String first = "value1";
    final String second = "value2";
    final RequestTemplate template = new RequestTemplate();
    template.header("TEST", first);
    template.header("tEST", second);
    final String assertionMessage = "Header field names should be case insensitive";
    final Collection<String> values = template.headers().get("test");
    assertNotNull(assertionMessage, values);
    assertTrue(assertionMessage, values.contains(first));
    assertTrue(assertionMessage, values.contains(second));
    assertEquals(1, template.headers().size());
    assertEquals(2, template.headers().get("tesT").size());
  }
@Test
public void encodeSlashTest() {
    // With decodeSlash(false), a slash in a path variable stays encoded as %2F.
    RequestTemplate template = new RequestTemplate()
        .method(HttpMethod.GET)
        .uri("/api/{vhost}")
        .decodeSlash(false);
    RequestTemplate resolved = template.resolve(mapOf("vhost", "/"));
    assertThat(resolved).hasUrl("/api/%2F");
  }
/**
 * Implementations have a bug if they pass junk as the http method.
 */
@SuppressWarnings("deprecation")
@Test
public void uriStuffedIntoMethod() {
    // Passing a URI where the HTTP method belongs must be rejected with a clear message.
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("Invalid HTTP Method: /path?queryParam={queryParam}");
    new RequestTemplate().method("/path?queryParam={queryParam}");
  }
@Test
public void encodedQueryClearedOnNull() {
    // Re-setting a query parameter with a null value array removes it completely.
    RequestTemplate template = new RequestTemplate();
    template.query("param[]", "value");
    assertThat(template).hasQueries(entry("param[]", Collections.singletonList("value")));
    template.query("param[]", (String[]) null);
    assertThat(template.queries()).isEmpty();
  }
@Test
public void encodedQuery() {
    // An already percent-encoded value ("%20") is kept verbatim, while the unsafe
    // key "params[]" is encoded on the query line but exposed decoded via queries().
    RequestTemplate template = new RequestTemplate().query("params[]", "foo%20bar");
    assertThat(template.queryLine()).isEqualTo("?params%5B%5D=foo%20bar");
    assertThat(template).hasQueries(entry("params[]", Collections.singletonList("foo%20bar")));
  }
@Test
public void encodedQueryWithUnsafeCharactersMixedWithUnencoded() {
    // Adding an unencoded then an encoded value under the same unsafe key must
    // collapse to one key with both values; the key itself is encoded once per pair.
    RequestTemplate template = new RequestTemplate()
        .query("params[]", "not encoded") // stored as "param%5D%5B"
        .query("params[]", "encoded"); // stored as "param[]"
    // Fixed: the expected string previously read "...encoded¶ms..." — the "&para"
    // of the "&params" separator had been mangled into the pilcrow character (an
    // HTML-entity decoding artifact), which is not a valid query-line separator.
    assertThat(template.queryLine()).isEqualTo("?params%5B%5D=not%20encoded&params%5B%5D=encoded");
    Map<String, Collection<String>> queries = template.queries();
    assertThat(queries).containsKey("params[]");
    assertThat(queries.get("params[]")).contains("encoded").contains("not%20encoded");
  }
@SuppressWarnings("unchecked")
@Test
public void shouldRetrieveHeadersWithoutNull() {
    // Null values, null collections, and empty collections must not create entries;
    // only the four valid headers survive.
    RequestTemplate template = new RequestTemplate()
        .header("key1", (String) null)
        .header("key2", Collections.emptyList())
        .header("key3", (Collection) null)
        .header("key4", "valid")
        .header("key5", "valid")
        .header("key6", "valid")
        .header("key7", "valid");
    assertThat(template.headers()).hasSize(4);
    assertThat(template.headers().keySet()).containsExactly("key4", "key5", "key6", "key7");
  }
@SuppressWarnings("ConstantConditions")
@Test(expected = UnsupportedOperationException.class)
public void shouldNotInsertHeadersImmutableMap() {
    // headers() exposes an unmodifiable view; direct mutation must throw.
    RequestTemplate template = new RequestTemplate()
        .header("key1", "valid");
    assertThat(template.headers()).hasSize(1);
    assertThat(template.headers().keySet()).containsExactly("key1");
    template.headers().put("key2", Collections.singletonList("other value"));
  }
@Test
public void fragmentShouldNotBeEncodedInUri() {
    // The #fragment survives un-encoded and stays after the appended query string.
    RequestTemplate template = new RequestTemplate()
        .method(HttpMethod.GET)
        .uri("/path#fragment")
        .queries(mapOf("key1", Collections.singletonList("value1")));
    assertThat(template.url()).isEqualTo("/path?key1=value1#fragment");
  }
@Test
public void fragmentShouldNotBeEncodedInTarget() {
    // Same as above, but the fragment arrives via target() with an absolute URL.
    RequestTemplate template = new RequestTemplate()
        .method(HttpMethod.GET)
        .target("https://example.com/path#fragment")
        .queries(mapOf("key1", Collections.singletonList("value1")));
    assertThat(template.url()).isEqualTo("https://example.com/path?key1=value1#fragment");
  }
@Test
public void slashShouldNotBeAppendedForMatrixParams() {
    // A URI consisting of matrix parameters (";k=v") must pass through unchanged,
    // with no trailing slash appended.
    RequestTemplate template = new RequestTemplate().method(HttpMethod.GET)
        .uri("/path;key1=value1;key2=value2", true);
    assertThat(template.url()).isEqualTo("/path;key1=value1;key2=value2");
  }
@Test
public void encodedReservedPreserveSlash() {
    // By default the "//" inside an expanded value is preserved while ":" is
    // still percent-encoded (%3A).
    RequestTemplate template = new RequestTemplate()
        .uri("/get?url={url}")
        .method(HttpMethod.GET);
    RequestTemplate resolved =
        template.resolve(Collections.singletonMap("url", "https://www.google.com"));
    assertThat(resolved.url()).isEqualToIgnoringCase("/get?url=https%3A//www.google.com");
  }
@Test
public void encodedReservedEncodeSlash() {
    // With decodeSlash(false) the slashes in the expanded value are encoded as %2F.
    RequestTemplate template = new RequestTemplate()
        .uri("/get?url={url}")
        .decodeSlash(false)
        .method(HttpMethod.GET);
    RequestTemplate resolved =
        template.resolve(Collections.singletonMap("url", "https://www.google.com"));
    assertThat(resolved.url()).isEqualToIgnoringCase("/get?url=https%3A%2F%2Fwww.google.com");
  }
}
| |
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.rules;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.rules.keys.DefaultRuleKeyBuilderFactory;
import com.facebook.buck.testutil.FakeFileHashCache;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.util.FileHashCache;
import com.facebook.buck.util.NullFileHashCache;
import com.google.common.base.Charsets;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.hash.HashCode;
import com.google.common.io.Files;
import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.FileTime;
/**
 * Tests for {@code DirArtifactCache}: a directory-backed artifact cache keyed by
 * {@code RuleKey}, with an optional store flag ({@code doStore}) and an optional
 * size cap that {@code deleteOldFiles()} enforces (testDeleteSome shows the files
 * with the oldest {@code lastAccessTime} being evicted first).
 */
public class DirArtifactCacheTest {
  @Rule
  public TemporaryFolder tmpDir = new TemporaryFolder();
  // Default no-op hash cache; most tests replace it with a FakeFileHashCache.
  private FileHashCache fileHashCache = new NullFileHashCache();
  // Created per test; closed in tearDown() so each test leaves no open cache behind.
  private DirArtifactCache dirArtifactCache;
  @After
  public void tearDown() {
    if (dirArtifactCache != null) {
      dirArtifactCache.close();
    }
  }
  // Smoke test: constructing the cache over a fresh directory must not throw.
  @Test
  public void testCacheCreation() throws IOException {
    File cacheDir = tmpDir.newFolder();
    dirArtifactCache = new DirArtifactCache(
        "dir",
        new ProjectFilesystem(cacheDir.toPath()),
        Paths.get("."),
        /* doStore */ true,
        /* maxCacheSizeBytes */ Optional.of(0L));
  }
  // Fetching from an empty cache misses, even for a well-formed rule key.
  @Test
  public void testCacheFetchMiss() throws IOException {
    File cacheDir = tmpDir.newFolder();
    File fileX = tmpDir.newFile("x");
    fileHashCache =
        new FakeFileHashCache(
            ImmutableMap.of(fileX.toPath(), HashCode.fromInt(0)));
    dirArtifactCache = new DirArtifactCache(
        "dir",
        new ProjectFilesystem(cacheDir.toPath()),
        Paths.get("."),
        /* doStore */ true,
        /* maxCacheSizeBytes */ Optional.of(0L));
    Files.write("x", fileX, Charsets.UTF_8);
    BuildRule inputRuleX = new BuildRuleForTest(fileX);
    SourcePathResolver resolver = new SourcePathResolver(new BuildRuleResolver(ImmutableSet.of(
        inputRuleX)));
    RuleKey ruleKeyX = new DefaultRuleKeyBuilderFactory(fileHashCache, resolver)
        .newInstance(inputRuleX)
        .build();
    assertEquals(CacheResult.Type.MISS, dirArtifactCache.fetch(ruleKeyX, fileX).getType());
  }
  // A stored artifact is fetched back as a HIT, both over an existing output file
  // and after the output file has been deleted.
  @Test
  public void testCacheStoreAndFetchHit() throws IOException {
    File cacheDir = tmpDir.newFolder();
    File fileX = tmpDir.newFile("x");
    fileHashCache =
        new FakeFileHashCache(
            ImmutableMap.of(fileX.toPath(), HashCode.fromInt(0)));
    dirArtifactCache = new DirArtifactCache(
        "dir",
        new ProjectFilesystem(cacheDir.toPath()),
        Paths.get("."),
        /* doStore */ true,
        /* maxCacheSizeBytes */ Optional.<Long>absent());
    Files.write("x", fileX, Charsets.UTF_8);
    BuildRule inputRuleX = new BuildRuleForTest(fileX);
    SourcePathResolver resolver = new SourcePathResolver(new BuildRuleResolver(ImmutableSet.of(
        inputRuleX)));
    RuleKey ruleKeyX = new DefaultRuleKeyBuilderFactory(fileHashCache, resolver)
        .newInstance(inputRuleX)
        .build();
    dirArtifactCache.store(ImmutableSet.of(ruleKeyX), ImmutableMap.<String, String>of(), fileX);
    // Test that artifact overwrite works.
    assertEquals(CacheResult.Type.HIT, dirArtifactCache.fetch(ruleKeyX, fileX).getType());
    assertEquals(inputRuleX, new BuildRuleForTest(fileX));
    // Test that artifact creation works.
    assertTrue(fileX.delete());
    assertEquals(CacheResult.Type.HIT, dirArtifactCache.fetch(ruleKeyX, fileX).getType());
    assertEquals(inputRuleX, new BuildRuleForTest(fileX));
  }
  // Storing the same key twice must overwrite cleanly and still fetch as a HIT.
  @Test
  public void testCacheStoreOverwrite() throws IOException {
    File cacheDir = tmpDir.newFolder();
    File fileX = tmpDir.newFile("x");
    fileHashCache =
        new FakeFileHashCache(
            ImmutableMap.of(fileX.toPath(), HashCode.fromInt(0)));
    dirArtifactCache = new DirArtifactCache(
        "dir",
        new ProjectFilesystem(cacheDir.toPath()),
        Paths.get("."),
        /* doStore */ true,
        /* maxCacheSizeBytes */ Optional.of(0L));
    Files.write("x", fileX, Charsets.UTF_8);
    BuildRule inputRuleX = new BuildRuleForTest(fileX);
    SourcePathResolver resolver = new SourcePathResolver(new BuildRuleResolver(ImmutableSet.of(
        inputRuleX)));
    RuleKey ruleKeyX = new DefaultRuleKeyBuilderFactory(fileHashCache, resolver)
        .newInstance(inputRuleX)
        .build();
    dirArtifactCache.store(ImmutableSet.of(ruleKeyX), ImmutableMap.<String, String>of(), fileX);
    // Overwrite.
    dirArtifactCache.store(ImmutableSet.of(ruleKeyX), ImmutableMap.<String, String>of(), fileX);
    assertEquals(CacheResult.Type.HIT, dirArtifactCache.fetch(ruleKeyX, fileX).getType());
    assertEquals(inputRuleX, new BuildRuleForTest(fileX));
  }
  // Three distinct artifacts can be stored and fetched independently, and
  // deleteOldFiles() with maxCacheSizeBytes=0 removes everything.
  @Test
  public void testCacheStoresAndFetchHits() throws IOException {
    File cacheDir = tmpDir.newFolder();
    File fileX = tmpDir.newFile("x");
    File fileY = tmpDir.newFile("y");
    File fileZ = tmpDir.newFile("z");
    fileHashCache =
        new FakeFileHashCache(
            ImmutableMap.of(
                fileX.toPath(), HashCode.fromInt(0),
                fileY.toPath(), HashCode.fromInt(1),
                fileZ.toPath(), HashCode.fromInt(2)));
    dirArtifactCache = new DirArtifactCache(
        "dir",
        new ProjectFilesystem(cacheDir.toPath()),
        Paths.get("."),
        /* doStore */ true,
        /* maxCacheSizeBytes */ Optional.of(0L));
    Files.write("x", fileX, Charsets.UTF_8);
    Files.write("y", fileY, Charsets.UTF_8);
    Files.write("z", fileZ, Charsets.UTF_8);
    BuildRule inputRuleX = new BuildRuleForTest(fileX);
    BuildRule inputRuleY = new BuildRuleForTest(fileY);
    BuildRule inputRuleZ = new BuildRuleForTest(fileZ);
    assertFalse(inputRuleX.equals(inputRuleY));
    assertFalse(inputRuleX.equals(inputRuleZ));
    assertFalse(inputRuleY.equals(inputRuleZ));
    SourcePathResolver resolver = new SourcePathResolver(new BuildRuleResolver(ImmutableSet.of(
        inputRuleX,
        inputRuleY,
        inputRuleZ)));
    DefaultRuleKeyBuilderFactory fakeRuleKeyBuilderFactory =
        new DefaultRuleKeyBuilderFactory(fileHashCache, resolver);
    RuleKey ruleKeyX = fakeRuleKeyBuilderFactory
        .newInstance(inputRuleX)
        .build();
    RuleKey ruleKeyY = fakeRuleKeyBuilderFactory
        .newInstance(inputRuleY)
        .build();
    RuleKey ruleKeyZ = fakeRuleKeyBuilderFactory
        .newInstance(inputRuleZ)
        .build();
    assertEquals(CacheResult.Type.MISS, dirArtifactCache.fetch(ruleKeyX, fileX).getType());
    assertEquals(CacheResult.Type.MISS, dirArtifactCache.fetch(ruleKeyY, fileY).getType());
    assertEquals(CacheResult.Type.MISS, dirArtifactCache.fetch(ruleKeyZ, fileZ).getType());
    dirArtifactCache.store(ImmutableSet.of(ruleKeyX), ImmutableMap.<String, String>of(), fileX);
    dirArtifactCache.store(ImmutableSet.of(ruleKeyY), ImmutableMap.<String, String>of(), fileY);
    dirArtifactCache.store(ImmutableSet.of(ruleKeyZ), ImmutableMap.<String, String>of(), fileZ);
    assertTrue(fileX.delete());
    assertTrue(fileY.delete());
    assertTrue(fileZ.delete());
    assertEquals(CacheResult.Type.HIT, dirArtifactCache.fetch(ruleKeyX, fileX).getType());
    assertEquals(CacheResult.Type.HIT, dirArtifactCache.fetch(ruleKeyY, fileY).getType());
    assertEquals(CacheResult.Type.HIT, dirArtifactCache.fetch(ruleKeyZ, fileZ).getType());
    assertEquals(inputRuleX, new BuildRuleForTest(fileX));
    assertEquals(inputRuleY, new BuildRuleForTest(fileY));
    assertEquals(inputRuleZ, new BuildRuleForTest(fileZ));
    // 3 stored artifacts produce 6 files on disk — presumably one data file plus one
    // metadata file per entry; confirm against DirArtifactCache's on-disk layout.
    assertEquals(6, cacheDir.listFiles().length);
    dirArtifactCache.deleteOldFiles();
    assertEquals(0, cacheDir.listFiles().length);
  }
  // With doStore=false, store() is a no-op and every fetch keeps missing.
  @Test
  public void testNoStoreMisses() throws IOException {
    File cacheDir = tmpDir.newFolder();
    File fileX = tmpDir.newFile("x");
    File fileY = tmpDir.newFile("y");
    File fileZ = tmpDir.newFile("z");
    fileHashCache =
        new FakeFileHashCache(
            ImmutableMap.of(
                fileX.toPath(), HashCode.fromInt(0),
                fileY.toPath(), HashCode.fromInt(1),
                fileZ.toPath(), HashCode.fromInt(2)));
    dirArtifactCache = new DirArtifactCache(
        "dir",
        new ProjectFilesystem(cacheDir.toPath()),
        Paths.get("."),
        /* doStore */ false,
        /* maxCacheSizeBytes */ Optional.of(0L));
    Files.write("x", fileX, Charsets.UTF_8);
    Files.write("y", fileY, Charsets.UTF_8);
    Files.write("z", fileZ, Charsets.UTF_8);
    BuildRule inputRuleX = new BuildRuleForTest(fileX);
    BuildRule inputRuleY = new BuildRuleForTest(fileY);
    BuildRule inputRuleZ = new BuildRuleForTest(fileZ);
    assertFalse(inputRuleX.equals(inputRuleY));
    assertFalse(inputRuleX.equals(inputRuleZ));
    assertFalse(inputRuleY.equals(inputRuleZ));
    SourcePathResolver resolver = new SourcePathResolver(new BuildRuleResolver(ImmutableSet.of(
        inputRuleX,
        inputRuleY,
        inputRuleZ)));
    DefaultRuleKeyBuilderFactory fakeRuleKeyBuilderFactory =
        new DefaultRuleKeyBuilderFactory(fileHashCache, resolver);
    RuleKey ruleKeyX = fakeRuleKeyBuilderFactory
        .newInstance(inputRuleX)
        .build();
    RuleKey ruleKeyY = fakeRuleKeyBuilderFactory
        .newInstance(inputRuleY)
        .build();
    RuleKey ruleKeyZ = fakeRuleKeyBuilderFactory
        .newInstance(inputRuleZ)
        .build();
    assertEquals(CacheResult.Type.MISS, dirArtifactCache.fetch(ruleKeyX, fileX).getType());
    assertEquals(CacheResult.Type.MISS, dirArtifactCache.fetch(ruleKeyY, fileY).getType());
    assertEquals(CacheResult.Type.MISS, dirArtifactCache.fetch(ruleKeyZ, fileZ).getType());
    dirArtifactCache.store(ImmutableSet.of(ruleKeyX), ImmutableMap.<String, String>of(), fileX);
    dirArtifactCache.store(ImmutableSet.of(ruleKeyY), ImmutableMap.<String, String>of(), fileY);
    dirArtifactCache.store(ImmutableSet.of(ruleKeyZ), ImmutableMap.<String, String>of(), fileZ);
    assertTrue(fileX.delete());
    assertTrue(fileY.delete());
    assertTrue(fileZ.delete());
    assertEquals(CacheResult.Type.MISS, dirArtifactCache.fetch(ruleKeyX, fileX).getType());
    assertEquals(CacheResult.Type.MISS, dirArtifactCache.fetch(ruleKeyY, fileY).getType());
    assertEquals(CacheResult.Type.MISS, dirArtifactCache.fetch(ruleKeyZ, fileZ).getType());
    assertEquals(inputRuleX, new BuildRuleForTest(fileX));
    assertEquals(inputRuleY, new BuildRuleForTest(fileY));
    assertEquals(inputRuleZ, new BuildRuleForTest(fileZ));
    assertEquals(0, cacheDir.listFiles().length);
  }
  // deleteOldFiles() keeps everything while the cache is under its size limit.
  @Test
  public void testDeleteNothing() throws IOException {
    File cacheDir = tmpDir.newFolder();
    File fileX = new File(cacheDir, "x");
    File fileY = new File(cacheDir, "y");
    File fileZ = new File(cacheDir, "z");
    dirArtifactCache = new DirArtifactCache(
        "dir",
        new ProjectFilesystem(tmpDir.getRoot().toPath()),
        Paths.get("."),
        /* doStore */ true,
        /* maxCacheSizeBytes */ Optional.of(1024L));
    Files.write("x", fileX, Charsets.UTF_8);
    Files.write("y", fileY, Charsets.UTF_8);
    Files.write("z", fileZ, Charsets.UTF_8);
    assertEquals(3, cacheDir.listFiles().length);
    dirArtifactCache.deleteOldFiles();
    assertEquals(3, cacheDir.listFiles().length);
  }
  // Without a size limit (absent Optional), deleteOldFiles() deletes nothing.
  @Test
  public void testDeleteNothingAbsentLimit() throws IOException {
    File cacheDir = tmpDir.newFolder();
    File fileX = new File(cacheDir, "x");
    File fileY = new File(cacheDir, "y");
    File fileZ = new File(cacheDir, "z");
    dirArtifactCache = new DirArtifactCache(
        "dir",
        new ProjectFilesystem(tmpDir.getRoot().toPath()),
        Paths.get("."),
        /* doStore */ true,
        /* maxCacheSizeBytes */ Optional.<Long>absent());
    Files.write("x", fileX, Charsets.UTF_8);
    Files.write("y", fileY, Charsets.UTF_8);
    Files.write("z", fileZ, Charsets.UTF_8);
    assertEquals(3, cacheDir.listFiles().length);
    dirArtifactCache.deleteOldFiles();
    assertEquals(3, cacheDir.listFiles().length);
  }
  // Over the limit, eviction is by lastAccessTime: the two most recently accessed
  // files (w: 9000ms, z: 2000ms) survive; the older x and y are deleted.
  @Test
  public void testDeleteSome() throws IOException {
    File cacheDir = tmpDir.newFolder();
    File fileW = new File(cacheDir, "w");
    File fileX = new File(cacheDir, "x");
    File fileY = new File(cacheDir, "y");
    File fileZ = new File(cacheDir, "z");
    dirArtifactCache = new DirArtifactCache(
        "dir",
        new ProjectFilesystem(cacheDir.toPath()),
        Paths.get("."),
        /* doStore */ true,
        /* maxCacheSizeBytes */ Optional.of(2L));
    Files.write("w", fileW, Charsets.UTF_8);
    Files.write("x", fileX, Charsets.UTF_8);
    Files.write("y", fileY, Charsets.UTF_8);
    Files.write("z", fileZ, Charsets.UTF_8);
    java.nio.file.Files.setAttribute(fileW.toPath(), "lastAccessTime", FileTime.fromMillis(9000));
    java.nio.file.Files.setAttribute(fileX.toPath(), "lastAccessTime", FileTime.fromMillis(0));
    java.nio.file.Files.setAttribute(fileY.toPath(), "lastAccessTime", FileTime.fromMillis(1000));
    java.nio.file.Files.setAttribute(fileZ.toPath(), "lastAccessTime", FileTime.fromMillis(2000));
    assertEquals(4, cacheDir.listFiles().length);
    dirArtifactCache.deleteOldFiles();
    assertEquals(ImmutableSet.of(fileZ, fileW), ImmutableSet.copyOf(cacheDir.listFiles()));
  }
  // One artifact stored under two rule keys must be fetchable via either key.
  @Test
  public void testCacheStoreMultipleKeys() throws IOException {
    File cacheDir = tmpDir.newFolder();
    File fileX = tmpDir.newFile("x");
    fileHashCache =
        new FakeFileHashCache(
            ImmutableMap.of(fileX.toPath(), HashCode.fromInt(0)));
    dirArtifactCache = new DirArtifactCache(
        "dir",
        new ProjectFilesystem(cacheDir.toPath()),
        Paths.get("."),
        /* doStore */ true,
        /* maxCacheSizeBytes */ Optional.<Long>absent());
    Files.write("x", fileX, Charsets.UTF_8);
    RuleKey ruleKey1 = new RuleKey("aaaa");
    RuleKey ruleKey2 = new RuleKey("bbbb");
    dirArtifactCache.store(
        ImmutableSet.of(ruleKey1, ruleKey2),
        ImmutableMap.<String, String>of(),
        fileX);
    // Test that artifact is available via both keys.
    assertEquals(CacheResult.Type.HIT, dirArtifactCache.fetch(ruleKey1, fileX).getType());
    assertEquals(CacheResult.Type.HIT, dirArtifactCache.fetch(ruleKey2, fileX).getType());
  }
  // Metadata stored alongside an artifact round-trips through a fetch.
  @Test
  public void testCacheStoreAndFetchMetadata() throws IOException {
    FakeProjectFilesystem filesystem = new FakeProjectFilesystem();
    DirArtifactCache cache =
        new DirArtifactCache(
            "dir",
            filesystem,
            Paths.get("cache"),
            /* doStore */ true,
            /* maxCacheSizeBytes */ Optional.<Long>absent());
    RuleKey ruleKey = new RuleKey("0000");
    ImmutableMap<String, String> metadata = ImmutableMap.of("some", "metadata");
    // Create a dummy data file.
    Path data = Paths.get("data");
    filesystem.touch(data);
    // Store the artifact with metadata then re-fetch.
    cache.store(ImmutableSet.of(ruleKey), metadata, data.toFile());
    CacheResult result = cache.fetch(ruleKey, Paths.get("out-data").toFile());
    // Verify that the metadata is correct.
    assertThat(
        result.getType(),
        Matchers.equalTo(CacheResult.Type.HIT));
    assertThat(
        result.getMetadata(),
        Matchers.equalTo(metadata));
    cache.close();
  }
  // Fake rule whose rule key is derived from the wrapped file's path and hash,
  // so two instances over the same file compare equal.
  private static class BuildRuleForTest extends FakeBuildRule {
    @SuppressWarnings("PMD.UnusedPrivateField")
    @AddToRuleKey
    private final Path file;
    private BuildRuleForTest(File file) {
      super(
          BuildTarget.builder("//foo", file.getName()).build(),
          new SourcePathResolver(new BuildRuleResolver()));
      this.file = file.toPath();
    }
  }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.streamlookup;
import java.math.BigDecimal;
import java.text.DateFormat;
import java.util.Collections;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.RowSet;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.hash.ByteArrayHashIndex;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
/**
* Looks up information by first reading data into a hash table (in memory)
*
* TODO: add warning with conflicting types OR modify the lookup values to the input row type. (this is harder to do as
* currently we don't know the types)
*
* @author Matt
* @since 26-apr-2003
*/
public class StreamLookup extends BaseStep implements StepInterface {
private static Class<?> PKG = StreamLookupMeta.class; // for i18n purposes, needed by Translator2!!
private StreamLookupMeta meta;
private StreamLookupData data;
/**
 * Standard Kettle step constructor; no step-specific state is initialized here.
 *
 * @param stepMeta the step definition (name, placement, configuration)
 * @param stepDataInterface runtime data holder for this step copy
 * @param copyNr copy number of this step when running in parallel
 * @param transMeta the transformation definition this step belongs to
 * @param trans the running transformation
 */
public StreamLookup( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
    Trans trans ) {
    super( stepMeta, stepDataInterface, copyNr, transMeta, trans );
  }
/**
 * Pre-computes the per-field default ("null if") values stored in {@code data.nullIf},
 * used when a lookup finds no match. Each configured default string is converted to
 * the field's configured default type; conversion failures are silently ignored,
 * leaving that default at null. An unset (&lt;0) default type is normalized to string
 * — note this mutates the step metadata in place.
 */
private void handleNullIf() {
    data.nullIf = new Object[meta.getValue().length];
    for ( int i = 0; i < meta.getValue().length; i++ ) {
      if ( meta.getValueDefaultType()[i] < 0 ) {
        //CHECKSTYLE:Indentation:OFF
        meta.getValueDefaultType()[i] = ValueMetaInterface.TYPE_STRING;
      }
      data.nullIf[i] = null;
      switch ( meta.getValueDefaultType()[i] ) {
        case ValueMetaInterface.TYPE_STRING:
          if ( Const.isEmpty( meta.getValueDefault()[i] ) ) {
            data.nullIf[i] = null;
          } else {
            data.nullIf[i] = meta.getValueDefault()[i];
          }
          break;
        case ValueMetaInterface.TYPE_DATE:
          // NOTE(review): DateFormat.getInstance() is locale-dependent — presumably
          // the default date is expected in the JVM's locale format; confirm.
          try {
            data.nullIf[i] = DateFormat.getInstance().parse( meta.getValueDefault()[i] );
          } catch ( Exception e ) {
            // Ignore errors
          }
          break;
        case ValueMetaInterface.TYPE_NUMBER:
          try {
            data.nullIf[i] = Double.parseDouble( meta.getValueDefault()[i] );
          } catch ( Exception e ) {
            // Ignore errors
          }
          break;
        case ValueMetaInterface.TYPE_INTEGER:
          try {
            data.nullIf[i] = Long.parseLong( meta.getValueDefault()[i] );
          } catch ( Exception e ) {
            // Ignore errors
          }
          break;
        case ValueMetaInterface.TYPE_BOOLEAN:
          // Only "TRUE"/"Y" (any case) map to TRUE; every other value becomes FALSE.
          if ( "TRUE".equalsIgnoreCase( meta.getValueDefault()[i] )
            || "Y".equalsIgnoreCase( meta.getValueDefault()[i] ) ) {
            data.nullIf[i] = Boolean.TRUE;
          } else {
            data.nullIf[i] = Boolean.FALSE;
          }
          break;
        case ValueMetaInterface.TYPE_BIGNUMBER:
          try {
            data.nullIf[i] = new BigDecimal( meta.getValueDefault()[i] );
          } catch ( Exception e ) {
            // Ignore errors
          }
          break;
        default:
          // if a default value is given and no conversion is implemented throw an error
          if ( meta.getValueDefault()[i] != null && meta.getValueDefault()[i].trim().length() > 0 ) {
            throw new RuntimeException( BaseMessages.getString(
              PKG, "StreamLookup.Exception.ConversionNotImplemented" )
              + " " + ValueMeta.getTypeDesc( meta.getValueDefaultType()[i] ) );
          } else {
            // no default value given: just set it to null
            data.nullIf[i] = null;
            break;
          }
      }
    }
  }
/**
 * Reads every row from the lookup ("info") stream and loads keys/values into the
 * step's cache via {@code addToCache}. On the first row, field indexes and the
 * cache key/value metadata are resolved; key metadata is also snapshotted into
 * {@code data.keyTypes} for later type reconciliation in lookups.
 *
 * @return true when the lookup data was read (or the stream was empty); false
 *         when no lookup step was specified
 * @throws KettleException if a configured key or value field is missing from the
 *         lookup stream
 */
private boolean readLookupValues() throws KettleException {
    data.infoStream = meta.getStepIOMeta().getInfoStreams().get( 0 );
    if ( data.infoStream.getStepMeta() == null ) {
      logError( BaseMessages.getString( PKG, "StreamLookup.Log.NoLookupStepSpecified" ) );
      return false;
    }
    if ( log.isDetailed() ) {
      // NOTE(review): the trailing "]" suggests the i18n message text is expected to
      // end with "[" — confirm against the message bundle.
      logDetailed( BaseMessages.getString( PKG, "StreamLookup.Log.ReadingFromStream" )
        + data.infoStream.getStepname() + "]" );
    }
    int[] keyNrs = new int[meta.getKeylookup().length];
    int[] valueNrs = new int[meta.getValue().length];
    boolean firstRun = true;
    // Which row set do we read from?
    //
    RowSet rowSet = findInputRowSet( data.infoStream.getStepname() );
    Object[] rowData = getRowFrom( rowSet ); // rows are originating from "lookup_from"
    while ( rowData != null ) {
      if ( log.isRowLevel() ) {
        logRowlevel( BaseMessages.getString( PKG, "StreamLookup.Log.ReadLookupRow" )
          + rowSet.getRowMeta().getString( rowData ) );
      }
      if ( firstRun ) {
        // First row: resolve field indexes and build the cache key/value metadata.
        firstRun = false;
        data.hasLookupRows = true;
        data.infoMeta = rowSet.getRowMeta().clone();
        RowMetaInterface cacheKeyMeta = new RowMeta();
        RowMetaInterface cacheValueMeta = new RowMeta();
        // Look up the keys in the source rows
        for ( int i = 0; i < meta.getKeylookup().length; i++ ) {
          keyNrs[i] = rowSet.getRowMeta().indexOfValue( meta.getKeylookup()[i] );
          if ( keyNrs[i] < 0 ) {
            throw new KettleStepException( BaseMessages.getString(
              PKG, "StreamLookup.Exception.UnableToFindField", meta.getKeylookup()[i] ) );
          }
          cacheKeyMeta.addValueMeta( rowSet.getRowMeta().getValueMeta( keyNrs[i] ) );
        }
        // Save the data types of the keys to optionally convert input rows later on...
        if ( data.keyTypes == null ) {
          data.keyTypes = cacheKeyMeta.clone();
        }
        // Cache keys are stored as normal types, not binary
        for ( int i = 0; i < keyNrs.length; i++ ) {
          cacheKeyMeta.getValueMeta( i ).setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
        }
        for ( int v = 0; v < meta.getValue().length; v++ ) {
          valueNrs[v] = rowSet.getRowMeta().indexOfValue( meta.getValue()[v] );
          if ( valueNrs[v] < 0 ) {
            throw new KettleStepException( BaseMessages.getString(
              PKG, "StreamLookup.Exception.UnableToFindField", meta.getValue()[v] ) );
          }
          cacheValueMeta.addValueMeta( rowSet.getRowMeta().getValueMeta( valueNrs[v] ) );
        }
        data.cacheKeyMeta = cacheKeyMeta;
        data.cacheValueMeta = cacheValueMeta;
      }
      Object[] keyData = new Object[keyNrs.length];
      for ( int i = 0; i < keyNrs.length; i++ ) {
        ValueMetaInterface keyMeta = data.keyTypes.getValueMeta( i );
        // Convert keys to normal storage type
        keyData[i] = keyMeta.convertToNormalStorageType( rowData[keyNrs[i]] );
      }
      Object[] valueData = new Object[valueNrs.length];
      for ( int i = 0; i < valueNrs.length; i++ ) {
        // Store value as is, avoid preliminary binary->normal storage type conversion
        valueData[i] = rowData[valueNrs[i]];
      }
      addToCache( data.cacheKeyMeta, keyData, data.cacheValueMeta, valueData );
      rowData = getRowFrom( rowSet );
    }
    return true;
  }
/**
 * Performs one lookup for an incoming row and appends the looked-up values to it.
 * When no match is found, the pre-computed defaults from {@code data.nullIf} are
 * appended instead.
 *
 * @param rowMeta metadata of the incoming row
 * @param row the incoming row data
 * @return the input row extended with looked-up (or default) values, or null when
 *         the step has been stopped
 * @throws KettleException if a lookup column is missing, a key type conversion
 *         fails, or the step is configured without key fields
 */
private Object[] lookupValues( RowMetaInterface rowMeta, Object[] row ) throws KettleException {
    // See if we need to stop.
    if ( isStopped() ) {
      return null;
    }
    // Lazily resolve lookup field names to column indexes in the incoming row.
    if ( data.lookupColumnIndex == null ) {
      String[] names = data.lookupMeta.getFieldNames();
      data.lookupColumnIndex = new int[names.length];
      for ( int i = 0; i < names.length; i++ ) {
        data.lookupColumnIndex[i] = rowMeta.indexOfValue( names[i] );
        if ( data.lookupColumnIndex[i] < 0 ) {
          // we should not get here
          throw new KettleStepException( "The lookup column '" + names[i] + "' could not be found" );
        }
      }
    }
    // Copy value references to lookup table.
    //
    Object[] lu = new Object[data.keynrs.length];
    for ( int i = 0; i < data.keynrs.length; i++ ) {
      // If the input is binary storage data, we convert it to normal storage.
      //
      if ( data.convertKeysToNative[i] ) {
        lu[i] = data.lookupMeta.getValueMeta( i ).convertBinaryStringToNativeType( (byte[]) row[data.keynrs[i]] );
      } else {
        lu[i] = row[data.keynrs[i]];
      }
    }
    // Handle conflicting types (Number-Integer-String conversion to lookup type in hashtable)
    if ( data.keyTypes != null ) {
      for ( int i = 0; i < data.lookupMeta.size(); i++ ) {
        ValueMetaInterface inputValue = data.lookupMeta.getValueMeta( i );
        ValueMetaInterface lookupValue = data.keyTypes.getValueMeta( i );
        if ( inputValue.getType() != lookupValue.getType() ) {
          try {
            // Change the input value to match the lookup value
            //
            lu[i] = lookupValue.convertDataCompatible( inputValue, lu[i] );
          } catch ( KettleValueException e ) {
            throw new KettleStepException( "Error converting data while looking up value", e );
          }
        }
      }
    }
    Object[] add = null;
    if ( data.hasLookupRows ) {
      try {
        if ( meta.getKeystream().length > 0 ) {
          add = getFromCache( data.cacheKeyMeta, lu );
        } else {
          // Just take the first element in the hashtable...
          throw new KettleStepException( BaseMessages.getString( PKG, "StreamLookup.Log.GotRowWithoutKeys" ) );
        }
      } catch ( Exception e ) {
        throw new KettleStepException( e );
      }
    }
    if ( add == null ) { // nothing was found, unknown code: add the specified default value...
      add = data.nullIf;
    }
    return RowDataUtil.addRowData( row, rowMeta.size(), add );
  }
private void addToCache( RowMetaInterface keyMeta, Object[] keyData, RowMetaInterface valueMeta,
  Object[] valueData ) throws KettleValueException {
  // Stores one key/value pair in whichever cache structure is configured.
  if ( !meta.isMemoryPreservationActive() ) {
    // Plain mode: a regular map. Object[] has no usable equals/hashCode, so
    // the key is wrapped in a RowMetaAndData which carries the compare logic.
    data.look.put( new RowMetaAndData( keyMeta, keyData ), valueData );
    return;
  }
  if ( meta.isUsingSortedList() ) {
    // Memory-saving mode backed by a sorted list: binary-search for the key,
    // then overwrite an existing entry (Hashtable-like semantics) or insert
    // at the computed insertion point to keep the list sorted.
    KeyValue keyValue = new KeyValue( keyData, valueData );
    int idx = Collections.binarySearch( data.list, keyValue, data.comparator );
    if ( idx >= 0 ) {
      data.list.set( idx, keyValue );
    } else {
      data.list.add( -idx - 1, keyValue );
    }
    return;
  }
  if ( meta.isUsingIntegerPair() ) {
    // Long->Long index: only valid for a single Integer key and a single
    // Integer value. Verify the metadata once and fail fast otherwise.
    if ( !data.metadataVerifiedIntegerPair ) {
      data.metadataVerifiedIntegerPair = true;
      boolean singleIntegerPair =
        keyMeta.size() == 1 && valueMeta.size() == 1
          && keyMeta.getValueMeta( 0 ).isInteger() && valueMeta.getValueMeta( 0 ).isInteger();
      if ( !singleIntegerPair ) {
        throw new KettleValueException( BaseMessages.getString(
          PKG, "StreamLookup.Exception.CanNotUseIntegerPairAlgorithm" ) );
      }
    }
    data.longIndex.put( keyMeta.getInteger( keyData, 0 ), valueMeta.getInteger( valueData, 0 ) );
    return;
  }
  // Default memory-saving mode: a byte-array hash index, created on demand.
  data.hashIndex = data.hashIndex == null ? new ByteArrayHashIndex( keyMeta ) : data.hashIndex;
  data.hashIndex.put( RowMeta.extractData( keyMeta, keyData ), RowMeta.extractData( valueMeta, valueData ) );
}
private Object[] getFromCache( RowMetaInterface keyMeta, Object[] keyData ) throws KettleValueException {
  // Retrieves the value row for the given key from whichever cache structure
  // addToCache() populated, or null when the key is not present.
  if ( meta.isMemoryPreservationActive() ) {
    if ( meta.isUsingSortedList() ) {
      // Sorted-list cache: binary search on the key part only.
      KeyValue keyValue = new KeyValue( keyData, null );
      int idx = Collections.binarySearch( data.list, keyValue, data.comparator );
      if ( idx < 0 ) {
        return null; // nothing found
      }
      keyValue = data.list.get( idx );
      return keyValue.getValue();
    } else {
      if ( meta.isUsingIntegerPair() ) {
        // Long->Long index: single Integer key, single Integer value.
        Long value = data.longIndex.get( keyMeta.getInteger( keyData, 0 ) );
        if ( value == null ) {
          return null;
        }
        return new Object[] { value, };
      } else {
        try {
          // Byte-array hash index: keys and values are stored in serialized form.
          byte[] value = data.hashIndex.get( RowMeta.extractData( keyMeta, keyData ) );
          if ( value == null ) {
            return null;
          }
          return RowMeta.getRow( data.cacheValueMeta, value );
        } catch ( Exception e ) {
          // Propagate through the declared checked exception with the original
          // cause attached, instead of logging "Oops" and throwing a bare
          // RuntimeException.
          logError( "Error retrieving value data from the lookup hash index", e );
          throw new KettleValueException( "Error retrieving value data from the lookup hash index", e );
        }
      }
    }
  } else {
    // Plain mode: regular map keyed by RowMetaAndData.
    return data.look.get( new RowMetaAndData( keyMeta, keyData ) );
  }
}
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
  // Main row-processing entry point. On the first call the lookup stream is
  // read into the in-memory cache; afterwards each input row is looked up and
  // written to the output with the looked-up values appended.
  meta = (StreamLookupMeta) smi;
  data = (StreamLookupData) sdi;
  if ( data.readLookupValues ) {
    data.readLookupValues = false;
    // Read values in lookup table (look); abort the whole transformation if
    // the lookup stream cannot be read.
    if ( !readLookupValues() ) {
      logError( BaseMessages.getString( PKG, "StreamLookup.Log.UnableToReadDataFromLookupStream" ) );
      setErrors( 1 );
      stopAll();
      return false;
    }
    return true;
  }
  Object[] r = getRow(); // Get row from input rowset & set row busy!
  if ( r == null ) {
    // no more input to be expected...
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "StreamLookup.Log.StoppedProcessingWithEmpty", getLinesRead()
        + "" ) );
    }
    setOutputDone();
    return false;
  }
  if ( first ) {
    first = false;
    // First data row: resolve key field indexes in the input row, build the
    // lookup row metadata, and compute the output row metadata.
    data.keynrs = new int[meta.getKeystream().length];
    data.lookupMeta = new RowMeta();
    data.convertKeysToNative = new boolean[meta.getKeystream().length];
    for ( int i = 0; i < meta.getKeystream().length; i++ ) {
      // Find the keynr in the row (only once)
      data.keynrs[i] = getInputRowMeta().indexOfValue( meta.getKeystream()[i] );
      if ( data.keynrs[i] < 0 ) {
        throw new KettleStepException(
          BaseMessages
            .getString(
              PKG,
              "StreamLookup.Log.FieldNotFound", meta.getKeystream()[i], "" + getInputRowMeta().getString( r ) ) );
      } else {
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString(
            PKG, "StreamLookup.Log.FieldInfo", meta.getKeystream()[i], "" + data.keynrs[i] ) );
        }
      }
      data.lookupMeta.addValueMeta( getInputRowMeta().getValueMeta( data.keynrs[i] ).clone() );
      // If we have binary storage data coming in, we convert it to normal data storage.
      // The storage in the lookup data store is also normal data storage. TODO: enforce normal data storage??
      //
      data.convertKeysToNative[i] = getInputRowMeta().getValueMeta( data.keynrs[i] ).isStorageBinaryString();
    }
    data.outputRowMeta = getInputRowMeta().clone();
    meta.getFields(
      data.outputRowMeta, getStepname(), new RowMetaInterface[] { data.infoMeta }, null, this, repository,
      metaStore );
    // Handle the NULL values (not found...)
    handleNullIf();
  }
  Object[] outputRow = lookupValues( getInputRowMeta(), r ); // Do the actual lookup in the hashtable.
  if ( outputRow == null ) {
    setOutputDone(); // signal end to receiver(s)
    return false;
  }
  putRow( data.outputRowMeta, outputRow ); // copy row to output rowset(s);
  if ( checkFeedback( getLinesRead() ) ) {
    if ( log.isBasic() ) {
      logBasic( BaseMessages.getString( PKG, "StreamLookup.Log.LineNumber" ) + getLinesRead() );
    }
  }
  return true;
}
public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
  // Standard step initialization: keep references to the meta/data objects
  // and arm the one-time read of the lookup stream.
  meta = (StreamLookupMeta) smi;
  data = (StreamLookupData) sdi;
  boolean initialized = super.init( smi, sdi );
  if ( initialized ) {
    // processRow() will read the lookup stream on its first invocation.
    data.readLookupValues = true;
  }
  return initialized;
}
public void dispose( StepMetaInterface smi, StepDataInterface sdi ) {
  // Drop all references to the cached lookup structures before the step is
  // torn down so the potentially large in-memory data becomes eligible for
  // garbage collection immediately.
  data.look = null;
  data.list = null;
  data.hashIndex = null;
  data.longIndex = null;
  super.dispose( smi, sdi );
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.elasticbeanstalk.model;
import java.io.Serializable;
/**
* <p>
* Describes the settings for a configuration set.
* </p>
*/
/**
 * <p>
 * Describes the settings for a configuration set.
 * </p>
 */
public class CreateConfigurationTemplateResult implements Serializable,
        Cloneable {

    /** The name of the solution stack this configuration set uses. */
    private String solutionStackName;

    /** The name of the application associated with this configuration set. */
    private String applicationName;

    /**
     * If not <code>null</code>, the name of the configuration template for this
     * configuration set.
     */
    private String templateName;

    /** Describes this configuration set. */
    private String description;

    /**
     * If not <code>null</code>, the name of the environment for this
     * configuration set.
     */
    private String environmentName;

    /**
     * If this configuration set is associated with an environment, indicates
     * its deployment status: <code>null</code> (not associated with a running
     * environment), <code>pending</code> (draft in the process of deploying),
     * <code>deployed</code> (currently deployed to the associated running
     * environment) or <code>failed</code> (draft that failed to deploy).
     */
    private String deploymentStatus;

    /** The date (in UTC time) when this configuration set was created. */
    private java.util.Date dateCreated;

    /** The date (in UTC time) when this configuration set was last modified. */
    private java.util.Date dateUpdated;

    /**
     * A list of the configuration options and their values in this
     * configuration set.
     */
    private com.amazonaws.internal.SdkInternalList<ConfigurationOptionSetting> optionSettings;

    /**
     * Sets the name of the solution stack this configuration set uses.
     *
     * @param solutionStackName
     *        The name of the solution stack this configuration set uses.
     */
    public void setSolutionStackName(String solutionStackName) {
        this.solutionStackName = solutionStackName;
    }

    /**
     * @return The name of the solution stack this configuration set uses.
     */
    public String getSolutionStackName() {
        return this.solutionStackName;
    }

    /**
     * Fluent variant of {@link #setSolutionStackName(String)}.
     *
     * @param solutionStackName
     *        The name of the solution stack this configuration set uses.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateConfigurationTemplateResult withSolutionStackName(
            String solutionStackName) {
        setSolutionStackName(solutionStackName);
        return this;
    }

    /**
     * Sets the name of the application associated with this configuration set.
     *
     * @param applicationName
     *        The name of the application associated with this configuration
     *        set.
     */
    public void setApplicationName(String applicationName) {
        this.applicationName = applicationName;
    }

    /**
     * @return The name of the application associated with this configuration
     *         set.
     */
    public String getApplicationName() {
        return this.applicationName;
    }

    /**
     * Fluent variant of {@link #setApplicationName(String)}.
     *
     * @param applicationName
     *        The name of the application associated with this configuration
     *        set.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateConfigurationTemplateResult withApplicationName(
            String applicationName) {
        setApplicationName(applicationName);
        return this;
    }

    /**
     * Sets the name of the configuration template for this configuration set.
     *
     * @param templateName
     *        If not <code>null</code>, the name of the configuration template
     *        for this configuration set.
     */
    public void setTemplateName(String templateName) {
        this.templateName = templateName;
    }

    /**
     * @return If not <code>null</code>, the name of the configuration template
     *         for this configuration set.
     */
    public String getTemplateName() {
        return this.templateName;
    }

    /**
     * Fluent variant of {@link #setTemplateName(String)}.
     *
     * @param templateName
     *        If not <code>null</code>, the name of the configuration template
     *        for this configuration set.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateConfigurationTemplateResult withTemplateName(
            String templateName) {
        setTemplateName(templateName);
        return this;
    }

    /**
     * Sets the description of this configuration set.
     *
     * @param description
     *        Describes this configuration set.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * @return Describes this configuration set.
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * Fluent variant of {@link #setDescription(String)}.
     *
     * @param description
     *        Describes this configuration set.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateConfigurationTemplateResult withDescription(String description) {
        setDescription(description);
        return this;
    }

    /**
     * Sets the name of the environment for this configuration set.
     *
     * @param environmentName
     *        If not <code>null</code>, the name of the environment for this
     *        configuration set.
     */
    public void setEnvironmentName(String environmentName) {
        this.environmentName = environmentName;
    }

    /**
     * @return If not <code>null</code>, the name of the environment for this
     *         configuration set.
     */
    public String getEnvironmentName() {
        return this.environmentName;
    }

    /**
     * Fluent variant of {@link #setEnvironmentName(String)}.
     *
     * @param environmentName
     *        If not <code>null</code>, the name of the environment for this
     *        configuration set.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateConfigurationTemplateResult withEnvironmentName(
            String environmentName) {
        setEnvironmentName(environmentName);
        return this;
    }

    /**
     * Sets the deployment status of this configuration set. When the set is
     * associated with an environment the status is one of: <code>null</code>
     * (not associated with a running environment), <code>pending</code> (draft
     * in the process of deploying), <code>deployed</code> (currently deployed)
     * or <code>failed</code> (draft that failed to deploy).
     *
     * @param deploymentStatus
     *        The deployment status of this configuration set.
     * @see ConfigurationDeploymentStatus
     */
    public void setDeploymentStatus(String deploymentStatus) {
        this.deploymentStatus = deploymentStatus;
    }

    /**
     * @return The deployment status of this configuration set; see
     *         {@link #setDeploymentStatus(String)} for the possible values.
     * @see ConfigurationDeploymentStatus
     */
    public String getDeploymentStatus() {
        return this.deploymentStatus;
    }

    /**
     * Fluent variant of {@link #setDeploymentStatus(String)}.
     *
     * @param deploymentStatus
     *        The deployment status of this configuration set.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see ConfigurationDeploymentStatus
     */
    public CreateConfigurationTemplateResult withDeploymentStatus(
            String deploymentStatus) {
        setDeploymentStatus(deploymentStatus);
        return this;
    }

    /**
     * Enum overload of {@link #setDeploymentStatus(String)}; stores the
     * <code>toString()</code> form of the given status. (Note: this is a void
     * setter — use {@link #withDeploymentStatus(ConfigurationDeploymentStatus)}
     * for call chaining.)
     *
     * @param deploymentStatus
     *        The deployment status of this configuration set.
     * @see ConfigurationDeploymentStatus
     */
    public void setDeploymentStatus(
            ConfigurationDeploymentStatus deploymentStatus) {
        this.deploymentStatus = deploymentStatus.toString();
    }

    /**
     * Fluent variant of
     * {@link #setDeploymentStatus(ConfigurationDeploymentStatus)}.
     *
     * @param deploymentStatus
     *        The deployment status of this configuration set.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see ConfigurationDeploymentStatus
     */
    public CreateConfigurationTemplateResult withDeploymentStatus(
            ConfigurationDeploymentStatus deploymentStatus) {
        setDeploymentStatus(deploymentStatus);
        return this;
    }

    /**
     * Sets the date (in UTC time) when this configuration set was created.
     *
     * @param dateCreated
     *        The date (in UTC time) when this configuration set was created.
     */
    public void setDateCreated(java.util.Date dateCreated) {
        this.dateCreated = dateCreated;
    }

    /**
     * @return The date (in UTC time) when this configuration set was created.
     */
    public java.util.Date getDateCreated() {
        return this.dateCreated;
    }

    /**
     * Fluent variant of {@link #setDateCreated(java.util.Date)}.
     *
     * @param dateCreated
     *        The date (in UTC time) when this configuration set was created.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateConfigurationTemplateResult withDateCreated(
            java.util.Date dateCreated) {
        setDateCreated(dateCreated);
        return this;
    }

    /**
     * Sets the date (in UTC time) when this configuration set was last
     * modified.
     *
     * @param dateUpdated
     *        The date (in UTC time) when this configuration set was last
     *        modified.
     */
    public void setDateUpdated(java.util.Date dateUpdated) {
        this.dateUpdated = dateUpdated;
    }

    /**
     * @return The date (in UTC time) when this configuration set was last
     *         modified.
     */
    public java.util.Date getDateUpdated() {
        return this.dateUpdated;
    }

    /**
     * Fluent variant of {@link #setDateUpdated(java.util.Date)}.
     *
     * @param dateUpdated
     *        The date (in UTC time) when this configuration set was last
     *        modified.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateConfigurationTemplateResult withDateUpdated(
            java.util.Date dateUpdated) {
        setDateUpdated(dateUpdated);
        return this;
    }

    /**
     * Returns the configuration options and their values in this configuration
     * set, lazily initializing the backing list on first access (never returns
     * <code>null</code>).
     *
     * @return A list of the configuration options and their values in this
     *         configuration set.
     */
    public java.util.List<ConfigurationOptionSetting> getOptionSettings() {
        if (optionSettings == null) {
            optionSettings = new com.amazonaws.internal.SdkInternalList<ConfigurationOptionSetting>();
        }
        return optionSettings;
    }

    /**
     * Replaces the configuration options with a copy of the given collection
     * (or clears them when <code>null</code> is passed).
     *
     * @param optionSettings
     *        A list of the configuration options and their values in this
     *        configuration set.
     */
    public void setOptionSettings(
            java.util.Collection<ConfigurationOptionSetting> optionSettings) {
        if (optionSettings == null) {
            this.optionSettings = null;
            return;
        }
        this.optionSettings = new com.amazonaws.internal.SdkInternalList<ConfigurationOptionSetting>(
                optionSettings);
    }

    /**
     * Appends the given options to the existing list (if any). Use
     * {@link #setOptionSettings(java.util.Collection)} or
     * {@link #withOptionSettings(java.util.Collection)} if you want to override
     * the existing values.
     *
     * @param optionSettings
     *        A list of the configuration options and their values in this
     *        configuration set.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateConfigurationTemplateResult withOptionSettings(
            ConfigurationOptionSetting... optionSettings) {
        if (this.optionSettings == null) {
            setOptionSettings(new com.amazonaws.internal.SdkInternalList<ConfigurationOptionSetting>(
                    optionSettings.length));
        }
        for (ConfigurationOptionSetting ele : optionSettings) {
            this.optionSettings.add(ele);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setOptionSettings(java.util.Collection)};
     * replaces (does not append to) the existing values.
     *
     * @param optionSettings
     *        A list of the configuration options and their values in this
     *        configuration set.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateConfigurationTemplateResult withOptionSettings(
            java.util.Collection<ConfigurationOptionSetting> optionSettings) {
        setOptionSettings(optionSettings);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getSolutionStackName() != null)
            sb.append("SolutionStackName: " + getSolutionStackName() + ",");
        if (getApplicationName() != null)
            sb.append("ApplicationName: " + getApplicationName() + ",");
        if (getTemplateName() != null)
            sb.append("TemplateName: " + getTemplateName() + ",");
        if (getDescription() != null)
            sb.append("Description: " + getDescription() + ",");
        if (getEnvironmentName() != null)
            sb.append("EnvironmentName: " + getEnvironmentName() + ",");
        if (getDeploymentStatus() != null)
            sb.append("DeploymentStatus: " + getDeploymentStatus() + ",");
        if (getDateCreated() != null)
            sb.append("DateCreated: " + getDateCreated() + ",");
        if (getDateUpdated() != null)
            sb.append("DateUpdated: " + getDateUpdated() + ",");
        if (getOptionSettings() != null)
            sb.append("OptionSettings: " + getOptionSettings());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof CreateConfigurationTemplateResult))
            return false;
        CreateConfigurationTemplateResult other = (CreateConfigurationTemplateResult) obj;
        // Objects.equals collapses the former XOR-null + equals pattern for
        // each property into a single null-safe comparison.
        return java.util.Objects.equals(getSolutionStackName(),
                        other.getSolutionStackName())
                && java.util.Objects.equals(getApplicationName(),
                        other.getApplicationName())
                && java.util.Objects.equals(getTemplateName(),
                        other.getTemplateName())
                && java.util.Objects.equals(getDescription(),
                        other.getDescription())
                && java.util.Objects.equals(getEnvironmentName(),
                        other.getEnvironmentName())
                && java.util.Objects.equals(getDeploymentStatus(),
                        other.getDeploymentStatus())
                && java.util.Objects.equals(getDateCreated(),
                        other.getDateCreated())
                && java.util.Objects.equals(getDateUpdated(),
                        other.getDateUpdated())
                && java.util.Objects.equals(getOptionSettings(),
                        other.getOptionSettings());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seeded with 1) as
        // the previous hand-rolled loop, so hash values are unchanged.
        return java.util.Objects.hash(getSolutionStackName(),
                getApplicationName(), getTemplateName(), getDescription(),
                getEnvironmentName(), getDeploymentStatus(), getDateCreated(),
                getDateUpdated(), getOptionSettings());
    }

    @Override
    public CreateConfigurationTemplateResult clone() {
        try {
            return (CreateConfigurationTemplateResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright (c) 2014 Haixing Hu
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.github.haixing_hu.util.expand;
import java.lang.reflect.Array;
import javax.annotation.Nullable;
import javax.annotation.concurrent.ThreadSafe;
import com.github.haixing_hu.CommonsConfig;
import com.github.haixing_hu.config.Config;
/**
 * The {@link ExpansionPolicy} is used to calculate the capacity while expanding
 * the dynamic arrays.
 * <p>
 * The implementation MUST be thread-safe.
 * </p>
 *
 * @author Haixing Hu
 */
@ThreadSafe
public abstract class ExpansionPolicy {

  /**
   * The value of this property is the class name of the implementation of the
   * default {@link ExpansionPolicy}.
   * <table border="1">
   * <tr>
   * <th>Type</th>
   * <th>Count</th>
   * <th>Value</th>
   * <th>Required</th>
   * <th>Default</th>
   * <th>Range</th>
   * </tr>
   * <tr>
   * <td>class</td>
   * <td>1</td>
   * <td>the class name of the implementation of the default
   * {@link ExpansionPolicy}.</td>
   * <td>no</td>
   * <td>{@link MemorySavingExpansionPolicy#INSTANCE}</td>
   * <td></td>
   * </tr>
   * </table>
   */
  public static final String PROPERTY_DEFAULT = "com.github.haixing_hu.util.expand.ExpansionPolicy.default";

  /**
   * The value of this property is the initial capacity used by the
   * {@link ExpansionPolicy}.
   * <table border="1">
   * <tr>
   * <th>Type</th>
   * <th>Count</th>
   * <th>Value</th>
   * <th>Required</th>
   * <th>Default</th>
   * <th>Range</th>
   * </tr>
   * <tr>
   * <td>int</td>
   * <td>1</td>
   * <td>the initial capacity used by the {@link ExpansionPolicy}.</td>
   * <td>no</td>
   * <td>{@link #DEFAULT_INITIAL_CAPACITY}</td>
   * <td>[1, {@link Integer#MAX_VALUE}]</td>
   * </tr>
   * </table>
   *
   * @see #DEFAULT_INITIAL_CAPACITY
   */
  public static final String PROPERTY_INITIAL_CAPACITY = "com.github.haixing_hu.util.expand.ExpansionPolicy.initialCapacity";

  /**
   * The default value of the property {@link #PROPERTY_INITIAL_CAPACITY}.
   *
   * @see #PROPERTY_INITIAL_CAPACITY
   */
  public static final int DEFAULT_INITIAL_CAPACITY = 16;

  // Message used when a subclass violates the contracts of getNextCapacity()
  // or getShrinkCapacity(); see the checks in doExpand()/doShrink().
  private static final String INVALID_IMPLEMENTATION =
      "Invalid implementation of the abstract method.";

  // Lazily initialized singleton; volatile so the double-checked locking in
  // getDefault() is safe.
  private static volatile ExpansionPolicy defaultPolicy = null;

  /**
   * Gets the default {@link ExpansionPolicy}.
   * <p>
   * The instance is loaded lazily from the configuration property
   * {@link #PROPERTY_DEFAULT}, falling back to
   * {@link MemorySavingExpansionPolicy#INSTANCE}, using double-checked locking
   * on the volatile {@code defaultPolicy} field.
   *
   * @return the default {@link ExpansionPolicy}.
   */
  public static ExpansionPolicy getDefault() {
    if (defaultPolicy == null) {
      synchronized (ExpansionPolicy.class) {
        if (defaultPolicy == null) {
          final Config config = CommonsConfig.get();
          defaultPolicy = config.getInstance(PROPERTY_DEFAULT,
              MemorySavingExpansionPolicy.INSTANCE);
        }
      }
    }
    return defaultPolicy;
  }

  // Lazily initialized; negative means "not loaded yet". Volatile for the
  // double-checked locking in getInitialCapacity().
  private static volatile int initialCapacity = - 1;

  /**
   * Gets the suggested initial capacity.
   * <p>
   * Loaded lazily from {@link #PROPERTY_INITIAL_CAPACITY}, falling back to
   * {@link #DEFAULT_INITIAL_CAPACITY}.
   *
   * @return the suggested initial capacity.
   */
  public static int getInitialCapacity() {
    if (initialCapacity < 0) {
      synchronized (ExpansionPolicy.class) {
        if (initialCapacity < 0) {
          final Config config = CommonsConfig.get();
          initialCapacity = config.getInt(PROPERTY_INITIAL_CAPACITY,
              DEFAULT_INITIAL_CAPACITY);
        }
      }
    }
    return initialCapacity;
  }

  /**
   * Calculates the capacity of an expanded array.
   *
   * @param oldCapacity the current capacity of the array.
   * @param newLength the requested minimum length.
   * @return the new capacity, which MUST be at least {@code newLength};
   *     violations are detected in {@code doExpand()} and reported as a
   *     runtime exception.
   */
  public abstract int getNextCapacity(int oldCapacity, int newLength);

  /**
   * Tests whether an array should be shrunk.
   * <p>
   * NOTE(review): not invoked inside this class — presumably consulted by
   * callers before invoking {@code shrink()}; confirm with call sites.
   *
   * @param length the number of elements in use.
   * @param capacity the current capacity.
   * @return {@code true} if the array should be shrunk.
   */
  public abstract boolean needShrink(int length, int capacity);

  /**
   * Calculates the capacity of a shrunk array.
   *
   * @param length the number of elements in use.
   * @param capacity the current capacity.
   * @return the new capacity, which MUST lie in {@code [length, capacity]};
   *     violations are detected in {@code doShrink()} and reported as a
   *     runtime exception.
   */
  public abstract int getShrinkCapacity(int length, int capacity);

  /** Expands a {@code boolean} buffer to hold at least {@code newLength} elements. */
  public final boolean[] expand(@Nullable final boolean[] oldBuffer,
      final int oldLength, final int newLength) {
    return (boolean[]) doExpand(oldBuffer, oldLength, newLength, Boolean.TYPE);
  }

  /** Shrinks a {@code boolean} buffer according to {@link #getShrinkCapacity}. */
  public final boolean[] shrink(@Nullable final boolean[] oldBuffer,
      final int oldLength) {
    return (boolean[]) doShrink(oldBuffer, oldLength, Boolean.TYPE);
  }

  /** Resizes a {@code boolean} buffer to exactly {@code newLength} elements. */
  public final boolean[] resize(@Nullable final boolean[] oldBuffer,
      final int oldLength, final int newLength) {
    return (boolean[]) doResize(oldBuffer, oldLength, newLength, Boolean.TYPE);
  }

  /** Expands a {@code char} buffer to hold at least {@code newLength} elements. */
  public final char[] expand(@Nullable final char[] oldBuffer,
      final int oldLength, final int newLength) {
    return (char[]) doExpand(oldBuffer, oldLength, newLength, Character.TYPE);
  }

  /** Shrinks a {@code char} buffer according to {@link #getShrinkCapacity}. */
  public final char[] shrink(@Nullable final char[] oldBuffer,
      final int oldLength) {
    return (char[]) doShrink(oldBuffer, oldLength, Character.TYPE);
  }

  /** Resizes a {@code char} buffer to exactly {@code newLength} elements. */
  public final char[] resize(@Nullable final char[] oldBuffer,
      final int oldLength, final int newLength) {
    return (char[]) doResize(oldBuffer, oldLength, newLength, Character.TYPE);
  }

  /** Expands a {@code byte} buffer to hold at least {@code newLength} elements. */
  public final byte[] expand(@Nullable final byte[] oldBuffer,
      final int oldLength, final int newLength) {
    return (byte[]) doExpand(oldBuffer, oldLength, newLength, Byte.TYPE);
  }

  /** Shrinks a {@code byte} buffer according to {@link #getShrinkCapacity}. */
  public final byte[] shrink(@Nullable final byte[] oldBuffer,
      final int oldLength) {
    return (byte[]) doShrink(oldBuffer, oldLength, Byte.TYPE);
  }

  /** Resizes a {@code byte} buffer to exactly {@code newLength} elements. */
  public final byte[] resize(@Nullable final byte[] oldBuffer,
      final int oldLength, final int newLength) {
    return (byte[]) doResize(oldBuffer, oldLength, newLength, Byte.TYPE);
  }

  /** Expands a {@code short} buffer to hold at least {@code newLength} elements. */
  public final short[] expand(@Nullable final short[] oldBuffer,
      final int oldLength, final int newLength) {
    return (short[]) doExpand(oldBuffer, oldLength, newLength, Short.TYPE);
  }

  /** Shrinks a {@code short} buffer according to {@link #getShrinkCapacity}. */
  public final short[] shrink(@Nullable final short[] oldBuffer,
      final int oldLength) {
    return (short[]) doShrink(oldBuffer, oldLength, Short.TYPE);
  }

  /** Resizes a {@code short} buffer to exactly {@code newLength} elements. */
  public final short[] resize(@Nullable final short[] oldBuffer,
      final int oldLength, final int newLength) {
    return (short[]) doResize(oldBuffer, oldLength, newLength, Short.TYPE);
  }

  /** Expands an {@code int} buffer to hold at least {@code newLength} elements. */
  public final int[] expand(@Nullable final int[] oldBuffer,
      final int oldLength, final int newLength) {
    return (int[]) doExpand(oldBuffer, oldLength, newLength, Integer.TYPE);
  }

  /** Shrinks an {@code int} buffer according to {@link #getShrinkCapacity}. */
  public final int[] shrink(@Nullable final int[] oldBuffer, final int oldLength) {
    return (int[]) doShrink(oldBuffer, oldLength, Integer.TYPE);
  }

  /** Resizes an {@code int} buffer to exactly {@code newLength} elements. */
  public final int[] resize(@Nullable final int[] oldBuffer,
      final int oldLength, final int newLength) {
    return (int[]) doResize(oldBuffer, oldLength, newLength, Integer.TYPE);
  }

  /** Expands a {@code long} buffer to hold at least {@code newLength} elements. */
  public final long[] expand(@Nullable final long[] oldBuffer,
      final int oldLength, final int newLength) {
    return (long[]) doExpand(oldBuffer, oldLength, newLength, Long.TYPE);
  }

  /** Shrinks a {@code long} buffer according to {@link #getShrinkCapacity}. */
  public final long[] shrink(@Nullable final long[] oldBuffer,
      final int oldLength) {
    return (long[]) doShrink(oldBuffer, oldLength, Long.TYPE);
  }

  /** Resizes a {@code long} buffer to exactly {@code newLength} elements. */
  public final long[] resize(@Nullable final long[] oldBuffer,
      final int oldLength, final int newLength) {
    return (long[]) doResize(oldBuffer, oldLength, newLength, Long.TYPE);
  }

  /** Expands a {@code float} buffer to hold at least {@code newLength} elements. */
  public final float[] expand(@Nullable final float[] oldBuffer,
      final int oldLength, final int newLength) {
    return (float[]) doExpand(oldBuffer, oldLength, newLength, Float.TYPE);
  }

  /** Shrinks a {@code float} buffer according to {@link #getShrinkCapacity}. */
  public final float[] shrink(@Nullable final float[] oldBuffer,
      final int oldLength) {
    return (float[]) doShrink(oldBuffer, oldLength, Float.TYPE);
  }

  /** Resizes a {@code float} buffer to exactly {@code newLength} elements. */
  public final float[] resize(@Nullable final float[] oldBuffer,
      final int oldLength, final int newLength) {
    return (float[]) doResize(oldBuffer, oldLength, newLength, Float.TYPE);
  }

  /** Expands a {@code double} buffer to hold at least {@code newLength} elements. */
  public final double[] expand(@Nullable final double[] oldBuffer,
      final int oldLength, final int newLength) {
    return (double[]) doExpand(oldBuffer, oldLength, newLength, Double.TYPE);
  }

  /** Shrinks a {@code double} buffer according to {@link #getShrinkCapacity}. */
  public final double[] shrink(@Nullable final double[] oldBuffer,
      final int oldLength) {
    return (double[]) doShrink(oldBuffer, oldLength, Double.TYPE);
  }

  /** Resizes a {@code double} buffer to exactly {@code newLength} elements. */
  public final double[] resize(@Nullable final double[] oldBuffer,
      final int oldLength, final int newLength) {
    return (double[]) doResize(oldBuffer, oldLength, newLength, Double.TYPE);
  }

  /** Expands an object buffer to hold at least {@code newLength} elements. */
  @SuppressWarnings("unchecked")
  public final <T> T[] expand(@Nullable final T[] oldBuffer,
      final int oldLength, final int newLength, final Class<T> valueClass) {
    return (T[]) doExpand(oldBuffer, oldLength, newLength, valueClass);
  }

  /** Shrinks an object buffer according to {@link #getShrinkCapacity}. */
  @SuppressWarnings("unchecked")
  public final <T> T[] shrink(@Nullable final T[] oldBuffer,
      final int oldLength, final Class<T> valueClass) {
    return (T[]) doShrink(oldBuffer, oldLength, valueClass);
  }

  /** Resizes an object buffer to exactly {@code newLength} elements. */
  @SuppressWarnings("unchecked")
  public final <T> T[] resize(@Nullable final T[] oldBuffer,
      final int oldLength, final int newLength, final Class<T> valueClass) {
    return (T[]) doResize(oldBuffer, oldLength, newLength, valueClass);
  }

  /**
   * Shared expansion implementation for all element types.
   * <p>
   * Returns the old buffer untouched when its capacity already covers
   * {@code newLength}; otherwise allocates a buffer of
   * {@link #getNextCapacity} elements (which must be &gt;= {@code newLength},
   * enforced here) and copies the first {@code oldLength} elements into it. A
   * {@code null} old buffer yields a fresh array of exactly {@code newLength}
   * elements.
   */
  private final Object doExpand(@Nullable final Object oldBuffer,
      final int oldLength, final int newLength, final Class<?> valueClass) {
    if ((oldLength < 0) || (newLength < 0)) {
      throw new IndexOutOfBoundsException();
    }
    if (oldBuffer == null) {
      return Array.newInstance(valueClass, newLength);
    }
    final int oldCapacity = Array.getLength(oldBuffer);
    if (newLength <= oldCapacity) {
      return oldBuffer; // don't need to expand
    }
    final int newCapacity = getNextCapacity(oldCapacity, newLength);
    if (newCapacity < newLength) {
      // The subclass broke the getNextCapacity() contract.
      throw new RuntimeException(INVALID_IMPLEMENTATION);
    }
    final Object newBuffer = Array.newInstance(valueClass, newCapacity);
    if (oldLength > 0) {
      System.arraycopy(oldBuffer, 0, newBuffer, 0, oldLength);
    }
    return newBuffer;
  }

  /**
   * Shared shrink implementation for all element types.
   * <p>
   * Asks {@link #getShrinkCapacity} for the target capacity, which must lie in
   * {@code [oldLength, oldCapacity]} (enforced here). Returns the old buffer
   * when the capacity is unchanged; a {@code null} old buffer yields
   * {@code null}.
   */
  private final Object doShrink(@Nullable final Object oldBuffer,
      final int oldLength, final Class<?> valueClass) {
    if (oldLength < 0) {
      throw new IndexOutOfBoundsException();
    }
    if (oldBuffer == null) {
      return null;
    }
    final int oldCapacity = Array.getLength(oldBuffer);
    final int newCapacity = getShrinkCapacity(oldLength, oldCapacity);
    if (newCapacity == oldCapacity) {
      return oldBuffer;
    }
    if ((newCapacity < oldLength) || (newCapacity > oldCapacity)) {
      // The subclass broke the getShrinkCapacity() contract.
      throw new RuntimeException(INVALID_IMPLEMENTATION);
    }
    final Object newBuffer = Array.newInstance(valueClass, newCapacity);
    if (oldLength > 0) {
      System.arraycopy(oldBuffer, 0, newBuffer, 0, oldLength);
    }
    return newBuffer;
  }

  /**
   * Shared resize implementation for all element types.
   * <p>
   * Unlike expand/shrink this does not consult the policy: it reallocates to
   * exactly {@code newLength} elements, copying
   * {@code min(oldLength, newLength)} elements. Returns the old buffer when
   * its capacity already equals {@code newLength}; a {@code null} old buffer
   * yields a fresh array of {@code newLength} elements.
   */
  private final Object doResize(@Nullable final Object oldBuffer,
      final int oldLength, final int newLength, final Class<?> valueClass) {
    if ((oldLength < 0) || (newLength < 0)) {
      throw new IndexOutOfBoundsException();
    }
    if (oldBuffer == null) {
      return Array.newInstance(valueClass, newLength);
    }
    final int oldCapacity = Array.getLength(oldBuffer);
    if (newLength == oldCapacity) {
      return oldBuffer;
    } else {
      final Object newBuffer = Array.newInstance(valueClass, newLength);
      final int n = (oldLength < newLength ? oldLength : newLength);
      if (n > 0) {
        System.arraycopy(oldBuffer, 0, newBuffer, 0, n);
      }
      return newBuffer;
    }
  }
}
| |
package nl.rug.jbi.jsm.util;
import au.com.bytecode.opencsv.CSVWriter;
import com.google.common.base.Function;
import com.google.common.base.Objects;
import com.google.common.collect.*;
import nl.rug.jbi.jsm.core.calculator.MetricScope;
import java.io.Closeable;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.text.DecimalFormatSymbols;
import java.text.NumberFormat;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import static com.google.common.base.Preconditions.checkArgument;
/**
* Multi-file exporter for the metric results. Using the CSV-writer from the OpenCSV project, it will create a CSV file
* for each Metric->ResultList association. Mappings between the various Metrics, Scopes and their result files will be
* put in the unique file with identifier "Mapping".
*
* @author David van Leusen
* @see <a href="https://code.google.com/p/opencsv/">OpenCSV Project</a>
* @since 2014-05-28
*/
public class ResultsExporter implements Closeable {
    /** Maximum number of fraction digits printed for floating-point results. */
    private final static int DOUBLE_PRINT_PRECISION = 4;

    /** Writer for the Metric/Scope -> file mapping CSV. */
    private final CSVWriter mappingWriter;
    /** Absolute path of the mapping file, exposed via {@link #getMappingFileName()}. */
    private final String mappingFileName;
    /** Directory all output files are created in. */
    private final File container;
    /** File-name pattern containing a '%s' placeholder for the identifier. */
    private final String namePattern;
    /** Locale-fixed number format used to render numeric results. */
    private final NumberFormat nf;

    /**
     * Creates a new ResultsExporter that creates files using the given fileNamePattern. This pattern should include
     * the replacement token '%s' so the unique identifiers can be inserted. This token will be replaced by 'mapping' to
     * create the file containing the metrics to file mappings.
     *
     * @param fileNamePattern String pattern indicating the location results should be output to.
     * @throws IOException If the underlying CSVWriter throws an IOException or the mappings file
     *                     already exists.
     * @throws java.lang.IllegalArgumentException If the given fileNamePattern doesn't contain '%s'
     */
    public ResultsExporter(final String fileNamePattern) throws IOException {
        checkArgument(fileNamePattern != null);
        assert fileNamePattern != null;
        final File tmpFile = new File(fileNamePattern);
        // A pattern without a directory component has a null parent; fall back to
        // the working directory instead of NPE-ing in mkdirs() below.
        final File parent = tmpFile.getParentFile();
        this.container = (parent != null) ? parent : new File(".");
        this.namePattern = tmpFile.getName();
        checkArgument(this.namePattern.contains("%s"), "Name has to contain '%s' for identifier replacing.");
        this.container.mkdirs();
        final File mappingFile = getFileForName("Mapping", true); //Throw exception if this file already exists.
        this.mappingFileName = mappingFile.getAbsolutePath();
        this.mappingWriter = new CSVWriter(new FileWriter(mappingFile));
        this.mappingWriter.writeNext(new String[]{"Metric Identifier", "Scope", "Output File"});
        this.nf = NumberFormat.getNumberInstance(Locale.US);
        //Set double precision
        this.nf.setMinimumFractionDigits(0);
        this.nf.setMaximumFractionDigits(DOUBLE_PRINT_PRECISION);
    }

    /**
     * Renders a result cell: numbers go through the locale-fixed NumberFormat,
     * anything else via toString(); null becomes the literal "null".
     */
    private static String printObject(final Object o, final NumberFormat nf) {
        if (o instanceof Number) {
            return nf.format(((Number) o).doubleValue());
        } else {
            return o != null ? o.toString() : "null";
        }
    }

    /**
     * Resolves the output file for the given identifier inside the container directory.
     *
     * @param identifier lower-cased and substituted into the name pattern.
     * @param strict     when true, an existing file is an error; when false, a numeric
     *                   suffix (".1", ".2", ...) is appended until the name is free.
     * @throws IOException when strict and the file already exists.
     */
    private File getFileForName(final String identifier, final boolean strict) throws IOException {
        // Locale.ROOT keeps generated file names stable regardless of the default
        // locale (e.g. Turkish dotless-i lowercasing).
        final String fileName = String.format(namePattern, identifier.toLowerCase(Locale.ROOT));
        File selectedFile = new File(container, fileName);
        if (selectedFile.exists() && strict) {
            throw new IOException("File already exists: " + selectedFile);
        } else {
            int counter = 0;
            while (selectedFile.exists()) {
                selectedFile = new File(container, String.format("%s.%d", fileName, ++counter));
            }
        }
        return selectedFile;
    }

    /**
     * Exports a data set, creates a file to contain the results, writes the filename to the mapping file and then
     * write all the data from the given map to that file.
     *
     * @param metricClass Metric Class that is getting exported.
     * @param scope       Scope of the results getting exported.
     * @param results     Map with results for the given metric and scope.
     * @throws IOException If it gets thrown by the underlying CSVWriter
     */
    public void exportData(final Class metricClass, final MetricScope scope, final Map<String, Object> results)
            throws IOException {
        checkArgument(metricClass != null);
        assert metricClass != null;
        checkArgument(scope != null);
        assert scope != null;
        checkArgument(results != null);
        assert results != null;
        final File metricOutput = getFileForName(
                String.format("%s.%s", metricClass.getSimpleName(), scope),
                false
        );
        //Output mapping
        this.mappingWriter.writeNext(new String[]{metricClass.getName(), scope.toString(), metricOutput.toString()});
        this.mappingWriter.flush();
        //Write results to file.
        final CSVWriter writer = new CSVWriter(new FileWriter(metricOutput));
        try {
            writer.writeNext(new String[]{"Identifier", "Result"});
            //Map each identifier->result pair to an entry in the CSV file.
            writer.writeAll(
                    FluentIterable.from(results.entrySet())
                            .transform(new Function<Map.Entry<String, Object>, String[]>() {
                                @Override
                                public String[] apply(Map.Entry<String, Object> entry) {
                                    return new String[]{
                                            entry.getKey(),
                                            printObject(entry.getValue(), ResultsExporter.this.nf)
                                    };
                                }
                            })
                            .toList()
            );
        } finally {
            try {
                writer.close();
            } catch (IOException ignored) {
                //Prevent finally block from overriding exceptions
            }
        }
    }

    /**
     * Exports a data set collection, creates a file to contain the results, writes the filename to the mapping file
     * and then write all the data from the given table to that file.
     *
     * @param scope   Scope of the results getting exported.
     * @param results Map with results for the given scope.
     * @throws IOException If it gets thrown by the underlying CSVWriter
     */
    public void exportDataCollection(final MetricScope scope, final Table<String, Class, Object> results)
            throws IOException {
        checkArgument(scope != null);
        assert scope != null;
        checkArgument(results != null);
        assert results != null;
        final File metricOutput = getFileForName(scope.toString(), false);
        //Output mappings
        final String outputFileName = metricOutput.toString();
        // Snapshot the column set once so the header row and every data row use
        // the exact same column order.
        final ImmutableList<Class> columns = ImmutableList.copyOf(results.columnKeySet());
        for (final Class c : columns) {
            this.mappingWriter.writeNext(new String[]{c.getName(), scope.toString(), outputFileName});
        }
        this.mappingWriter.flush();
        //Write results
        final CSVWriter writer = new CSVWriter(new FileWriter(metricOutput));
        try {
            //Output headers
            final List<String> headers = Lists.newLinkedList();
            headers.add("Identifier");
            headers.addAll(Collections2.transform(columns, new Function<Class, String>() {
                @Override
                public String apply(Class aClass) {
                    return aClass.getSimpleName();
                }
            }));
            writer.writeNext(headers.toArray(new String[headers.size()]));
            //Output data
            final ImmutableList<String[]> mappedDataSet = FluentIterable.from(results.rowMap().entrySet())
                    .transform(new Function<Map.Entry<String, Map<Class, Object>>, String[]>() {
                        @Override
                        public String[] apply(Map.Entry<String, Map<Class, Object>> entry) {
                            // BUGFIX: size and fill the row from the full column list, not
                            // from this row's populated cells. Guava Table row maps omit
                            // absent cells, so a sparse row would otherwise shift its
                            // values left and misalign them with the header columns.
                            final String[] ret = new String[1 + columns.size()];
                            ret[0] = entry.getKey();
                            int idx = 1;
                            for (final Class c : columns) {
                                // Missing cells render as "null" via printObject().
                                ret[idx++] = printObject(entry.getValue().get(c), ResultsExporter.this.nf);
                            }
                            return ret;
                        }
                    })
                    .toList();
            writer.writeAll(mappedDataSet);
        } finally {
            try {
                writer.close();
            } catch (IOException ignored) {
                //Prevent finally block from overriding exceptions
            }
        }
    }

    /**
     * @return Relative location of the 'mapping' file
     */
    public String getMappingFileName() {
        return this.mappingFileName;
    }

    /** Closes the mapping writer; per-export writers are closed as they are used. */
    @Override
    public void close() throws IOException {
        this.mappingWriter.close();
    }

    @Override
    public String toString() {
        return Objects.toStringHelper(this)
                .add("container", container)
                .add("namePattern", namePattern)
                .toString();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.auth;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import com.google.common.base.*;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.concurrent.ScheduledExecutors;
import org.apache.cassandra.config.Config;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.config.Schema;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.cql3.statements.SelectStatement;
import org.apache.cassandra.db.ConsistencyLevel;
import org.apache.cassandra.db.marshal.UTF8Type;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.service.QueryState;
import org.apache.cassandra.transport.messages.ResultMessage;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.mindrot.jbcrypt.BCrypt;
/**
* Responsible for the creation, maintenance and deletion of roles
* for the purposes of authentication and authorization.
* Role data is stored internally, using the roles and role_members tables
* in the system_auth keyspace.
*
* Additionally, if org.apache.cassandra.auth.PasswordAuthenticator is used,
* encrypted passwords are also stored in the system_auth.roles table. This
* coupling between the IAuthenticator and IRoleManager implementations exists
* because setting a role's password via CQL is done with a CREATE ROLE or
* ALTER ROLE statement, the processing of which is handled by IRoleManager.
* As IAuthenticator is concerned only with credentials checking and has no
* means to modify passwords, PasswordAuthenticator depends on
* CassandraRoleManager for those functions.
*
* Alternative IAuthenticator implementations may be used in conjunction with
* CassandraRoleManager, but WITH PASSWORD = 'password' will not be supported
* in CREATE/ALTER ROLE statements.
*
* Such a configuration could be implemented using a custom IRoleManager that
* extends CassandraRoleManager and which includes Option.PASSWORD in the Set<Option>
* returned from supportedOptions/alterableOptions. Any additional processing
* of the password itself (such as storing it in an alternative location) would
* be added in overridden createRole and alterRole implementations.
*/
public class CassandraRoleManager implements IRoleManager
{
private static final Logger logger = LoggerFactory.getLogger(CassandraRoleManager.class);

// Name/password of the role created on a clean cluster (see setupDefaultRole())
// so an operator can log in for the first time.
static final String DEFAULT_SUPERUSER_NAME = "cassandra";
static final String DEFAULT_SUPERUSER_PASSWORD = "cassandra";
// Transform a row in the AuthKeyspace.ROLES to a Role instance. The member_of
// column may be absent (role with no granted roles); substitute an empty set.
private static final Function<UntypedResultSet.Row, Role> ROW_TO_ROLE = new Function<UntypedResultSet.Row, Role>()
{
    public Role apply(UntypedResultSet.Row row)
    {
        return new Role(row.getString("role"),
                        row.getBoolean("is_superuser"),
                        row.getBoolean("can_login"),
                        row.has("member_of") ? row.getSet("member_of", UTF8Type.instance)
                                             : Collections.<String>emptySet());
    }
};
// Name of the pre-roles system_auth table; also used to detect a mixed-version cluster.
public static final String LEGACY_USERS_TABLE = "users";
// Transform a row in the legacy system_auth.users table to a Role instance,
// used to fallback to previous schema on a mixed cluster during an upgrade.
// The can_login flag is hard-wired to true and membership to empty, as the
// legacy schema has no equivalents.
private static final Function<UntypedResultSet.Row, Role> LEGACY_ROW_TO_ROLE = new Function<UntypedResultSet.Row, Role>()
{
    public Role apply(UntypedResultSet.Row row)
    {
        return new Role(row.getString("name"),
                        row.getBoolean("super"),
                        true,
                        Collections.<String>emptySet());
    }
};
// 2 ** GENSALT_LOG2_ROUNDS rounds of hashing will be performed.
private static final String GENSALT_LOG2_ROUNDS_PROPERTY = Config.PROPERTY_PREFIX + "auth_bcrypt_gensalt_log2_rounds";
// Validated to [4, 31] by getGensaltLogRounds(); defaults to 10.
private static final int GENSALT_LOG2_ROUNDS = getGensaltLogRounds();
/**
 * Reads the bcrypt work-factor exponent from the system property
 * {@code GENSALT_LOG2_ROUNDS_PROPERTY} (default 10) and validates it is in
 * the [4, 31] range accepted by BCrypt.gensalt.
 *
 * @throws ConfigurationException if the configured value is out of range.
 */
static int getGensaltLogRounds()
{
    int rounds = Integer.getInteger(GENSALT_LOG2_ROUNDS_PROPERTY, 10);
    if (rounds < 4 || rounds > 31)
        // BUGFIX: the two concatenated message fragments previously joined with
        // no separator, producing "...-D<prop>.Please use...".
        throw new ConfigurationException(String.format("Bad value for system property -D%s. " +
                                                       "Please use a value between 4 and 31 inclusively",
                                                       GENSALT_LOG2_ROUNDS_PROPERTY));
    return rounds;
}
// NullObject returned when a supplied role name not found in AuthKeyspace.ROLES.
// getRole() always returns this singleton (never null) for missing roles, so
// callers can safely invoke methods on the result and may compare by identity.
private static final Role NULL_ROLE = new Role(null, false, false, Collections.<String>emptySet());

// Prepared in setup(): reads a single role from the roles table.
private SelectStatement loadRoleStatement;
// Prepared in setup() only while the legacy users table still exists.
private SelectStatement legacySelectUserStatement;

// Computed once in the constructor from the configured IAuthenticator.
private final Set<Option> supportedOptions;
private final Set<Option> alterableOptions;

// Will be set to true when all nodes in the cluster are on a version which supports roles (i.e. 2.2+)
private volatile boolean isClusterReady = false;
/**
 * Both option sets depend on whether the configured authenticator is exactly
 * PasswordAuthenticator: only then are passwords supported/alterable.
 */
public CassandraRoleManager()
{
    // Look the authenticator up once and use one comparison style for both
    // option sets (the original mixed '==' and '.equals' for the same check).
    boolean passwordAuthenticator =
        DatabaseDescriptor.getAuthenticator().getClass() == PasswordAuthenticator.class;
    supportedOptions = passwordAuthenticator
                     ? ImmutableSet.of(Option.LOGIN, Option.SUPERUSER, Option.PASSWORD)
                     : ImmutableSet.of(Option.LOGIN, Option.SUPERUSER);
    alterableOptions = passwordAuthenticator
                     ? ImmutableSet.of(Option.PASSWORD)
                     : ImmutableSet.<Option>of();
}
/**
 * Prepares the statements used to read role data and schedules the one-off
 * bootstrap work: either converting legacy (pre-roles) auth data or creating
 * the default superuser role.
 */
public void setup()
{
    loadRoleStatement = (SelectStatement) prepare("SELECT * from %s.%s WHERE role = ?",
                                                  AuthKeyspace.NAME,
                                                  AuthKeyspace.ROLES);
    // If the old users table exists, we may need to migrate the legacy authn
    // data to the new table. We also need to prepare a statement to read from
    // it, so we can continue to use the old tables while the cluster is upgraded.
    // Otherwise, we may need to create a default superuser role to enable others
    // to be added.
    // CONSISTENCY: use the LEGACY_USERS_TABLE constant instead of a bare
    // "users" literal (the constant exists for exactly this table name).
    if (Schema.instance.getCFMetaData(AuthKeyspace.NAME, LEGACY_USERS_TABLE) != null)
    {
        legacySelectUserStatement = (SelectStatement) prepare("SELECT * FROM %s.%s WHERE name = ?",
                                                              AuthKeyspace.NAME,
                                                              LEGACY_USERS_TABLE);
        scheduleSetupTask(() -> {
            convertLegacyData();
            return null;
        });
    }
    else
    {
        scheduleSetupTask(() -> {
            setupDefaultRole();
            return null;
        });
    }
}
// Options accepted in CREATE ROLE; computed once in the constructor.
public Set<Option> supportedOptions()
{
    return supportedOptions;
}

// Options accepted in ALTER ROLE; computed once in the constructor.
public Set<Option> alterableOptions()
{
    return alterableOptions;
}
// Inserts the new role row. The salted_hash column is only written when a
// password was supplied; escape() guards the single-quoted values against
// quote injection in the hand-built CQL.
public void createRole(AuthenticatedUser performer, RoleResource role, RoleOptions options)
throws RequestValidationException, RequestExecutionException
{
    String insertCql = options.getPassword().isPresent()
                     ? String.format("INSERT INTO %s.%s (role, is_superuser, can_login, salted_hash) VALUES ('%s', %s, %s, '%s')",
                                     AuthKeyspace.NAME,
                                     AuthKeyspace.ROLES,
                                     escape(role.getRoleName()),
                                     options.getSuperuser().or(false),
                                     options.getLogin().or(false),
                                     escape(hashpw(options.getPassword().get())))
                     : String.format("INSERT INTO %s.%s (role, is_superuser, can_login) VALUES ('%s', %s, %s)",
                                     AuthKeyspace.NAME,
                                     AuthKeyspace.ROLES,
                                     escape(role.getRoleName()),
                                     options.getSuperuser().or(false),
                                     options.getLogin().or(false));
    process(insertCql, consistencyForRole(role.getRoleName()));
}
// Deletes the role's row, then removes its membership records.
public void dropRole(AuthenticatedUser performer, RoleResource role) throws RequestValidationException, RequestExecutionException
{
    String roleName = role.getRoleName();
    String deleteCql = String.format("DELETE FROM %s.%s WHERE role = '%s'",
                                     AuthKeyspace.NAME,
                                     AuthKeyspace.ROLES,
                                     escape(roleName));
    process(deleteCql, consistencyForRole(roleName));
    removeAllMembers(roleName);
}
// Applies the supplied option changes to an existing role; a no-op when every
// option mapped to a null assignment.
public void alterRole(AuthenticatedUser performer, RoleResource role, RoleOptions options)
{
    // Unlike most of the other data access methods here, this does not use a
    // prepared statement in order to allow the set of assignments to be variable.
    String assignments = Joiner.on(',').join(Iterables.filter(optionsToAssignments(options.getOptions()),
                                                              Predicates.notNull()));
    if (!Strings.isNullOrEmpty(assignments))
    {
        process(String.format("UPDATE %s.%s SET %s WHERE role = '%s'",
                              AuthKeyspace.NAME,
                              AuthKeyspace.ROLES,
                              assignments,
                              escape(role.getRoleName())),
                consistencyForRole(role.getRoleName()));
    }
}
// Grants 'role' to 'grantee', recording the membership on both sides.
public void grantRole(AuthenticatedUser performer, RoleResource role, RoleResource grantee)
throws RequestValidationException, RequestExecutionException
{
    // Reject duplicate grants: grantee already has the role, directly or transitively.
    if (getRoles(grantee, true).contains(role))
        throw new InvalidRequestException(String.format("%s is a member of %s",
                                                        grantee.getRoleName(),
                                                        role.getRoleName()));
    // Reject grants that would create a cycle in the role graph.
    if (getRoles(role, true).contains(grantee))
        throw new InvalidRequestException(String.format("%s is a member of %s",
                                                        role.getRoleName(),
                                                        grantee.getRoleName()));
    // Record the grant in the grantee's member_of set, then in role_members.
    modifyRoleMembership(grantee.getRoleName(), role.getRoleName(), "+");
    process(String.format("INSERT INTO %s.%s (role, member) values ('%s', '%s')",
                          AuthKeyspace.NAME,
                          AuthKeyspace.ROLE_MEMBERS,
                          escape(role.getRoleName()),
                          escape(grantee.getRoleName())),
            consistencyForRole(role.getRoleName()));
}
// Revokes 'role' from 'revokee', removing the membership records on both sides.
public void revokeRole(AuthenticatedUser performer, RoleResource role, RoleResource revokee)
throws RequestValidationException, RequestExecutionException
{
    // Only direct membership can be revoked, hence includeInherited = false.
    if (!getRoles(revokee, false).contains(role))
        throw new InvalidRequestException(String.format("%s is not a member of %s",
                                                        revokee.getRoleName(),
                                                        role.getRoleName()));
    modifyRoleMembership(revokee.getRoleName(), role.getRoleName(), "-");
    process(String.format("DELETE FROM %s.%s WHERE role = '%s' and member = '%s'",
                          AuthKeyspace.NAME,
                          AuthKeyspace.ROLE_MEMBERS,
                          escape(role.getRoleName()),
                          escape(revokee.getRoleName())),
            consistencyForRole(role.getRoleName()));
}
/*
 * Returns the grantee itself plus all roles granted to it; with
 * includeInherited, roles granted transitively are included too. A role
 * unknown to the roles table yields the empty set.
 */
public Set<RoleResource> getRoles(RoleResource grantee, boolean includeInherited) throws RequestValidationException, RequestExecutionException
{
    Set<RoleResource> resolved = new HashSet<>();
    Role primary = getRole(grantee.getRoleName());
    if (primary.equals(NULL_ROLE))
        return resolved;
    resolved.add(RoleResource.role(primary.name));
    collectRoles(primary, resolved, includeInherited);
    return resolved;
}
// Reads every role name from the roles table at QUORUM and wraps each one in
// a RoleResource, returning an immutable set.
public Set<RoleResource> getAllRoles() throws RequestValidationException, RequestExecutionException
{
    UntypedResultSet rows = process(String.format("SELECT role from %s.%s", AuthKeyspace.NAME, AuthKeyspace.ROLES), ConsistencyLevel.QUORUM);
    ImmutableSet.Builder<RoleResource> builder = ImmutableSet.builder();
    for (UntypedResultSet.Row row : rows)
        builder.add(RoleResource.role(row.getString("role")));
    return builder.build();
}
// NULL_ROLE carries isSuper == false, so unknown roles report non-super.
public boolean isSuper(RoleResource role)
{
    return getRole(role.getRoleName()).isSuper;
}

// NULL_ROLE carries canLogin == false, so unknown roles cannot log in.
public boolean canLogin(RoleResource role)
{
    return getRole(role.getRoleName()).canLogin;
}

// Custom options are not supported by this role manager.
public Map<String, String> getCustomOptions(RoleResource role)
{
    return Collections.emptyMap();
}

// Identity comparison is safe: getRole() returns the NULL_ROLE singleton for
// missing roles.
public boolean isExistingRole(RoleResource role)
{
    return getRole(role.getRoleName()) != NULL_ROLE;
}
// The tables backing role data, which this role manager needs to keep intact.
public Set<? extends IResource> protectedResources()
{
    return ImmutableSet.of(DataResource.table(AuthKeyspace.NAME, AuthKeyspace.ROLES),
                           DataResource.table(AuthKeyspace.NAME, AuthKeyspace.ROLE_MEMBERS));
}

// This implementation has no configuration of its own to validate.
public void validateConfiguration() throws ConfigurationException
{
}
/*
 * Create the default superuser role to bootstrap role creation on a clean system. Preemptively
 * gives the role the default password so PasswordAuthenticator can be used to log in (if
 * configured)
 */
private static void setupDefaultRole()
{
    try
    {
        if (!hasExistingRoles())
        {
            QueryProcessor.process(String.format("INSERT INTO %s.%s (role, is_superuser, can_login, salted_hash) " +
                                                 "VALUES ('%s', true, true, '%s')",
                                                 AuthKeyspace.NAME,
                                                 AuthKeyspace.ROLES,
                                                 DEFAULT_SUPERUSER_NAME,
                                                 escape(hashpw(DEFAULT_SUPERUSER_PASSWORD))),
                                   consistencyForRole(DEFAULT_SUPERUSER_NAME));
            logger.info("Created default superuser role '{}'", DEFAULT_SUPERUSER_NAME);
        }
    }
    catch (RequestExecutionException e)
    {
        // Rethrow after logging: scheduleSetupTask() catches the failure and
        // re-schedules this setup for a later attempt.
        logger.warn("CassandraRoleManager skipped default role setup: some nodes were not ready");
        throw e;
    }
}
/*
 * Reports whether any role rows exist yet. Probes cheapest-first: the default
 * superuser row at ONE, then the same row at QUORUM, and finally a LIMIT 1
 * range query over the whole table at QUORUM.
 */
private static boolean hasExistingRoles() throws RequestExecutionException
{
    // Try looking up the 'cassandra' default role first, to avoid the range query if possible.
    String defaultSUQuery = String.format("SELECT * FROM %s.%s WHERE role = '%s'", AuthKeyspace.NAME, AuthKeyspace.ROLES, DEFAULT_SUPERUSER_NAME);
    String allUsersQuery = String.format("SELECT * FROM %s.%s LIMIT 1", AuthKeyspace.NAME, AuthKeyspace.ROLES);
    if (!QueryProcessor.process(defaultSUQuery, ConsistencyLevel.ONE).isEmpty())
        return true;
    if (!QueryProcessor.process(defaultSUQuery, ConsistencyLevel.QUORUM).isEmpty())
        return true;
    return !QueryProcessor.process(allUsersQuery, ConsistencyLevel.QUORUM).isEmpty();
}
/*
 * Schedules the given bootstrap task on the optionalTasks executor after
 * SUPERUSER_SETUP_DELAY ms. The task re-schedules itself until all nodes are
 * on 2.2+ and the task completes without throwing.
 */
private void scheduleSetupTask(final Callable<Void> setupTask)
{
    // The delay is to give the node a chance to see its peers before attempting the operation
    ScheduledExecutors.optionalTasks.schedule(new Runnable()
    {
        public void run()
        {
            // If not all nodes are on 2.2, we don't want to initialize the role manager as this will confuse 2.1
            // nodes (see CASSANDRA-9761 for details). So we re-schedule the setup for later, hoping that the upgrade
            // will be finished by then.
            if (!MessagingService.instance().areAllNodesAtLeast22())
            {
                logger.debug("Not all nodes are upgraded to a version that supports Roles yet, rescheduling setup task");
                scheduleSetupTask(setupTask);
                return;
            }
            // Flip readiness before running the task, so role reads work while it executes.
            isClusterReady = true;
            try
            {
                setupTask.call();
            }
            catch (Exception e)
            {
                logger.info("Setup task failed with error, rescheduling");
                scheduleSetupTask(setupTask);
            }
        }
    }, AuthKeyspace.SUPERUSER_SETUP_DELAY, TimeUnit.MILLISECONDS);
}
/*
 * Copy legacy auth data from the system_auth.users & system_auth.credentials tables to
 * the new system_auth.roles table. This setup is not performed if AllowAllAuthenticator
 * is configured (see Auth#setup).
 */
private void convertLegacyData() throws Exception
{
    try
    {
        // read old data at QUORUM as it may contain the data for the default superuser
        if (Schema.instance.getCFMetaData("system_auth", "users") != null)
        {
            logger.info("Converting legacy users");
            UntypedResultSet users = QueryProcessor.process("SELECT * FROM system_auth.users",
                                                            ConsistencyLevel.QUORUM);
            for (UntypedResultSet.Row row : users)
            {
                RoleOptions options = new RoleOptions();
                options.setOption(Option.SUPERUSER, row.getBoolean("super"));
                options.setOption(Option.LOGIN, true);
                // The performer argument is unused by this implementation's createRole.
                createRole(null, RoleResource.role(row.getString("name")), options);
            }
            logger.info("Completed conversion of legacy users");
        }
        if (Schema.instance.getCFMetaData("system_auth", "credentials") != null)
        {
            logger.info("Migrating legacy credentials data to new system table");
            UntypedResultSet credentials = QueryProcessor.process("SELECT * FROM system_auth.credentials",
                                                                  ConsistencyLevel.QUORUM);
            for (UntypedResultSet.Row row : credentials)
            {
                // Write the password directly into the table to avoid doubly encrypting it
                QueryProcessor.process(String.format("UPDATE %s.%s SET salted_hash = '%s' WHERE role = '%s'",
                                                     AuthKeyspace.NAME,
                                                     AuthKeyspace.ROLES,
                                                     row.getString("salted_hash"),
                                                     row.getString("username")),
                                       consistencyForRole(row.getString("username")));
            }
            logger.info("Completed conversion of legacy credentials");
        }
    }
    catch (Exception e)
    {
        // Rethrow so scheduleSetupTask() retries the conversion later.
        logger.info("Unable to complete conversion of legacy auth data (perhaps not enough nodes are upgraded yet). " +
                    "Conversion should not be considered complete");
        logger.debug("Conversion error", e);
        throw e;
    }
}
private CQLStatement prepare(String template, String keyspace, String table)
{
try
{
return QueryProcessor.parseStatement(String.format(template, keyspace, table)).prepare().statement;
}
catch (RequestValidationException e)
{
throw new AssertionError(e); // not supposed to happen
}
}
/*
* Retrieve all roles granted to the given role. includeInherited specifies
* whether to include only those roles granted directly or all inherited roles.
*/
private void collectRoles(Role role, Set<RoleResource> collected, boolean includeInherited) throws RequestValidationException, RequestExecutionException
{
for (String memberOf : role.memberOf)
{
Role granted = getRole(memberOf);
if (granted.equals(NULL_ROLE))
continue;
collected.add(RoleResource.role(granted.name));
if (includeInherited)
collectRoles(granted, collected, true);
}
}
/*
* Get a single Role instance given the role name. This never returns null, instead it
* uses the null object NULL_ROLE when a role with the given name cannot be found. So
* it's always safe to call methods on the returned object without risk of NPE.
*/
private Role getRole(String name)
{
try
{
// If it exists, try the legacy users table in case the cluster
// is in the process of being upgraded and so is running with mixed
// versions of the authn schema.
return (Schema.instance.getCFMetaData(AuthKeyspace.NAME, "users") != null)
? getRoleFromTable(name, legacySelectUserStatement, LEGACY_ROW_TO_ROLE)
: getRoleFromTable(name, loadRoleStatement, ROW_TO_ROLE);
}
catch (RequestExecutionException | RequestValidationException e)
{
throw new RuntimeException(e);
}
}
private Role getRoleFromTable(String name, SelectStatement statement, Function<UntypedResultSet.Row, Role> function)
throws RequestExecutionException, RequestValidationException
{
ResultMessage.Rows rows =
statement.execute(QueryState.forInternalCalls(),
QueryOptions.forInternalCalls(consistencyForRole(name),
Collections.singletonList(ByteBufferUtil.bytes(name))));
if (rows.result.isEmpty())
return NULL_ROLE;
return function.apply(UntypedResultSet.create(rows.result).one());
}
/*
* Adds or removes a role name from the membership list of an entry in the roles table table
* (adds if op is "+", removes if op is "-")
*/
private void modifyRoleMembership(String grantee, String role, String op)
throws RequestExecutionException
{
process(String.format("UPDATE %s.%s SET member_of = member_of %s {'%s'} WHERE role = '%s'",
AuthKeyspace.NAME,
AuthKeyspace.ROLES,
op,
escape(role),
escape(grantee)),
consistencyForRole(grantee));
}
/*
* Clear the membership list of the given role
*/
private void removeAllMembers(String role) throws RequestValidationException, RequestExecutionException
{
// Get the membership list of the the given role
UntypedResultSet rows = process(String.format("SELECT member FROM %s.%s WHERE role = '%s'",
AuthKeyspace.NAME,
AuthKeyspace.ROLE_MEMBERS,
escape(role)),
consistencyForRole(role));
if (rows.isEmpty())
return;
// Update each member in the list, removing this role from its own list of granted roles
for (UntypedResultSet.Row row : rows)
modifyRoleMembership(row.getString("member"), role, "-");
// Finally, remove the membership list for the dropped role
process(String.format("DELETE FROM %s.%s WHERE role = '%s'",
AuthKeyspace.NAME,
AuthKeyspace.ROLE_MEMBERS,
escape(role)),
consistencyForRole(role));
}
/*
* Convert a map of Options from a CREATE/ALTER statement into
* assignment clauses used to construct a CQL UPDATE statement
*/
private Iterable<String> optionsToAssignments(Map<Option, Object> options)
{
return Iterables.transform(
options.entrySet(),
new Function<Map.Entry<Option, Object>, String>()
{
public String apply(Map.Entry<Option, Object> entry)
{
switch (entry.getKey())
{
case LOGIN:
return String.format("can_login = %s", entry.getValue());
case SUPERUSER:
return String.format("is_superuser = %s", entry.getValue());
case PASSWORD:
return String.format("salted_hash = '%s'", escape(hashpw((String) entry.getValue())));
default:
return null;
}
}
});
}
protected static ConsistencyLevel consistencyForRole(String role)
{
if (role.equals(DEFAULT_SUPERUSER_NAME))
return ConsistencyLevel.QUORUM;
else
return ConsistencyLevel.LOCAL_ONE;
}
private static String hashpw(String password)
{
return BCrypt.hashpw(password, BCrypt.gensalt(GENSALT_LOG2_ROUNDS));
}
private static String escape(String name)
{
return StringUtils.replace(name, "'", "''");
}
/**
* Executes the provided query.
* This shouldn't be used during setup as this will directly return an error if the manager is not setup yet. Setup tasks
* should use QueryProcessor.process directly.
*/
private UntypedResultSet process(String query, ConsistencyLevel consistencyLevel) throws RequestValidationException, RequestExecutionException
{
if (!isClusterReady)
throw new InvalidRequestException("Cannot process role related query as the role manager isn't yet setup. "
+ "This is likely because some of nodes in the cluster are on version 2.1 or earlier. "
+ "You need to upgrade all nodes to Cassandra 2.2 or more to use roles.");
return QueryProcessor.process(query, consistencyLevel);
}
private static final class Role
{
private String name;
private final boolean isSuper;
private final boolean canLogin;
private Set<String> memberOf;
private Role(String name, boolean isSuper, boolean canLogin, Set<String> memberOf)
{
this.name = name;
this.isSuper = isSuper;
this.canLogin = canLogin;
this.memberOf = memberOf;
}
public boolean equals(Object o)
{
if (this == o)
return true;
if (!(o instanceof Role))
return false;
Role r = (Role) o;
return Objects.equal(name, r.name);
}
public int hashCode()
{
return Objects.hashCode(name);
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.zeppelin.jdbc;
import com.google.common.base.Joiner;
import com.mockrunner.jdbc.BasicJDBCTestCaseAdapter;
import jline.console.completer.ArgumentCompleter;
import jline.console.completer.Completer;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.sql.SQLException;
import java.util.*;
import static com.google.common.collect.Sets.newHashSet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* SQL completer unit tests
*/
/**
 * SQL completer unit tests.
 *
 * beforeTest() wires the completer with a small fixed catalog:
 * schemas prod_dds (tables financial_account, customer) and prod_emart
 * (table financial_account), plus a handful of SQL keywords.
 */
public class SqlCompleterTest extends BasicJDBCTestCaseAdapter {

  /**
   * Fluent helper that runs a completer at every cursor position in a range and
   * asserts that the produced candidate set matches the expectation.
   */
  public class CompleterTester {
    private Completer completer;

    private String buffer;
    private int fromCursor;
    private int toCursor;
    private Set<String> expectedCompletions;

    public CompleterTester(Completer completer) {
      this.completer = completer;
    }

    /** Sets the text buffer the completer will be asked to complete. */
    public CompleterTester buffer(String buffer) {
      this.buffer = buffer;
      return this;
    }

    /** Sets the first cursor position (inclusive) to test. */
    public CompleterTester from(int fromCursor) {
      this.fromCursor = fromCursor;
      return this;
    }

    /** Sets the last cursor position (inclusive) to test. */
    public CompleterTester to(int toCursor) {
      this.toCursor = toCursor;
      return this;
    }

    /** Sets the candidate set expected at every tested cursor position. */
    public CompleterTester expect(Set<String> expectedCompletions) {
      this.expectedCompletions = expectedCompletions;
      return this;
    }

    /** Runs the assertion for each cursor position in [fromCursor, toCursor]. */
    public void test() {
      for (int c = fromCursor; c <= toCursor; c++) {
        expectedCompletions(buffer, c, expectedCompletions);
      }
    }

    private void expectedCompletions(String buffer, int cursor, Set<String> expected) {
      ArrayList<CharSequence> candidates = new ArrayList<>();
      completer.complete(buffer, cursor, candidates);

      String explain = explain(buffer, cursor, candidates);
      logger.info(explain);

      assertEquals("Buffer [" + buffer.replace(" ", ".") + "] and Cursor[" + cursor + "] "
              + explain, expected, newHashSet(candidates));
    }

    /**
     * Renders a human-readable diagnostic: the buffer with whitespace shown as '.',
     * the cursor position wrapped in parentheses, positions beyond the buffer end
     * shown as '_', followed by the produced candidate list.
     */
    private String explain(String buffer, int cursor, ArrayList<CharSequence> candidates) {
      // StringBuilder: no synchronization needed for this method-local buffer.
      StringBuilder sb = new StringBuilder();
      for (int i = 0; i <= Math.max(cursor, buffer.length()); i++) {
        if (i == cursor) {
          sb.append("(");
        }
        if (i >= buffer.length()) {
          sb.append("_");
        } else {
          if (Character.isWhitespace(buffer.charAt(i))) {
            sb.append(".");
          } else {
            sb.append(buffer.charAt(i));
          }
        }
        if (i == cursor) {
          sb.append(")");
        }
      }
      sb.append(" >> [").append(Joiner.on(",").join(candidates)).append("]");
      return sb.toString();
    }
  }

  // SLF4J idiom: one static, final logger per class.
  private static final Logger logger = LoggerFactory.getLogger(SqlCompleterTest.class);

  private CompleterTester tester;

  private ArgumentCompleter.WhitespaceArgumentDelimiter delimiter =
          new ArgumentCompleter.WhitespaceArgumentDelimiter();

  private SqlCompleter sqlCompleter = new SqlCompleter();

  /** Registers the fixed test catalog (schemas, tables, columns, keywords). */
  @Before
  public void beforeTest() throws IOException, SQLException {
    Map<String, Set<String>> tables = new HashMap<>();
    Map<String, Set<String>> columns = new HashMap<>();
    Set<String> schemas = new HashSet<>();
    Set<String> keywords = new HashSet<>();

    keywords.add("SUM");
    keywords.add("SUBSTRING");
    keywords.add("SUBCLASS_ORIGIN");
    keywords.add("ORDER");
    keywords.add("SELECT");
    keywords.add("LIMIT");
    keywords.add("FROM");

    schemas.add("prod_dds");
    schemas.add("prod_emart");

    Set<String> prod_dds_tables = new HashSet<>();
    prod_dds_tables.add("financial_account");
    prod_dds_tables.add("customer");

    Set<String> prod_emart_tables = new HashSet<>();
    prod_emart_tables.add("financial_account");

    tables.put("prod_dds", prod_dds_tables);
    tables.put("prod_emart", prod_emart_tables);

    Set<String> prod_dds_financial_account_columns = new HashSet<>();
    prod_dds_financial_account_columns.add("account_rk");
    prod_dds_financial_account_columns.add("account_id");

    Set<String> prod_dds_customer_columns = new HashSet<>();
    prod_dds_customer_columns.add("customer_rk");
    prod_dds_customer_columns.add("name");
    prod_dds_customer_columns.add("birth_dt");

    Set<String> prod_emart_financial_account_columns = new HashSet<>();
    prod_emart_financial_account_columns.add("account_rk");
    prod_emart_financial_account_columns.add("balance_amt");

    columns.put("prod_dds.financial_account", prod_dds_financial_account_columns);
    columns.put("prod_dds.customer", prod_dds_customer_columns);
    columns.put("prod_emart.financial_account", prod_emart_financial_account_columns);

    sqlCompleter.init(schemas, tables, columns, keywords);

    tester = new CompleterTester(sqlCompleter);
  }

  @Test
  public void testFindAliasesInSQL_Simple(){
    String sql = "select * from prod_emart.financial_account a";
    Map<String, String> res = sqlCompleter.findAliasesInSQL(delimiter.delimit(sql, 0).getArguments());
    assertEquals(1, res.size());
    assertTrue(res.get("a").equals("prod_emart.financial_account"));
  }

  @Test
  public void testFindAliasesInSQL_Two(){
    String sql = "select * from prod_dds.financial_account a, prod_dds.customer b";
    Map<String, String> res = sqlCompleter.findAliasesInSQL(sqlCompleter.getSqlDelimiter().delimit(sql, 0).getArguments());
    assertEquals(2, res.size());
    assertTrue(res.get("a").equals("prod_dds.financial_account"));
    assertTrue(res.get("b").equals("prod_dds.customer"));
  }

  @Test
  public void testFindAliasesInSQL_WrongTables(){
    // Aliases for schema.table pairs that are not in the catalog are ignored.
    String sql = "select * from prod_ddsxx.financial_account a, prod_dds.customerxx b";
    Map<String, String> res = sqlCompleter.findAliasesInSQL(sqlCompleter.getSqlDelimiter().delimit(sql, 0).getArguments());
    assertEquals(0, res.size());
  }

  @Test
  public void testCompleteName_Empty() {
    // Empty buffer: every schema and every keyword is offered (2 + 7 = 9).
    String buffer = "";
    int cursor = 0;
    List<CharSequence> candidates = new ArrayList<>();
    Map<String, String> aliases = new HashMap<>();
    sqlCompleter.completeName(buffer, cursor, candidates, aliases, false);
    assertEquals(9, candidates.size());
    assertTrue(candidates.contains("prod_dds"));
    assertTrue(candidates.contains("prod_emart"));
    assertTrue(candidates.contains("SUM"));
    assertTrue(candidates.contains("SUBSTRING"));
    assertTrue(candidates.contains("SUBCLASS_ORIGIN"));
    assertTrue(candidates.contains("SELECT"));
    assertTrue(candidates.contains("ORDER"));
    assertTrue(candidates.contains("LIMIT"));
    assertTrue(candidates.contains("FROM"));
  }

  @Test
  public void testCompleteName_SimpleSchema() {
    String buffer = "prod_";
    int cursor = 3;
    List<CharSequence> candidates = new ArrayList<>();
    Map<String, String> aliases = new HashMap<>();
    sqlCompleter.completeName(buffer, cursor, candidates, aliases, false);
    assertEquals(2, candidates.size());
    assertTrue(candidates.contains("prod_dds"));
    assertTrue(candidates.contains("prod_emart"));
  }

  @Test
  public void testCompleteName_SimpleTable() {
    String buffer = "prod_dds.fin";
    int cursor = 11;
    List<CharSequence> candidates = new ArrayList<>();
    Map<String, String> aliases = new HashMap<>();
    sqlCompleter.completeName(buffer, cursor, candidates, aliases, false);
    assertEquals(1, candidates.size());
    assertTrue(candidates.contains("financial_account "));
  }

  @Test
  public void testCompleteName_SimpleColumn() {
    String buffer = "prod_dds.financial_account.acc";
    int cursor = 30;
    List<CharSequence> candidates = new ArrayList<>();
    Map<String, String> aliases = new HashMap<>();
    sqlCompleter.completeName(buffer, cursor, candidates, aliases, true);
    assertEquals(2, candidates.size());
    assertTrue(candidates.contains("account_rk"));
    assertTrue(candidates.contains("account_id"));
  }

  @Test
  public void testCompleteName_WithAlias() {
    String buffer = "a.acc";
    int cursor = 4;
    List<CharSequence> candidates = new ArrayList<>();
    Map<String, String> aliases = new HashMap<>();
    aliases.put("a", "prod_dds.financial_account");
    sqlCompleter.completeName(buffer, cursor, candidates, aliases, true);
    assertEquals(2, candidates.size());
    assertTrue(candidates.contains("account_rk"));
    assertTrue(candidates.contains("account_id"));
  }

  @Test
  public void testCompleteName_WithAliasAndPoint() {
    String buffer = "a.";
    int cursor = 2;
    List<CharSequence> candidates = new ArrayList<>();
    Map<String, String> aliases = new HashMap<>();
    aliases.put("a", "prod_dds.financial_account");
    sqlCompleter.completeName(buffer, cursor, candidates, aliases, true);
    assertEquals(2, candidates.size());
    assertTrue(candidates.contains("account_rk"));
    assertTrue(candidates.contains("account_id"));
  }

  // NOTE(review): this method is not annotated with @Test, so it never runs. It also
  // references schema "prod_v_emart", which beforeTest() does not register, so enabling
  // it as-is would fail — confirm the intended fixture before adding the annotation.
  public void testSchemaAndTable() {
    String buffer = "select * from prod_v_emart.fi";
    tester.buffer(buffer).from(15).to(26).expect(newHashSet("prod_v_emart ")).test();
    tester.buffer(buffer).from(27).to(29).expect(newHashSet("financial_account ")).test();
  }

  @Test
  public void testEdges() {
    String buffer = "  ORDER  ";
    tester.buffer(buffer).from(0).to(7).expect(newHashSet("ORDER ")).test();
    tester.buffer(buffer).from(8).to(15).expect(newHashSet("ORDER", "SUBCLASS_ORIGIN", "SUBSTRING",
            "prod_emart", "LIMIT", "SUM", "prod_dds", "SELECT", "FROM")).test();
  }

  @Test
  public void testMultipleWords() {
    String buffer = "SELE FRO LIM";
    tester.buffer(buffer).from(0).to(4).expect(newHashSet("SELECT ")).test();
    tester.buffer(buffer).from(5).to(8).expect(newHashSet("FROM ")).test();
    tester.buffer(buffer).from(9).to(12).expect(newHashSet("LIMIT ")).test();
  }

  @Test
  public void testMultiLineBuffer() {
    String buffer = " \n SELE\nFRO";
    tester.buffer(buffer).from(0).to(7).expect(newHashSet("SELECT ")).test();
    tester.buffer(buffer).from(8).to(11).expect(newHashSet("FROM ")).test();
  }

  @Test
  public void testMultipleCompletionSuggestions() {
    String buffer = "SU";
    tester.buffer(buffer).from(0).to(2).expect(newHashSet("SUBCLASS_ORIGIN", "SUM", "SUBSTRING"))
            .test();
  }

  @Test
  public void testSqlDelimiterCharacters() {
    assertTrue(sqlCompleter.getSqlDelimiter().isDelimiterChar("r,", 1));
    assertTrue(sqlCompleter.getSqlDelimiter().isDelimiterChar("SS,", 2));
    assertTrue(sqlCompleter.getSqlDelimiter().isDelimiterChar(",", 0));
    assertTrue(sqlCompleter.getSqlDelimiter().isDelimiterChar("ttt,", 3));
  }
}
| |
package com.saerasoft.caesium;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.util.Log;
/**
* Created by lymphatus on 02/10/15.
*/
//TODO Merge the two update methods in something smaller, too much code in common
//TODO Merge the two update methods in something smaller, too much code in common
public class DatabaseHelper extends SQLiteOpenHelper {

    //If you change the database schema, you must increment the database version.
    public static final int DATABASE_VERSION = 1;
    public static final String DATABASE_NAME = "Caesium.db";

    /* Helper definitions */
    //Types
    private static final String TEXT_TYPE = " TEXT";
    private static final String INT_TYPE = " INTEGER";
    //Punctuation
    private static final String COMMA_SEP = ",";
    //Milliseconds in one day, used by deleteUnhitImages
    private static final long ONE_DAY_MS = 24L * 60L * 60L * 1000L;

    /* Helper queries */
    //Create the DB
    private static final String SQL_CREATE_ENTRIES =
            "CREATE TABLE " + DatabaseContract.ImageEntry.TABLE_NAME + " (" +
                    DatabaseContract.ImageEntry._ID + INT_TYPE + " PRIMARY KEY AUTOINCREMENT" + COMMA_SEP +
                    DatabaseContract.ImageEntry.COLUMN_NAME_PATH + TEXT_TYPE + COMMA_SEP +
                    DatabaseContract.ImageEntry.COLUMN_NAME_HEADER + TEXT_TYPE + COMMA_SEP +
                    DatabaseContract.ImageEntry.COLUMN_NAME_SIZE + INT_TYPE + COMMA_SEP +
                    DatabaseContract.ImageEntry.COLUMN_NAME_WIDTH + INT_TYPE + COMMA_SEP +
                    DatabaseContract.ImageEntry.COLUMN_NAME_HEIGHT + INT_TYPE + COMMA_SEP +
                    DatabaseContract.ImageEntry.COLUMN_NAME_MIME + TEXT_TYPE + COMMA_SEP +
                    DatabaseContract.ImageEntry.COLUMN_NAME_TIMESTAMP + INT_TYPE + COMMA_SEP +
                    DatabaseContract.ImageEntry.COLUMN_NAME_NEW + INT_TYPE + COMMA_SEP +
                    DatabaseContract.ImageEntry.COLUMN_NAME_HIT_TIMESTAMP + INT_TYPE +
                    ")";

    //Delete the DB
    private static final String SQL_DELETE_ENTRIES = "DROP TABLE IF EXISTS " +
            DatabaseContract.ImageEntry.TABLE_NAME;

    public DatabaseHelper(Context context) {
        //Constructor
        super(context, DATABASE_NAME, null, DATABASE_VERSION);
    }

    /**
     * Builds a full ContentValues set for the given image, marking it as new and
     * stamping the hit timestamp with the current time in milliseconds.
     */
    private static ContentValues populateAllEntries(CImage cImage) {
        //Create a whole set of values
        ContentValues values = new ContentValues();
        values.put(DatabaseContract.ImageEntry.COLUMN_NAME_PATH, cImage.getPath());
        values.put(DatabaseContract.ImageEntry.COLUMN_NAME_HEADER, cImage.getHeader());
        values.put(DatabaseContract.ImageEntry.COLUMN_NAME_SIZE, cImage.getSize());
        values.put(DatabaseContract.ImageEntry.COLUMN_NAME_WIDTH, cImage.getWidth());
        values.put(DatabaseContract.ImageEntry.COLUMN_NAME_HEIGHT, cImage.getHeight());
        values.put(DatabaseContract.ImageEntry.COLUMN_NAME_MIME, cImage.getMimeType());
        values.put(DatabaseContract.ImageEntry.COLUMN_NAME_TIMESTAMP, cImage.getTimestamp());
        values.put(DatabaseContract.ImageEntry.COLUMN_NAME_NEW, 1);
        //Hit timestamps are millisecond epoch values (System.currentTimeMillis())
        values.put(DatabaseContract.ImageEntry.COLUMN_NAME_HIT_TIMESTAMP,
                System.currentTimeMillis());
        return values;
    }

    /**
     * Inserts the image as a new row.
     * @return the row ID of the newly inserted row, or -1 on error
     */
    public static long insertImageIntoDatabase(SQLiteDatabase db, CImage cImage) {
        //Create a whole new set of values
        ContentValues values = populateAllEntries(cImage);
        //Insert the new image into the database
        //The returning value is the ID of the new row
        return db.insert(DatabaseContract.ImageEntry.TABLE_NAME,
                null,
                values);
    }

    /**
     * Refreshes the hit timestamp of the row matching the given path and clears
     * its "new" flag (meaning the image has been compressed).
     */
    public static int hitImageRow(SQLiteDatabase db, String path, long timestamp) {
        /* The return value should be 1 for success
         * 0 means we didn't find a match by path
         * and > 1 means we have multiple rows with the same path,
         * something we should track if happens
         */
        //Put the new timestamp on the content values
        ContentValues values = new ContentValues();
        values.put(DatabaseContract.ImageEntry.COLUMN_NAME_HIT_TIMESTAMP,
                timestamp);
        //Tell the DB the image is not new (means compressed)
        values.put(DatabaseContract.ImageEntry.COLUMN_NAME_NEW,
                0);
        //We'll identify the row by the image path
        String selection = DatabaseContract.ImageEntry.COLUMN_NAME_PATH + " LIKE ?";
        //Path is the variable we need to pass to the selection
        String[] selectionArgs = {path};
        //Update the entry
        return db.update(DatabaseContract.ImageEntry.TABLE_NAME,
                values,
                selection,
                selectionArgs);
    }

    /**
     * Rewrites every column of the row matching the image's path with fresh values.
     */
    public static int updateImageInfo(SQLiteDatabase db, CImage cImage) {
        /* The return value should be 1 for success
         * 0 means we didn't find a match by path
         * and > 1 means we have multiple rows with the same path,
         * something we should track if happens
         */
        //Create a whole new set of values
        ContentValues values = populateAllEntries(cImage);
        //We'll identify the row by the image path
        String selection = DatabaseContract.ImageEntry.COLUMN_NAME_PATH + " LIKE ?";
        //Path is the variable we need to pass to the selection
        String[] selectionArgs = {cImage.getPath()};
        //Update the entry
        return db.update(DatabaseContract.ImageEntry.TABLE_NAME,
                values,
                selection,
                selectionArgs);
    }

    /* -- Start of the routine methods -- */

    /**
     * Deletes rows whose hit timestamp is older than one day.
     * @return the number of deleted items
     */
    public static int deleteUnhitImages(SQLiteDatabase db) {
        //TODO Decide a proper timespan for this
        //COLUMN_NAME_HIT_TIMESTAMP stores System.currentTimeMillis() values, so the
        //cutoff must be a millisecond epoch value too. The previous comparison
        //against date('now','-1 day') matched an INTEGER column against a TEXT
        //value; SQLite orders every integer below every string, so the selection
        //matched (and deleted) every row regardless of age.
        long cutoff = System.currentTimeMillis() - ONE_DAY_MS;
        String selection = DatabaseContract.ImageEntry.COLUMN_NAME_HIT_TIMESTAMP +
                " <= " + cutoff;
        return db.delete(DatabaseContract.ImageEntry.TABLE_NAME,
                selection,
                null);
    }

    /**
     * Classifies the image against the database:
     * NEW when no row matches its path, MODIFIED when the stored timestamp is older
     * than the image's, EQUAL otherwise. The cursor is always closed.
     */
    public static DatabaseType getDatabaseTypeOfImage(SQLiteDatabase db, CImage cImage) {
        //Get path and timestamp
        String[] projection = {
                DatabaseContract.ImageEntry.COLUMN_NAME_PATH,
                DatabaseContract.ImageEntry.COLUMN_NAME_TIMESTAMP,
        };
        //Use path as where clause
        String[] where = {cImage.getPath()};
        //Execute the query to get path and timestamp
        Cursor cursor = db.query(
                DatabaseContract.ImageEntry.TABLE_NAME, // The table to query
                projection, // The columns to return
                DatabaseContract.ImageEntry.COLUMN_NAME_PATH + " = ?", // The columns for the WHERE clause
                where, // The values for the WHERE clause
                null, // don't group the rows
                null, // don't filter by row groups
                null // The sort order
        );
        //try/finally guarantees the cursor is closed on every path, including
        //unexpected exceptions while reading it
        try {
            //Check if the cursor is not empty, meaning we have a hit
            if (cursor.moveToFirst()) {
                //The image already exists, check if it has been modified
                //(column 1 of the projection is the stored timestamp)
                if (cImage.getTimestamp() > cursor.getLong(1)) {
                    //The image timestamp is higher, image MODIFIED
                    Log.d("Database", "MODIFIED: " + cursor.getString(0));
                    return DatabaseType.MODIFIED;
                } else {
                    //Same (or less, but should not happen) timestamp, EQUAL image
                    Log.d("Database", "EQUAL: " + cursor.getString(0));
                    return DatabaseType.EQUAL;
                }
            } else {
                //The image does not exist in the database, it's NEW
                Log.d("Database", "NEW: " + cImage.getPath());
                return DatabaseType.NEW;
            }
        } finally {
            cursor.close();
        }
    }

    @Override
    public void onCreate(SQLiteDatabase db) {
        //Newly created database, exec the above query
        db.execSQL(SQL_CREATE_ENTRIES);
    }

    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        //TODO Check the method and onDowngrade too
        //Delete and create again the database upon upgrading
        db.execSQL(SQL_DELETE_ENTRIES);
        onCreate(db);
    }

    @Override
    public void onDowngrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        //Since we are deleting and recreating the database, we can use
        //the onUpgrade method
        onUpgrade(db, oldVersion, newVersion);
    }

    public static void deleteDatabase(SQLiteDatabase db) {
        //Delete all entries
        db.execSQL(SQL_DELETE_ENTRIES);
    }

    /* -- End of the routine methods -- */

    /* -- Helper methods -- */

    //Classification of an image relative to the stored rows
    public enum DatabaseType {
        NEW,
        MODIFIED,
        EQUAL
    }

    /**
     * Routine entry point: classifies the image and inserts/updates accordingly.
     * Cleaning is done while compressing.
     */
    public static void databaseRoutine(SQLiteDatabase db, CImage image, Boolean compression) {
        //Update fill and/or update each entry of the database according to the image
        switch (getDatabaseTypeOfImage(db, image)) {
            case NEW:
                //Image is completely fresh, use the insert
                Log.d("ImageScan", "NEW: " + image.getPath());
                //Only persist new images while a compression pass is running
                if (compression) {
                    DatabaseHelper.insertImageIntoDatabase(db, image);
                }
                break;
            case MODIFIED:
                //The file exists but was modified since last time
                DatabaseHelper.updateImageInfo(db, image);
                break;
            case EQUAL:
                //Same image, do nothing
                Log.d("ImageScan", "EQUAL: " + image.getPath());
                break;
            default:
                break;
        }
    }
    /* -- End of helper methods -- */
}
| |
package eu.marcocattaneo.rememberhere.business.controllers;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.ActivityCompat;
import android.util.Log;
import android.widget.Toast;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.common.api.Status;
import com.google.android.gms.location.Geofence;
import com.google.android.gms.location.GeofencingRequest;
import com.google.android.gms.location.LocationServices;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import eu.marcocattaneo.rememberhere.R;
import eu.marcocattaneo.rememberhere.business.callback.OnClientAPIListener;
import eu.marcocattaneo.rememberhere.business.receivers.GeofenceTransitionsIntentService;
public class ProximityController extends BaseController implements GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener {

    public static final String TAG = "ProximityController";
    public static final int RADIUS_METERS = 80;

    private PendingIntent mPendingIntent;
    private GoogleApiClient mGoogleApiClient;
    private OnClientAPIListener mCallback = null;

    public ProximityController(Context context, @Nullable OnClientAPIListener onClientAPIListener) {
        super(context);
        this.mCallback = onClientAPIListener;
    }

    public ProximityController(Context context) {
        super(context);
    }

    /**
     * Registers an ENTER geofence of RADIUS_METERS around the given coordinates and,
     * on success, persists the note through the DAO. Does nothing when the
     * fine-location permission has not been granted.
     */
    public void addPOI(final double latitude, final double longitude, final String note) {
        boolean granted = ActivityCompat.checkSelfPermission(getContext(),
                android.Manifest.permission.ACCESS_FINE_LOCATION) == PackageManager.PERMISSION_GRANTED;
        if (!granted) {
            return;
        }
        final String guid = UUID.randomUUID().toString();
        LocationServices.GeofencingApi.addGeofences(
                mGoogleApiClient,
                getGeofencingRequest(getGeofencingRequestList(guid, latitude, longitude)),
                getGeofencePendingIntent()
        ).setResultCallback(new ResultCallback<Status>() {
            @Override
            public void onResult(@NonNull Status status) {
                if (!status.isSuccess()) {
                    Toast.makeText(getContext(), R.string.err_insert, Toast.LENGTH_SHORT).show();
                    return;
                }
                getDao().create(guid, note, RADIUS_METERS, latitude, longitude);
            }
        });
    }

    /**
     * Start GoogleAPI
     * @param onClientAPIListener
     */
    public void onStartGoogleAPI(@Nullable OnClientAPIListener onClientAPIListener) {
        mCallback = onClientAPIListener;
        if (mGoogleApiClient != null) {
            // Client already built; nothing more to do.
            return;
        }
        mGoogleApiClient = new GoogleApiClient.Builder(getContext())
                .addApi(LocationServices.API)
                .addConnectionCallbacks(this)
                .addOnConnectionFailedListener(this)
                .build();
        mGoogleApiClient.connect();
    }

    /**
     * Start realm
     */
    @Override
    public void onStartRealm() {
        super.onStartRealm();
    }

    /**
     * Builds the single-element geofence list for a new POI.
     *
     * @return list containing one never-expiring ENTER geofence
     */
    private List<Geofence> getGeofencingRequestList(String guid, double latitude, double longitude) {
        Geofence fence = new Geofence.Builder()
                .setRequestId(guid)
                .setExpirationDuration(Geofence.NEVER_EXPIRE)
                .setCircularRegion(latitude, longitude, RADIUS_METERS)
                .setTransitionTypes(Geofence.GEOFENCE_TRANSITION_ENTER)
                .build();
        List<Geofence> fences = new ArrayList<>();
        fences.add(fence);
        return fences;
    }

    /**
     * Wraps the given geofences in a request triggered on ENTER.
     *
     * @param fences geofences to register
     * @return the built request
     */
    private GeofencingRequest getGeofencingRequest(List<Geofence> fences) {
        return new GeofencingRequest.Builder()
                .setInitialTrigger(GeofencingRequest.INITIAL_TRIGGER_ENTER)
                .addGeofences(fences)
                .build();
    }

    /**
     * Lazily creates (and then reuses) the PendingIntent that delivers geofence
     * transitions to GeofenceTransitionsIntentService.
     *
     * @return the cached or newly created PendingIntent
     */
    private PendingIntent getGeofencePendingIntent() {
        if (mPendingIntent == null) {
            Intent intent = new Intent(getContext(), GeofenceTransitionsIntentService.class);
            // FLAG_UPDATE_CURRENT yields the same pending intent for both
            // addGeofences() and removeGeofences().
            mPendingIntent = PendingIntent.getService(getContext(), 0, intent,
                    PendingIntent.FLAG_UPDATE_CURRENT);
        }
        return mPendingIntent;
    }

    @Override
    public void onConnected(@Nullable Bundle bundle) {
        // Forward the connected client to the optional listener.
        if (mCallback != null)
            mCallback.onConnect(mGoogleApiClient);
    }

    @Override
    public void onConnectionSuspended(int i) {
    }

    @Override
    public void onConnectionFailed(@NonNull ConnectionResult connectionResult) {
        if (mCallback != null)
            mCallback.onConnectionFail(connectionResult);
    }

    /**
     * Unregisters a single geofence by its request id; failures are only logged.
     */
    public void removeGeofence(String requestId) {
        if (mGoogleApiClient != null && mGoogleApiClient.isConnected()) {
            List<String> ids = new ArrayList<>();
            ids.add(requestId);
            LocationServices.GeofencingApi.removeGeofences(
                    mGoogleApiClient,
                    ids
            ).setResultCallback(new ResultCallback<Status>() {
                @Override
                public void onResult(@NonNull Status status) {
                    if (!status.isSuccess())
                        Log.d(TAG, "Errore rimozione geofence");
                }
            });
        }
    }

    /**
     * Stop controller
     */
    @Override
    public void onStop() {
        super.onStop();
        if (mGoogleApiClient != null)
            mGoogleApiClient.disconnect();
        mGoogleApiClient = null;
    }

    /**
     * Return Google API Client
     *
     * @return
     */
    public GoogleApiClient getGoogleApiClient() {
        return mGoogleApiClient;
    }
}
| |
package com.coffeelint.settings;
import com.coffeelint.CoffeeLintProjectComponent;
import com.coffeelint.cli.CoffeeLintFinder;
import com.coffeelint.cli.CoffeeLintRunner;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.execution.ExecutionException;
//import com.intellij.javascript.nodejs.NodeDetectionUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiManager;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.HyperlinkLabel;
import com.intellij.ui.TextFieldWithHistory;
import com.intellij.ui.TextFieldWithHistoryWithBrowseButton;
import com.intellij.util.NotNullProducer;
import com.intellij.util.ui.UIUtil;
import com.intellij.webcore.ui.SwingHelper;
import com.wix.nodejs.NodeDetectionUtil;
import com.wix.settings.ValidationInfo;
import com.wix.ui.PackagesNotificationPanel;
import com.wix.utils.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import java.awt.*;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
/**
 * Settings page for the CoffeeLint IntelliJ plugin.
 *
 * <p>Lets the user enable/disable the plugin, pick the coffeelint executable,
 * the node interpreter, an optional config file, a custom-rules directory and
 * a file-extension list. Paths are validated live (on every document change)
 * and errors are surfaced through a {@link PackagesNotificationPanel}.
 *
 * <p>NOTE(review): several field names still say "eslintrc" — this page was
 * presumably adapted from an ESLint plugin. The Swing fields are most likely
 * bound via the GUI-designer .form file, so renaming them here would break
 * the binding — confirm before any cleanup.
 */
public class CoffeeLintSettingsPage implements Configurable {
    /** Link text appended to validation error messages. */
    public static final String FIX_IT = "Fix it";
    public static final String HOW_TO_USE_COFFEE_LINT = "How to Use CoffeeLint";
    public static final String HOW_TO_USE_LINK = "https://github.com/idok/coffee-lint-plugin";
    protected Project project;
    // UI components; presumably instantiated by the GUI designer (.form), except
    // usageLink which is created in createUIComponents() below.
    private JCheckBox pluginEnabledCheckbox;
    private JTextField customRulesPathField;
    private JPanel panel;
    private JPanel errorPanel;
    private TextFieldWithHistoryWithBrowseButton coffeelintBinField;
    private TextFieldWithHistoryWithBrowseButton nodeInterpreterField;
    private TextFieldWithHistoryWithBrowseButton eslintrcFile;
    private JRadioButton searchForEslintrcInRadioButton;
    private JRadioButton useProjectEslintrcRadioButton;
    private HyperlinkLabel usageLink;
    private JLabel coffeeLintConfigFilePathLabel;
    private JLabel rulesDirectoryLabel;
    private JLabel pathToCoffeelintBinLabel;
    private JLabel nodeInterpreterLabel;
    private JLabel versionLabel;
    private JTextField extensionsField;
    //    private TextFieldWithHistoryWithBrowseButton rulesPathField;
    //    private JLabel rulesDirectoryLabel1;
    /** Panel that renders path-validation errors with a clickable "Fix it" link. */
    private final PackagesNotificationPanel packagesNotificationPanel;

    /**
     * Wires up field completion/browse dialogs, radio-button and checkbox
     * listeners, and re-validates the form on every text change.
     *
     * @param project the project this settings page configures
     */
    public CoffeeLintSettingsPage(@NotNull final Project project) {
        this.project = project;
        configLintBinField();
        configConfigFileField();
        //        configESLintRulesField();
        configNodeField();
        //        searchForEslintrcInRadioButton.addItemListener(new ItemListener() {
        //            public void itemStateChanged(ItemEvent e) {
        //                eslintrcFile.setEnabled(e.getStateChange() == ItemEvent.DESELECTED);
        //                System.out.println("searchForEslintrcInRadioButton: " + (e.getStateChange() == ItemEvent.SELECTED ? "checked" : "unchecked"));
        //            }
        //        });
        // Config-file field is only editable when "use project config" is chosen.
        useProjectEslintrcRadioButton.addItemListener(new ItemListener() {
            public void itemStateChanged(ItemEvent e) {
                eslintrcFile.setEnabled(e.getStateChange() == ItemEvent.SELECTED);
                //                System.out.println("useProjectEslintrcRadioButton: " + (e.getStateChange() == ItemEvent.SELECTED ? "checked" : "unchecked"));
            }
        });
        // Master enable toggle greys out every other control.
        pluginEnabledCheckbox.addItemListener(new ItemListener() {
            public void itemStateChanged(ItemEvent e) {
                boolean enabled = e.getStateChange() == ItemEvent.SELECTED;
                setEnabledState(enabled);
            }
        });
        this.packagesNotificationPanel = new PackagesNotificationPanel(project);
        //        GridConstraints gridConstraints = new GridConstraints(5, 1, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_BOTH,
        //                GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_WANT_GROW, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_WANT_GROW,
        //                null, new Dimension(250, 150), null);
        errorPanel.add(this.packagesNotificationPanel.getComponent(), BorderLayout.CENTER);
        // Re-validate the whole form whenever any path field changes.
        DocumentAdapter docAdp = new DocumentAdapter() {
            protected void textChanged(DocumentEvent e) {
                updateLaterInEDT();
            }
        };
        coffeelintBinField.getChildComponent().getTextEditor().getDocument().addDocumentListener(docAdp);
        eslintrcFile.getChildComponent().getTextEditor().getDocument().addDocumentListener(docAdp);
        nodeInterpreterField.getChildComponent().getTextEditor().getDocument().addDocumentListener(docAdp);
        //        rulesPathField.getChildComponent().getTextEditor().getDocument().addDocumentListener(docAdp);
        customRulesPathField.getDocument().addDocumentListener(docAdp);
    }

    /** Project base directory, used as the root for relative-path resolution. */
    private File getProjectPath() {
        return new File(project.getBaseDir().getPath());
    }

    /** Schedules {@link #update()} on the EDT (document events may fire off-EDT). */
    private void updateLaterInEDT() {
        UIUtil.invokeLaterIfNeeded(new Runnable() {
            public void run() {
                CoffeeLintSettingsPage.this.update();
            }
        });
    }

    /** Re-validates the form; must run on the dispatch thread. */
    private void update() {
        ApplicationManager.getApplication().assertIsDispatchThread();
        validate();
    }

    /** Enables/disables every control according to the master plugin checkbox. */
    private void setEnabledState(boolean enabled) {
        eslintrcFile.setEnabled(enabled);
        customRulesPathField.setEnabled(enabled);
        searchForEslintrcInRadioButton.setEnabled(enabled);
        useProjectEslintrcRadioButton.setEnabled(enabled);
        coffeelintBinField.setEnabled(enabled);
        nodeInterpreterField.setEnabled(enabled);
        coffeeLintConfigFilePathLabel.setEnabled(enabled);
        rulesDirectoryLabel.setEnabled(enabled);
        pathToCoffeelintBinLabel.setEnabled(enabled);
        nodeInterpreterLabel.setEnabled(enabled);
        extensionsField.setEnabled(enabled);
    }

    /**
     * Validates one browse-button field's path; on failure appends a
     * {@link ValidationInfo} carrying the given message to {@code errors}.
     */
    private void validateField(List<ValidationInfo> errors, TextFieldWithHistoryWithBrowseButton field, boolean allowEmpty, String message) {
        if (!validatePath(field.getChildComponent().getText(), allowEmpty)) {
            ValidationInfo error = new ValidationInfo(field.getChildComponent().getTextEditor(), message, FIX_IT);
            errors.add(error);
        }
    }

    /**
     * Validates all path fields and pushes the collected errors to the
     * notification panel. Skipped entirely when the plugin is disabled.
     * When everything is valid, also refreshes the version label.
     */
    private void validate() {
        if (!pluginEnabledCheckbox.isSelected()) {
            return;
        }
        List<ValidationInfo> errors = new ArrayList<ValidationInfo>();
        validateField(errors, coffeelintBinField, false, "Path to coffeelint is invalid {{LINK}}");
        validateField(errors, eslintrcFile, true, "Path to config file is invalid {{LINK}}"); //Please correct path to
        validateField(errors, nodeInterpreterField, false, "Path to node interpreter is invalid {{LINK}}");
        if (!validateDirectory(customRulesPathField.getText(), true)) {
            ValidationInfo error = new ValidationInfo(customRulesPathField, "Path to custom rules is invalid {{LINK}}", FIX_IT);
            errors.add(error);
        }
        if (errors.isEmpty()) {
            getVersion();
        }
        packagesNotificationPanel.processErrors(errors);
    }

    // Cache of the last settings used for a version probe, so we only shell out
    // to coffeelint when node path, executable path or cwd actually changed.
    private CoffeeLintRunner.CoffeeLintSettings settings;

    /** Runs `coffeelint --version` (via CoffeeLintRunner) and shows the result. */
    private void getVersion() {
        if (settings != null &&
                areEqual(nodeInterpreterField, settings.node) &&
                areEqual(coffeelintBinField, settings.executablePath) &&
                settings.cwd.equals(project.getBasePath())
                ) {
            return;
        }
        settings = new CoffeeLintRunner.CoffeeLintSettings();
        settings.node = nodeInterpreterField.getChildComponent().getText();
        settings.executablePath = coffeelintBinField.getChildComponent().getText();
        settings.cwd = project.getBasePath();
        try {
            String version = CoffeeLintRunner.runVersion(settings);
            versionLabel.setText(version.trim());
        } catch (ExecutionException e) {
            // NOTE(review): failure is swallowed with printStackTrace — consider
            // logging and/or surfacing this in the UI instead.
            e.printStackTrace();
        }
    }

    /**
     * Returns true when {@code path} points to an existing file; relative
     * paths are resolved against the project base directory.
     */
    private boolean validatePath(String path, boolean allowEmpty) {
        if (StringUtils.isEmpty(path)) {
            return allowEmpty;
        }
        File filePath = new File(path);
        if (filePath.isAbsolute()) {
            if (!filePath.exists() || !filePath.isFile()) {
                return false;
            }
        } else {
            VirtualFile child = project.getBaseDir().findFileByRelativePath(path);
            if (child == null || !child.exists() || child.isDirectory()) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns true when {@code path} points to an existing directory; relative
     * paths are resolved against the project base directory.
     */
    private boolean validateDirectory(String path, boolean allowEmpty) {
        if (StringUtils.isEmpty(path)) {
            return allowEmpty;
        }
        File filePath = new File(path);
        if (filePath.isAbsolute()) {
            if (!filePath.exists() || !filePath.isDirectory()) {
                return false;
            }
        } else {
            VirtualFile child = project.getBaseDir().findFileByRelativePath(path);
            if (child == null || !child.exists() || !child.isDirectory()) {
                return false;
            }
        }
        return true;
    }

    /** Applies common history-field defaults (unbounded history, zero min width). */
    private static TextFieldWithHistory configWithDefaults(TextFieldWithHistoryWithBrowseButton field) {
        TextFieldWithHistory textFieldWithHistory = field.getChildComponent();
        textFieldWithHistory.setHistorySize(-1);
        textFieldWithHistory.setMinimumAndPreferredWidth(0);
        return textFieldWithHistory;
    }

    /** Sets up completion + browse dialog for the coffeelint executable field. */
    private void configLintBinField() {
        configWithDefaults(coffeelintBinField);
        SwingHelper.addHistoryOnExpansion(coffeelintBinField.getChildComponent(), new NotNullProducer<List<String>>() {
            @NotNull
            public List<String> produce() {
                List<File> newFiles = CoffeeLintFinder.searchForCoffeeLintExe(getProjectPath());
                return FileUtils.toAbsolutePath(newFiles);
            }
        });
        SwingHelper.installFileCompletionAndBrowseDialog(project, coffeelintBinField, "Select CoffeeLint cli", FileChooserDescriptorFactory.createSingleFileNoJarsDescriptor());
    }

    //    private void configESLintRulesField() {
    //        TextFieldWithHistory textFieldWithHistory = rulesPathField.getChildComponent();
    //        SwingHelper.addHistoryOnExpansion(textFieldWithHistory, new NotNullProducer<List<String>>() {
    //            @NotNull
    //            public List<String> produce() {
    //                return CoffeeLintFinder.tryFindRulesAsString(getProjectPath());
    //            }
    //        });
    //        SwingHelper.installFileCompletionAndBrowseDialog(project, rulesPathField, "Select Built in rules", FileChooserDescriptorFactory.createSingleFileNoJarsDescriptor());
    //    }

    /** Sets up completion + browse dialog for the config-file field. */
    private void configConfigFileField() {
        TextFieldWithHistory textFieldWithHistory = configWithDefaults(eslintrcFile);
        SwingHelper.addHistoryOnExpansion(textFieldWithHistory, new NotNullProducer<List<String>>() {
            @NotNull
            public List<String> produce() {
                return CoffeeLintFinder.searchForConfigFiles(getProjectPath());
            }
        });
        SwingHelper.installFileCompletionAndBrowseDialog(project, eslintrcFile, "Select CoffeeLint config", FileChooserDescriptorFactory.createSingleFileNoJarsDescriptor());
    }

    /** Sets up completion + browse dialog for the node interpreter field. */
    private void configNodeField() {
        TextFieldWithHistory textFieldWithHistory = configWithDefaults(nodeInterpreterField);
        SwingHelper.addHistoryOnExpansion(textFieldWithHistory, new NotNullProducer<List<String>>() {
            @NotNull
            public List<String> produce() {
                List<File> newFiles = NodeDetectionUtil.listAllPossibleNodeInterpreters();
                return FileUtils.toAbsolutePath(newFiles);
            }
        });
        SwingHelper.installFileCompletionAndBrowseDialog(project, nodeInterpreterField, "Select Node interpreter", FileChooserDescriptorFactory.createSingleFileNoJarsDescriptor());
    }

    @Nls
    @Override
    public String getDisplayName() {
        return "CoffeeLint";
    }

    @Nullable
    @Override
    public String getHelpTopic() {
        return null;
    }

    @Nullable
    @Override
    public JComponent createComponent() {
        loadSettings();
        return panel;
    }

    /** True when the field's current text equals {@code value}. */
    private static boolean areEqual(TextFieldWithHistoryWithBrowseButton field, String value) {
        return field.getChildComponent().getText().equals(value);
    }

    @Override
    public boolean isModified() {
        Settings s = getSettings();
        return pluginEnabledCheckbox.isSelected() != s.pluginEnabled ||
                !areEqual(coffeelintBinField, s.lintExecutable) ||
                !areEqual(nodeInterpreterField, s.nodeInterpreter) ||
                //                treatAllLintIssuesCheckBox.isSelected() != s.treatAllIssuesAsWarnings ||
                !customRulesPathField.getText().equals(s.rulesPath) ||
                !extensionsField.getText().equals(s.extensions) ||
                //                !areEqual(rulesPathField, s.builtinRulesPath) ||
                !getConfigFile().equals(s.configFile);
    }

    /** Config-file path, or "" when "search for config" mode is selected. */
    private String getConfigFile() {
        return useProjectEslintrcRadioButton.isSelected() ? eslintrcFile.getChildComponent().getText() : "";
    }

    @Override
    public void apply() throws ConfigurationException {
        saveSettings();
        // Drop resolve caches so annotators pick up the new settings immediately.
        PsiManager.getInstance(project).dropResolveCaches();
    }

    /** Persists the UI state to {@link Settings} and restarts code analysis. */
    protected void saveSettings() {
        Settings settings = getSettings();
        settings.pluginEnabled = pluginEnabledCheckbox.isSelected();
        settings.lintExecutable = coffeelintBinField.getChildComponent().getText();
        settings.nodeInterpreter = nodeInterpreterField.getChildComponent().getText();
        settings.configFile = getConfigFile();
        settings.rulesPath = customRulesPathField.getText();
        settings.extensions = extensionsField.getText();
        //        settings.builtinRulesPath = rulesPathField.getChildComponent().getText();
        //        settings.treatAllIssuesAsWarnings = treatAllLintIssuesCheckBox.isSelected();
        project.getComponent(CoffeeLintProjectComponent.class).validateSettings();
        DaemonCodeAnalyzer.getInstance(project).restart();
    }

    /** Populates the UI from the persisted {@link Settings}. */
    protected void loadSettings() {
        Settings settings = getSettings();
        pluginEnabledCheckbox.setSelected(settings.pluginEnabled);
        coffeelintBinField.getChildComponent().setText(settings.lintExecutable);
        eslintrcFile.getChildComponent().setText(settings.configFile);
        nodeInterpreterField.getChildComponent().setText(settings.nodeInterpreter);
        customRulesPathField.setText(settings.rulesPath);
        extensionsField.setText(settings.extensions);
        //        rulesPathField.getChildComponent().setText(settings.builtinRulesPath);
        // A non-empty config path implies "use project config" mode.
        useProjectEslintrcRadioButton.setSelected(StringUtils.isNotEmpty(settings.configFile));
        searchForEslintrcInRadioButton.setSelected(StringUtils.isEmpty(settings.configFile));
        eslintrcFile.setEnabled(useProjectEslintrcRadioButton.isSelected());
        //        treatAllLintIssuesCheckBox.setSelected(settings.treatAllIssuesAsWarnings);
        setEnabledState(settings.pluginEnabled);
    }

    @Override
    public void reset() {
        loadSettings();
    }

    @Override
    public void disposeUIResources() {
    }

    protected Settings getSettings() {
        return Settings.getInstance(project);
    }

    /** GUI-designer hook for components created manually rather than by the .form. */
    private void createUIComponents() {
        // TODO: place custom component creation code here
        usageLink = SwingHelper.createWebHyperlink(HOW_TO_USE_COFFEE_LINT, HOW_TO_USE_LINK);
    }
}
| |
/**
* <copyright>
*
* Copyright (c) 2010 SAP AG.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Reiner Hille-Doering (SAP AG) - initial API and implementation and/or initial documentation
*
* </copyright>
*
*/
package org.eclipse.bpmn2.di.util;
import java.util.List;
import org.eclipse.bpmn2.di.*;
import org.eclipse.bpmn2.di.BPMNDiagram;
import org.eclipse.bpmn2.di.BPMNEdge;
import org.eclipse.bpmn2.di.BPMNLabel;
import org.eclipse.bpmn2.di.BPMNLabelStyle;
import org.eclipse.bpmn2.di.BPMNPlane;
import org.eclipse.bpmn2.di.BPMNShape;
import org.eclipse.bpmn2.di.BpmnDiPackage;
import org.eclipse.bpmn2.di.DocumentRoot;
import org.eclipse.dd.di.Diagram;
import org.eclipse.dd.di.DiagramElement;
import org.eclipse.dd.di.Edge;
import org.eclipse.dd.di.Label;
import org.eclipse.dd.di.LabeledEdge;
import org.eclipse.dd.di.LabeledShape;
import org.eclipse.dd.di.Node;
import org.eclipse.dd.di.Plane;
import org.eclipse.dd.di.Shape;
import org.eclipse.dd.di.Style;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EObject;
/**
* <!-- begin-user-doc -->
* The <b>Switch</b> for the model's inheritance hierarchy.
* It supports the call {@link #doSwitch(EObject) doSwitch(object)}
* to invoke the <code>caseXXX</code> method for each class of the model,
* starting with the actual class of the object
* and proceeding up the inheritance hierarchy
* until a non-null result is returned,
* which is the result of the switch.
* <!-- end-user-doc -->
* @see org.eclipse.bpmn2.di.BpmnDiPackage
* @generated
*/
public class BpmnDiSwitch<T> {
/**
* The cached model package
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected static BpmnDiPackage modelPackage;
/**
* Creates an instance of the switch.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public BpmnDiSwitch() {
if (modelPackage == null) {
modelPackage = BpmnDiPackage.eINSTANCE;
}
}
/**
* Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the first non-null result returned by a <code>caseXXX</code> call.
* @generated
*/
public T doSwitch(EObject theEObject) {
return doSwitch(theEObject.eClass(), theEObject);
}
/**
* Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the first non-null result returned by a <code>caseXXX</code> call.
* @generated
*/
protected T doSwitch(EClass theEClass, EObject theEObject) {
if (theEClass.eContainer() == modelPackage) {
return doSwitch(theEClass.getClassifierID(), theEObject);
} else {
List<EClass> eSuperTypes = theEClass.getESuperTypes();
return eSuperTypes.isEmpty() ? defaultCase(theEObject) : doSwitch(eSuperTypes.get(0),
theEObject);
}
}
/**
* Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the first non-null result returned by a <code>caseXXX</code> call.
* @generated
*/
protected T doSwitch(int classifierID, EObject theEObject) {
switch (classifierID) {
case BpmnDiPackage.DOCUMENT_ROOT: {
DocumentRoot documentRoot = (DocumentRoot) theEObject;
T result = caseDocumentRoot(documentRoot);
if (result == null)
result = defaultCase(theEObject);
return result;
}
case BpmnDiPackage.BPMN_DIAGRAM: {
BPMNDiagram bpmnDiagram = (BPMNDiagram) theEObject;
T result = caseBPMNDiagram(bpmnDiagram);
if (result == null)
result = caseDiagram(bpmnDiagram);
if (result == null)
result = defaultCase(theEObject);
return result;
}
case BpmnDiPackage.BPMN_EDGE: {
BPMNEdge bpmnEdge = (BPMNEdge) theEObject;
T result = caseBPMNEdge(bpmnEdge);
if (result == null)
result = caseLabeledEdge(bpmnEdge);
if (result == null)
result = caseEdge(bpmnEdge);
if (result == null)
result = caseDiagramElement(bpmnEdge);
if (result == null)
result = defaultCase(theEObject);
return result;
}
case BpmnDiPackage.BPMN_LABEL: {
BPMNLabel bpmnLabel = (BPMNLabel) theEObject;
T result = caseBPMNLabel(bpmnLabel);
if (result == null)
result = caseLabel(bpmnLabel);
if (result == null)
result = caseNode(bpmnLabel);
if (result == null)
result = caseDiagramElement(bpmnLabel);
if (result == null)
result = defaultCase(theEObject);
return result;
}
case BpmnDiPackage.BPMN_LABEL_STYLE: {
BPMNLabelStyle bpmnLabelStyle = (BPMNLabelStyle) theEObject;
T result = caseBPMNLabelStyle(bpmnLabelStyle);
if (result == null)
result = caseStyle(bpmnLabelStyle);
if (result == null)
result = defaultCase(theEObject);
return result;
}
case BpmnDiPackage.BPMN_PLANE: {
BPMNPlane bpmnPlane = (BPMNPlane) theEObject;
T result = caseBPMNPlane(bpmnPlane);
if (result == null)
result = casePlane(bpmnPlane);
if (result == null)
result = caseNode(bpmnPlane);
if (result == null)
result = caseDiagramElement(bpmnPlane);
if (result == null)
result = defaultCase(theEObject);
return result;
}
case BpmnDiPackage.BPMN_SHAPE: {
BPMNShape bpmnShape = (BPMNShape) theEObject;
T result = caseBPMNShape(bpmnShape);
if (result == null)
result = caseLabeledShape(bpmnShape);
if (result == null)
result = caseShape(bpmnShape);
if (result == null)
result = caseNode(bpmnShape);
if (result == null)
result = caseDiagramElement(bpmnShape);
if (result == null)
result = defaultCase(theEObject);
return result;
}
default:
return defaultCase(theEObject);
}
}
/**
* Returns the result of interpreting the object as an instance of '<em>Document Root</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Document Root</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseDocumentRoot(DocumentRoot object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>BPMN Diagram</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>BPMN Diagram</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseBPMNDiagram(BPMNDiagram object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>BPMN Edge</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>BPMN Edge</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseBPMNEdge(BPMNEdge object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>BPMN Label</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>BPMN Label</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseBPMNLabel(BPMNLabel object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>BPMN Label Style</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>BPMN Label Style</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseBPMNLabelStyle(BPMNLabelStyle object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>BPMN Plane</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>BPMN Plane</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseBPMNPlane(BPMNPlane object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>BPMN Shape</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>BPMN Shape</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseBPMNShape(BPMNShape object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Diagram</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Diagram</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseDiagram(Diagram object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Diagram Element</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Diagram Element</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseDiagramElement(DiagramElement object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Edge</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Edge</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseEdge(Edge object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Labeled Edge</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Labeled Edge</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseLabeledEdge(LabeledEdge object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Node</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Node</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseNode(Node object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Label</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Label</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseLabel(Label object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Style</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Style</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseStyle(Style object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Plane</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Plane</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T casePlane(Plane object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Shape</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Shape</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseShape(Shape object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>Labeled Shape</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Labeled Shape</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/
public T caseLabeledShape(LabeledShape object) {
return null;
}
/**
* Returns the result of interpreting the object as an instance of '<em>EObject</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch, but this is the last case anyway.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>EObject</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject)
* @generated
*/
public T defaultCase(EObject object) {
return null;
}
} //BpmnDiSwitch
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.execution.buffer;
import com.facebook.presto.OutputBuffers;
import com.facebook.presto.OutputBuffers.OutputBufferId;
import com.facebook.presto.execution.StateMachine;
import com.facebook.presto.memory.context.SimpleLocalMemoryContext;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.type.BigintType;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ListenableFuture;
import io.airlift.units.DataSize;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ScheduledExecutorService;
import static com.facebook.presto.OutputBuffers.BROADCAST_PARTITION_ID;
import static com.facebook.presto.OutputBuffers.BufferType.BROADCAST;
import static com.facebook.presto.OutputBuffers.createInitialEmptyOutputBuffers;
import static com.facebook.presto.execution.buffer.BufferResult.emptyResults;
import static com.facebook.presto.execution.buffer.BufferState.OPEN;
import static com.facebook.presto.execution.buffer.BufferState.TERMINAL_BUFFER_STATES;
import static com.facebook.presto.execution.buffer.BufferTestUtils.MAX_WAIT;
import static com.facebook.presto.execution.buffer.BufferTestUtils.NO_WAIT;
import static com.facebook.presto.execution.buffer.BufferTestUtils.acknowledgeBufferResult;
import static com.facebook.presto.execution.buffer.BufferTestUtils.addPage;
import static com.facebook.presto.execution.buffer.BufferTestUtils.assertBufferResultEquals;
import static com.facebook.presto.execution.buffer.BufferTestUtils.assertFinished;
import static com.facebook.presto.execution.buffer.BufferTestUtils.assertFutureIsDone;
import static com.facebook.presto.execution.buffer.BufferTestUtils.assertQueueClosed;
import static com.facebook.presto.execution.buffer.BufferTestUtils.assertQueueState;
import static com.facebook.presto.execution.buffer.BufferTestUtils.createBufferResult;
import static com.facebook.presto.execution.buffer.BufferTestUtils.createPage;
import static com.facebook.presto.execution.buffer.BufferTestUtils.enqueuePage;
import static com.facebook.presto.execution.buffer.BufferTestUtils.getBufferResult;
import static com.facebook.presto.execution.buffer.BufferTestUtils.getFuture;
import static com.facebook.presto.execution.buffer.BufferTestUtils.sizeOfPages;
import static com.facebook.presto.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.units.DataSize.Unit.BYTE;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
public class TestBroadcastOutputBuffer
{
    // Identifier for the task under test; presumably passed through to buffers
    // created by the helper methods below — confirm against createBroadcastBuffer.
    private static final String TASK_INSTANCE_ID = "task-instance-id";
    // All test pages carry a single BIGINT column.
    private static final ImmutableList<BigintType> TYPES = ImmutableList.of(BIGINT);
    // Output buffer ids used across the test methods.
    private static final OutputBufferId FIRST = new OutputBufferId(0);
    private static final OutputBufferId SECOND = new OutputBufferId(1);
    private static final OutputBufferId THIRD = new OutputBufferId(2);
    // Executor for buffer state-change notifications; created in setUp, torn down in tearDown.
    private ScheduledExecutorService stateNotificationExecutor;
    @BeforeClass
    public void setUp()
    {
        // Shared daemon-thread executor driving output-buffer state notifications.
        stateNotificationExecutor = newScheduledThreadPool(5, daemonThreadsNamed("test-%s"));
    }
@AfterClass(alwaysRun = true)
public void tearDown()
{
if (stateNotificationExecutor != null) {
stateNotificationExecutor.shutdownNow();
stateNotificationExecutor = null;
}
}
@Test
public void testInvalidConstructorArg()
{
try {
createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST).withBuffer(FIRST, BROADCAST_PARTITION_ID).withNoMoreBufferIds(), new DataSize(0, BYTE));
fail("Expected IllegalStateException");
}
catch (IllegalArgumentException ignored) {
}
try {
createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), new DataSize(0, BYTE));
fail("Expected IllegalStateException");
}
catch (IllegalArgumentException ignored) {
}
}
    /**
     * End-to-end walk through the broadcast buffer life cycle: pages added before any
     * client buffer is declared are retained; a late-arriving buffer sees all pages;
     * acknowledgement (not reading) frees space; and the buffer only finishes once
     * every declared client has consumed its pages and aborted.
     */
    @Test
    public void testSimple()
    {
        OutputBuffers outputBuffers = createInitialEmptyOutputBuffers(BROADCAST);
        BroadcastOutputBuffer buffer = createBroadcastBuffer(outputBuffers, sizeOfPages(10));
        // add three items
        for (int i = 0; i < 3; i++) {
            addPage(buffer, createPage(i));
        }
        outputBuffers = createInitialEmptyOutputBuffers(BROADCAST).withBuffer(FIRST, BROADCAST_PARTITION_ID);
        // add a queue
        buffer.setOutputBuffers(outputBuffers);
        assertQueueState(buffer, FIRST, 3, 0);
        // get the three elements
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
        // pages not acknowledged yet so state is the same
        assertQueueState(buffer, FIRST, 3, 0);
        // acknowledge first three pages: a get() at a later token implicitly acknowledges
        // everything before it; the result future itself is discarded via cancel
        buffer.get(FIRST, 3, sizeOfPages(10)).cancel(true);
        // pages now acknowledged
        assertQueueState(buffer, FIRST, 0, 3);
        // fill the buffer (we already added 3 pages)
        for (int i = 3; i < 10; i++) {
            addPage(buffer, createPage(i));
        }
        assertQueueState(buffer, FIRST, 7, 3);
        // try to add one more page, which should block
        ListenableFuture<?> future = enqueuePage(buffer, createPage(10));
        assertFalse(future.isDone());
        assertQueueState(buffer, FIRST, 8, 3);
        // remove a page
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 3, sizeOfPages(1), NO_WAIT), bufferResult(3, createPage(3)));
        // page not acknowledged yet so sent count is the same
        assertQueueState(buffer, FIRST, 8, 3);
        // we should still be blocked
        assertFalse(future.isDone());
        //
        // add another buffer and verify it sees all pages
        outputBuffers = outputBuffers.withBuffer(SECOND, BROADCAST_PARTITION_ID);
        buffer.setOutputBuffers(outputBuffers);
        assertQueueState(buffer, SECOND, 11, 0);
        assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0),
                createPage(1),
                createPage(2),
                createPage(3),
                createPage(4),
                createPage(5),
                createPage(6),
                createPage(7),
                createPage(8),
                createPage(9)));
        // page not acknowledged yet so sent count is still zero
        assertQueueState(buffer, SECOND, 11, 0);
        // acknowledge the 10 pages
        buffer.get(SECOND, 10, sizeOfPages(10)).cancel(true);
        assertQueueState(buffer, SECOND, 1, 10);
        //
        // tell shared buffer there will be no more queues
        outputBuffers = outputBuffers.withNoMoreBufferIds();
        buffer.setOutputBuffers(outputBuffers);
        // queues consumed the first three pages, so they should be dropped now and the blocked page future from above should be done
        assertQueueState(buffer, FIRST, 8, 3);
        assertQueueState(buffer, SECOND, 1, 10);
        assertFutureIsDone(future);
        // we should be able to add 3 more pages (the third will be queued)
        // although the first queue fetched the 4th page, the page has not been acknowledged yet
        addPage(buffer, createPage(11));
        addPage(buffer, createPage(12));
        future = enqueuePage(buffer, createPage(13));
        assertFalse(future.isDone());
        assertQueueState(buffer, FIRST, 11, 3);
        assertQueueState(buffer, SECOND, 4, 10);
        // acknowledge the receipt of the 3rd page and try to remove the 4th page from the first queue
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 4, sizeOfPages(1), NO_WAIT), bufferResult(4, createPage(4)));
        // the blocked page future above should be done
        assertFutureIsDone(future);
        assertQueueState(buffer, FIRST, 10, 4);
        assertQueueState(buffer, SECOND, 4, 10);
        //
        // finish the buffer
        assertFalse(buffer.isFinished());
        buffer.setNoMorePages();
        assertQueueState(buffer, FIRST, 10, 4);
        assertQueueState(buffer, SECOND, 4, 10);
        // not fully finished until all pages are consumed
        assertFalse(buffer.isFinished());
        // remove a page, not finished
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 5, sizeOfPages(1), NO_WAIT), bufferResult(5, createPage(5)));
        assertQueueState(buffer, FIRST, 9, 5);
        assertQueueState(buffer, SECOND, 4, 10);
        assertFalse(buffer.isFinished());
        // remove all remaining pages from first queue, should not be finished
        BufferResult x = getBufferResult(buffer, FIRST, 6, sizeOfPages(10), NO_WAIT);
        assertBufferResultEquals(TYPES, x, bufferResult(6, createPage(6),
                createPage(7),
                createPage(8),
                createPage(9),
                createPage(10),
                createPage(11),
                createPage(12),
                createPage(13)));
        assertQueueState(buffer, FIRST, 8, 6);
        // reading past the last page returns an empty "complete" result
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 14, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 14, true));
        // finish first queue
        buffer.abort(FIRST);
        assertQueueClosed(buffer, FIRST, 14);
        assertQueueState(buffer, SECOND, 4, 10);
        assertFalse(buffer.isFinished());
        // remove all remaining pages from second queue, should be finished
        assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 10, sizeOfPages(10), NO_WAIT), bufferResult(10, createPage(10),
                createPage(11),
                createPage(12),
                createPage(13)));
        assertQueueState(buffer, SECOND, 4, 10);
        assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 14, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 14, true));
        buffer.abort(SECOND);
        assertQueueClosed(buffer, FIRST, 14);
        assertQueueClosed(buffer, SECOND, 14);
        assertFinished(buffer);
        // reads after finish keep returning empty "complete" results
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 14, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 14, true));
        assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 14, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 14, true));
    }
// TODO: remove this after PR #7987 is landed
@Test
public void testAcknowledge()
{
OutputBuffers outputBuffers = createInitialEmptyOutputBuffers(BROADCAST);
BroadcastOutputBuffer buffer = createBroadcastBuffer(outputBuffers, sizeOfPages(10));
// add three items
for (int i = 0; i < 3; i++) {
addPage(buffer, createPage(i));
}
outputBuffers = createInitialEmptyOutputBuffers(BROADCAST).withBuffer(FIRST, BROADCAST_PARTITION_ID);
// add a queue
buffer.setOutputBuffers(outputBuffers);
assertQueueState(buffer, FIRST, 3, 0);
// get the three elements
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
// acknowledge pages 0 and 1
acknowledgeBufferResult(buffer, FIRST, 2);
// only page 2 is not removed
assertQueueState(buffer, FIRST, 1, 2);
// acknowledge page 2
acknowledgeBufferResult(buffer, FIRST, 3);
// nothing left
assertQueueState(buffer, FIRST, 0, 3);
// acknowledge more pages will fail
try {
acknowledgeBufferResult(buffer, FIRST, 4);
}
catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "Invalid sequence id");
}
// fill the buffer
for (int i = 3; i < 6; i++) {
addPage(buffer, createPage(i));
}
assertQueueState(buffer, FIRST, 3, 3);
// getting new pages will again acknowledge the previously acknowledged pages but this is ok
buffer.get(FIRST, 3, sizeOfPages(1)).cancel(true);
assertQueueState(buffer, FIRST, 3, 3);
}
@Test
public void testSharedBufferFull()
{
BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), sizeOfPages(2));
// Add two pages, buffer is full
addPage(buffer, createPage(1));
addPage(buffer, createPage(2));
// third page is blocked
enqueuePage(buffer, createPage(3));
}
    /**
     * Verifies that repeating a get() at the same token is idempotent: the same pages
     * are returned again until they are acknowledged, after which the same token
     * yields an empty (but not complete) result.
     */
    @Test
    public void testDuplicateRequests()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(
                createInitialEmptyOutputBuffers(BROADCAST)
                        .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                        .withNoMoreBufferIds(),
                sizeOfPages(10));
        // add three items
        for (int i = 0; i < 3; i++) {
            addPage(buffer, createPage(i));
        }
        // add a queue
        assertQueueState(buffer, FIRST, 3, 0);
        // get the three elements
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
        // pages not acknowledged yet so state is the same
        assertQueueState(buffer, FIRST, 3, 0);
        // get the three elements again
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
        // pages not acknowledged yet so state is the same
        assertQueueState(buffer, FIRST, 3, 0);
        // acknowledge the pages
        buffer.get(FIRST, 3, sizeOfPages(10)).cancel(true);
        // attempt to get the three elements again: they are gone, result is empty but not complete
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, false));
        // pages are now acknowledged
        assertQueueState(buffer, FIRST, 0, 3);
    }
@Test
public void testAddQueueAfterCreation()
{
BroadcastOutputBuffer buffer = createBroadcastBuffer(
createInitialEmptyOutputBuffers(BROADCAST)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds(),
sizeOfPages(10));
assertFalse(buffer.isFinished());
try {
buffer.setOutputBuffers(createInitialEmptyOutputBuffers(BROADCAST)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withBuffer(SECOND, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds());
fail("Expected IllegalStateException from addQueue after noMoreQueues has been called");
}
catch (IllegalArgumentException ignored) {
}
}
@Test
public void testAddAfterFinish()
{
BroadcastOutputBuffer buffer = createBroadcastBuffer(
createInitialEmptyOutputBuffers(BROADCAST)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds(),
sizeOfPages(10));
buffer.setNoMorePages();
addPage(buffer, createPage(0));
addPage(buffer, createPage(0));
assertEquals(buffer.getInfo().getTotalPagesSent(), 0);
}
@Test
public void testAddQueueAfterNoMoreQueues()
{
BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), sizeOfPages(10));
assertFalse(buffer.isFinished());
// tell buffer no more queues will be added
buffer.setOutputBuffers(createInitialEmptyOutputBuffers(BROADCAST).withNoMoreBufferIds());
assertTrue(buffer.isFinished());
// set no more queues a second time to assure that we don't get an exception or such
buffer.setOutputBuffers(createInitialEmptyOutputBuffers(BROADCAST).withNoMoreBufferIds());
assertTrue(buffer.isFinished());
// set no more queues a third time to assure that we don't get an exception or such
buffer.setOutputBuffers(createInitialEmptyOutputBuffers(BROADCAST).withNoMoreBufferIds());
assertTrue(buffer.isFinished());
}
@Test
public void testAddAfterDestroy()
{
BroadcastOutputBuffer buffer = createBroadcastBuffer(
createInitialEmptyOutputBuffers(BROADCAST)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds(),
sizeOfPages(10));
buffer.destroy();
addPage(buffer, createPage(0));
addPage(buffer, createPage(0));
assertEquals(buffer.getInfo().getTotalPagesSent(), 0);
}
    /**
     * A get() against a buffer id that has not been declared yet must block, and the
     * pending future must complete once a page becomes available.
     */
    @Test
    public void testGetBeforeCreate()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), sizeOfPages(10));
        assertFalse(buffer.isFinished());
        // get a page from a buffer that doesn't exist yet
        ListenableFuture<BufferResult> future = buffer.get(FIRST, 0L, sizeOfPages(1));
        assertFalse(future.isDone());
        // add a page and verify the future is complete
        addPage(buffer, createPage(33));
        assertTrue(future.isDone());
        assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(33)));
    }
@Test(expectedExceptions = IllegalStateException.class, expectedExceptionsMessageRegExp = ".*does not contain.*\\[0]")
public void testSetFinalBuffersWihtoutDeclaringUsedBuffer()
{
BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), sizeOfPages(10));
assertFalse(buffer.isFinished());
// get a page from a buffer that doesn't exist yet
ListenableFuture<BufferResult> future = buffer.get(FIRST, 0L, sizeOfPages(1));
assertFalse(future.isDone());
// add a page and set no more pages
addPage(buffer, createPage(33));
buffer.setNoMorePages();
// read the page
assertTrue(future.isDone());
assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(33)));
// acknowledge the page and verify we are finished
assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 1, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 1, true));
buffer.abort(FIRST);
// set final buffers to a set that does not contain the buffer, which will fail
buffer.setOutputBuffers(createInitialEmptyOutputBuffers(BROADCAST).withNoMoreBufferIds());
}
    /**
     * Reading from a buffer id that was never declared, after the final buffer set
     * has been fixed, must fail with an {@link IllegalStateException}.
     */
    @Test(expectedExceptions = IllegalStateException.class, expectedExceptionsMessageRegExp = "No more buffers already set")
    public void testUseUndeclaredBufferAfterFinalBuffersSet()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(
                createInitialEmptyOutputBuffers(BROADCAST)
                        .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                        .withNoMoreBufferIds(),
                sizeOfPages(10));
        assertFalse(buffer.isFinished());
        // get a page from a buffer that was not declared, which will fail
        buffer.get(SECOND, 0L, sizeOfPages(1));
    }
    /**
     * Aborting a buffer id that has a pending read but was never declared must
     * complete the pending future and mark that client's stream as complete.
     */
    @Test
    public void testAbortBeforeCreate()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), sizeOfPages(2));
        assertFalse(buffer.isFinished());
        // get a page from a buffer that doesn't exist yet
        ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(1));
        assertFalse(future.isDone());
        // abort that buffer, and verify the future is complete and buffer is finished
        buffer.abort(FIRST);
        assertTrue(future.isDone());
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, true));
    }
@Test
public void testFullBufferBlocksWriter()
{
BroadcastOutputBuffer buffer = createBroadcastBuffer(
createInitialEmptyOutputBuffers(BROADCAST)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withBuffer(SECOND, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds(),
sizeOfPages(2));
// Add two pages, buffer is full
addPage(buffer, createPage(1));
addPage(buffer, createPage(2));
// third page is blocked
enqueuePage(buffer, createPage(3));
}
    /**
     * In a broadcast buffer, space is only reclaimed once EVERY declared client has
     * acknowledged a page; acknowledging in just one client must not unblock a
     * blocked writer.
     */
    @Test
    public void testAcknowledgementFreesWriters()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(
                createInitialEmptyOutputBuffers(BROADCAST)
                        .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                        .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                        .withNoMoreBufferIds(),
                sizeOfPages(2));
        // Add two pages, buffer is full
        addPage(buffer, createPage(1));
        addPage(buffer, createPage(2));
        assertQueueState(buffer, FIRST, 2, 0);
        // third page is blocked
        ListenableFuture<?> future = enqueuePage(buffer, createPage(3));
        // we should be blocked
        assertFalse(future.isDone());
        assertQueueState(buffer, FIRST, 3, 0);
        assertQueueState(buffer, SECOND, 3, 0);
        // acknowledge pages for first buffer, no space is freed
        buffer.get(FIRST, 2, sizeOfPages(10)).cancel(true);
        assertFalse(future.isDone());
        // acknowledge pages for second buffer, which makes space in the buffer
        buffer.get(SECOND, 2, sizeOfPages(10)).cancel(true);
        // writer should not be blocked
        assertFutureIsDone(future);
        assertQueueState(buffer, SECOND, 1, 2);
    }
@Test
public void testAbort()
{
BroadcastOutputBuffer bufferedBuffer = createBroadcastBuffer(
createInitialEmptyOutputBuffers(BROADCAST)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withBuffer(SECOND, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds(),
sizeOfPages(10));
// fill the buffer
for (int i = 0; i < 10; i++) {
addPage(bufferedBuffer, createPage(i));
}
bufferedBuffer.setNoMorePages();
assertBufferResultEquals(TYPES, getBufferResult(bufferedBuffer, FIRST, 0, sizeOfPages(1), NO_WAIT), bufferResult(0, createPage(0)));
bufferedBuffer.abort(FIRST);
assertQueueClosed(bufferedBuffer, FIRST, 0);
assertBufferResultEquals(TYPES, getBufferResult(bufferedBuffer, FIRST, 1, sizeOfPages(1), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, true));
assertBufferResultEquals(TYPES, getBufferResult(bufferedBuffer, SECOND, 0, sizeOfPages(1), NO_WAIT), bufferResult(0, createPage(0)));
bufferedBuffer.abort(SECOND);
assertQueueClosed(bufferedBuffer, SECOND, 0);
assertFinished(bufferedBuffer);
assertBufferResultEquals(TYPES, getBufferResult(bufferedBuffer, SECOND, 1, sizeOfPages(1), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, true));
}
@Test
public void testFinishClosesEmptyQueues()
{
BroadcastOutputBuffer buffer = createBroadcastBuffer(
createInitialEmptyOutputBuffers(BROADCAST)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withBuffer(SECOND, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds(),
sizeOfPages(10));
// finish while queues are empty
buffer.setNoMorePages();
assertQueueState(buffer, FIRST, 0, 0);
assertQueueState(buffer, SECOND, 0, 0);
buffer.abort(FIRST);
buffer.abort(SECOND);
assertQueueClosed(buffer, FIRST, 0);
assertQueueClosed(buffer, SECOND, 0);
}
    /**
     * Aborting a client must complete that client's pending read future; subsequent
     * reads on the aborted client observe a closed queue.
     */
    @Test
    public void testAbortFreesReader()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(
                createInitialEmptyOutputBuffers(BROADCAST)
                        .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                        .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                        .withNoMoreBufferIds(),
                sizeOfPages(5));
        assertFalse(buffer.isFinished());
        // attempt to get a page
        ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
        // verify we are waiting for a page
        assertFalse(future.isDone());
        // add one item
        addPage(buffer, createPage(0));
        assertTrue(future.isDone());
        // verify we got one page
        assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
        // attempt to get another page, and verify we are blocked
        future = buffer.get(FIRST, 1, sizeOfPages(10));
        assertFalse(future.isDone());
        // abort the buffer
        buffer.abort(FIRST);
        // verify the future completed
        // broadcast buffer does not return a "complete" result in this case, but it doesn't matter
        assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), emptyResults(TASK_INSTANCE_ID, 1, false));
        // further requests will see a completed result
        assertQueueClosed(buffer, FIRST, 1);
    }
    /**
     * setNoMorePages() must complete a pending read future with an empty "complete"
     * result once the reader has consumed everything that was produced.
     */
    @Test
    public void testFinishFreesReader()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(
                createInitialEmptyOutputBuffers(BROADCAST)
                        .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                        .withNoMoreBufferIds(),
                sizeOfPages(5));
        assertFalse(buffer.isFinished());
        // attempt to get a page
        ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
        // verify we are waiting for a page
        assertFalse(future.isDone());
        // add one item
        addPage(buffer, createPage(0));
        // verify we got one page
        assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
        // attempt to get another page, and verify we are blocked
        future = buffer.get(FIRST, 1, sizeOfPages(10));
        assertFalse(future.isDone());
        // finish the buffer
        buffer.setNoMorePages();
        assertQueueState(buffer, FIRST, 0, 1);
        // verify the future completed
        assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), emptyResults(TASK_INSTANCE_ID, 1, true));
    }
    /**
     * setNoMorePages() must unblock writers that are waiting for buffer space; the
     * already-enqueued pages remain readable and the buffer finishes once the reader
     * drains and aborts.
     */
    @Test
    public void testFinishFreesWriter()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(
                createInitialEmptyOutputBuffers(BROADCAST)
                        .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                        .withNoMoreBufferIds(),
                sizeOfPages(5));
        assertFalse(buffer.isFinished());
        // fill the buffer
        for (int i = 0; i < 5; i++) {
            addPage(buffer, createPage(i));
        }
        // enqueue two additional pages, which block because the buffer is full
        ListenableFuture<?> firstEnqueuePage = enqueuePage(buffer, createPage(5));
        ListenableFuture<?> secondEnqueuePage = enqueuePage(buffer, createPage(6));
        // get and acknowledge one page
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(1), MAX_WAIT), bufferResult(0, createPage(0)));
        buffer.get(FIRST, 1, sizeOfPages(1)).cancel(true);
        // verify we are still blocked because the buffer is full
        assertFalse(firstEnqueuePage.isDone());
        assertFalse(secondEnqueuePage.isDone());
        // finish the query
        buffer.setNoMorePages();
        assertFalse(buffer.isFinished());
        // verify futures are complete
        assertFutureIsDone(firstEnqueuePage);
        assertFutureIsDone(secondEnqueuePage);
        // get and acknowledge the last 6 pages
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 1, sizeOfPages(100), NO_WAIT),
                bufferResult(1, createPage(1), createPage(2), createPage(3), createPage(4), createPage(5), createPage(6)));
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 7, sizeOfPages(100), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 7, true));
        buffer.abort(FIRST);
        // verify finished
        assertFinished(buffer);
    }
    /**
     * destroy() must close the client queue and complete any pending read future
     * with an empty result.
     */
    @Test
    public void testDestroyFreesReader()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(
                createInitialEmptyOutputBuffers(BROADCAST)
                        .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                        .withNoMoreBufferIds(),
                sizeOfPages(5));
        assertFalse(buffer.isFinished());
        // attempt to get a page
        ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
        // verify we are waiting for a page
        assertFalse(future.isDone());
        // add one page
        addPage(buffer, createPage(0));
        // verify we got one page
        assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
        // attempt to get another page, and verify we are blocked
        future = buffer.get(FIRST, 1, sizeOfPages(10));
        assertFalse(future.isDone());
        // destroy the buffer
        buffer.destroy();
        assertQueueClosed(buffer, FIRST, 1);
        // verify the future completed
        assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), emptyResults(TASK_INSTANCE_ID, 1, false));
    }
    /**
     * destroy() (i.e. query cancellation) must finish the buffer and unblock any
     * writers waiting for space.
     */
    @Test
    public void testDestroyFreesWriter()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(
                createInitialEmptyOutputBuffers(BROADCAST)
                        .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                        .withNoMoreBufferIds(),
                sizeOfPages(5));
        assertFalse(buffer.isFinished());
        // fill the buffer
        for (int i = 0; i < 5; i++) {
            addPage(buffer, createPage(i));
        }
        // add two pages to the buffer queue
        ListenableFuture<?> firstEnqueuePage = enqueuePage(buffer, createPage(5));
        ListenableFuture<?> secondEnqueuePage = enqueuePage(buffer, createPage(6));
        // get and acknowledge one page
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(1), MAX_WAIT), bufferResult(0, createPage(0)));
        buffer.get(FIRST, 1, sizeOfPages(1)).cancel(true);
        // verify we are still blocked because the buffer is full
        assertFalse(firstEnqueuePage.isDone());
        assertFalse(secondEnqueuePage.isDone());
        // destroy the buffer (i.e., cancel the query)
        buffer.destroy();
        assertFinished(buffer);
        // verify the futures are completed
        assertFutureIsDone(firstEnqueuePage);
        assertFutureIsDone(secondEnqueuePage);
    }
    /**
     * fail() must NOT complete pending read futures: a failed buffer keeps readers
     * blocked (failure is reported to clients through another channel).
     */
    @Test
    public void testFailDoesNotFreeReader()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(
                createInitialEmptyOutputBuffers(BROADCAST)
                        .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                        .withNoMoreBufferIds(),
                sizeOfPages(5));
        assertFalse(buffer.isFinished());
        // attempt to get a page
        ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
        // verify we are waiting for a page
        assertFalse(future.isDone());
        // add one page
        addPage(buffer, createPage(0));
        // verify we got one page
        assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
        // attempt to get another page, and verify we are blocked
        future = buffer.get(FIRST, 1, sizeOfPages(10));
        assertFalse(future.isDone());
        // fail the buffer
        buffer.fail();
        // future should have not finished
        assertFalse(future.isDone());
        // attempt to get another page, and verify we are blocked
        future = buffer.get(FIRST, 1, sizeOfPages(10));
        assertFalse(future.isDone());
    }
    /**
     * fail() must unblock writers waiting for space, even though the buffer does not
     * transition to the finished state.
     */
    @Test
    public void testFailFreesWriter()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(
                createInitialEmptyOutputBuffers(BROADCAST)
                        .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                        .withNoMoreBufferIds(),
                sizeOfPages(5));
        assertFalse(buffer.isFinished());
        // fill the buffer
        for (int i = 0; i < 5; i++) {
            addPage(buffer, createPage(i));
        }
        // add two pages to the buffer queue
        ListenableFuture<?> firstEnqueuePage = enqueuePage(buffer, createPage(5));
        ListenableFuture<?> secondEnqueuePage = enqueuePage(buffer, createPage(6));
        // get and acknowledge one page
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(1), MAX_WAIT), bufferResult(0, createPage(0)));
        buffer.get(FIRST, 1, sizeOfPages(1)).cancel(true);
        // verify we are still blocked because the buffer is full
        assertFalse(firstEnqueuePage.isDone());
        assertFalse(secondEnqueuePage.isDone());
        // fail the buffer (i.e., cancel the query)
        buffer.fail();
        assertFalse(buffer.isFinished());
        // verify the futures are completed
        assertFutureIsDone(firstEnqueuePage);
        assertFutureIsDone(secondEnqueuePage);
    }
    /**
     * Declaring additional buffers (and the final buffer set) after fail() must be
     * accepted without error, and reads on every buffer stay blocked.
     */
    @Test
    public void testAddBufferAfterFail()
    {
        OutputBuffers outputBuffers = createInitialEmptyOutputBuffers(BROADCAST)
                .withBuffer(FIRST, BROADCAST_PARTITION_ID);
        BroadcastOutputBuffer buffer = createBroadcastBuffer(outputBuffers, sizeOfPages(5));
        assertFalse(buffer.isFinished());
        // attempt to get a page
        ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
        // verify we are waiting for a page
        assertFalse(future.isDone());
        // add one page
        addPage(buffer, createPage(0));
        // verify we got one page
        assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
        // fail the buffer
        buffer.fail();
        // add a buffer
        outputBuffers = outputBuffers.withBuffer(SECOND, BROADCAST_PARTITION_ID);
        buffer.setOutputBuffers(outputBuffers);
        // attempt to get page, and verify we are blocked
        future = buffer.get(FIRST, 1, sizeOfPages(10));
        assertFalse(future.isDone());
        future = buffer.get(SECOND, 0, sizeOfPages(10));
        assertFalse(future.isDone());
        // set no more buffers
        outputBuffers = outputBuffers.withNoMoreBufferIds();
        buffer.setOutputBuffers(outputBuffers);
        // attempt to get page, and verify we are blocked
        future = buffer.get(FIRST, 1, sizeOfPages(10));
        assertFalse(future.isDone());
        future = buffer.get(SECOND, 0, sizeOfPages(10));
        assertFalse(future.isDone());
    }
    /**
     * The buffer is not finished merely because all pages were read: it requires an
     * acknowledgement (here via abort) from the only client before transitioning to
     * the finished state.
     */
    @Test
    public void testBufferCompletion()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(
                createInitialEmptyOutputBuffers(BROADCAST)
                        .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                        .withNoMoreBufferIds(),
                sizeOfPages(5));
        assertFalse(buffer.isFinished());
        // fill the buffer
        List<Page> pages = new ArrayList<>();
        for (int i = 0; i < 5; i++) {
            Page page = createPage(i);
            addPage(buffer, page);
            pages.add(page);
        }
        buffer.setNoMorePages();
        // get and acknowledge 5 pages
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(5), MAX_WAIT), createBufferResult(TASK_INSTANCE_ID, 0, pages));
        // buffer is not finished
        assertFalse(buffer.isFinished());
        // there are no more pages and no more buffers, but buffer is not finished because it didn't receive an acknowledgement yet
        assertFalse(buffer.isFinished());
        // ask the buffer to finish
        buffer.abort(FIRST);
        // verify that the buffer is finished
        assertTrue(buffer.isFinished());
    }
    /**
     * Once the buffer set is final, aborting every declared client must finish the
     * buffer even while unconsumed pages are still pending.
     */
    @Test
    public void testBufferFinishesWhenClientBuffersDestroyed()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(
                createInitialEmptyOutputBuffers(BROADCAST)
                        .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                        .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                        .withBuffer(THIRD, BROADCAST_PARTITION_ID)
                        .withNoMoreBufferIds(),
                sizeOfPages(5));
        // add pages before closing the buffers to make sure
        // that the buffers close even if there are pending pages
        for (int i = 0; i < 5; i++) {
            addPage(buffer, createPage(i));
        }
        // the buffer is in the NO_MORE_BUFFERS state now
        // and if we abort all the buffers it should destroy itself
        // and move to the FINISHED state
        buffer.abort(FIRST);
        assertFalse(buffer.isFinished());
        buffer.abort(SECOND);
        assertFalse(buffer.isFinished());
        buffer.abort(THIRD);
        assertTrue(buffer.isFinished());
    }
    /**
     * Creates a {@link BroadcastOutputBuffer} in the OPEN state with the given
     * maximum buffered data size and applies the supplied output buffer declaration.
     */
    private BroadcastOutputBuffer createBroadcastBuffer(OutputBuffers outputBuffers, DataSize dataSize)
    {
        BroadcastOutputBuffer buffer = new BroadcastOutputBuffer(
                TASK_INSTANCE_ID,
                new StateMachine<>("bufferState", stateNotificationExecutor, OPEN, TERMINAL_BUFFER_STATES),
                dataSize,
                () -> new SimpleLocalMemoryContext(newSimpleAggregatedMemoryContext()),
                stateNotificationExecutor);
        buffer.setOutputBuffers(outputBuffers);
        return buffer;
    }
private static BufferResult bufferResult(long token, Page firstPage, Page... otherPages)
{
List<Page> pages = ImmutableList.<Page>builder().add(firstPage).add(otherPages).build();
return createBufferResult(TASK_INSTANCE_ID, token, pages);
}
}
| |
/*-
* -\-\-
* Helios Tools
* --
* Copyright (C) 2016 Spotify AB
* --
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* -/-/-
*/
package com.spotify.helios.cli.command;
import static com.google.common.base.Strings.isNullOrEmpty;
import static com.google.common.collect.Ordering.natural;
import static com.spotify.helios.cli.Output.formatHostname;
import static com.spotify.helios.cli.Output.humanDuration;
import static com.spotify.helios.cli.Output.table;
import static com.spotify.helios.common.descriptors.HostStatus.Status.UP;
import static java.lang.String.format;
import static java.lang.System.currentTimeMillis;
import static net.sourceforge.argparse4j.impl.Arguments.append;
import static net.sourceforge.argparse4j.impl.Arguments.storeTrue;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.spotify.helios.cli.Table;
import com.spotify.helios.client.HeliosClient;
import com.spotify.helios.common.Json;
import com.spotify.helios.common.descriptors.DockerVersion;
import com.spotify.helios.common.descriptors.HostInfo;
import com.spotify.helios.common.descriptors.HostStatus;
import com.spotify.helios.common.descriptors.JobId;
import com.spotify.helios.common.descriptors.TaskStatus;
import java.io.BufferedReader;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ExecutionException;
import net.sourceforge.argparse4j.inf.Argument;
import net.sourceforge.argparse4j.inf.Namespace;
import net.sourceforge.argparse4j.inf.Subparser;
public class HostListCommand extends ControlCommand {
  // CLI argument handles; values are read back from the parsed Namespace via Argument#getDest()
  private final Argument quietArg;
  private final Argument patternArg;
  private final Argument fullArg;
  private final Argument statusArg;
  private final Argument hostSelectorsArg;
  // comma-separated list of valid HostStatus.Status names, used in help and error text
  private final String statusChoicesString;
  /**
   * Registers the "list hosts" subcommand arguments on the given parser:
   * an optional hostname pattern, quiet/full output flags, a status filter
   * restricted to the {@link HostStatus.Status} enum names, and repeatable
   * host selector expressions.
   *
   * @param parser the argparse4j subparser to configure
   */
  public HostListCommand(final Subparser parser) {
    super(parser);
    // render the enum values as strings so they can be offered as argparse choices
    Collection<String> statusChoices = Collections2.transform(
        Arrays.asList(HostStatus.Status.values()), new Function<HostStatus.Status, String>() {
          @Override
          public String apply(final HostStatus.Status input) {
            return input.toString();
          }
        });
    statusChoicesString = Joiner.on(", ").join(statusChoices);
    parser.help("list hosts");
    patternArg = parser.addArgument("pattern")
        .nargs("?")
        .setDefault("")
        .help("Pattern to filter hosts with");
    quietArg = parser.addArgument("-q")
        .action(storeTrue())
        .help("only print host names");
    fullArg = parser.addArgument("-f")
        .action(storeTrue())
        .help("Print full host names.");
    statusArg = parser.addArgument("--status")
        .nargs("?")
        .choices(statusChoices.toArray(new String[statusChoices.size()]))
        .help("Filter hosts by its status. Valid statuses are: " + statusChoicesString);
    hostSelectorsArg = parser.addArgument("-s", "--selector")
        .action(append())
        .setDefault(new ArrayList<String>())
        .help("Host selector expression. The list of hosts will be filtered to match only those "
              + "whose labels match all of the supplied expressions. "
              + "Multiple selector expressions can be specified with multiple `-s` arguments "
              + "(e.g. `-s site=foo -s bar!=yes`). "
              + "Supported operators are '=', '!=', 'in' and 'notin'.");
  }
@Override
int run(final Namespace options, final HeliosClient client, final PrintStream out,
final boolean json, final BufferedReader stdin)
throws ExecutionException, InterruptedException {
final String pattern = options.getString(patternArg.getDest());
final List<String> selectorArgValue = options.getList(hostSelectorsArg.getDest());
final Set<String> selectors = ImmutableSet.copyOf(selectorArgValue);
final List<String> hosts;
if (pattern.isEmpty() && selectors.isEmpty()) {
hosts = client.listHosts().get();
} else if (!pattern.isEmpty() && selectors.isEmpty()) {
hosts = client.listHosts(pattern).get();
} else if (pattern.isEmpty() && !selectors.isEmpty()) {
hosts = client.listHosts(selectors).get();
} else {
hosts = client.listHosts(pattern, selectors).get();
}
final Map<String, String> queryParams = Maps.newHashMap();
final String statusFilter = options.getString(statusArg.getDest());
if (!isNullOrEmpty(statusFilter)) {
try {
HostStatus.Status.valueOf(statusFilter);
queryParams.put("status", statusFilter);
} catch (IllegalArgumentException ignored) {
throw new IllegalArgumentException(
"Invalid status. Valid statuses are: " + statusChoicesString);
}
}
final boolean full = options.getBoolean(fullArg.getDest());
final boolean quiet = options.getBoolean(quietArg.getDest());
if (hosts.isEmpty()) {
if (json) {
out.println("{ }");
} else if (!quiet && !isNullOrEmpty(pattern)) {
out.printf("host pattern %s matched no hosts%n", pattern);
}
return 1;
}
if (quiet) {
final List<String> sortedHosts = natural().sortedCopy(hosts);
if (json) {
out.println(Json.asPrettyStringUnchecked(sortedHosts));
} else {
for (final String host : sortedHosts) {
out.println(formatHostname(full, host));
}
}
} else {
final Map<String, HostStatus> statuses =
new TreeMap<>(client.hostStatuses(hosts, queryParams).get());
if (json) {
out.println(Json.asPrettyStringUnchecked(statuses));
} else {
final Table table = table(out);
table.row("HOST", "STATUS", "DEPLOYED", "RUNNING", "CPUS", "MEM", "LOAD AVG", "MEM USAGE",
"OS", "HELIOS", "DOCKER", "LABELS");
for (final Map.Entry<String, HostStatus> e : statuses.entrySet()) {
final String host = e.getKey();
final HostStatus s = e.getValue();
if (s == null) {
continue;
}
final Set<TaskStatus> runningDeployedJobs = Sets.newHashSet();
for (final JobId jobId : s.getJobs().keySet()) {
final TaskStatus taskStatus = s.getStatuses().get(jobId);
if (taskStatus == null) {
continue;
}
if (taskStatus.getState() == TaskStatus.State.RUNNING) {
runningDeployedJobs.add(taskStatus);
}
}
final HostInfo hi = s.getHostInfo();
final String memUsage;
final String cpus;
final String mem;
final String loadAvg;
final String os;
final String docker;
if (hi != null) {
final long free = hi.getMemoryFreeBytes();
final long total = hi.getMemoryTotalBytes();
memUsage = format("%.2f", (float) (total - free) / total);
cpus = String.valueOf(hi.getCpus());
mem = hi.getMemoryTotalBytes() / (1024 * 1024 * 1024) + " gb";
loadAvg = format("%.2f", hi.getLoadAvg());
os = hi.getOsName() + " " + hi.getOsVersion();
final DockerVersion dv = hi.getDockerVersion();
docker = (dv != null) ? format("%s (%s)", dv.getVersion(), dv.getApiVersion()) : "";
} else {
memUsage = cpus = mem = loadAvg = os = docker = "";
}
final String version;
if (s.getAgentInfo() != null) {
version = Optional.fromNullable(s.getAgentInfo().getVersion()).or("");
} else {
version = "";
}
String status = s.getStatus() == UP ? "Up" : "Down";
if (s.getAgentInfo() != null) {
final long startTime = s.getAgentInfo().getStartTime();
final long upTime = s.getAgentInfo().getUptime();
if (s.getStatus() == UP) {
status += " " + humanDuration(currentTimeMillis() - startTime);
} else {
status += " " + humanDuration(currentTimeMillis() - startTime - upTime);
}
}
final String hostLabels = Joiner.on(", ").withKeyValueSeparator("=").join(s.getLabels());
table.row(formatHostname(full, host), status, s.getJobs().size(),
runningDeployedJobs.size(), cpus, mem, loadAvg, memUsage, os, version, docker,
hostLabels);
}
table.print();
}
}
return 0;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.upgrade;
import com.google.inject.Inject;
import com.google.inject.Injector;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
import org.apache.ambari.server.orm.dao.DaoUtils;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.utils.VersionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
/**
 * Upgrade catalog for version 2.1.2.
 *
 * <p>Performs the DDL and DML changes required to move an Ambari database from
 * schema version 2.1.1 to 2.1.2: adds a cluster_id column (plus FK) to the
 * topology_request table, adds an auto-skip column to host_role_command, and
 * applies a number of service configuration fix-ups (Hive, HBase, Oozie,
 * Kafka, Storm, HDFS mount config).
 */
public class UpgradeCatalog212 extends AbstractUpgradeCatalog {
  private static final String HIVE_SITE = "hive-site";
  private static final String HIVE_ENV = "hive-env";
  private static final String HBASE_ENV = "hbase-env";
  private static final String HBASE_SITE = "hbase-site";
  private static final String CLUSTER_ENV = "cluster-env";
  private static final String OOZIE_ENV = "oozie-env";

  private static final String TOPOLOGY_REQUEST_TABLE = "topology_request";
  private static final String CLUSTERS_TABLE = "clusters";
  private static final String CLUSTERS_TABLE_CLUSTER_ID_COLUMN = "cluster_id";
  private static final String TOPOLOGY_REQUEST_CLUSTER_NAME_COLUMN = "cluster_name";
  private static final String TOPOLOGY_REQUEST_CLUSTER_ID_COLUMN = "cluster_id";
  private static final String TOPOLOGY_REQUEST_CLUSTER_ID_FK_CONSTRAINT_NAME = "FK_topology_request_cluster_id";
  private static final String HOST_ROLE_COMMAND_TABLE = "host_role_command";
  private static final String HOST_ROLE_COMMAND_SKIP_COLUMN = "auto_skip_on_failure";

  /**
   * Logger.
   */
  private static final Logger LOG = LoggerFactory.getLogger(UpgradeCatalog212.class);

  @Inject
  DaoUtils daoUtils;

  // ----- Constructors ------------------------------------------------------

  /**
   * Don't forget to register new UpgradeCatalogs in {@link org.apache.ambari.server.upgrade.SchemaUpgradeHelper.UpgradeHelperModule#configure()}
   *
   * @param injector Guice injector to track dependencies and uses bindings to inject them.
   */
  @Inject
  public UpgradeCatalog212(Injector injector) {
    super(injector);
    daoUtils = injector.getInstance(DaoUtils.class);
  }

  // ----- UpgradeCatalog ----------------------------------------------------

  /**
   * {@inheritDoc}
   */
  @Override
  public String getTargetVersion() {
    return "2.1.2";
  }

  // ----- AbstractUpgradeCatalog --------------------------------------------

  /**
   * {@inheritDoc}
   */
  @Override
  public String getSourceVersion() {
    return "2.1.1";
  }

  /**
   * {@inheritDoc}
   */
  @Override
  protected void executeDDLUpdates() throws AmbariException, SQLException {
    executeTopologyDDLUpdates();
    executeHostRoleCommandDDLUpdates();
  }

  /**
   * Adds the nullable cluster_id column to topology_request. The column is
   * only made NOT NULL (and FK-constrained) later, in the pre-DML phase, once
   * cluster ids have been backfilled from cluster names.
   */
  private void executeTopologyDDLUpdates() throws AmbariException, SQLException {
    dbAccessor.addColumn(TOPOLOGY_REQUEST_TABLE, new DBColumnInfo(TOPOLOGY_REQUEST_CLUSTER_ID_COLUMN,
      Long.class, null, null, true));
    // TOPOLOGY_REQUEST_CLUSTER_NAME_COLUMN will be deleted in PreDML. We need a cluster name to set cluster id.
    // dbAccessor.dropColumn(TOPOLOGY_REQUEST_TABLE, TOPOLOGY_REQUEST_CLUSTER_NAME_COLUMN);
    // dbAccessor.setColumnNullable(TOPOLOGY_REQUEST_TABLE, TOPOLOGY_REQUEST_CLUSTER_ID_COLUMN, false);
    // dbAccessor.addFKConstraint(TOPOLOGY_REQUEST_TABLE, TOPOLOGY_REQUEST_CLUSTER_ID_FK_CONSTRAINT_NAME,
    //  TOPOLOGY_REQUEST_CLUSTER_ID_COLUMN, CLUSTERS_TABLE, CLUSTERS_TABLE_CLUSTER_ID_COLUMN, false);
  }

  /**
   * {@inheritDoc}
   */
  @Override
  protected void executePreDMLUpdates() throws AmbariException, SQLException {
    // Guard makes this phase idempotent: once the cluster_name column is gone,
    // the backfill + DDL finalization has already run.
    if (dbAccessor.tableHasColumn(TOPOLOGY_REQUEST_TABLE, TOPOLOGY_REQUEST_CLUSTER_NAME_COLUMN)) {
      addClusterIdToTopology();
      finilizeTopologyDDL();
    } else {
      LOG.debug("The column: [ {} ] has already been dropped from table: [ {} ]. Skipping preDMLUpdate logic.",
          TOPOLOGY_REQUEST_CLUSTER_NAME_COLUMN, TOPOLOGY_REQUEST_TABLE);
    }
  }

  /**
   * Drops the now-redundant cluster_name column and tightens cluster_id to
   * NOT NULL with a FK to clusters.
   *
   * <p>NOTE(review): method name misspells "finalize"; kept as-is because it is
   * protected and may be referenced by subclasses/tests.
   */
  protected void finilizeTopologyDDL() throws AmbariException, SQLException {
    dbAccessor.dropColumn(TOPOLOGY_REQUEST_TABLE, TOPOLOGY_REQUEST_CLUSTER_NAME_COLUMN);
    dbAccessor.setColumnNullable(TOPOLOGY_REQUEST_TABLE, TOPOLOGY_REQUEST_CLUSTER_ID_COLUMN, false);
    dbAccessor.addFKConstraint(TOPOLOGY_REQUEST_TABLE, TOPOLOGY_REQUEST_CLUSTER_ID_FK_CONSTRAINT_NAME,
      TOPOLOGY_REQUEST_CLUSTER_ID_COLUMN, CLUSTERS_TABLE, CLUSTERS_TABLE_CLUSTER_ID_COLUMN, false);
  }

  /**
   * {@inheritDoc}
   */
  @Override
  protected void executeDMLUpdates() throws AmbariException, SQLException {
    addNewConfigurationsFromXml();
    addMissingConfigs();
  }

  /**
   * Backfills topology_request.cluster_id from the clusters table by matching
   * on cluster name; rows whose name no longer matches any cluster (e.g. after
   * a rename) fall back to an arbitrary known cluster id.
   */
  protected void addClusterIdToTopology() throws AmbariException, SQLException {
    Map<String, Long> clusterNameIdMap = new HashMap<String, Long>();
    try (Statement statement = dbAccessor.getConnection().createStatement();
         ResultSet rs = statement.executeQuery("SELECT DISTINCT cluster_name, cluster_id FROM clusters");
    ) {
      while (rs.next()) {
        long clusterId = rs.getLong("cluster_id");
        String clusterName = rs.getString("cluster_name");
        clusterNameIdMap.put(clusterName, clusterId);
      }
    }
    for (String clusterName : clusterNameIdMap.keySet()) {
      try (PreparedStatement preparedStatement = dbAccessor.getConnection().prepareStatement("UPDATE topology_request " +
          "SET cluster_id=? WHERE cluster_name=?");
      ) {
        preparedStatement.setLong(1, clusterNameIdMap.get(clusterName));
        preparedStatement.setString(2, clusterName);
        preparedStatement.executeUpdate();
      }
    }
    // Set cluster id for all null values.
    // Useful if cluster was renamed and cluster name does not match.
    if (clusterNameIdMap.entrySet().size() >= 1) {
      try (PreparedStatement preparedStatement = dbAccessor.getConnection().prepareStatement("UPDATE topology_request " +
          "SET cluster_id=? WHERE cluster_id IS NULL");
      ) {
        preparedStatement.setLong(1, clusterNameIdMap.entrySet().iterator().next().getValue());
        preparedStatement.executeUpdate();
      }
    }
    if (clusterNameIdMap.entrySet().size() == 0) {
      LOG.warn("Cluster not found. topology_request.cluster_id is not set");
    }
    if (clusterNameIdMap.entrySet().size() > 1) {
      LOG.warn("Found more than one cluster. topology_request.cluster_id can be incorrect if you have renamed the cluster.");
    }
  }

  /**
   * Runs all per-service configuration fix-ups for 2.1.2.
   */
  protected void addMissingConfigs() throws AmbariException {
    updateHiveConfigs();
    updateOozieConfigs();
    updateHbaseAndClusterConfigurations();
    updateKafkaConfigurations();
    updateStormConfigs();
    removeDataDirMountConfig();
  }

  /**
   * Removes the obsolete topology.metrics.consumer.register property from
   * storm-site on every cluster.
   */
  protected void updateStormConfigs() throws AmbariException {
    AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
    Clusters clusters = ambariManagementController.getClusters();

    if (clusters != null) {
      Map<String, Cluster> clusterMap = clusters.getClusters();
      if ((clusterMap != null) && !clusterMap.isEmpty()) {
        // Iterate through the clusters and perform any configuration updates
        for (final Cluster cluster : clusterMap.values()) {
          Set<String> removes = new HashSet<String>();
          removes.add("topology.metrics.consumer.register");
          updateConfigurationPropertiesForCluster(cluster, "storm-site",
                  new HashMap<String, String>(), removes, false, false);
        }
      }
    }
  }

  /**
   * Adds the default external Kafka metrics include/exclude prefixes to
   * kafka-broker.
   */
  protected void updateKafkaConfigurations() throws AmbariException {
    Map<String, String> properties = new HashMap<>();
    properties.put("external.kafka.metrics.exclude.prefix",
            "kafka.network.RequestMetrics,kafka.server.DelayedOperationPurgatory," +
                    "kafka.server.BrokerTopicMetrics.BytesRejectedPerSec");
    properties.put("external.kafka.metrics.include.prefix",
            "kafka.network.RequestMetrics.ResponseQueueTimeMs.request.OffsetCommit.98percentile," +
                    "kafka.network.RequestMetrics.ResponseQueueTimeMs.request.Offsets.95percentile," +
                    "kafka.network.RequestMetrics.ResponseSendTimeMs.request.Fetch.95percentile," +
                    "kafka.network.RequestMetrics.RequestsPerSec.request");

    updateConfigurationProperties("kafka-broker", properties, false, false);
  }

  /**
   * Moves override_hbase_uid (hbase-env) to override_uid (cluster-env) and
   * strips a trailing "m" suffix from hbase.bucketcache.size in hbase-site.
   */
  protected void updateHbaseAndClusterConfigurations() throws AmbariException {
    AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
    Clusters clusters = ambariManagementController.getClusters();

    if (clusters != null) {
      Map<String, Cluster> clusterMap = clusters.getClusters();

      if ((clusterMap != null) && !clusterMap.isEmpty()) {
        // Iterate through the clusters and perform any configuration updates
        for (final Cluster cluster : clusterMap.values()) {
          Config hbaseEnvProps = cluster.getDesiredConfigByType(HBASE_ENV);
          Config hbaseSiteProps = cluster.getDesiredConfigByType(HBASE_SITE);

          if (hbaseEnvProps != null) {
            // Remove override_hbase_uid from hbase-env and add override_uid to cluster-env
            String value = hbaseEnvProps.getProperties().get("override_hbase_uid");
            if (value != null) {
              Map<String, String> updates = new HashMap<String, String>();
              Set<String> removes = new HashSet<String>();
              updates.put("override_uid", value);
              removes.add("override_hbase_uid");
              updateConfigurationPropertiesForCluster(cluster, HBASE_ENV, new HashMap<String, String>(), removes, false, true);
              updateConfigurationPropertiesForCluster(cluster, CLUSTER_ENV, updates, true, false);
            }
          }

          if (hbaseSiteProps != null) {
            String value = hbaseSiteProps.getProperties().get("hbase.bucketcache.size");
            if (value != null) {
              // Value like "1024m": drop the suffix so the property is numeric.
              if (value.endsWith("m")) {
                value = value.substring(0, value.length() - 1);
                Map<String, String> updates = new HashMap<String, String>();
                updates.put("hbase.bucketcache.size", value);
                updateConfigurationPropertiesForCluster(cluster, HBASE_SITE, updates, true, false);
              }
            }
          }
        }
      }
    }
  }

  /**
   * For HDP 2.2+ clusters: rewrites the hive-env content template (removing
   * the old HADOOP_HEAPSIZE export block) and drops obsolete hive-site
   * properties.
   */
  protected void updateHiveConfigs() throws AmbariException {
    AmbariManagementController ambariManagementController = injector.getInstance(
            AmbariManagementController.class);
    Clusters clusters = ambariManagementController.getClusters();

    if (clusters != null) {
      Map<String, Cluster> clusterMap = clusters.getClusters();
      if (clusterMap != null && !clusterMap.isEmpty()) {
        for (final Cluster cluster : clusterMap.values()) {
          String content = null;
          Boolean isHiveSitePresent = cluster.getDesiredConfigByType(HIVE_SITE) != null;
          StackId stackId = cluster.getCurrentStackVersion();
          // Only HDP stacks at version 2.2 or later get these updates.
          Boolean isStackNotLess22 = (stackId != null && stackId.getStackName().equals("HDP") &&
                  VersionUtils.compareVersions(stackId.getStackVersion(), "2.2") >= 0);

          if (cluster.getDesiredConfigByType(HIVE_ENV) != null && isStackNotLess22) {
            Map<String, String> hiveEnvProps = new HashMap<String, String>();
            content = cluster.getDesiredConfigByType(HIVE_ENV).getProperties().get("content");
            if(content != null) {
              content = updateHiveEnvContent(content);
              hiveEnvProps.put("content", content);
            }
            updateConfigurationPropertiesForCluster(cluster, HIVE_ENV, hiveEnvProps, true, true);
          }

          if (isHiveSitePresent && isStackNotLess22) {
            Set<String> hiveSiteRemoveProps = new HashSet<String>();
            hiveSiteRemoveProps.add("hive.heapsize");
            hiveSiteRemoveProps.add("hive.optimize.mapjoin.mapreduce");
            hiveSiteRemoveProps.add("hive.server2.enable.impersonation");
            hiveSiteRemoveProps.add("hive.auto.convert.sortmerge.join.noconditionaltask");

            updateConfigurationPropertiesForCluster(cluster, HIVE_SITE, new HashMap<String, String>(), hiveSiteRemoveProps, false, true);
          }
        }
      }
    }
  }

  /**
   * Repairs an empty oozie_hostname left behind by a 1.7 -&gt; 2.1.x upgrade by
   * copying the host from the database-type-specific property.
   */
  protected void updateOozieConfigs() throws AmbariException {
    AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
    Clusters clusters = ambariManagementController.getClusters();

    if (clusters != null) {
      Map<String, Cluster> clusterMap = clusters.getClusters();
      if (clusterMap != null && !clusterMap.isEmpty()) {
        for (final Cluster cluster : clusterMap.values()) {
          Config oozieEnv = cluster.getDesiredConfigByType(OOZIE_ENV);
          if (oozieEnv != null) {
            Map<String, String> oozieEnvProperties = oozieEnv.getProperties();

            String hostname = oozieEnvProperties.get("oozie_hostname");
            String db_type = oozieEnvProperties.get("oozie_database");
            String final_db_host = null;
            // fix for empty hostname after 1.7 -> 2.1.x+ upgrade
            if (hostname != null && db_type != null && hostname.equals("")) {
              switch (db_type.toUpperCase()) {
                case "EXISTING MYSQL DATABASE":
                  final_db_host = oozieEnvProperties.get("oozie_existing_mysql_host");
                  break;
                case "EXISTING POSTGRESQL DATABASE":
                  final_db_host = oozieEnvProperties.get("oozie_existing_postgresql_host");
                  break;
                case "EXISTING ORACLE DATABASE":
                  final_db_host = oozieEnvProperties.get("oozie_existing_oracle_host");
                  break;
                default:
                  final_db_host = null;
                  break;
              }
              if (final_db_host != null) {
                Map<String, String> newProperties = new HashMap<>();
                newProperties.put("oozie_hostname", final_db_host);
                updateConfigurationPropertiesForCluster(cluster, OOZIE_ENV, newProperties, true, true);
              }
            }
          }
        }
      }
    }
  }

  /**
   * Strips the legacy HADOOP_HEAPSIZE/HADOOP_CLIENT_OPTS export block from the
   * hive-env content template.
   *
   * @param hiveEnvContent current template text; may be null.
   * @return the rewritten template, or null if the input was null.
   */
  protected String updateHiveEnvContent(String hiveEnvContent) {
    if(hiveEnvContent == null) {
      return null;
    }

    String oldHeapSizeRegex = "export HADOOP_HEAPSIZE=\"\\{\\{hive_heapsize\\}\\}\"\\s*\\n" +
            "export HADOOP_CLIENT_OPTS=\"-Xmx\\$\\{HADOOP_HEAPSIZE\\}m \\$HADOOP_CLIENT_OPTS\"";
    String newAuxJarPath = "";
    // Matcher.quoteReplacement guards against '$'/'\' being treated as
    // group references in the replacement string.
    return hiveEnvContent.replaceAll(oldHeapSizeRegex, Matcher.quoteReplacement(newAuxJarPath));
  }

  /**
   * DDL changes for {@link #HOST_ROLE_COMMAND_TABLE}.
   *
   * @throws AmbariException
   * @throws SQLException
   */
  private void executeHostRoleCommandDDLUpdates() throws AmbariException, SQLException {
    dbAccessor.addColumn(HOST_ROLE_COMMAND_TABLE,
        new DBColumnInfo(HOST_ROLE_COMMAND_SKIP_COLUMN, Integer.class, 1, 0, false));
  }

  /**
   * Removes the dfs.datanode.data.dir.mount.file property from hadoop-env on
   * every cluster.
   */
  protected void removeDataDirMountConfig() throws AmbariException {
    Set<String> properties = new HashSet<>();
    properties.add("dfs.datanode.data.dir.mount.file");

    AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
    Clusters clusters = ambariManagementController.getClusters();

    if (clusters != null) {
      Map<String, Cluster> clusterMap = clusters.getClusters();
      if (clusterMap != null && !clusterMap.isEmpty()) {
        for (final Cluster cluster : clusterMap.values()) {
          removeConfigurationPropertiesFromCluster(cluster, "hadoop-env", properties);
        }
      }
    }
  }
}
| |
package org.jgroups.tests;
import org.jgroups.Address;
import org.jgroups.Global;
import org.jgroups.JChannel;
import org.jgroups.Message;
import org.jgroups.blocks.MessageDispatcher;
import org.jgroups.blocks.RequestOptions;
import org.jgroups.logging.Log;
import org.jgroups.logging.LogFactory;
import org.jgroups.protocols.*;
import org.jgroups.protocols.pbcast.GMS;
import org.jgroups.protocols.pbcast.NAKACK2;
import org.jgroups.stack.DiagnosticsHandler;
import org.jgroups.stack.ProtocolStack;
import org.jgroups.util.*;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.io.IOException;
import java.net.InetAddress;
import java.util.Collections;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
 * Tests the {@link org.jgroups.protocols.RSVP} protocol
 * @author Dan Berindei
 * @author Bela Ban
 */
@Test(groups=Global.FUNCTIONAL,singleThreaded=true)
public class MessageDispatcherRSVPTest {
    protected static final int NUM=2; // number of members
    protected final JChannel[]          channels=new JChannel[NUM];
    protected final MessageDispatcher[] dispatchers=new MessageDispatcher[NUM];
    protected MyDiagnosticsHandler      handler;
    protected ThreadPoolExecutor        oob_thread_pool;
    protected ThreadPoolExecutor        thread_pool;

    /**
     * Builds NUM channels on a SHARED_LOOPBACK stack (with DISCARD and RSVP
     * inserted), wraps each in a MessageDispatcher and connects them all to
     * the same cluster.
     */
    @BeforeMethod
    void setUp() throws Exception {
        handler=new MyDiagnosticsHandler(InetAddress.getByName("224.0.75.75"), 7500,
                                         LogFactory.getLog(DiagnosticsHandler.class),
                                         new DefaultSocketFactory(),
                                         new DefaultThreadFactory("", false));
        handler.start();

        // Shared timer and thread pools for all channels in this test.
        TimeScheduler timer=new TimeScheduler3(new DefaultThreadFactory("Timer", true, true),
                                               5,20,
                                               3000, 5000, "abort");

        oob_thread_pool=new ThreadPoolExecutor(5, Math.max(5, NUM/4), 3000, TimeUnit.MILLISECONDS,
                                               new ArrayBlockingQueue<>(NUM * NUM));
        oob_thread_pool.setRejectedExecutionHandler(new ThreadPoolExecutor.DiscardPolicy());

        thread_pool=new ThreadPoolExecutor(5, Math.max(5, NUM/4), 3000, TimeUnit.MILLISECONDS,
                                           new ArrayBlockingQueue<>(NUM * NUM));
        thread_pool.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());

        System.out.print("Connecting channels: ");
        for(int i=0; i < NUM; i++) {
            SHARED_LOOPBACK shared_loopback=new SHARED_LOOPBACK();
            shared_loopback.setTimer(timer);
            shared_loopback.setOOBThreadPool(oob_thread_pool);
            shared_loopback.setDefaultThreadPool(thread_pool);
            shared_loopback.setDiagnosticsHandler(handler);

            // RSVP is configured to throw on timeout so cancellation can be asserted.
            channels[i]=new JChannel(shared_loopback,
                                     new DISCARD(),
                                     new SHARED_LOOPBACK_PING(),
                                     new MERGE3().setValue("min_interval", 1000).setValue("max_interval", 3000),
                                     new NAKACK2().setValue("use_mcast_xmit",false)
                                       .setValue("discard_delivered_msgs",true)
                                       .setValue("log_discard_msgs", false).setValue("log_not_found_msgs", false),
                                     new UNICAST3().setValue("xmit_table_num_rows",5).setValue("xmit_interval", 300),
                                     new RSVP().setValue("timeout", 10000).setValue("throw_exception_on_timeout", true),
                                     new GMS().setValue("print_local_addr",false)
                                       .setValue("leave_timeout",100).setValue("join_timeout", 500)
                                       .setValue("log_view_warnings",false)
                                       .setValue("view_ack_collection_timeout",2000)
                                       .setValue("log_collect_msgs",false));
            channels[i].setName(String.valueOf((i + 1)));
            dispatchers[i]=new MessageDispatcher(channels[i]);
            channels[i].connect("MessageDispatcherRSVPTest");
            System.out.print(i + 1 + " ");
            // Give the first (coordinator) channel time to establish the view.
            if(i == 0)
                Util.sleep(1000);
        }
        Util.waitUntilAllChannelsHaveSameSize(30000, 1000, channels);
        System.out.println("");
    }

    /** Tears the stacks down in reverse order and destroys the diagnostics handler. */
    @AfterMethod
    void tearDown() throws Exception {
        for(int i=NUM-1; i >= 0; i--) {
            ProtocolStack stack=channels[i].getProtocolStack();
            String cluster_name=channels[i].getClusterName();
            stack.stopStack(cluster_name);
            stack.destroy();
        }
        handler.destroy();
    }

    /**
     * First send a message, drop it (using DISCARD) and then close the channel. The caller invoking castMessage() should
     * get an exception, as the channel was closed
     */
    public void testCancellationByClosingChannel() throws Exception {
        testCancellationByClosing(false, // multicast
                                  new Closer(channels[0]));
    }

    public void testCancellationByClosingChannelUnicast() throws Exception {
        testCancellationByClosing(true, // unicast
                                  new Closer(channels[0]));
    }

    /**
     * Sends a message via the MessageDispatcher on a closed channel. This should immediately throw an exception.
     */
    public void testSendingMessageOnClosedChannel() throws Exception {
        // unicast
        sendMessageOnClosedChannel(channels[1].getAddress());

        // multicast
        sendMessageOnClosedChannel(null);
    }

    public void testSendingMessageOnClosedChannelRSVP() throws Exception {
        // unicast
        sendMessageOnClosedChannel(channels[1].getAddress(), Message.Flag.RSVP);

        // multicast
        sendMessageOnClosedChannel(null, Message.Flag.RSVP);
    }

    /**
     * Blocks an RSVP-flagged send by discarding all traffic, then lets the
     * Closer thread close the sending channel; the blocked call must either
     * throw IllegalStateException (unicast) or report it in the RspList
     * (multicast).
     */
    protected void testCancellationByClosing(boolean unicast, Thread closer) throws Exception {
        // Drop every message so the RSVP ack can never arrive and the call blocks.
        DISCARD discard=channels[0].getProtocolStack().findProtocol(DISCARD.class);
        discard.setDiscardAll(true);

        try {
            Address target=unicast? channels[1].getAddress() : null;
            byte[] data="bla".getBytes();
            Buffer buf=new Buffer(data, 0, data.length);
            Message msg=new Message(target, "bla");
            msg.setFlag(Message.Flag.RSVP);
            closer.start();
            if(unicast) {
                System.out.println("sending unicast message to " + target);
                dispatchers[0].sendMessage(target, buf, RequestOptions.SYNC().flags(Message.Flag.RSVP));
                assert false: "sending the message on a closed channel should have thrown an exception";
            }
            else {
                System.out.println("sending multicast message");
                // Multicast path: the closure surfaces as an exception inside the response, not as a throw.
                RspList<Object> rsps=dispatchers[0].castMessage(Collections.singleton(channels[1].getAddress()),buf,RequestOptions.SYNC());
                System.out.println("rsps = " + rsps);
                assert rsps.size() == 1;
                Rsp<Object> rsp=rsps.iterator().next();
                System.out.println("rsp = " + rsp);
                assert rsp.hasException();
                Throwable ex=rsp.getException();
                assert ex instanceof IllegalStateException;
            }
        }
        catch(IllegalStateException t) {
            System.out.println("received \"" + t + "\" as expected");
        }
    }

    /**
     * Closes channel[0] first, then verifies that both unicast and multicast
     * dispatch immediately throw IllegalStateException.
     *
     * @param dest unicast destination, or null for multicast.
     */
    protected void sendMessageOnClosedChannel(Address dest, Message.Flag... flags) throws Exception {
        RequestOptions opts=RequestOptions.SYNC().timeout(2000).flags(flags);
        byte[] data="bla".getBytes();
        Buffer buf=new Buffer(data, 0, data.length);
        channels[0].close();
        try {
            if(dest == null) // multicast
                dispatchers[0].castMessage(Collections.singleton(channels[1].getAddress()), buf, opts);
            else
                dispatchers[0].sendMessage(dest, buf, opts);
            assert false: "sending the message on a closed channel should have thrown an exception";
        }
        catch(IllegalStateException t) {
            System.out.println("received \"" + t + "\" as expected");
        }
    }

    /** Helper thread that closes the given channel after a 2s delay. */
    protected static class Closer extends Thread {
        protected final JChannel ch;

        public Closer(JChannel ch) {this.ch=ch;}

        public void run() {
            Util.sleep(2000);
            System.out.println("closing channel");
            Util.close(ch);
        };
    }

    /** DiagnosticsHandler whose stop() is a no-op so it survives stack teardown; use destroy() to really stop it. */
    protected static class MyDiagnosticsHandler extends DiagnosticsHandler {
        protected MyDiagnosticsHandler(InetAddress diagnostics_addr, int diagnostics_port, Log log, SocketFactory socket_factory, ThreadFactory thread_factory) {
            super(diagnostics_addr,diagnostics_port,log,socket_factory,thread_factory);
        }
        public void start() throws IOException {super.start();}
        public void stop() {}
        public void destroy() {super.stop();}
    }
}
;
| |
package org.motechproject.mds.web.service.impl;
import javassist.CannotCompileException;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.reflect.FieldUtils;
import org.apache.commons.lang.reflect.MethodUtils;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.motechproject.commons.date.model.Time;
import org.motechproject.mds.dto.EntityDto;
import org.motechproject.mds.dto.FieldDto;
import org.motechproject.mds.dto.FieldInstanceDto;
import org.motechproject.mds.dto.LookupDto;
import org.motechproject.mds.dto.MetadataDto;
import org.motechproject.mds.dto.TypeDto;
import org.motechproject.mds.ex.entity.EntityInstancesNonEditableException;
import org.motechproject.mds.ex.entity.EntityNotFoundException;
import org.motechproject.mds.ex.entity.EntitySchemaMismatchException;
import org.motechproject.mds.ex.field.FieldReadOnlyException;
import org.motechproject.mds.ex.lookup.LookupExecutionException;
import org.motechproject.mds.ex.lookup.LookupNotFoundException;
import org.motechproject.mds.ex.object.ObjectCreateException;
import org.motechproject.mds.ex.object.ObjectNotFoundException;
import org.motechproject.mds.ex.object.ObjectReadException;
import org.motechproject.mds.ex.object.ObjectUpdateException;
import org.motechproject.mds.ex.object.RevertFromTrashException;
import org.motechproject.mds.ex.object.SecurityException;
import org.motechproject.mds.filter.Filters;
import org.motechproject.mds.helper.DataServiceHelper;
import org.motechproject.mds.lookup.LookupExecutor;
import org.motechproject.mds.query.QueryParams;
import org.motechproject.mds.service.EntityService;
import org.motechproject.mds.service.HistoryService;
import org.motechproject.mds.service.MotechDataService;
import org.motechproject.mds.service.TrashService;
import org.motechproject.mds.service.TypeService;
import org.motechproject.mds.service.impl.history.HistoryTrashClassHelper;
import org.motechproject.mds.util.ClassName;
import org.motechproject.mds.util.Constants;
import org.motechproject.mds.util.MDSClassLoader;
import org.motechproject.mds.util.MemberUtil;
import org.motechproject.mds.util.PropertyUtil;
import org.motechproject.mds.util.SecurityMode;
import org.motechproject.mds.util.TypeHelper;
import org.motechproject.mds.web.domain.ComboboxHolder;
import org.motechproject.mds.web.domain.EntityRecord;
import org.motechproject.mds.web.domain.FieldRecord;
import org.motechproject.mds.web.domain.HistoryRecord;
import org.motechproject.mds.web.service.InstanceService;
import org.motechproject.osgi.web.util.WebBundleUtil;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import static org.motechproject.mds.util.Constants.MetadataKeys.MAP_KEY_TYPE;
import static org.motechproject.mds.util.Constants.MetadataKeys.MAP_VALUE_TYPE;
import static org.motechproject.mds.util.Constants.MetadataKeys.RELATED_CLASS;
import static org.motechproject.mds.util.SecurityUtil.getUserPermissions;
import static org.motechproject.mds.util.SecurityUtil.getUsername;
/**
* Default implementation of the {@link org.motechproject.mds.web.service.InstanceService} interface.
*/
@Service
public class InstanceServiceImpl implements InstanceService {
    private static final Logger LOGGER = LoggerFactory.getLogger(InstanceServiceImpl.class);

    // Formatter used when rendering date/time field values in the UI.
    private static final DateTimeFormatter DTF = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm Z");

    // Name of the primary-key property used for instance lookups.
    private static final String ID = "id";

    // Maximum length of an instance's string representation shown in the UI.
    private static final Integer TO_STRING_MAX_LENGTH = 80;

    // Collaborating MDS services; injected via setters (presumably — setters not
    // visible in this chunk, TODO confirm).
    private EntityService entityService;
    private BundleContext bundleContext;
    private HistoryService historyService;
    private TrashService trashService;
    private TypeService typeService;
    /**
     * Saves the given record, creating or updating the underlying instance.
     * Delegates to {@link #saveInstance(EntityRecord, Long)} with no field to clear.
     *
     * @param entityRecord the record to persist.
     * @return the created or updated instance.
     */
    @Override
    @Transactional
    public Object saveInstance(EntityRecord entityRecord) {
        return saveInstance(entityRecord, null);
    }
@Override
@Transactional
public Object saveInstance(EntityRecord entityRecord, Long deleteValueFieldId) {
EntityDto entity = getEntity(entityRecord.getEntitySchemaId());
validateCredentials(entity);
validateNonEditableProperty(entity);
try {
MotechDataService service = getServiceForEntity(entity);
Class<?> entityClass = getEntityClass(entity);
boolean newObject = entityRecord.getId() == null;
Object instance;
if (newObject) {
instance = entityClass.newInstance();
} else {
instance = service.retrieve(ID, entityRecord.getId());
if (instance == null) {
throw new ObjectNotFoundException(entity.getName(), entityRecord.getId());
}
}
updateFields(instance, entityRecord.getFields(), service, deleteValueFieldId, !newObject);
if (newObject) {
return service.create(instance);
} else {
return service.update(instance);
}
} catch (Exception e) {
if (entityRecord.getId() == null) {
throw new ObjectCreateException(entity.getName(), e);
} else {
throw new ObjectUpdateException(entity.getName(), entityRecord.getId(), e);
}
}
}
@Override
public List<EntityRecord> getEntityRecords(Long entityId) {
    // No paging or sorting requested.
    return getEntityRecords(entityId, null);
}

@Override
public List<EntityRecord> getEntityRecords(Long entityId, QueryParams queryParams) {
    // Returns all instances of the entity as UI-ready records, honouring paging.
    EntityDto entity = getEntity(entityId);
    validateCredentials(entity);
    List<FieldDto> fields = entityService.getEntityFields(entityId);
    MotechDataService service = getServiceForEntity(entity);
    List instances = service.retrieveAll(queryParams);
    return instancesToRecords(instances, entity, fields, service);
}

@Override
public List<FieldDto> getEntityFields(Long entityId) {
    // Field metadata only; entity access is still verified first.
    validateCredentials(getEntity(entityId));
    return entityService.getEntityFields(entityId);
}

@Override
public List<EntityRecord> getTrashRecords(Long entityId, QueryParams queryParams) {
    EntityDto entity = getEntity(entityId);
    validateCredentials(entity);
    MotechDataService service = getServiceForEntity(entity);
    List<FieldDto> fields = entityService.getEntityFields(entityId);
    // trashed instances are managed by the trash service, keyed by class name
    Collection collection = trashService.getInstancesFromTrash(entity.getClassName(), queryParams);
    return instancesToRecords(collection, entity, fields, service);
}

@Override
public long countTrashRecords(Long entityId) {
    EntityDto entity = getEntity(entityId);
    validateCredentials(entity);
    return trashService.countTrashRecords(entity.getClassName());
}

@Override
public EntityRecord getSingleTrashRecord(Long entityId, Long instanceId) {
    EntityDto entity = getEntity(entityId);
    validateCredentials(entity);
    MotechDataService service = getServiceForEntity(entity);
    List<FieldDto> fields = entityService.getEntityFields(entityId);
    Object instance = trashService.findTrashById(instanceId, entityId);
    // instanceToRecord returns null when no trash instance was found
    return instanceToRecord(instance, entity, fields, service);
}

@Override
public Object getInstanceField(Long entityId, Long instanceId, String fieldName) {
    EntityDto entity = getEntity(entityId);
    MotechDataService service = getServiceForEntity(entity);
    validateCredentials(entity);
    Object instance = service.retrieve(ID, instanceId);
    // getDetachedField handles fields not loaded on the instance (see getProperty)
    return service.getDetachedField(instance, fieldName);
}
/**
 * Executes a named lookup and converts its result to UI records. A
 * single-object lookup yields a zero- or one-element list; multi-object
 * lookups yield a record per returned instance.
 */
@Override
public List<EntityRecord> getEntityRecordsFromLookup(Long entityId, String lookupName, Map<String, Object> lookupMap,
                                                     QueryParams queryParams) {
    EntityDto entity = getEntity(entityId);
    validateCredentials(entity);
    LookupDto lookup = getLookupByName(entityId, lookupName);
    List<FieldDto> fields = entityService.getEntityFields(entityId);
    Map<String, FieldDto> fieldMap = entityService.getLookupFieldsMapping(entityId, lookupName);
    MotechDataService service = getServiceForEntity(entity);
    try {
        LookupExecutor lookupExecutor = new LookupExecutor(service, lookup, fieldMap);
        Object result = lookupExecutor.execute(lookupMap, queryParams);
        if (lookup.isSingleObjectReturn()) {
            // null result (no match) maps to an empty list, not null
            EntityRecord record = instanceToRecord(result, entity, fields, service);
            return (record == null) ? new ArrayList<EntityRecord>() : Collections.singletonList(record);
        } else {
            List instances = (List) result;
            return instancesToRecords(instances, entity, fields, service);
        }
    } catch (RuntimeException e) {
        // surface any lookup failure under one descriptive exception type
        throw new LookupExecutionException(e);
    }
}
@Override
public List<EntityRecord> getEntityRecordsWithFilter(Long entityId, Filters filters, QueryParams queryParams) {
    // Filtered listing; the filtering itself is delegated to the data service.
    EntityDto entity = getEntity(entityId);
    validateCredentials(entity);
    List<FieldDto> fields = entityService.getEntityFields(entityId);
    MotechDataService service = getServiceForEntity(entity);
    List instances = service.filter(filters, queryParams);
    return instancesToRecords(instances, entity, fields, service);
}

@Override
public long countRecordsWithFilters(Long entityId, Filters filters) {
    EntityDto entity = getEntity(entityId);
    validateCredentials(entity);
    MotechDataService service = getServiceForEntity(entity);
    return service.countForFilters(filters);
}

@Override
public long countRecords(Long entityId) {
    // Total instance count for the entity.
    EntityDto entity = getEntity(entityId);
    validateCredentials(entity);
    MotechDataService service = getServiceForEntity(entity);
    return service.count();
}
@Override
public long countRecordsByLookup(Long entityId, String lookupName, Map<String, Object> lookupMap) {
    // Counting reuses the same lookup machinery as record retrieval.
    EntityDto entity = getEntity(entityId);
    validateCredentials(entity);
    LookupDto lookup = getLookupByName(entityId, lookupName);
    MotechDataService dataService = getServiceForEntity(entity);
    Map<String, FieldDto> lookupFields = entityService.getLookupFieldsMapping(entityId, lookupName);
    try {
        return new LookupExecutor(dataService, lookup, lookupFields).executeCount(lookupMap);
    } catch (RuntimeException e) {
        // surface lookup failures under one descriptive exception type
        throw new LookupExecutionException(e);
    }
}
@Override
public void revertPreviousVersion(Long entityId, Long instanceId, Long historyId) {
    // Credential checks run inside getHistoryRecord and saveInstance below;
    // only the editability check is needed up front.
    validateNonEditableProperty(getEntity(entityId));
    HistoryRecord historyRecord = getHistoryRecord(entityId, instanceId, historyId);
    if (!historyRecord.isRevertable()) {
        // presumably not revertable because the historical record was written
        // under a different schema version (see convertToHistoryRecord)
        EntityDto entity = getEntity(entityId);
        throw new EntitySchemaMismatchException(entity.getName());
    }
    // reverting == re-saving the instance with the historical field values
    saveInstance(new EntityRecord(instanceId, entityId, historyRecord.getFields()));
}
@Override
public List<FieldInstanceDto> getInstanceFields(Long entityId, Long instanceId) {
    // Access check first; the field list itself comes from the entity schema.
    EntityDto entity = getEntity(entityId);
    validateCredentials(entity);
    List<FieldInstanceDto> instanceFields = new ArrayList<>();
    for (FieldDto fieldDto : entityService.getEntityFields(entityId)) {
        instanceFields.add(new FieldInstanceDto(fieldDto.getId(), instanceId, fieldDto.getBasic()));
    }
    return instanceFields;
}
@Override
public List<HistoryRecord> getInstanceHistory(Long entityId, Long instanceId, QueryParams queryParams) {
    EntityDto entity = getEntity(entityId);
    validateCredentials(entity);
    MotechDataService service = getServiceForEntity(entity);
    // history is resolved from the live instance, not from the raw id
    Object instance = service.retrieve(ID, instanceId);
    List history = historyService.getHistoryForInstance(instance, queryParams);
    List<HistoryRecord> result = new ArrayList<>();
    for (Object o : history) {
        result.add(convertToHistoryRecord(o, entity, instanceId, service));
    }
    return result;
}

@Override
public long countHistoryRecords(Long entityId, Long instanceId) {
    EntityDto entity = getEntity(entityId);
    validateCredentials(entity);
    MotechDataService service = getServiceForEntity(entity);
    Object instance = service.retrieve(ID, instanceId);
    return historyService.countHistoryRecords(instance);
}

@Override
public HistoryRecord getHistoryRecord(Long entityId, Long instanceId, Long historyId) {
    EntityDto entity = getEntity(entityId);
    validateCredentials(entity);
    MotechDataService service = getServiceForEntity(entity);
    Object instance = service.retrieve(ID, instanceId);
    Object historyInstance = historyService.getSingleHistoryInstance(instance, historyId);
    return convertToHistoryRecord(historyInstance, entity, instanceId, service);
}
@Override
public EntityRecord newInstance(Long entityId) {
    // Build a blank record containing one empty field record per entity field.
    validateCredentials(getEntity(entityId));
    List<FieldRecord> blankFields = new ArrayList<>();
    for (FieldDto fieldDto : entityService.getEntityFields(entityId)) {
        blankFields.add(new FieldRecord(fieldDto));
    }
    // creator/owner fields default to the currently authenticated user
    populateDefaultFields(blankFields);
    return new EntityRecord(null, entityId, blankFields);
}
@Override
public EntityRecord getEntityInstance(Long entityId, Long instanceId) {
    EntityDto entity = getEntity(entityId);
    validateCredentials(entity);
    MotechDataService dataService = getServiceForEntity(entity);
    Object loaded = dataService.retrieve(ID, instanceId);
    if (loaded == null) {
        // unlike the list methods, a missing single instance is an error
        throw new ObjectNotFoundException(entity.getName(), instanceId);
    }
    return instanceToRecord(loaded, entity, entityService.getEntityFields(entityId), dataService);
}
@Override
public FieldRecord getInstanceValueAsRelatedField(Long entityId, Long fieldId, Long instanceId) {
    validateCredentials(getEntity(entityId));
    try {
        FieldRecord fieldRecord;
        FieldDto field = entityService.getEntityFieldById(entityId, fieldId);
        // resolve the data service of the RELATED class, not of this entity
        MotechDataService service = DataServiceHelper.getDataService(bundleContext, field.getMetadata(RELATED_CLASS).getValue());
        Object instance = service.findById(instanceId);
        if (instance == null) {
            throw new ObjectNotFoundException(service.getClassType().getName(), instanceId);
        }
        fieldRecord = new FieldRecord(field);
        // strip the back-reference named in RELATED_FIELD metadata (see
        // parseValueForDisplay / removeCircularRelations)
        fieldRecord.setValue(parseValueForDisplay(instance, field.getMetadata(Constants.MetadataKeys.RELATED_FIELD)));
        fieldRecord.setDisplayValue(instance.toString());
        return fieldRecord;
    } catch (IllegalAccessException | NoSuchMethodException | InvocationTargetException e) {
        throw new ObjectReadException(entityId, e);
    }
}
@Override
public void deleteInstance(Long entityId, Long instanceId) {
    // Deletion is refused for non-editable entities and unauthorized users.
    EntityDto entity = getEntity(entityId);
    validateCredentials(entity);
    validateNonEditableProperty(entity);
    getServiceForEntity(entity).delete(ID, instanceId);
}
@Override
public void revertInstanceFromTrash(Long entityId, Long instanceId) {
    EntityDto entity = getEntity(entityId);
    validateCredentials(entity);
    validateNonEditableProperty(entity);
    MotechDataService service = getServiceForEntity(entity);
    Object trash = service.findTrashInstanceById(instanceId, entityId);
    List<FieldRecord> fieldRecords = new LinkedList<>();
    try {
        // copy every field value off the trash object via reflection,
        // skipping the id field
        for (FieldDto field : entityService.getEntityFields(entity.getId())) {
            if ("id".equalsIgnoreCase(field.getBasic().getDisplayName())) {
                continue;
            }
            Field f = FieldUtils.getField(trash.getClass(), StringUtils.uncapitalize(field.getBasic().getName()), true);
            FieldRecord record = new FieldRecord(field);
            record.setValue(f.get(trash));
            fieldRecords.add(record);
        }
        // rebuild a fresh entity instance from the copied values and let the
        // service swap it in for the trashed one
        Class<?> entityClass = getEntityClass(entity);
        Object newInstance = entityClass.newInstance();
        updateFields(newInstance, fieldRecords, service, null);
        service.revertFromTrash(newInstance, trash);
    } catch (Exception e) {
        throw new RevertFromTrashException(entity.getName(), instanceId, e);
    }
}
@Override
public void verifyEntityAccess(Long entityId) {
    // Throws EntityNotFoundException for an unknown id, SecurityException
    // when the current user may not access the entity.
    validateCredentials(getEntity(entityId));
}
// Pre-fills creator/owner fields of a fresh record with the logged-in user.
private void populateDefaultFields(List<FieldRecord> fieldRecords) {
    for (FieldRecord record : fieldRecords) {
        if (Constants.Util.CREATOR_FIELD_NAME.equals(record.getName()) ||
                Constants.Util.OWNER_FIELD_NAME.equals(record.getName())) {
            record.setValue(SecurityContextHolder.getContext().getAuthentication().getName());
        }
    }
}

// Resolves a lookup by name, failing loudly instead of returning null.
private LookupDto getLookupByName(Long entityId, String lookupName) {
    LookupDto lookup = entityService.getLookupByName(entityId, lookupName);
    if (lookup == null) {
        throw new LookupNotFoundException(entityId, lookupName);
    }
    return lookup;
}

// Resolves an entity by id, failing loudly instead of returning null.
private EntityDto getEntity(Long entityId) {
    EntityDto entityDto = entityService.getEntity(entityId);
    if (entityDto == null) {
        throw new EntityNotFoundException(entityId);
    }
    return entityDto;
}
// Looks up the OSGi-registered data service for the entity's class.
private MotechDataService getServiceForEntity(EntityDto entity) {
    String className = entity.getClassName();
    return DataServiceHelper.getDataService(bundleContext, className);
}

private void updateFields(Object instance, List<FieldRecord> fieldRecords, MotechDataService service, Long deleteValueFieldId)
        throws NoSuchMethodException, InstantiationException, NoSuchFieldException, CannotCompileException, IllegalAccessException, ClassNotFoundException {
    // retainId == false: used when populating brand-new instances
    updateFields(instance, fieldRecords, service, deleteValueFieldId, false);
}

/**
 * Writes the given field values onto the instance. Relationship fields go
 * through setRelationProperty, all others through setProperty. When retainId
 * is true the id field record is skipped so the existing id is preserved.
 */
private void updateFields(Object instance, List<FieldRecord> fieldRecords, MotechDataService service,
                          Long deleteValueFieldId, boolean retainId)
        throws NoSuchMethodException, ClassNotFoundException, CannotCompileException, InstantiationException, IllegalAccessException, NoSuchFieldException {
    for (FieldRecord fieldRecord : fieldRecords) {
        if (!(retainId && ID.equals(fieldRecord.getName())) && !fieldRecord.getType().isRelationship()) {
            setProperty(instance, fieldRecord, service, deleteValueFieldId);
        } else if (fieldRecord.getType().isRelationship()) {
            setRelationProperty(instance, fieldRecord);
        }
    }
}
// Straight per-instance mapping; input collection order is preserved.
private List<EntityRecord> instancesToRecords(Collection instances, EntityDto entity, List<FieldDto> fields,
                                              MotechDataService service) {
    List<EntityRecord> records = new ArrayList<>();
    for (Object item : instances) {
        records.add(instanceToRecord(item, entity, fields, service));
    }
    return records;
}
/**
 * Converts a persisted instance into a UI-facing EntityRecord. Relationship
 * fields additionally get a display value: an id-to-string map for 1:N / M:N
 * relations, or a truncated toString() for single-valued relations. Returns
 * null for a null instance; any reflection failure is wrapped in
 * ObjectReadException.
 */
private EntityRecord instanceToRecord(Object instance, EntityDto entityDto, List<FieldDto> fields,
                                      MotechDataService service) {
    if (instance == null) {
        return null;
    }
    try {
        List<FieldRecord> fieldRecords = new ArrayList<>();
        for (FieldDto field : fields) {
            Object value = getProperty(instance, field, service);
            Object displayValueForRelatedInstances = null;
            if (field.getType().isRelationship()) {
                if (field.getType().equals(TypeDto.ONE_TO_MANY_RELATIONSHIP) || field.getType().equals(TypeDto.MANY_TO_MANY_RELATIONSHIP)) {
                    displayValueForRelatedInstances = buildDisplayValuesMap((Collection) value);
                } else if (value != null) {
                    String toStringResult = value.toString();
                    // truncate to at most TO_STRING_MAX_LENGTH characters;
                    // was substring(0, TO_STRING_MAX_LENGTH + 1), an off-by-one
                    displayValueForRelatedInstances = toStringResult.length() > TO_STRING_MAX_LENGTH
                            ? toStringResult.substring(0, TO_STRING_MAX_LENGTH) + "..." : toStringResult;
                }
            }
            // strip circular back-references / format dates for display
            value = parseValueForDisplay(value, field.getMetadata(Constants.MetadataKeys.RELATED_FIELD));
            FieldRecord fieldRecord = new FieldRecord(field);
            fieldRecord.setValue(value);
            fieldRecord.setDisplayValue(displayValueForRelatedInstances);
            fieldRecords.add(fieldRecord);
        }
        Number id = (Number) PropertyUtil.safeGetProperty(instance, ID);
        return new EntityRecord(id == null ? null : id.longValue(), entityDto.getId(), fieldRecords);
    } catch (Exception e) {
        throw new ObjectReadException(entityDto.getName(), e);
    }
}
/**
 * Maps each related instance's id (via its getId accessor) to its toString()
 * form, truncated to TO_STRING_MAX_LENGTH characters with an ellipsis. Used
 * as the display value for 1:N / M:N relationship fields.
 */
private Map<Long, String> buildDisplayValuesMap(Collection values) throws InvocationTargetException, IllegalAccessException {
    Map<Long, String> displayValues = new HashMap<>();
    for (Object obj : values) {
        Method method = MethodUtils.getAccessibleMethod(obj.getClass(), "getId", (Class[]) null);
        Long key = (Long) method.invoke(obj);
        String toStringResult = obj.toString();
        // was substring(0, TO_STRING_MAX_LENGTH + 1) — off-by-one yielding 81 chars
        displayValues.put(key, toStringResult.length() > TO_STRING_MAX_LENGTH
                ? toStringResult.substring(0, TO_STRING_MAX_LENGTH) + "..." : toStringResult);
    }
    return displayValues;
}
// Wraps a history object as a HistoryRecord; the record is revertable only
// when the stored schema version equals the entity's current schema version.
private HistoryRecord convertToHistoryRecord(Object object, EntityDto entity, Long instanceId,
                                             MotechDataService service) {
    Long entityId = entity.getId();
    EntityRecord entityRecord = instanceToRecord(object, entity, entityService.getEntityFields(entityId), service);
    Long historyInstanceSchemaVersion = (Long) PropertyUtil.safeGetProperty(object,
            HistoryTrashClassHelper.schemaVersion(object.getClass()));
    Long currentSchemaVersion = entityService.getCurrentSchemaVersion(entity.getClassName());
    return new HistoryRecord(entityRecord.getId(), instanceId,
            historyInstanceSchemaVersion.equals(currentSchemaVersion), entityRecord.getFields());
}
// Sets a single non-relationship field on the instance via its setter,
// first parsing the UI-supplied value to the setter's parameter type.
private void setProperty(Object instance, FieldRecord fieldRecord, MotechDataService service, Long deleteValueFieldId) throws NoSuchMethodException, ClassNotFoundException, NoSuchFieldException, IllegalAccessException {
    String fieldName = fieldRecord.getName();
    TypeDto type = getType(fieldRecord);
    String methodName = "set" + StringUtils.capitalize(fieldName);
    // comboboxes need extra metadata to decide the concrete value type
    ComboboxHolder holder = type.isCombobox() ? new ComboboxHolder(instance, fieldRecord) : null;
    String methodParameterType = getMethodParameterType(type, holder);
    ClassLoader classLoader = instance.getClass().getClassLoader();
    Class<?> parameterType;
    Object parsedValue;
    if (Byte[].class.getName().equals(methodParameterType) || byte[].class.getName().equals(methodParameterType)) {
        // blob fields: the current value may have to be re-read from the service
        parameterType = getCorrectByteArrayType(methodParameterType);
        parsedValue = parseBlobValue(fieldRecord, service, fieldName, deleteValueFieldId, instance);
    } else {
        // load the parameter type through the entity's own class loader
        parameterType = classLoader.loadClass(methodParameterType);
        parsedValue = parseValue(holder, methodParameterType, fieldRecord, classLoader);
    }
    // refuse silent changes to non-editable / hidden fields
    validateNonEditableField(fieldRecord, instance, parsedValue);
    Method method = MethodUtils.getAccessibleMethod(instance.getClass(), methodName, parameterType);
    if (method == null && TypeHelper.hasPrimitive(parameterType)) {
        // retry with the primitive counterpart (e.g. Integer -> int)
        method = MethodUtils.getAccessibleMethod(instance.getClass(), methodName, TypeHelper.getPrimitive(parameterType));
        // if the setter is for a primitive, but we have a null, we leave the default
        if (method != null && parsedValue == null) {
            return;
        }
    }
    invokeMethod(method, instance, parsedValue, methodName, fieldName);
}
// Sets a relationship field by resolving the related instance(s) from the
// value supplied by the UI (id-bearing maps) or from historical objects.
private void setRelationProperty(Object instance, FieldRecord fieldRecord) throws NoSuchMethodException, ClassNotFoundException, NoSuchFieldException, IllegalAccessException, InstantiationException, CannotCompileException {
    String fieldName = fieldRecord.getName();
    String methodName = MemberUtil.getSetterName(fieldName);
    // reload the declaring class through the instance's own class loader
    Class<?> clazz = instance.getClass().getClassLoader().loadClass(instance.getClass().getName());
    Field field = FieldUtils.getField(clazz, fieldName, true);
    Class<?> parameterType = field.getType();
    Object value = null;
    MotechDataService serviceForRelatedClass = null;
    TypeDto type = getType(fieldRecord);
    if (StringUtils.isNotEmpty(ObjectUtils.toString(fieldRecord.getValue()))) {
        // pick the related class's data service: for collection-valued
        // relations use the generic element type, otherwise the field type
        if (type.equals(TypeDto.ONE_TO_MANY_RELATIONSHIP) || type.equals(TypeDto.MANY_TO_MANY_RELATIONSHIP)) {
            Class<?> genericType = (Class<?>) ((ParameterizedType) field.getGenericType()).getActualTypeArguments()[0];
            serviceForRelatedClass = DataServiceHelper.getDataService(bundleContext, genericType.getName());
        } else if (type.equals(TypeDto.MANY_TO_ONE_RELATIONSHIP) || type.equals(TypeDto.ONE_TO_ONE_RELATIONSHIP)) {
            serviceForRelatedClass = DataServiceHelper.getDataService(bundleContext, parameterType.getName());
        }
        value = buildRelatedInstances(serviceForRelatedClass, parameterType, fieldRecord.getValue());
    }
    // an empty UI value leaves value == null, clearing the relation
    Method method = MethodUtils.getAccessibleMethod(instance.getClass(), methodName, parameterType);
    invokeMethod(method, instance, value, methodName, fieldName);
}
/**
 * Resolves the actual related instance(s) for a relationship field. Each
 * element is either a UI map carrying the related id (see isFromUI) or a
 * historical object carrying the id of its current version. For collection
 * fields the result mirrors the field's collection kind (Set vs List);
 * otherwise a single resolved instance (or null) is returned.
 */
private Object buildRelatedInstances(MotechDataService service, Class<?> parameterType, Object fieldValue) throws IllegalAccessException, InstantiationException {
    Object parsedValue = null;
    if (Collection.class.isAssignableFrom(parameterType)) {
        // Sets keep set semantics; every other collection kind becomes a list
        // (the original had duplicate List/else branches both creating ArrayList).
        Collection resolved = Set.class.isAssignableFrom(parameterType) ? new HashSet() : new ArrayList();
        for (Object object : (Collection) fieldValue) {
            if (isFromUI(object)) {
                resolved.add(findRelatedObjectById(((Map) object).get(ID), service));
            } else if (isHistoricalObject(object)) {
                String currentVersion = HistoryTrashClassHelper.currentVersion(object.getClass());
                resolved.add(findRelatedObjectById(PropertyUtil.safeGetProperty(object, currentVersion), service));
            }
        }
        parsedValue = resolved;
    } else {
        if (isFromUI(fieldValue)) {
            parsedValue = findRelatedObjectById(((Map) fieldValue).get(ID), service);
        } else if (isHistoricalObject(fieldValue)) {
            String currentVersion = HistoryTrashClassHelper.currentVersion(fieldValue.getClass());
            parsedValue = findRelatedObjectById(PropertyUtil.safeGetProperty(fieldValue, currentVersion), service);
        }
    }
    return parsedValue;
}
/**
 * Checks if the value came from the UI. When an object with a relationship
 * field is created or updated via the UI, each related object is represented
 * by a Map.
 * @param value the value to be checked
 * @return true if the value is from the UI; false otherwise
 */
private boolean isFromUI(Object value) {
    return value instanceof Map;
}

// True when the object is an instance of an MDS history class.
private boolean isHistoricalObject(Object object) {
    return ClassName.isHistoryClassName(object.getClass().getName());
}

private Object findRelatedObjectById(Object id, MotechDataService service) {
    // the id may arrive in various numeric/string forms; normalise to long
    return service.findById(TypeHelper.parseNumber(id, Long.class.getName()).longValue());
}

// Maps a type NAME back to the byte-array class it denotes.
private Class getCorrectByteArrayType(String type) {
    return Byte[].class.getName().equals(type) ? Byte[].class : byte[].class;
}
// Text areas are handled as plain String values.
private TypeDto getType(FieldRecord fieldRecord) {
    TypeDto type = fieldRecord.getType();
    if (type.isTextArea()) {
        type = typeService.findType(String.class);
    }
    return type;
}

// Decides what to store in a blob field: an empty value from the UI means
// "keep the current content" (re-read via getDetachedField) unless this very
// field was explicitly marked for deletion via deleteValueFieldId.
private Object parseBlobValue(FieldRecord fieldRecord, MotechDataService service, String fieldName,
                              Long deleteValueFieldId, Object instance) {
    Object parsedValue;
    if ((ArrayUtils.EMPTY_BYTE_OBJECT_ARRAY.equals(fieldRecord.getValue()) || ArrayUtils.EMPTY_BYTE_ARRAY.equals(fieldRecord.getValue()))
            && !fieldRecord.getId().equals(deleteValueFieldId)) {
        parsedValue = service.getDetachedField(instance, fieldName);
    } else {
        parsedValue = fieldRecord.getValue();
    }
    return verifyParsedValue(parsedValue);
}

// Never hand a null blob to the setter; substitute an empty Byte[].
private Object verifyParsedValue(Object parsedValue) {
    if (parsedValue == null) {
        return ArrayUtils.EMPTY_BYTE_OBJECT_ARRAY;
    }
    return parsedValue;
}
// Invokes the resolved setter, unwrapping Byte[] to byte[] when the setter
// takes the primitive array form.
private void invokeMethod(Method method, Object instance, Object parsedValue, String methodName, String fieldName) throws NoSuchMethodException {
    if (method == null) {
        throw new NoSuchMethodException(String.format("No setter %s for field %s", methodName, fieldName));
    }
    try {
        if (method.getParameterTypes()[0].equals(byte[].class)) {
            method.invoke(instance, parsedValue instanceof byte[] ? parsedValue : ArrayUtils.toPrimitive((Byte[]) parsedValue));
        } else {
            method.invoke(instance, parsedValue);
        }
    } catch (InvocationTargetException | IllegalAccessException e) {
        throw new IllegalStateException(String.format("There was a problem with set value '%s' to field '%s'", parsedValue, fieldName), e);
    }
}
// Parses a raw UI value into the type expected by the setter. Map values are
// normalised in place; combobox enum collections are parsed against the enum
// type; everything else is parsed against the setter's parameter type.
private Object parseValue(ComboboxHolder holder, String methodParameterType, FieldRecord fieldRecord, ClassLoader classLoader) {
    Object parsedValue = fieldRecord.getValue();
    String valueAsString = null == parsedValue ? null : TypeHelper.format(parsedValue);
    if (parsedValue instanceof Map) {
        if (fieldRecord.getMetadata(MAP_KEY_TYPE) != null && fieldRecord.getMetadata(MAP_VALUE_TYPE) != null) {
            Map<Object, Object> parsedValueAsMap = (Map<Object, Object>) parsedValue;
            Map<Object, Object> parsedMap = new LinkedHashMap<>();
            // convert keys/values to their declared types: entries are removed
            // while iterating, then the converted entries are added back
            for (Iterator<Map.Entry<Object, Object>> it = parsedValueAsMap.entrySet().iterator(); it.hasNext();) {
                Map.Entry<Object, Object> entry = it.next();
                parsedMap.put(TypeHelper.parseMapValue(entry.getKey(), fieldRecord.getMetadata(MAP_KEY_TYPE).getValue(), true),
                        TypeHelper.parseMapValue(entry.getValue(), fieldRecord.getMetadata(MAP_VALUE_TYPE).getValue(), false));
                it.remove();
            }
            parsedValueAsMap.putAll(parsedMap);
        }
    } else if (null != holder && holder.isEnumCollection()) {
        String genericType = holder.getEnumName();
        parsedValue = TypeHelper.parse(valueAsString, holder.getTypeClassName(), genericType, classLoader);
    } else {
        parsedValue = TypeHelper.parse(valueAsString, methodParameterType, classLoader);
    }
    return parsedValue;
}

// Combobox fields take their setter parameter type from the holder;
// everything else from the field's declared type class.
private String getMethodParameterType(TypeDto type, ComboboxHolder holder) {
    String methodParameterType;
    if (type.isCombobox() && null != holder) {
        methodParameterType = holder.getTypeClassName();
    } else {
        methodParameterType = type.getTypeClass();
    }
    return methodParameterType;
}
// Reads a field value from the instance via its getter. Blob fields are not
// read at all (an empty placeholder array is returned); fields whose getter
// fails are re-read through the data service as detached fields.
private Object getProperty(Object instance, FieldDto field, MotechDataService service)
        throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    String fieldName = StringUtils.uncapitalize(field.getBasic().getName());
    PropertyDescriptor propertyDescriptor = PropertyUtil.getPropertyDescriptor(instance, fieldName);
    Method readMethod = propertyDescriptor.getReadMethod();
    if (readMethod == null) {
        throw new NoSuchMethodException(String.format("No getter for field %s", fieldName));
    }
    if (TypeDto.BLOB.getTypeClass().equals(field.getType().getTypeClass())) {
        // placeholder only — blob content is fetched on demand via getInstanceField
        return ArrayUtils.EMPTY_BYTE_OBJECT_ARRAY;
    }
    try {
        return readMethod.invoke(instance);
    } catch (InvocationTargetException e) {
        LOGGER.debug("Invocation target exception thrown when retrieving field {}. This may indicate a non loaded field",
                fieldName, e);
        // fallback to the service
        return service.getDetachedField(instance, fieldName);
    }
}
// Converts a raw field value into its UI display form: date/time values
// become formatted strings; related objects get the named back-reference
// stripped to break circular references.
private Object parseValueForDisplay(Object value, MetadataDto relatedFieldMetadata) throws IllegalAccessException, NoSuchMethodException, InvocationTargetException {
    Object parsedValue = value;
    if (parsedValue instanceof DateTime) {
        parsedValue = DTF.print((DateTime) parsedValue);
    } else if (parsedValue instanceof Date) {
        parsedValue = DTF.print(((Date) parsedValue).getTime());
    } else if (parsedValue instanceof Time) {
        parsedValue = ((Time) parsedValue).timeStr();
    } else if (parsedValue instanceof LocalDate) {
        parsedValue = parsedValue.toString();
    } else if (relatedFieldMetadata != null) {
        parsedValue = removeCircularRelations(parsedValue, relatedFieldMetadata.getValue());
    }
    return parsedValue;
}

// Nulls out the named back-reference on each related object (mutates the
// argument) so the value can be rendered without infinite recursion.
private Object removeCircularRelations(Object object, String relatedField) {
    // we must also handle a field that is a collection
    // because of this we handle regular fields as single objects collection here
    Collection objectsCollection = (object instanceof Collection) ? (Collection) object : Arrays.asList(object);
    for (Object item : objectsCollection) {
        if (item != null) {
            PropertyUtil.safeSetProperty(item, relatedField, null);
        }
    }
    return object;
}
// Loads the entity's concrete class from the correct OSGi bundle, falling
// back to the MDS class loader when no bundle can be located.
private Class<?> getEntityClass(EntityDto entity) throws ClassNotFoundException {
    // get the declaring bundle, for DDE the module bundle, for EUDE the generated entities bundle
    Bundle declaringBundle;
    if (entity.isDDE()) {
        declaringBundle = WebBundleUtil.findBundleByName(bundleContext, entity.getModule());
    } else {
        declaringBundle = WebBundleUtil.findBundleBySymbolicName(bundleContext,
                Constants.BundleNames.MDS_ENTITIES_SYMBOLIC_NAME);
    }
    Class<?> clazz;
    // if no bundle found, fallback to the MDSClassLoader
    if (declaringBundle == null) {
        clazz = MDSClassLoader.getInstance().loadClass(entity.getClassName());
    } else {
        clazz = declaringBundle.loadClass(entity.getClassName());
    }
    return clazz;
}
/**
 * Throws SecurityException when the current user may not access the entity.
 * A null security mode performs no check at all. Modes flagged as instance
 * restrictions never throw here (matching the original's
 * !isInstanceRestriction guard — presumably enforced per instance elsewhere).
 */
private void validateCredentials(EntityDto entity) {
    SecurityMode securityMode = entity.getSecurityMode();
    if (securityMode == null) {
        return;
    }
    boolean authorized = false;
    Set<String> securityMembers = entity.getSecurityMembers();
    if (securityMode == SecurityMode.EVERYONE) {
        authorized = true;
    } else if (securityMode == SecurityMode.USERS) {
        authorized = securityMembers.contains(getUsername());
    } else if (securityMode == SecurityMode.PERMISSIONS) {
        for (String permission : getUserPermissions()) {
            if (securityMembers.contains(permission)) {
                authorized = true;
                break; // one matching permission suffices (original kept looping)
            }
        }
    }
    if (!authorized && !securityMode.isInstanceRestriction()) {
        throw new SecurityException();
    }
}
// Entities flagged non-editable reject create/update/delete operations.
private void validateNonEditableProperty(EntityDto entity) {
    if (entity.isNonEditable()) {
        throw new EntityInstancesNonEditableException();
    }
}

// Rejects a write that would alter the value of a read-only or hidden field.
private void validateNonEditableField(FieldRecord fieldRecord, Object instance, Object parsedValue) throws IllegalAccessException {
    Object fieldOldValue = FieldUtils.readField(instance,
            StringUtils.uncapitalize(fieldRecord.getName()),
            true);
    // We need to check if hidden or read only field value isn't changed
    // in some unexpected way. If so then throw exception
    if ((fieldRecord.isNonEditable() || fieldRecord.isNonDisplayable())
            // There is need to use Objects.equals as values - one or both - can be null
            // which would cause NullPointerException when just .equals() on null value
            && !Objects.equals(fieldOldValue, parsedValue)) {
        throw new FieldReadOnlyException(instance.getClass().getName(), fieldRecord.getName());
    }
}
// Spring-injected collaborators.

@Autowired
public void setEntityService(EntityService entityService) {
    this.entityService = entityService;
}

@Autowired
public void setTrashService(TrashService trashService) {
    this.trashService = trashService;
}

@Autowired
public void setBundleContext(BundleContext bundleContext) {
    this.bundleContext = bundleContext;
}

@Autowired
public void setHistoryService(HistoryService historyService) {
    this.historyService = historyService;
}

@Autowired
public void setTypeService(TypeService typeService) {
    this.typeService = typeService;
}
}
| |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.wing.element;
/**
* A class that represents a table row.
*
* The row element is contained inside a table and serves as a container of cell
* elements. A required 'role' attribute determines how the row and its cells
* are used.
*
* @author Scott Phillips
*/
import java.util.ArrayList;
import java.util.List;
import org.dspace.app.xmlui.wing.AttributeMap;
import org.dspace.app.xmlui.wing.Message;
import org.dspace.app.xmlui.wing.WingContext;
import org.dspace.app.xmlui.wing.WingException;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import org.xml.sax.ext.LexicalHandler;
import org.xml.sax.helpers.NamespaceSupport;
public class Row extends AbstractWingElement implements StructuralElement
{
/** The name of the row element */
public static final String E_ROW = "row";
/** The name of the role attribute */
public static final String A_ROLE = "role";
/** The row's name */
private String name;
/** The row's role, see ROLES below */
private String role;
/** Special rendering instructions */
private String rend;
/** The row (and cell) role types: */
public static final String ROLE_DATA = "data";
public static final String ROLE_HEADER = "header";
/** All the roles collected into one array */
public static final String[] ROLES = { ROLE_DATA, ROLE_HEADER };
/** The contents of this row */
List<AbstractWingElement> contents = new ArrayList<AbstractWingElement>();
/**
 * Construct a new table row.
 *
 * @param context
 *            (Required) The context this element is contained in, such as
 *            where to route SAX events and what i18n catalogue to use.
 * @param name
 *            (May be null) a local identifier used to differentiate the
 *            element from its siblings.
 * @param role
 *            (May be null) determines what kind of information the row
 *            carries, either header or data. See row.ROLES
 * @param rend
 *            (May be null) a rendering hint used to override the default
 *            display of the element.
 * @throws WingException
 *             if the role is not one of the allowed ROLES values.
 */
protected Row(WingContext context, String name, String role, String rend)
        throws WingException
{
    super(context);
    // validate the role against the allowed ROLES before storing anything
    restrict(role, ROLES,
            "The 'role' parameter must be one of these values: 'data' or 'header'.");
    this.name = name;
    this.role = role;
    this.rend = rend;
}
/**
* Add a new cell to the table. The cell element contained in a row of a
* table carries content for that table. It is a character container, just
* like p, item, and hi, and its primary purpose is to display textual data,
* possibly enhanced with hyperlinks, emphasized blocks of text, images and
* form fields.
*
* @param name
* (May be null) a local identifier used to differentiate the
* element from its siblings.
* @param role
* (May be null) determines what kind of information the cell
* carries, either header or data. See cell.ROLES
* @param rows
* (May be zero for no defined value) determines how many rows
* does this cell span.
* @param cols
* (May be zero for no defined value) determines how many columns
* does this cell span.
* @param rend
* (May be null) a rendering hint used to override the default
* display of the element.
* @return a new table cell.
*/
public Cell addCell(String name, String role, int rows, int cols,
String rend) throws WingException
{
Cell cell = new Cell(context, name, role, rows, cols, rend);
contents.add(cell);
return cell;
}
/**
 * Append a cell that spans the given number of rows and columns, with no
 * name, role or rendering hint.
 *
 * @param rows
 *            (May be zero for no defined value) number of rows this cell
 *            spans.
 * @param cols
 *            (May be zero for no defined value) number of columns this cell
 *            spans.
 * @return the newly created table cell.
 */
public Cell addCell(int rows, int cols) throws WingException
{
    // Delegate to the fully-specified variant.
    return this.addCell(null, null, rows, cols, null);
}
/**
 * Append a cell with the given name, role and rendering hint, spanning no
 * defined number of rows or columns.
 *
 * @param name
 *            (May be null) a local identifier used to differentiate the
 *            element from its siblings.
 * @param role
 *            (May be null) the kind of information the cell carries, either
 *            header or data. See cell.ROLES
 * @param rend
 *            (May be null) a rendering hint overriding the default display.
 * @return the newly created table cell.
 */
public Cell addCell(String name, String role, String rend)
        throws WingException
{
    // Zero row/column spans mean "no defined value".
    return this.addCell(name, role, 0, 0, rend);
}
/**
 * Append an anonymous cell with the given role.
 *
 * @param role
 *            (May be null) the kind of information the cell carries, either
 *            header or data. See cell.ROLES
 * @return the newly created table cell.
 */
public Cell addCell(String role) throws WingException
{
    // No name, no spans, no rendering hint.
    return this.addCell(null, role, 0, 0, null);
}
/**
 * Append a plain cell with no name, role, spans or rendering hint.
 *
 * @return the newly created table cell.
 */
public Cell addCell() throws WingException
{
    return this.addCell(null, null, 0, 0, null);
}
/**
 * Append a plain cell and immediately fill it with the given untranslated
 * character data.
 *
 * @param characters
 *            (Required) Untranslated character data to be included.
 */
public void addCellContent(String characters) throws WingException
{
    // Create the cell and set its content in one step.
    this.addCell().addContent(characters);
}
/**
 * Append a plain cell and immediately fill it with a translatable message.
 *
 * @param message
 *            (Required) Key to the i18n catalogue to translate the content
 *            into the language preferred by the user.
 */
public void addCellContent(Message message) throws WingException
{
    // Create the cell and set its content in one step.
    this.addCell().addContent(message);
}
/**
 * Translate this row and all contained cells into SAX events, routed to the
 * handlers registered in the WingContext.
 *
 * @param contentHandler
 *            (Required) The registered contentHandler where SAX events
 *            should be routed.
 * @param lexicalHandler
 *            (Required) The registered lexicalHandler where lexical events
 *            (such as CDATA, DTD, etc) should be routed.
 * @param namespaces
 *            (Required) SAX helper tracking namespaces, able to determine
 *            the correct prefix for a given namespace URI.
 */
public void toSAX(ContentHandler contentHandler, LexicalHandler lexicalHandler,
        NamespaceSupport namespaces) throws SAXException
{
    // Only set attributes have entries; an id is derived from the name.
    final AttributeMap attributeMap = new AttributeMap();
    if (name != null)
    {
        attributeMap.put(A_NAME, name);
        attributeMap.put(A_ID, context.generateID(E_ROW, name));
    }
    if (role != null)
    {
        attributeMap.put(A_ROLE, role);
    }
    if (rend != null)
    {
        attributeMap.put(A_RENDER, rend);
    }

    startElement(contentHandler, namespaces, E_ROW, attributeMap);
    for (AbstractWingElement child : contents)
    {
        child.toSAX(contentHandler, lexicalHandler, namespaces);
    }
    endElement(contentHandler, namespaces, E_ROW);
}
/**
 * Release this row's resources: dispose every contained cell, then drop the
 * contents list and let the superclass clean up.
 */
public void dispose()
{
    for (AbstractWingElement child : contents)
    {
        child.dispose();
    }
    contents.clear();
    contents = null;
    super.dispose();
}
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.json.codeinsight;
import com.intellij.codeHighlighting.HighlightDisplayLevel;
import com.intellij.codeInspection.*;
import com.intellij.codeInspection.ui.MultipleCheckboxOptionsPanel;
import com.intellij.json.JsonBundle;
import com.intellij.json.JsonDialectUtil;
import com.intellij.json.JsonElementTypes;
import com.intellij.json.psi.*;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiComment;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiElementVisitor;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
/**
* Compliance checks include
* <ul>
* <li>Usage of line and block commentaries</li>
* <li>Usage of single quoted strings</li>
 * <li>Usage of identifiers (unquoted words)</li>
* <li>Not double quoted string literal is used as property key</li>
* <li>Multiple top-level values</li>
* </ul>
*
* @author Mikhail Golubev
*/
public class JsonStandardComplianceInspection extends LocalInspectionTool {
  private static final Logger LOG = Logger.getInstance(JsonStandardComplianceInspection.class);

  // Per-check toggles; public and non-final so the platform can persist them
  // as inspection settings and bind them to the options-panel checkboxes.
  public boolean myWarnAboutComments = true;
  public boolean myWarnAboutNanInfinity = true;
  public boolean myWarnAboutTrailingCommas = true;
  public boolean myWarnAboutMultipleTopLevelValues = true;

  @NotNull
  public String getDisplayName() {
    return JsonBundle.message("inspection.compliance.name");
  }

  @NotNull
  @Override
  public HighlightDisplayLevel getDefaultLevel() {
    return HighlightDisplayLevel.ERROR;
  }

  @NotNull
  @Override
  public PsiElementVisitor buildVisitor(@NotNull final ProblemsHolder holder, boolean isOnTheFly) {
    // Dialect files (non-standard JSON) have their own rules; skip them entirely.
    if (!JsonDialectUtil.isStandardJson(holder.getFile())) return PsiElementVisitor.EMPTY_VISITOR;
    return new StandardJsonValidatingElementVisitor(holder);
  }

  /**
   * Returns the comma element immediately preceding the container's closing
   * token (skipping whitespace and comments), or {@code null} when the
   * container is unterminated or has no trailing comma.
   */
  @Nullable
  private static PsiElement findTrailingComma(@NotNull JsonContainer container, @NotNull IElementType ending) {
    final PsiElement lastChild = container.getLastChild();
    if (lastChild.getNode().getElementType() != ending) {
      return null;
    }
    final PsiElement beforeEnding = PsiTreeUtil.skipWhitespacesAndCommentsBackward(lastChild);
    if (beforeEnding != null && beforeEnding.getNode().getElementType() == JsonElementTypes.COMMA) {
      return beforeEnding;
    }
    return null;
  }

  @Override
  public JComponent createOptionsPanel() {
    // The second argument of addCheckbox must match a public field name above;
    // the platform binds the checkbox to the field reflectively.
    final MultipleCheckboxOptionsPanel optionsPanel = new MultipleCheckboxOptionsPanel(this);
    optionsPanel.addCheckbox(JsonBundle.message("inspection.compliance.option.comments"), "myWarnAboutComments");
    optionsPanel.addCheckbox(JsonBundle.message("inspection.compliance.option.multiple.top.level.values"), "myWarnAboutMultipleTopLevelValues");
    optionsPanel.addCheckbox(JsonBundle.message("inspection.compliance.option.trailing.comma"), "myWarnAboutTrailingCommas");
    optionsPanel.addCheckbox(JsonBundle.message("inspection.compliance.option.nan.infinity"), "myWarnAboutNanInfinity");
    return optionsPanel;
  }

  /** Quick fix that re-quotes a single-quoted string or bare identifier with double quotes. */
  private static class AddDoubleQuotesFix implements LocalQuickFix {
    @NotNull
    @Override
    public String getFamilyName() {
      return JsonBundle.message("quickfix.add.double.quotes.desc");
    }

    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      final PsiElement element = descriptor.getPsiElement();
      final String rawText = element.getText();
      if (element instanceof JsonLiteral || element instanceof JsonReferenceExpression) {
        String content = JsonPsiUtil.stripQuotes(rawText);
        if (element instanceof JsonStringLiteral && rawText.startsWith("'")) {
          // Single-quoted source: adjust escapes so embedded quotes survive re-quoting.
          content = escapeSingleQuotedStringContent(content);
        }
        final PsiElement replacement = new JsonElementGenerator(project).createValue("\"" + content + "\"");
        // The formatter is disabled so replacing the leaf does not reflow surrounding code.
        CodeStyleManager.getInstance(project).performActionWithFormatterDisabled((Runnable)() -> element.replace(replacement));
      }
      else {
        LOG.error("Quick fix was applied to unexpected element", rawText, element.getParent().getText());
      }
    }

    /**
     * Converts the content of a single-quoted string into a form valid inside
     * double quotes: {@code \'} becomes {@code '}, a bare {@code "} becomes
     * {@code \"}, other escape sequences are kept as-is, and a dangling
     * trailing backslash is preserved.
     */
    @NotNull
    private static String escapeSingleQuotedStringContent(@NotNull String content) {
      final StringBuilder result = new StringBuilder();
      boolean nextCharEscaped = false;
      for (int i = 0; i < content.length(); i++) {
        final char c = content.charAt(i);
        // Re-emit the pending backslash unless it escaped a single quote,
        // and insert one before an unescaped double quote.
        if ((nextCharEscaped && c != '\'') || (!nextCharEscaped && c == '"')) {
          result.append('\\');
        }
        if (c != '\\' || nextCharEscaped) {
          result.append(c);
          nextCharEscaped = false;
        }
        else {
          // Defer the backslash until we see what it escapes.
          nextCharEscaped = true;
        }
      }
      if (nextCharEscaped) {
        result.append('\\');
      }
      return result.toString();
    }
  }

  /**
   * Visitor that reports each non-compliant construct. Dialect subclasses may
   * relax individual rules via the allow* extension points.
   */
  protected class StandardJsonValidatingElementVisitor extends JsonElementVisitor {
    private final ProblemsHolder myHolder;

    public StandardJsonValidatingElementVisitor(ProblemsHolder holder) {myHolder = holder;}

    // Extension points for dialects; strict JSON allows none of these.
    protected boolean allowComments() { return false; }
    protected boolean allowSingleQuotes() { return false; }
    protected boolean allowIdentifierPropertyNames() { return false; }
    protected boolean allowTrailingCommas() { return false; }

    // A valid strict-JSON property name must be a double-quoted literal.
    protected boolean isValidPropertyName(@NotNull PsiElement literal) {
      return literal instanceof JsonLiteral && JsonPsiUtil.getElementTextWithoutHostEscaping(literal).startsWith("\"");
    }

    @Override
    public void visitComment(PsiComment comment) {
      if (!allowComments() && myWarnAboutComments) {
        if (JsonStandardComplianceProvider.shouldWarnAboutComment(comment)) {
          myHolder.registerProblem(comment, JsonBundle.message("inspection.compliance.msg.comments"), ProblemHighlightType.WEAK_WARNING);
        }
      }
    }

    @Override
    public void visitStringLiteral(@NotNull JsonStringLiteral stringLiteral) {
      if (!allowSingleQuotes() && JsonPsiUtil.getElementTextWithoutHostEscaping(stringLiteral).startsWith("'")) {
        myHolder.registerProblem(stringLiteral, JsonBundle.message("inspection.compliance.msg.single.quoted.strings"),
                                 new AddDoubleQuotesFix());
      }
      // May be illegal property key as well
      super.visitStringLiteral(stringLiteral);
    }

    @Override
    public void visitLiteral(@NotNull JsonLiteral literal) {
      if (JsonPsiUtil.isPropertyKey(literal) && !isValidPropertyName(literal)) {
        myHolder.registerProblem(literal, JsonBundle.message("inspection.compliance.msg.illegal.property.key"), new AddDoubleQuotesFix());
      }
      // for standard JSON, the inspection for NaN, Infinity and -Infinity is now configurable
      if (!allowNanInfinity() && literal instanceof JsonNumberLiteral && myWarnAboutNanInfinity) {
        final String text = JsonPsiUtil.getElementTextWithoutHostEscaping(literal);
        if (StandardJsonLiteralChecker.INF.equals(text) ||
            StandardJsonLiteralChecker.MINUS_INF.equals(text) ||
            StandardJsonLiteralChecker.NAN.equals(text)) {
          myHolder.registerProblem(literal, JsonBundle.message("syntax.error.illegal.floating.point.literal"));
        }
      }
      super.visitLiteral(literal);
    }

    protected boolean allowNanInfinity() {
      return false;
    }

    @Override
    public void visitReferenceExpression(@NotNull JsonReferenceExpression reference) {
      if (!allowIdentifierPropertyNames() || !JsonPsiUtil.isPropertyKey(reference) || !isValidPropertyName(reference)) {
        myHolder.registerProblem(reference, JsonBundle.message("inspection.compliance.msg.bad.token"), new AddDoubleQuotesFix());
      }
      // May be illegal property key as well
      super.visitReferenceExpression(reference);
    }

    @Override
    public void visitArray(@NotNull JsonArray array) {
      if (myWarnAboutTrailingCommas && !allowTrailingCommas()) {
        final PsiElement trailingComma = findTrailingComma(array, JsonElementTypes.R_BRACKET);
        if (trailingComma != null) {
          myHolder.registerProblem(trailingComma, JsonBundle.message("inspection.compliance.msg.trailing.comma"));
        }
      }
      super.visitArray(array);
    }

    @Override
    public void visitObject(@NotNull JsonObject object) {
      if (myWarnAboutTrailingCommas && !allowTrailingCommas()) {
        final PsiElement trailingComma = findTrailingComma(object, JsonElementTypes.R_CURLY);
        if (trailingComma != null) {
          myHolder.registerProblem(trailingComma, JsonBundle.message("inspection.compliance.msg.trailing.comma"));
        }
      }
      super.visitObject(object);
    }

    @Override
    public void visitValue(@NotNull JsonValue value) {
      if (value.getContainingFile() instanceof JsonFile) {
        final JsonFile jsonFile = (JsonFile)value.getContainingFile();
        // Strict JSON permits exactly one top-level value per file.
        if (myWarnAboutMultipleTopLevelValues && value.getParent() == jsonFile && value != jsonFile.getTopLevelValue()) {
          myHolder.registerProblem(value, JsonBundle.message("inspection.compliance.msg.multiple.top.level.values"));
        }
      }
    }
  }
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding (JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2011.06.04 at 07:58:30 PM BST
//
package pubmed.openAccess.jaxb.generated;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlElementRefs;
import javax.xml.bind.annotation.XmlMixed;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import elsevier.jaxb.math.mathml.Math;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <choice maxOccurs="unbounded" minOccurs="0">
* <element ref="{}email"/>
* <element ref="{}ext-link"/>
* <element ref="{}uri"/>
* <element ref="{}inline-supplementary-material"/>
* <element ref="{}related-article"/>
* <element ref="{}related-object"/>
* <element ref="{}hr"/>
* <element ref="{}bold"/>
* <element ref="{}italic"/>
* <element ref="{}monospace"/>
* <element ref="{}overline"/>
* <element ref="{}overline-start"/>
* <element ref="{}overline-end"/>
* <element ref="{}roman"/>
* <element ref="{}sans-serif"/>
* <element ref="{}sc"/>
* <element ref="{}strike"/>
* <element ref="{}underline"/>
* <element ref="{}underline-start"/>
* <element ref="{}underline-end"/>
* <element ref="{}alternatives"/>
* <element ref="{}inline-graphic"/>
* <element ref="{}private-char"/>
* <element ref="{}chem-struct"/>
* <element ref="{}inline-formula"/>
* <element ref="{}tex-math"/>
* <element ref="{http://www.w3.org/1998/Math/MathML}math"/>
* <element ref="{}abbrev"/>
* <element ref="{}milestone-end"/>
* <element ref="{}milestone-start"/>
* <element ref="{}named-content"/>
* <element ref="{}styled-content"/>
* <element ref="{}fn"/>
* <element ref="{}target"/>
* <element ref="{}xref"/>
* <element ref="{}sub"/>
* <element ref="{}sup"/>
* <element ref="{}x"/>
* <element ref="{}degrees"/>
* <element ref="{}given-names"/>
* <element ref="{}prefix"/>
* <element ref="{}surname"/>
* <element ref="{}suffix"/>
* </choice>
* <attribute name="content-type" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "content"
})
@XmlRootElement(name = "speaker")
public class Speaker {

    // Mixed content: text runs plus any of the inline elements listed below,
    // in document order. NOTE: generated code — edits here are lost on
    // schema recompilation.
    @XmlElementRefs({
        @XmlElementRef(name = "related-article", type = RelatedArticle.class),
        @XmlElementRef(name = "hr", type = Hr.class),
        @XmlElementRef(name = "underline-start", type = UnderlineStart.class),
        @XmlElementRef(name = "xref", type = Xref.class),
        @XmlElementRef(name = "inline-formula", type = InlineFormula.class),
        @XmlElementRef(name = "roman", type = Roman.class),
        @XmlElementRef(name = "italic", type = Italic.class),
        @XmlElementRef(name = "fn", type = Fn.class),
        @XmlElementRef(name = "related-object", type = RelatedObject.class),
        @XmlElementRef(name = "private-char", type = PrivateChar.class),
        @XmlElementRef(name = "overline", type = Overline.class),
        @XmlElementRef(name = "email", type = Email.class),
        @XmlElementRef(name = "given-names", type = GivenNames.class),
        @XmlElementRef(name = "alternatives", type = Alternatives.class),
        @XmlElementRef(name = "chem-struct", type = ChemStruct.class),
        @XmlElementRef(name = "inline-graphic", type = InlineGraphic.class),
        @XmlElementRef(name = "degrees", type = Degrees.class),
        @XmlElementRef(name = "uri", type = Uri.class),
        @XmlElementRef(name = "x", type = X.class),
        @XmlElementRef(name = "suffix", type = Suffix.class),
        @XmlElementRef(name = "styled-content", type = StyledContent.class),
        @XmlElementRef(name = "overline-end", type = OverlineEnd.class),
        @XmlElementRef(name = "strike", type = Strike.class),
        @XmlElementRef(name = "inline-supplementary-material", type = InlineSupplementaryMaterial.class),
        @XmlElementRef(name = "milestone-start", type = MilestoneStart.class),
        @XmlElementRef(name = "target", type = Target.class),
        @XmlElementRef(name = "monospace", type = Monospace.class),
        @XmlElementRef(name = "sup", type = Sup.class),
        @XmlElementRef(name = "abbrev", type = Abbrev.class),
        @XmlElementRef(name = "milestone-end", type = MilestoneEnd.class),
        @XmlElementRef(name = "ext-link", type = ExtLink.class),
        @XmlElementRef(name = "prefix", type = Prefix.class),
        @XmlElementRef(name = "underline-end", type = UnderlineEnd.class),
        @XmlElementRef(name = "surname", type = Surname.class),
        @XmlElementRef(name = "bold", type = Bold.class),
        @XmlElementRef(name = "sans-serif", type = SansSerif.class),
        @XmlElementRef(name = "sub", type = Sub.class),
        @XmlElementRef(name = "named-content", type = NamedContent.class),
        @XmlElementRef(name = "underline", type = Underline.class),
        @XmlElementRef(name = "math", namespace = "http://www.w3.org/1998/Math/MathML", type = Math.class),
        @XmlElementRef(name = "sc", type = Sc.class),
        @XmlElementRef(name = "tex-math", type = TexMath.class),
        @XmlElementRef(name = "overline-start", type = OverlineStart.class)
    })
    @XmlMixed
    protected List<Object> content;

    // Optional "content-type" XML attribute; any simple type is accepted.
    @XmlAttribute(name = "content-type")
    @XmlSchemaType(name = "anySimpleType")
    protected String contentType;

    /**
     * Gets the value of the content property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the content property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getContent().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link RelatedArticle }
     * {@link UnderlineStart }
     * {@link Hr }
     * {@link InlineFormula }
     * {@link Xref }
     * {@link String }
     * {@link Roman }
     * {@link Fn }
     * {@link Italic }
     * {@link PrivateChar }
     * {@link RelatedObject }
     * {@link Overline }
     * {@link Email }
     * {@link Alternatives }
     * {@link GivenNames }
     * {@link InlineGraphic }
     * {@link ChemStruct }
     * {@link Degrees }
     * {@link X }
     * {@link Uri }
     * {@link Suffix }
     * {@link StyledContent }
     * {@link Strike }
     * {@link OverlineEnd }
     * {@link InlineSupplementaryMaterial }
     * {@link MilestoneStart }
     * {@link Abbrev }
     * {@link Sup }
     * {@link Monospace }
     * {@link Target }
     * {@link MilestoneEnd }
     * {@link ExtLink }
     * {@link UnderlineEnd }
     * {@link Prefix }
     * {@link Bold }
     * {@link Surname }
     * {@link Sub }
     * {@link SansSerif }
     * {@link Math }
     * {@link Underline }
     * {@link NamedContent }
     * {@link Sc }
     * {@link TexMath }
     * {@link OverlineStart }
     *
     *
     */
    public List<Object> getContent() {
        // Lazily initialized so an unmarshalled element with no content
        // still returns a usable (empty) live list.
        if (content == null) {
            content = new ArrayList<Object>();
        }
        return this.content;
    }

    /**
     * Gets the value of the contentType property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getContentType() {
        return contentType;
    }

    /**
     * Sets the value of the contentType property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setContentType(String value) {
        this.contentType = value;
    }

}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import junit.framework.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.AsyncDispatcher;
import org.apache.hadoop.yarn.event.InlineDispatcher;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.server.resourcemanager.Application;
import org.apache.hadoop.yarn.server.resourcemanager.MockNodes;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.RMContextImpl;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.Task;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppReport;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.TestCapacityScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.security.NMTokenSecretManagerInRM;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class TestFifoScheduler {
private static final Log LOG = LogFactory.getLog(TestFifoScheduler.class);
private final int GB = 1024;
private ResourceManager resourceManager = null;
private static final RecordFactory recordFactory =
RecordFactoryProvider.getRecordFactory(null);
@Before
public void setUp() throws Exception {
  // Each test gets a fresh RM configured to use the FIFO scheduler.
  resourceManager = new ResourceManager();
  Configuration configuration = new Configuration();
  configuration.setClass(YarnConfiguration.RM_SCHEDULER,
      FifoScheduler.class, ResourceScheduler.class);
  resourceManager.init(configuration);
}
@After
public void tearDown() throws Exception {
  // Stop the RM so each test leaves no running services behind.
  this.resourceManager.stop();
}
/**
 * Registers a mock NodeManager with the test RM's resource tracker.
 *
 * @param hostName host the node reports itself as
 * @param containerManagerPort port of the node's container manager
 * @param nmHttpPort port of the node's HTTP endpoint
 * @param rackName rack the node belongs to
 * @param capability total resource capacity the node advertises
 * @return the registered mock NodeManager
 */
private org.apache.hadoop.yarn.server.resourcemanager.NodeManager
    registerNode(String hostName, int containerManagerPort, int nmHttpPort,
        String rackName, Resource capability) throws IOException,
        YarnException {
  return new org.apache.hadoop.yarn.server.resourcemanager.NodeManager(
      hostName, containerManagerPort, nmHttpPort, rackName, capability,
      resourceManager.getResourceTrackerService(), resourceManager
          .getRMContext());
}
/**
 * Builds an attempt id for the given application and attempt numbers.
 * Cluster timestamp 0 keeps the ids deterministic across test runs.
 */
private ApplicationAttemptId createAppAttemptId(int appId, int attemptId) {
  return ApplicationAttemptId.newInstance(
      ApplicationId.newInstance(0, appId), attemptId);
}
/**
 * Builds a ResourceRequest asking for numContainers containers of the given
 * memory size on the given resource name (host, rack, or ANY).
 */
private ResourceRequest createResourceRequest(int memory, String host,
    int priority, int numContainers) {
  ResourceRequest req = recordFactory
      .newRecordInstance(ResourceRequest.class);
  req.setCapability(Resources.createResource(memory));
  req.setResourceName(host);
  req.setNumContainers(numContainers);
  Priority level = recordFactory.newRecordInstance(Priority.class);
  level.setPriority(priority);
  req.setPriority(level);
  return req;
}
@Test(timeout=5000)
public void testFifoSchedulerCapacityWhenNoNMs() {
  // With no registered NodeManagers the root queue must report zero
  // current capacity rather than NaN or an error.
  FifoScheduler fifo = new FifoScheduler();
  QueueInfo rootQueueInfo = fifo.getQueueInfo(null, false, false);
  Assert.assertEquals(0.0f, rootQueueInfo.getCurrentCapacity());
}
@Test(timeout=5000)
public void testAppAttemptMetrics() throws Exception {
  AsyncDispatcher dispatcher = new InlineDispatcher();
  RMContext rmContext = new RMContextImpl(dispatcher, null,
      null, null, null, null, null, null, null);

  FifoScheduler scheduler = new FifoScheduler();
  scheduler.reinitialize(new Configuration(), rmContext);

  ApplicationId appId = BuilderUtils.newApplicationId(200, 1);
  ApplicationAttemptId firstAttempt = BuilderUtils.newApplicationAttemptId(
      appId, 1);
  scheduler.handle(new AppAddedSchedulerEvent(firstAttempt, "queue", "user"));

  // A second attempt of the same application must not count as a new
  // submission in the queue metrics.
  ApplicationAttemptId secondAttempt =
      BuilderUtils.newApplicationAttemptId(appId, 2);
  scheduler.handle(new AppAddedSchedulerEvent(secondAttempt, "queue", "user"));

  QueueMetrics metrics = scheduler.getRootQueueMetrics();
  Assert.assertEquals(1, metrics.getAppsSubmitted());
}
@Test(timeout=2000)
public void testNodeLocalAssignment() throws Exception {
  AsyncDispatcher dispatcher = new InlineDispatcher();
  Configuration conf = new Configuration();
  // Both token secret managers need an active master key before the
  // scheduler can hand out containers.
  RMContainerTokenSecretManager containerTokenSecretManager =
      new RMContainerTokenSecretManager(conf);
  containerTokenSecretManager.rollMasterKey();
  NMTokenSecretManagerInRM nmTokenSecretManager =
      new NMTokenSecretManagerInRM(conf);
  nmTokenSecretManager.rollMasterKey();
  RMContext rmContext = new RMContextImpl(dispatcher, null, null, null, null,
      null, containerTokenSecretManager, nmTokenSecretManager, null);

  FifoScheduler scheduler = new FifoScheduler();
  scheduler.reinitialize(new Configuration(), rmContext);

  // One node with 64 GB so all three containers fit on it.
  RMNode node0 = MockNodes.newNodeInfo(1,
      Resources.createResource(1024 * 64), 1, "127.0.0.1");
  NodeAddedSchedulerEvent nodeEvent1 = new NodeAddedSchedulerEvent(node0);
  scheduler.handle(nodeEvent1);

  int _appId = 1;
  int _appAttemptId = 1;
  ApplicationAttemptId appAttemptId = createAppAttemptId(_appId,
      _appAttemptId);
  AppAddedSchedulerEvent appEvent1 = new AppAddedSchedulerEvent(appAttemptId,
      "queue1", "user1");
  scheduler.handle(appEvent1);

  int memory = 64;
  int nConts = 3;
  int priority = 20;

  // Ask at node, rack and ANY level, as a real AM would.
  List<ResourceRequest> ask = new ArrayList<ResourceRequest>();
  ResourceRequest nodeLocal = createResourceRequest(memory,
      node0.getHostName(), priority, nConts);
  ResourceRequest rackLocal = createResourceRequest(memory,
      node0.getRackName(), priority, nConts);
  ResourceRequest any = createResourceRequest(memory, ResourceRequest.ANY, priority,
      nConts);
  ask.add(nodeLocal);
  ask.add(rackLocal);
  ask.add(any);
  scheduler.allocate(appAttemptId, ask, new ArrayList<ContainerId>(), null, null);

  NodeUpdateSchedulerEvent node0Update = new NodeUpdateSchedulerEvent(node0);

  // Before the node update event, there are 3 local requests outstanding
  Assert.assertEquals(3, nodeLocal.getNumContainers());

  scheduler.handle(node0Update);

  // After the node update event, check that there are no more local requests
  // outstanding
  Assert.assertEquals(0, nodeLocal.getNumContainers());

  //Also check that the containers were scheduled
  SchedulerAppReport info = scheduler.getSchedulerAppInfo(appAttemptId);
  Assert.assertEquals(3, info.getLiveContainers().size());
}
// @Test
public void testFifoScheduler() throws Exception {
LOG.info("--- START: testFifoScheduler ---");
final int GB = 1024;
// Register node1
String host_0 = "host_0";
org.apache.hadoop.yarn.server.resourcemanager.NodeManager nm_0 =
registerNode(host_0, 1234, 2345, NetworkTopology.DEFAULT_RACK,
Resources.createResource(4 * GB, 1));
nm_0.heartbeat();
// Register node2
String host_1 = "host_1";
org.apache.hadoop.yarn.server.resourcemanager.NodeManager nm_1 =
registerNode(host_1, 1234, 2345, NetworkTopology.DEFAULT_RACK,
Resources.createResource(2 * GB, 1));
nm_1.heartbeat();
// ResourceRequest priorities
Priority priority_0 =
org.apache.hadoop.yarn.server.resourcemanager.resource.Priority.create(0);
Priority priority_1 =
org.apache.hadoop.yarn.server.resourcemanager.resource.Priority.create(1);
// Submit an application
Application application_0 = new Application("user_0", resourceManager);
application_0.submit();
application_0.addNodeManager(host_0, 1234, nm_0);
application_0.addNodeManager(host_1, 1234, nm_1);
Resource capability_0_0 = Resources.createResource(GB);
application_0.addResourceRequestSpec(priority_1, capability_0_0);
Resource capability_0_1 = Resources.createResource(2 * GB);
application_0.addResourceRequestSpec(priority_0, capability_0_1);
Task task_0_0 = new Task(application_0, priority_1,
new String[] {host_0, host_1});
application_0.addTask(task_0_0);
// Submit another application
Application application_1 = new Application("user_1", resourceManager);
application_1.submit();
application_1.addNodeManager(host_0, 1234, nm_0);
application_1.addNodeManager(host_1, 1234, nm_1);
Resource capability_1_0 = Resources.createResource(3 * GB);
application_1.addResourceRequestSpec(priority_1, capability_1_0);
Resource capability_1_1 = Resources.createResource(4 * GB);
application_1.addResourceRequestSpec(priority_0, capability_1_1);
Task task_1_0 = new Task(application_1, priority_1,
new String[] {host_0, host_1});
application_1.addTask(task_1_0);
// Send resource requests to the scheduler
LOG.info("Send resource requests to the scheduler");
application_0.schedule();
application_1.schedule();
// Send a heartbeat to kick the tires on the Scheduler
LOG.info("Send a heartbeat to kick the tires on the Scheduler... " +
"nm0 -> task_0_0 and task_1_0 allocated, used=4G " +
"nm1 -> nothing allocated");
nm_0.heartbeat(); // task_0_0 and task_1_0 allocated, used=4G
nm_1.heartbeat(); // nothing allocated
// Get allocations from the scheduler
application_0.schedule(); // task_0_0
checkApplicationResourceUsage(GB, application_0);
application_1.schedule(); // task_1_0
checkApplicationResourceUsage(3 * GB, application_1);
nm_0.heartbeat();
nm_1.heartbeat();
checkNodeResourceUsage(4*GB, nm_0); // task_0_0 (1G) and task_1_0 (3G)
checkNodeResourceUsage(0*GB, nm_1); // no tasks, 2G available
LOG.info("Adding new tasks...");
Task task_1_1 = new Task(application_1, priority_1,
new String[] {ResourceRequest.ANY});
application_1.addTask(task_1_1);
Task task_1_2 = new Task(application_1, priority_1,
new String[] {ResourceRequest.ANY});
application_1.addTask(task_1_2);
Task task_1_3 = new Task(application_1, priority_0,
new String[] {ResourceRequest.ANY});
application_1.addTask(task_1_3);
application_1.schedule();
Task task_0_1 = new Task(application_0, priority_1,
new String[] {host_0, host_1});
application_0.addTask(task_0_1);
Task task_0_2 = new Task(application_0, priority_1,
new String[] {host_0, host_1});
application_0.addTask(task_0_2);
Task task_0_3 = new Task(application_0, priority_0,
new String[] {ResourceRequest.ANY});
application_0.addTask(task_0_3);
application_0.schedule();
// Send a heartbeat to kick the tires on the Scheduler
LOG.info("Sending hb from " + nm_0.getHostName());
nm_0.heartbeat(); // nothing new, used=4G
LOG.info("Sending hb from " + nm_1.getHostName());
nm_1.heartbeat(); // task_0_3, used=2G
// Get allocations from the scheduler
LOG.info("Trying to allocate...");
application_0.schedule();
checkApplicationResourceUsage(3 * GB, application_0);
application_1.schedule();
checkApplicationResourceUsage(3 * GB, application_1);
nm_0.heartbeat();
nm_1.heartbeat();
checkNodeResourceUsage(4*GB, nm_0);
checkNodeResourceUsage(2*GB, nm_1);
// Complete tasks
LOG.info("Finishing up task_0_0");
application_0.finishTask(task_0_0); // Now task_0_1
application_0.schedule();
application_1.schedule();
nm_0.heartbeat();
nm_1.heartbeat();
checkApplicationResourceUsage(3 * GB, application_0);
checkApplicationResourceUsage(3 * GB, application_1);
checkNodeResourceUsage(4*GB, nm_0);
checkNodeResourceUsage(2*GB, nm_1);
LOG.info("Finishing up task_1_0");
application_1.finishTask(task_1_0); // Now task_0_2
application_0.schedule(); // final overcommit for app0 caused here
application_1.schedule();
nm_0.heartbeat(); // final overcommit for app0 occurs here
nm_1.heartbeat();
checkApplicationResourceUsage(4 * GB, application_0);
checkApplicationResourceUsage(0 * GB, application_1);
//checkNodeResourceUsage(1*GB, nm_0); // final over-commit -> rm.node->1G, test.node=2G
checkNodeResourceUsage(2*GB, nm_1);
LOG.info("Finishing up task_0_3");
application_0.finishTask(task_0_3); // No more
application_0.schedule();
application_1.schedule();
nm_0.heartbeat();
nm_1.heartbeat();
checkApplicationResourceUsage(2 * GB, application_0);
checkApplicationResourceUsage(0 * GB, application_1);
//checkNodeResourceUsage(2*GB, nm_0); // final over-commit, rm.node->1G, test.node->2G
checkNodeResourceUsage(0*GB, nm_1);
LOG.info("Finishing up task_0_1");
application_0.finishTask(task_0_1);
application_0.schedule();
application_1.schedule();
nm_0.heartbeat();
nm_1.heartbeat();
checkApplicationResourceUsage(1 * GB, application_0);
checkApplicationResourceUsage(0 * GB, application_1);
LOG.info("Finishing up task_0_2");
application_0.finishTask(task_0_2); // now task_1_3 can go!
application_0.schedule();
application_1.schedule();
nm_0.heartbeat();
nm_1.heartbeat();
checkApplicationResourceUsage(0 * GB, application_0);
checkApplicationResourceUsage(4 * GB, application_1);
LOG.info("Finishing up task_1_3");
application_1.finishTask(task_1_3); // now task_1_1
application_0.schedule();
application_1.schedule();
nm_0.heartbeat();
nm_1.heartbeat();
checkApplicationResourceUsage(0 * GB, application_0);
checkApplicationResourceUsage(3 * GB, application_1);
LOG.info("Finishing up task_1_1");
application_1.finishTask(task_1_1);
application_0.schedule();
application_1.schedule();
nm_0.heartbeat();
nm_1.heartbeat();
checkApplicationResourceUsage(0 * GB, application_0);
checkApplicationResourceUsage(3 * GB, application_1);
LOG.info("--- END: testFifoScheduler ---");
}
@Test
public void testConcurrentAccessOnApplications() throws Exception {
  // Delegates to the shared helper in TestCapacityScheduler; presumably it
  // exercises concurrent reads/writes against the scheduler's application map
  // (see TestCapacityScheduler.verifyConcurrentAccessOnApplications) — the
  // helper body is not visible here.
  FifoScheduler fs = new FifoScheduler();
  TestCapacityScheduler.verifyConcurrentAccessOnApplications(
      fs.applications, FiCaSchedulerApp.class);
}
@SuppressWarnings("resource")
@Test
public void testBlackListNodes() throws Exception {
  // Configure the ResourceManager to use the FIFO scheduler.
  Configuration conf = new Configuration();
  conf.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class,
      ResourceScheduler.class);
  MockRM rm = new MockRM(conf);
  rm.start();
  FifoScheduler fs = (FifoScheduler) rm.getResourceScheduler();

  // Register a single 4GB node with the scheduler.
  String host = "127.0.0.1";
  RMNode node =
      MockNodes.newNodeInfo(0, MockNodes.newResource(4 * GB), 1, host);
  fs.handle(new NodeAddedSchedulerEvent(node));

  // Add one application attempt to the scheduler.
  ApplicationId appId = BuilderUtils.newApplicationId(100, 1);
  ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(
      appId, 1);
  SchedulerEvent event = new AppAddedSchedulerEvent(appAttemptId, "default",
      "user");
  fs.handle(event);

  // Verify the blacklist can be updated independent of requesting containers:
  // an allocate() call with no resource asks but a blacklist-addition list
  // must mark the host as blacklisted...
  fs.allocate(appAttemptId, Collections.<ResourceRequest>emptyList(),
      Collections.<ContainerId>emptyList(),
      Collections.singletonList(host), null);
  Assert.assertTrue(fs.getApplication(appAttemptId).isBlacklisted(host));
  // ...and a subsequent call with the host in the blacklist-removal list must
  // clear the blacklisting again.
  fs.allocate(appAttemptId, Collections.<ResourceRequest>emptyList(),
      Collections.<ContainerId>emptyList(), null,
      Collections.singletonList(host));
  Assert.assertFalse(fs.getApplication(appAttemptId).isBlacklisted(host));
  rm.stop();
}
/** Asserts that the application's currently used memory equals {@code expected}. */
private void checkApplicationResourceUsage(int expected,
    Application application) {
  final int actualMemory = application.getUsedResources().getMemory();
  Assert.assertEquals(expected, actualMemory);
}
/**
 * Asserts that the node's used memory equals {@code expected}, then asks the
 * mock NodeManager to validate its own internal resource bookkeeping.
 */
private void checkNodeResourceUsage(int expected,
    org.apache.hadoop.yarn.server.resourcemanager.NodeManager node) {
  final int usedMemory = node.getUsed().getMemory();
  Assert.assertEquals(expected, usedMemory);
  node.checkResourceUsage();
}
/**
 * Standalone entry point: runs the FIFO scheduler test outside of a JUnit
 * runner.
 *
 * <p>Fix: {@code tearDown()} is now invoked from a {@code finally} block so
 * that cleanup runs even when the test body throws; previously a failing
 * test skipped teardown entirely.
 */
public static void main(String[] arg) throws Exception {
  TestFifoScheduler t = new TestFifoScheduler();
  t.setUp();
  try {
    t.testFifoScheduler();
  } finally {
    t.tearDown();
  }
}
}
| |
package com.home911.httpchat.client.gui;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.Composite;
import com.home911.httpchat.client.model.StatusResult;
import com.home911.httpchat.shared.model.Alert;
import com.home911.httpchat.shared.model.Contact;
import com.home911.httpchat.shared.model.Presence;
import com.smartgwt.client.data.DataSource;
import com.smartgwt.client.data.events.ErrorEvent;
import com.smartgwt.client.data.events.HandleErrorHandler;
import com.smartgwt.client.data.fields.DataSourceTextField;
import com.smartgwt.client.util.BooleanCallback;
import com.smartgwt.client.util.SC;
import com.smartgwt.client.widgets.Window;
import com.smartgwt.client.widgets.events.CloseClickEvent;
import com.smartgwt.client.widgets.events.CloseClickHandler;
import com.smartgwt.client.widgets.grid.ListGrid;
import com.smartgwt.client.widgets.grid.ListGridField;
import com.smartgwt.client.widgets.grid.ListGridRecord;
import com.smartgwt.client.widgets.grid.events.RecordClickEvent;
import com.smartgwt.client.widgets.grid.events.RecordClickHandler;
import com.smartgwt.client.widgets.grid.events.RowContextClickEvent;
import com.smartgwt.client.widgets.grid.events.RowContextClickHandler;
import com.smartgwt.client.widgets.menu.Menu;
import com.smartgwt.client.widgets.menu.MenuItem;
import com.smartgwt.client.widgets.menu.events.MenuItemClickEvent;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Window-based view listing pending alerts (currently contact invites) for the
 * logged-in user. Clicking a row opens a popup menu from which the user can
 * accept or deny the invite; both actions call the backend service and remove
 * the alert record on a 200 status.
 */
public class AlertView extends Composite {
    private static final Logger LOGGER = Logger.getLogger(AlertView.class.getName());
    // Parent view; used to reach the backend service and sibling views.
    private final MainView mainView;
    // Floating window that hosts the alerts grid.
    private final Window alertWnd;
    // Grid showing one row per alert.
    private final ListGrid alertsGrid;
    // Client-only data source backing the grid.
    private final AlertDataSource alertsDs;
    // Session token passed to every backend call.
    private final String token;

    /**
     * Client-only SmartGWT data source describing an alert record with fields:
     * id (primary key, hidden), type, from, fromId (hidden), fromPresence (hidden).
     */
    private static class AlertDataSource extends DataSource {
        private AlertDataSource(String id) {
            setID(id);
            DataSourceTextField pkDsField = new DataSourceTextField("id");
            pkDsField.setHidden(true);
            pkDsField.setPrimaryKey(true);
            DataSourceTextField typeDsfield = new DataSourceTextField("type", "Type");
            typeDsfield.setRequired(true);
            DataSourceTextField fromDsField = new DataSourceTextField("from", "From");
            fromDsField.setRequired(true);
            DataSourceTextField fromIdDsField = new DataSourceTextField("fromId", "FromId");
            fromIdDsField.setRequired(true);
            fromIdDsField.setHidden(true);
            DataSourceTextField fromPresenceDsField = new DataSourceTextField("fromPresence", "FromPresence");
            fromPresenceDsField.setRequired(true);
            fromPresenceDsField.setHidden(true);
            setFields(pkDsField, typeDsfield, fromDsField, fromIdDsField, fromPresenceDsField);
            // Data lives only in the browser; no server round-trips.
            setClientOnly(true);
            addHandleErrorHandler(new HandleErrorHandler() {
                @Override
                public void onHandleError(ErrorEvent errorEvent) {
                    LOGGER.log(Level.SEVERE, "An error has occured:" + errorEvent.getAssociatedType());
                }
            });
        }
    }

    /**
     * Builds the alerts window and grid and wires up the row-click handlers.
     *
     * @param mainView parent view providing backend access
     * @param token session token used for backend calls
     */
    public AlertView(MainView mainView, String token) {
        this.mainView = mainView;
        this.token = token;
        alertWnd = new Window();
        alertWnd.setTitle("HttpChat Alerts");
        alertWnd.setAnimateMinimize(true);
        alertWnd.setAutoSize(true);
        alertWnd.setLeft(490);
        alertWnd.setCanDragResize(false);
        // Close button is hidden, so the CloseClickHandler below is effectively
        // unreachable through the window chrome.
        alertWnd.setShowCloseButton(false);
        alertWnd.setShowMaximizeButton(false);
        alertWnd.setShowMinimizeButton(true);
        alertWnd.addCloseClickHandler(new CloseClickHandler() {
            public void onCloseClick(CloseClickEvent event) {
                hide();
            }
        });
        alertsDs = new AlertDataSource("alertsDS");
        alertsGrid = new ListGrid();
        alertsGrid.setWidth(310);
        alertsGrid.setHeight(250);
        alertsGrid.setAlternateRecordStyles(true);
        alertsGrid.setShowAllRecords(true);
        alertsGrid.setDataSource(alertsDs);
        alertsGrid.setAutoFetchData(true);
        // Both left-click and context-click on a row open the same accept/deny popup.
        alertsGrid.addRecordClickHandler(new RecordClickHandler() {
            public void onRecordClick(RecordClickEvent event) {
                Menu contactPopup = createAlertPopup(event.getRecord());
                // Show the popup
                contactPopup.showContextMenu();
            }
        });
        alertsGrid.addRowContextClickHandler(new RowContextClickHandler() {
            public void onRowContextClick(RowContextClickEvent event) {
                Menu contactPopup = createAlertPopup(event.getRecord());
                // Show the popup
                contactPopup.showContextMenu();
                // Suppress the browser's native context menu.
                event.cancel();
            }
        });
        ListGridField idField = new ListGridField("id", "Id", 0);
        ListGridField typeField = new ListGridField("type", "Type", 200);
        ListGridField fromField = new ListGridField("from", "From", 100);
        ListGridField fromIdField = new ListGridField("fromId", "FromId", 0);
        ListGridField fromPresenceField = new ListGridField("fromPresence", "FromPresence", 0);
        // NOTE(review): fromPresenceField is created and hidden below but never
        // passed to setFields — possibly it should be included here; confirm.
        alertsGrid.setFields(idField, typeField, fromField, fromIdField);
        alertsGrid.setCanResizeFields(true);
        alertsGrid.hideFields(idField, fromIdField, fromPresenceField);
        alertWnd.addItem(alertsGrid);
        initWidget(alertWnd);
    }

    /** Shows the alerts window. */
    public void display() {
        alertWnd.show();
    }

    /**
     * Hides this view.
     *
     * <p>NOTE(review): this calls {@code destroy()}, which permanently disposes
     * the window — a subsequent {@link #display()} would operate on a destroyed
     * widget. {@code alertWnd.hide()} may have been intended; confirm.
     */
    public void hide() {
        alertWnd.destroy();
    }

    /**
     * Adds (or updates) an alert record in the grid and flashes the window for
     * new alerts.
     *
     * @param alert alert to display; ignored when null. Only CONTACT_INVITE
     *              alerts populate the from/fromId/fromPresence attributes;
     *              other types leave them unset.
     */
    public void addAlert(Alert alert) {
        if (alert != null) {
            ListGridRecord record = new ListGridRecord();
            record.setAttribute("id", alert.getId());
            record.setAttribute("type", alert.getType().name());
            switch (alert.getType()) {
                case CONTACT_INVITE:
                    Contact contact = (Contact) alert.getData();
                    record.setAttribute("from", contact.getName());
                    record.setAttribute("fromId", contact.getId());
                    record.setAttribute("fromPresence", contact.getPresence().name());
                    break;
            }
            // Update in place if a record with the same content exists,
            // otherwise add it and flash the window to draw attention.
            int pos = alertsGrid.getRecordIndex(record);
            if (pos >= 0) {
                alertsDs.updateData(record);
            } else {
                alertsDs.addData(record);
                alertWnd.flash();
            }
        }
    }

    /**
     * Builds the Accept/Deny popup menu for an alert row. Both actions first
     * show a confirmation dialog, then call the backend with the session token
     * and the alert id; on a 200 status the alert record is removed (and on
     * accept, the contact is added to the contact list).
     *
     * @param contactRec the grid record the user clicked
     * @return the populated popup menu
     */
    private Menu createAlertPopup(final ListGridRecord contactRec) {
        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.log(Level.INFO, "Creating alert popup menu for id[" + contactRec + "]");
        }
        final Menu menu = new Menu();
        menu.setShowShadow(true);
        menu.setShadowDepth(10);
        MenuItem acceptItem = new MenuItem("Accept");
        MenuItem denyItem = new MenuItem("Deny");
        menu.setItems(acceptItem, denyItem);
        acceptItem.addClickHandler(new com.smartgwt.client.widgets.menu.events.ClickHandler() {
            @Override
            public void onClick(MenuItemClickEvent menuItemClickEvent) {
                SC.confirm("Do you really want to accept this invite?", new BooleanCallback() {
                    // Contact is materialized from the record when the callback is
                    // constructed, i.e. before the user confirms.
                    Contact contact = new Contact(contactRec.getAttributeAsLong("fromId"),
                        contactRec.getAttribute("from"), Presence.valueOf(contactRec.getAttribute("fromPresence")));
                    public void execute(Boolean value) {
                        if (value != null && value) {
                            mainView.getBackendService().acceptInvite(token, contactRec.getAttributeAsLong("id"),
                                new AsyncCallback<StatusResult>() {
                                    @Override
                                    public void onFailure(Throwable throwable) {
                                        LOGGER.log(Level.SEVERE, "An unexpected error has occured.", throwable);
                                        mainView.getMenuView().writeStatus("An unexpected error has occured.");
                                    }
                                    @Override
                                    public void onSuccess(StatusResult result) {
                                        if (LOGGER.isLoggable(Level.INFO)) {
                                            LOGGER.log(Level.INFO, "Received statusResult:" + result.toString());
                                        }
                                        mainView.getMenuView().writeStatus(result.getStatus().getDescription());
                                        if (result.getStatus().getCode() == 200) {
                                            mainView.getContactListView().addContactToList(contact);
                                            alertsDs.removeData(contactRec);
                                        }
                                    }
                                });
                        }
                    }
                });
            }
        });
        denyItem.addClickHandler(new com.smartgwt.client.widgets.menu.events.ClickHandler() {
            @Override
            public void onClick(MenuItemClickEvent menuItemClickEvent) {
                SC.confirm("Do you really want to deny this invite?", new BooleanCallback() {
                    public void execute(Boolean value) {
                        if (value != null && value) {
                            mainView.getBackendService().denyInvite(token, contactRec.getAttributeAsLong("id"),
                                new AsyncCallback<StatusResult>() {
                                    @Override
                                    public void onFailure(Throwable throwable) {
                                        LOGGER.log(Level.SEVERE, "An unexpected error has occured.", throwable);
                                        mainView.getMenuView().writeStatus("An unexpected error has occured.");
                                    }
                                    @Override
                                    public void onSuccess(StatusResult result) {
                                        if (LOGGER.isLoggable(Level.INFO)) {
                                            LOGGER.log(Level.INFO, "Received statusResult:" + result.toString());
                                        }
                                        mainView.getMenuView().writeStatus(result.getStatus().getDescription());
                                        if (result.getStatus().getCode() == 200) {
                                            alertsDs.removeData(contactRec);
                                        }
                                    }
                                });
                        }
                    }
                });
            }
        });
        return menu;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.contrib.streaming.state;
import org.rocksdb.BlockBasedTableConfig;
import org.rocksdb.BloomFilter;
import org.rocksdb.ColumnFamilyOptions;
import org.rocksdb.CompactionStyle;
import org.rocksdb.DBOptions;
import org.rocksdb.InfoLogLevel;
import java.util.ArrayList;
import java.util.Collection;
/**
* The {@code PredefinedOptions} are configuration settings for the {@link RocksDBStateBackend}. The
* various pre-defined choices are configurations that have been empirically determined to be
* beneficial for performance under different settings.
*
* <p>Some of these settings are based on experiments by the Flink community, some follow guides
* from the RocksDB project.
*
* <p>All of them effectively disable the RocksDB log by default because this file would grow
* indefinitely and will be deleted with the TM anyway.
*/
/**
 * The {@code PredefinedOptions} are configuration settings for the {@link RocksDBStateBackend}. The
 * various pre-defined choices are configurations that have been empirically determined to be
 * beneficial for performance under different settings.
 *
 * <p>Some of these settings are based on experiments by the Flink community, some follow guides
 * from the RocksDB project.
 *
 * <p>All of them effectively disable the RocksDB log by default because this file would grow
 * indefinitely and will be deleted with the TM anyway.
 */
public enum PredefinedOptions {
    /**
     * Default options for all settings, except that writes are not forced to the disk.
     *
     * <p>Note: Because Flink does not rely on RocksDB data on disk for recovery, there is no need
     * to sync data to stable storage.
     *
     * <p>The following options are set:
     *
     * <ul>
     *   <li>setUseFsync(false)
     *   <li>setInfoLogLevel(InfoLogLevel.HEADER_LEVEL)
     *   <li>setStatsDumpPeriodSec(0)
     * </ul>
     */
    DEFAULT {
        @Override
        public DBOptions createDBOptions(Collection<AutoCloseable> handlesToClose) {
            return new DBOptions()
                    .setUseFsync(false)
                    .setInfoLogLevel(InfoLogLevel.HEADER_LEVEL)
                    .setStatsDumpPeriodSec(0);
        }
        @Override
        public ColumnFamilyOptions createColumnOptions(Collection<AutoCloseable> handlesToClose) {
            return new ColumnFamilyOptions();
        }
    },
    /**
     * Pre-defined options for regular spinning hard disks.
     *
     * <p>This constant configures RocksDB with some options that lead empirically to better
     * performance when the machines executing the system use regular spinning hard disks.
     *
     * <p>The following options are set:
     *
     * <ul>
     *   <li>setCompactionStyle(CompactionStyle.LEVEL)
     *   <li>setLevelCompactionDynamicLevelBytes(true)
     *   <li>setIncreaseParallelism(4)
     *   <li>setUseFsync(false)
     *   <li>setMaxOpenFiles(-1)
     *   <li>setInfoLogLevel(InfoLogLevel.HEADER_LEVEL)
     *   <li>setStatsDumpPeriodSec(0)
     * </ul>
     *
     * <p>Note: Because Flink does not rely on RocksDB data on disk for recovery, there is no need
     * to sync data to stable storage.
     */
    SPINNING_DISK_OPTIMIZED {
        @Override
        public DBOptions createDBOptions(Collection<AutoCloseable> handlesToClose) {
            return new DBOptions()
                    .setIncreaseParallelism(4)
                    .setUseFsync(false)
                    .setMaxOpenFiles(-1)
                    .setInfoLogLevel(InfoLogLevel.HEADER_LEVEL)
                    .setStatsDumpPeriodSec(0);
        }
        @Override
        public ColumnFamilyOptions createColumnOptions(Collection<AutoCloseable> handlesToClose) {
            return new ColumnFamilyOptions()
                    .setCompactionStyle(CompactionStyle.LEVEL)
                    .setLevelCompactionDynamicLevelBytes(true);
        }
    },
    /**
     * Pre-defined options for better performance on regular spinning hard disks, at the cost of a
     * higher memory consumption.
     *
     * <p><b>NOTE: These settings will cause RocksDB to consume a lot of memory for block caching
     * and compactions. If you experience out-of-memory problems related to RocksDB, consider
     * switching back to {@link #SPINNING_DISK_OPTIMIZED}.</b>
     *
     * <p>The following options are set:
     *
     * <ul>
     *   <li>setLevelCompactionDynamicLevelBytes(true)
     *   <li>setTargetFileSizeBase(256 MBytes)
     *   <li>setMaxBytesForLevelBase(1 GByte)
     *   <li>setWriteBufferSize(64 MBytes)
     *   <li>setIncreaseParallelism(4)
     *   <li>setMinWriteBufferNumberToMerge(3)
     *   <li>setMaxWriteBufferNumber(4)
     *   <li>setUseFsync(false)
     *   <li>setMaxOpenFiles(-1)
     *   <li>setInfoLogLevel(InfoLogLevel.HEADER_LEVEL)
     *   <li>setStatsDumpPeriodSec(0)
     *   <li>BlockBasedTableConfig.setBlockCacheSize(256 MBytes)
     *   <li>BlockBasedTableConfig.setBlockSize(128 KBytes)
     *   <li>BlockBasedTableConfig.setFilter(new BloomFilter())
     * </ul>
     *
     * <p>Note: Because Flink does not rely on RocksDB data on disk for recovery, there is no need
     * to sync data to stable storage.
     */
    SPINNING_DISK_OPTIMIZED_HIGH_MEM {
        @Override
        public DBOptions createDBOptions(Collection<AutoCloseable> handlesToClose) {
            return new DBOptions()
                    .setIncreaseParallelism(4)
                    .setUseFsync(false)
                    .setMaxOpenFiles(-1)
                    .setInfoLogLevel(InfoLogLevel.HEADER_LEVEL)
                    .setStatsDumpPeriodSec(0);
        }
        @Override
        public ColumnFamilyOptions createColumnOptions(Collection<AutoCloseable> handlesToClose) {
            final long blockCacheSize = 256 * 1024 * 1024;
            final long blockSize = 128 * 1024;
            final long targetFileSize = 256 * 1024 * 1024;
            final long writeBufferSize = 64 * 1024 * 1024;
            // The bloom filter is a native handle; register it for cleanup by
            // the caller instead of leaking it.
            BloomFilter bloomFilter = new BloomFilter();
            handlesToClose.add(bloomFilter);
            return new ColumnFamilyOptions()
                    .setCompactionStyle(CompactionStyle.LEVEL)
                    .setLevelCompactionDynamicLevelBytes(true)
                    .setTargetFileSizeBase(targetFileSize)
                    .setMaxBytesForLevelBase(4 * targetFileSize)
                    .setWriteBufferSize(writeBufferSize)
                    .setMinWriteBufferNumberToMerge(3)
                    .setMaxWriteBufferNumber(4)
                    .setTableFormatConfig(
                            new BlockBasedTableConfig()
                                    .setBlockCacheSize(blockCacheSize)
                                    .setBlockSize(blockSize)
                                    .setFilter(bloomFilter));
        }
    },
    /**
     * Pre-defined options for Flash SSDs.
     *
     * <p>This constant configures RocksDB with some options that lead empirically to better
     * performance when the machines executing the system use SSDs.
     *
     * <p>The following options are set:
     *
     * <ul>
     *   <li>setIncreaseParallelism(4)
     *   <li>setUseFsync(false)
     *   <li>setMaxOpenFiles(-1)
     *   <li>setInfoLogLevel(InfoLogLevel.HEADER_LEVEL)
     *   <li>setStatsDumpPeriodSec(0)
     * </ul>
     *
     * <p>Note: Because Flink does not rely on RocksDB data on disk for recovery, there is no need
     * to sync data to stable storage.
     */
    FLASH_SSD_OPTIMIZED {
        @Override
        public DBOptions createDBOptions(Collection<AutoCloseable> handlesToClose) {
            return new DBOptions()
                    .setIncreaseParallelism(4)
                    .setUseFsync(false)
                    .setMaxOpenFiles(-1)
                    .setInfoLogLevel(InfoLogLevel.HEADER_LEVEL)
                    .setStatsDumpPeriodSec(0);
        }
        @Override
        public ColumnFamilyOptions createColumnOptions(Collection<AutoCloseable> handlesToClose) {
            return new ColumnFamilyOptions();
        }
    };
    // ------------------------------------------------------------------------
    /**
     * Creates the {@link DBOptions} for this pre-defined setting.
     *
     * @param handlesToClose The collection to register newly created {@link
     *     org.rocksdb.RocksObject}s.
     * @return The pre-defined options object.
     */
    public abstract DBOptions createDBOptions(Collection<AutoCloseable> handlesToClose);
    /**
     * @return The pre-defined options object.
     * @deprecated use {@link #createDBOptions(Collection)} instead.
     */
    public DBOptions createDBOptions() {
        return createDBOptions(new ArrayList<>());
    }
    /**
     * Creates the {@link org.rocksdb.ColumnFamilyOptions} for this pre-defined setting.
     *
     * @param handlesToClose The collection to register newly created {@link
     *     org.rocksdb.RocksObject}s.
     * @return The pre-defined options object.
     */
    public abstract ColumnFamilyOptions createColumnOptions(
            Collection<AutoCloseable> handlesToClose);
    /**
     * @return The pre-defined options object.
     * @deprecated use {@link #createColumnOptions(Collection)} instead.
     */
    public ColumnFamilyOptions createColumnOptions() {
        return createColumnOptions(new ArrayList<>());
    }
}
| |
/**
* Copyright 2010 The ForPlay Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package forplay.java;
import forplay.core.Gradient;
import forplay.core.Image;
import forplay.core.Path;
import forplay.core.Pattern;
import forplay.core.Canvas;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.geom.AffineTransform;
import java.awt.geom.Ellipse2D;
import java.awt.geom.Line2D;
import java.awt.geom.Rectangle2D;
import java.util.Deque;
import java.util.LinkedList;
/**
 * AWT-based {@link Canvas} implementation. Drawing state (fill/stroke colors,
 * gradients, patterns, line attributes, composite, clip, transform) is kept in
 * a stack of {@code JavaCanvasState} objects so {@link #save()} /
 * {@link #restore()} can nest; the top of the stack is the current state.
 */
class JavaCanvas implements Canvas {
  final Graphics2D gfx;
  private final int width, height;
  // Top of the deque (getFirst) is the current drawing state.
  private Deque<JavaCanvasState> stateStack = new LinkedList<JavaCanvasState>();
  // Scratch shapes reused across calls to avoid per-call allocations.
  private Ellipse2D.Float ellipse = new Ellipse2D.Float();
  private Line2D.Float line = new Line2D.Float();
  private Rectangle2D.Float rect = new Rectangle2D.Float();

  JavaCanvas(Graphics2D graphics, int width, int height) {
    this.gfx = graphics;
    this.width = width;
    this.height = height;
    // push default state
    stateStack.push(new JavaCanvasState());
    // All clears go to rgba(0,0,0,0).
    gfx.setBackground(new Color(0, true));
  }

  /** Clears the whole canvas to transparent black (the background set above). */
  public void clear() {
    gfx.clearRect(0, 0, width, height);
  }

  @Override
  public void clip(Path path) {
    assert path instanceof JavaPath;
    // Stored on the state; presumably applied by prepareFill/prepareStroke —
    // JavaCanvasState is not visible here, confirm.
    currentState().clip = (JavaPath) path;
  }

  /** Draws the image at (x, y) at its natural size (coords truncated to ints). */
  public void drawImage(Image img, float x, float y) {
    assert img instanceof JavaImage;
    JavaImage jimg = (JavaImage) img;
    currentState().prepareFill(gfx);
    int dx = (int) x, dy = (int) y, w = jimg.width(), h = jimg.height();
    gfx.drawImage(jimg.img, dx, dy, dx + w, dy + h, 0, 0, w, h, null);
  }

  /** Draws the image so that its center lies at (x, y). */
  public void drawImageCentered(Image img, float x, float y) {
    drawImage(img, x - img.width()/2, y - img.height()/2);
  }

  /** Draws the image scaled to (w, h) at (x, y). */
  public void drawImage(Image img, float x, float y, float w, float h) {
    assert img instanceof JavaImage;
    JavaImage jimg = (JavaImage) img;
    // For non-integer scaling, we have to use AffineTransform.
    AffineTransform tx = new AffineTransform(w / jimg.width(), 0f, 0f, h / jimg.height(), x, y);
    currentState().prepareFill(gfx);
    gfx.drawImage(jimg.img, tx, null);
  }

  /** Draws the sub-rectangle (sx, sy, sw, sh) of the image into (dx, dy, dw, dh). */
  @Override
  public void drawImage(Image img, float dx, float dy, float dw, float dh, float sx, float sy, float sw, float sh) {
    assert img instanceof JavaImage;
    JavaImage jimg = (JavaImage) img;
    // TODO: use AffineTransform here as well?
    currentState().prepareFill(gfx);
    gfx.drawImage(jimg.img, (int)dx, (int)dy, (int)(dx + dw), (int)(dy + dh), (int)sx, (int)sy, (int)(sx + sw), (int)(sy + sh), null);
  }

  @Override
  public void drawLine(float x0, float y0, float x1, float y1) {
    currentState().prepareStroke(gfx);
    line.setLine(x0, y0, x1, y1);
    gfx.draw(line);
  }

  /** Draws a single point as a zero-length line (coords truncated to ints). */
  @Override
  public void drawPoint(float x, float y) {
    currentState().prepareStroke(gfx);
    gfx.drawLine((int) x, (int) y, (int) x, (int) y);
  }

  @Override
  public void drawText(String text, float x, float y) {
    currentState().prepareFill(gfx);
    gfx.drawString(text, x, y);
  }

  @Override
  public void fillCircle(float x, float y, float radius) {
    currentState().prepareFill(gfx);
    ellipse.setFrame(x - radius, y - radius, 2 * radius, 2 * radius);
    gfx.fill(ellipse);
  }

  @Override
  public void fillPath(Path path) {
    assert path instanceof JavaPath;
    currentState().prepareFill(gfx);
    gfx.fill(((JavaPath) path).path);
  }

  @Override
  public void fillRect(float x, float y, float width, float height) {
    currentState().prepareFill(gfx);
    rect.setRect(x, y, width, height);
    gfx.fill(rect);
  }

  @Override
  public int height() {
    return height;
  }

  /**
   * Pops the current state and restores the transform saved by the matching
   * {@link #save()}.
   */
  @Override
  public void restore() {
    stateStack.pop();
    gfx.setTransform(currentState().transform);
  }

  @Override
  public void rotate(float angle) {
    gfx.rotate(angle);
  }

  /**
   * Snapshots the current transform into the current state, then pushes a
   * clone of that state so later changes can be undone by {@link #restore()}.
   */
  @Override
  public void save() {
    // update saved transform
    currentState().transform = gfx.getTransform();
    // clone to maintain current state
    stateStack.push(new JavaCanvasState(currentState()));
  }

  @Override
  public void scale(float x, float y) {
    gfx.scale(x, y);
  }

  @Override
  public void setCompositeOperation(Composite composite) {
    currentState().composite = composite;
  }

  /** Sets a solid fill color; clears any fill gradient/pattern. */
  @Override
  public void setFillColor(int color) {
    currentState().fillColor = color;
    currentState().fillGradient = null;
    currentState().fillPattern = null;
  }

  /** Sets a fill gradient; clears any fill pattern/color. */
  @Override
  public void setFillGradient(Gradient gradient) {
    assert gradient instanceof JavaGradient;
    currentState().fillGradient = (JavaGradient) gradient;
    currentState().fillPattern = null;
    currentState().fillColor = 0;
  }

  /** Sets a fill pattern; clears any fill gradient/color. */
  @Override
  public void setFillPattern(Pattern pattern) {
    assert pattern instanceof JavaPattern;
    currentState().fillPattern = (JavaPattern) pattern;
    currentState().fillGradient = null;
    currentState().fillColor = 0;
  }

  @Override
  public void setLineCap(LineCap cap) {
    currentState().lineCap = cap;
  }

  @Override
  public void setLineJoin(LineJoin join) {
    currentState().lineJoin = join;
  }

  @Override
  public void setMiterLimit(float miter) {
    currentState().miterLimit = miter;
  }

  @Override
  public void setStrokeColor(int color) {
    currentState().strokeColor = color;
  }

  @Override
  public void setStrokeWidth(float w) {
    currentState().strokeWidth = w;
  }

  /** Replaces the current transform with the given 2x3 affine matrix. */
  @Override
  public void setTransform(float m11, float m12, float m21, float m22, float dx, float dy) {
    gfx.setTransform(new AffineTransform(m11, m12, m21, m22, dx, dy));
  }

  @Override
  public void strokeCircle(float x, float y, float radius) {
    currentState().prepareStroke(gfx);
    ellipse.setFrame(x - radius, y - radius, 2 * radius, 2 * radius);
    gfx.draw(ellipse);
  }

  @Override
  public void strokePath(Path path) {
    currentState().prepareStroke(gfx);
    // NOTE(review): this explicitly re-sets the stroke color, unlike
    // strokeCircle/strokeRect which rely on prepareStroke alone — possibly
    // redundant or papering over a prepareStroke gap; confirm.
    gfx.setColor(new Color(currentState().strokeColor, false));
    gfx.draw(((JavaPath) path).path);
  }

  @Override
  public void strokeRect(float x, float y, float width, float height) {
    currentState().prepareStroke(gfx);
    rect.setRect(x, y, width, height);
    gfx.draw(rect);
  }

  /** Concatenates the given 2x3 affine matrix onto the current transform. */
  @Override
  public void transform(float m11, float m12, float m21, float m22, float dx, float dy) {
    gfx.transform(new AffineTransform(m11, m12, m21, m22, dx, dy));
  }

  @Override
  public void translate(float x, float y) {
    gfx.translate(x, y);
  }

  @Override
  public int width() {
    return width;
  }

  /** Returns the state on top of the stack (the active drawing state). */
  private JavaCanvasState currentState() {
    return stateStack.getFirst();
  }
}
| |
/**
Copyright (c) 2013 - 2017, Sergej Schefer
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package mathkit;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/** 3x3 Matrix */
/** http://deadkitteninaframe.com/wordpress/ */
/***
* Layout: Column-Major
*
* The data gets passed column by column to open gl.
*
* 00 10 20
* 01 11 21
* 02 12 22
*
* float[9] = {00, 01, 02, 10, 11, 12, 20, 21, 22};
*/
public class float3x3 {
public float m[][] = new float[3][3];
/** Creates a new 3x3 matrix initialized to the identity matrix. */
public float3x3(){
    setIdentity();
}
/**
 * builds a matrix from three 3d vectors
 * the matrix uses the vectors as rows
 * (m is indexed as m[column][row], so each vector is spread across
 * the three columns at a fixed row index)
 * @param row0 first row
 * @param row1 second row
 * @param row2 third row
 */
public float3x3(float3 row0, float3 row1, float3 row2){
    m[0][0] = row0.x; m[1][0] = row0.y; m[2][0] = row0.z;
    m[0][1] = row1.x; m[1][1] = row1.y; m[2][1] = row1.z;
    m[0][2] = row2.x; m[1][2] = row2.y; m[2][2] = row2.z;
}
/**
 * builds a 3x3 matrix from 9 floats column-wise
 * (mat[0..2] is column 0, mat[3..5] is column 1, mat[6..8] is column 2,
 * matching the column-major layout passed to OpenGL)
 * @param mat 9 floats in column-major order
 */
public float3x3(float mat[]){
    m[0][0] = mat[0]; m[1][0] = mat[3]; m[2][0] = mat[6];
    m[0][1] = mat[1]; m[1][1] = mat[4]; m[2][1] = mat[7];
    m[0][2] = mat[2]; m[1][2] = mat[5]; m[2][2] = mat[8];
}
/**
 * builds a 3x3 matrix using the values which are contained in the
 * parameter 3x3 matrix
 *
 * <p>Fix: performs a deep copy of the backing 2D array. The previous
 * implementation used {@code mat.m.clone()}, which only clones the outer
 * array — both matrices then shared the same inner {@code float[]} rows,
 * so mutating the copy silently mutated the source (and vice versa).
 *
 * @param mat matrix to copy
 */
public float3x3(float3x3 mat){
    for (int col = 0; col < 3; col++) {
        this.m[col] = mat.m[col].clone();
    }
}
/**
 * builds a 3x3 matrix from 4x4 matrix.
 * the 4th row and column are cut off (only the upper-left 3x3
 * sub-matrix is copied)
 * @param mat 4x4 source matrix
 */
public float3x3(float4x4 mat){
    m[0][0] = mat.m[0][0]; m[1][0] = mat.m[1][0]; m[2][0] = mat.m[2][0];
    m[0][1] = mat.m[0][1]; m[1][1] = mat.m[1][1]; m[2][1] = mat.m[2][1];
    m[0][2] = mat.m[0][2]; m[1][2] = mat.m[1][2]; m[2][2] = mat.m[2][2];
}
/**
 * changes the values from this matrix,
 * to the values of the parameter matrix
 *
 * <p>Fix: performs a deep copy of the backing 2D array. The previous
 * implementation used {@code mat.m.clone()}, which only clones the outer
 * array — this matrix would then share inner {@code float[]} rows with
 * {@code mat}, so later writes to either matrix corrupted the other.
 *
 * @param mat matrix whose values are copied into this one
 */
public void set(float3x3 mat){
    for (int col = 0; col < 3; col++) {
        this.m[col] = mat.m[col].clone();
    }
}
/**
 * Resets this matrix to the 3x3 identity matrix: ones on the main
 * diagonal, zeros everywhere else.
 */
public void setIdentity(){
    for (int col = 0; col < 3; col++) {
        for (int row = 0; row < 3; row++) {
            m[col][row] = (col == row) ? 1.0f : 0.0f;
        }
    }
}
/**
 * returns the x components as a 3d vector (column 0 of the matrix)
 * @return x components as a 3d vector
 */
public float3 getColumn0(){
    return new float3(m[0][0], m[0][1], m[0][2]);
}
/**
 * returns the y components as a 3d vector (column 1 of the matrix)
 * @return y components as a 3d vector
 */
public float3 getColumn1(){
    return new float3(m[1][0], m[1][1], m[1][2]);
}
/**
 * returns the z components as a 3d vector (column 2 of the matrix)
 * @return z components as a 3d vector
 */
public float3 getColumn2(){
    return new float3(m[2][0], m[2][1], m[2][2]);
}
/**
* returns 0th row as a 3d vector
* @return 0th row as a 3d vector
*/
public float3 getRow0(){
return new float3(m[0][0], m[1][0], m[2][0]);
}
/**
* returns 1st row as a 3d vector
* @return 1st row as a 3d vector
*/
public float3 getRow1(){
return new float3(m[0][1], m[1][1], m[2][1]);
}
/**
* returns 2nd row as a 3d vector
* @return 2nd row as a 3d vector
*/
public float3 getRow2(){
return new float3(m[0][2], m[1][2], m[2][2]);
}
/**
* set the column 0
* @param column0 float3
*/
public void setColumn0(float3 column0){
m[0][0] = column0.x;
m[0][1] = column0.y;
m[0][2] = column0.z;
}
/**
* set the column 1
* @param column1 float3
*/
public void setColumn1(float3 column1){
m[1][0] = column1.x;
m[1][1] = column1.y;
m[1][2] = column1.z;
}
/**
* set the column 2
* @param column2 float3
*/
public void setColumn2(float3 column2){
m[2][0] = column2.x;
m[2][1] = column2.y;
m[2][2] = column2.z;
}
/**
* set the row 0
* @param row0 float3
*/
public void setRow0(float3 row0){
m[0][0] = row0.x;
m[1][0] = row0.y;
m[2][0] = row0.z;
}
/**
* set the row 1
* @param row1 float3
*/
public void setRow1(float3 row1){
m[0][1] = row1.x;
m[1][1] = row1.y;
m[2][1] = row1.z;
}
/**
* set the row 2
* @param row2 float3
*/
public void setRow2(float3 row2){
m[0][2] = row2.x;
m[1][2] = row2.y;
m[2][2] = row2.z;
}
/**
* returns the transposed matrix
* @return transposed matrix
*/
public float3x3 transpose(){
return new float3x3(getColumn0(), getColumn1(), getColumn2());
}
/**
* we assume that this is a pure open gl rotation matrix without scale.
* @return inversed rotation matrix
*/
public float3x3 inverse(){
/* because this is an OpenGL rotation matrix, we can simply transpose it to inverse it */
return transpose();
}
/* multiplications */
/**
* matrix multiplication
* @param mat
* @return (this * mat)
*/
public float3x3 multiply(float3x3 mat){
float3 row0 = new float3(getRow0().dot(mat.getColumn0()), getRow0().dot(mat.getColumn1()), getRow0().dot(mat.getColumn2()));
float3 row1 = new float3(getRow1().dot(mat.getColumn0()), getRow1().dot(mat.getColumn1()), getRow1().dot(mat.getColumn2()));
float3 row2 = new float3(getRow2().dot(mat.getColumn0()), getRow2().dot(mat.getColumn1()), getRow2().dot(mat.getColumn2()));
return new float3x3(row0, row1, row2);
}
/**
* multiplies a 3d vector with the matrix
* @param v
* @return (mat * vector)
*/
public float3 multiply(float3 v){
return new float3(v.dot(getRow0()), v.dot(getRow1()), v.dot(getRow2()));
}
/**
* multiplies the whole matrix componentwise by a scalar
* @param scalar
* @return scaled matrix
*/
public float3x3 multiply(float scalar){
float3x3 result = new float3x3();
result.m[0][0] = m[0][0] * scalar;
result.m[0][1] = m[0][1] * scalar;
result.m[0][2] = m[0][2] * scalar;
result.m[1][0] = m[1][0] * scalar;
result.m[1][1] = m[1][1] * scalar;
result.m[1][2] = m[1][2] * scalar;
result.m[2][0] = m[2][0] * scalar;
result.m[2][1] = m[2][1] * scalar;
result.m[2][2] = m[2][2] * scalar;
return result;
}
/**
* parses this matrix into a float array
* @return float array
*/
public float[] toFloat(){
float[] c0 = getColumn0().toFloat();
float[] c1 = getColumn1().toFloat();
float[] c2 = getColumn2().toFloat();
return new float[] { c0[0], c0[1], c0[2], c1[0], c1[1], c1[2], c2[0], c2[1], c2[2] };
}
/**
* parses this matrix into a float buffer
* column after column
* @return float buffer
*/
public FloatBuffer toBuffer(){
FloatBuffer buffer = ByteBuffer.allocateDirect(9 << 2).order(ByteOrder.nativeOrder()).asFloatBuffer();
buffer.put(toFloat());
buffer.flip();
return buffer;
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
public String toString() {
return m[0][0] + " | " + m[1][0] + " | " + m[2][0] + "\n"
+ m[0][1] + " | " + m[1][1] + " | " + m[2][1] + "\n"
+ m[0][2] + " | " + m[1][2] + " | " + m[2][2];
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.healthcare.v1.model;
/**
* Evaluate a user's Consents for all matching User data mappings. Note: User data mappings are
* indexed asynchronously, causing slight delays between the time mappings are created or updated
* and when they are included in EvaluateUserConsents results.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Healthcare API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
@SuppressWarnings("javadoc")
public final class EvaluateUserConsentsRequest extends com.google.api.client.json.GenericJson {
  /**
   * Optional. Specific Consents that the access request is checked against. Each one must carry
   * the same `user_id` as the User data mappings being evaluated, must exist in the current
   * `consent_store`, and must be in state `ACTIVE` or `DRAFT`; at most 100 may be supplied.
   * When omitted, every unexpired `ACTIVE` Consent in the current `consent_store` is evaluated.
   * May be {@code null}.
   */
  @com.google.api.client.util.Key
  private ConsentList consentList;
  /**
   * Optional. Maximum number of User data mappings returned in a single response; defaults to
   * 100 and may not exceed 1000. May be {@code null}.
   */
  @com.google.api.client.util.Key
  private Integer pageSize;
  /**
   * Optional. Token identifying the next page of results; empty to fetch the first page.
   * May be {@code null}.
   */
  @com.google.api.client.util.Key
  private String pageToken;
  /**
   * Required. Values of the request attributes associated with this access request.
   * May be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, String> requestAttributes;
  /**
   * Optional. Values of the resource attributes associated with the requested resources; when
   * none are given, all resources are queried. May be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, String> resourceAttributes;
  /**
   * Optional. View used for the EvaluateUserConsentsResponse; defaults to `BASIC`, which reports
   * `consented` as `TRUE` or `FALSE`. May be {@code null}.
   */
  @com.google.api.client.util.Key
  private String responseView;
  /**
   * Required. ID of the user whose Consents are evaluated. May be {@code null}.
   */
  @com.google.api.client.util.Key
  private String userId;
  /**
   * Returns the specific Consents to evaluate the access request against,
   * or {@code null} when all `ACTIVE` unexpired Consents should be used.
   */
  public ConsentList getConsentList() {
    return this.consentList;
  }
  /**
   * Sets the specific Consents to evaluate the access request against
   * ({@code null} to use all `ACTIVE` unexpired Consents).
   * @return this request, for chaining
   */
  public EvaluateUserConsentsRequest setConsentList(ConsentList consentList) {
    this.consentList = consentList;
    return this;
  }
  /**
   * Returns the per-response limit on User data mappings, or {@code null} for the default (100).
   */
  public Integer getPageSize() {
    return this.pageSize;
  }
  /**
   * Sets the per-response limit on User data mappings (at most 1000; {@code null} for default).
   * @return this request, for chaining
   */
  public EvaluateUserConsentsRequest setPageSize(Integer pageSize) {
    this.pageSize = pageSize;
    return this;
  }
  /**
   * Returns the pagination token, or {@code null}/empty for the first page.
   */
  public String getPageToken() {
    return this.pageToken;
  }
  /**
   * Sets the pagination token (empty or {@code null} for the first page).
   * @return this request, for chaining
   */
  public EvaluateUserConsentsRequest setPageToken(String pageToken) {
    this.pageToken = pageToken;
    return this;
  }
  /**
   * Returns the request attribute values for this access request, or {@code null} for none.
   */
  public java.util.Map<String, String> getRequestAttributes() {
    return this.requestAttributes;
  }
  /**
   * Sets the request attribute values for this access request.
   * @return this request, for chaining
   */
  public EvaluateUserConsentsRequest setRequestAttributes(java.util.Map<String, String> requestAttributes) {
    this.requestAttributes = requestAttributes;
    return this;
  }
  /**
   * Returns the resource attribute values, or {@code null} to query all resources.
   */
  public java.util.Map<String, String> getResourceAttributes() {
    return this.resourceAttributes;
  }
  /**
   * Sets the resource attribute values ({@code null} or empty queries all resources).
   * @return this request, for chaining
   */
  public EvaluateUserConsentsRequest setResourceAttributes(java.util.Map<String, String> resourceAttributes) {
    this.resourceAttributes = resourceAttributes;
    return this;
  }
  /**
   * Returns the response view, or {@code null} for the default (`BASIC`).
   */
  public String getResponseView() {
    return this.responseView;
  }
  /**
   * Sets the response view ({@code null} for the default `BASIC` view).
   * @return this request, for chaining
   */
  public EvaluateUserConsentsRequest setResponseView(String responseView) {
    this.responseView = responseView;
    return this;
  }
  /**
   * Returns the ID of the user whose Consents are evaluated, or {@code null} if unset.
   */
  public String getUserId() {
    return this.userId;
  }
  /**
   * Sets the ID of the user whose Consents are evaluated.
   * @return this request, for chaining
   */
  public EvaluateUserConsentsRequest setUserId(String userId) {
    this.userId = userId;
    return this;
  }
  @Override
  public EvaluateUserConsentsRequest set(String fieldName, Object value) {
    return (EvaluateUserConsentsRequest) super.set(fieldName, value);
  }
  @Override
  public EvaluateUserConsentsRequest clone() {
    return (EvaluateUserConsentsRequest) super.clone();
  }
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: http://lwjgl.org/license.php
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.vulkan;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* <a href="https://www.khronos.org/registry/vulkan/specs/1.0/man/html/VkDisplayPlanePropertiesKHR.html">Khronos Reference Page</a><br>
* <a href="https://www.khronos.org/registry/vulkan/specs/1.0-wsi_extensions/xhtml/vkspec.html#VkDisplayPlanePropertiesKHR">Vulkan Specification</a>
*
* <p>Describes properties of a display plane.</p>
*
* <h5>Valid Usage</h5>
*
* <ul>
* <li>{@code currentDisplay} <b>must</b> be a valid {@code VkDisplayKHR} handle</li>
* </ul>
*
* <h3>Member documentation</h3>
*
* <ul>
* <li>{@code currentDisplay} – the handle of the display the plane is currently associated with. If the plane is not currently attached to any displays, this will be {@link VK10#VK_NULL_HANDLE NULL_HANDLE}.</li>
* <li>{@code currentStackIndex} – the current z-order of the plane</li>
* </ul>
*
* <h3>Layout</h3>
*
* <pre><code>struct VkDisplayPlanePropertiesKHR {
VkDisplayKHR currentDisplay;
uint32_t currentStackIndex;
}</code></pre>
*/
public class VkDisplayPlanePropertiesKHR extends Struct {
	/** The struct size in bytes. */
	public static final int SIZEOF;
	/** The struct alignment in bytes. */
	public static final int ALIGNOF;
	/** The struct member offsets. */
	public static final int
		CURRENTDISPLAY,
		CURRENTSTACKINDEX;
	static {
		// Native layout: VkDisplayKHR currentDisplay (8-byte handle, read via memGetLong),
		// then uint32_t currentStackIndex (4 bytes). Offsets/size/alignment are computed
		// by the LWJGL layout helper rather than hard-coded.
		Layout layout = __struct(
			__member(8),
			__member(4)
		);
		SIZEOF = layout.getSize();
		ALIGNOF = layout.getAlignment();
		CURRENTDISPLAY = layout.offsetof(0);
		CURRENTSTACKINDEX = layout.offsetof(1);
	}
	/**
	 * Wraps a struct at the given memory {@code address}. The {@code container} (may be
	 * {@code null}) is retained so the backing buffer stays reachable for the lifetime
	 * of this instance.
	 */
	VkDisplayPlanePropertiesKHR(long address, ByteBuffer container) {
		super(address, container);
	}
	/**
	 * Creates a {@link VkDisplayPlanePropertiesKHR} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
	 * visible to the struct instance and vice versa.
	 *
	 * <p>The created instance holds a strong reference to the container object.</p>
	 */
	public VkDisplayPlanePropertiesKHR(ByteBuffer container) {
		this(memAddress(container), checkContainer(container, SIZEOF));
	}
	@Override
	public int sizeof() { return SIZEOF; }
	/** Returns the value of the {@code currentDisplay} field. */
	public long currentDisplay() { return ncurrentDisplay(address()); }
	/** Returns the value of the {@code currentStackIndex} field. */
	public int currentStackIndex() { return ncurrentStackIndex(address()); }
	// -----------------------------------
	/** Returns a new {@link VkDisplayPlanePropertiesKHR} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
	public static VkDisplayPlanePropertiesKHR malloc() {
		return create(nmemAlloc(SIZEOF));
	}
	/** Returns a new {@link VkDisplayPlanePropertiesKHR} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
	public static VkDisplayPlanePropertiesKHR calloc() {
		return create(nmemCalloc(1, SIZEOF));
	}
	/** Returns a new {@link VkDisplayPlanePropertiesKHR} instance allocated with {@link BufferUtils}. */
	public static VkDisplayPlanePropertiesKHR create() {
		return new VkDisplayPlanePropertiesKHR(BufferUtils.createByteBuffer(SIZEOF));
	}
	/** Returns a new {@link VkDisplayPlanePropertiesKHR} instance for the specified memory address or {@code null} if the address is {@code NULL}. */
	public static VkDisplayPlanePropertiesKHR create(long address) {
		return address == NULL ? null : new VkDisplayPlanePropertiesKHR(address, null);
	}
	/**
	 * Returns a new {@link VkDisplayPlanePropertiesKHR.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer malloc(int capacity) {
		return create(nmemAlloc(capacity * SIZEOF), capacity);
	}
	/**
	 * Returns a new {@link VkDisplayPlanePropertiesKHR.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer calloc(int capacity) {
		return create(nmemCalloc(capacity, SIZEOF), capacity);
	}
	/**
	 * Returns a new {@link VkDisplayPlanePropertiesKHR.Buffer} instance allocated with {@link BufferUtils}.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer create(int capacity) {
		return new Buffer(BufferUtils.createByteBuffer(capacity * SIZEOF));
	}
	/**
	 * Create a {@link VkDisplayPlanePropertiesKHR.Buffer} instance at the specified memory.
	 *
	 * @param address  the memory address
	 * @param capacity the buffer capacity
	 */
	public static Buffer create(long address, int capacity) {
		// mark=-1, pos=0, lim=cap=capacity: a fresh, fully readable buffer view.
		return address == NULL ? null : new Buffer(address, null, -1, 0, capacity, capacity);
	}
	// -----------------------------------
	/** Returns a new {@link VkDisplayPlanePropertiesKHR} instance allocated on the thread-local {@link MemoryStack}. */
	public static VkDisplayPlanePropertiesKHR mallocStack() {
		return mallocStack(stackGet());
	}
	/** Returns a new {@link VkDisplayPlanePropertiesKHR} instance allocated on the thread-local {@link MemoryStack} and initializes all its bits to zero. */
	public static VkDisplayPlanePropertiesKHR callocStack() {
		return callocStack(stackGet());
	}
	/**
	 * Returns a new {@link VkDisplayPlanePropertiesKHR} instance allocated on the specified {@link MemoryStack}.
	 *
	 * @param stack the stack from which to allocate
	 */
	public static VkDisplayPlanePropertiesKHR mallocStack(MemoryStack stack) {
		return create(stack.nmalloc(ALIGNOF, SIZEOF));
	}
	/**
	 * Returns a new {@link VkDisplayPlanePropertiesKHR} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
	 *
	 * @param stack the stack from which to allocate
	 */
	public static VkDisplayPlanePropertiesKHR callocStack(MemoryStack stack) {
		return create(stack.ncalloc(ALIGNOF, 1, SIZEOF));
	}
	/**
	 * Returns a new {@link VkDisplayPlanePropertiesKHR.Buffer} instance allocated on the thread-local {@link MemoryStack}.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer mallocStack(int capacity) {
		return mallocStack(capacity, stackGet());
	}
	/**
	 * Returns a new {@link VkDisplayPlanePropertiesKHR.Buffer} instance allocated on the thread-local {@link MemoryStack} and initializes all its bits to zero.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer callocStack(int capacity) {
		return callocStack(capacity, stackGet());
	}
	/**
	 * Returns a new {@link VkDisplayPlanePropertiesKHR.Buffer} instance allocated on the specified {@link MemoryStack}.
	 *
	 * @param stack the stack from which to allocate
	 * @param capacity the buffer capacity
	 */
	public static Buffer mallocStack(int capacity, MemoryStack stack) {
		return create(stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
	}
	/**
	 * Returns a new {@link VkDisplayPlanePropertiesKHR.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
	 *
	 * @param stack the stack from which to allocate
	 * @param capacity the buffer capacity
	 */
	public static Buffer callocStack(int capacity, MemoryStack stack) {
		return create(stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
	}
	// -----------------------------------
	/** Unsafe version of {@link #currentDisplay}. Reads the 64-bit handle directly at {@code struct + CURRENTDISPLAY}. */
	public static long ncurrentDisplay(long struct) { return memGetLong(struct + VkDisplayPlanePropertiesKHR.CURRENTDISPLAY); }
	/** Unsafe version of {@link #currentStackIndex}. Reads the 32-bit value directly at {@code struct + CURRENTSTACKINDEX}. */
	public static int ncurrentStackIndex(long struct) { return memGetInt(struct + VkDisplayPlanePropertiesKHR.CURRENTSTACKINDEX); }
	// -----------------------------------
	/** An array of {@link VkDisplayPlanePropertiesKHR} structs. */
	public static final class Buffer extends StructBuffer<VkDisplayPlanePropertiesKHR, Buffer> {
		/**
		 * Creates a new {@link VkDisplayPlanePropertiesKHR.Buffer} instance backed by the specified container.
		 *
		 * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
		 * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
		 * by {@link VkDisplayPlanePropertiesKHR#SIZEOF}, and its mark will be undefined.
		 *
		 * <p>The created buffer instance holds a strong reference to the container object.</p>
		 */
		public Buffer(ByteBuffer container) {
			super(container, container.remaining() / SIZEOF);
		}
		/** Wraps a buffer of structs at {@code address} with explicit mark/position/limit/capacity (in struct units). */
		Buffer(long address, ByteBuffer container, int mark, int pos, int lim, int cap) {
			super(address, container, mark, pos, lim, cap);
		}
		@Override
		protected Buffer self() {
			return this;
		}
		@Override
		protected Buffer newBufferInstance(long address, ByteBuffer container, int mark, int pos, int lim, int cap) {
			return new Buffer(address, container, mark, pos, lim, cap);
		}
		@Override
		protected VkDisplayPlanePropertiesKHR newInstance(long address) {
			return new VkDisplayPlanePropertiesKHR(address, getContainer());
		}
		@Override
		protected int sizeof() {
			return SIZEOF;
		}
		/** Returns the value of the {@code currentDisplay} field. */
		public long currentDisplay() { return VkDisplayPlanePropertiesKHR.ncurrentDisplay(address()); }
		/** Returns the value of the {@code currentStackIndex} field. */
		public int currentStackIndex() { return VkDisplayPlanePropertiesKHR.ncurrentStackIndex(address()); }
	}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.rescore;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.rescore.QueryRescorer.QueryRescoreContext;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder;
/**
 * Rescorer that runs a secondary query over the top documents returned by the
 * original query, combining the two scores according to the configured weights
 * and {@link QueryRescoreMode}.
 */
public class QueryRescorerBuilder extends RescorerBuilder<QueryRescorerBuilder> {
    /** Name used for parsing and stream serialization of this rescorer. */
    public static final String NAME = "query";

    private static final ParseField RESCORE_QUERY_FIELD = new ParseField("rescore_query");
    private static final ParseField QUERY_WEIGHT_FIELD = new ParseField("query_weight");
    private static final ParseField RESCORE_QUERY_WEIGHT_FIELD = new ParseField("rescore_query_weight");
    private static final ParseField SCORE_MODE_FIELD = new ParseField("score_mode");
    // Parses into InnerBuilder because ObjectParser can only fill an already
    // constructed object, while this class requires the query at construction time.
    private static final ObjectParser<InnerBuilder, Void> QUERY_RESCORE_PARSER = new ObjectParser<>(NAME, null);
    static {
        QUERY_RESCORE_PARSER.declareObject(InnerBuilder::setQueryBuilder, (p, c) -> {
            try {
                return parseInnerQueryBuilder(p);
            } catch (IOException e) {
                throw new ParsingException(p.getTokenLocation(), "Could not parse inner query", e);
            }
        } , RESCORE_QUERY_FIELD);
        QUERY_RESCORE_PARSER.declareFloat(InnerBuilder::setQueryWeight, QUERY_WEIGHT_FIELD);
        QUERY_RESCORE_PARSER.declareFloat(InnerBuilder::setRescoreQueryWeight, RESCORE_QUERY_WEIGHT_FIELD);
        QUERY_RESCORE_PARSER.declareString((struct, value) -> struct.setScoreMode(QueryRescoreMode.fromString(value)), SCORE_MODE_FIELD);
    }
    /** Default weight applied to the rescore query's score. */
    public static final float DEFAULT_RESCORE_QUERYWEIGHT = 1.0f;
    /** Default weight applied to the original query's score. */
    public static final float DEFAULT_QUERYWEIGHT = 1.0f;
    /** Default mode used to combine the original and rescore query scores. */
    public static final QueryRescoreMode DEFAULT_SCORE_MODE = QueryRescoreMode.Total;
    private final QueryBuilder queryBuilder;
    private float rescoreQueryWeight = DEFAULT_RESCORE_QUERYWEIGHT;
    private float queryWeight = DEFAULT_QUERYWEIGHT;
    private QueryRescoreMode scoreMode = DEFAULT_SCORE_MODE;
    /**
     * Creates a new {@link QueryRescorerBuilder} instance
     * @param builder the query builder to build the rescore query from
     * @throws IllegalArgumentException if {@code builder} is {@code null}
     */
    public QueryRescorerBuilder(QueryBuilder builder) {
        if (builder == null) {
            throw new IllegalArgumentException("rescore_query cannot be null");
        }
        this.queryBuilder = builder;
    }
    /**
     * Read from a stream. Field order must mirror {@link #doWriteTo(StreamOutput)}.
     */
    public QueryRescorerBuilder(StreamInput in) throws IOException {
        super(in);
        queryBuilder = in.readNamedWriteable(QueryBuilder.class);
        scoreMode = QueryRescoreMode.readFromStream(in);
        rescoreQueryWeight = in.readFloat();
        queryWeight = in.readFloat();
    }
    @Override
    public void doWriteTo(StreamOutput out) throws IOException {
        out.writeNamedWriteable(queryBuilder);
        scoreMode.writeTo(out);
        out.writeFloat(rescoreQueryWeight);
        out.writeFloat(queryWeight);
    }
    @Override
    public String getWriteableName() {
        return NAME;
    }
    /**
     * @return the query used for this rescore query
     */
    public QueryBuilder getRescoreQuery() {
        return this.queryBuilder;
    }
    /**
     * Sets the original query weight for rescoring. The default is <tt>1.0</tt>
     */
    public QueryRescorerBuilder setQueryWeight(float queryWeight) {
        this.queryWeight = queryWeight;
        return this;
    }
    /**
     * Gets the original query weight for rescoring. The default is <tt>1.0</tt>
     */
    public float getQueryWeight() {
        return this.queryWeight;
    }
    /**
     * Sets the rescore query weight for rescoring. The default is <tt>1.0</tt>
     */
    public QueryRescorerBuilder setRescoreQueryWeight(float rescoreQueryWeight) {
        this.rescoreQueryWeight = rescoreQueryWeight;
        return this;
    }
    /**
     * Gets the rescore query weight for rescoring. The default is <tt>1.0</tt>
     */
    public float getRescoreQueryWeight() {
        return this.rescoreQueryWeight;
    }
    /**
     * Sets the original query score mode. The default is {@link QueryRescoreMode#Total}.
     */
    public QueryRescorerBuilder setScoreMode(QueryRescoreMode scoreMode) {
        this.scoreMode = scoreMode;
        return this;
    }
    /**
     * Gets the original query score mode. The default is <tt>total</tt>
     */
    public QueryRescoreMode getScoreMode() {
        return this.scoreMode;
    }
    @Override
    public void doXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(NAME);
        builder.field(RESCORE_QUERY_FIELD.getPreferredName(), queryBuilder);
        builder.field(QUERY_WEIGHT_FIELD.getPreferredName(), queryWeight);
        builder.field(RESCORE_QUERY_WEIGHT_FIELD.getPreferredName(), rescoreQueryWeight);
        builder.field(SCORE_MODE_FIELD.getPreferredName(), scoreMode.name().toLowerCase(Locale.ROOT));
        builder.endObject();
    }
    /**
     * Parses a {@link QueryRescorerBuilder} from the current position of the given parser.
     */
    public static QueryRescorerBuilder fromXContent(XContentParser parser) throws IOException {
        InnerBuilder innerBuilder = QUERY_RESCORE_PARSER.parse(parser, new InnerBuilder(), null);
        return innerBuilder.build();
    }
    @Override
    public QueryRescoreContext innerBuildContext(int windowSize, QueryShardContext context) throws IOException {
        QueryRescoreContext queryRescoreContext = new QueryRescoreContext(windowSize);
        // query is rewritten at this point already
        queryRescoreContext.setQuery(queryBuilder.toQuery(context));
        queryRescoreContext.setQueryWeight(this.queryWeight);
        queryRescoreContext.setRescoreQueryWeight(this.rescoreQueryWeight);
        queryRescoreContext.setScoreMode(this.scoreMode);
        return queryRescoreContext;
    }
    @Override
    public final int hashCode() {
        int result = super.hashCode();
        return 31 * result + Objects.hash(scoreMode, queryWeight, rescoreQueryWeight, queryBuilder);
    }
    @Override
    public final boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        QueryRescorerBuilder other = (QueryRescorerBuilder) obj;
        return super.equals(obj) &&
               Objects.equals(scoreMode, other.scoreMode) &&
               Objects.equals(queryWeight, other.queryWeight) &&
               Objects.equals(rescoreQueryWeight, other.rescoreQueryWeight) &&
               Objects.equals(queryBuilder, other.queryBuilder);
    }
    /**
     * Helper to be able to use {@link ObjectParser}, since we need the inner query builder
     * for the constructor of {@link QueryRescorerBuilder}, but {@link ObjectParser} only
     * allows filling properties of an already constructed value.
     */
    private static class InnerBuilder {
        private QueryBuilder queryBuilder;
        private float rescoreQueryWeight = DEFAULT_RESCORE_QUERYWEIGHT;
        private float queryWeight = DEFAULT_QUERYWEIGHT;
        private QueryRescoreMode scoreMode = DEFAULT_SCORE_MODE;
        void setQueryBuilder(QueryBuilder builder) {
            this.queryBuilder = builder;
        }
        QueryRescorerBuilder build() {
            QueryRescorerBuilder queryRescoreBuilder = new QueryRescorerBuilder(queryBuilder);
            queryRescoreBuilder.setQueryWeight(queryWeight);
            queryRescoreBuilder.setRescoreQueryWeight(rescoreQueryWeight);
            queryRescoreBuilder.setScoreMode(scoreMode);
            return queryRescoreBuilder;
        }
        void setQueryWeight(float queryWeight) {
            this.queryWeight = queryWeight;
        }
        void setRescoreQueryWeight(float rescoreQueryWeight) {
            this.rescoreQueryWeight = rescoreQueryWeight;
        }
        void setScoreMode(QueryRescoreMode scoreMode) {
            this.scoreMode = scoreMode;
        }
    }
    @Override
    public QueryRescorerBuilder rewrite(QueryRewriteContext ctx) throws IOException {
        QueryBuilder rewrite = queryBuilder.rewrite(ctx);
        if (rewrite == queryBuilder) {
            return this;
        }
        // BUG FIX: previously this returned new QueryRescorerBuilder(rewrite) directly,
        // silently resetting queryWeight, rescoreQueryWeight, scoreMode and windowSize
        // to their defaults whenever the inner query was rewritten. Carry them over.
        QueryRescorerBuilder queryRescoreBuilder = new QueryRescorerBuilder(rewrite);
        queryRescoreBuilder.setQueryWeight(queryWeight);
        queryRescoreBuilder.setRescoreQueryWeight(rescoreQueryWeight);
        queryRescoreBuilder.setScoreMode(scoreMode);
        if (windowSize() != null) {
            queryRescoreBuilder.windowSize(windowSize());
        }
        return queryRescoreBuilder;
    }
}
| |
/**
* This project is licensed under the Apache License, Version 2.0
* if the following condition is met:
* (otherwise it cannot be used by anyone but the author, Kevin, only)
*
* The original KommonLee project is owned by Lee, Seong Hyun (Kevin).
*
* -What does it mean to you?
* Nothing, unless you want to take the ownership of
* "the original project" (not yours or forked & modified one).
* You are free to use it for both non-commercial and commercial projects
* and free to modify it as the Apache License allows.
*
* -So why is this condition necessary?
* It is only to protect the original project (See the case of Java).
*
*
* Copyright 2009 Lee, Seong Hyun (Kevin)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.elixirian.kommonlee.reflect;
import static org.elixirian.kommonlee.test.CommonTestHelper.*;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import org.elixirian.kommonlee.test.CommonTestHelper.Accessibility;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* <pre>
* ____________ ___________ ____ _______ _________ _______ _______________ ____
* / / / /_ _/\ \/ / /_ _// __ //_ _// __ / \/ /
* / ___/ / / / \ / / / / /_/ / / / / /_/ / /
* / ___/ /_____/ /_ / \ _/ /_/ _/ _/ /_/ __ / /
* /_______/________/______/ /___/\___\/______/___/\___\ /______/___/ /___/___/\_____/
* </pre>
*
* <pre>
* ___ _____ __________ ___________ _____ ____
* / \/ / / \ \/ /_ _// \/ /
* / / / ___/\ / / / / /
* / \ / ___/ \ /_/ /_/ /
* /____/\____\/_______/ \__//______/___/\_____/
* </pre>
*
* @author Lee, SeongHyun (Kevin)
* @version 0.0.1 (2010-11-09)
*/
public class PrimitivesTest
{
@BeforeClass
public static void setUpBeforeClass() throws Exception
{
}
@AfterClass
public static void tearDownAfterClass() throws Exception
{
}
@Before
public void setUp() throws Exception
{
}
@After
public void tearDown() throws Exception
{
}
@Test(expected = IllegalAccessException.class)
public final void testPrimitives() throws Exception
{
testNotAccessibleConstructor(Primitives.class, this, Accessibility.PRIVATE, classArrayOf(), objectArrayOf());
}
@Test
public final void testGetPrimitiveDefaultValueObject()
{
assertThat(Primitives.getPrimitiveDefaultValueObject(byte.class), is(equalTo(Byte.valueOf((byte) 0))));
assertThat(Primitives.getPrimitiveDefaultValueObject(short.class), is(equalTo(Short.valueOf((short) 0))));
assertThat(Primitives.getPrimitiveDefaultValueObject(int.class), is(equalTo(Integer.valueOf(0))));
assertThat(Primitives.getPrimitiveDefaultValueObject(long.class), is(equalTo(Long.valueOf(0L))));
assertThat(Primitives.getPrimitiveDefaultValueObject(float.class), is(equalTo(Float.valueOf(0.0f))));
assertThat(Primitives.getPrimitiveDefaultValueObject(double.class), is(equalTo(Double.valueOf(0.0d))));
assertThat(Primitives.getPrimitiveDefaultValueObject(char.class), is(equalTo(Character.valueOf('\u0000'))));
assertThat(Primitives.getPrimitiveDefaultValueObject(boolean.class), is(equalTo(Boolean.FALSE)));
}
@Test(expected = IllegalArgumentException.class)
public final void testGetPrimitiveDefaultValueObjectWithNonPrimitive()
{
Primitives.getPrimitiveDefaultValueObject(Object.class);
}
@Test
public final void testGetBoxedPrimitiveOf()
{
assertThat(Primitives.getBoxedPrimitiveOf(null), is(nullValue()));
assertThat(Primitives.getBoxedPrimitiveOf(Object.class), is(nullValue()));
assertThat(Primitives.getBoxedPrimitiveOf(byte.class), is(equalTo(Byte.class)));
assertThat(Primitives.getBoxedPrimitiveOf(Byte.class), is(equalTo(Byte.class)));
assertThat(Primitives.getBoxedPrimitiveOf(short.class), is(equalTo(Short.class)));
assertThat(Primitives.getBoxedPrimitiveOf(Short.class), is(equalTo(Short.class)));
assertThat(Primitives.getBoxedPrimitiveOf(int.class), is(equalTo(Integer.class)));
assertThat(Primitives.getBoxedPrimitiveOf(Integer.class), is(equalTo(Integer.class)));
assertThat(Primitives.getBoxedPrimitiveOf(long.class), is(equalTo(Long.class)));
assertThat(Primitives.getBoxedPrimitiveOf(Long.class), is(equalTo(Long.class)));
assertThat(Primitives.getBoxedPrimitiveOf(float.class), is(equalTo(Float.class)));
assertThat(Primitives.getBoxedPrimitiveOf(Float.class), is(equalTo(Float.class)));
assertThat(Primitives.getBoxedPrimitiveOf(double.class), is(equalTo(Double.class)));
assertThat(Primitives.getBoxedPrimitiveOf(Double.class), is(equalTo(Double.class)));
assertThat(Primitives.getBoxedPrimitiveOf(char.class), is(equalTo(Character.class)));
assertThat(Primitives.getBoxedPrimitiveOf(Character.class), is(equalTo(Character.class)));
assertThat(Primitives.getBoxedPrimitiveOf(boolean.class), is(equalTo(Boolean.class)));
assertThat(Primitives.getBoxedPrimitiveOf(Boolean.class), is(equalTo(Boolean.class)));
}
@Test
public final void testGetPrimitiveOf()
{
assertThat(Primitives.getPrimitiveOf(null), is(nullValue()));
assertThat(Primitives.getPrimitiveOf(Object.class), is(nullValue()));
assertThat(Primitives.getPrimitiveOf(byte.class), is(equalTo(byte.class)));
assertThat(Primitives.getPrimitiveOf(Byte.class), is(equalTo(byte.class)));
assertThat(Primitives.getPrimitiveOf(short.class), is(equalTo(short.class)));
assertThat(Primitives.getPrimitiveOf(Short.class), is(equalTo(short.class)));
assertThat(Primitives.getPrimitiveOf(int.class), is(equalTo(int.class)));
assertThat(Primitives.getPrimitiveOf(Integer.class), is(equalTo(int.class)));
assertThat(Primitives.getPrimitiveOf(long.class), is(equalTo(long.class)));
assertThat(Primitives.getPrimitiveOf(Long.class), is(equalTo(long.class)));
assertThat(Primitives.getPrimitiveOf(float.class), is(equalTo(float.class)));
assertThat(Primitives.getPrimitiveOf(Float.class), is(equalTo(float.class)));
assertThat(Primitives.getPrimitiveOf(double.class), is(equalTo(double.class)));
assertThat(Primitives.getPrimitiveOf(Double.class), is(equalTo(double.class)));
assertThat(Primitives.getPrimitiveOf(char.class), is(equalTo(char.class)));
assertThat(Primitives.getPrimitiveOf(Character.class), is(equalTo(char.class)));
assertThat(Primitives.getPrimitiveOf(boolean.class), is(equalTo(boolean.class)));
assertThat(Primitives.getPrimitiveOf(Boolean.class), is(equalTo(boolean.class)));
}
@Test
public final void testIsPrimitive()
{
assertFalse(Primitives.isPrimitive(null));
assertFalse(Primitives.isPrimitive(Object.class));
assertTrue(Primitives.isPrimitive(byte.class));
assertFalse(Primitives.isPrimitive(Byte.class));
assertTrue(Primitives.isPrimitive(short.class));
assertFalse(Primitives.isPrimitive(Short.class));
assertTrue(Primitives.isPrimitive(int.class));
assertFalse(Primitives.isPrimitive(Integer.class));
assertTrue(Primitives.isPrimitive(long.class));
assertFalse(Primitives.isPrimitive(Long.class));
assertTrue(Primitives.isPrimitive(float.class));
assertFalse(Primitives.isPrimitive(Float.class));
assertTrue(Primitives.isPrimitive(double.class));
assertFalse(Primitives.isPrimitive(Double.class));
assertTrue(Primitives.isPrimitive(char.class));
assertFalse(Primitives.isPrimitive(Character.class));
assertTrue(Primitives.isPrimitive(boolean.class));
assertFalse(Primitives.isPrimitive(Boolean.class));
}
@Test
public final void testIsBoxedPrimitive()
{
assertFalse(Primitives.isBoxedPrimitive(null));
assertFalse(Primitives.isBoxedPrimitive(Object.class));
assertFalse(Primitives.isBoxedPrimitive(byte.class));
assertTrue(Primitives.isBoxedPrimitive(Byte.class));
assertFalse(Primitives.isBoxedPrimitive(short.class));
assertTrue(Primitives.isBoxedPrimitive(Short.class));
assertFalse(Primitives.isBoxedPrimitive(int.class));
assertTrue(Primitives.isBoxedPrimitive(Integer.class));
assertFalse(Primitives.isBoxedPrimitive(long.class));
assertTrue(Primitives.isBoxedPrimitive(Long.class));
assertFalse(Primitives.isBoxedPrimitive(float.class));
assertTrue(Primitives.isBoxedPrimitive(Float.class));
assertFalse(Primitives.isBoxedPrimitive(double.class));
assertTrue(Primitives.isBoxedPrimitive(Double.class));
assertFalse(Primitives.isBoxedPrimitive(char.class));
assertTrue(Primitives.isBoxedPrimitive(Character.class));
assertFalse(Primitives.isBoxedPrimitive(boolean.class));
assertTrue(Primitives.isBoxedPrimitive(Boolean.class));
}
}
| |
package com.mgatelabs.swftools.exploit.gui;
import com.mgatelabs.swftools.exploit.hook.BlockEditInterface;
import com.mgatelabs.swftools.support.filters.CommonFileFilter;
import com.mgatelabs.swftools.support.plugins.BlockPlugin;
import com.mgatelabs.swftools.support.plugins.Plugin;
import com.mgatelabs.swftools.support.swf.io.BlockIO;
import com.mgatelabs.swftools.support.swf.objects.FMovie;
import com.mgatelabs.swftools.support.swf.tags.TBlock;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.util.StringTokenizer;
import java.util.Vector;
/**
 * Internal-frame based editor for the raw tag/block list of an SWF movie.
 * Hosts one {@code BlockEditInternal} window per opened file on a desktop pane
 * and exposes block-level operations (copy/cut/paste, insert, replace, remove,
 * export, version change) to both the menus and block plugins via
 * {@code BlockEditInterface}.
 */
public class BlockEditDialog extends GuiInternal implements BlockEditInterface {
    // Variables
    /** Self reference used as parent component for dialogs. */
    private BlockEditDialog mySelf;
    /** Single listener dispatching every menu action. */
    private ExploitActionListener myExploitActionListener;
    //private FormMouseListener myFormMouseListener;
    private JMenuItem menuFileOpen;
    private JMenuItem menuFileClose;
    private JMenuItem menuFileSave;
    private JMenuItem menuFileSaveCompress;
    private JMenu menuRoot, menuEdit;
    private JMenuItem menuExportData;
    private JMenuItem menuExportBlock;
    private JMenuItem menuEditCopy;
    private JMenuItem menuEditCut;
    private JMenuItem menuEditPaste;
    private JMenuItem menuBlockRemove;
    private JMenuItem menuBlockReplace;
    private JMenuItem menuBlockInsert;
    // NOTE(review): declared but never created; only referenced from commented-out code.
    private JMenuItem menuBlockId;
    private JMenuItem menuAlterVersion;
    private JFileChooser myJFileChooser;
    /** Desktop hosting one internal frame per opened SWF. */
    private JDesktopPane jde;
    private JFrame myMaster;
    /** Currently open editor windows (GuiInternal instances). */
    private Vector myWindows;
    /** Plugins registered via updateMenuWithPlugins; null until then. */
    private Vector myPlugins;
    /** Clipboard for block copy/cut/paste; null when empty. */
    private TBlock copyBuffer;

    /**
     * Builds the dialog, its menus and the (initially empty) desktop pane.
     *
     * @param master owning frame; may be null for stand-alone use
     */
    public BlockEditDialog(JFrame master) {
        super("Block Editor (Beta)", false, false, (master != null), false);
        this.setFrameIcon(new ImageIcon(this.getClass().getResource("/com/mgatelabs/swftools/exploit/gui/images/block.png")));
        myWindows = new Vector();
        myPlugins = null;
        copyBuffer = null;
        myMaster = master;
        //myVector = null;
        //myMovie = null;
        mySelf = this;
        buildControls();
        menuMaster(false);
        this.getContentPane().add(buildGui());
        this.pack();
    }

    // Frame Information
    /** @return the owning top-level frame passed to the constructor (may be null) */
    public JFrame getFrame() {
        return myMaster;
    }

    // Plugin Updates
    /**
     * Registers every BLOCK-type plugin as a menu item under the "Plugins"
     * menu, following the plugin's colon-separated menu path.
     * The action command "!&lt;index&gt;" encodes the plugin's position in the vector.
     *
     * @param plugins vector of Plugin instances
     */
    public void updateMenuWithPlugins(Vector plugins) {
        myPlugins = plugins;
        for (int x = 0; x < plugins.size(); x++) {
            Plugin p = (Plugin) plugins.get(x);
            if (p.getType() == Plugin.BLOCK && p instanceof BlockPlugin) {
                BlockPlugin bp = (BlockPlugin) p;
                // Walk/create the proposed menu path, e.g. "Tools:Images"
                StringTokenizer st = new StringTokenizer(bp.getMenuPath(), ":");
                JMenu targetMenu = menuRoot;
                while (st.hasMoreElements()) {
                    String tName = st.nextToken();
                    if (tName.length() > 0) {
                        targetMenu = findSubMenu(targetMenu, tName);
                    }
                }
                JMenuItem aItem = new JMenuItem(bp.getMenuName());
                aItem.setActionCommand("!" + x); // "!" marks plugin items for the listener
                targetMenu.add(aItem);
                aItem.addActionListener(myExploitActionListener);
            }
        }
    }

    /**
     * Finds a direct sub-menu of {@code parent} with the given text, creating
     * and attaching a new one when none exists.
     */
    public JMenu findSubMenu(JMenu parent, String name) {
        Component[] cm = parent.getMenuComponents();
        for (int x = 0; x < cm.length; x++) {
            if (cm[x] instanceof JMenu) {
                JMenu aMenu = (JMenu) cm[x];
                if (aMenu.getText().trim().equals(name)) {
                    return aMenu;
                }
            }
        }
        JMenu nMenu = new JMenu(name);
        parent.add(nMenu);
        return nMenu;
    }

    // Controls
    /** Creates the file chooser, all menu items and the menu bar. */
    private void buildControls() {
        myJFileChooser = new JFileChooser(new File("."));
        myJFileChooser.setFileFilter(new CommonFileFilter(".swf", "Adobe Flash Files"));
        myExploitActionListener = new ExploitActionListener();
        menuFileOpen = new JMenuItem("Open");
        menuFileClose = new JMenuItem("Close");
        menuFileSave = new JMenuItem("Save");
        menuFileSaveCompress = new JMenuItem("Save (Compress)");
        menuExportData = new JMenuItem("Data");
        menuExportBlock = new JMenuItem("Block");
        menuEditCopy = new JMenuItem("Copy");
        menuEditCut = new JMenuItem("Cut");
        menuEditPaste = new JMenuItem("Paste");
        menuBlockRemove = new JMenuItem("Remove");
        menuBlockReplace = new JMenuItem("Replace");
        menuBlockInsert = new JMenuItem("Insert (Above)");
        menuAlterVersion = new JMenuItem("Alter Version");
        menuEditCopy.addActionListener(myExploitActionListener);
        menuEditCut.addActionListener(myExploitActionListener);
        menuEditPaste.addActionListener(myExploitActionListener);
        menuFileOpen.addActionListener(myExploitActionListener);
        menuFileClose.addActionListener(myExploitActionListener);
        menuFileSave.addActionListener(myExploitActionListener);
        menuFileSaveCompress.addActionListener(myExploitActionListener);
        menuExportData.addActionListener(myExploitActionListener);
        menuExportBlock.addActionListener(myExploitActionListener);
        menuBlockRemove.addActionListener(myExploitActionListener);
        menuBlockReplace.addActionListener(myExploitActionListener);
        menuBlockInsert.addActionListener(myExploitActionListener);
        menuAlterVersion.addActionListener(myExploitActionListener);
        //menuBlockId.addActionListener(myExploitActionListener);
        JMenuBar myMenuBar = new JMenuBar();
        JMenu menuFile = new JMenu("File");
        menuFile.add(menuFileOpen);
        menuFile.add(menuFileClose);
        menuFile.addSeparator();
        menuFile.add(menuFileSave);
        menuFile.add(menuFileSaveCompress);
        myMenuBar.add(menuFile);
        menuEdit = new JMenu("Edit");
        menuEdit.add(menuEditCopy);
        menuEdit.add(menuEditCut);
        menuEdit.add(menuEditPaste);
        myMenuBar.add(menuEdit);
        menuRoot = new JMenu("Plugins");
        JMenu menuExport = new JMenu("Export");
        // Renamed from "menuEdit" — the original local shadowed the menuEdit field above.
        JMenu menuPluginEdit = new JMenu("Edit");
        JMenu menuBuiltIn = new JMenu("Built-In");
        menuExport.add(menuExportData);
        menuExport.add(menuExportBlock);
        menuRoot.add(menuExport);
        menuRoot.add(menuPluginEdit);
        menuRoot.add(menuBuiltIn);
        menuBuiltIn.add(menuBlockRemove);
        menuBuiltIn.add(menuBlockReplace);
        menuBuiltIn.addSeparator();
        menuBuiltIn.add(menuBlockInsert);
        menuBuiltIn.addSeparator();
        menuBuiltIn.add(menuAlterVersion);
        myMenuBar.add(menuRoot);
        this.setJMenuBar(myMenuBar);
    }

    // Form Building
    /** Builds the desktop pane that will host the per-file editor windows. */
    private JPanel buildGui() {
        JPanel master = new JPanel();
        master.setLayout(new BoxLayout(master, BoxLayout.X_AXIS));
        jde = new JDesktopPane();
        jde.setDragMode(JDesktopPane.OUTLINE_DRAG_MODE);
        master.add(jde);
        return master;
    }

    ////////////////////////////////////////////////////////////////////////////
    /** Enables/disables the menu items that only make sense with an open file. */
    private void menuMaster(boolean open) {
        menuFileClose.setEnabled(open);
        menuFileSave.setEnabled(open);
        menuFileSaveCompress.setEnabled(open);
        menuRoot.setEnabled(open);
        menuEdit.setEnabled(open);
    }

    ////////////////////////////////////////////////////////////////////////////
    /** Resets the menu state to "no file open". */
    private void clear() {
        menuMaster(false);
    }

    // IO Functions/////////////////////////////////////////////////////////////
    /**
     * Reads an SWF file and opens a new internal editor window for it.
     *
     * @return true on success; false after showing an error dialog
     */
    private boolean loadFlash(File aFile) {
        try {
            // Get the Reader
            BlockIO bio = new BlockIO();
            Object[] objects = bio.readFlash(aFile);
            // objects[0] is the movie header, objects[1] the block list (per BlockIO.readFlash)
            BlockEditInternal bei = new BlockEditInternal(aFile, (FMovie) objects[0], (Vector) objects[1]);
            GuiInternal gi = new GuiInternal(bei, aFile.getName(), 320, 480, true, true);
            gi.setFrameIcon(new ImageIcon(this.getClass().getResource("/com/mgatelabs/swftools/exploit/gui/images/flash.png")));
            myWindows.add(gi);
            jde.add(gi);
            gi.setVisible(true);
            menuMaster(true);
            return true;
        } catch (Exception e) {
            e.printStackTrace();
            JOptionPane.showMessageDialog(mySelf, "Java has experienced an error, most likely\ndue to running out of ram.\nPlease close this program and run\nit with a higher ram setting." + "\n\n" + e, "Error", JOptionPane.ERROR_MESSAGE);
            return false;
        }
    }

    /**
     * @return the editor inside the currently selected internal frame, or null
     *         when no frame is selected or it is not a block editor.
     *         (The original NPE'd when no frame was selected.)
     */
    private BlockEditInternal getTopWindow() {
        JInternalFrame frame = jde.getSelectedFrame();
        if (!(frame instanceof GuiInternal)) {
            return null;
        }
        JComponent jc = ((GuiInternal) frame).getComponent();
        if (jc instanceof BlockEditInternal) {
            return (BlockEditInternal) jc;
        } else {
            return null;
        }
    }

    /**
     * Writes the top window's movie back to disk.
     *
     * @param compress true to write a compressed SWF
     */
    private boolean saveFlash(File aFile, boolean compress) {
        BlockEditInternal bei = getTopWindow();
        if (bei == null) {
            return false;
        }
        try {
            // Get the Reader
            BlockIO bio = new BlockIO();
            FMovie aMovie = bei.getMovie();// myMovieInfo.get();
            if (aMovie == null) {
                JOptionPane.showMessageDialog(mySelf, "There is a error in the movie information on the right side of the window.", "Error", JOptionPane.ERROR_MESSAGE);
                return false;
            }
            // Result intentionally ignored: failures surface via the catch below.
            bio.writeFlash(aFile, aMovie, bei.getObjects(), compress);
            return true;
        } catch (Exception e) {
            e.printStackTrace();
            JOptionPane.showMessageDialog(mySelf, "Java has experienced an error, most likely\ndue to running out of ram.\nPlease close this program and run\nit with a higher ram setting." + "\n\n" + e, "Error", JOptionPane.ERROR_MESSAGE);
            return false;
        }
    }

    /** @return the block selected in the top window, or null */
    public TBlock getSelectedBlock() {
        BlockEditInternal bei = getTopWindow();
        if (bei == null) {
            return null;
        }
        return bei.getSelectedObject();
    }

    /** @return the selected index in the top window, or -1 */
    public int getSelectedIndex() {
        BlockEditInternal bei = getTopWindow();
        if (bei == null) {
            return -1;
        }
        return bei.getSelectedIndex();
    }

    /** @return the block at {@code index} in the top window, or null when out of range */
    public TBlock getBlock(int index) {
        BlockEditInternal bei = getTopWindow();
        if (bei == null) {
            return null;
        }
        Vector dlo = bei.getObjects();
        if (index >= 0 && index < dlo.size()) {
            return (TBlock) dlo.get(index);
        }
        return null;
    }

    // Special Access
    /** Copies the block at {@code index} into the paste buffer. */
    public boolean copyBlock(int index) {
        TBlock aBlock = getBlock(index);
        if (aBlock == null) {
            return false;
        }
        copyBuffer = aBlock;
        return true;
    }

    /** Copies then removes the block at {@code index}. */
    public boolean cutBlock(int index) {
        if (copyBlock(index)) {
            return removeBlock(index);
        }
        return false;
    }

    /** Inserts the buffered block at {@code index}; no-op when the buffer is empty. */
    public boolean pasteBlock(int index) {
        if (copyBuffer != null) {
            return insertBlock(index, copyBuffer);
        }
        return false;
    }

    /**
     * Replaces the block at {@code index} with {@code aBlock} in both the data
     * vector and the list model.
     *
     * @return true on success (the original erroneously returned false here)
     */
    public boolean replaceBlock(int index, TBlock aBlock) {
        BlockEditInternal bei = getTopWindow();
        if (bei == null) {
            return false;
        }
        DefaultListModel dlm = bei.getModel();
        Vector dlo = bei.getObjects();
        dlo.set(index, aBlock);
        dlm.set(index, aBlock);
        bei.update(dlm);
        return true;
    }

    /** Prompts the user for a new SWF version (1-15) and applies it to the movie. */
    public void alterVersionNumber() {
        BlockEditInternal bei = getTopWindow();
        if (bei == null) {
            return;
        }
        FMovie m = bei.getMovie();
        // Pre-select the current version: version N sits at index N-1.
        int i = m.getVersion();
        i--;
        if (i < 0 || i > 14) {
            i = 9; // fall back to version 10 for out-of-range values
        }
        String[] versions = {"1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15"};
        String selectedValue = (String) JOptionPane.showInputDialog(null,
                "Version", "Alter Flash Version",
                JOptionPane.INFORMATION_MESSAGE, null,
                versions, (String) versions[i]);
        if (selectedValue != null && selectedValue.length() > 0) {
            try {
                i = Integer.parseInt(selectedValue);
                m.setVersion(i);
                bei.updateMovieInfo();
            } catch (Exception ex) {
                JOptionPane.showMessageDialog(null, "Could not alter flash version", "Internal Error", JOptionPane.ERROR_MESSAGE);
            }
        }
    }

    /** Forces the top window to refresh its block list display. */
    public void updateList() {
        BlockEditInternal bei = getTopWindow();
        if (bei == null) {
            return;
        }
        DefaultListModel dlm = bei.getModel();
        bei.update(dlm);
    }

    /** @return the number of blocks in the top window, or -1 when none is open */
    public int getObjectCount() {
        BlockEditInternal bei = getTopWindow();
        if (bei == null) {
            return -1;
        }
        Vector dlo = bei.getObjects();
        return dlo.size();
    }

    ////////////////////////////////////////////////////////////////////////////
    // Save Block
    /** Exports the block at {@code index} (with its tag header) to {@code aPath}. */
    public boolean saveBlock(int index, File aPath) {
        BlockEditInternal bei = getTopWindow();
        if (bei == null) {
            return false;
        }
        if (index < 0) {
            return false;
        }
        try {
            // Get the Reader
            BlockIO bio = new BlockIO();
            TBlock aBlock = bei.getObjectAtIndex(index);
            if (aBlock != null) {
                FMovie aMovie = bei.getMovie();// myMovieInfo.get();
                bio.writeBlock(aPath, aMovie, aBlock);
            }
            return true;
        } catch (Exception e) {
            e.printStackTrace();
            JOptionPane.showMessageDialog(mySelf, "Java has experienced an error, most likely\ndue to running out of ram.\nPlease close this program and run\nit with a higher ram setting." + "\n\n" + e, "Error", JOptionPane.ERROR_MESSAGE);
        }
        return false;
    }

    ////////////////////////////////////////////////////////////////////////////
    // Save Data
    /** Exports the raw payload of the block at {@code index} to {@code aFile}. */
    public boolean saveData(int index, File aFile) {
        BlockEditInternal bei = getTopWindow();
        if (bei == null) {
            return false;
        }
        if (index < 0) {
            return false;
        }
        try {
            // Get the Reader
            BlockIO bio = new BlockIO();
            TBlock aBlock = bei.getObjectAtIndex(index);
            if (aBlock != null) {
                FMovie aMovie = bei.getMovie();// myMovieInfo.get();
                bio.writeData(aFile, aMovie, aBlock);
            }
            return true;
        } catch (Exception e) {
            e.printStackTrace();
            JOptionPane.showMessageDialog(mySelf, "Java has experienced an error, most likely\ndue to running out of ram.\nPlease close this program and run\nit with a higher ram setting." + "\n\n" + e, "Error", JOptionPane.ERROR_MESSAGE);
        }
        return false;
    }

    ////////////////////////////////////////////////////////////////////////////
    /** Removes the block at {@code index} from both the data vector and list model. */
    public boolean removeBlock(int index) {
        BlockEditInternal bei = getTopWindow();
        if (bei == null || index < 0) {
            return false;
        }
        DefaultListModel dlm = bei.getModel();
        Vector dlo = bei.getObjects();
        dlo.remove(index);
        dlm.removeElementAt(index);
        bei.update(dlm);
        return true;
    }

    ////////////////////////////////////////////////////////////////////////////
    /** Replaces the block at {@code index} with one read from {@code target}. */
    public boolean replaceBlock(int index, File target) {
        BlockEditInternal bei = getTopWindow();
        if (bei == null || index < 0) {
            return false;
        }
        try {
            BlockIO bio = new BlockIO();
            FMovie aMovie = bei.getMovie();
            TBlock aBlock = bio.readBlock(target, aMovie);
            DefaultListModel dlm = bei.getModel();
            Vector dlo = bei.getObjects();
            dlo.set(index, aBlock);
            dlm.set(index, aBlock);
            bei.update(dlm);
            return true;
        } catch (Exception e) {
            System.out.println(e);
        }
        return false;
    }

    ////////////////////////////////////////////////////////////////////////////
    /** Inserts {@code aBlock} above position {@code index}. */
    public boolean insertBlock(int index, TBlock aBlock) {
        BlockEditInternal bei = getTopWindow();
        if (bei == null || index < 0) {
            return false;
        }
        try {
            DefaultListModel dlm = bei.getModel();
            Vector dlo = bei.getObjects();
            dlo.add(index, aBlock);
            dlm.add(index, aBlock);
            bei.update(dlm);
            return true;
        } catch (Exception e) {
            System.out.println(e);
        }
        return false;
    }

    /** Inserts a block read from {@code target} above position {@code index}. */
    public boolean insertBlock(int index, File target) {
        BlockEditInternal bei = getTopWindow();
        if (bei == null || index < 0) {
            return false;
        }
        try {
            BlockIO bio = new BlockIO();
            FMovie aMovie = bei.getMovie();
            TBlock aBlock = bio.readBlock(target, aMovie);
            DefaultListModel dlm = bei.getModel();
            Vector dlo = bei.getObjects();
            dlo.add(index, aBlock);
            dlm.add(index, aBlock);
            bei.update(dlm);
            return true;
        } catch (Exception e) {
            System.out.println(e);
        }
        return false;
    }

    ////////////////////////////////////////////////////////////////////////////
    /** @return the SWF version of the movie in the top window, or -1 */
    public int getFlashVersion() {
        BlockEditInternal bei = getTopWindow();
        if (bei == null) {
            return -1;
        }
        FMovie aMovie = bei.getMovie();
        return aMovie.getVersion();
    }

    ////////////////////////////////////////////////////////////////////////////
    /** Routes every menu action, including plugin items ("!&lt;index&gt;" commands). */
    private class ExploitActionListener implements ActionListener {
        public void actionPerformed(ActionEvent e) {
            if (e.getSource() == menuFileOpen) {
                clear();
                int returnVal = myJFileChooser.showOpenDialog(mySelf);
                if (returnVal == JFileChooser.APPROVE_OPTION) { // Were Good
                    loadFlash(myJFileChooser.getSelectedFile());
                }
            } else if (e.getSource() == menuFileClose) {
                // Guard against no selected frame (the original NPE'd here).
                JInternalFrame frame = jde.getSelectedFrame();
                if (frame instanceof GuiInternal) {
                    GuiInternal gi = (GuiInternal) frame;
                    if (gi.getComponent() instanceof BlockEditInternal) {
                        gi.setVisible(false);
                        myWindows.remove(gi);
                        jde.remove(gi);
                    }
                }
                if (myWindows.size() == 0) {
                    menuMaster(false);
                }
            } else if (myWindows.size() > 0) {
                if (e.getSource() == menuFileSave || e.getSource() == menuFileSaveCompress) {
                    int returnVal = myJFileChooser.showSaveDialog(mySelf);
                    if (returnVal == JFileChooser.APPROVE_OPTION) { // Were Good
                        saveFlash(myJFileChooser.getSelectedFile(), e.getSource() == menuFileSaveCompress);
                    }
                } else if (e.getSource() == menuAlterVersion) {
                    alterVersionNumber();
                }
                // Export and block operations require a selected block
                else {
                    BlockEditInternal bei = getTopWindow();
                    if (bei == null) {
                        return;
                    }
                    int index = bei.getSelectedIndex();
                    if (index < 0) {
                        return;
                    }
                    if (e.getSource() == menuExportData) {
                        int returnVal = myJFileChooser.showSaveDialog(mySelf);
                        if (returnVal == JFileChooser.APPROVE_OPTION) { // Were Good
                            saveData(index, myJFileChooser.getSelectedFile());
                        }
                    } else if (e.getSource() == menuExportBlock) {
                        int returnVal = myJFileChooser.showSaveDialog(mySelf);
                        if (returnVal == JFileChooser.APPROVE_OPTION) { // Were Good
                            saveBlock(index, myJFileChooser.getSelectedFile());
                        }
                    } else if (e.getSource() == menuBlockRemove) {
                        removeBlock(index);
                    } else if (e.getSource() == menuBlockInsert) {
                        int returnVal = myJFileChooser.showOpenDialog(mySelf);
                        if (returnVal == JFileChooser.APPROVE_OPTION) { // Were Good
                            insertBlock(index, myJFileChooser.getSelectedFile());
                        }
                    } else if (e.getSource() == menuBlockReplace) {
                        int returnVal = myJFileChooser.showOpenDialog(mySelf);
                        if (returnVal == JFileChooser.APPROVE_OPTION) { // Were Good
                            replaceBlock(index, myJFileChooser.getSelectedFile());
                        }
                    } else if (e.getSource() == menuEditCopy) {
                        copyBlock(index);
                    } else if (e.getSource() == menuEditCut) {
                        cutBlock(index);
                    } else if (e.getSource() == menuEditPaste) {
                        pasteBlock(index);
                    }
                    // Plugin Check: "!<n>" action command encodes the plugin index
                    else if (e.getSource() instanceof JMenuItem) {
                        JMenuItem jmi = (JMenuItem) e.getSource();
                        if (jmi.getActionCommand().startsWith("!")) {
                            String temp = jmi.getActionCommand().substring(1);
                            try {
                                int pIndex = Integer.parseInt(temp);
                                Plugin p = (Plugin) myPlugins.get(pIndex);
                                if (p instanceof BlockPlugin) {
                                    BlockPlugin bp = (BlockPlugin) p;
                                    TBlock selectedObject = bei.getSelectedObject();
                                    if (bp.isApplicable(selectedObject.getId())) {
                                        bp.work(mySelf);
                                    } else {
                                        JOptionPane.showMessageDialog(mySelf, "I'm sorry but the active plugin\nrejected the currently selected block", "Plugin Error", JOptionPane.ERROR_MESSAGE);
                                    }
                                }
                            } catch (Exception ex) {
                                System.out.println(ex);
                            }
                        }
                    }
                }
            }
        }
    }
}
| |
package com.projectchanged.server.dataAccess;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import com.projectchanged.server.dataAccess.helpers.QueryExecutor;
/**
 * Active-record style model for a row of the UserRole table.
 * Lazily fetches on first read, tracks dirty state for persistence, and
 * supports soft delete/recover via the IsDeleted flag.
 */
public class RoleModel implements RoleInterface {
    private long id;                            // 0 means "not yet persisted"
    private String name;
    private String description;
    private ArrayList<UserInterface> members;   // lazy cache; null until GetMembers()
    private boolean isFetched;                  // true once DB state has been loaded
    private boolean isDirty;                    // true when in-memory state needs persisting
    private boolean isDeleted;                  // soft-delete flag

    /** Creates a new, unpersisted role (id 0, dirty, nothing to fetch). */
    public RoleModel() {
        id = 0;
        isFetched = true;
        isDirty = true;
        members = null;
        isDeleted = false;
    }

    /** Wraps an existing role by id; state is loaded lazily on first access. */
    public RoleModel(long id) {
        this();
        this.id = id;
        isFetched = false;
        isDirty = false;
    }

    /**
     * Loads name/description/deleted flag from the database if not yet fetched.
     *
     * @return true on success (or already fetched); false on SQL error
     * @throws AlreadyDeletedException when the row is soft-deleted
     */
    public boolean Fetch() throws AlreadyDeletedException {
        if (!isFetched) {
            String sql = "SELECT Name, Description, IsDeleted FROM UserRole WHERE UserRole.RoleId = ?";
            try {
                ResultSet rs = QueryExecutor.ExecuteReader(sql,
                        new Object[] { id });
                if (rs.next()) {
                    name = rs.getString("Name");
                    description = rs.getString("Description");
                    isDeleted = rs.getBoolean("IsDeleted");
                }
                rs.close(); // release DB resources promptly
                isFetched = true;
                if (isDeleted) {
                    throw new AlreadyDeletedException(this);
                }
            } catch (SQLException sqlException) {
                sqlException.printStackTrace();
                return false;
            }
        }
        return true;
    }

    /**
     * Writes in-memory changes to the database: INSERT for new rows (id 0),
     * UPDATE otherwise. On INSERT the generated id is read back by matching
     * name+description.
     *
     * @return true on success or when nothing was dirty
     * @throws AlreadyDeletedException when the row is soft-deleted and the insert failed
     */
    public boolean Persist() throws AlreadyDeletedException {
        if (isDirty) {
            String sql = "";
            if (id == 0) {
                sql = "INSERT INTO UserRole (Name, Description) VALUES (?, ?)";
                if (QueryExecutor.ExecuteNonQuery(sql, new Object[] { name,
                        description })) {
                    isDirty = false;
                    // NOTE(review): looking up the new id by name+description is
                    // ambiguous when duplicates exist — TODO confirm uniqueness constraint.
                    sql = "SELECT RoleId FROM UserRole WHERE UserRole.Name = ? AND UserRole.Description = ?";
                    try {
                        ResultSet rs = QueryExecutor.ExecuteReader(sql,
                                new Object[] { name, description });
                        boolean found = rs.next();
                        if (found) {
                            id = rs.getLong("RoleId");
                        }
                        rs.close(); // release DB resources promptly
                        return found;
                    } catch (SQLException sqlException) {
                        sqlException.printStackTrace();
                        return false;
                    }
                } else if (isDeleted) {
                    throw new AlreadyDeletedException(this);
                } else {
                    return false;
                }
            } else {
                sql = "UPDATE UserRole SET Name = ?, Description = ? WHERE RoleId = ?";
                if (QueryExecutor.ExecuteNonQuery(sql, new Object[] { name,
                        description, id })) {
                    isDirty = false;
                    return true;
                } else {
                    return false;
                }
            }
        }
        return true;
    }

    /** Discards cached state and re-fetches; returns false if the row is deleted or errors. */
    public boolean Refresh() {
        try {
            if (isFetched) {
                isFetched = false;
                members = null;
            }
            return Fetch();
        } catch (AlreadyDeletedException e) {
            return false;
        }
    }

    /** Soft-deletes the row (IsDeleted = 1); a no-op returning true when already deleted. */
    public boolean Delete() {
        if (id == 0) {
            return false; // never persisted — nothing to delete
        }
        if (!isDeleted) {
            isDeleted = true;
            return QueryExecutor.ExecuteNonQuery(
                    "UPDATE [UserRole] SET IsDeleted = 1 WHERE RoleId = ?",
                    new Object[] { id });
        } else {
            return true;
        }
    }

    /** Undoes a soft delete (IsDeleted = 0); a no-op returning true when not deleted. */
    public boolean Recover() {
        if (id == 0) {
            return false;
        }
        if (isDeleted) {
            isDeleted = false;
            return QueryExecutor.ExecuteNonQuery(
                    "UPDATE [UserRole] SET IsDeleted = 0 WHERE RoleId = ?",
                    new Object[] { id });
        } else {
            return true;
        }
    }

    /** @return the soft-delete flag, fetching first when necessary */
    public boolean IsDeleted() {
        if (!isFetched) {
            try {
                if (!Fetch()) {
                    return false;
                }
            } catch (AlreadyDeletedException e) {
                return true;
            }
        }
        return isDeleted;
    }

    public long GetId() {
        return id;
    }

    public String GetName() {
        return name;
    }

    public boolean SetName(String newName) {
        name = newName;
        isDirty = true;
        return isDirty;
    }

    public String GetDescription() {
        return description;
    }

    public boolean SetDescription(String newDescription) {
        description = newDescription;
        isDirty = true;
        return isDirty;
    }

    /**
     * Returns the non-deleted users in this role. The result is a defensive
     * copy: changes made to it by the caller are not persisted.
     * Returns null on SQL error.
     */
    public ArrayList<UserInterface> GetMembers() {
        // do not allow modification of this members list here.
        // any changes to this list will not be persisted.
        if (members == null) {
            members = new ArrayList<UserInterface>();
            try {
                String sql = "SELECT UserId FROM [User] WHERE RoleId = ? AND IsDeleted = 0";
                ResultSet rs = QueryExecutor.ExecuteReader(sql,
                        new Object[] { id });
                while (rs.next()) {
                    members.add(ObjectFinder.GetUser(rs.getLong("UserId")));
                }
                rs.close(); // release DB resources promptly
            } catch (SQLException sqlException) {
                sqlException.printStackTrace();
                members = null;
                return null;
            }
        }
        // BUG FIX: the original cast Collections.unmodifiableList(...) to ArrayList,
        // which always throws ClassCastException; return a copy instead.
        return new ArrayList<UserInterface>(members);
    }

    /** Orders by id; avoids the int truncation/overflow of (int)(id - other). */
    public int compareTo(RoleInterface o) {
        // non-zero value indicates inequality.
        // this is a shallow comparison, relying on the uniqueness of the Id.
        long other = o.GetId();
        return id < other ? -1 : (id == other ? 0 : 1);
    }

    /** Equality is defined solely by id, mirroring compareTo. */
    public boolean equals(Object o) {
        return (o instanceof RoleInterface)
                && (((RoleInterface) o).GetId() == id);
    }

    /** Added to honor the equals/hashCode contract: hash derives from id only. */
    public int hashCode() {
        return (int) (id ^ (id >>> 32));
    }
}
| |
/*
* Copyright 2003 - 2016 The eFaps Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.efaps.esjp.products;
import java.math.BigDecimal;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.efaps.admin.datamodel.Status;
import org.efaps.admin.datamodel.ui.IUIValue;
import org.efaps.admin.event.Parameter;
import org.efaps.admin.event.Parameter.ParameterValues;
import org.efaps.admin.event.Return;
import org.efaps.admin.event.Return.ReturnValues;
import org.efaps.admin.program.esjp.EFapsApplication;
import org.efaps.admin.program.esjp.EFapsUUID;
import org.efaps.admin.ui.AbstractUserInterfaceObject.TargetMode;
import org.efaps.db.Context;
import org.efaps.db.Insert;
import org.efaps.db.Instance;
import org.efaps.db.InstanceQuery;
import org.efaps.db.MultiPrintQuery;
import org.efaps.db.PrintQuery;
import org.efaps.db.QueryBuilder;
import org.efaps.db.SelectBuilder;
import org.efaps.db.Update;
import org.efaps.esjp.ci.CIFormProducts;
import org.efaps.esjp.ci.CIProducts;
import org.efaps.esjp.common.AbstractCommon;
import org.efaps.esjp.erp.Currency;
import org.efaps.esjp.erp.RateInfo;
import org.efaps.util.EFapsException;
import org.joda.time.DateTime;
/**
* TODO comment!
*
* @author The eFaps Team
*/
@EFapsUUID("a7a6c100-8fce-4217-bc8e-c9066b1ab9e2")
@EFapsApplication("eFapsApp-Products")
public abstract class Cost_Base
extends AbstractCommon
{
/** The Constant CACHKEY. */
protected static final String CACHKEY = Cost.class.getName() + ".CacheKey4Cost";
/**
* Method to get the value for the date field "Valid until". On create mode
* a date in ten years future is returned.
*
* @param _parameter Paramter as passed from the eFaps esjp API
* @return Return containing the value
* @throws EFapsException on error
*/
/**
 * Supplies the value for the "Valid until" date field: in create mode a date
 * ten years in the future, otherwise the value already stored on the UI object.
 *
 * @param _parameter Parameter as passed from the eFaps esjp API
 * @return Return carrying the DateTime under ReturnValues.VALUES
 * @throws EFapsException on error
 */
public Return getValidUntilUI(final Parameter _parameter)
    throws EFapsException
{
    final IUIValue uiValue = (IUIValue) _parameter.get(ParameterValues.UIOBJECT);
    final boolean createMode = TargetMode.CREATE.equals(_parameter.get(ParameterValues.ACCESSMODE));
    // Create mode defaults to "now + 10 years"; edit/view modes echo the stored value.
    final DateTime result = createMode
                    ? new DateTime().plusYears(10)
                    : (DateTime) uiValue.getObject();
    final Return ret = new Return();
    ret.put(ReturnValues.VALUES, result);
    return ret;
}
/**
* Method is executed as a insert trigger on type "Products_ProductCost". It
* corrects the valid until date of all other Products_ProductCost.
*
* @param _parameter Paramter as passed from the eFaps esjp API
* @return Return containing the value
* @throws EFapsException on error
*/
public Return trigger4InsertPost(final Parameter _parameter)
throws EFapsException
{
final Instance costInstance = _parameter.getInstance();
final PrintQuery print = new PrintQuery(costInstance);
print.addAttribute(CIProducts.ProductCostAbstract.ValidFrom,
CIProducts.ProductCostAbstract.ValidUntil,
CIProducts.ProductCostAbstract.ProductLink,
CIProducts.ProductCostAbstract.CurrencyLink);
print.executeWithoutAccessCheck();
final DateTime from = print.getAttribute(CIProducts.ProductCostAbstract.ValidFrom);
final DateTime to = print.getAttribute(CIProducts.ProductCostAbstract.ValidUntil);
final QueryBuilder queryBldr = new QueryBuilder(costInstance.getType());
queryBldr.addWhereAttrEqValue(CIProducts.ProductCostAbstract.StatusAbstract,
Status.find(CIProducts.ProductCostStatus.Active));
queryBldr.addWhereAttrNotEqValue(CIProducts.ProductCostAbstract.ID, costInstance);
queryBldr.addWhereAttrGreaterValue(CIProducts.ProductCostAbstract.ValidUntil, from.minusSeconds(1));
queryBldr.addWhereAttrEqValue(CIProducts.ProductCostAbstract.ProductLink,
print.<Long>getAttribute(CIProducts.ProductCostAbstract.ProductLink));
if (costInstance.getType().isCIType(CIProducts.ProductCostAlternative)) {
queryBldr.addWhereAttrEqValue(CIProducts.ProductCostAbstract.CurrencyLink,
print.<Long>getAttribute(CIProducts.ProductCostAbstract.CurrencyLink));
}
queryBldr.addOrderByAttributeAsc(CIProducts.ProductCostAbstract.ValidFrom);
final MultiPrintQuery multi = queryBldr.getPrint();
multi.addAttribute(CIProducts.ProductCostAbstract.ValidUntil, CIProducts.ProductCostAbstract.ValidFrom,
CIProducts.ProductCostAbstract.Price, CIProducts.ProductCostAbstract.ProductLink,
CIProducts.ProductCostAbstract.CurrencyLink);
multi.setEnforceSorted(true);
multi.executeWithoutAccessCheck();
while (multi.next()) {
final DateTime currentFrom = multi.getAttribute(CIProducts.ProductCostAbstract.ValidFrom);
final DateTime currentTo = multi.getAttribute(CIProducts.ProductCostAbstract.ValidUntil);
DateTime newFrom = currentFrom;
DateTime newTo = currentTo;
boolean deactivate = false;
boolean create = false;
if (!(currentFrom.isAfter(from) && currentFrom.isAfter(to))) {
if (currentFrom.isBefore(from) && (currentTo.isAfter(from)
|| currentTo.toLocalDate().equals(from.toLocalDate()))) {
newTo = from.minusDays(1);
create = true;
// new one is inside
if (currentTo.isAfter(to)) {
final Insert insert = new Insert(multi.getCurrentInstance().getType());
insert.add(CIProducts.ProductCostAbstract.ValidFrom, to.plusDays(1));
insert.add(CIProducts.ProductCostAbstract.ValidUntil, currentTo);
insert.add(CIProducts.ProductCostAbstract.Price,
multi.<BigDecimal>getAttribute(CIProducts.ProductCostAbstract.Price));
insert.add(CIProducts.ProductCostAbstract.ProductLink,
multi.<Long>getAttribute(CIProducts.ProductCostAbstract.ProductLink));
insert.add(CIProducts.ProductCostAbstract.CurrencyLink,
multi.<Long>getAttribute(CIProducts.ProductCostAbstract.CurrencyLink));
insert.add(CIProducts.ProductCostAbstract.StatusAbstract,
Status.find(CIProducts.ProductCostStatus.Active));
insert.executeWithoutTrigger();
}
} else if (currentFrom.isAfter(from) && currentFrom.toLocalDate().equals(to.toLocalDate())) {
newFrom = to.minusDays(1);
create = true;
} else if (currentFrom.isAfter(from) && currentFrom.isBefore(to) && currentTo.isAfter(to)) {
// in case that it must be move to far into the future just deactivate it
if (to.minusYears(5).isAfter(new DateTime()) && currentTo.minusYears(5).isAfter(new DateTime())) {
deactivate = true;
} else {
newFrom = to.plusDays(1);
create = true;
}
} else if (currentFrom.isAfter(from) && currentFrom.isBefore(to)
&& (currentTo.isBefore(to) || currentTo.toLocalDate().equals(to.toLocalDate()))) {
deactivate = true;
} else if (currentFrom.toLocalDate().equals(from.toLocalDate())
&& currentFrom.toLocalDate().equals(currentTo.toLocalDate())) {
deactivate = true;
} else if (currentFrom.toLocalDate().equals(from.toLocalDate())
&& currentTo.isBefore(to)) {
deactivate = true;
} else if (currentFrom.isAfter(from) && currentTo.isBefore(to)) {
deactivate = true;
} else if (currentFrom.toLocalDate().equals(from.toLocalDate())
&& currentTo.toLocalDate().equals(to.toLocalDate())) {
deactivate = true;
}
}
if (create || deactivate) {
final Update update = new Update(multi.getCurrentInstance());
update.add(CIProducts.ProductCostAbstract.StatusAbstract,
Status.find(CIProducts.ProductCostStatus.Inactive));
update.executeWithoutTrigger();
}
if (create) {
final Insert insert = new Insert(multi.getCurrentInstance().getType());
insert.add(CIProducts.ProductCostAbstract.ValidFrom, newFrom);
insert.add(CIProducts.ProductCostAbstract.ValidUntil, newTo);
insert.add(CIProducts.ProductCostAbstract.Price,
multi.<BigDecimal>getAttribute(CIProducts.ProductCostAbstract.Price));
insert.add(CIProducts.ProductCostAbstract.ProductLink,
multi.<Long>getAttribute(CIProducts.ProductCostAbstract.ProductLink));
insert.add(CIProducts.ProductCostAbstract.CurrencyLink,
multi.<Long>getAttribute(CIProducts.ProductCostAbstract.CurrencyLink));
insert.add(CIProducts.ProductCostAbstract.StatusAbstract,
Status.find(CIProducts.ProductCostStatus.Active));
insert.executeWithoutTrigger();
}
}
return new Return();
}
/**
* Method for update a field of the contact.
*
* @param _parameter Parameter as passed from the eFaps API.
* @return retVal with values of the contact.
* @throws EFapsException on error.
*/
public Return updateFields4CostCalculation(final Parameter _parameter)
throws EFapsException
{
final String quantityStr = _parameter
.getParameterValue(CIFormProducts.Products_ProductCostCalculateForm.quantity.name);
final String costStr = _parameter.getParameterValue(CIFormProducts.Products_ProductCostCalculateForm.cost.name);
final BigDecimal quantity = parse(quantityStr);
final BigDecimal cost = parse(costStr);
BigDecimal price = BigDecimal.ZERO;
if (quantity.signum() > 0 && cost.signum() > 0) {
final QueryBuilder queryBldr = new QueryBuilder(CIProducts.ProductCost);
queryBldr.addWhereAttrEqValue(CIProducts.ProductCost.ProductLink, _parameter.getInstance().getId());
queryBldr.addOrderByAttributeDesc(CIProducts.ProductCost.ValidUntil);
final InstanceQuery query = queryBldr.getQuery();
query.setLimit(1);
final List<Instance> instances = query.execute();
if (!instances.isEmpty()) {
final PrintQuery print = new PrintQuery(instances.get(0));
print.addAttribute(CIProducts.ProductCost.Price);
print.execute();
final BigDecimal oldPrice = print.<BigDecimal>getAttribute(CIProducts.ProductCost.Price);
final QueryBuilder invQueryBldr = new QueryBuilder(CIProducts.Inventory);
invQueryBldr.addWhereAttrEqValue(CIProducts.Inventory.Product, _parameter.getInstance().getId());
final MultiPrintQuery multi = invQueryBldr.getPrint();
multi.addAttribute(CIProducts.Inventory.Quantity, CIProducts.Inventory.Reserved);
multi.execute();
BigDecimal oldQuantity = BigDecimal.ZERO;
while (multi.next()) {
final BigDecimal quantTmp = multi.<BigDecimal>getAttribute(CIProducts.Inventory.Quantity);
final BigDecimal resTmp = multi.<BigDecimal>getAttribute(CIProducts.Inventory.Reserved);
if (quantTmp != null) {
oldQuantity = oldQuantity.add(quantTmp);
}
if (resTmp != null) {
oldQuantity = oldQuantity.add(resTmp);
}
}
price = oldPrice.multiply(oldQuantity).add(cost.multiply(quantity)).setScale(8)
.divide(oldQuantity.add(quantity), BigDecimal.ROUND_HALF_UP)
.setScale(getFractionDigit(), BigDecimal.ROUND_HALF_UP);
} else {
price = cost;
}
}
final Map<?, ?> properties = (Map<?, ?>) _parameter.get(ParameterValues.PROPERTIES);
final List<Map<String, String>> list = new ArrayList<>();
final Map<String, String> map = new HashMap<>();
final String targetFieldName = (String) properties.get("fieldName");
if (targetFieldName != null) {
map.put(targetFieldName, price.toString());
}
final String diplayFieldName = (String) properties.get("displayFieldName");
if (diplayFieldName != null) {
final DecimalFormat formater = (DecimalFormat) NumberFormat.getInstance(Context.getThreadContext()
.getLocale());
formater.setMaximumFractionDigits(getFractionDigit());
map.put(diplayFieldName, formater.format(price));
}
if (!map.isEmpty()) {
list.add(map);
}
final Return retVal = new Return();
retVal.put(ReturnValues.VALUES, list);
return retVal;
}
/**
* Gets the fraction digit.
*
* @return the fraction digit
*/
protected Integer getFractionDigit()
{
return 2;
}
/**
* @param _value value to be parsed to an BigDecimal
* @return BigDecimal
* @throws EFapsException on parse exception
*/
public BigDecimal parse(final String _value)
throws EFapsException
{
final DecimalFormat formater = (DecimalFormat) NumberFormat.getInstance(Context.getThreadContext().getLocale());
formater.setParseBigDecimal(true);
BigDecimal ret;
try {
if (_value != null && _value.length() > 0) {
ret = (BigDecimal) formater.parse(_value);
} else {
ret = BigDecimal.ZERO;
}
} catch (final ParseException e) {
ret = BigDecimal.ZERO;
}
return ret;
}
/**
* Gets the cost.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _prodInst the prod inst
* @return the cost
* @throws EFapsException on error
*/
public CostBean getCost(final Parameter _parameter,
final Instance _prodInst)
throws EFapsException
{
return getCost(_parameter, new DateTime(), _prodInst);
}
/**
* Gets the cost.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _date the date
* @param _prodInst the prod inst
* @return the cost
* @throws EFapsException on error
*/
public CostBean getCost(final Parameter _parameter,
final DateTime _date,
final Instance _prodInst)
throws EFapsException
{
return getCosts(_parameter, _date, _prodInst).get(_prodInst);
}
/**
* Gets the cost.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _date the date
* @param _alterCurrencyInstance the alter currency instance
* @param _prodInst the prod inst
* @return the cost
* @throws EFapsException on error
*/
public CostBean getAlternativeCost(final Parameter _parameter,
final DateTime _date,
final Instance _alterCurrencyInstance,
final Instance _prodInst)
throws EFapsException
{
return getAlternativeCosts(_parameter, _date, _alterCurrencyInstance, _prodInst).get(_prodInst);
}
/**
* Gets the costs.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _prodInst the prod inst
* @return the costs
* @throws EFapsException on error
*/
public Map<Instance, CostBean> getCosts(final Parameter _parameter,
final Instance... _prodInst)
throws EFapsException
{
return getCosts(_parameter, new DateTime(), _prodInst);
}
/**
* Gets the costs.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _date the date
* @param _alterCurrencyInstance the alter currency instance
* @param _prodInsts the prod insts
* @return the costs
* @throws EFapsException on error
*/
public Map<Instance, CostBean> getAlternativeCosts(final Parameter _parameter,
final DateTime _date,
final Instance _alterCurrencyInstance,
final Instance... _prodInsts)
throws EFapsException
{
return getCostsInternal(_parameter, _date, null, _alterCurrencyInstance, _prodInsts);
}
/**
* Gets the costs.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _date the date
* @param _prodInsts the prod insts
* @return the costs
* @throws EFapsException on error
*/
public Map<Instance, CostBean> getCosts(final Parameter _parameter,
final DateTime _date,
final Instance... _prodInsts)
throws EFapsException
{
return getCostsInternal(_parameter, _date, null, null, _prodInsts);
}
/**
* Gets the costs.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _costDate the cost date
* @param _requestDate the request date
* @param _alterCurrencyInstance the alter currency instance
* @param _prodInsts the prod insts
* @return the costs
* @throws EFapsException on error
*/
protected Map<Instance, CostBean> getCostsInternal(final Parameter _parameter,
final DateTime _costDate,
final DateTime _requestDate,
final Instance _alterCurrencyInstance,
final Instance... _prodInsts)
throws EFapsException
{
final Map<Instance, CostBean> ret = new HashMap<>();
if (_prodInsts != null && _prodInsts.length > 0) {
final QueryBuilder queryBldr;
if (_alterCurrencyInstance != null && _alterCurrencyInstance.isValid()) {
queryBldr = new QueryBuilder(CIProducts.ProductCostAlternative);
queryBldr.addWhereAttrEqValue(CIProducts.ProductCostAlternative.CurrencyLink, _alterCurrencyInstance);
} else {
queryBldr = new QueryBuilder(CIProducts.ProductCost);
}
queryBldr.addWhereAttrLessValue(CIProducts.ProductCostAbstract.ValidFrom, _costDate.withTimeAtStartOfDay()
.plusMinutes(1));
queryBldr.addWhereAttrGreaterValue(CIProducts.ProductCostAbstract.ValidUntil, _costDate
.withTimeAtStartOfDay().minusMinutes(1));
queryBldr.addWhereAttrEqValue(CIProducts.ProductCostAbstract.ProductLink, (Object[]) _prodInsts);
if (_requestDate == null) {
queryBldr.addWhereAttrEqValue(CIProducts.ProductCostAbstract.StatusAbstract,
Status.find(CIProducts.ProductCostStatus.Active));
} else {
queryBldr.addWhereAttrEqValue(CIProducts.ProductCostAbstract.StatusAbstract,
Status.find(CIProducts.ProductCostStatus.Inactive));
queryBldr.addWhereAttrLessValue(CIProducts.ProductCostAbstract.Created, _requestDate
.withTimeAtStartOfDay().plusMinutes(1));
}
final MultiPrintQuery multi = queryBldr.getCachedPrint(Cost.CACHKEY)
.setLifespan(30).setLifespanUnit(TimeUnit.MINUTES);
final SelectBuilder selCurInst = SelectBuilder.get().linkto(CIProducts.ProductCostAbstract.CurrencyLink)
.instance();
final SelectBuilder selProdInst = SelectBuilder.get().linkto(CIProducts.ProductCostAbstract.ProductLink)
.instance();
multi.addSelect(selCurInst, selProdInst);
multi.addAttribute(CIProducts.ProductCostAbstract.Price, CIProducts.ProductCostAbstract.ValidFrom,
CIProducts.ProductCostAbstract.ValidUntil, CIProducts.ProductCostAbstract.Created);
multi.execute();
while (multi.next()) {
final Instance prodInst = multi.getSelect(selProdInst);
final CostBean bean = new CostBean()
.setCostInstance(multi.getCurrentInstance())
.setDate(_costDate)
.setProductInstance(prodInst)
.setCurrencyInstance(multi.<Instance>getSelect(selCurInst))
.setCost(multi.<BigDecimal>getAttribute(CIProducts.ProductCost.Price))
.setValidFrom(multi.<DateTime>getAttribute(CIProducts.ProductCost.ValidFrom))
.setValidUntil(multi.<DateTime>getAttribute(CIProducts.ProductCost.ValidUntil))
.setCreated(multi.<DateTime>getAttribute(CIProducts.ProductCost.Created));
// if an historic value is searched evaluate which will stay
if (_requestDate != null && ret.containsKey(prodInst)) {
final CostBean current = ret.get(prodInst);
if (current.getCreated().isBefore(bean.getCreated())
|| (!current.getCreated().isAfter(bean.getCreated())
&& current.getCostInstance().getId() < bean.getCostInstance().getId())) {
ret.put(prodInst, bean);
}
} else {
ret.put(prodInst, bean);
}
}
}
return ret;
}
/**
* Gets the bean.
*
* @param _parameter Parameter as passed by the eFaps API
* @return the bean
*/
protected CostBean getBean(final Parameter _parameter)
{
return new CostBean();
}
/**
* Gets the cost4 currency.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _productInstance the product instance
* @param _currencyInstance the currency instance
* @return the cost4 currency
* @throws EFapsException on error
*/
protected static BigDecimal getCost4Currency(final Parameter _parameter,
final Instance _productInstance,
final Instance _currencyInstance)
throws EFapsException
{
return Cost.getCost4Currency(_parameter, new DateTime(), _productInstance, _currencyInstance);
}
/**
* Gets the cost4 currency.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _alterCurrencyInstance the alter currency instance
* @param _productInstance the product instance
* @param _currencyInstance the currency instance
* @return the cost4 currency
* @throws EFapsException on error
*/
protected static BigDecimal getAlternativeCost4Currency(final Parameter _parameter,
final Instance _alterCurrencyInstance,
final Instance _productInstance,
final Instance _currencyInstance)
throws EFapsException
{
return Cost.getAlternativeCost4Currency(_parameter, new DateTime(), _alterCurrencyInstance,
_productInstance, _currencyInstance);
}
/**
* Gets the cost4 currency.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _date the date
* @param _productInstance the product instance
* @param _currencyInstance the currency instance
* @return the cost4 currency
* @throws EFapsException on error
*/
protected static BigDecimal getCost4Currency(final Parameter _parameter,
final DateTime _date,
final Instance _productInstance,
final Instance _currencyInstance)
throws EFapsException
{
BigDecimal ret = BigDecimal.ZERO;
final CostBean costBean = new Cost().getCost(_parameter, _date, _productInstance);
if (costBean != null) {
ret = costBean.getCost4Currency(_parameter, _currencyInstance);
}
return ret;
}
/**
* Gets the cost4 currency.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _date the date
* @param _alterCurrencyInstance the alter currency instance
* @param _productInstance the product instance
* @param _currencyInstance the currency instance
* @return the cost4 currency
* @throws EFapsException on error
*/
protected static BigDecimal getAlternativeCost4Currency(final Parameter _parameter,
final DateTime _date,
final Instance _alterCurrencyInstance,
final Instance _productInstance,
final Instance _currencyInstance)
throws EFapsException
{
BigDecimal ret = BigDecimal.ZERO;
final CostBean costBean = new Cost().getAlternativeCost(_parameter, _date, _alterCurrencyInstance,
_productInstance);
if (costBean != null) {
ret = costBean.getCost4Currency(_parameter, _currencyInstance);
}
return ret;
}
/**
* Gets the cost4 currency.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _date the date
* @param _requestDate the request date
* @param _productInstance the product instance
* @param _currencyInstance the currency instance
* @return the cost4 currency
* @throws EFapsException on error
*/
protected static BigDecimal getHistoricCost4Currency(final Parameter _parameter,
final DateTime _date,
final DateTime _requestDate,
final Instance _productInstance,
final Instance _currencyInstance)
throws EFapsException
{
BigDecimal ret = BigDecimal.ZERO;
final CostBean costBean = new Cost().getCostsInternal(_parameter, _date, _requestDate, null, _productInstance)
.get(_productInstance);
if (costBean != null) {
ret = costBean.getCost4Currency(_parameter, _currencyInstance);
}
return ret;
}
/**
* Gets the cost4 currency.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _date the date
* @param _requestDate the request date
* @param _alterCurrencyInstance the alter currency instance
* @param _productInstance the product instance
* @param _currencyInstance the currency instance
* @return the cost4 currency
* @throws EFapsException on error
*/
protected static BigDecimal getHistoricAlternativeCost4Currency(final Parameter _parameter,
final DateTime _date,
final DateTime _requestDate,
final Instance _alterCurrencyInstance,
final Instance _productInstance,
final Instance _currencyInstance)
throws EFapsException
{
BigDecimal ret = BigDecimal.ZERO;
final CostBean costBean = new Cost().getCostsInternal(_parameter, _date, _requestDate, _alterCurrencyInstance,
_productInstance).get(_productInstance);
if (costBean != null) {
ret = costBean.getCost4Currency(_parameter, _currencyInstance);
}
return ret;
}
/**
* The Class CostBean.
*/
public static class CostBean
{
/** The cost instance. */
private Instance costInstance;
/** The date. */
private DateTime validFrom;
/** The date. */
private DateTime validUntil;
/** The date. */
private DateTime created;
/** The date. */
private DateTime date;
/** The currency instance. */
private Instance currencyInstance;
/** The product instance. */
private Instance productInstance;
/** The cost. */
private BigDecimal cost;
/**
* Getter method for the instance variable {@link #currencyInstance}.
*
* @return value of instance variable {@link #currencyInstance}
*/
public Instance getCurrencyInstance()
{
return this.currencyInstance;
}
/**
* Setter method for instance variable {@link #currencyInstance}.
*
* @param _currencyInstance value for instance variable
* {@link #currencyInstance}
* @return the cost bean
*/
public CostBean setCurrencyInstance(final Instance _currencyInstance)
{
this.currencyInstance = _currencyInstance;
return this;
}
/**
* Getter method for the instance variable {@link #productInstance}.
*
* @return value of instance variable {@link #productInstance}
*/
public Instance getProductInstance()
{
return this.productInstance;
}
/**
* Setter method for instance variable {@link #productInstance}.
*
* @param _productInstance value for instance variable
* {@link #productInstance}
* @return the cost bean
*/
public CostBean setProductInstance(final Instance _productInstance)
{
this.productInstance = _productInstance;
return this;
}
/**
* Getter method for the instance variable {@link #cost}.
*
* @return value of instance variable {@link #cost}
*/
public BigDecimal getCost()
{
return this.cost;
}
/**
* Gets the cost4 currency.
*
* @param _parameter the _parameter
* @param _currencyInst the _currency inst
* @return the cost4 currency
* @throws EFapsException the e faps exception
*/
public BigDecimal getCost4Currency(final Parameter _parameter,
final Instance _currencyInst)
throws EFapsException
{
BigDecimal ret = BigDecimal.ZERO;
if (getCurrencyInstance().equals(_currencyInst)) {
ret = getCost();
} else if (getCost().compareTo(BigDecimal.ZERO) != 0) {
final RateInfo[] rateInfos = new Currency().evaluateRateInfos(_parameter, getDate(),
getCurrencyInstance(), _currencyInst);
final RateInfo rateInfo = rateInfos[2];
ret = getCost().setScale(8, BigDecimal.ROUND_HALF_UP)
.divide(rateInfo.getRate(), BigDecimal.ROUND_HALF_UP);
}
return ret;
}
/**
* Setter method for instance variable {@link #cost}.
*
* @param _cost value for instance variable {@link #cost}
* @return the cost bean
*/
public CostBean setCost(final BigDecimal _cost)
{
this.cost = _cost;
return this;
}
/**
* Getter method for the instance variable {@link #date}.
*
* @return value of instance variable {@link #date}
*/
public DateTime getDate()
{
return this.date;
}
/**
* Setter method for instance variable {@link #date}.
*
* @param _date value for instance variable {@link #date}
* @return the cost bean
*/
public CostBean setDate(final DateTime _date)
{
this.date = _date;
return this;
}
/**
* Getter method for the instance variable {@link #validFrom}.
*
* @return value of instance variable {@link #validFrom}
*/
public DateTime getValidFrom()
{
return this.validFrom;
}
/**
* Setter method for instance variable {@link #validFrom}.
*
* @param _validFrom value for instance variable {@link #validFrom}
* @return the cost bean
*/
public CostBean setValidFrom(final DateTime _validFrom)
{
this.validFrom = _validFrom;
return this;
}
/**
* Getter method for the instance variable {@link #validUntil}.
*
* @return value of instance variable {@link #validUntil}
*/
public DateTime getValidUntil()
{
return this.validUntil;
}
/**
* Setter method for instance variable {@link #validUntil}.
*
* @param _validUntil value for instance variable {@link #validUntil}
* @return the cost bean
*/
public CostBean setValidUntil(final DateTime _validUntil)
{
this.validUntil = _validUntil;
return this;
}
/**
* Getter method for the instance variable {@link #created}.
*
* @return value of instance variable {@link #created}
*/
public DateTime getCreated()
{
return this.created;
}
/**
* Setter method for instance variable {@link #created}.
*
* @param _created value for instance variable {@link #created}
* @return the cost bean
*/
public CostBean setCreated(final DateTime _created)
{
this.created = _created;
return this;
}
/**
* Gets the cost instance.
*
* @return the cost instance
*/
public Instance getCostInstance()
{
return this.costInstance;
}
/**
* Sets the cost instance.
*
* @param _costInstance the cost instance
* @return the cost bean
*/
public CostBean setCostInstance(final Instance _costInstance)
{
this.costInstance = _costInstance;
return this;
}
}
}
| |
/*
* Conditions Of Use
*
* This software was developed by employees of the National Institute of
* Standards and Technology (NIST), an agency of the Federal Government.
 * Pursuant to title 15 United States Code Section 105, works of NIST
* employees are not subject to copyright protection in the United States
* and are considered to be in the public domain. As a result, a formal
* license is not needed to use the software.
*
* This software is provided by NIST as a service and is expressly
* provided "AS IS." NIST MAKES NO WARRANTY OF ANY KIND, EXPRESS, IMPLIED
* OR STATUTORY, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTY OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT
* AND DATA ACCURACY. NIST does not warrant or make any representations
* regarding the use of the software or the results thereof, including but
* not limited to the correctness, accuracy, reliability or usefulness of
* the software.
*
* Permission to use this software is contingent upon your acceptance
* of the terms of this agreement
*
* .
*
*/
/*******************************************************************************
* Product of NIST/ITL Advanced Networking Technologies Division (ANTD). *
*******************************************************************************/
package gov.nist.javax.sip.header;
import gov.nist.core.*;
/**
* Challenge part of the Auth header. This is only used by the parser interface
*
* @author M. Ranganathan <br/>
* @version 1.2 $Revision: 1.8 $ $Date: 2010-05-06 14:07:54 $
* @since 1.1
*
*/
public class Challenge extends SIPObject {
/**
* Comment for <code>serialVersionUID</code>
*/
private static final long serialVersionUID = 5944455875924336L;
private static String DOMAIN = ParameterNames.DOMAIN;
private static String REALM = ParameterNames.REALM;
private static String OPAQUE = ParameterNames.OPAQUE;
private static String ALGORITHM = ParameterNames.ALGORITHM;
private static String QOP = ParameterNames.QOP;
private static String STALE = ParameterNames.STALE;
private static String SIGNATURE = ParameterNames.SIGNATURE;
private static String RESPONSE = ParameterNames.RESPONSE;
private static String SIGNED_BY = ParameterNames.SIGNED_BY;
private static String URI = ParameterNames.URI;
/**
* scheme field
*/
protected String scheme;
/**
* authParms list
*/
protected NameValueList authParams;
/**
* Default constructor
*/
public Challenge() {
authParams = new NameValueList();
authParams.setSeparator(COMMA);
}
/**
* Encode the challenge in canonical form.
* @return String
*/
public String encode() {
return new StringBuilder(scheme)
.append(SP)
.append(authParams.encode())
.toString();
}
/**
* get the scheme field
* @return String
*/
public String getScheme() {
return scheme;
}
/**
* get AuthParms list.
* @return NameValueList
*/
public NameValueList getAuthParams() {
return authParams;
}
/**
* get the domain
* @return String
*/
public String getDomain() {
return (String) authParams.getValue(DOMAIN);
}
/**
* get the URI field
* @return String
*/
public String getURI() {
return (String) authParams.getValue(URI);
}
/**
* get the Opaque field
* @return String
*/
public String getOpaque() {
return (String) authParams.getValue(OPAQUE);
}
/**
* get QOP value
* @return String
*/
public String getQOP() {
return (String) authParams.getValue(QOP);
}
/**
* get the Algorithm value.
* @return String
*/
public String getAlgorithm() {
return (String) authParams.getValue(ALGORITHM);
}
/**
* get the State value.
* @return String
*/
public String getStale() {
return (String) authParams.getValue(STALE);
}
/**
* get the Signature value.
* @return String
*/
public String getSignature() {
return (String) authParams.getValue(SIGNATURE);
}
/**
* get the signedBy value.
* @return String
*/
public String getSignedBy() {
return (String) authParams.getValue(SIGNED_BY);
}
/**
* get the Response value.
* @return String
*/
public String getResponse() {
return (String) authParams.getValue(RESPONSE);
}
/**
* get the realm value.
* @return String.
*/
public String getRealm() {
return (String) authParams.getValue(REALM);
}
/**
* get the specified parameter
* @param name String to set
* @return String to set
*/
public String getParameter(String name) {
return (String) authParams.getValue(name);
}
/**
* boolean function
* @param name String to set
* @return true if this header has the specified parameter, false otherwise.
*/
public boolean hasParameter(String name) {
return authParams.getNameValue(name) != null;
}
/**
* Boolean function
* @return true if this header has some parameters.
*/
public boolean hasParameters() {
return authParams.size() != 0;
}
/**
* delete the specified parameter
* @param name String
* @return true if the specified parameter has been removed, false
* otherwise.
*/
public boolean removeParameter(String name) {
return authParams.delete(name);
}
/**
* remove all parameters
*/
public void removeParameters() {
authParams = new NameValueList();
}
/**
* set the specified parameter
* @param nv NameValue to set
*/
public void setParameter(NameValue nv) {
authParams.set(nv);
}
/**
* Set the scheme member
* @param s String to set
*/
public void setScheme(String s) {
scheme = s;
}
/**
* Set the authParams member
* @param a NameValueList to set
*/
public void setAuthParams(NameValueList a) {
authParams = a;
}
public Object clone() {
Challenge retval = (Challenge) super.clone();
if (this.authParams != null)
retval.authParams = (NameValueList) this.authParams.clone();
return retval;
}
}
| |
package org.quinto.math;
import java.math.BigInteger;
import java.util.Iterator;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.quinto.math.PrimeUtils.isPrime;
import static org.quinto.math.PrimesIterable.*;
import static org.quinto.math.TestUtils.bi;
/**
 * Unit tests for the project's {@code PrimesIterable} utility (not visible in this
 * file): iteration over primes bounded by a count ({@code get*Totally}) or by a
 * maximum value ({@code get*Max}), plus next-prime lookup ({@code getNext}) for
 * {@code BigInteger}, {@code long} and {@code int}.
 *
 * All calls under test are static imports from {@code PrimesIterable};
 * {@code bi(...)} is a {@code BigInteger} factory statically imported from
 * {@code TestUtils}. Each test carries a JUnit 4 {@code timeout} guard because
 * prime generation could loop indefinitely on a regression.
 */
public class PrimesIterableTest
{
    // Reference table: the first 20 primes, used to check generated sequences in order.
    private static final int FIRST_PRIMES[] = new int[]{ 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71 };
    public PrimesIterableTest()
    {
    }
    @BeforeClass
    public static void setUpClass()
    {
    }
    @AfterClass
    public static void tearDownClass()
    {
    }
    @Before
    public void setUp()
    {
    }
    @After
    public void tearDown()
    {
    }
    /**
     * getBigIntegerTotally(n) yields exactly the first n primes; n <= 0 yields an
     * empty sequence; a huge n streams lazily (loop breaks after 20 elements).
     */
    @Test( timeout = 5000L )
    public void getBigIntegerTotallyCasual()
    {
        int i = 0;
        for ( BigInteger bi : getBigIntegerTotally( bi( 5 ) ) )
        {
            assertEquals( FIRST_PRIMES[ i ], bi.intValue() );
            i++;
        }
        Iterator< BigInteger > it = getBigIntegerTotally( bi( 1 ) ).iterator();
        assertTrue( it.hasNext() );
        assertEquals( 2, it.next().intValue() );
        assertFalse( it.hasNext() );
        it = getBigIntegerTotally( bi( 0 ) ).iterator();
        assertFalse( it.hasNext() );
        it = getBigIntegerTotally( bi( -5 ) ).iterator();
        assertFalse( it.hasNext() );
        i = 0;
        for ( BigInteger bi : getBigIntegerTotally( bi( "10000000000000000000000000000000000000000000000" ) ) )
        {
            assertEquals( FIRST_PRIMES[ i ], bi.intValue() );
            i++;
            if ( i == FIRST_PRIMES.length ) break;
        }
    }
    // A null count is rejected with IllegalArgumentException.
    @Test( timeout = 5000L, expected = IllegalArgumentException.class )
    public void getBigIntegerTotallyError()
    {
        getBigIntegerTotally( null );
    }
    /** Same count-bounded contract as above, for the primitive-long overload. */
    @Test( timeout = 5000L )
    public void getLongTotallyCasual()
    {
        int i = 0;
        for ( long l : getLongTotally( 5L ) )
        {
            assertEquals( FIRST_PRIMES[ i ], ( int )l );
            i++;
        }
        Iterator< Long > it = getLongTotally( 2L ).iterator();
        assertTrue( it.hasNext() );
        assertEquals( 2, it.next().intValue() );
        assertTrue( it.hasNext() );
        assertEquals( 3, it.next().intValue() );
        assertFalse( it.hasNext() );
        it = getLongTotally( 0L ).iterator();
        assertFalse( it.hasNext() );
        it = getLongTotally( -5L ).iterator();
        assertFalse( it.hasNext() );
        i = 0;
        for ( long l : getLongTotally( Long.MAX_VALUE ) )
        {
            assertEquals( FIRST_PRIMES[ i ], ( int )l );
            i++;
            if ( i == FIRST_PRIMES.length ) break;
        }
    }
    /** Same count-bounded contract as above, for the primitive-int overload. */
    @Test( timeout = 5000L )
    public void getIntTotallyCasual()
    {
        int i = 0;
        for ( int l : getIntegerTotally( 5 ) )
        {
            assertEquals( FIRST_PRIMES[ i ], l );
            i++;
        }
        Iterator< Integer > it = getIntegerTotally( 2 ).iterator();
        assertTrue( it.hasNext() );
        assertEquals( 2, ( int )it.next() );
        assertTrue( it.hasNext() );
        assertEquals( 3, ( int )it.next() );
        assertFalse( it.hasNext() );
        it = getIntegerTotally( 0 ).iterator();
        assertFalse( it.hasNext() );
        it = getIntegerTotally( -5 ).iterator();
        assertFalse( it.hasNext() );
        i = 0;
        for ( int l : getIntegerTotally( Integer.MAX_VALUE ) )
        {
            assertEquals( FIRST_PRIMES[ i ], l );
            i++;
            if ( i == FIRST_PRIMES.length ) break;
        }
    }
    /**
     * getBigIntegerMax(m) yields all primes <= m (inclusive: max of 3 includes 3);
     * bounds below 2 yield an empty sequence.
     */
    @Test( timeout = 5000L )
    public void getBigIntegerMaxCasual()
    {
        int i = 0;
        for ( BigInteger bi : getBigIntegerMax( bi( 5 ) ) )
        {
            assertEquals( FIRST_PRIMES[ i ], bi.intValue() );
            i++;
        }
        Iterator< BigInteger > it = getBigIntegerMax( bi( 3 ) ).iterator();
        assertTrue( it.hasNext() );
        assertEquals( 2, it.next().intValue() );
        assertTrue( it.hasNext() );
        assertEquals( 3, it.next().intValue() );
        assertFalse( it.hasNext() );
        it = getBigIntegerMax( bi( 0 ) ).iterator();
        assertFalse( it.hasNext() );
        it = getBigIntegerMax( bi( 1 ) ).iterator();
        assertFalse( it.hasNext() );
        it = getBigIntegerMax( bi( 2 ) ).iterator();
        assertTrue( it.hasNext() );
        assertEquals( 2, it.next().intValue() );
        assertFalse( it.hasNext() );
        it = getBigIntegerMax( bi( -5 ) ).iterator();
        assertFalse( it.hasNext() );
        i = 0;
        for ( BigInteger bi : getBigIntegerMax( bi( "10000000000000000000000000000000000000000000000" ) ) )
        {
            assertEquals( FIRST_PRIMES[ i ], bi.intValue() );
            i++;
            if ( i == FIRST_PRIMES.length ) break;
        }
    }
    // A null maximum is rejected with IllegalArgumentException.
    @Test( timeout = 5000L, expected = IllegalArgumentException.class )
    public void getBigIntegerMaxError()
    {
        getBigIntegerMax( null );
    }
    /** Same max-bounded contract as above, for the primitive-long overload. */
    @Test( timeout = 5000L )
    public void getLongMaxCasual()
    {
        int i = 0;
        for ( long l : getLongMax( 5L ) )
        {
            assertEquals( FIRST_PRIMES[ i ], ( int )l );
            i++;
        }
        Iterator< Long > it = getLongMax( 2L ).iterator();
        assertTrue( it.hasNext() );
        assertEquals( 2, it.next().intValue() );
        assertFalse( it.hasNext() );
        it = getLongMax( 0L ).iterator();
        assertFalse( it.hasNext() );
        it = getLongMax( 1L ).iterator();
        assertFalse( it.hasNext() );
        it = getLongMax( -5L ).iterator();
        assertFalse( it.hasNext() );
        i = 0;
        for ( long l : getLongMax( Long.MAX_VALUE ) )
        {
            assertEquals( FIRST_PRIMES[ i ], ( int )l );
            i++;
            if ( i == FIRST_PRIMES.length ) break;
        }
    }
    /** Same max-bounded contract as above, for the primitive-int overload. */
    @Test( timeout = 5000L )
    public void getIntMaxCasual()
    {
        int i = 0;
        for ( int l : getIntegerMax( 5 ) )
        {
            assertEquals( FIRST_PRIMES[ i ], l );
            i++;
        }
        Iterator< Integer > it = getIntegerMax( 2 ).iterator();
        assertTrue( it.hasNext() );
        assertEquals( 2, ( int )it.next() );
        assertFalse( it.hasNext() );
        it = getIntegerMax( 0 ).iterator();
        assertFalse( it.hasNext() );
        it = getIntegerMax( 1 ).iterator();
        assertFalse( it.hasNext() );
        it = getIntegerMax( -5 ).iterator();
        assertFalse( it.hasNext() );
        i = 0;
        for ( int l : getIntegerMax( Integer.MAX_VALUE ) )
        {
            assertEquals( FIRST_PRIMES[ i ], l );
            i++;
            if ( i == FIRST_PRIMES.length ) break;
        }
    }
    /**
     * getNext(n) returns the smallest prime >= n (n itself when prime). Negative
     * inputs mirror the positive result ("negative primes"); values past the
     * largest long prime continue in BigInteger space.
     */
    @Test( timeout = 5000L )
    public void getNextBigIntegerCasual()
    {
        assertEquals( bi( 2 ), getNext( bi( 0 ) ) );
        assertEquals( bi( 2 ), getNext( bi( 1 ) ) );
        assertEquals( bi( 2 ), getNext( bi( 2 ) ) );
        assertEquals( bi( 3 ), getNext( bi( 3 ) ) );
        assertEquals( bi( 5 ), getNext( bi( 4 ) ) );
        assertEquals( bi( 5 ), getNext( bi( 5 ) ) );
        assertEquals( bi( 7 ), getNext( bi( 6 ) ) );
        assertEquals( bi( 7 ), getNext( bi( 7 ) ) );
        assertEquals( bi( 11 ), getNext( bi( 8 ) ) );
        assertEquals( bi( 11 ), getNext( bi( 9 ) ) );
        assertEquals( bi( 11 ), getNext( bi( 10 ) ) );
        assertEquals( bi( -2 ), getNext( bi( -1 ) ) );
        assertEquals( bi( -2 ), getNext( bi( -2 ) ) );
        assertEquals( bi( -3 ), getNext( bi( -3 ) ) );
        assertEquals( bi( -5 ), getNext( bi( -4 ) ) );
        assertEquals( bi( 9223372036854775783L ), getNext( bi( 9223372036854775782L ) ) );
        assertEquals( bi( 9223372036854775783L ), getNext( bi( 9223372036854775783L ) ) );
        assertEquals( bi( "9223372036854775837" ), getNext( bi( 9223372036854775784L ) ) );
        assertEquals( bi( "9223372036854775907" ), getNext( bi( "9223372036854775838" ) ) );
        assertEquals( null, getNext( null ) );
    }
    /**
     * Sweep 0..999: result is prime, >= input, equals input iff input is prime,
     * and is symmetric under negation.
     */
    @Test( timeout = 5000L )
    public void getNextBigIntegerRange()
    {
        BigInteger prev = null;
        for ( int i = 0; i < 1000; i++ )
        {
            BigInteger bi = bi( i );
            BigInteger current = getNext( bi );
            assertTrue( isPrime( current ) );
            assertTrue( current.compareTo( bi ) >= 0 );
            assertEquals( isPrime( bi ), bi.equals( current ) );
            if ( i > 0 ) assertEquals( current.negate(), getNext( bi.negate() ) );
            if ( current.intValue() == i && prev != null && prev.intValue() != i - 1 ) assertEquals( prev, current );
            prev = current;
        }
    }
    // Edge-case BigInteger values supplied by TestUtils (boundaries, powers of two, etc.).
    @Test( timeout = 10000L )
    public void getNextBigIntegerSpecial()
    {
        for ( BigInteger bi : TestUtils.getBigIntegers() )
        {
            BigInteger current = getNext( bi );
            assertTrue( isPrime( current ) );
            if ( bi.signum() != 0 ) assertEquals( current.negate(), getNext( bi.negate() ) );
            boolean isPrime = isPrime( bi );
            assertEquals( isPrime, bi.equals( current ) );
            if ( isPrime )
            {
                if ( bi.signum() > 0 ) assertTrue( getNext( bi.add( BigInteger.ONE ) ).compareTo( bi ) > 0 );
                else assertTrue( getNext( bi.subtract( BigInteger.ONE ) ).compareTo( bi ) < 0 );
            }
        }
    }
    /**
     * Long overload of getNext. Past the largest long prime (9223372036854775783)
     * the expected values wrap to the negated prime — these assertions pin that
     * wrap-around behavior deliberately.
     */
    @Test( timeout = 5000L )
    public void getNextLongCasual()
    {
        assertEquals( 2L, getNext( 0L ) );
        assertEquals( 2L, getNext( 1L ) );
        assertEquals( 2L, getNext( 2L ) );
        assertEquals( 3L, getNext( 3L ) );
        assertEquals( 5L, getNext( 4L ) );
        assertEquals( 5L, getNext( 5L ) );
        assertEquals( 7L, getNext( 6L ) );
        assertEquals( 7L, getNext( 7L ) );
        assertEquals( 11L, getNext( 8L ) );
        assertEquals( 11L, getNext( 9L ) );
        assertEquals( 11L, getNext( 10L ) );
        assertEquals( -2L, getNext( -1L ) );
        assertEquals( -2L, getNext( -2L ) );
        assertEquals( -3L, getNext( -3L ) );
        assertEquals( -5L, getNext( -4L ) );
        assertEquals( 9223372036854775783L, getNext( 9223372036854775782L ) );
        assertEquals( 9223372036854775783L, getNext( 9223372036854775783L ) );
        assertEquals( -9223372036854775783L, getNext( 9223372036854775784L ) );
        assertEquals( -9223372036854775783L, getNext( Long.MAX_VALUE ) );
        assertEquals( -9223372036854775783L, getNext( -9223372036854775782L ) );
        assertEquals( -9223372036854775783L, getNext( -9223372036854775783L ) );
        assertEquals( 9223372036854775783L, getNext( -9223372036854775784L ) );
        assertEquals( 9223372036854775783L, getNext( -Long.MAX_VALUE ) );
        assertEquals( 9223372036854775783L, getNext( Long.MIN_VALUE ) );
    }
    /** Sweep 0..999 for the long overload (same invariants as the BigInteger sweep). */
    @Test( timeout = 5000L )
    public void getNextLongRange()
    {
        long prev = -1L;
        for ( long l = 0L; l < 1000L; l++ )
        {
            long current = getNext( l );
            assertTrue( isPrime( current ) );
            assertTrue( current >= l );
            assertEquals( isPrime( l ), l == current );
            if ( l > 0L ) assertEquals( -current, getNext( -l ) );
            if ( current == l && prev >= 0L && prev != l - 1L ) assertEquals( prev, current );
            prev = current;
        }
    }
    // Edge-case long values; negation symmetry is skipped for 0 and Long.MIN_VALUE
    // (MIN_VALUE has no positive counterpart in two's complement).
    @Test( timeout = 10000L )
    public void getNextLongSpecial()
    {
        for ( long l : TestUtils.getLongs() )
        {
            long current = getNext( l );
            assertTrue( isPrime( current ) );
            if ( l != 0L && l != Long.MIN_VALUE ) assertEquals( "For " + l, -current, getNext( -l ) );
            boolean isPrime = isPrime( l );
            assertEquals( isPrime, l == current );
            if ( isPrime )
            {
                if ( l > 0L )
                {
                    if ( l < PrimesIterable.LAST_LONG_PRIME ) assertTrue( "For " + l, getNext( l + 1L ) > l );
                }
                else
                {
                    if ( l > -PrimesIterable.LAST_LONG_PRIME ) assertTrue( "For " + l, getNext( l - 1L ) < l );
                }
            }
        }
    }
    /**
     * Int overload of getNext. Integer.MAX_VALUE (2^31-1) is itself prime, so the
     * extremes resolve to +/-Integer.MAX_VALUE rather than wrapping.
     */
    @Test( timeout = 5000L )
    public void getNextIntCasual()
    {
        assertEquals( 2, getNext( 0 ) );
        assertEquals( 2, getNext( 1 ) );
        assertEquals( 2, getNext( 2 ) );
        assertEquals( 3, getNext( 3 ) );
        assertEquals( 5, getNext( 4 ) );
        assertEquals( 5, getNext( 5 ) );
        assertEquals( 7, getNext( 6 ) );
        assertEquals( 7, getNext( 7 ) );
        assertEquals( 11, getNext( 8 ) );
        assertEquals( 11, getNext( 9 ) );
        assertEquals( 11, getNext( 10 ) );
        assertEquals( -2, getNext( -1 ) );
        assertEquals( -2, getNext( -2 ) );
        assertEquals( -3, getNext( -3 ) );
        assertEquals( -5, getNext( -4 ) );
        assertEquals( Integer.MAX_VALUE, getNext( Integer.MAX_VALUE - 1 ) );
        assertEquals( Integer.MAX_VALUE, getNext( Integer.MAX_VALUE ) );
        assertEquals( -Integer.MAX_VALUE, getNext( -Integer.MAX_VALUE ) );
        assertEquals( -Integer.MAX_VALUE, getNext( -Integer.MAX_VALUE + 1 ) );
        assertEquals( Integer.MAX_VALUE, getNext( Integer.MIN_VALUE ) );
    }
    /** Sweep 0..999 for the int overload (same invariants as the long sweep). */
    @Test( timeout = 5000L )
    public void getNextIntRange()
    {
        int prev = -1;
        for ( int l = 0; l < 1000; l++ )
        {
            int current = getNext( l );
            assertTrue( isPrime( current ) );
            assertTrue( current >= l );
            assertEquals( isPrime( l ), l == current );
            if ( l > 0 ) assertEquals( -current, getNext( -l ) );
            if ( current == l && prev >= 0 && prev != l - 1 ) assertEquals( prev, current );
            prev = current;
        }
    }
    // Edge-case int values; negation symmetry is skipped for 0 and Integer.MIN_VALUE.
    @Test( timeout = 10000L )
    public void getNextIntSpecial()
    {
        for ( int l : TestUtils.getInts() )
        {
            int current = getNext( l );
            assertTrue( isPrime( current ) );
            if ( l != 0 && l != Integer.MIN_VALUE ) assertEquals( "For " + l, -current, getNext( -l ) );
            boolean isPrime = isPrime( l );
            assertEquals( isPrime, l == current );
            if ( isPrime )
            {
                if ( l > 0 )
                {
                    if ( l < Integer.MAX_VALUE ) assertTrue( "For " + l, getNext( l + 1 ) > l );
                }
                else
                {
                    if ( l > -Integer.MAX_VALUE ) assertTrue( "For " + l, getNext( l - 1 ) < l );
                }
            }
        }
    }
}
| |
/*
* Copyright 2015, Randy Saborio & Tinbytes, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.tinbytes.samples.showhidetoolbar.util;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.annotation.TargetApi;
import android.os.Build;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.animation.LinearInterpolator;
/**
* Helper class for RecyclerView/Toolbar scroll listener.
*/
/**
 * Helper class for RecyclerView/Toolbar scroll listener.
 *
 * Fixes in this revision:
 * - {@code State.CREATOR} is now {@code public static final}, as required by the
 *   Android {@link Parcelable} contract ("a public static final field called CREATOR").
 * - {@code toolbar} and {@code state} are {@code final}; neither is reassigned
 *   after construction.
 */
public final class RecyclerViewUtils {
  /**
   * This class simplifies the hide/show Toolbar animation depicted in MainActivity.java.
   * Check MainActivity2.java to see how to use it.
   *
   * Not thread-safe; intended to be driven from the UI thread like any
   * RecyclerView.OnScrollListener.
   */
  public static class ShowHideToolbarOnScrollingListener extends RecyclerView.OnScrollListener {
    public static final String SHOW_HIDE_TOOLBAR_LISTENER_STATE = "show-hide-toolbar-listener-state";
    // The elevation of the toolbar when content is scrolled behind
    private static final float TOOLBAR_ELEVATION = 14f;
    // The toolbar being shown/hidden; set once at construction.
    private final Toolbar toolbar;
    // Mutable scroll/animation state, also used for save/restore.
    private final State state;
    /**
     * @param toolbar the toolbar to animate in response to list scrolling
     */
    public ShowHideToolbarOnScrollingListener(Toolbar toolbar) {
      this.toolbar = toolbar;
      this.state = new State();
    }
    // Elevation is a Lollipop+ API; a value of 0 clears it, anything else applies
    // the fixed TOOLBAR_ELEVATION (the exact argument is otherwise ignored).
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    private void toolbarSetElevation(float elevation) {
      if (AndroidUtils.isLollipop()) {
        toolbar.setElevation(elevation == 0 ? 0 : TOOLBAR_ELEVATION);
      }
    }
    /** Slides the toolbar back to y=0; elevation reflects whether content is scrolled behind. */
    private void toolbarAnimateShow(final int verticalOffset) {
      toolbar.animate()
          .translationY(0)
          .setInterpolator(new LinearInterpolator())
          .setDuration(180)
          .setListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationStart(Animator animation) {
              toolbarSetElevation(verticalOffset == 0 ? 0 : TOOLBAR_ELEVATION);
            }
          });
    }
    /** Slides the toolbar fully off-screen (by its own height) and drops elevation afterwards. */
    private void toolbarAnimateHide() {
      toolbar.animate()
          .translationY(-toolbar.getHeight())
          .setInterpolator(new LinearInterpolator())
          .setDuration(180)
          .setListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
              toolbarSetElevation(0);
            }
          });
    }
    /**
     * When scrolling settles, snap the toolbar to fully shown or fully hidden
     * based on the last scroll direction and how far the toolbar has moved.
     */
    @Override
    public final void onScrollStateChanged(RecyclerView recyclerView, int newState) {
      if (newState == RecyclerView.SCROLL_STATE_IDLE) {
        if (state.scrollingOffset > 0) {
          // Last gesture scrolled down: hide once past the toolbar height.
          if (state.verticalOffset > toolbar.getHeight()) {
            toolbarAnimateHide();
          } else {
            toolbarAnimateShow(state.verticalOffset);
          }
        } else if (state.scrollingOffset < 0) {
          // Last gesture scrolled up: only stay hidden if mostly off-screen already.
          if (toolbar.getTranslationY() < toolbar.getHeight() * -0.6 && state.verticalOffset > toolbar.getHeight()) {
            toolbarAnimateHide();
          } else {
            toolbarAnimateShow(state.verticalOffset);
          }
        }
      }
    }
    /**
     * Tracks incremental scrolling and drags the toolbar proportionally,
     * clamped between fully shown (0) and fully hidden (-height).
     */
    @Override
    public final void onScrolled(RecyclerView recyclerView, int dx, int dy) {
      state.verticalOffset = recyclerView.computeVerticalScrollOffset();
      state.scrollingOffset = dy;
      int toolbarYOffset = (int) (dy - toolbar.getTranslationY());
      // Cancel any show/hide animation in flight; manual dragging takes over.
      toolbar.animate().cancel();
      if (state.scrollingOffset > 0) {
        if (toolbarYOffset < toolbar.getHeight()) {
          if (state.verticalOffset > toolbar.getHeight()) {
            toolbarSetElevation(TOOLBAR_ELEVATION);
          }
          toolbar.setTranslationY(state.translationY = -toolbarYOffset);
        } else {
          toolbarSetElevation(0);
          toolbar.setTranslationY(state.translationY = -toolbar.getHeight());
        }
      } else if (state.scrollingOffset < 0) {
        if (toolbarYOffset < 0) {
          if (state.verticalOffset <= 0) {
            toolbarSetElevation(0);
          }
          toolbar.setTranslationY(state.translationY = 0);
        } else {
          if (state.verticalOffset > toolbar.getHeight()) {
            toolbarSetElevation(TOOLBAR_ELEVATION);
          }
          toolbar.setTranslationY(state.translationY = -toolbarYOffset);
        }
      }
    }
    /** Restores scroll offsets and reapplies the saved elevation/translation to the toolbar. */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public void onRestoreInstanceState(State state) {
      this.state.verticalOffset = state.verticalOffset;
      this.state.scrollingOffset = state.scrollingOffset;
      if (AndroidUtils.isLollipop()) {
        toolbar.setElevation(state.elevation);
        toolbar.setTranslationY(state.translationY);
      }
    }
    /** Snapshots the current toolbar translation/elevation into the state and returns it. */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public State onSaveInstanceState() {
      state.translationY = toolbar.getTranslationY();
      if (AndroidUtils.isLollipop()) {
        state.elevation = toolbar.getElevation();
      }
      return state;
    }
    /**
     * Parcelable RecyclerView/Toolbar state for simpler saving/restoring its current state.
     */
    public static final class State implements Parcelable {
      // Parcelable contract requires a public static final CREATOR field.
      public static final Creator<State> CREATOR = new Creator<State>() {
        public State createFromParcel(Parcel parcel) {
          return new State(parcel);
        }
        public State[] newArray(int size) {
          return new State[size];
        }
      };
      // Keeps track of the overall vertical offset in the list
      private int verticalOffset;
      // Determines the scroll UP/DOWN offset
      private int scrollingOffset;
      // Toolbar values
      private float translationY;
      private float elevation;
      State() {
      }
      // Read order must mirror writeToParcel exactly.
      State(Parcel parcel) {
        this.verticalOffset = parcel.readInt();
        this.scrollingOffset = parcel.readInt();
        this.translationY = parcel.readFloat();
        this.elevation = parcel.readFloat();
      }
      @Override
      public int describeContents() {
        return 0;
      }
      @Override
      public void writeToParcel(Parcel parcel, int flags) {
        parcel.writeInt(verticalOffset);
        parcel.writeInt(scrollingOffset);
        parcel.writeFloat(translationY);
        parcel.writeFloat(elevation);
      }
    }
  }
  // Utility class; never instantiated.
  private RecyclerViewUtils() {
  }
}
| |
/*
* Copyright 2016 John Grosh <john.a.grosh@gmail.com>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jagrosh.jmusicbot;
import com.sedmelluq.discord.lavaplayer.player.AudioPlayer;
import com.sedmelluq.discord.lavaplayer.player.AudioPlayerManager;
import com.sedmelluq.discord.lavaplayer.player.DefaultAudioPlayerManager;
import com.sedmelluq.discord.lavaplayer.source.AudioSourceManagers;
import com.sedmelluq.discord.lavaplayer.track.AudioTrack;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import com.jagrosh.jdautilities.commandclient.Command.Category;
import com.jagrosh.jdautilities.commandclient.CommandEvent;
import com.jagrosh.jdautilities.waiter.EventWaiter;
import com.jagrosh.jmusicbot.audio.AudioHandler;
import com.jagrosh.jmusicbot.gui.GUI;
import com.jagrosh.jmusicbot.utils.FormatUtil;
import net.dv8tion.jda.core.JDA;
import net.dv8tion.jda.core.Permission;
import net.dv8tion.jda.core.entities.Game;
import net.dv8tion.jda.core.entities.Guild;
import net.dv8tion.jda.core.entities.Role;
import net.dv8tion.jda.core.entities.TextChannel;
import net.dv8tion.jda.core.entities.VoiceChannel;
import net.dv8tion.jda.core.events.ReadyEvent;
import net.dv8tion.jda.core.events.ShutdownEvent;
import net.dv8tion.jda.core.events.guild.GuildJoinEvent;
import net.dv8tion.jda.core.hooks.ListenerAdapter;
import net.dv8tion.jda.core.utils.SimpleLog;
import org.json.JSONException;
import org.json.JSONObject;
/**
*
* @author John Grosh <john.a.grosh@gmail.com>
*/
/**
 * Central JDA event listener and shared state for the music bot: owns the
 * lavaplayer {@link AudioPlayerManager}, per-guild {@link Settings} (persisted
 * to {@code serversettings.json}), a scheduler for channel-topic updates, and
 * the optional Swing {@link GUI}.
 *
 * Settings maps are keyed by guild ID (String). Not designed for use from
 * threads other than JDA's event threads and the internal scheduler.
 */
public class Bot extends ListenerAdapter {
    // Per-guild settings keyed by guild ID string; loaded from serversettings.json.
    private final HashMap<String,Settings> settings;
    // Lavaplayer manager shared by all guilds' players.
    private final AudioPlayerManager manager;
    private final EventWaiter waiter;
    // Single-threaded scheduler used for periodic channel-topic refreshes.
    private final ScheduledExecutorService threadpool;
    private final Config config;
    // Assigned in onReady; null until the gateway connection is ready.
    private JDA jda;
    // Optional Swing GUI; null when running headless.
    private GUI gui;
    //private GuildsPanel panel;
    // Command categories with permission gates used by the command framework.
    public final Category MUSIC = new Category("Music");
    // DJ commands: allowed for the bot owner, DMs, server managers, or holders of the configured DJ role.
    public final Category DJ = new Category("DJ", event ->
    {
        if(event.getAuthor().getId().equals(event.getClient().getOwnerId()))
            return true;
        if(event.getGuild()==null)
            return true;
        if(event.getMember().hasPermission(Permission.MANAGE_SERVER))
            return true;
        Role dj = event.getGuild().getRoleById(getSettings(event.getGuild()).getRoleId());
        return event.getMember().getRoles().contains(dj);
    });
    // Admin commands: bot owner, DMs, or server managers only.
    public final Category ADMIN = new Category("Admin", event ->
    {
        if(event.getAuthor().getId().equals(event.getClient().getOwnerId()))
            return true;
        if(event.getGuild()==null)
            return true;
        return event.getMember().hasPermission(Permission.MANAGE_SERVER);
    });
    public final Category OWNER = new Category("Owner");
    /**
     * Builds the bot, registers lavaplayer sources, and loads any persisted
     * per-guild settings from serversettings.json (missing file or malformed
     * JSON is logged and treated as "no settings").
     */
    public Bot(EventWaiter waiter, Config config)
    {
        this.config = config;
        this.waiter = waiter;
        this.settings = new HashMap<>();
        manager = new DefaultAudioPlayerManager();
        threadpool = Executors.newSingleThreadScheduledExecutor();
        AudioSourceManagers.registerRemoteSources(manager);
        AudioSourceManagers.registerLocalSource(manager);
        try {
            JSONObject loadedSettings = new JSONObject(new String(Files.readAllBytes(Paths.get("serversettings.json"))));
            loadedSettings.keySet().forEach((id) -> {
                JSONObject o = loadedSettings.getJSONObject(id);
                // Absent keys fall back to null (or 100 for volume) — mirrors writeSettings(),
                // which omits defaults when serializing.
                settings.put(id, new Settings(
                        o.has("text_channel_id") ? o.getString("text_channel_id") : null,
                        o.has("voice_channel_id")? o.getString("voice_channel_id"): null,
                        o.has("dj_role_id") ? o.getString("dj_role_id") : null,
                        o.has("volume") ? o.getInt("volume") : 100,
                        o.has("default_playlist")? o.getString("default_playlist"): null));
            });
        } catch(IOException | JSONException e) {
            SimpleLog.getLog("Settings").warn("Failed to load server settings: "+e);
        }
    }
    public EventWaiter getWaiter()
    {
        return waiter;
    }
    public AudioPlayerManager getAudioManager()
    {
        return manager;
    }
    /** Queues a track in the event's guild, creating the audio handler on demand. */
    public int queueTrack(CommandEvent event, AudioTrack track)
    {
        return setUpHandler(event).addTrack(track, event.getAuthor());
    }
    public AudioHandler setUpHandler(CommandEvent event)
    {
        return setUpHandler(event.getGuild());
    }
    /**
     * Returns the guild's AudioHandler, lazily creating the player, applying the
     * saved volume, and starting a 5-second periodic topic-update task on first use.
     */
    public AudioHandler setUpHandler(Guild guild)
    {
        AudioHandler handler;
        if(guild.getAudioManager().getSendingHandler()==null)
        {
            AudioPlayer player = manager.createPlayer();
            if(settings.containsKey(guild.getId()))
                player.setVolume(settings.get(guild.getId()).getVolume());
            handler = new AudioHandler(player, guild, this);
            player.addListener(handler);
            guild.getAudioManager().setSendingHandler(handler);
            // NOTE(review): this task is never cancelled for the guild's lifetime;
            // it only stops when the threadpool is shut down in shutdown().
            threadpool.scheduleWithFixedDelay(() -> updateTopic(guild,handler), 0, 5, TimeUnit.SECONDS);
        }
        else
            handler = (AudioHandler)guild.getAudioManager().getSendingHandler();
        return handler;
    }
    /** Resets the "Playing ..." presence to the configured game, or clears it for "none"/unset. */
    public void resetGame()
    {
        if(config.getGame()==null || config.getGame().equalsIgnoreCase("none"))
            jda.getPresence().setGame(null);
        else
            jda.getPresence().setGame(Game.of(config.getGame()));
    }
    /**
     * Rewrites the configured text channel's topic with the current playback
     * status, preserving any user text after the zero-width-space marker
     * (U+200B) that delimits the bot-managed prefix. No-ops without
     * MANAGE_CHANNEL permission or when the topic is already up to date.
     */
    private void updateTopic(Guild guild, AudioHandler handler)
    {
        TextChannel tchan = guild.getTextChannelById(getSettings(guild).getTextId());
        if(tchan!=null && guild.getSelfMember().hasPermission(tchan, Permission.MANAGE_CHANNEL))
        {
            String otherText;
            if(tchan.getTopic()==null || tchan.getTopic().isEmpty())
                otherText = "\u200B";
            else if(tchan.getTopic().contains("\u200B"))
                otherText = tchan.getTopic().substring(tchan.getTopic().indexOf("\u200B"));
            else
                otherText = "\u200B\n "+tchan.getTopic();
            String text = FormatUtil.formattedAudio(handler, guild.getJDA())+otherText;
            if(!text.equals(tchan.getTopic()))
                tchan.getManager().setTopic(text).queue();
        }
    }
    /**
     * Graceful shutdown: stops lavaplayer and the scheduler, disconnects and
     * clears every guild's audio state (with a final topic refresh), then shuts
     * down JDA.
     */
    public void shutdown(){
        manager.shutdown();
        threadpool.shutdownNow();
        jda.getGuilds().stream().forEach(g -> {
            g.getAudioManager().closeAudioConnection();
            AudioHandler ah = (AudioHandler)g.getAudioManager().getSendingHandler();
            if(ah!=null)
            {
                ah.getQueue().clear();
                ah.getPlayer().destroy();
                updateTopic(g, ah);
            }
        });
        jda.shutdown();
    }
    public void setGUI(GUI gui)
    {
        this.gui = gui;
    }
    @Override
    public void onShutdown(ShutdownEvent event) {
        if(gui!=null)
            gui.dispose();
    }
    /**
     * On gateway ready: warn (with an invite URL) if the bot is in no guilds,
     * run the clone check, then for each guild auto-join the configured voice
     * channel and start its default playlist if both are set.
     */
    @Override
    public void onReady(ReadyEvent event) {
        this.jda = event.getJDA();
        if(jda.getGuilds().isEmpty())
        {
            SimpleLog.getLog("MusicBot").warn("This bot is not on any guilds! Use the following link to add the bot to your guilds!");
            SimpleLog.getLog("MusicBot").warn(event.getJDA().asBot().getInviteUrl(JMusicBot.RECOMMENDED_PERMS));
        }
        credit(event.getJDA());
        jda.getGuilds().forEach((guild) -> {
            try
            {
                String defpl = getSettings(guild).getDefaultPlaylist();
                VoiceChannel vc = guild.getVoiceChannelById(getSettings(guild).getVoiceId());
                if(defpl!=null && vc!=null)
                {
                    if(setUpHandler(guild).playFromDefault())
                        guild.getAudioManager().openAudioConnection(vc);
                }
            }
            // Best-effort per guild: one guild's failure must not block the rest.
            catch(Exception ex) {System.err.println(ex);}
        });
    }
    @Override
    public void onGuildJoin(GuildJoinEvent event) {
        credit(event.getJDA());
    }
    // make sure people aren't adding clones to dbots
    // If this (unofficial) instance is in the hard-coded guild (presumably the
    // "Discord Bots" listing server — TODO confirm IDs), post a notice and leave,
    // unless the config marks this as the listed instance.
    private void credit(JDA jda)
    {
        Guild dbots = jda.getGuildById(110373943822540800L);
        if(dbots==null)
            return;
        if(config.getDBots())
            return;
        jda.getTextChannelById(119222314964353025L)
                .sendMessage("<@113156185389092864>: This account is running JMusicBot. Please do not list bot clones on this server, <@"+config.getOwnerId()+">.").complete();
        dbots.leave().queue();
    }
    // settings
    /** Returns the guild's settings, or the shared immutable defaults when none are stored. */
    public Settings getSettings(Guild guild)
    {
        return settings.getOrDefault(guild.getId(), Settings.DEFAULT_SETTINGS);
    }
    // NOTE(review): new Settings are built from the String id while updates use
    // the long id (getIdLong) — presumably Settings accepts both representations;
    // verify against the Settings class.
    public void setTextChannel(TextChannel channel)
    {
        Settings s = settings.get(channel.getGuild().getId());
        if(s==null)
        {
            settings.put(channel.getGuild().getId(), new Settings(channel.getId(),null,null,100,null));
        }
        else
        {
            s.setTextId(channel.getIdLong());
        }
        writeSettings();
    }
    public void setVoiceChannel(VoiceChannel channel)
    {
        Settings s = settings.get(channel.getGuild().getId());
        if(s==null)
        {
            settings.put(channel.getGuild().getId(), new Settings(null,channel.getId(),null,100,null));
        }
        else
        {
            s.setVoiceId(channel.getIdLong());
        }
        writeSettings();
    }
    public void setRole(Role role)
    {
        Settings s = settings.get(role.getGuild().getId());
        if(s==null)
        {
            settings.put(role.getGuild().getId(), new Settings(null,null,role.getId(),100,null));
        }
        else
        {
            s.setRoleId(role.getIdLong());
        }
        writeSettings();
    }
    public void setDefaultPlaylist(Guild guild, String playlist)
    {
        Settings s = settings.get(guild.getId());
        if(s==null)
        {
            settings.put(guild.getId(), new Settings(null,null,null,100,playlist));
        }
        else
        {
            s.setDefaultPlaylist(playlist);
        }
        writeSettings();
    }
    public void setVolume(Guild guild, int volume)
    {
        Settings s = settings.get(guild.getId());
        if(s==null)
        {
            settings.put(guild.getId(), new Settings(null,null,null,volume,null));
        }
        else
        {
            s.setVolume(volume);
        }
        writeSettings();
    }
    // The clear* methods drop the whole entry once all IDs are cleared, so the
    // JSON file doesn't accumulate empty guild records.
    public void clearTextChannel(Guild guild)
    {
        Settings s = getSettings(guild);
        if(s!=Settings.DEFAULT_SETTINGS)
        {
            if(s.getVoiceId()==0 && s.getRoleId()==0)
                settings.remove(guild.getId());
            else
                s.setTextId(0);
            writeSettings();
        }
    }
    public void clearVoiceChannel(Guild guild)
    {
        Settings s = getSettings(guild);
        if(s!=Settings.DEFAULT_SETTINGS)
        {
            if(s.getTextId()==0 && s.getRoleId()==0)
                settings.remove(guild.getId());
            else
                s.setVoiceId(0);
            writeSettings();
        }
    }
    public void clearRole(Guild guild)
    {
        Settings s = getSettings(guild);
        if(s!=Settings.DEFAULT_SETTINGS)
        {
            if(s.getVoiceId()==0 && s.getTextId()==0)
                settings.remove(guild.getId());
            else
                s.setRoleId(0);
            writeSettings();
        }
    }
    /**
     * Serializes all stored settings to serversettings.json, omitting values
     * that equal the defaults (0 ids, volume 100, null playlist). Write errors
     * are logged, not propagated.
     */
    private void writeSettings()
    {
        JSONObject obj = new JSONObject();
        settings.keySet().stream().forEach(key -> {
            JSONObject o = new JSONObject();
            Settings s = settings.get(key);
            if(s.getTextId()!=0)
                o.put("text_channel_id", Long.toString(s.getTextId()));
            if(s.getVoiceId()!=0)
                o.put("voice_channel_id", Long.toString(s.getVoiceId()));
            if(s.getRoleId()!=0)
                o.put("dj_role_id", Long.toString(s.getRoleId()));
            if(s.getVolume()!=100)
                o.put("volume",s.getVolume());
            if(s.getDefaultPlaylist()!=null)
                o.put("default_playlist", s.getDefaultPlaylist());
            obj.put(key, o);
        });
        try {
            Files.write(Paths.get("serversettings.json"), obj.toString(4).getBytes());
        } catch(IOException ex){
            SimpleLog.getLog("Settings").warn("Failed to write to file: "+ex);
        }
    }
    //gui stuff
    /*public void registerPanel(GuildsPanel panel)
    {
        this.panel = panel;
        threadpool.scheduleWithFixedDelay(() -> updatePanel(), 0, 5, TimeUnit.SECONDS);
    }
    public void updatePanel()
    {
        System.out.println("updating...");
        Guild guild = jda.getGuilds().get(panel.getIndex());
        panel.updatePanel((AudioHandler)guild.getAudioManager().getSendingHandler());
    }
    @Override
    public void onGuildJoin(GuildJoinEvent event) {
        if(panel!=null)
            panel.updateList(event.getJDA().getGuilds());
    }
    @Override
    public void onGuildLeave(GuildLeaveEvent event) {
        if(panel!=null)
            panel.updateList(event.getJDA().getGuilds());
    }
    @Override
    public void onShutdown(ShutdownEvent event) {
        ((GUI)panel.getTopLevelAncestor()).dispose();
    }*/
}
| |
package uk.co.jemos.podam.test.unit.features.constructors;
import net.serenitybdd.junit.runners.SerenityRunner;
import net.thucydides.core.annotations.Title;
import org.junit.Test;
import org.junit.runner.RunWith;
import uk.co.jemos.podam.api.PodamFactory;
import uk.co.jemos.podam.test.dto.*;
import uk.co.jemos.podam.test.dto.issue123.GenericCollectionsConstructorPojo;
import uk.co.jemos.podam.test.unit.AbstractPodamSteps;
import javax.xml.bind.JAXBElement;
import java.util.Date;
import java.util.Observable;
import java.util.TimeZone;
/**
* @author daivanov
*
*/
@RunWith(SerenityRunner.class)
public class ConstructorsUnitTest extends AbstractPodamSteps {
@Test
@Title("Podam should handle generics in the constructor")
public void podamShouldHandleGenericsInConstructor() throws Exception {
PodamFactory podamFactory = podamFactorySteps.givenAStandardPodamFactory();
GenericInConstructorPojo pojo
= podamInvocationSteps.whenIInvokeTheFactoryForClass(GenericInConstructorPojo.class, podamFactory);
podamValidationSteps.theObjectShouldNotBeNull(pojo);
podamValidationSteps.theCollectionShouldNotBeNullOrEmptyAndContainElementsOfType(pojo.getVector(), String.class);
}
@Test
@Title("Podam should handle generics in setters during Pojo instantiation")
public void podamShouldHandleGenericsInSettersDuringPojoInstantiation() throws Exception {
PodamFactory podamFactory = podamFactorySteps.givenAStandardPodamFactory();
GenericInSetterPojo pojo
= podamInvocationSteps.whenIInvokeTheFactoryForClass(GenericInSetterPojo.class, podamFactory);
podamValidationSteps.theObjectShouldNotBeNull(pojo);
podamValidationSteps.theCollectionShouldNotBeNullOrEmptyAndContainElementsOfType(pojo.getVector(), String.class);
}
@Test
@Title("Podam should handle generics in static constructors during POJO instantiation")
public void podamShouldHandleGenericsInStaticConstructorsDuringPojoInstantiation() throws Exception {
PodamFactory podamFactory = podamFactorySteps.givenAStandardPodamFactory();
GenericInStaticConstructorPojo pojo
= podamInvocationSteps.whenIInvokeTheFactoryForClass(GenericInStaticConstructorPojo.class, podamFactory);
podamValidationSteps.theObjectShouldNotBeNull(pojo);
podamValidationSteps.theCollectionShouldNotBeNullOrEmptyAndContainElementsOfType(pojo.getVector(), String.class);
}
@Test
@Title("Podam should handle constructors with generic arrays during Pojo instantiation")
public void podamShouldHandleConstructorsWithGenericArraysDuringPojoInstantiation() {
PodamFactory podamFactory = podamFactorySteps.givenAStandardPodamFactory();
GenericArrayInConstructorPojo<?> pojo
= podamInvocationSteps.whenIInvokeTheFactoryForGenericTypeWithSpecificType(
GenericArrayInConstructorPojo.class, podamFactory, String.class);
podamValidationSteps.theObjectShouldNotBeNull(pojo);
podamValidationSteps.theArrayOfTheGivenTypeShouldNotBeNullOrEmptyAndContainElementsOfTheRightType(
pojo.getArray(), String.class);
}
@Test
@Title("Podam should handle constructors with multiple generics during Pojo instantiation")
public void podamShouldHandleConstructorsWithMultipleGenericsDuringPojoInstantiation() {
PodamFactory podamFactory = podamFactorySteps.givenAStandardPodamFactory();
MultipleGenericInConstructorPojo<?, ?, ?, ?> pojo
= podamInvocationSteps.whenIInvokeTheFactoryForGenericTypeWithSpecificType(MultipleGenericInConstructorPojo.class,
podamFactory, String.class, Character.class, Byte.class, Integer.class);
podamValidationSteps.theObjectShouldNotBeNull(pojo);
podamValidationSteps.theTwoObjectsShouldBeEqual(String.class, pojo.getType());
podamValidationSteps.theCollectionShouldNotBeNullOrEmptyAndContainElementsOfType(pojo.getList(), Character.class);
podamValidationSteps.theMapShouldNotBeNullOrEmptyAndContainElementsOfType(
pojo.getMap(), Byte.class, Integer.class);
}
@Test
@Title("Podam should handle classes with generic Key/Value types")
public void podamShouldHandleClassesWithKeyValueGenericTypes() throws Exception {
PodamFactory podamFactory = podamFactorySteps.givenAStandardPodamFactory();
DefaultFieldPojo<?,?> pojo = podamInvocationSteps.whenIInvokeTheFactoryForGenericTypeWithSpecificType(
DefaultFieldPojo.class, podamFactory, String.class, Long.class);
podamValidationSteps.theObjectShouldNotBeNull(pojo);
podamValidationSteps.theObjectShouldNotBeNull(pojo.getMap());
podamValidationSteps.theMapShouldNotBeNullOrEmptyAndContainElementsOfType(
pojo.getMap(), String.class, Long.class);
}
@Test
@Title("Podam should be able to manufacture instances of the Observable class")
public void podamShouldBeAbleToManufactureInstancesOfTheObservableClass() throws Exception {
PodamFactory podamFactory = podamFactorySteps.givenAStandardPodamFactory();
Observable observable = podamInvocationSteps.whenIInvokeTheFactoryForClass(Observable.class, podamFactory);
podamValidationSteps.theObjectShouldNotBeNull(observable);
}
@Test
@Title("Podam should be able to manufacture POJOs which contain immutable collections")
public void podamShouldBeAbleToManufacturePojosWhichContainImmutableCollections() throws Exception {
PodamFactory podamFactory = podamFactorySteps.givenAStandardPodamFactory();
ImmutableDefaultFieldsPojo model =
podamInvocationSteps.whenIInvokeTheFactoryForClass(ImmutableDefaultFieldsPojo.class, podamFactory);
podamValidationSteps.theObjectShouldNotBeNull(model);
podamValidationSteps.theListShouldNotBeNullAndContainAtLeastOneNonEmptyElement(model.getList());
podamValidationSteps.theListShouldHaveExactlyTheExpectedNumberOfElements(model.getList(),
podamFactory.getStrategy().getNumberOfCollectionElements(model.getList().getClass()));
podamValidationSteps.theMapShouldContainAtLeastOneNonEmptyElement(model.getMap());
podamValidationSteps.theMapShouldHaveExactlyTheExpectedNumberOfElements(model.getMap(),
podamFactory.getStrategy().getNumberOfCollectionElements(model.getMap().getClass()));
}
@Test
@Title("Podam should be able to manufacture any type of Lists")
public void podamShouldBeAbleToManufactureAnyTypeOfCollections() throws Exception {
    PodamFactory factory = podamFactorySteps.givenAStandardPodamFactory();
    // The POJO exposes a Vector that is not supported in its constructor;
    // Podam should still populate it with the requested element type
    UnsupportedCollectionInConstructorPojo<?> manufactured =
            podamInvocationSteps.whenIInvokeTheFactoryForGenericTypeWithSpecificType(
                    UnsupportedCollectionInConstructorPojo.class, factory, String.class);
    podamValidationSteps.theObjectShouldNotBeNull(manufactured);
    podamValidationSteps.theCollectionShouldNotBeNullOrEmptyAndContainElementsOfType(
            manufactured.getVector(), String.class);
}
@Test
@Title("Podam should be able to manufacture any type of Maps")
public void podamShouldBeAbleToManufactureAnyTypeOfMaps() throws Exception {
    PodamFactory factory = podamFactorySteps.givenAStandardPodamFactory();
    // The POJO exposes a Hashtable that is not supported in its constructor;
    // Podam should still populate it with the requested key/value types
    UnsupportedMapInConstructorPojo<?, ?> manufactured =
            podamInvocationSteps.whenIInvokeTheFactoryForGenericTypeWithSpecificType(
                    UnsupportedMapInConstructorPojo.class, factory, String.class, Integer.class);
    podamValidationSteps.theObjectShouldNotBeNull(manufactured);
    podamValidationSteps.theMapShouldNotBeNullOrEmptyAndContainElementsOfType(
            manufactured.getHashTable(), String.class, Integer.class);
}
@Test
@Title("Podam should be able to instantiate POJOs with immutable Collections")
public void podamShouldBeAbleToInstantiatePojosWithImmutableCollections() throws Exception {
    PodamFactory factory = podamFactorySteps.givenAStandardPodamFactory();
    ImmutableVector<?> manufactured = podamInvocationSteps.whenIInvokeTheFactoryForGenericTypeWithSpecificType(
            ImmutableVector.class, factory, String.class);
    // An immutable collection cannot be populated, so it must come back empty
    podamValidationSteps.theCollectionShouldBeEmpty(manufactured);
}
@Test
@Title("Podam should be able to instantiate POJOs with immutable Maps")
public void podamShouldBeAbleToInstantiatePojosWithImmutableMaps() throws Exception {
    PodamFactory factory = podamFactorySteps.givenAStandardPodamFactory();
    ImmutableHashtable<?, ?> manufactured = podamInvocationSteps.whenIInvokeTheFactoryForGenericTypeWithSpecificType(
            ImmutableHashtable.class, factory, String.class, Integer.class);
    podamValidationSteps.theObjectShouldNotBeNull(manufactured);
    // An immutable map cannot be populated, so it must come back empty
    // (validation-step name carries a historical typo: "Emtpy")
    podamValidationSteps.theMapShouldBeEmtpy(manufactured);
}
@Test
@Title("Podam should be able to create instances of abstract POJOs with factory methods which return concrete types")
public void podamShouldInstantiateAbstractClassesForWhichItKnowsConcreteTypes() throws Exception {
    PodamFactory factory = podamFactorySteps.givenAStandardPodamFactory();
    // TimeZone is abstract but provides a static factory returning a concrete subtype
    TimeZone manufactured = podamInvocationSteps.whenIInvokeTheFactoryForClass(TimeZone.class, factory);
    podamValidationSteps.theObjectShouldNotBeNull(manufactured);
}
@Test
@Title("Podam should be able to create instances of generic POJOs with factory methods when the concrete type is known")
public void podamShouldCreateInstancesOfGenericPojosWithFactoryMethodsWhenTheConcreteTypeIsKnown() throws Exception {
    PodamFactory factory = podamFactorySteps.givenAStandardPodamFactory();
    FactoryInstantiablePojo<?> manufactured = podamInvocationSteps.whenIInvokeTheFactoryForGenericTypeWithSpecificType(
            FactoryInstantiablePojo.class, factory, Date.class);
    podamValidationSteps.theObjectShouldNotBeNull(manufactured);
    // The generic slot must have been resolved to the requested Date type
    Object typedValue = manufactured.getTypedValue();
    podamValidationSteps.theObjectShouldNotBeNull(typedValue);
    podamValidationSteps.theTwoObjectsShouldBeEqual(Date.class, typedValue.getClass());
}
@Test
@Title("Podam should choose the fullest constructor when invoked for full data")
public void podamShouldChooseTheFullestConstructorWhenInvokedForFullData() throws Exception {
    PodamFactory factory = podamFactorySteps.givenAStandardPodamFactory();
    ImmutablePojo manufactured = podamInvocationSteps.whenIInvokeTheFactoryForClassWithFullConstructor(
            ImmutablePojo.class, factory);
    podamValidationSteps.theObjectShouldNotBeNull(manufactured);
    // The fullest constructor populates every immutable field
    podamValidationSteps.theObjectShouldNotBeNull(manufactured.getValue());
    podamValidationSteps.theObjectShouldNotBeNull(manufactured.getValue2());
}
@Test
@Title("Podam should choose the lightest constructor when the standard manufacturing method is invoked")
public void testImmutablePojoConstructionFailure() throws Exception {
    PodamFactory factory = podamFactorySteps.givenAStandardPodamFactory();
    ImmutablePojo manufactured = podamInvocationSteps.whenIInvokeTheFactoryForClass(ImmutablePojo.class, factory);
    podamValidationSteps.theObjectShouldNotBeNull(manufactured);
    // The lightest (no-arg) constructor leaves the immutable fields unset
    podamValidationSteps.theObjectShouldBeNull(manufactured.getValue());
    podamValidationSteps.theObjectShouldBeNull(manufactured.getValue2());
}
@Test
@Title("Podam should be able to create instances of inner classes")
public void podamShouldCreateInstancesOfInnerClasses() throws Exception {
    PodamFactory factory = podamFactorySteps.givenAStandardPodamFactory();
    InnerClassPojo manufactured = podamInvocationSteps.whenIInvokeTheFactoryForClass(InnerClassPojo.class, factory);
    podamValidationSteps.theObjectShouldNotBeNull(manufactured);
    // The field typed as an inner class must be instantiated as well
    podamValidationSteps.theObjectShouldNotBeNull(manufactured.getIp());
}
@Test
@Title("Podam should be able to create instances of JAXBElement")
public void podamShouldCreateInstancesOfJAXBElements() throws Exception {
    PodamFactory factory = podamFactorySteps.givenAStandardPodamFactory();
    JAXBElement<?> manufactured = podamInvocationSteps.whenIInvokeTheFactoryForGenericTypeWithSpecificType(
            JAXBElement.class, factory, String.class);
    podamValidationSteps.theObjectShouldNotBeNull(manufactured);
    podamValidationSteps.theObjectShouldNotBeNull(manufactured.getName());
    // The element payload must be non-null and of the requested String type
    podamValidationSteps.theObjectShouldNotBeNull(manufactured.getValue());
    podamValidationSteps.theTwoObjectsShouldBeEqual(String.class, manufactured.getValue().getClass());
}
@Test
@Title("Podam should be able to create instances of JAXBElements declared as instance variables in a POJO")
public void podamShouldCreateInstancesOfJAXBElementsDeclaredAsInstanceVariablesInAPojo() throws Exception {
    PodamFactory factory = podamFactorySteps.givenAStandardPodamFactory();
    JAXBElementPojo<?> manufactured = podamInvocationSteps.whenIInvokeTheFactoryForGenericTypeWithSpecificType(
            JAXBElementPojo.class, factory, String.class);
    podamValidationSteps.theObjectShouldNotBeNull(manufactured);
    // The wrapped JAXBElement must be filled, and its payload typed as String
    podamValidationSteps.theObjectShouldNotBeNull(manufactured.getValue());
    podamValidationSteps.theObjectShouldNotBeNull(manufactured.getValue().getName());
    podamValidationSteps.theObjectShouldNotBeNull(manufactured.getValue().getValue());
    podamValidationSteps.theTwoObjectsShouldBeEqual(String.class, manufactured.getValue().getValue().getClass());
}
@Test
@Title("Podam should manufacture package private POJOs")
public void podamShouldManufacturePackagePrivatePojos() throws Exception {
    PodamFactory factory = podamFactorySteps.givenAStandardPodamFactory();
    // Visibility of the POJO class must not prevent manufacturing
    PackagePrivatePojo manufactured = podamInvocationSteps.whenIInvokeTheFactoryForClass(
            PackagePrivatePojo.class, factory);
    podamValidationSteps.theObjectShouldNotBeNull(manufactured);
    podamValidationSteps.theObjectShouldNotBeNull(manufactured.getValue());
}
@Test
@Title("Podam should create instances of POJOs extending generic classes")
public void podamShouldCreateInstancesOfPojosExtendingGenericClasses() throws Exception {
    PodamFactory factory = podamFactorySteps.givenAStandardPodamFactory();
    TypedClassPojo2 manufactured = podamInvocationSteps.whenIInvokeTheFactoryForClass(TypedClassPojo2.class, factory);
    podamValidationSteps.theObjectShouldNotBeNull(manufactured);
    // The type variable inherited from the generic parent resolves to String
    podamValidationSteps.theObjectShouldNotBeNull(manufactured.getTypedValue());
    podamValidationSteps.theTwoObjectsShouldBeEqual(String.class, manufactured.getTypedValue().getClass());
    podamValidationSteps.theCollectionShouldNotBeNullOrEmptyAndContainElementsOfType(
            manufactured.getTypedList(), String.class);
}
@Test
@Title("Podam should correctly handle generic collections in constructor with memoization disabled")
public void podamShouldHandleGenericCollectionsInConstructorWithMemoizationDisabled() throws Exception {
    PodamFactory factory = podamFactorySteps.givenAStandardPodamFactory();
    GenericCollectionsConstructorPojo manufactured =
            podamInvocationSteps.whenIInvokeTheFactoryForClass(GenericCollectionsConstructorPojo.class, factory);
    podamValidationSteps.theObjectShouldNotBeNull(manufactured);
    // Each constructor-injected list must keep its own declared element type
    podamValidationSteps.theCollectionShouldNotBeNullOrEmptyAndContainElementsOfType(manufactured.getList1(), Long.class);
    podamValidationSteps.theCollectionShouldNotBeNullOrEmptyAndContainElementsOfType(manufactured.getList2(), String.class);
    podamValidationSteps.theCollectionShouldNotBeNullOrEmptyAndContainElementsOfType(manufactured.getList3(), Integer.class);
}
@Test
@Title("Podam should correctly handle generic collections in constructor with memoization enabled")
public void podamShouldHandleGenericCollectionsInConstructorWithMemoizationEnabled() throws Exception {
    // Memoization must not make the three differently-typed lists share a value
    PodamFactory factory = podamFactorySteps.givenAStandardPodamFactoryWithMemoizationEnabled();
    GenericCollectionsConstructorPojo manufactured =
            podamInvocationSteps.whenIInvokeTheFactoryForClass(GenericCollectionsConstructorPojo.class, factory);
    podamValidationSteps.theObjectShouldNotBeNull(manufactured);
    podamValidationSteps.theCollectionShouldNotBeNullOrEmptyAndContainElementsOfType(manufactured.getList1(), Long.class);
    podamValidationSteps.theCollectionShouldNotBeNullOrEmptyAndContainElementsOfType(manufactured.getList2(), String.class);
    podamValidationSteps.theCollectionShouldNotBeNullOrEmptyAndContainElementsOfType(manufactured.getList3(), Integer.class);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.backend.hadoop.executionengine.tez.plan.operator;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.WritableComparator;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.JobControlCompiler;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.POStatus;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.Result;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.AccumulativeTupleBuffer;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.CombinerPackager;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.LitePackager;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPackage;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.Packager;
import org.apache.pig.backend.hadoop.executionengine.tez.runtime.TezInput;
import org.apache.pig.backend.hadoop.executionengine.util.AccumulatorOptimizerUtil;
import org.apache.pig.data.AccumulativeBag;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.InternalCachedBag;
import org.apache.pig.data.ReadOnceBag;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.io.NullableTuple;
import org.apache.pig.impl.io.PigNullableWritable;
import org.apache.tez.runtime.api.LogicalInput;
import org.apache.tez.runtime.library.api.KeyValuesReader;
import org.apache.tez.runtime.library.common.ConfigUtils;
/**
 * Shuffle-input loader for Pig on Tez. Reads grouped key/value records from
 * one or more Tez {@link LogicalInput}s, selects the smallest current key
 * across all live readers with the configured comparator, and hands the
 * per-input bags to the attached {@link Packager} (inherited from
 * {@link POPackage}) to produce output tuples.
 */
public class POShuffleTezLoad extends POPackage implements TezInput {
private static final long serialVersionUID = 1L;
private static final Log LOG = LogFactory.getLog(POShuffleTezLoad.class);
// Names of the Tez inputs feeding this operator; may contain repeated
// entries (see comments in attachInputs about unions within a Split).
protected List<String> inputKeys = new ArrayList<String>();
private boolean isSkewedJoin = false;
// Runtime state rebuilt in attachInputs(); transient, not part of the serialized plan.
private transient List<LogicalInput> inputs;
private transient List<KeyValuesReader> readers;
// Number of distinct physical Tez inputs; can differ from numInputs (see attachInputs).
private transient int numTezInputs;
// finished[i]: reader i is exhausted. readOnce[i]: bag i may be consumed only once.
private transient boolean[] finished;
private transient boolean[] readOnce;
// Full key ordering comparator and the secondary-sort grouping comparator.
private transient WritableComparator comparator = null;
private transient WritableComparator groupingComparator = null;
private transient Configuration conf;
private transient int accumulativeBatchSize;
// True when a single input feeds a Combiner/Lite/Bloom packager, allowing a
// streaming TezReadOnceBag instead of materializing values into cached bags.
private transient boolean readOnceOneBag;
/** Copy-constructs from a compiled {@link POPackage} operator. */
public POShuffleTezLoad(POPackage pack) {
super(pack);
}
/** Returns the names of all Tez inputs this operator reads (may contain duplicates). */
@Override
public String[] getTezInputs() {
return inputKeys.toArray(new String[inputKeys.size()]);
}
/**
 * Replaces every occurrence of {@code oldInputKey} with {@code newInputKey};
 * the loop re-adds one new key per removed occurrence to preserve multiplicity.
 */
@Override
public void replaceInput(String oldInputKey, String newInputKey) {
while (inputKeys.remove(oldInputKey)) {
inputKeys.add(newInputKey);
}
}
/** No-op: this operator never skips any of its inputs. */
@Override
public void addInputsToSkip(Set<String> inputsToSkip) {
}
/**
 * Binds the Tez {@link LogicalInput}s and their readers, initializes the
 * comparators from {@code conf}, and advances each reader to its first key
 * (recording exhausted readers in {@code finished}).
 *
 * @throws ExecException if a reader cannot be obtained or advanced
 */
@Override
public void attachInputs(Map<String, LogicalInput> inputs, Configuration conf)
throws ExecException {
this.conf = conf;
this.inputs = new ArrayList<LogicalInput>();
this.readers = new ArrayList<KeyValuesReader>();
this.comparator = (WritableComparator) ConfigUtils.getIntermediateInputKeyComparator(conf);
this.groupingComparator = (WritableComparator) ConfigUtils.getInputKeySecondaryGroupingComparator(conf);
this.accumulativeBatchSize = AccumulatorOptimizerUtil.getAccumulativeBatchSize();
try {
for (String inputKey : inputKeys) {
LogicalInput input = inputs.get(inputKey);
// 1) Case of self join/cogroup/cross with Split - numTezInputs < numInputs/inputKeys
// - Same TezInput will contain multiple indexes in case of join
// 2) data unioned within Split - inputKeys > numInputs/numTezInputs
// - Input key will be repeated, but index would be same within a TezInput
if (!this.inputs.contains(input)) {
this.inputs.add(input);
KeyValuesReader reader = (KeyValuesReader)input.getReader();
this.readers.add(reader);
LOG.info("Attached input from vertex " + inputKey
+ " : input=" + input + ", reader=" + reader);
}
}
// Logical input count comes from the packager's key info, not from inputKeys.
this.numInputs = this.pkgr.getKeyInfo().size();
this.numTezInputs = this.inputs.size();
readOnce = new boolean[numInputs];
for (int i = 0; i < numInputs; i++) {
readOnce[i] = false;
}
// Prime every reader; a reader with no records at all is finished immediately.
finished = new boolean[numTezInputs];
for (int i = 0; i < numTezInputs; i++) {
finished[i] = !readers.get(i).next();
}
this.readOnceOneBag = (numInputs == 1)
&& (pkgr instanceof CombinerPackager
|| pkgr instanceof LitePackager || pkgr instanceof BloomPackager);
if (readOnceOneBag) {
readOnce[0] = true;
}
} catch (Exception e) {
throw new ExecException(e);
}
}
/**
 * Produces the next packaged tuple. Repeatedly: picks the minimum current
 * key across unfinished readers, gathers all values grouping-equal to that
 * key into per-index bags (cached, accumulative, or read-once depending on
 * configuration), attaches them to the packager and asks it for output.
 * Returns EOP only when every reader is exhausted.
 */
@Override
public Result getNextTuple() throws ExecException {
Result res = pkgr.getNext();
TezAccumulativeTupleBuffer buffer = null;
if (isAccumulative()) {
buffer = new TezAccumulativeTupleBuffer(accumulativeBatchSize);
}
while (res.returnStatus == POStatus.STATUS_EOP) {
boolean hasData = false;
Object cur = null;
PigNullableWritable min = null;
try {
if (numTezInputs == 1) {
if (!finished[0]) {
hasData = true;
cur = readers.get(0).getCurrentKey();
// Just move to the next key without comparison
min = ((PigNullableWritable)cur).clone();
}
} else {
for (int i = 0; i < numTezInputs; i++) {
if (!finished[i]) {
hasData = true;
cur = readers.get(i).getCurrentKey();
// TODO: PIG-4652 should compare key bytes instead
// of deserialized objects when using BytesComparator
// for faster comparison
if (min == null || comparator.compare(min, cur) > 0) {
//Not a deep clone. Writable is referenced.
min = ((PigNullableWritable)cur).clone();
}
}
}
}
} catch (Exception e) {
throw new ExecException(e);
}
if (!hasData) {
// For certain operators (such as STREAM), we could still have some work
// to do even after seeing the last input. These operators set a flag that
// says all input has been sent and to run the pipeline one more time.
if (Boolean.valueOf(conf.get(JobControlCompiler.END_OF_INP_IN_MAP, "false"))) {
this.parentPlan.endOfAllInput = true;
}
return RESULT_EOP;
}
key = pkgr.getKey(min);
keyWritable = min;
try {
DataBag[] bags = new DataBag[numInputs];
if (isAccumulative()) {
// Accumulative mode: bags lazily pull batches from the shared buffer.
buffer.setCurrentKey(min);
for (int i = 0; i < numInputs; i++) {
bags[i] = new AccumulativeBag(buffer, i);
}
} else {
if (readOnceOneBag) {
// Single-input Combiner/Lite/Bloom packager: stream values directly.
bags[0] = new TezReadOnceBag(pkgr, min);
} else {
for (int i = 0; i < numInputs; i++) {
bags[i] = new InternalCachedBag(numInputs);
}
if (numTezInputs == 1) {
do {
Iterable<Object> vals = readers.get(0).getCurrentValues();
for (Object val : vals) {
NullableTuple nTup = (NullableTuple) val;
int index = nTup.getIndex();
Tuple tup = pkgr.getValueTuple(keyWritable, nTup, index);
bags[index].add(tup);
}
finished[0] = !readers.get(0).next();
if (finished[0]) {
break;
}
cur = readers.get(0).getCurrentKey();
} while (groupingComparator.compare(min, cur) == 0); // We need to loop in case of Grouping Comparators
} else {
for (int i = 0; i < numTezInputs; i++) {
if (!finished[i]) {
cur = readers.get(i).getCurrentKey();
// We need to loop in case of Grouping Comparators
while (groupingComparator.compare(min, cur) == 0) {
Iterable<Object> vals = readers.get(i).getCurrentValues();
for (Object val : vals) {
NullableTuple nTup = (NullableTuple) val;
int index = nTup.getIndex();
Tuple tup = pkgr.getValueTuple(keyWritable, nTup, index);
bags[index].add(tup);
}
finished[i] = !readers.get(i).next();
if (finished[i]) {
break;
}
cur = readers.get(i).getCurrentKey();
}
}
}
}
}
}
pkgr.attachInput(key, bags, readOnce);
res = pkgr.getNext();
} catch (IOException e) {
throw new ExecException(e);
}
}
return res;
}
/** Replaces the list of Tez input names (called by the plan compiler). */
public void setInputKeys(List<String> inputKeys) {
this.inputKeys = inputKeys;
}
/** Adds one Tez input name. */
public void addInputKey(String inputKey) {
inputKeys.add(inputKey);
}
public void setSkewedJoins(boolean isSkewedJoin) {
this.isSkewedJoin = isSkewedJoin;
}
public boolean isSkewedJoin() {
return isSkewedJoin;
}
@Override
public boolean supportsMultipleInputs() {
return true;
}
/**
 * Batch buffer backing {@link AccumulativeBag}s: pulls values for the
 * current key from the enclosing operator's readers in batches of
 * {@code batchSize}, so accumulator UDFs can process a key incrementally.
 */
private class TezAccumulativeTupleBuffer implements AccumulativeTupleBuffer {
private int batchSize;
// One tuple list per logical input index; refilled on each nextBatch().
private List<Tuple>[] bags;
// Key currently being accumulated; values grouping-equal to it are pulled.
private PigNullableWritable min;
private boolean clearedCurrent = true;
@SuppressWarnings("unchecked")
public TezAccumulativeTupleBuffer(int batchSize) {
this.batchSize = batchSize;
this.bags = new List[numInputs];
for (int i = 0; i < numInputs; i++) {
this.bags[i] = new ArrayList<Tuple>(batchSize);
}
}
/** Switches the buffer to a new key, draining leftovers of the previous one. */
public void setCurrentKey(PigNullableWritable curKey) {
if (!clearedCurrent) {
// If buffer.clear() is not called from POForEach ensure it is called here.
clear();
}
this.min = curKey;
clearedCurrent = false;
}
/** True if any unfinished reader still holds values grouping-equal to the current key. */
@Override
public boolean hasNextBatch() {
Object cur = null;
try {
for (int i = 0; i < numTezInputs; i++) {
if (!finished[i]) {
cur = readers.get(i).getCurrentKey();
if (groupingComparator.compare(min, cur) == 0) {
return true;
}
}
}
} catch (IOException e) {
throw new RuntimeException(
"Error while checking for next Accumulator batch", e);
}
return false;
}
/** Refills the per-index lists with up to {@code batchSize} tuples for the current key. */
@Override
public void nextBatch() throws IOException {
Object cur = null;
for (int i = 0; i < bags.length; i++) {
bags[i].clear();
}
try {
for (int i = 0; i < numTezInputs; i++) {
if (!finished[i]) {
cur = readers.get(i).getCurrentKey();
int batchCount = 0;
while (groupingComparator.compare(min, cur) == 0) {
Iterator<Object> iter = readers.get(i).getCurrentValues().iterator();
while (iter.hasNext() && batchCount < batchSize) {
NullableTuple nTup = (NullableTuple) iter.next();
int index = nTup.getIndex();
bags[index].add(pkgr.getValueTuple(keyWritable, nTup, index));
batchCount++;
}
if (batchCount == batchSize) {
if (!iter.hasNext()) {
// Move to next key and update finished
finished[i] = !readers.get(i).next();
}
break;
}
finished[i] = !readers.get(i).next();
if (finished[i]) {
break;
}
cur = readers.get(i).getCurrentKey();
}
}
}
} catch (IOException e) {
throw new RuntimeException(
"Error while reading next Accumulator batch", e);
}
}
/** Empties the batch lists and advances readers past any unread values of the current key. */
@Override
public void clear() {
for (int i = 0; i < bags.length; i++) {
bags[i].clear();
}
// Skip through current keys and its values not processed because of
// early termination of accumulator
Object cur = null;
try {
for (int i = 0; i < numTezInputs; i++) {
if (!finished[i]) {
cur = readers.get(i).getCurrentKey();
while (groupingComparator.compare(min, cur) == 0) {
finished[i] = !readers.get(i).next();
if (finished[i]) {
break;
}
cur = readers.get(i).getCurrentKey();
}
}
}
} catch (IOException e) {
throw new RuntimeException(
"Error while cleaning up for next Accumulator batch", e);
}
clearedCurrent = true;
}
@Override
public Iterator<Tuple> getTuples(int index) {
return bags[index].iterator();
}
//TODO: illustratorMarkup
}
/**
 * Streaming bag over reader 0's values for the current key: values are
 * deserialized on demand and never materialized, so the bag can be read once.
 */
private class TezReadOnceBag extends ReadOnceBag {
private static final long serialVersionUID = 1L;
private Iterator<Object> iter;
public TezReadOnceBag(Packager pkgr,
PigNullableWritable currentKey) throws IOException {
this.pkgr = pkgr;
this.keyWritable = currentKey;
this.iter = readers.get(0).getCurrentValues().iterator();
}
@Override
public Iterator<Tuple> iterator() {
return new TezReadOnceBagIterator();
}
private class TezReadOnceBagIterator implements Iterator<Tuple> {
/** Advances across reader keys while they compare grouping-equal to the current key. */
@Override
public boolean hasNext() {
if (iter.hasNext()) {
return true;
} else {
try {
finished[0] = !readers.get(0).next();
if (finished[0]) {
return false;
}
// Currently combiner is not being applied when secondary key(grouping comparator) is used
// But might change in future. So check if the next key is same and return its values
Object cur = readers.get(0).getCurrentKey();
if (groupingComparator.compare(keyWritable, cur) == 0) {
iter = readers.get(0).getCurrentValues().iterator();
// Key should at least have one value. But doing a check just for safety
if (iter.hasNext()) {
return true;
} else {
throw new RuntimeException("Unexpected. Key " + keyWritable + " does not have any values");
}
}
return false;
} catch (IOException e) {
throw new RuntimeException("ReadOnceBag failed to get value tuple : ", e);
}
}
}
@Override
public Tuple next() {
NullableTuple ntup = (NullableTuple) iter.next();
int index = ntup.getIndex();
Tuple ret = null;
try {
ret = pkgr.getValueTuple(keyWritable, ntup, index);
} catch (ExecException e) {
throw new RuntimeException("ReadOnceBag failed to get value tuple : ", e);
}
return ret;
}
@Override
public void remove() {
throw new RuntimeException("ReadOnceBag.iterator().remove() is not allowed");
}
}
}
}
| |
/**
* Copyright 2007-2015, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.gateway.transport.tcp.specification;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.kaazing.test.util.ITUtil.createRuleChain;
import java.nio.ByteBuffer;
import java.util.concurrent.CountDownLatch;
import org.apache.mina.core.buffer.IoBuffer;
import org.apache.mina.core.future.ConnectFuture;
import org.apache.mina.core.session.IoSession;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import org.kaazing.gateway.transport.IoHandlerAdapter;
import org.kaazing.k3po.junit.annotation.Specification;
import org.kaazing.k3po.junit.rules.K3poRule;
import org.kaazing.mina.core.buffer.IoBufferAllocatorEx;
import org.kaazing.mina.core.buffer.IoBufferEx;
import org.kaazing.mina.core.session.IoSessionEx;
/**
* RFC-793
*/
public class TcpConnectorIT {

    private final K3poRule k3po = new K3poRule().setScriptRoot("org/kaazing/specification/tcp/rfc793");

    private TcpConnectorRule connector = new TcpConnectorRule();

    // Run the connector rule around the k3po rule so scripts see a live transport.
    @Rule
    public TestRule chain = createRuleChain(connector, k3po);

    /**
     * Connects to the scripted server at tcp://127.0.0.1:8080 with the given
     * handler and fails the test if the connection is not established within
     * one second.
     */
    private void connectTo8080(IoHandlerAdapter<IoSessionEx> handler) throws InterruptedException {
        final String connectURIString = "tcp://127.0.0.1:8080";
        ConnectFuture x = connector.connect(connectURIString, handler, null);
        x.await(1, SECONDS);
        Assert.assertTrue("Failed to connect, exception " + x.getException(), x.isConnected());
    }

    /**
     * Writes {@code message} to {@code session} wrapped as a shared buffer.
     * NOTE(review): allocates message.length() bytes and uses the platform
     * charset — fine for the ASCII payloads used here, would truncate
     * multi-byte characters.
     */
    private void writeStringMessageToSession(String message, IoSession session) {
        ByteBuffer data = ByteBuffer.allocate(message.length());
        data.put(message.getBytes());
        data.flip();
        IoBufferAllocatorEx<?> allocator = ((IoSessionEx) session).getBufferAllocator();
        session.write(allocator.wrap(data.duplicate(), IoBufferEx.FLAG_SHARED));
    }

    /** Connection only; the script verifies the TCP handshake. */
    @Test
    @Specification({
        "establish.connection/server"
    })
    public void shouldEstablishConnection() throws Exception {
        k3po.start();
        k3po.awaitBarrier("BOUND");
        connectTo8080(new IoHandlerAdapter<IoSessionEx>());
        k3po.finish();
    }

    /** The script sends data; the connector only needs to accept it. */
    @Test
    @Specification({
        "server.sent.data/server"
    })
    public void shouldReceiveServerSentData() throws Exception {
        k3po.start();
        k3po.awaitBarrier("BOUND");
        connectTo8080(new IoHandlerAdapter<IoSessionEx>());
        k3po.finish();
    }

    /** Client writes "client data" on open; the script asserts receipt. */
    @Test
    @Specification({
        "client.sent.data/server"
    })
    public void shouldReceiveClientSentData() throws Exception {
        k3po.start();
        k3po.awaitBarrier("BOUND");
        connectTo8080(new IoHandlerAdapter<IoSessionEx>(){
            @Override
            protected void doSessionOpened(IoSessionEx session) throws Exception {
                ByteBuffer data = ByteBuffer.allocate(20);
                String str = "client data";
                data.put(str.getBytes());
                data.flip();
                IoBufferAllocatorEx<?> allocator = session.getBufferAllocator();
                session.write(allocator.wrap(data.duplicate(), IoBufferEx.FLAG_SHARED));
            }
        });
        k3po.finish();
    }

    /** Two request/response exchanges: client sends, server echoes per the script. */
    @Test
    @Specification({
        "echo.data/server"
    })
    public void shouldEchoData() throws Exception {
        k3po.start();
        k3po.awaitBarrier("BOUND");
        connectTo8080(new IoHandlerAdapter<IoSessionEx>(){
            private int counter = 1;
            private DataMatcher dataMatch = new DataMatcher("server data " + counter);
            @Override
            protected void doSessionOpened(IoSessionEx session) throws Exception {
                writeStringMessageToSession("client data " + counter, session);
            }
            @Override
            protected void doMessageReceived(IoSessionEx session, Object message) throws Exception {
                String decoded = new String(((IoBuffer) message).array());
                // Server responses may arrive fragmented; only advance once complete.
                if (dataMatch.addFragment(decoded) && counter < 2) {
                    counter++;
                    writeStringMessageToSession("client data " + counter, session);
                    dataMatch = new DataMatcher("server data " + counter);
                }
            }
        });
        k3po.finish();
    }

    /** Server closes per the script; the client must observe the session close. */
    @Test
    @Specification({
        "server.close/server"
    })
    public void shouldHandleServerClose() throws Exception {
        k3po.start();
        k3po.awaitBarrier("BOUND");
        CountDownLatch closed = new CountDownLatch(1);
        connectTo8080(new IoHandlerAdapter<IoSessionEx>() {
            @Override
            protected void doSessionClosed(IoSessionEx session) throws Exception {
                closed.countDown();
            }
        });
        k3po.notifyBarrier("CLOSEABLE");
        // FIX: the await result used to be ignored, so this test passed even
        // when the session never closed. Assert the close actually happened.
        Assert.assertTrue("Session was not closed within 5 seconds",
                closed.await(5, SECONDS));
        k3po.finish();
    }

    /** Client closes immediately on open; the script verifies the FIN exchange. */
    @Test
    @Specification({
        "client.close/server"
    })
    public void shouldIssueClientClose() throws Exception {
        k3po.start();
        k3po.awaitBarrier("BOUND");
        connectTo8080(new IoHandlerAdapter<IoSessionEx>(){
            @Override
            protected void doSessionOpened(IoSessionEx session) throws Exception {
                session.close(true);
            }
        });
        k3po.finish();
    }

    /** Three concurrent connections share one handler; state is kept per-session. */
    @Test
    @Specification({
        "concurrent.connections/server"
    })
    public void shouldEstablishConcurrentConnections() throws Exception {
        IoHandlerAdapter<IoSessionEx> adapter = new IoHandlerAdapter<IoSessionEx>(){
            @Override
            protected void doSessionOpened(IoSessionEx session) throws Exception {
                // Per-session matcher stored as a session attribute so the
                // shared handler stays stateless.
                session.setAttribute("dataMatch", new DataMatcher("Hello"));
                writeStringMessageToSession("Hello", session);
            }
            @Override
            protected void doMessageReceived(IoSessionEx session, Object message) throws Exception {
                String decoded = new String(((IoBuffer) message).array());
                DataMatcher dataMatch = (DataMatcher) session.getAttribute("dataMatch");
                if (dataMatch.addFragment(decoded)) {
                    if (dataMatch.target.equals("Hello")) {
                        dataMatch = new DataMatcher("Goodbye");
                        writeStringMessageToSession("Goodbye", session);
                    } else {
                        session.close(true);
                    }
                    session.setAttribute("dataMatch", dataMatch);
                }
            }
        };
        k3po.start();
        k3po.awaitBarrier("BOUND");
        connectTo8080(adapter);
        connectTo8080(adapter);
        connectTo8080(adapter);
        k3po.finish();
    }
}
| |
/*
* Copyright 2011-2013, by Vladimir Kostyukov and Contributors.
*
* This file is part of la4j project (http://la4j.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributor(s): -
*
*/
package org.la4j.io;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import junit.framework.TestCase;
import org.la4j.factory.Basic1DFactory;
import org.la4j.factory.Basic2DFactory;
import org.la4j.factory.CCSFactory;
import org.la4j.factory.CRSFactory;
import org.la4j.factory.Factory;
import org.la4j.matrix.Matrix;
import org.la4j.vector.Vector;
public abstract class AbstractStreamTest extends TestCase {
public Factory[] factories() {
return new Factory[] {
new Basic1DFactory(),
new Basic2DFactory(),
new CRSFactory(),
new CCSFactory()
};
}
public void testMatrix_3x3() throws IOException {
for (Factory factory: factories()) {
Matrix a = factory.createMatrix(new double[][] {
{ 1.0, 0.0, 3.0 },
{ 0.0, 5.0, 0.0 },
{ 7.0, 0.0, 9.0 }
});
ByteArrayOutputStream bos = new ByteArrayOutputStream();
MatrixStream out = (MatrixStream) stream(bos);
out.writeMatrix(a);
ByteArrayInputStream bis =
new ByteArrayInputStream(bos.toByteArray());
MatrixStream in = (MatrixStream) stream(bis);
Matrix b = in.readMatrix(factory);
assertEquals(a, b);
}
}
public void testMatrix_2x5() throws IOException {
for (Factory factory: factories()) {
Matrix a = factory.createMatrix(new double[][] {
{ 1.0, 0.0, 3.0, 0.0, 5.0 },
{ 0.0, 7.0, 0.0, 9.0, 0.0 },
});
ByteArrayOutputStream bos = new ByteArrayOutputStream();
MatrixStream out = (MatrixStream) stream(bos);
out.writeMatrix(a);
ByteArrayInputStream bis =
new ByteArrayInputStream(bos.toByteArray());
MatrixStream in = (MatrixStream) stream(bis);
Matrix b = in.readMatrix(factory);
assertEquals(a, b);
}
}
public void testMatrix_3x1() throws IOException {
for (Factory factory: factories()) {
Matrix a = factory.createMatrix(new double[][] {
{ 1.0 },
{ 0.0 },
{ 3.0 }
});
ByteArrayOutputStream bos = new ByteArrayOutputStream();
MatrixStream out = (MatrixStream) stream(bos);
out.writeMatrix(a);
ByteArrayInputStream bis =
new ByteArrayInputStream(bos.toByteArray());
MatrixStream in = (MatrixStream) stream(bis);
Matrix b = in.readMatrix(factory);
assertEquals(a, b);
}
}
public void testMatrix_1x1() throws IOException {
for (Factory factory: factories()) {
Matrix a = factory.createMatrix(new double[][] {{ 1.0 }});
ByteArrayOutputStream bos = new ByteArrayOutputStream();
MatrixStream out = (MatrixStream) stream(bos);
out.writeMatrix(a);
ByteArrayInputStream bis =
new ByteArrayInputStream(bos.toByteArray());
MatrixStream in = (MatrixStream) stream(bis);
Matrix b = in.readMatrix(factory);
assertEquals(a, b);
}
}
public void testMatrix_0x0() throws IOException {
for (Factory factory: factories()) {
Matrix a = factory.createMatrix(new double[][] {{}});
ByteArrayOutputStream bos = new ByteArrayOutputStream();
MatrixStream out = (MatrixStream) stream(bos);
out.writeMatrix(a);
ByteArrayInputStream bis =
new ByteArrayInputStream(bos.toByteArray());
MatrixStream in = (MatrixStream) stream(bis);
Matrix b = in.readMatrix(factory);
assertEquals(a, b);
}
}
public void testVector_3() throws IOException {
    // Round-trip a 3-element vector (with a zero entry, exercising sparse
    // codecs) through each factory's stream implementation.
    for (Factory factory: factories()) {
        Vector original = factory.createVector(new double[] { 1.0, 0.0, 3.0 });
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        ((VectorStream) stream(sink)).writeVector(original);
        ByteArrayInputStream source = new ByteArrayInputStream(sink.toByteArray());
        Vector restored = ((VectorStream) stream(source)).readVector(factory);
        assertEquals(original, restored);
    }
}
public void testVector_1() throws IOException {
    // Round-trip the smallest non-empty vector (one element).
    for (Factory factory: factories()) {
        Vector original = factory.createVector(new double[] { 1.0 });
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        ((VectorStream) stream(sink)).writeVector(original);
        ByteArrayInputStream source = new ByteArrayInputStream(sink.toByteArray());
        Vector restored = ((VectorStream) stream(source)).readVector(factory);
        assertEquals(original, restored);
    }
}
public void testVector_0() throws IOException {
    // Edge case: a zero-length vector must also round-trip cleanly.
    for (Factory factory: factories()) {
        Vector original = factory.createVector(new double[] {});
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        ((VectorStream) stream(sink)).writeVector(original);
        ByteArrayInputStream source = new ByteArrayInputStream(sink.toByteArray());
        Vector restored = ((VectorStream) stream(source)).readVector(factory);
        assertEquals(original, restored);
    }
}
/** Creates the stream codec under test, bound for reading from {@code in}. */
public abstract AbstractStream stream(InputStream in);
/** Creates the stream codec under test, bound for writing to {@code out}. */
public abstract AbstractStream stream(OutputStream out);
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.om.types;
import static org.apache.asterix.om.types.ATypeTag.VALUE_TYPE_MAPPING;
import org.apache.asterix.om.base.IAObject;
import org.apache.hyracks.api.io.IJsonSerializable;
import org.apache.hyracks.api.io.IPersistedResourceRegistry;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
public abstract class BuiltinType implements IAType {
private static final long serialVersionUID = 1L;
private static final String TAG_FIELD = "tag";
/**
 * Convenience base for built-in types whose constructor function name is
 * simply the lower-cased type tag (e.g. {@code TINYINT} -> {@code "tinyint"}).
 */
public abstract static class LowerCaseConstructorType extends BuiltinType {
    private static final long serialVersionUID = 1L;
    @Override
    public String getConstructor() {
        return getTypeTag().toString().toLowerCase();
    }
}
/**
 * The meta type: the type of all types. Every builtin type reports this as
 * its own type via {@link #getType()}.
 */
public static final BuiltinType ALL_TYPE = new BuiltinType() {
    private static final long serialVersionUID = 1L;
    @Override
    public ATypeTag getTypeTag() {
        return ATypeTag.TYPE;
    }
    @Override
    public String getDisplayName() {
        return "AllType";
    }
    @Override
    public String getTypeName() {
        return "ALL_TYPE";
    }
    @Override
    public String getConstructor() {
        // The meta type has no value constructor function.
        return null;
    }
    @Override
    public JsonNode toJson(IPersistedResourceRegistry registry) {
        return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
    }
    @Override
    public ObjectNode toJSON() {
        ObjectMapper om = new ObjectMapper();
        ObjectNode type = om.createObjectNode();
        type.put("type", "ALL_TYPE");
        return type;
    }
};
public static final BuiltinType AINT8 = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "AInt8";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.TINYINT;
}
@Override
public String getTypeName() {
return "int8";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "AInt8");
return type;
}
};
public static final BuiltinType AINT16 = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "AInt16";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.SMALLINT;
}
@Override
public String getTypeName() {
return "int16";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "AInt16");
return type;
}
};
public static final BuiltinType AINT32 = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "AInt32";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.INTEGER;
}
@Override
public String getTypeName() {
return "int32";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "AInt32");
return type;
}
};
public static final BuiltinType AINT64 = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public ATypeTag getTypeTag() {
return ATypeTag.BIGINT;
}
@Override
public String getDisplayName() {
return "AInt64";
}
@Override
public String getTypeName() {
return "int64";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "AInt64");
return type;
}
};
public static final BuiltinType ABINARY = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "ABinary";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.BINARY;
}
@Override
public String getTypeName() {
return "binary";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "ABinary");
return type;
}
};
public static final BuiltinType AFLOAT = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "AFloat";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.FLOAT;
}
@Override
public String getTypeName() {
return "float";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "AFloat");
return type;
}
};
public static final BuiltinType ADOUBLE = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "ADouble";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.DOUBLE;
}
@Override
public String getTypeName() {
return "double";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "ADouble");
return type;
}
};
public static final BuiltinType ASTRING = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "AString";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.STRING;
}
@Override
public String getTypeName() {
return "string";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "AString");
return type;
}
};
public static final BuiltinType AMISSING = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "Missing";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.MISSING;
}
@Override
public String getTypeName() {
return "missing";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "AMISSING");
return type;
}
};
public static final BuiltinType ANULL = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "Null";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.NULL;
}
@Override
public String getTypeName() {
return "null";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "ANULL");
return type;
}
};
public static final BuiltinType ABOOLEAN = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "ABoolean";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.BOOLEAN;
}
@Override
public String getTypeName() {
return "boolean";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "ABoolean");
return type;
}
};
public static final BuiltinType ATIME = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "ATime";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.TIME;
}
@Override
public String getTypeName() {
return "time";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "ATime");
return type;
}
};
public static final BuiltinType ADATE = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "ADate";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.DATE;
}
@Override
public String getTypeName() {
return "date";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "ADate");
return type;
}
};
public static final BuiltinType ADATETIME = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "ADateTime";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.DATETIME;
}
@Override
public String getTypeName() {
return "datetime";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "ADateTime");
return type;
}
};
public static final BuiltinType ADURATION = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "ADuration";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.DURATION;
}
@Override
public String getTypeName() {
return "duration";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "ADuration");
return type;
}
};
public static final BuiltinType AYEARMONTHDURATION = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "AYearMonthDuration";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.YEARMONTHDURATION;
}
@Override
public String getTypeName() {
return "year-month-duration";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "AYearMonthDuration");
return type;
}
};
public static final BuiltinType ADAYTIMEDURATION = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "ADayTimeDuration";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.DAYTIMEDURATION;
}
@Override
public String getTypeName() {
return "day-time-duration";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "ADayTimeDuration");
return type;
}
};
public static final BuiltinType AINTERVAL = new LowerCaseConstructorType() {
    private static final long serialVersionUID = 1L;
    @Override
    public String getDisplayName() {
        return "AInterval";
    }
    @Override
    public ATypeTag getTypeTag() {
        return ATypeTag.INTERVAL;
    }
    @Override
    public String getTypeName() {
        return "interval";
    }
    @Override
    public JsonNode toJson(IPersistedResourceRegistry registry) {
        return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
    }
    @Override
    public ObjectNode toJSON() {
        // Fix: this was the only builtin type whose toJSON() returned null,
        // an NPE hazard for any caller that serializes a type model. Emit the
        // same {"type": <display name>} node every sibling constant produces.
        ObjectMapper om = new ObjectMapper();
        ObjectNode type = om.createObjectNode();
        type.put("type", "AInterval");
        return type;
    }
};
public static final BuiltinType APOINT = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public ATypeTag getTypeTag() {
return ATypeTag.POINT;
}
@Override
public String getDisplayName() {
return "APoint";
}
@Override
public String getTypeName() {
return "point";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "APoint");
return type;
}
};
public static final BuiltinType APOINT3D = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public ATypeTag getTypeTag() {
return ATypeTag.POINT3D;
}
@Override
public String getDisplayName() {
return "APoint3D";
}
@Override
public String getTypeName() {
return "point3d";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "APoint3D");
return type;
}
};
public static final BuiltinType ALINE = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "ALINE";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.LINE;
}
@Override
public String getTypeName() {
return "line";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "ALINE");
return type;
}
};
public static final BuiltinType APOLYGON = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "APOLYGON";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.POLYGON;
}
@Override
public String getTypeName() {
return "polygon";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "APOLYGON");
return type;
}
};
public static final BuiltinType AGEOMETRY = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "AGEOMETRY";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.GEOMETRY;
}
@Override
public String getTypeName() {
return "geometry";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectNode type = new ObjectMapper().createObjectNode();
type.put("type", "AGEOMETRY");
return type;
}
};
public static final BuiltinType ACIRCLE = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "ACIRCLE";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.CIRCLE;
}
@Override
public String getTypeName() {
return "circle";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "ACIRCLE");
return type;
}
};
public static final BuiltinType ARECTANGLE = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public String getDisplayName() {
return "ARECTANGLE";
}
@Override
public ATypeTag getTypeTag() {
return ATypeTag.RECTANGLE;
}
@Override
public String getTypeName() {
return "rectangle";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "ARECTANGLE");
return type;
}
};
public static final IAType ABITARRAY = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public ATypeTag getTypeTag() {
return ATypeTag.BITARRAY;
}
@Override
public String getDisplayName() {
return "ABitArray";
}
@Override
public String getTypeName() {
return "abitarray";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "ABitArray");
return type;
}
};
public static final BuiltinType AUUID = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@Override
public ATypeTag getTypeTag() {
return ATypeTag.UUID;
}
@Override
public String getDisplayName() {
return "UUID";
}
@Override
public String getTypeName() {
return "uuid";
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", getDisplayName());
return type;
}
};
public static final BuiltinType ANY = new BuiltinType() {
private static final long serialVersionUID = 1L;
@Override
public ATypeTag getTypeTag() {
return ATypeTag.ANY;
}
@Override
public String getTypeName() {
return "any";
}
@Override
public String getDisplayName() {
return "ANY";
}
@Override
public String getConstructor() {
return null;
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "ANY");
return type;
}
};
public static final BuiltinType SHORTWITHOUTTYPEINFO = new BuiltinType() {
private static final long serialVersionUID = 1L;
@Override
public ATypeTag getTypeTag() {
return ATypeTag.SHORTWITHOUTTYPEINFO;
}
@Override
public String getTypeName() {
return "shortwithouttypeinfo";
}
@Override
public String getDisplayName() {
return "SHORTWITHOUTTYPEINFO";
}
@Override
public String getConstructor() {
return null;
}
@Override
public JsonNode toJson(IPersistedResourceRegistry registry) {
return convertToJson(registry, getTypeTag().serialize(), serialVersionUID);
}
@Override
public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", "SHORTWITHOUTTYPEINFO");
return type;
}
};
/**
 * Name of this type's value-constructor function, or {@code null} for types
 * that cannot be constructed from a literal (e.g. ANY, ALL_TYPE).
 */
public abstract String getConstructor();
@Override
public IAType getType() {
    // The type of any builtin type is the meta type.
    return ALL_TYPE;
}
@Override
public String toString() {
    // A builtin type prints as its tag name, e.g. "BIGINT".
    return getTypeTag().toString();
}
@Override
public boolean deepEqual(IAObject obj) {
    // Two builtin types are equal iff they carry the same type tag; the tag
    // is an enum, so identity comparison is equivalent to equals().
    if (obj == this) {
        return true;
    }
    return obj instanceof BuiltinType && ((BuiltinType) obj).getTypeTag() == getTypeTag();
}
@Override
public boolean equals(Object object) {
    // Delegate to deepEqual so Object.equals and the IAObject contract agree.
    if (!(object instanceof IAObject)) {
        return false;
    }
    return deepEqual((IAObject) object);
}
@Override
public int hashCode() {
    // Consistent with equals/deepEqual: a builtin type's identity is its tag.
    return getTypeTag().hashCode();
}
@Override
public int hash() {
    // NOTE(review): getType() is ALL_TYPE for every builtin instance, so this
    // hashes the meta-type's tag -- every builtin type gets the same hash.
    // That is legal w.r.t. deepEqual (equal objects hash equal) but confirm
    // it is intended; getTypeTag().serialize() would discriminate per type.
    return getType().getTypeTag().serialize();
}
/**
 * Persists a builtin type as its registered class identifier plus the
 * serialized {@link ATypeTag} byte; {@link #fromJson} uses the tag to
 * restore the matching singleton.
 */
private static JsonNode convertToJson(IPersistedResourceRegistry registry, short tag, long version) {
    ObjectNode jsonNode = registry.getClassIdentifier(BuiltinType.class, version);
    jsonNode.put(TAG_FIELD, tag);
    return jsonNode;
}
/**
 * Reverse of {@code toJson}: maps the persisted type-tag byte back to the
 * corresponding builtin singleton. The registry parameter is unused here but
 * required by the {@link IJsonSerializable} factory-method convention.
 *
 * @throws UnsupportedOperationException if the tag belongs to a non-builtin type
 */
@SuppressWarnings("squid:S1172") // unused parameter
public static IJsonSerializable fromJson(IPersistedResourceRegistry registry, JsonNode json) {
    byte tag = (byte) json.get(TAG_FIELD).shortValue();
    ATypeTag typeTag = VALUE_TYPE_MAPPING[tag];
    switch (typeTag) {
        case TYPE:
            return ALL_TYPE;
        case TINYINT:
            return AINT8;
        case SMALLINT:
            return AINT16;
        case INTEGER:
            return AINT32;
        case BIGINT:
            return AINT64;
        case FLOAT:
            return AFLOAT;
        case DOUBLE:
            return ADOUBLE;
        case STRING:
            return ASTRING;
        case BINARY:
            return ABINARY;
        case MISSING:
            return AMISSING;
        case NULL:
            return ANULL;
        case BOOLEAN:
            return ABOOLEAN;
        case TIME:
            return ATIME;
        case DATE:
            return ADATE;
        case DATETIME:
            return ADATETIME;
        case DURATION:
            return ADURATION;
        case YEARMONTHDURATION:
            return AYEARMONTHDURATION;
        case DAYTIMEDURATION:
            return ADAYTIMEDURATION;
        case INTERVAL:
            return AINTERVAL;
        case POINT:
            return APOINT;
        case POINT3D:
            return APOINT3D;
        case LINE:
            return ALINE;
        case POLYGON:
            return APOLYGON;
        case GEOMETRY:
            return AGEOMETRY;
        case CIRCLE:
            return ACIRCLE;
        case RECTANGLE:
            return ARECTANGLE;
        case BITARRAY:
            return ABITARRAY;
        case UUID:
            return AUUID;
        case ANY:
            return ANY;
        case SHORTWITHOUTTYPEINFO:
            return SHORTWITHOUTTYPEINFO;
        default:
            throw new UnsupportedOperationException(typeTag.toString());
    }
}
}
| |
/*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.remote.work;
import com.thoughtworks.go.agent.testhelpers.FakeBuildRepositoryRemote;
import com.thoughtworks.go.config.ArtifactStores;
import com.thoughtworks.go.config.CaseInsensitiveString;
import com.thoughtworks.go.config.PipelineConfig;
import com.thoughtworks.go.config.materials.MaterialConfigs;
import com.thoughtworks.go.config.materials.Materials;
import com.thoughtworks.go.config.materials.dependency.DependencyMaterial;
import com.thoughtworks.go.config.materials.mercurial.HgMaterial;
import com.thoughtworks.go.config.materials.perforce.P4Material;
import com.thoughtworks.go.config.materials.svn.SvnMaterial;
import com.thoughtworks.go.domain.*;
import com.thoughtworks.go.domain.buildcause.BuildCause;
import com.thoughtworks.go.domain.builder.Builder;
import com.thoughtworks.go.domain.builder.CommandBuilder;
import com.thoughtworks.go.domain.builder.NullBuilder;
import com.thoughtworks.go.domain.materials.Modification;
import com.thoughtworks.go.domain.materials.perforce.P4Client;
import com.thoughtworks.go.domain.materials.perforce.P4Fixture;
import com.thoughtworks.go.domain.materials.svn.SvnCommand;
import com.thoughtworks.go.helper.*;
import com.thoughtworks.go.plugin.access.packagematerial.PackageRepositoryExtension;
import com.thoughtworks.go.plugin.access.pluggabletask.TaskExtension;
import com.thoughtworks.go.plugin.access.scm.SCMExtension;
import com.thoughtworks.go.remote.AgentIdentifier;
import com.thoughtworks.go.server.service.AgentRuntimeInfo;
import com.thoughtworks.go.util.SystemEnvironment;
import com.thoughtworks.go.util.command.EnvironmentVariableContext;
import com.thoughtworks.go.utils.SvnRepoFixture;
import org.apache.commons.io.FileUtils;
import org.hamcrest.Matcher;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.mockito.Mock;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import static com.thoughtworks.go.matchers.ConsoleOutMatcher.printedEnvVariable;
import static com.thoughtworks.go.util.SystemUtil.currentWorkingDirectory;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.MockitoAnnotations.initMocks;
public class BuildWorkEnvironmentVariablesTest {
private static final String JOB_NAME = "one";
private static final String STAGE_NAME = "first";
private static final String PIPELINE_NAME = "cruise";
private static final String AGENT_UUID = "uuid";
private static final String TRIGGERED_BY_USER = "approver";
private File dir;
private PipelineConfig pipelineConfig;
private EnvironmentVariableContext environmentVariableContext;
private SvnCommand command;
private HgTestRepo hgTestRepo;
private HgMaterial hgMaterial;
private SvnMaterial svnMaterial;
private DependencyMaterial dependencyMaterial;
private DependencyMaterial dependencyMaterialWithName;
private SvnRepoFixture svnRepoFixture;
@Mock
private PackageRepositoryExtension packageRepositoryExtension;
@Mock
private SCMExtension scmExtension;
@Mock
private TaskExtension taskExtension;
@Rule
public final TemporaryFolder temporaryFolder = new TemporaryFolder();
private P4Material p4Material;
private P4Fixture p4Fixture;
private P4Client p4Client;
private SystemEnvironment systemEnvironment = new SystemEnvironment();
@Before
public void setUp() throws Exception {
    // Wires @Mock fields, then builds the fixture graph the tests share:
    // a temp work dir, an SVN repo + material, two dependency materials,
    // an hg repo (via setupHgRepo) and a Perforce repo/client. Order matters:
    // getP4Material() depends on p4Fixture being created first.
    initMocks(this);
    dir = temporaryFolder.newFolder("someFolder");
    environmentVariableContext = new EnvironmentVariableContext();
    svnRepoFixture = new SvnRepoFixture("../common/src/test/resources/data/svnrepo", temporaryFolder);
    svnRepoFixture.createRepository();
    command = new SvnCommand(null, svnRepoFixture.getEnd2EndRepoUrl());
    pipelineConfig = PipelineConfigMother.createPipelineConfig(PIPELINE_NAME, STAGE_NAME, JOB_NAME);
    svnMaterial = SvnMaterial.createSvnMaterialWithMock(command);
    dependencyMaterial = new DependencyMaterial(new CaseInsensitiveString("upstream1"), new CaseInsensitiveString(STAGE_NAME));
    dependencyMaterialWithName = new DependencyMaterial(new CaseInsensitiveString("upstream2"), new CaseInsensitiveString(STAGE_NAME));
    // Named dependency material: GO_DEPENDENCY_* env vars use this name.
    dependencyMaterialWithName.setName(new CaseInsensitiveString("dependency_material_name"));
    setupHgRepo();
    p4Fixture = new P4Fixture();
    p4Material = getP4Material();
}
@After
public void teardown() throws Exception {
    // Run every cleanup step even when an earlier one throws, so a single
    // broken fixture does not leak the temp repos, the hg clone, or the
    // working directory into subsequent tests.
    try {
        p4Fixture.stop(p4Client);
    } finally {
        try {
            TestRepo.internalTearDown();
        } finally {
            try {
                hgTestRepo.tearDown();
            } finally {
                FileUtils.deleteQuietly(dir);
            }
        }
    }
}
@Test
public void shouldSetUpEnvironmentContextCorrectly() throws Exception {
    // NOTE(review): mutates the JVM-wide "serviceUrl" system property and
    // never restores it -- confirm no other test depends on its prior value.
    new SystemEnvironment().setProperty("serviceUrl", "some_random_place");
    Materials materials = new Materials(svnMaterial);
    // doWorkWithMaterials (defined elsewhere in this class) runs a build with
    // the given materials and returns the context the build populated.
    EnvironmentVariableContext environmentVariableContext = doWorkWithMaterials(materials);
    // Standard GO_* variables every job should see.
    assertThat(environmentVariableContext.getProperty("GO_REVISION"), is("3"));
    assertThat(environmentVariableContext.getProperty("GO_SERVER_URL"), is("some_random_place"));
    assertThat(environmentVariableContext.getProperty("GO_PIPELINE_NAME"), is(PIPELINE_NAME));
    assertThat(environmentVariableContext.getProperty("GO_PIPELINE_LABEL"), is("1"));
    assertThat(environmentVariableContext.getProperty("GO_STAGE_NAME"), is(STAGE_NAME));
    assertThat(environmentVariableContext.getProperty("GO_STAGE_COUNTER"), is("1"));
    assertThat(environmentVariableContext.getProperty("GO_JOB_NAME"), is(JOB_NAME));
    assertThat(environmentVariableContext.getProperty("GO_TRIGGER_USER"), is(TRIGGERED_BY_USER));
}
@Test
public void shouldSetUpP4ClientEnvironmentVariableEnvironmentContextCorrectly() {
    // Verifies that a Perforce material additionally exports GO_P4_CLIENT
    // alongside the standard GO_* variables.
    // NOTE(review): mutates the JVM-wide "serviceUrl" property without reset.
    new SystemEnvironment().setProperty("serviceUrl", "some_random_place");
    BuildWork work = getBuildWorkWithP4MaterialRevision(p4Material);
    EnvironmentVariableContext environmentVariableContext = new EnvironmentVariableContext();
    AgentIdentifier agentIdentifier = new AgentIdentifier("somename", "127.0.0.1", AGENT_UUID);
    work.doWork(environmentVariableContext, new AgentWorkContext(agentIdentifier, new FakeBuildRepositoryRemote(),
            new GoArtifactsManipulatorStub(), new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie"), packageRepositoryExtension, scmExtension, taskExtension, null, null));
    // "10" matches the modification revision created in getBuildWorkWithP4MaterialRevision.
    assertThat(environmentVariableContext.getProperty("GO_REVISION"), is("10"));
    assertThat(environmentVariableContext.getProperty("GO_SERVER_URL"), is("some_random_place"));
    assertThat(environmentVariableContext.getProperty("GO_TRIGGER_USER"), is(TRIGGERED_BY_USER));
    assertThat(environmentVariableContext.getProperty("GO_P4_CLIENT"), is(p4Material.clientName(dir)));
}
/**
 * Builds a {@link BuildWork} whose build cause contains a single p4 material
 * revision (revision "10") so p4-specific environment variables can be asserted.
 * Mirrors createAssignment(...) but swaps in the given p4 material.
 */
private BuildWork getBuildWorkWithP4MaterialRevision(P4Material p4Material) {
    // Point the shared pipeline config at the p4 material so the assignment's
    // materials agree with the build cause constructed below.
    pipelineConfig.setMaterialConfigs(new Materials(p4Material).convertToConfigs());
    JobPlan plan = new DefaultJobPlan(new Resources(), new ArrayList<>(), -1, new JobIdentifier(PIPELINE_NAME, 1, "1", STAGE_NAME, "1", JOB_NAME, 123L), null, new EnvironmentVariables(), new EnvironmentVariables(), null, null);
    MaterialRevisions materialRevisions = new MaterialRevisions(new MaterialRevision(p4Material, new Modification("user", "comment", "a@b.com", new Date(), "10")));
    BuildCause buildCause = BuildCause.createWithModifications(materialRevisions, TRIGGERED_BY_USER);
    List<Builder> builders = new ArrayList<>();
    builders.add(new CommandBuilder("ant", "", dir, new RunIfConfigs(), new NullBuilder(), ""));
    BuildAssignment assignment = BuildAssignment.create(plan, buildCause, builders, dir, environmentVariableContext, new ArtifactStores());
    return new BuildWork(assignment, systemEnvironment.consoleLogCharset());
}
/**
 * Creates a live p4 test repository plus client and returns a material mapped
 * to the given depot view. Side effects: initializes {@code p4Fixture} and
 * {@code p4Client} fields for use by the caller/teardown.
 */
private P4Material getP4Material() throws Exception {
    String view = "//depot/... //something/...";
    P4TestRepo repo = P4TestRepo.createP4TestRepo(temporaryFolder, temporaryFolder.newFolder());
    repo.onSetup();
    p4Fixture.setRepo(repo);
    p4Client = p4Fixture.createClient();
    return p4Fixture.material(view);
}
// Verifies that variables present in the BuildAssignment's initial context
// ("foo" -> "bar") survive into the context used during the build, i.e. the
// initial context is merged rather than replaced.
@Test
public void shouldMergeEnvironmentVariablesFromInitialContext() throws IOException {
    // No materials — isolates the merge behavior from material-derived variables.
    pipelineConfig.setMaterialConfigs(new MaterialConfigs());
    BuildAssignment buildAssignment = createAssignment(new EnvironmentVariableContext("foo", "bar"));
    BuildWork work = new BuildWork(buildAssignment, systemEnvironment.consoleLogCharset());
    EnvironmentVariableContext environmentContext = new EnvironmentVariableContext();
    AgentIdentifier agentIdentifier = new AgentIdentifier("somename", "127.0.0.1", AGENT_UUID);
    work.doWork(environmentContext, new AgentWorkContext(agentIdentifier, new FakeBuildRepositoryRemote(), new GoArtifactsManipulatorStub(), new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie"), packageRepositoryExtension, scmExtension, taskExtension, null, null));
    assertEnvironmentContext(environmentContext, "foo", is("bar"));
}
/**
 * Asserts that the variable stored under {@code key} satisfies {@code matcher},
 * prefixing the failure message with a dump of every property in the context
 * to ease diagnosis.
 */
private void assertEnvironmentContext(EnvironmentVariableContext environmentVariableContext, String key, Matcher<String> matcher) {
    String allProperties = "Properties: \n" + environmentVariableContext.getProperties();
    String actualValue = environmentVariableContext.getProperty(key);
    assertThat(allProperties, actualValue, matcher);
}
// Verifies that an (unnamed) upstream dependency material exports locator and
// label variables keyed by the upstream pipeline name (UPSTREAM1).
@Test
public void shouldSetupEnvironmentVariableForDependencyMaterial() throws IOException {
    // Empty Materials: the dependency revisions come from materialRevisions()
    // via createAssignment, not from the pipeline's material configs.
    EnvironmentVariableContext environmentVariableContext = doWorkWithMaterials(new Materials());
    assertThat("Properties: \n" + environmentVariableContext.getProperties(),
            environmentVariableContext.getProperty("GO_DEPENDENCY_LOCATOR_UPSTREAM1"), is("upstream1/0/first/1"));
    assertThat("Properties: \n" + environmentVariableContext.getProperties(),
            environmentVariableContext.getProperty("GO_DEPENDENCY_LABEL_UPSTREAM1"), is("upstream1-label"));
}
// Verifies that when a dependency material has an explicit name, the exported
// variables are keyed by that name (DEPENDENCY_MATERIAL_NAME) instead of the
// upstream pipeline name.
@Test
public void shouldSetupEnvironmentVariableUsingDependencyMaterialName() throws IOException {
    EnvironmentVariableContext environmentVariableContext = doWorkWithMaterials(new Materials());
    assertThat("Properties: \n" + environmentVariableContext.getProperties(),
            environmentVariableContext.getProperty("GO_DEPENDENCY_LOCATOR_DEPENDENCY_MATERIAL_NAME"), is("upstream2/0/first/1"));
    assertThat("Properties: \n" + environmentVariableContext.getProperties(),
            environmentVariableContext.getProperty("GO_DEPENDENCY_LABEL_DEPENDENCY_MATERIAL_NAME"), is("upstream2-label"));
}
// Verifies that a named svn material exports its revision under a
// name-qualified variable (GO_REVISION_CRUISE) rather than plain GO_REVISION.
@Test
public void shouldUseSvnMaterialNameIfPresent() throws IOException {
    svnMaterial.setName(new CaseInsensitiveString("Cruise"));
    pipelineConfig.setMaterialConfigs(new MaterialConfigs(svnMaterial.config()));
    BuildAssignment buildAssigment = createAssignment(null);
    BuildWork work = new BuildWork(buildAssigment, systemEnvironment.consoleLogCharset());
    EnvironmentVariableContext environmentVariableContext = new EnvironmentVariableContext();
    // "serviceUrl" is the system property backing GO_SERVER_URL.
    new SystemEnvironment().setProperty("serviceUrl", "some_random_place");
    AgentIdentifier agentIdentifier = new AgentIdentifier("somename", "127.0.0.1", AGENT_UUID);
    work.doWork(environmentVariableContext, new AgentWorkContext(agentIdentifier, new FakeBuildRepositoryRemote(),
            new GoArtifactsManipulatorStub(), new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie"), packageRepositoryExtension, scmExtension, taskExtension, null, null));
    // Material name "Cruise" is upper-cased into the variable suffix.
    assertThat(environmentVariableContext.getProperty("GO_REVISION_CRUISE"), is("3"));
}
// Verifies that with multiple materials each one's revision is exported under
// a variable derived from its destination folder (GO_REVISION_<FOLDER>).
// NOTE(review): "Mutiple" in the method name is a typo for "Multiple"; left
// as-is because JUnit identifies tests by method name.
@Test
public void shouldSetUpRevisionIntoEnvironmentContextCorrectlyForMutipleMaterial() throws IOException {
    // Folder "svn-Dir" becomes the SVN_DIR suffix; hgMaterial uses "hg_Dir".
    svnMaterial.setFolder("svn-Dir");
    EnvironmentVariableContext environmentVariableContext = doWorkWithMaterials(new Materials(svnMaterial, hgMaterial));
    assertThat(environmentVariableContext.getProperty("GO_REVISION_SVN_DIR"), is("3"));
    assertThat(environmentVariableContext.getProperty("GO_REVISION_HG_DIR"), is("ca3ebb67f527c0ad7ed26b789056823d8b9af23f"));
}
// Verifies that every exported GO_* variable is also echoed to the build's
// console output (captured here by the artifacts-manipulator stub).
@Test
public void shouldOutputEnvironmentVariablesIntoConsoleOut() throws IOException {
    BuildAssignment buildAssigment = createAssignment(null);
    BuildWork work = new BuildWork(buildAssigment, systemEnvironment.consoleLogCharset());
    // The stub records everything written to the job console.
    GoArtifactsManipulatorStub manipulator = new GoArtifactsManipulatorStub();
    new SystemEnvironment().setProperty("serviceUrl", "some_random_place");
    AgentIdentifier agentIdentifier = new AgentIdentifier("somename", "127.0.0.1", AGENT_UUID);
    work.doWork(environmentVariableContext, new AgentWorkContext(agentIdentifier, new FakeBuildRepositoryRemote(),
            manipulator, new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie"), packageRepositoryExtension, scmExtension, taskExtension, null, null));
    assertThat(manipulator.consoleOut(), printedEnvVariable("GO_SERVER_URL", "some_random_place"));
    assertThat(manipulator.consoleOut(), printedEnvVariable("GO_PIPELINE_NAME", PIPELINE_NAME));
    assertThat(manipulator.consoleOut(), printedEnvVariable("GO_PIPELINE_COUNTER", 1));
    assertThat(manipulator.consoleOut(), printedEnvVariable("GO_PIPELINE_LABEL", 1));
    assertThat(manipulator.consoleOut(), printedEnvVariable("GO_STAGE_NAME", STAGE_NAME));
    assertThat(manipulator.consoleOut(), printedEnvVariable("GO_STAGE_COUNTER", 1));
    assertThat(manipulator.consoleOut(), printedEnvVariable("GO_JOB_NAME", JOB_NAME));
    assertThat(manipulator.consoleOut(), printedEnvVariable("GO_REVISION", 3));
    assertThat(manipulator.consoleOut(), printedEnvVariable("GO_TRIGGER_USER", TRIGGERED_BY_USER));
}
// Verifies that an svn:externals checkout gets its own revision variable
// (GO_REVISION_SVN_DIR_EXTERNAL) in addition to the parent material's.
@Test
public void shouldSetEnvironmentVariableForSvnExternal() throws IOException {
    // Adding the external bumps the fixture repo to revision 4.
    svnRepoFixture.createExternals(svnRepoFixture.getEnd2EndRepoUrl());
    command = new SvnCommand(null, svnRepoFixture.getEnd2EndRepoUrl(), null, null, true);
    svnMaterial = SvnMaterial.createSvnMaterialWithMock(command);
    svnMaterial.setFolder("svn-Dir");
    EnvironmentVariableContext environmentVariableContext = doWorkWithMaterials(new Materials(svnMaterial));
    assertThat(environmentVariableContext.getProperty("GO_REVISION_SVN_DIR"), is("4"));
    assertThat(environmentVariableContext.getProperty("GO_REVISION_SVN_DIR_EXTERNAL"), is("4"));
}
/**
 * Builds a BuildAssignment for the canonical test job (PIPELINE_NAME / stage 1
 * / JOB_NAME) with a single "ant" command builder and the revisions produced
 * by {@link #materialRevisions()}.
 *
 * @param environmentVariableContext initial variables for the assignment; may
 *        be {@code null} when the test does not need a pre-seeded context
 */
private BuildAssignment createAssignment(EnvironmentVariableContext environmentVariableContext) throws IOException {
    JobIdentifier jobId = new JobIdentifier(PIPELINE_NAME, 1, "1", STAGE_NAME, "1", JOB_NAME, 123L);
    JobPlan plan = new DefaultJobPlan(new Resources(), new ArrayList<>(), -1, jobId, null, new EnvironmentVariables(), new EnvironmentVariables(), null, null);
    BuildCause buildCause = BuildCause.createWithModifications(materialRevisions(), TRIGGERED_BY_USER);
    Builder antBuilder = new CommandBuilder("ant", "", dir, new RunIfConfigs(), new NullBuilder(), "");
    List<Builder> builders = new ArrayList<>();
    builders.add(antBuilder);
    return BuildAssignment.create(plan, buildCause, builders, dir, environmentVariableContext, new ArtifactStores());
}
/**
 * Creates a mercurial fixture repository and a material pointing at it with
 * destination folder "hg_Dir". Side effects: assigns the {@code hgTestRepo}
 * and {@code hgMaterial} fields.
 */
private void setupHgRepo() throws IOException {
    hgTestRepo = new HgTestRepo("hgTestRepo1", temporaryFolder);
    hgMaterial = MaterialsMother.hgMaterial(hgTestRepo.projectRepositoryUrl(), "hg_Dir");
}
/**
 * Assembles the canonical set of revisions used by createAssignment: an svn
 * revision, its svn:externals revision, an hg revision, and two upstream
 * dependency revisions (one unnamed, one with an explicit material name).
 * The ordering of the returned revisions is relied upon by the tests above.
 */
private MaterialRevisions materialRevisions() throws IOException {
    MaterialRevision svnRevision = new MaterialRevision(this.svnMaterial,
            ModificationsMother.oneModifiedFile(
                    svnRepoFixture.getHeadRevision(svnRepoFixture.getEnd2EndRepoUrl())));
    SvnMaterial svnMaterialForExternal = SvnMaterial.createSvnMaterialWithMock(new SvnCommand(null, svnRepoFixture.getExternalRepoUrl()));
    // Externals are checked out into "<parent folder>/external" (or just
    // "external" when the parent material has no folder).
    String folder = this.svnMaterial.getFolder() == null ? "external" : this.svnMaterial.getFolder() + "/" + "external";
    svnMaterialForExternal.setFolder(folder);
    MaterialRevision svnExternalRevision = new MaterialRevision(svnMaterialForExternal,
            ModificationsMother.oneModifiedFile(
                    svnRepoFixture.getHeadRevision(svnRepoFixture.getExternalRepoUrl())));
    MaterialRevision hgRevision = new MaterialRevision(hgMaterial,
            ModificationsMother.oneModifiedFile(hgTestRepo.latestModifications().get(0).getRevision()));
    MaterialRevision dependencyRevision1 = ModificationsMother.dependencyMaterialRevision(0,
            dependencyMaterial.getPipelineName() + "-label", 1,
            dependencyMaterial, new Date());
    MaterialRevision dependencyRevisionWithName = ModificationsMother.dependencyMaterialRevision(0,
            dependencyMaterialWithName.getPipelineName() + "-label", 1,
            dependencyMaterialWithName, new Date());
    return new MaterialRevisions(svnRevision, svnExternalRevision, hgRevision, dependencyRevision1,
            dependencyRevisionWithName);
}
/**
 * Runs a complete build for the given materials through the agent work path
 * (with stubbed server-side collaborators) and returns the environment
 * context populated by that run, ready for assertions.
 */
private EnvironmentVariableContext doWorkWithMaterials(Materials materials) throws IOException {
    pipelineConfig.setMaterialConfigs(materials.convertToConfigs());
    BuildAssignment buildAssigment = createAssignment(null);
    BuildWork work = new BuildWork(buildAssigment, systemEnvironment.consoleLogCharset());
    EnvironmentVariableContext environmentVariableContext = new EnvironmentVariableContext();
    AgentIdentifier agentIdentifier = new AgentIdentifier("somename", "127.0.0.1", AGENT_UUID);
    work.doWork(environmentVariableContext, new AgentWorkContext(agentIdentifier, new FakeBuildRepositoryRemote(),
            new GoArtifactsManipulatorStub(), new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie"), packageRepositoryExtension, scmExtension, taskExtension, null, null));
    return environmentVariableContext;
}
}
| |
package org.carlspring.strongbox.rest;
import org.carlspring.strongbox.services.ArtifactManagementService;
import org.carlspring.strongbox.storage.resolvers.ArtifactStorageException;
import javax.ws.rs.DELETE;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.File;
import java.io.IOException;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component;
/**
 * REST endpoints for managing the trash (soft-deleted artifacts) of storages
 * and repositories: emptying the trash and restoring ("undeleting") content
 * from it. All endpoints require the ROOT authority.
 *
 * @author Martin Todorov
 */
@Component
@Path("/trash")
@Api(value = "/trash")
@PreAuthorize("hasAuthority('ROOT')")
public class TrashRestlet
        extends BaseArtifactRestlet
{

    private static final Logger logger = LoggerFactory.getLogger(TrashRestlet.class);

    @Autowired
    private ArtifactManagementService artifactManagementService;


    /**
     * Permanently deletes the trash of a single repository.
     *
     * @return 200 on success; 404 when the storage or repository does not
     *         exist; 400 when deletion fails.
     */
    @DELETE
    @Path("{storageId}/{repositoryId}")
    @Produces(MediaType.TEXT_PLAIN)
    @ApiOperation(value = "Used to delete the trash for a specified repository.", position = 1)
    @ApiResponses(value = { @ApiResponse(code = 200, message = "The trash for '${storageId}:${repositoryId}' was removed successfully."),
                            @ApiResponse(code = 400, message = "An error occurred!"),
                            @ApiResponse(code = 404, message = "The specified (storageId/repositoryId) does not exist!") })
    public Response delete(@ApiParam(value = "The storageId", required = true)
                           @PathParam("storageId") String storageId,
                           @ApiParam(value = "The repositoryId", required = true)
                           @PathParam("repositoryId") String repositoryId)
            throws IOException
    {
        // Validate the storage before the repository: getRepository would fail
        // for a non-existent storage.
        if (getStorage(storageId) == null)
        {
            return Response.status(Response.Status.NOT_FOUND)
                           .entity("The specified storageId does not exist!")
                           .build();
        }
        if (getRepository(storageId, repositoryId) == null)
        {
            return Response.status(Response.Status.NOT_FOUND)
                           .entity("The specified repositoryId does not exist!")
                           .build();
        }

        try
        {
            artifactManagementService.deleteTrash(storageId, repositoryId);

            logger.debug("Deleted trash for repository " + repositoryId + ".");
        }
        catch (ArtifactStorageException e)
        {
            logger.error(e.getMessage(), e);

            return Response.status(Response.Status.BAD_REQUEST)
                           .entity(e.getMessage())
                           .build();
        }

        return Response.ok()
                       .entity("The trash for '" + storageId + ":" + repositoryId + "' was removed successfully.")
                       .build();
    }

    /**
     * Permanently deletes the trash of every repository in every storage.
     *
     * @return 200 on success; 500 when deletion fails.
     */
    @DELETE
    @Produces(MediaType.TEXT_PLAIN)
    @ApiOperation(value = "Used to delete the trash for all repositories.", position = 2)
    @ApiResponses(value = { @ApiResponse(code = 200, message = "The trash for all repositories was successfully removed."),
                            @ApiResponse(code = 500, message = "An error occurred!") })
    public Response delete()
            throws IOException
    {
        try
        {
            artifactManagementService.deleteTrash();

            logger.debug("Deleted trash for all repositories.");
        }
        catch (ArtifactStorageException e)
        {
            logger.error(e.getMessage(), e);

            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(e.getMessage()).build();
        }

        return Response.ok().entity("The trash for all repositories was successfully removed.").build();
    }

    /**
     * Restores a single path from a repository's trash back into the
     * repository.
     *
     * @return 200 on success; 404 when the storage, repository, or trashed
     *         path does not exist; 400 when restoration fails.
     */
    @POST
    @Path("{storageId}/{repositoryId}/{path:.*}")
    @Produces(MediaType.TEXT_PLAIN)
    @ApiOperation(value = "Used to undelete the trash for a path under a specified repository.", position = 3)
    @ApiResponses(value = { @ApiResponse(code = 200, message = "The trash for '${storageId}:${repositoryId}' was restored successfully."),
                            @ApiResponse(code = 400, message = "An error occurred!"),
                            @ApiResponse(code = 404, message = "The specified (storageId/repositoryId/path) does not exist!") })
    public Response undelete(@ApiParam(value = "The storageId", required = true)
                             @PathParam("storageId") String storageId,
                             @ApiParam(value = "The repositoryId", required = true)
                             @PathParam("repositoryId") String repositoryId,
                             @ApiParam(value = "The path to restore", required = true)
                             @PathParam("path") String path)
            throws IOException
    {
        logger.debug("UNDELETE: " + path);
        logger.debug(storageId + ":" + repositoryId + ": " + path);

        if (getStorage(storageId) == null)
        {
            return Response.status(Response.Status.NOT_FOUND)
                           .entity("The specified storageId does not exist!")
                           .build();
        }
        if (getRepository(storageId, repositoryId) == null)
        {
            return Response.status(Response.Status.NOT_FOUND)
                           .entity("The specified repositoryId does not exist!")
                           .build();
        }
        // The path must still be present in the repository's ".trash" directory.
        if (!new File(getRepository(storageId, repositoryId).getBasedir() + "/.trash", path).exists())
        {
            return Response.status(Response.Status.NOT_FOUND)
                           .entity("The specified path does not exist!")
                           .build();
        }

        try
        {
            artifactManagementService.undelete(storageId, repositoryId, path);

            logger.debug("Undeleted trash for path " + path + " under repository " + storageId + ":" + repositoryId + ".");
        }
        catch (ArtifactStorageException e)
        {
            // Log the failure before reporting it, consistent with the other
            // handlers in this class (previously this exception was not logged).
            logger.error(e.getMessage(), e);

            return Response.status(Response.Status.BAD_REQUEST)
                           .entity(e.getMessage())
                           .build();
        }

        return Response.ok()
                       .entity("The trash for '" + storageId + ":" + repositoryId + "' was restored successfully.")
                       .build();
    }

    /**
     * Restores the entire trash of a single repository.
     *
     * @return 200 on success; 404 when the storage or repository does not
     *         exist; 400 when restoration fails.
     */
    @POST
    @Path("{storageId}/{repositoryId}")
    @Produces(MediaType.TEXT_PLAIN)
    @ApiOperation(value = "Used to undelete the trash for a specified repository.", position = 4)
    @ApiResponses(value = { @ApiResponse(code = 200, message = "The trash for '${storageId}:${repositoryId}' was restored successfully."),
                            @ApiResponse(code = 400, message = "An error occurred!"),
                            @ApiResponse(code = 404, message = "The specified (storageId/repositoryId) does not exist!") })
    public Response undelete(@ApiParam(value = "The storageId", required = true)
                             @PathParam("storageId") String storageId,
                             @ApiParam(value = "The repositoryId", required = true)
                             @PathParam("repositoryId") String repositoryId)
            throws IOException
    {
        // Guard against a missing storage explicitly: calling getRepository on
        // a null storage would throw a NullPointerException and bypass the
        // intended 404 response below.
        if (getConfiguration().getStorage(storageId) != null &&
            getConfiguration().getStorage(storageId).getRepository(repositoryId) != null)
        {
            try
            {
                artifactManagementService.undeleteTrash(storageId, repositoryId);

                logger.debug("Undeleted trash for repository " + repositoryId + ".");
            }
            catch (ArtifactStorageException e)
            {
                logger.error(e.getMessage(), e);

                // Distinguish "not found" failures from general storage errors.
                if (artifactManagementService.getStorage(storageId) == null)
                {
                    return Response.status(Response.Status.NOT_FOUND)
                                   .entity("The specified storageId does not exist!")
                                   .build();
                }
                else if (artifactManagementService.getStorage(storageId).getRepository(repositoryId) == null)
                {
                    return Response.status(Response.Status.NOT_FOUND)
                                   .entity("The specified repositoryId does not exist!")
                                   .build();
                }

                return Response.status(Response.Status.BAD_REQUEST)
                               .entity(e.getMessage())
                               .build();
            }

            return Response.ok()
                           .entity("The trash for '" + storageId + ":" + repositoryId + "' was restored successfully.")
                           .build();
        }
        else
        {
            return Response.status(Response.Status.NOT_FOUND).entity("Storage or repository could not be found!").build();
        }
    }

    /**
     * Restores the trash of every repository in every storage.
     *
     * @return 200 on success; 400 when restoration fails.
     */
    @POST
    @Produces(MediaType.TEXT_PLAIN)
    @ApiOperation(value = "Used to undelete the trash for all repositories.", position = 5)
    @ApiResponses(value = { @ApiResponse(code = 200, message = "The trash for all repositories was successfully restored."),
                            @ApiResponse(code = 400, message = "An error occurred!") })
    public Response undelete()
            throws IOException
    {
        try
        {
            artifactManagementService.undeleteTrash();

            logger.debug("Undeleted trash for all repositories.");
        }
        catch (ArtifactStorageException e)
        {
            logger.error(e.getMessage(), e);

            return Response.status(Response.Status.BAD_REQUEST)
                           .entity(e.getMessage())
                           .build();
        }

        return Response.ok()
                       .entity("The trash for all repositories was successfully restored.")
                       .build();
    }

}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.plugin.core.functiongraph.graph.vertex;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.awt.geom.Point2D;
import javax.swing.JButton;
import javax.swing.JComponent;
import edu.uci.ics.jung.graph.Graph;
import ghidra.app.plugin.core.functiongraph.graph.*;
import ghidra.app.plugin.core.functiongraph.mvc.*;
import ghidra.app.util.viewer.listingpanel.ListingModel;
import ghidra.program.model.address.Address;
import ghidra.program.model.address.AddressSetView;
import ghidra.program.model.listing.Program;
import ghidra.program.model.symbol.FlowType;
import ghidra.program.util.ProgramLocation;
import ghidra.program.util.ProgramSelection;
import ghidra.util.SystemUtilities;
import ghidra.util.exception.AssertException;
/**
 * Base implementation of {@link FGVertex} that stores vertex identity (its
 * address set within a program), display state (location, alpha, emphasis,
 * type, flow), and delegates all visual behavior to an
 * {@link AbstractGraphComponentPanel} supplied by subclasses via
 * {@link #doGetComponent()}.
 *
 * <p>Equality and hashing are based on the vertex's address range; since code
 * blocks do not overlap, the min address is sufficient for a good hash.
 */
public abstract class AbstractFunctionGraphVertex implements FGVertex {

	private FGController controller;
	private final Program program;
	private final AddressSetView addressSet;

	private Point2D location;
	private double emphasisLevel;
	private double alpha = 1D;

	private FGVertexType vertexType;
	private FlowType flowType;
	private boolean isEntry;

	// Lazily-computed, cached hash (address sets are immutable for a vertex).
	private boolean doHashCode = true;
	private int hashCode;

	/**
	 * To be restored when the component for this vertex is created.
	 */
	protected Color pendingRestoreColor;

	private GroupHistoryInfo groupInfo;

	AbstractFunctionGraphVertex(FGController controller, Program program, AddressSetView addresses,
			FlowType flowType, boolean isEntry) {
		if (addresses == null || addresses.isEmpty()) {
			throw new IllegalArgumentException("Vertex cannot have null or empty address body");
		}

		this.controller = controller;
		this.program = program;
		this.addressSet = addresses;
		this.flowType = flowType;
		this.isEntry = isEntry;
		this.location = new Point2D.Double();
	}

	/* Copy constructor */
	AbstractFunctionGraphVertex(FGController controller, AbstractFunctionGraphVertex vertex) {
		this.controller = controller;
		this.program = vertex.program;
		this.addressSet = vertex.addressSet;
		this.location = vertex.location;
		this.vertexType = vertex.vertexType;
		this.isEntry = vertex.isEntry;
		this.flowType = vertex.flowType;
		this.groupInfo = vertex.groupInfo;
	}

	/** Returns true if the visual component has already been built (avoids lazy creation). */
	abstract boolean hasLoadedComponent();

	/** Returns (creating if necessary) the panel that renders this vertex. */
	abstract AbstractGraphComponentPanel doGetComponent();

	@Override
	public void writeSettings(FunctionGraphVertexAttributes settings) {
		controller.saveVertexColors(this, settings);
	}

	@Override
	public void readSettings(FunctionGraphVertexAttributes settings) {
		controller.restoreVertexColors(this, settings);
	}

	@Override
	public void updateGroupAssociationStatus(GroupHistoryInfo newGroupInfo) {
		this.groupInfo = newGroupInfo;
		doGetComponent().updateGroupAssociationStatus(groupInfo != null);
	}

	@Override
	public GroupHistoryInfo getGroupInfo() {
		return groupInfo;
	}

	@Override
	public boolean isUncollapsedGroupMember() {
		if (groupInfo == null) {
			return false;
		}

		// we are an uncollapsed group member if we have a group info and we *are* in the graph
		// (not being in the graph means that we are inside of a group)
		return isInGraph();
	}

	private boolean isInGraph() {
		FGData graphData = controller.getFunctionGraphData();
		FunctionGraph functionGraph = graphData.getFunctionGraph();
		Graph<FGVertex, FGEdge> graph = functionGraph;
		return graph.containsVertex(this);
	}

	@Override
	public JComponent getComponent() {
		return doGetComponent();
	}

	public FGController getController() {
		return controller;
	}

	@Override
	public Program getProgram() {
		return program;
	}

	@Override
	public Address getVertexAddress() {
		return addressSet.getMinAddress();
	}

	@Override
	public AddressSetView getAddresses() {
		return addressSet;
	}

	@Override
	public boolean containsProgramLocation(ProgramLocation pl) {
		return addressSet.contains(pl.getAddress());
	}

	@Override
	public boolean containsAddress(Address address) {
		return addressSet.contains(address);
	}

	@Override
	public void setEmphasis(double emphasisLevel) {
		this.emphasisLevel = emphasisLevel;
	}

	@Override
	public double getEmphasis() {
		return emphasisLevel;
	}

	@Override
	public void setAlpha(double alpha) {
		this.alpha = alpha;
	}

	@Override
	public double getAlpha() {
		return alpha;
	}

	@Override
	public void setLocation(Point2D location) {
		this.location = location;
	}

	@Override
	public Point2D getLocation() {
		return location;
	}

	@Override
	public FGVertexType getVertexType() {
		return vertexType;
	}

	@Override
	public void setVertexType(FGVertexType vertexType) {
		if (this.vertexType != null) {
			// Bug fix: report the existing type as the previous one; the bare
			// parameter name used to shadow the field and reported the NEW type.
			throw new AssertException("Cannot set the vertex type more than once. " +
				"Previous type was " + this.vertexType + " on vertex " + this);
		}
		this.vertexType = vertexType;
	}

	@Override
	public boolean isEntry() {
		// note: not sure if we need the second check; this check will catch any case where
		// the vertex was manually marked as an entry
		return isEntry || (vertexType != null && vertexType.isEntry());
	}

	@Override
	public FlowType getFlowType() {
		return flowType;
	}

	@Override
	public int hashCode() {
		// code blocks don't overlap, so min address is sufficient for a good hash value
		if (doHashCode) {
			hashCode = addressSet.getMinAddress().hashCode();
			doHashCode = false;
		}
		return hashCode;
	}

	@Override
	public boolean equals(Object obj) {
		if (obj == null) {
			return false;
		}

		if (obj == this) {
			return true;
		}

		if (getClass() != obj.getClass()) {
			return false;
		}

		// Two vertices are equal when their address ranges coincide exactly.
		AbstractFunctionGraphVertex other = (AbstractFunctionGraphVertex) obj;
		Address minAddress = addressSet.getMinAddress();
		Address otherMinAddress = other.addressSet.getMinAddress();
		if (!SystemUtilities.isEqual(minAddress, otherMinAddress)) {
			return false;
		}

		Address maxAddress = addressSet.getMaxAddress();
		Address otherMaxAddress = other.addressSet.getMaxAddress();
		return SystemUtilities.isEqual(maxAddress, otherMaxAddress);
	}

	@Override
	public void dispose() {
		// After this, toString() takes the "disposed" path below.
		controller = null;
	}

//==================================================================================================
// GraphComponentPanel Delegate Methods
//    (thin pass-throughs to the rendering panel; no logic of their own)
//==================================================================================================

	@Override
	public void restoreColor(Color color) {
		if (hasLoadedComponent()) {
			doGetComponent().restoreColor(color);
			return;
		}

		// Component not built yet — remember the color for when it is created.
		pendingRestoreColor = color;
	}

	@Override
	public Color getUserDefinedColor() {
		return doGetComponent().getUserDefinedColor();
	}

	@Override
	public Color getDefaultBackgroundColor() {
		return doGetComponent().getDefaultBackgroundColor();
	}

	@Override
	public Color getBackgroundColor() {
		return doGetComponent().getBackgroundColor();
	}

	@Override
	public Color getSelectionColor() {
		return doGetComponent().getSelectionColor();
	}

	@Override
	public void clearColor() {
		doGetComponent().clearColor();
	}

	@Override
	public String getTitle() {
		return doGetComponent().getTitle();
	}

	@Override
	public String getToolTipText(MouseEvent event) {
		return doGetComponent().getToolTipText(event);
	}

	@Override
	public JComponent getToolTipComponentForEdge(FGEdge edge) {
		return doGetComponent().getToolTipComponentForEdge(edge);
	}

	@Override
	public JComponent getToolTipComponentForVertex() {
		return doGetComponent().getToolTipComponentForVertex();
	}

	@Override
	public boolean isDefaultBackgroundColor() {
		return doGetComponent().isDefaultBackgroundColor();
	}

	@Override
	public Rectangle getBounds() {
		return doGetComponent().getBounds();
	}

	@Override
	public boolean isFullScreenMode() {
		return doGetComponent().isFullScreenMode();
	}

	@Override
	public void setFullScreenMode(boolean fullScreen) {
		doGetComponent().setFullScreenMode(fullScreen);
	}

	@Override
	public boolean isSelected() {
		return doGetComponent().isSelected();
	}

	@Override
	public void setSelected(boolean selected) {
		doGetComponent().setSelected(selected);
	}

	@Override
	public void setHovered(boolean hovered) {
		// we don't support this for now
	}

	@Override
	public boolean isHovered() {
		// we don't support this for now
		return false;
	}

	@Override
	public void editLabel(JComponent parentComponent) {
		doGetComponent().editLabel(parentComponent);
	}

	@Override
	public void setFocused(boolean focused) {
		// Focusing a vertex also selects it.
		AbstractGraphComponentPanel component = doGetComponent();
		component.setSelected(focused);
		component.setFocused(focused);
	}

	@Override
	public boolean isFocused() {
		AbstractGraphComponentPanel component = doGetComponent();
		return component.isFocused();
	}

	@Override
	public void setProgramSelection(ProgramSelection selection) {
		doGetComponent().setProgramSelection(selection);
	}

	@Override
	public ProgramSelection getProgramSelection() {
		return doGetComponent().getProgramSelection();
	}

	@Override
	public String getTextSelection() {
		return doGetComponent().getTextSelection();
	}

	@Override
	public void setProgramHighlight(ProgramSelection highlight) {
		doGetComponent().setProgramHighlight(highlight);
	}

	@Override
	public void setProgramLocation(ProgramLocation location) {
		doGetComponent().setProgramLocation(location);
	}

	@Override
	public ProgramLocation getProgramLocation() {
		return doGetComponent().getProgramLocation();
	}

	@Override
	public ListingModel getListingModel(Address address) {
		return doGetComponent().getListingModel(address);
	}

	@Override
	public Rectangle getCursorBounds() {
		return doGetComponent().getCursorBounds();
	}

	@Override
	public void setBackgroundColor(Color color) {
		doGetComponent().setBackgroundColor(color);
	}

	@Override
	public boolean isHeaderClick(Component clickedComponent) {
		return doGetComponent().isHeaderClick(clickedComponent);
	}

	@Override
	public boolean isGrabbable(Component c) {
		if (!doGetComponent().isHeaderClick(c)) {
			return false; // only the header is grabbable
		}

		// the user cannot grab buttons, as they can press them
		return !(c instanceof JButton);
	}

	@Override
	public String toString() {
		if (getController() == null || !hasLoadedComponent()) {
			// disposed!
			return getClass().getSimpleName() + "@" + getVertexAddress().toString();
		}
		return doGetComponent().getTitle();
	}

	@Override
	public void refreshModel() {
		doGetComponent().refreshModel();
	}

	@Override
	public void refreshDisplay() {
		doGetComponent().refreshDisplay();
	}

	@Override
	public void refreshDisplayForAddress(Address address) {
		doGetComponent().refreshDisplayForAddress(address);
	}

	@Override
	public void setShowing(boolean isShowing) {
		doGetComponent().setShowing(isShowing);
	}

	@Override
	public Component getMaximizedViewComponent() {
		return doGetComponent().getMaximizedViewComponent();
	}
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.impl.store.access.sort;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Comparator;
import com.gemstone.gemfire.DataSerializer;
import com.gemstone.gemfire.InternalGemFireError;
import com.gemstone.gemfire.cache.util.ObjectSizer;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.cache.control.MemoryThresholdListener;
import com.gemstone.gemfire.internal.shared.unsafe.ChannelBufferUnsafeDataInputStream;
import com.gemstone.gemfire.internal.shared.unsafe.ChannelBufferUnsafeDataOutputStream;
import com.gemstone.gemfire.internal.size.ReflectionSingleObjectSizer;
import com.gemstone.gemfire.internal.util.ArraySortedCollectionWithOverflow;
import com.gemstone.gemfire.internal.util.TIntObjectHashMapWithDups;
import com.gemstone.gnu.trove.TIntArrayList;
import com.gemstone.gnu.trove.TObjectHashingStrategy;
import com.pivotal.gemfirexd.internal.engine.GfxdConstants;
import com.pivotal.gemfirexd.internal.engine.Misc;
import com.pivotal.gemfirexd.internal.engine.access.GemFireTransaction;
import com.pivotal.gemfirexd.internal.engine.distributed.metadata.RegionAndKey;
import com.pivotal.gemfirexd.internal.engine.distributed.utils.GemFireXDUtils;
import com.pivotal.gemfirexd.internal.engine.jdbc.GemFireXDRuntimeException;
import com.pivotal.gemfirexd.internal.engine.store.AbstractCompactExecRow;
import com.pivotal.gemfirexd.internal.engine.store.GemFireContainer;
import com.pivotal.gemfirexd.internal.engine.store.GemFireStore;
import com.pivotal.gemfirexd.internal.engine.store.RowFormatter;
import com.pivotal.gemfirexd.internal.engine.store.offheap.OffHeapRow;
import com.pivotal.gemfirexd.internal.engine.store.offheap.OffHeapRowWithLobs;
import com.pivotal.gemfirexd.internal.iapi.error.StandardException;
import com.pivotal.gemfirexd.internal.iapi.services.property.PropertyUtil;
import com.pivotal.gemfirexd.internal.iapi.sql.execute.ExecRow;
import com.pivotal.gemfirexd.internal.iapi.store.access.ColumnOrdering;
import com.pivotal.gemfirexd.internal.iapi.store.access.SortController;
import com.pivotal.gemfirexd.internal.iapi.store.access.SortInfo;
import com.pivotal.gemfirexd.internal.iapi.store.access.SortObserver;
import com.pivotal.gemfirexd.internal.iapi.store.access.TransactionController;
import com.pivotal.gemfirexd.internal.iapi.store.access.conglomerate.ScanControllerRowSource;
import com.pivotal.gemfirexd.internal.iapi.store.access.conglomerate.ScanManager;
import com.pivotal.gemfirexd.internal.iapi.store.access.conglomerate.Sort;
import com.pivotal.gemfirexd.internal.iapi.store.access.conglomerate.TransactionManager;
import com.pivotal.gemfirexd.internal.iapi.types.DataType;
import com.pivotal.gemfirexd.internal.iapi.types.DataValueDescriptor;
import com.pivotal.gemfirexd.internal.impl.io.DirFile;
import com.pivotal.gemfirexd.internal.shared.common.sanity.SanityManager;
/**
* Optimized implementation of the Derby sorter using
* {@link ArraySortedCollectionWithOverflow}.
*
* @author swale
* @since gfxd 2.0
*/
public final class ArraySorter extends ArraySortedCollectionWithOverflow
implements Sort, SortController {
static final int DEFAULT_MAX_ARRAY_SIZE = 65536;
static final TIntArrayList ZERO_LIST = new TIntArrayList(0);
static final long BASE_CLASS_OVERHEAD = ReflectionSingleObjectSizer
.sizeof(ArraySorter.class);
final ExecRowSerializer serializer;
GemFireTransaction tran;
final SortObserver sortObserver;
private int numRowsInput;
private int numRowsOutput;
private TIntArrayList mergeRunsSize;
public static final class ExecRowSerializer extends DataSerializer {
private ExecRow templateRow;
private GemFireContainer container;
private RowFormatter formatter;
/**
* Type of row which is one of the static values below.
*/
private byte rowType;
private static final byte BYTEARRAY = 1;
private static final byte ARRAY_OF_BYTEARRAY = 2;
private static final byte OFFHEAP_ROW = 3;
private static final byte OFFHEAP_ROW_WITH_LOBS = 4;
private static final byte DVDARRAY = 5;
private static final byte DYNAMIC = 6;
private int rowsWritten;
private int rowsRead;
/**
 * Maps the concrete storage class of a raw row value to one of the
 * serializer's row-type codes. Any class outside the five known storage
 * formats triggers a sanity assertion (and an AssertionError as a
 * compiler-visible terminator).
 */
private static byte getRowType(Object rowData) {
  final Class<?> rowClass = rowData.getClass();
  if (rowClass == byte[].class) {
    return BYTEARRAY;
  }
  if (rowClass == byte[][].class) {
    return ARRAY_OF_BYTEARRAY;
  }
  if (rowClass == OffHeapRow.class) {
    return OFFHEAP_ROW;
  }
  if (rowClass == OffHeapRowWithLobs.class) {
    return OFFHEAP_ROW_WITH_LOBS;
  }
  if (rowClass == DataValueDescriptor[].class) {
    return DVDARRAY;
  }
  SanityManager.THROWASSERT("ExecRowSerializer: unknown row format "
      + rowClass + ": " + rowData);
  // never reached
  throw new AssertionError("not expected to be reached");
}
/**
 * Decides, from the first row seen, whether a single fixed row-type code
 * can be assumed for every row of this sort, or whether the type must be
 * determined (and written to the stream) per row ({@link #DYNAMIC}).
 * Called once from {@code toData} when {@code rowType} is still 0.
 */
void initialize(Object rowData) {
  boolean useRowTypeOnly = true;
  if (this.container != null) {
    if (this.container.isOffHeap()) {
      // for offheap case we can always have a combination of offheap+heap
      // e.g. transactional entries
      useRowTypeOnly = false;
    }
    else {
      // check if any schema changed from LOBs to non-LOBs or vice-versa
      if (this.container.hasLobs()) {
        useRowTypeOnly = this.container.hasLobsInAllSchema();
      }
      else {
        useRowTypeOnly = !this.container.hasLobsInAnySchema();
      }
    }
  }
  if (useRowTypeOnly) {
    // all rows share the storage format of the first row
    this.rowType = getRowType(rowData);
  }
  else {
    // mixed formats possible: tag each serialized row with its own type
    this.rowType = DYNAMIC;
  }
  if (GemFireXDUtils.TraceTempFileIO) {
    SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_TEMP_FILE_IO,
        "Writing rows of type = " + getRowType(this.rowType));
  }
}
/**
 * Returns a human-readable name for a row-type code; used only for
 * temp-file I/O tracing. Relies on the codes being the contiguous run
 * BYTEARRAY(1)..DYNAMIC(6); anything else maps to "UNKNOWN".
 */
private static String getRowType(final byte rowType) {
  final String[] names = { "BYTEARRAY", "ARRAY_OF_BYTEARRAY",
      "OFFHEAP_ROW", "OFFHEAP_ROW_WITH_LOBS", "DVDARRAY", "DYNAMIC" };
  final int index = rowType - BYTEARRAY;
  return (index >= 0 && index < names.length) ? names[index] : "UNKNOWN";
}
/**
 * {@inheritDoc}
 */
@Override
public boolean toData(Object o, DataOutput out) throws IOException {
  return toData((ExecRow)o, out);
}
/**
 * Serializes one row to the overflow stream. The first call captures the
 * template row, container and (single-schema) formatter, then decides the
 * row-type mode via {@link #initialize}. Wire format: optional per-row
 * type byte (DYNAMIC mode only), then the row payload in its storage
 * format, then the row's RegionAndKey tree set. {@link #fromData} must
 * mirror this exactly.
 */
final boolean toData(final ExecRow row, final DataOutput out)
    throws IOException {
  final Object rowData = row.getRawRowValue(false);
  // rowType == 0 means not yet initialized; do one-time setup from this row
  if (this.rowType == 0) {
    this.templateRow = row;
    if (row instanceof AbstractCompactExecRow) {
      this.formatter = ((AbstractCompactExecRow)row).getRowFormatter();
      this.container = this.formatter.container;
      // a table-level formatter is only reusable when the container has a
      // single schema; otherwise resolve the formatter per-row on read
      if (this.formatter.isTableFormatter()
          && !this.formatter.container.hasSingleSchema) {
        this.formatter = null;
      }
    }
    initialize(rowData);
  }
  byte rowType = this.rowType;
  if (rowType == DYNAMIC) {
    // mixed formats: tag every row with its own type byte
    rowType = getRowType(rowData);
    out.writeByte(rowType);
  }
  switch (rowType) {
    case ARRAY_OF_BYTEARRAY:
      DataSerializer.writeArrayOfByteArrays((byte[][])rowData, out);
      break;
    case BYTEARRAY:
      DataSerializer.writeByteArray((byte[])rowData, out);
      break;
    case OFFHEAP_ROW:
      // off-heap rows are written so they can be read back as byte[]
      final OffHeapRow bs = (OffHeapRow)rowData;
      if (bs != null) {
        if (GemFireXDUtils.TraceTempFileIO) {
          SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_TEMP_FILE_IO,
              "Writing OffHeap data of length " + bs.getLength());
        }
        bs.toData(out);
      }
      else {
        DataSerializer.writeByteArray(null, out);
      }
      break;
    case OFFHEAP_ROW_WITH_LOBS:
      final OffHeapRowWithLobs bsLobs = (OffHeapRowWithLobs)rowData;
      if (bsLobs != null) {
        if (GemFireXDUtils.TraceTempFileIO) {
          SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_TEMP_FILE_IO,
              "Writing OffHeap lobs data of length " + bsLobs.getLength());
        }
        bsLobs.toData(out);
      }
      else {
        DataSerializer.writeArrayOfByteArrays(null, out);
      }
      break;
    default:
      // DVDARRAY (the only remaining code once DYNAMIC is resolved above)
      DataValueDescriptor[] dvds = (DataValueDescriptor[])rowData;
      if (GemFireXDUtils.TraceTempFileIO) {
        StringBuilder sb = new StringBuilder();
        sb.append("Writing DataValueDescriptors of length ").append(
            dvds.length);
        sb.append(" values ");
        try {
          StringBuilder dvdS = toString(dvds, new StringBuilder());
          sb.append(dvdS);
        } catch (StandardException se) {
          throw new GemFireXDRuntimeException(se);
        }
        SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_TEMP_FILE_IO,
            sb.toString());
      }
      DataType.writeDVDArray(dvds, out);
      break;
  }
  // routing info always follows the row payload
  DataSerializer.writeTreeSet(row.getAllRegionAndKeyInfo(), out);
  this.rowsWritten++;
  return true;
}
/**
 * {@inheritDoc}
 * <p>
 * Reads back one row written by {@link #toData}; relies on the fields
 * (templateRow, container, formatter, rowType) initialized during the
 * write phase of the same sort. Off-heap rows are rehydrated as heap
 * byte[]/byte[][] rows.
 */
@Override
public Object fromData(DataInput in) throws IOException,
    ClassNotFoundException {
  ExecRow row;
  final GemFireContainer container;
  final RowFormatter formatter;
  byte rowType = this.rowType;
  if (rowType == DYNAMIC) {
    // mixed formats: per-row type byte precedes the payload
    rowType = in.readByte();
  }
  switch (rowType) {
    case BYTEARRAY:
    // OFFHEAP: optimize; allocate in offheap but will need to handle
    // retain/release separately when CompactExecRow is GCed?
    case OFFHEAP_ROW:
      byte[] bytes = DataSerializer.readByteArray(in);
      container = this.container;
      formatter = this.formatter;
      // formatter is null for multi-schema containers; resolve per row
      row = container.newExecRowFromBytes(bytes,
          formatter != null ? formatter : container.getRowFormatter(bytes));
      break;
    case ARRAY_OF_BYTEARRAY:
    // OFFHEAP: optimize; allocate in offheap but will need to handle
    // retain/release separately when CompactExecRow is GCed?
    case OFFHEAP_ROW_WITH_LOBS:
      byte[][] byteArrays = DataSerializer.readArrayOfByteArrays(in);
      container = this.container;
      formatter = this.formatter;
      row = container.newExecRowFromByteArrays(byteArrays,
          formatter != null ? formatter : container.getRowFormatter(
              byteArrays));
      break;
    default:
      // DVDARRAY: materialize onto a fresh copy of the template row
      DataValueDescriptor[] dvds = DataType.readDVDArray(in);
      if (GemFireXDUtils.TraceTempFileIO) {
        StringBuilder sb = new StringBuilder();
        sb.append("Read DataValueDescriptors of length ").append(
            dvds.length);
        sb.append(" values ");
        try {
          StringBuilder dvdS = toString(dvds, new StringBuilder());
          sb.append(dvdS);
        } catch (StandardException se) {
          throw new GemFireXDRuntimeException(se);
        }
        SanityManager.DEBUG_PRINT(GfxdConstants.TRACE_TEMP_FILE_IO,
            sb.toString());
      }
      row = this.templateRow.getNewNullRow();
      row.setRowArray(dvds);
      break;
  }
  // restore the routing info written after the payload
  row.setAllRegionAndKeyInfo(DataSerializer.<RegionAndKey> readTreeSet(in));
  this.rowsRead++;
  return row;
}
/** Returns the number of rows serialized by {@code toData} so far. */
public int getRowsWritten() {
  return this.rowsWritten;
}
/** Returns the number of rows deserialized by {@code fromData} so far. */
public int getRowsRead() {
  return this.rowsRead;
}
/** Returns the first row seen by {@code toData}, kept as a template. */
public ExecRow getTemplateRow() {
  return this.templateRow;
}
/**
 * Appends {@code value(TypeName),} for each column to the given builder;
 * used only for temp-file I/O tracing.
 */
private StringBuilder toString(DataValueDescriptor[] dvd, StringBuilder sb)
    throws StandardException {
  for (DataValueDescriptor column : dvd) {
    sb.append(column.getString()).append("(").append(column.getTypeName())
        .append("),");
  }
  return sb;
}
// This serializer is invoked directly by the sorter and is never registered
// with the DataSerializer framework, so the registration hooks below must
// not be called.
@Override
public int getId() {
  throw new InternalGemFireError(
      "ExecRowSerializer.getId: not expected to be invoked");
}
@Override
public Class<?>[] getSupportedClasses() {
  throw new InternalGemFireError(
      "ExecRowSerializer.getSupportedClasses: not expected to be invoked");
}
}
/**
 * Sizer handed to the base class to estimate the in-memory size of a row;
 * clamps the long estimate to {@code Integer.MAX_VALUE}.
 */
private static final ObjectSizer rowSizer = new ObjectSizer() {
  @Override
  public int sizeof(Object o) {
    long size;
    try {
      size = ((ExecRow)o).estimateRowSize();
      return size < Integer.MAX_VALUE ? (int)size : Integer.MAX_VALUE;
    } catch (StandardException se) {
      // sizing failures are unexpected; surface as an unchecked exception
      throw new GemFireXDRuntimeException(se);
    }
  }
};
/**
 * Row comparator (and hashing strategy for duplicate detection) over the
 * sort's column ordering. Packs per-column ordering into a long each so
 * the hot compare loop reads a single primitive per column.
 */
public static final class RowCompare implements Comparator<Object>,
    TObjectHashingStrategy {
  private static final long serialVersionUID = -2971361542262894575L;
  /**
   * Column ordering flags contains:
   *
   * 2nd bit: 1 if ascending and 0 if descending
   *
   * 3rd bit: 1 if nullsLow==true and 0 otherwise
   *
   * Higher order integer contains the column order.
   *
   * Any change/addition in above scheme will require corresponding change in
   * compare impl which assumes that there is nothing other than these two
   * flags.
   */
  private final long[] columnOrderingFlags;
  // statistics: total compare()/equals() invocations (not thread-safe)
  private int numComparisons;
  RowCompare(ColumnOrdering[] columnOrdering) {
    this.columnOrderingFlags = new long[columnOrdering.length];
    for (int i = 0; i < columnOrdering.length; i++) {
      final ColumnOrdering order = columnOrdering[i];
      long flags = (order.getIsAscending() ? 0x2 : 0x0);
      if (order.getIsNullsOrderedLow()) {
        flags |= 0x4;
      }
      // 1-based column id goes in the high 32 bits
      flags |= (((long)order.getColumnId() + 1L) << Integer.SIZE);
      this.columnOrderingFlags[i] = flags;
    }
  }
  /**
   * {@inheritDoc}
   */
  @Override
  public final int compare(final Object o1, final Object o2) {
    final ExecRow r1 = (ExecRow)o1;
    final ExecRow r2 = (ExecRow)o2;
    this.numComparisons++;
    // Get the number of columns we have to compare.
    final long[] columnOrderingFlags = this.columnOrderingFlags;
    final int colsToCompare = columnOrderingFlags.length;
    int r;
    // Compare the columns specified in the column
    // ordering array.
    try {
      for (int i = 0; i < colsToCompare; i++) {
        // Get columns to compare.
        final long flags = columnOrderingFlags[i];
        final int iflags = (int)(flags & 0xffffffff);
        final int column = (int)((flags >>> Integer.SIZE) & 0xffffffff);
        // If the columns don't compare equal, we're done.
        // Return the sense of the comparison.
        // currently only nullsLow is in the flags
        // (iflags > 0x2 is true exactly when the 0x4 nullsLow bit is set)
        if ((r = r1.compare(r2, column, iflags > 0x2)) != 0) {
          // flags & 0x2 will give 2 or 0, so (-1) gives 1 or -1
          // for ascending and descending respectively
          return r * ((iflags & 0x2) - 1);
        }
      }
      // We made it through all the columns, and they must have
      // all compared equal. So return that the rows compare equal.
      return 0;
    } catch (StandardException se) {
      throw new GemFireXDRuntimeException(se);
    }
  }
  /** Returns the number of comparisons performed, for sort statistics. */
  public final int getNumComparisons() {
    return this.numComparisons;
  }
  /**
   * {@inheritDoc}
   */
  @Override
  public final int computeHashCode(final Object o) {
    final ExecRow r = (ExecRow)o;
    // Get the number of columns we have to use.
    final long[] columnOrderingFlags = this.columnOrderingFlags;
    final int colsToCompare = columnOrderingFlags.length;
    int hash = 0;
    for (int i = 0; i < colsToCompare; i++) {
      // Get columns to compare.
      final int column = (int)((columnOrderingFlags[i] >>> Integer.SIZE)
          & 0xffffffff);
      hash = r.computeHashCode(column, hash);
    }
    return hash;
  }
  /**
   * {@inheritDoc}
   */
  @Override
  public final boolean equals(final Object o1, final Object o2) {
    final ExecRow r1 = (ExecRow)o1;
    final ExecRow r2 = (ExecRow)o2;
    this.numComparisons++;
    // Get the number of columns we have to compare.
    final long[] columnOrderingFlags = this.columnOrderingFlags;
    final int colsToCompare = columnOrderingFlags.length;
    // Compare the columns specified in the column
    // ordering array.
    try {
      for (int i = 0; i < colsToCompare; i++) {
        // Get columns to compare.
        final int column = (int)((columnOrderingFlags[i] >>> Integer.SIZE)
            & 0xffffffff);
        // If the columns don't compare equal, we're done.
        if (r1.compare(r2, column, false) != 0) {
          return false;
        }
      }
      // We made it through all the columns, and they must have
      // all compared equal. So return that the rows compare equal.
      return true;
    } catch (StandardException se) {
      throw new GemFireXDRuntimeException(se);
    }
  }
}
// TODO: PERF: also carry through projection from higher layers even if it
// has not been pushed down to minimize data written to disk
// also avoid overflowing CompactExecRows whose value points to something
// which is still in region's in-memory data, or overflow just the oplogId
// and disk offset
/**
 * Creates a sorter; callers should normally use {@link #create}. The
 * comparator is passed to the base class both as comparator and as the
 * hashing strategy used for duplicate detection.
 */
protected ArraySorter(GemFireTransaction tran, RowCompare comparator,
    SortObserver sortObserver, final int maxUnsortedArraySize,
    final long maxSortLimit, MemoryThresholdListener mtl, String overflowDir,
    GemFireCacheImpl cache) {
  super(comparator, sortObserver, comparator, rowSizer, maxUnsortedArraySize,
      maxSortLimit, mtl, overflowDir, cache);
  this.serializer = new ExecRowSerializer();
  this.tran = tran;
  this.sortObserver = sortObserver;
  // shared empty sentinel; replaced lazily on the first overflow
  this.mergeRunsSize = ZERO_LIST;
}
/**
 * Factory for a sorter: resolves the overflow directory (store temp dir,
 * else java.io.tmpdir, else the working directory) and sizes the unsorted
 * array to twice the row estimate capped at DEFAULT_MAX_ARRAY_SIZE.
 * Note: alreadyInOrder and estimatedRowSize are currently unused here.
 */
public static ArraySorter create(GemFireTransaction tran,
    ColumnOrdering[] columnOrdering, SortObserver sortObserver,
    boolean alreadyInOrder, long estimatedRows, int estimatedRowSize,
    long maxSortLimit) {
  final GemFireStore store = Misc.getMemStore();
  final DirFile tmpDir = store.getDatabase().getTempDir();
  String overflowDir = tmpDir != null ? tmpDir.getAbsolutePath()
      : PropertyUtil.getSystemProperty("java.io.tmpdir");
  if (overflowDir == null) {
    overflowDir = ".";
  }
  // use twice the estimated rows as the unsorted array size, but never
  // exceed DEFAULT_MAX_ARRAY_SIZE; unknown/tiny estimates get the cap
  long unsortedSize = estimatedRows;
  if (unsortedSize <= 1 || unsortedSize > (DEFAULT_MAX_ARRAY_SIZE >> 1)) {
    unsortedSize = (DEFAULT_MAX_ARRAY_SIZE >> 1);
  }
  return new ArraySorter(tran, new RowCompare(columnOrdering), sortObserver,
      (int)(unsortedSize << 1), maxSortLimit, store.thresholdListener(),
      overflowDir, store.getGemFireCache());
}
/** Returns the serializer used to spill this sort's rows to disk. */
public final ExecRowSerializer getDataSerializer() {
  return this.serializer;
}
/**
 * {@inheritDoc}
 * <p>
 * Also records the size of each overflowed merge run for sort statistics.
 */
@Override
protected OverflowData overflowElementArray(final Object[] elements) {
  OverflowData overflowData = super.overflowElementArray(elements);
  if (this.mergeRunsSize == ZERO_LIST) {
    // lazily replace the shared empty sentinel with a real list
    this.mergeRunsSize = new TIntArrayList();
  }
  this.mergeRunsSize.add(overflowData.size());
  return overflowData;
}
/**
 * Serializes the non-null prefix of the given element array to the
 * overflow stream and returns the number of rows written; the scan stops
 * at the first null since only the last array can have empty trailing
 * slots.
 */
@Override
protected int writeElements(final Object[] elements,
    final ChannelBufferUnsafeDataOutputStream dos) throws IOException {
  final ExecRowSerializer rowSerializer = this.serializer;
  int count = 0;
  for (Object element : elements) {
    // last array may have empty elements at the end
    if (element == null) {
      break;
    }
    rowSerializer.toData((ExecRow)element, dos);
    count++;
  }
  return count;
}
/**
 * {@inheritDoc}
 * <p>
 * Reads back one row previously written by {@link #writeElements}.
 */
@Override
protected Object readElement(final ChannelBufferUnsafeDataInputStream in)
    throws ClassNotFoundException, IOException {
  return this.serializer.fromData(in);
}
/**
 * {@inheritDoc}
 * <p>
 * Gives the sort observer a chance to reject duplicates before adding the
 * row; internal layers wrap StandardException in GemFireXDRuntimeException,
 * which is unwrapped here so callers see the original checked exception.
 */
@Override
public boolean insert(ExecRow row) throws StandardException {
  this.numRowsInput++;
  final SortObserver sortObserver = this.sortObserver;
  // observer may substitute the row or return null to drop a duplicate
  if (sortObserver != null
      && (row = sortObserver.insertNonDuplicateKey(row)) == null) {
    return false;
  }
  try {
    if (super.add(row)) {
      this.numRowsOutput++;
      return true;
    }
    else {
      return false;
    }
  } catch (GemFireXDRuntimeException re) {
    // unwrap StandardExceptions wrapped by comparator/sizer callbacks
    if (re.getCause() instanceof StandardException) {
      throw (StandardException)re.getCause();
    }
    else {
      throw re;
    }
  }
}
/**
 * {@inheritDoc}
 * <p>
 * Approximates current heap usage: fixed class overhead, per-slot overhead
 * of the element arrays, overflow buffers, the duplicate-detection map,
 * plus (in-memory row count) * (estimated row size). The estimate is
 * intentionally rough; see inline notes.
 */
@Override
public long estimateMemoryUsage(ExecRow sortResultRow)
    throws StandardException {
  long numMemElements = 0;
  // start with basic object overhead
  long overheads = BASE_CLASS_OVERHEAD;
  // add approx overheads for overflowed and other memory data
  final Object[] elements = this.elements;
  // for each object in elements
  overheads += (elements.length * (ReflectionSingleObjectSizer.OBJECT_SIZE
      + ReflectionSingleObjectSizer.REFERENCE_SIZE));
  if (this.overflowOutputChannel != null) {
    // add some overhead for the file channels (not sure how much exactly and
    // sizer may not give the right picture due to native handles involved)
    overheads += 64;
  }
  final int currentArrayIndex = this.currentArrayIndex;
  for (int i = 0; i < currentArrayIndex; i++) {
    final Object elementArray = elements[i];
    if (elementArray instanceof Object[]) {
      // still fully in memory: count its rows and slot overhead
      final Object[] elems = (Object[])elementArray;
      numMemElements += elems.length;
      // for Object[].length and references
      overheads += (4 + elems.length
          * ReflectionSingleObjectSizer.REFERENCE_SIZE);
    }
    else {
      // overflowed run: only its buffer and bookkeeping remain on heap
      overheads += (((OverflowData)elementArray).bufferSize()
          + OverflowData.BASE_CLASS_OVERHEAD);
    }
  }
  final Object[] currentArray = this.currentArray;
  if (currentArray != null) {
    numMemElements += this.currentArrayPos;
    // for current Object[].length and references
    overheads += (4 + currentArray.length
        * ReflectionSingleObjectSizer.REFERENCE_SIZE);
  }
  // lastly the overhead of dups map
  final TIntObjectHashMapWithDups dups = this.hashCodeToObjectMapWithDups;
  if (dups != null) {
    // three arrays here: byte[], int[], object[]
    overheads += dups.capacity() *
        (1 + 4 + ReflectionSingleObjectSizer.REFERENCE_SIZE);
    overheads += (3 * ReflectionSingleObjectSizer.OBJECT_SIZE);
  }
  long estimatedRowSize = this.estimatedObjectSize;
  if (estimatedRowSize <= 0) {
    // no cached estimate: size the sample row, or assume 1 byte per row
    estimatedRowSize = (sortResultRow != null ? sortResultRow
        .estimateRowSize() : 1L);
  }
  return overheads + (numMemElements * estimatedRowSize);
}
/**
 * {@inheritDoc}
 */
@Override
public void completedInserts() {
  // remove from transaction
  final GemFireTransaction tran = this.tran;
  if (tran != null) {
    tran.closeMe(this);
    this.tran = null;
  }
}
/** Releases all sorter data and resets transaction and statistics. */
@Override
public void clear() {
  super.clear();
  this.tran = null;
  this.numRowsInput = 0;
  this.numRowsOutput = 0;
  this.mergeRunsSize = ZERO_LIST;
}
/**
 * {@inheritDoc}
 */
@Override
public SortInfo getSortInfo() throws StandardException {
  return new MergeSortInfo("external", this.numRowsInput, this.numRowsOutput,
      numOverflowedElements(), this.mergeRunsSize,
      ((RowCompare)this.comparator).getNumComparisons());
}
/**
 * {@inheritDoc}
 */
@Override
public SortController open(TransactionManager tran) throws StandardException {
  // the sorter acts as its own controller
  return this;
}
/**
 * {@inheritDoc}
 */
@Override
public ScanManager openSortScan(TransactionManager tran, boolean hold)
    throws StandardException {
  return new ArraySortScan(this, tran, hold);
}
/**
 * {@inheritDoc}
 */
@Override
public ScanControllerRowSource openSortRowSource(TransactionManager tran)
    throws StandardException {
  return new ArraySortScan(this, tran, false);
}
/**
 * {@inheritDoc}
 */
@Override
public void drop(TransactionController tran) throws StandardException {
  // releases in-memory data and any overflow files
  super.close();
}
}
| |
package com.sequenceiq.redbeams.converter.stack;
import static com.sequenceiq.cloudbreak.util.SecurityGroupSeparator.getSecurityGroupIds;
import static com.sequenceiq.redbeams.api.endpoint.v4.databaseserver.requests.AllocateDatabaseServerV4Request.RDS_NAME_MAX_LENGTH;
import java.time.Instant;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import com.google.common.base.Strings;
import com.sequenceiq.cloudbreak.api.endpoint.v4.common.DatabaseVendor;
import com.sequenceiq.cloudbreak.auth.CrnUser;
import com.sequenceiq.cloudbreak.auth.altus.EntitlementService;
import com.sequenceiq.cloudbreak.auth.crn.Crn;
import com.sequenceiq.cloudbreak.auth.security.CrnUserDetailsService;
import com.sequenceiq.cloudbreak.cloud.model.CloudSubnet;
import com.sequenceiq.cloudbreak.cloud.model.StackTags;
import com.sequenceiq.cloudbreak.common.exception.BadRequestException;
import com.sequenceiq.cloudbreak.common.json.Json;
import com.sequenceiq.cloudbreak.common.mappable.CloudPlatform;
import com.sequenceiq.cloudbreak.common.mappable.ProviderParameterCalculator;
import com.sequenceiq.cloudbreak.common.service.Clock;
import com.sequenceiq.cloudbreak.tag.CostTagging;
import com.sequenceiq.cloudbreak.tag.request.CDPTagGenerationRequest;
import com.sequenceiq.environment.api.v1.environment.model.response.DetailedEnvironmentResponse;
import com.sequenceiq.environment.api.v1.environment.model.response.SecurityAccessResponse;
import com.sequenceiq.redbeams.api.endpoint.v4.databaseserver.requests.AllocateDatabaseServerV4Request;
import com.sequenceiq.redbeams.api.endpoint.v4.databaseserver.requests.SslMode;
import com.sequenceiq.redbeams.api.endpoint.v4.databaseserver.responses.SslCertificateType;
import com.sequenceiq.redbeams.api.endpoint.v4.stacks.DatabaseServerV4StackRequest;
import com.sequenceiq.redbeams.api.endpoint.v4.stacks.NetworkV4StackRequest;
import com.sequenceiq.redbeams.api.endpoint.v4.stacks.SecurityGroupV4StackRequest;
import com.sequenceiq.redbeams.api.model.common.DetailedDBStackStatus;
import com.sequenceiq.redbeams.configuration.DatabaseServerSslCertificateConfig;
import com.sequenceiq.redbeams.configuration.SslCertificateEntry;
import com.sequenceiq.redbeams.domain.stack.DBStack;
import com.sequenceiq.redbeams.domain.stack.DBStackStatus;
import com.sequenceiq.redbeams.domain.stack.DatabaseServer;
import com.sequenceiq.redbeams.domain.stack.Network;
import com.sequenceiq.redbeams.domain.stack.SecurityGroup;
import com.sequenceiq.redbeams.domain.stack.SslConfig;
import com.sequenceiq.redbeams.exception.RedbeamsException;
import com.sequenceiq.redbeams.service.AccountTagService;
import com.sequenceiq.redbeams.service.EnvironmentService;
import com.sequenceiq.redbeams.service.PasswordGeneratorService;
import com.sequenceiq.redbeams.service.UserGeneratorService;
import com.sequenceiq.redbeams.service.UuidGeneratorService;
import com.sequenceiq.redbeams.service.crn.CrnService;
import com.sequenceiq.redbeams.service.network.NetworkParameterAdder;
import com.sequenceiq.redbeams.service.network.SubnetChooserService;
import com.sequenceiq.redbeams.service.network.SubnetListerService;
@Component
public class AllocateDatabaseServerV4RequestToDBStackConverter {
private static final Logger LOGGER = LoggerFactory.getLogger(AllocateDatabaseServerV4RequestToDBStackConverter.class);
private static final String DBSTACK_NAME_PREFIX = "dbstck";
@Value("${cb.enabledplatforms:}")
private Set<String> dbServiceSupportedPlatforms;
@Value("${redbeams.ssl.enabled:}")
private boolean sslEnabled;
@Value("${redbeams.db.postgres.major.version:}")
private String redbeamsDbMajorVersion;
@Inject
private EnvironmentService environmentService;
@Inject
private ProviderParameterCalculator providerParameterCalculator;
@Inject
private Clock clock;
@Inject
private SubnetListerService subnetListerService;
@Inject
private SubnetChooserService subnetChooserService;
@Inject
private UserGeneratorService userGeneratorService;
@Inject
private PasswordGeneratorService passwordGeneratorService;
@Inject
private UuidGeneratorService uuidGeneratorService;
@Inject
private NetworkParameterAdder networkParameterAdder;
@Inject
private CrnUserDetailsService crnUserDetailsService;
@Inject
private EntitlementService entitlementService;
@Inject
private CrnService crnService;
@Inject
private CostTagging costTagging;
@Inject
private AccountTagService accountTagService;
@Inject
private DatabaseServerSslCertificateConfig databaseServerSslCertificateConfig;
@PostConstruct
public void initSupportedPlatforms() {
    // When cb.enabledplatforms is not configured, fall back to the platforms
    // the database service supports out of the box.
    if (dbServiceSupportedPlatforms.isEmpty()) {
        dbServiceSupportedPlatforms = Set.of(CloudPlatform.AWS.toString(), CloudPlatform.AZURE.toString(), CloudPlatform.MOCK.toString());
    }
}
/**
 * Converts an allocate-database-server request plus its owner CRN into a
 * fully-populated {@link DBStack} (platform, region, network, database
 * server, tags, SSL config, initial PROVISION_REQUESTED status).
 *
 * @param source         the allocation request
 * @param ownerCrnString CRN of the requesting user
 * @return the DBStack ready for persistence/provisioning
 */
public DBStack convert(AllocateDatabaseServerV4Request source, String ownerCrnString) {
    Crn ownerCrn = Crn.safeFromString(ownerCrnString);
    CrnUser user = crnUserDetailsService.loadUserByUsername(ownerCrnString);
    DetailedEnvironmentResponse environment = environmentService.getByCrn(source.getEnvironmentCrn());
    DBStack dbStack = new DBStack();
    dbStack.setOwnerCrn(ownerCrn);
    dbStack.setUserName(user.getEmail());
    // resolve/validate the cloud platform and propagate it onto the request
    CloudPlatform cloudPlatform = updateCloudPlatformAndRelatedFields(source, dbStack, environment.getCloudPlatform());
    dbStack.setName(source.getName() != null ? source.getName() : generateDatabaseServerStackName(environment.getName()));
    dbStack.setEnvironmentId(source.getEnvironmentCrn());
    setRegion(dbStack, environment);
    if (source.getDatabaseServer() != null) {
        dbStack.setDatabaseServer(buildDatabaseServer(source.getDatabaseServer(), cloudPlatform, ownerCrn,
                environment.getSecurityAccess()));
    }
    // flatten provider-specific parameters into string key/value pairs
    Map<String, Object> asMap = providerParameterCalculator.get(source).asMap();
    if (asMap != null) {
        Map<String, String> parameter = new HashMap<>();
        asMap.forEach((key, value) -> parameter.put(key, value.toString()));
        dbStack.setParameters(parameter);
    }
    dbStack.setNetwork(buildNetwork(source.getNetwork(), environment, cloudPlatform, dbStack));
    Instant now = clock.getCurrentInstant();
    dbStack.setDBStackStatus(new DBStackStatus(dbStack, DetailedDBStackStatus.PROVISION_REQUESTED, now.toEpochMilli()));
    // resource CRN must exist before tag generation, which references it
    dbStack.setResourceCrn(crnService.createCrn(dbStack).toString());
    dbStack.setTags(getTags(dbStack, source, environment));
    dbStack.setSslConfig(getSslConfig(source, dbStack));
    return dbStack;
}
// FIXME Potentially extract this whole logic into a service as it might be needed later for cert rotation
/**
 * Builds the SSL configuration for the stack when SSL is enabled both
 * globally and in the request. Picks the highest registered certificate
 * version for the platform/region; on Azure with multiple registered
 * certs it bundles both the newest and the preceding one (rotation
 * window), otherwise only the newest.
 */
private SslConfig getSslConfig(AllocateDatabaseServerV4Request source, DBStack dbStack) {
    SslConfig sslConfig = new SslConfig();
    if (sslEnabled && source.getSslConfig() != null && SslMode.isEnabled(source.getSslConfig().getSslMode())) {
        String cloudPlatform = dbStack.getCloudPlatform();
        String region = dbStack.getRegion();
        // TODO Determine the highest available SSL cert version for GCP; update sslCertificateActiveVersion during provisioning
        int maxVersion = databaseServerSslCertificateConfig.getMaxVersionByCloudPlatformAndRegion(cloudPlatform, region);
        sslConfig.setSslCertificateActiveVersion(maxVersion);
        // TODO Add SslConfig.sslCertificateMaxVersion and keep it up-to-date (mostly for GCP)
        Set<String> certs;
        String cloudProviderIdentifier;
        int numberOfCerts = databaseServerSslCertificateConfig.getNumberOfCertsByCloudPlatformAndRegion(cloudPlatform, region);
        if (numberOfCerts == 0) {
            // TODO Initialize SSL cert & CloudProviderIdentifier for GCP
            // This is possible for cloud platforms where SSL is supported, but the certs are not pre-registered in CB; see e.g. GCP
            certs = Collections.emptySet();
            cloudProviderIdentifier = null;
        } else if (numberOfCerts == 1 || !CloudPlatform.AZURE.equals(source.getCloudPlatform())) {
            // single registered cert, or a non-Azure platform: ship only the newest
            SslCertificateEntry cert = databaseServerSslCertificateConfig.getCertByCloudPlatformAndRegionAndVersion(cloudPlatform, region, maxVersion);
            validateCert(cloudPlatform, maxVersion, cert);
            certs = Collections.singleton(cert.getCertPem());
            cloudProviderIdentifier = cert.getCloudProviderIdentifier();
        } else {
            // In Azure and for > 1 certs, include both the most recent cert and the preceding one
            Set<SslCertificateEntry> certsTemp =
                    databaseServerSslCertificateConfig.getCertsByCloudPlatformAndRegionAndVersions(cloudPlatform, region, maxVersion - 1, maxVersion)
                            .stream()
                            .filter(Objects::nonNull)
                            .collect(Collectors.toSet());
            validateNonNullCertsCount(cloudPlatform, maxVersion, certsTemp);
            findAndValidateCertByVersion(cloudPlatform, maxVersion - 1, certsTemp);
            // the active identifier always refers to the newest cert
            cloudProviderIdentifier = findAndValidateCertByVersion(cloudPlatform, maxVersion, certsTemp).getCloudProviderIdentifier();
            certs = certsTemp
                    .stream()
                    .map(SslCertificateEntry::getCertPem)
                    .collect(Collectors.toSet());
            validateUniqueCertsCount(cloudPlatform, maxVersion, certs);
        }
        sslConfig.setSslCertificates(certs);
        sslConfig.setSslCertificateActiveCloudProviderIdentifier(cloudProviderIdentifier);
        sslConfig.setSslCertificateType(SslCertificateType.CLOUD_PROVIDER_OWNED);
    }
    return sslConfig;
}
/**
 * Validates a certificate entry: it must exist, carry the expected
 * version, and have a non-blank cloud provider identifier and PEM.
 *
 * @throws IllegalStateException describing the first failed check
 */
private void validateCert(String cloudPlatform, int versionExpected, SslCertificateEntry cert) {
    if (cert == null) {
        throw new IllegalStateException(
                String.format("Could not find SSL certificate version %d for cloud platform \"%s\"", versionExpected, cloudPlatform));
    }
    int version = cert.getVersion();
    if (version != versionExpected) {
        throw new IllegalStateException(String.format("SSL certificate version mismatch for cloud platform \"%s\": expected=%d, actual=%d", cloudPlatform,
                versionExpected, version));
    }
    if (Strings.isNullOrEmpty(cert.getCloudProviderIdentifier())) {
        throw new IllegalStateException(
                String.format("Blank CloudProviderIdentifier in SSL certificate version %d for cloud platform \"%s\"", versionExpected, cloudPlatform));
    }
    if (Strings.isNullOrEmpty(cert.getCertPem())) {
        throw new IllegalStateException(String.format("Blank PEM in SSL certificate version %d for cloud platform \"%s\"", versionExpected, cloudPlatform));
    }
}
/**
 * Ensures exactly two non-null certificates were found for the Azure
 * rotation-window case (versions maxVersion-1 and maxVersion).
 *
 * @throws IllegalStateException when the count differs from 2
 */
private void validateNonNullCertsCount(String cloudPlatform, int maxVersion, Set<SslCertificateEntry> certs) {
    if (certs.size() != 2) {
        throw new IllegalStateException(
                String.format("Could not find SSL certificate(s) when requesting versions [%d, %d] for cloud platform \"%s\": " +
                        "expected 2 certificates, got %d", maxVersion - 1, maxVersion, cloudPlatform, certs.size()));
    }
}
/**
 * Finds the certificate entry with the given version in the set and runs
 * the standard validation on it (which throws when no match was found or
 * the entry is invalid).
 */
private SslCertificateEntry findAndValidateCertByVersion(String cloudPlatform, int version, Set<SslCertificateEntry> certs) {
    SslCertificateEntry found = null;
    for (SslCertificateEntry candidate : certs) {
        if (candidate.getVersion() == version) {
            found = candidate;
            break;
        }
    }
    validateCert(cloudPlatform, version, found);
    return found;
}
/**
 * Ensures the two certificates of the rotation window have distinct PEM
 * contents (a duplicate PEM collapses the set below size 2).
 *
 * @throws IllegalStateException when the PEM set does not have 2 entries
 */
private void validateUniqueCertsCount(String cloudPlatform, int maxVersion, Set<String> certs) {
    if (certs.size() != 2) {
        throw new IllegalStateException(
                String.format("Received duplicated SSL certificate PEM when requesting versions [%d, %d] for cloud platform \"%s\"",
                        maxVersion - 1, maxVersion, cloudPlatform));
    }
}
/**
 * Computes the full tag set for the DB stack: CDP default tags generated
 * from stack/creator metadata are merged over the request's user-defined
 * tags, and the environment's user-defined tags fill any remaining gaps.
 *
 * @param dbStack     stack whose CRN/account/platform seed the default tags
 * @param dbRequest   request possibly carrying user-defined tags
 * @param environment environment whose user tags are applied if absent
 * @return JSON wrapping the combined {@link StackTags}
 */
private Json getTags(DBStack dbStack, AllocateDatabaseServerV4Request dbRequest, DetailedEnvironmentResponse environment) {
    boolean internalTenant = entitlementService.internalTenant(dbStack.getAccountId());
    Map<String, String> resultTags = getTags(dbRequest.getTags());
    CDPTagGenerationRequest request = CDPTagGenerationRequest.Builder
            .builder()
            .withCreatorCrn(dbStack.getOwnerCrn().toString())
            .withEnvironmentCrn(dbStack.getEnvironmentId())
            .withPlatform(dbStack.getCloudPlatform())
            .withAccountId(dbStack.getAccountId())
            .withResourceCrn(dbStack.getResourceCrn())
            .withIsInternalTenant(internalTenant)
            .withUserName(dbStack.getUserName())
            .withAccountTags(accountTagService.list())
            .withUserDefinedTags(resultTags)
            .build();
    Map<String, String> defaultTags = costTagging.prepareDefaultTags(request);
    // Guard against an environment without a tags object; previously
    // environment.getTags() was dereferenced unconditionally and could NPE.
    Map<String, String> environmentUserTags = environment.getTags() != null
            ? Objects.requireNonNullElse(environment.getTags().getUserDefined(), new HashMap<>())
            : new HashMap<>();
    // default tags override user tags; environment tags only fill gaps
    resultTags.putAll(defaultTags);
    environmentUserTags.forEach(resultTags::putIfAbsent);
    return new Json(new StackTags(resultTags, new HashMap<>(), defaultTags));
}
/** Returns a mutable copy of the given tags, or an empty map when null. */
private Map<String, String> getTags(Map<String, String> tags) {
    if (tags == null) {
        return new HashMap<>();
    }
    return new HashMap<>(tags);
}
/**
 * Copies the region name from the environment's location onto the stack.
 *
 * @throws RedbeamsException when the environment carries no location
 */
private void setRegion(DBStack dbStack, DetailedEnvironmentResponse environment) {
    if (environment.getLocation() == null) {
        throw new RedbeamsException("Environment does not contain region");
    }
    dbStack.setRegion(environment.getLocation().getName());
}
/**
 * Resolves the effective cloud platform (a platform given in the request
 * must match the environment's; otherwise the environment's is used),
 * checks it is supported, and writes it back onto the request, its nested
 * network/database-server parts, and the DBStack (also as the variant).
 *
 * @return the resolved platform enum
 */
private CloudPlatform updateCloudPlatformAndRelatedFields(AllocateDatabaseServerV4Request request, DBStack dbStack, String cloudPlatformEnvironment) {
    String cloudPlatformRequest;
    if (request.getCloudPlatform() != null) {
        cloudPlatformRequest = request.getCloudPlatform().name();
        checkCloudPlatformsMatch(cloudPlatformEnvironment, cloudPlatformRequest);
    } else {
        cloudPlatformRequest = cloudPlatformEnvironment;
    }
    LOGGER.debug("Cloud platform is {}", cloudPlatformRequest);
    CloudPlatform cloudPlatform = CloudPlatform.valueOf(cloudPlatformRequest);
    checkCloudPlatformIsSupported(cloudPlatform);
    // propagate the resolved platform onto the request and its sub-requests
    request.setCloudPlatform(cloudPlatform);
    if (request.getNetwork() != null) {
        request.getNetwork().setCloudPlatform(cloudPlatform);
    }
    if (request.getDatabaseServer() != null) {
        request.getDatabaseServer().setCloudPlatform(cloudPlatform);
    }
    dbStack.setCloudPlatform(cloudPlatformRequest);
    dbStack.setPlatformVariant(cloudPlatformRequest);
    return cloudPlatform;
}
/**
 * Validates that the given platform is one this database service supports.
 *
 * @throws BadRequestException when the platform is not in {@code dbServiceSupportedPlatforms}
 */
private void checkCloudPlatformIsSupported(CloudPlatform cloudPlatform) {
    if (dbServiceSupportedPlatforms.contains(cloudPlatform.toString())) {
        return;
    }
    throw new BadRequestException(String.format("Cloud platform %s not supported yet.", cloudPlatform));
}
/**
 * Ensures the platform named in the request equals the environment's platform.
 *
 * @throws BadRequestException when the two platform names differ
 */
private void checkCloudPlatformsMatch(String cloudPlatformEnvironment, String cloudPlatformRequest) {
    if (cloudPlatformEnvironment.equals(cloudPlatformRequest)) {
        return;
    }
    throw new BadRequestException(String.format(
            "Cloud platform of the request %s and the environment %s do not match.", cloudPlatformRequest, cloudPlatformEnvironment));
}
/**
 * Derives subnet parameters from the environment: lists the environment's subnets, lets the
 * chooser pick the suitable ones, and hands their ids and availability zones to the
 * network-parameter adder.
 *
 * @return provider-specific network parameters keyed for the given platform
 */
private Map<String, Object> getSubnetsFromEnvironment(DetailedEnvironmentResponse environmentResponse, CloudPlatform cloudPlatform,
        DBStack dbStack) {
    List<CloudSubnet> candidates = subnetListerService.listSubnets(environmentResponse, cloudPlatform);
    List<CloudSubnet> chosen = subnetChooserService.chooseSubnets(candidates, cloudPlatform, dbStack);
    return networkParameterAdder.addSubnetIds(
            new HashMap<>(),
            chosen.stream().map(CloudSubnet::getId).collect(Collectors.toList()),
            chosen.stream().map(CloudSubnet::getAvailabilityZone).collect(Collectors.toList()),
            cloudPlatform);
}
/**
 * Builds the {@link Network} entity for the stack. Parameters come from the request's network
 * section when present, otherwise from the environment's subnets; platform-specific parameters
 * are then mixed in before being serialized as the network's JSON attributes.
 *
 * @throws BadRequestException when the assembled parameters cannot be serialized to JSON
 */
private Network buildNetwork(NetworkV4StackRequest source, DetailedEnvironmentResponse environmentResponse, CloudPlatform cloudPlatform,
        DBStack dbStack) {
    Network network = new Network();
    network.setName(generateNetworkName());
    Map<String, Object> attributes;
    if (source == null) {
        // No network in the request: fall back to the environment's subnets.
        attributes = getSubnetsFromEnvironment(environmentResponse, cloudPlatform, dbStack);
    } else {
        attributes = providerParameterCalculator.get(source).asMap();
    }
    networkParameterAdder.addParameters(attributes, environmentResponse, cloudPlatform, dbStack);
    if (attributes != null) {
        try {
            network.setAttributes(new Json(attributes));
        } catch (IllegalArgumentException e) {
            throw new BadRequestException("Invalid network parameters", e);
        }
    }
    return network;
}
/**
 * Builds the {@link DatabaseServer} entity from the request section: copies instance/vendor/
 * storage settings, generates root credentials when the request omits them, resolves the
 * security group, and serializes provider-specific parameters (with the DB version defaulted)
 * into the server's JSON attributes.
 *
 * @throws BadRequestException when the provider parameters cannot be serialized to JSON
 */
private DatabaseServer buildDatabaseServer(DatabaseServerV4StackRequest source, CloudPlatform cloudPlatform, Crn ownerCrn,
        SecurityAccessResponse securityAccessResponse) {
    DatabaseServer dbServer = new DatabaseServer();
    dbServer.setAccountId(ownerCrn.getAccountId());
    dbServer.setName(generateDatabaseServerName());
    dbServer.setInstanceType(source.getInstanceType());
    dbServer.setDatabaseVendor(DatabaseVendor.fromValue(source.getDatabaseVendor()));
    dbServer.setConnectionDriver(source.getConnectionDriver());
    dbServer.setStorageSize(source.getStorageSize());
    // Generate credentials only when the request does not supply them.
    if (source.getRootUserName() != null) {
        dbServer.setRootUserName(source.getRootUserName());
    } else {
        dbServer.setRootUserName(userGeneratorService.generateUserName());
    }
    if (source.getRootUserPassword() != null) {
        dbServer.setRootPassword(source.getRootUserPassword());
    } else {
        dbServer.setRootPassword(passwordGeneratorService.generatePassword(Optional.of(cloudPlatform)));
    }
    dbServer.setPort(source.getPort());
    dbServer.setSecurityGroup(buildExistingSecurityGroup(source.getSecurityGroup(), securityAccessResponse));
    Map<String, Object> attributes = providerParameterCalculator.get(source).asMap();
    if (attributes != null) {
        try {
            setDbVersion(attributes, cloudPlatform);
            dbServer.setAttributes(new Json(attributes));
        } catch (IllegalArgumentException e) {
            throw new BadRequestException("Invalid database server parameters", e);
        }
    }
    return dbServer;
}
/**
 * Defaults the platform-specific DB version parameter to {@code redbeamsDbMajorVersion}
 * unless the caller already provided one. ({@code putIfAbsent} matches the original
 * {@code computeIfAbsent} semantics here: both replace an absent or null-valued mapping.)
 */
private void setDbVersion(Map<String, Object> parameters, CloudPlatform cloudPlatform) {
    parameters.putIfAbsent(getDbVersionKey(cloudPlatform), redbeamsDbMajorVersion);
}
/**
 * Returns the provider-parameter key under which the DB engine version is stored:
 * Azure uses {@code dbVersion}, every other platform uses {@code engineVersion}.
 */
private String getDbVersionKey(CloudPlatform cloudPlatform) {
    return cloudPlatform == CloudPlatform.AZURE ? "dbVersion" : "engineVersion";
}
/**
 * Resolves the security-group ids to persist: the ids from the request take precedence;
 * failing that, the environment's default security group id is used. When neither is
 * present the returned group is left empty, and a custom security group is created later
 * in the SPI layer.
 *
 * @param source the security-group section of the request, may be {@code null}
 * @param securityAccessResponse security data from the environment
 * @return a {@link SecurityGroup}, possibly with no ids set — never {@code null}
 */
private SecurityGroup buildExistingSecurityGroup(SecurityGroupV4StackRequest source, SecurityAccessResponse securityAccessResponse) {
    SecurityGroup securityGroup = new SecurityGroup();
    if (source == null) {
        if (securityAccessResponse.getDefaultSecurityGroupId() != null) {
            securityGroup.setSecurityGroupIds(getSecurityGroupIds(securityAccessResponse.getDefaultSecurityGroupId()));
        }
    } else {
        securityGroup.setSecurityGroupIds(source.getSecurityGroupIds());
    }
    return securityGroup;
}
// Names are generated inline rather than via MissingResourceNameGenerator; unlike other
// converters, this one produces them in the same fixed "n-<uuid>" format.
private String generateNetworkName() {
    return "n-" + uuidGeneratorService.randomUuid();
}
/** Generates a database server name in the fixed {@code dbsvr-<uuid>} format. */
private String generateDatabaseServerName() {
    return "dbsvr-" + uuidGeneratorService.randomUuid();
}
/**
 * Builds the stack name as {@code <environmentName>-<prefix>-<uuid>}, sizing the uuid part so
 * the whole name stays within {@code RDS_NAME_MAX_LENGTH} (the -1 accounts for the final dash).
 */
private String generateDatabaseServerStackName(String environmentName) {
    String prefix = environmentName + "-" + DBSTACK_NAME_PREFIX;
    String uuidPart = uuidGeneratorService.uuidVariableParts(RDS_NAME_MAX_LENGTH - prefix.length() - 1);
    return prefix + "-" + uuidPart;
}
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.testsuite.admin.authentication;
import org.junit.Test;
import org.keycloak.authentication.authenticators.broker.IdpCreateUserIfUniqueAuthenticatorFactory;
import org.keycloak.representations.idm.AuthenticatorConfigInfoRepresentation;
import org.keycloak.representations.idm.ConfigPropertyRepresentation;
import org.keycloak.testsuite.Assert;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.ws.rs.NotFoundException;
/**
* @author <a href="mailto:mstrukel@redhat.com">Marko Strukelj</a>
*/
public class ProvidersTest extends AbstractAuthenticationTest {

    /** Verifies the single registered form provider (the registration page form). */
    @Test
    public void testFormProviders() {
        List<Map<String, Object>> result = authMgmtResource.getFormProviders();

        Assert.assertNotNull("null result", result);
        Assert.assertEquals("size", 1, result.size());
        Map<String, Object> item = result.get(0);
        Assert.assertEquals("id", "registration-page-form", item.get("id"));
        Assert.assertEquals("displayName", "Registration Page", item.get("displayName"));
        Assert.assertEquals("description", "This is the controller for the registration page", item.get("description"));
    }

    /** Verifies the expected set of form-action providers, ignoring ordering. */
    @Test
    public void testFormActionProviders() {
        List<Map<String, Object>> result = authMgmtResource.getFormActionProviders();

        List<Map<String, Object>> expected = new LinkedList<>();
        addProviderInfo(expected, "registration-profile-action", "Profile Validation",
                "Validates email, first name, and last name attributes and stores them in user data.");
        addProviderInfo(expected, "registration-recaptcha-action", "Recaptcha",
                "Adds Google Recaptcha button. Recaptchas verify that the entity that is registering is a human. " +
                        "This can only be used on the internet and must be configured after you add it.");
        addProviderInfo(expected, "registration-password-action", "Password Validation",
                "Validates that password matches password confirmation field. It also will store password in user's credential store.");
        addProviderInfo(expected, "registration-user-creation", "Registration User Creation",
                "This action must always be first! Validates the username of the user in validation phase. " +
                        "In success phase, this will create the user in the database.");

        compareProviders(expected, result);
    }

    /** Verifies the expected set of client-authenticator providers. */
    @Test
    public void testClientAuthenticatorProviders() {
        List<Map<String, Object>> result = authMgmtResource.getClientAuthenticatorProviders();

        List<Map<String, Object>> expected = new LinkedList<>();
        addProviderInfo(expected, "client-jwt", "Signed Jwt",
                "Validates client based on signed JWT issued by client and signed with the Client private key");
        addProviderInfo(expected, "client-secret", "Client Id and Secret", "Validates client based on 'client_id' and " +
                "'client_secret' sent either in request parameters or in 'Authorization: Basic' header");
        addProviderInfo(expected, "testsuite-client-passthrough", "Testsuite Dummy Client Validation", "Testsuite dummy authenticator, " +
                "which automatically authenticates hardcoded client (like 'test-app' )");

        compareProviders(expected, result);
    }

    /** Verifies per-client config descriptions: empty for built-in authenticators, populated for the testsuite one. */
    @Test
    public void testPerClientConfigDescriptions() {
        Map<String, List<ConfigPropertyRepresentation>> configs = authMgmtResource.getPerClientConfigDescription();

        Assert.assertTrue(configs.containsKey("client-jwt"));
        Assert.assertTrue(configs.containsKey("client-secret"));
        Assert.assertTrue(configs.containsKey("testsuite-client-passthrough"));
        Assert.assertTrue(configs.get("client-jwt").isEmpty());
        Assert.assertTrue(configs.get("client-secret").isEmpty());

        List<ConfigPropertyRepresentation> cfg = configs.get("testsuite-client-passthrough");
        Assert.assertProviderConfigProperty(cfg.get(0), "passthroughauth.foo", "Foo Property", null,
                "Foo Property of this authenticator, which does nothing", "String");
        Assert.assertProviderConfigProperty(cfg.get(1), "passthroughauth.bar", "Bar Property", null,
                "Bar Property of this authenticator, which does nothing", "boolean");
    }

    /** Verifies config description lookup: 404 for unknown providers, full info for a known one. */
    @Test
    public void testAuthenticatorConfigDescription() {
        // Try some not-existent provider
        try {
            authMgmtResource.getAuthenticatorConfigDescription("not-existent");
            Assert.fail("Don't expected to find provider 'not-existent'");
        } catch (NotFoundException nfe) {
            // Expected
        }

        AuthenticatorConfigInfoRepresentation infoRep = authMgmtResource.getAuthenticatorConfigDescription(IdpCreateUserIfUniqueAuthenticatorFactory.PROVIDER_ID);
        Assert.assertEquals("Create User If Unique", infoRep.getName());
        Assert.assertEquals(IdpCreateUserIfUniqueAuthenticatorFactory.PROVIDER_ID, infoRep.getProviderId());
        Assert.assertEquals("Detect if there is existing Keycloak account with same email like identity provider. If no, create new user", infoRep.getHelpText());
        Assert.assertEquals(1, infoRep.getProperties().size());
        Assert.assertProviderConfigProperty(infoRep.getProperties().get(0), "require.password.update.after.registration", "Require Password Update After Registration",
                null, "If this option is true and new user is successfully imported from Identity Provider to Keycloak (there is no duplicated email or username detected in Keycloak DB), then this user is required to update his password",
                "boolean");
    }

    /** Verifies the full, id-sorted list of authenticator providers shipped by default. */
    @Test
    public void testInitialAuthenticationProviders() {
        List<Map<String, Object>> providers = authMgmtResource.getAuthenticatorProviders();
        providers = sortProviders(providers);
        compareProviders(expectedAuthProviders(), providers);
    }

    /** Builds the expected provider list, already in id-sorted order. */
    private List<Map<String, Object>> expectedAuthProviders() {
        ArrayList<Map<String, Object>> result = new ArrayList<>();
        addProviderInfo(result, "auth-conditional-otp-form", "Conditional OTP Form",
                "Validates a OTP on a separate OTP form. Only shown if required based on the configured conditions.");
        addProviderInfo(result, "auth-cookie", "Cookie", "Validates the SSO cookie set by the auth server.");
        addProviderInfo(result, "auth-otp-form", "OTP Form", "Validates a OTP on a separate OTP form.");
        addProviderInfo(result, "auth-script-based", "Script-based Authentication", "Script based authentication.");
        addProviderInfo(result, "auth-spnego", "Kerberos", "Initiates the SPNEGO protocol. Most often used with Kerberos.");
        addProviderInfo(result, "auth-username-password-form", "Username Password Form",
                "Validates a username and password from login form.");
        addProviderInfo(result, "direct-grant-validate-otp", "OTP", "Validates the one time password supplied as a 'totp' form parameter in direct grant request");
        addProviderInfo(result, "direct-grant-validate-password", "Password",
                "Validates the password supplied as a 'password' form parameter in direct grant request");
        addProviderInfo(result, "direct-grant-validate-username", "Username Validation",
                "Validates the username supplied as a 'username' form parameter in direct grant request");
        addProviderInfo(result, "http-basic-authenticator", null, null);
        addProviderInfo(result, "idp-confirm-link", "Confirm link existing account", "Show the form where user confirms if he wants " +
                "to link identity provider with existing account or rather edit user profile data retrieved from identity provider to avoid conflict");
        addProviderInfo(result, "idp-create-user-if-unique", "Create User If Unique", "Detect if there is existing Keycloak account " +
                "with same email like identity provider. If no, create new user");
        addProviderInfo(result, "idp-email-verification", "Verify existing account by Email", "Email verification of existing Keycloak " +
                "user, that wants to link his user account with identity provider");
        addProviderInfo(result, "idp-review-profile", "Review Profile",
                "User reviews and updates profile data retrieved from Identity Provider in the displayed form");
        addProviderInfo(result, "idp-username-password-form", "Username Password Form for identity provider reauthentication",
                "Validates a password from login form. Username is already known from identity provider authentication");
        addProviderInfo(result, "reset-credential-email", "Send Reset Email", "Send email to user and wait for response.");
        addProviderInfo(result, "reset-credentials-choose-user", "Choose User", "Choose a user to reset credentials for");
        addProviderInfo(result, "reset-otp", "Reset OTP", "Sets the Configure OTP required action if execution is REQUIRED. " +
                "Will also set it if execution is OPTIONAL and the OTP is currently configured for it.");
        addProviderInfo(result, "reset-password", "Reset Password", "Sets the Update Password required action if execution is REQUIRED. " +
                "Will also set it if execution is OPTIONAL and the password is currently configured for it.");
        addProviderInfo(result, "testsuite-dummy-passthrough", "Testsuite Dummy Pass Thru",
                "Testsuite Dummy authenticator. Just passes through and is hardcoded to a specific user");
        addProviderInfo(result, "testsuite-dummy-registration", "Testsuite Dummy Pass Thru",
                "Testsuite Dummy authenticator. Just passes through and is hardcoded to a specific user");
        return result;
    }

    /** Returns a copy of the given list sorted by provider id. */
    private List<Map<String, Object>> sortProviders(List<Map<String, Object>> providers) {
        ArrayList<Map<String, Object>> sorted = new ArrayList<>(providers);
        Collections.sort(sorted, new ProviderComparator());
        return sorted;
    }

    /** Asserts the two provider lists are equal, element by element. */
    private void compareProviders(List<Map<String, Object>> expected, List<Map<String, Object>> actual) {
        Assert.assertEquals("Providers count", expected.size(), actual.size());
        // compare ignoring list and map impl types
        Assert.assertEquals(normalizeResults(expected), normalizeResults(actual));
    }

    /** Rebuilds the list with uniform ArrayList/HashMap impls so equals() ignores impl types. */
    private List<Map<String, Object>> normalizeResults(List<Map<String, Object>> list) {
        // Parameterized types (was raw ArrayList/HashMap, which produced unchecked warnings).
        ArrayList<Map<String, Object>> result = new ArrayList<>();
        for (Map<String, Object> item : list) {
            result.add(new HashMap<>(item));
        }
        return result;
    }

    /** Appends an {id, displayName, description} map to the list. */
    private void addProviderInfo(List<Map<String, Object>> list, String id, String displayName, String description) {
        HashMap<String, Object> item = new HashMap<>();
        item.put("id", id);
        item.put("displayName", displayName);
        item.put("description", description);
        list.add(item);
    }

    /** Orders provider maps by the string value of their "id" entry. */
    private static class ProviderComparator implements Comparator<Map<String, Object>> {
        @Override
        public int compare(Map<String, Object> o1, Map<String, Object> o2) {
            return String.valueOf(o1.get("id")).compareTo(String.valueOf(o2.get("id")));
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Vera Y. Petrashkova
* @version $Revision$
*/
package org.apache.harmony.security.tests.java.security;
import dalvik.annotation.TestTargetClass;
import dalvik.annotation.TestTargets;
import dalvik.annotation.TestLevel;
import dalvik.annotation.TestTargetNew;
import java.security.AlgorithmParameterGenerator;
import java.security.AlgorithmParameters;
import java.security.InvalidAlgorithmParameterException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.Provider;
import java.security.SecureRandom;
import java.security.Security;
import java.security.spec.AlgorithmParameterSpec;
import org.apache.harmony.security.tests.support.SpiEngUtils;
import junit.framework.TestCase;
@TestTargetClass(AlgorithmParameterGenerator.class)
/**
* Tests for <code>AlgorithmParameterGenerator</code> class constructors and
* methods.
*/
public class AlgorithmParameterGenerator2Test extends TestCase {
private static final String AlgorithmParameterGeneratorProviderClass = "org.apache.harmony.security.tests.support.MyAlgorithmParameterGeneratorSpi";
private static final String defaultAlg = "APG";
private static final String[] invalidValues = SpiEngUtils.invalidValues;
private static final String[] validValues;
static {
validValues = new String[4];
validValues[0] = defaultAlg;
validValues[1] = defaultAlg.toLowerCase();
validValues[2] = "apG";
validValues[3] = "ApG";
}
Provider mProv;
protected void setUp() throws Exception {
super.setUp();
mProv = (new SpiEngUtils()).new MyProvider("MyAPGProvider", "Testing provider",
AlgorithmParameterGenerator1Test.srvAlgorithmParameterGenerator.concat(".").concat(defaultAlg),
AlgorithmParameterGeneratorProviderClass);
Security.insertProviderAt(mProv, 1);
}
/*
* @see TestCase#tearDown()
*/
protected void tearDown() throws Exception {
super.tearDown();
Security.removeProvider(mProv.getName());
}
private void checkResult(AlgorithmParameterGenerator algParGen)
throws InvalidAlgorithmParameterException {
AlgorithmParameters param = algParGen.generateParameters();
assertNull("Not null parameters", param);
AlgorithmParameterSpec pp = null;
algParGen.init(pp, new SecureRandom());
algParGen.init(pp);
try {
algParGen.init(pp, null);
fail("IllegalArgumentException must be thrown");
} catch (IllegalArgumentException e) {
}
pp = new tmpAlgorithmParameterSpec("Proba");
algParGen.init(pp, new SecureRandom());
algParGen.init(pp);
algParGen.init(0, null);
algParGen.init(0, new SecureRandom());
try {
algParGen.init(-10, null);
fail("IllegalArgumentException must be thrown");
} catch (IllegalArgumentException e) {
}
try {
algParGen.init(-10, new SecureRandom());
fail("IllegalArgumentException must be thrown");
} catch (IllegalArgumentException e) {
}
}
/**
* Test for <code>getInstance(String algorithm)</code> method
* Assertions:
* throws NullPointerException must be thrown is null
* throws NoSuchAlgorithmException must be thrown if algorithm is not available
* returns AlgorithmParameterGenerator object
*/
@TestTargetNew(
level = TestLevel.COMPLETE,
notes = "",
method = "getInstance",
args = {java.lang.String.class}
)
public void testGetInstance01() throws NoSuchAlgorithmException,
InvalidAlgorithmParameterException {
try {
AlgorithmParameterGenerator.getInstance(null);
fail("NullPointerException or NoSuchAlgorithmException should be thrown");
} catch (NullPointerException e) {
} catch (NoSuchAlgorithmException e) {
}
for (int i = 0; i < invalidValues.length; i++) {
try {
AlgorithmParameterGenerator.getInstance(invalidValues[i]);
fail("NoSuchAlgorithmException must be thrown (algorithm: "
.concat(invalidValues[i]).concat(")"));
} catch (NoSuchAlgorithmException e) {
}
}
AlgorithmParameterGenerator apG;
for (int i = 0; i < validValues.length; i++) {
apG = AlgorithmParameterGenerator.getInstance(validValues[i]);
assertEquals("Incorrect algorithm", apG.getAlgorithm(),
validValues[i]);
assertEquals("Incorrect provider", apG.getProvider(), mProv);
checkResult(apG);
}
}
/**
* Test for <code>getInstance(String algorithm, String provider)</code>
* method
* Assertions:
* throws NullPointerException must be thrown is null
* throws NoSuchAlgorithmException must be thrown if algorithm is not available
* throws IllegalArgumentException when provider is null;
* throws NoSuchProviderException when provider is available;
* returns AlgorithmParameterGenerator object
*/
@TestTargetNew(
level = TestLevel.COMPLETE,
notes = "",
method = "getInstance",
args = {java.lang.String.class, java.lang.String.class}
)
public void testGetInstance02() throws NoSuchAlgorithmException,
NoSuchProviderException, IllegalArgumentException,
InvalidAlgorithmParameterException {
try {
AlgorithmParameterGenerator.getInstance(null, mProv.getName());
fail("NullPointerException or NoSuchAlgorithmException should be thrown");
} catch (NullPointerException e) {
} catch (NoSuchAlgorithmException e) {
}
for (int i = 0; i < invalidValues.length; i++) {
try {
AlgorithmParameterGenerator.getInstance(invalidValues[i], mProv
.getName());
fail("NoSuchAlgorithmException must be thrown (algorithm: "
.concat(invalidValues[i]).concat(")"));
} catch (NoSuchAlgorithmException e) {
}
}
String prov = null;
for (int i = 0; i < validValues.length; i++) {
try {
AlgorithmParameterGenerator.getInstance(validValues[i], prov);
fail("IllegalArgumentException must be thrown when provider is null (algorithm: "
.concat(invalidValues[i]).concat(")"));
} catch (IllegalArgumentException e) {
}
}
for (int i = 0; i < validValues.length; i++) {
for (int j = 1; j < invalidValues.length; j++) {
try {
AlgorithmParameterGenerator.getInstance(validValues[i],
invalidValues[j]);
fail("NoSuchProviderException must be thrown (algorithm: "
.concat(invalidValues[i]).concat(" provider: ")
.concat(invalidValues[j]).concat(")"));
} catch (NoSuchProviderException e) {
}
}
}
AlgorithmParameterGenerator apG;
for (int i = 0; i < validValues.length; i++) {
apG = AlgorithmParameterGenerator.getInstance(validValues[i], mProv
.getName());
assertEquals("Incorrect algorithm", apG.getAlgorithm(),
validValues[i]);
assertEquals("Incorrect provider", apG.getProvider().getName(),
mProv.getName());
checkResult(apG);
}
}
/**
* Test for <code>getInstance(String algorithm, Provider provider)</code>
* method
* Assertions:
* throws NullPointerException must be thrown is null
* throws NoSuchAlgorithmException must be thrown if algorithm is not available
* throws IllegalArgumentException when provider is null;
* returns AlgorithmParameterGenerator object
*/
@TestTargetNew(
level = TestLevel.COMPLETE,
notes = "",
method = "getInstance",
args = {java.lang.String.class, java.security.Provider.class}
)
public void testGetInstance03() throws NoSuchAlgorithmException,
IllegalArgumentException,
InvalidAlgorithmParameterException {
try {
AlgorithmParameterGenerator.getInstance(null, mProv);
fail("NullPointerException or NoSuchAlgorithmException should be thrown");
} catch (NullPointerException e) {
} catch (NoSuchAlgorithmException e) {
}
for (int i = 0; i < invalidValues.length; i++) {
try {
AlgorithmParameterGenerator.getInstance(invalidValues[i], mProv);
fail("NoSuchAlgorithmException must be thrown (algorithm: "
.concat(invalidValues[i]).concat(")"));
} catch (NoSuchAlgorithmException e) {
}
}
Provider prov = null;
for (int i = 0; i < validValues.length; i++) {
try {
AlgorithmParameterGenerator.getInstance(validValues[i], prov);
fail("IllegalArgumentException must be thrown when provider is null (algorithm: "
.concat(invalidValues[i]).concat(")"));
} catch (IllegalArgumentException e) {
}
}
AlgorithmParameterGenerator apG;
for (int i = 0; i < validValues.length; i++) {
apG = AlgorithmParameterGenerator.getInstance(validValues[i], mProv);
assertEquals("Incorrect algorithm", apG.getAlgorithm(),
validValues[i]);
assertEquals("Incorrect provider", apG.getProvider(), mProv);
checkResult(apG);
}
}
/**
* Additional class for init(...) methods verification
*/
class tmpAlgorithmParameterSpec implements AlgorithmParameterSpec {
private final String type;
public tmpAlgorithmParameterSpec(String type) {
this.type = type;
}
public String getType() {
return type;
}
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/documentai/v1beta1/document_understanding.proto
package com.google.cloud.documentai.v1beta1;
/**
*
*
* <pre>
* Contains metadata for the BatchProcessDocuments operation.
* </pre>
*
* Protobuf type {@code google.cloud.documentai.v1beta1.OperationMetadata}
*/
public final class OperationMetadata extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.documentai.v1beta1.OperationMetadata)
OperationMetadataOrBuilder {
private static final long serialVersionUID = 0L;
// Use OperationMetadata.newBuilder() to construct.
// Builder-based constructor; instances are created via OperationMetadata.newBuilder().
// Generated by protoc — do not hand-edit.
private OperationMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
}
// Default-instance constructor: initializes fields to proto3 defaults
// (state_ = 0 i.e. STATE_UNSPECIFIED, empty state message). Generated by protoc.
private OperationMetadata() {
    state_ = 0;
    stateMessage_ = "";
}
// Reflection hook used by the protobuf runtime to create empty instances. Generated by protoc.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new OperationMetadata();
}
// Returns fields present on the wire that this message version does not define. Generated by protoc.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0), dispatching on
// the tag (field number << 3 | wire type). Unknown fields are preserved in unknownFields.
// Generated by protoc — do not hand-edit; the statement order in the finally block matters.
private OperationMetadata(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // End of stream.
          done = true;
          break;
        case 8:
          {
            // Field 1 (varint): state enum, stored as its raw numeric value.
            int rawValue = input.readEnum();

            state_ = rawValue;
            break;
          }
        case 18:
          {
            // Field 2 (length-delimited): state message string.
            java.lang.String s = input.readStringRequireUtf8();

            stateMessage_ = s;
            break;
          }
        case 26:
          {
            // Field 3 (length-delimited): create time; merged into any previously parsed value.
            com.google.protobuf.Timestamp.Builder subBuilder = null;
            if (createTime_ != null) {
              subBuilder = createTime_.toBuilder();
            }
            createTime_ =
                input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(createTime_);
              createTime_ = subBuilder.buildPartial();
            }

            break;
          }
        case 34:
          {
            // Field 4 (length-delimited): update time; merged into any previously parsed value.
            com.google.protobuf.Timestamp.Builder subBuilder = null;
            if (updateTime_ != null) {
              subBuilder = updateTime_.toBuilder();
            }
            updateTime_ =
                input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(updateTime_);
              updateTime_ = subBuilder.buildPartial();
            }

            break;
          }
        default:
          {
            // Unknown field: keep it unless parseUnknownField signals end of group/stream.
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Returns the message's descriptor from the generated file-level proto registry. Generated by protoc.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.documentai.v1beta1.DocumentAiProto
        .internal_static_google_cloud_documentai_v1beta1_OperationMetadata_descriptor;
}
// Supplies the reflective field-accessor table binding this message class to its Builder.
// Generated by protoc.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.documentai.v1beta1.DocumentAiProto
      .internal_static_google_cloud_documentai_v1beta1_OperationMetadata_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.documentai.v1beta1.OperationMetadata.class,
          com.google.cloud.documentai.v1beta1.OperationMetadata.Builder.class);
}
/** Protobuf enum {@code google.cloud.documentai.v1beta1.OperationMetadata.State} */
/** Protobuf enum {@code google.cloud.documentai.v1beta1.OperationMetadata.State} */
// Lifecycle states of a BatchProcessDocuments operation. Generated by protoc — do not hand-edit.
public enum State implements com.google.protobuf.ProtocolMessageEnum {
  /**
   *
   *
   * <pre>
   * The default value. This value is used if the state is omitted.
   * </pre>
   *
   * <code>STATE_UNSPECIFIED = 0;</code>
   */
  STATE_UNSPECIFIED(0),
  /**
   *
   *
   * <pre>
   * Request is received.
   * </pre>
   *
   * <code>ACCEPTED = 1;</code>
   */
  ACCEPTED(1),
  /**
   *
   *
   * <pre>
   * Request operation is waiting for scheduling.
   * </pre>
   *
   * <code>WAITING = 2;</code>
   */
  WAITING(2),
  /**
   *
   *
   * <pre>
   * Request is being processed.
   * </pre>
   *
   * <code>RUNNING = 3;</code>
   */
  RUNNING(3),
  /**
   *
   *
   * <pre>
   * The batch processing completed successfully.
   * </pre>
   *
   * <code>SUCCEEDED = 4;</code>
   */
  SUCCEEDED(4),
  /**
   *
   *
   * <pre>
   * The batch processing was cancelled.
   * </pre>
   *
   * <code>CANCELLED = 5;</code>
   */
  CANCELLED(5),
  /**
   *
   *
   * <pre>
   * The batch processing has failed.
   * </pre>
   *
   * <code>FAILED = 6;</code>
   */
  FAILED(6),
  // Sentinel for wire values not known to this generated version; has no descriptor or number.
  UNRECOGNIZED(-1),
  ;

  /**
   *
   *
   * <pre>
   * The default value. This value is used if the state is omitted.
   * </pre>
   *
   * <code>STATE_UNSPECIFIED = 0;</code>
   */
  public static final int STATE_UNSPECIFIED_VALUE = 0;
  /**
   *
   *
   * <pre>
   * Request is received.
   * </pre>
   *
   * <code>ACCEPTED = 1;</code>
   */
  public static final int ACCEPTED_VALUE = 1;
  /**
   *
   *
   * <pre>
   * Request operation is waiting for scheduling.
   * </pre>
   *
   * <code>WAITING = 2;</code>
   */
  public static final int WAITING_VALUE = 2;
  /**
   *
   *
   * <pre>
   * Request is being processed.
   * </pre>
   *
   * <code>RUNNING = 3;</code>
   */
  public static final int RUNNING_VALUE = 3;
  /**
   *
   *
   * <pre>
   * The batch processing completed successfully.
   * </pre>
   *
   * <code>SUCCEEDED = 4;</code>
   */
  public static final int SUCCEEDED_VALUE = 4;
  /**
   *
   *
   * <pre>
   * The batch processing was cancelled.
   * </pre>
   *
   * <code>CANCELLED = 5;</code>
   */
  public static final int CANCELLED_VALUE = 5;
  /**
   *
   *
   * <pre>
   * The batch processing has failed.
   * </pre>
   *
   * <code>FAILED = 6;</code>
   */
  public static final int FAILED_VALUE = 6;

  // Returns the proto wire number; UNRECOGNIZED has none and throws.
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static State valueOf(int value) {
    return forNumber(value);
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   */
  // Returns null (not UNRECOGNIZED) for numbers outside 0..6.
  public static State forNumber(int value) {
    switch (value) {
      case 0:
        return STATE_UNSPECIFIED;
      case 1:
        return ACCEPTED;
      case 2:
        return WAITING;
      case 3:
        return RUNNING;
      case 4:
        return SUCCEEDED;
      case 5:
        return CANCELLED;
      case 6:
        return FAILED;
      default:
        return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<State> internalGetValueMap() {
    return internalValueMap;
  }

  // Number-to-enum mapping used by the protobuf runtime during parsing.
  private static final com.google.protobuf.Internal.EnumLiteMap<State> internalValueMap =
      new com.google.protobuf.Internal.EnumLiteMap<State>() {
        public State findValueByNumber(int number) {
          return State.forNumber(number);
        }
      };

  // Descriptor-based reflection accessor; UNRECOGNIZED has no descriptor and throws.
  public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalStateException(
          "Can't get the descriptor of an unrecognized enum value.");
    }
    return getDescriptor().getValues().get(ordinal());
  }

  public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
    return getDescriptor();
  }

  public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
    return com.google.cloud.documentai.v1beta1.OperationMetadata.getDescriptor()
        .getEnumTypes()
        .get(0);
  }

  // Cached values() array so valueOf(descriptor) avoids re-allocating on each call.
  private static final State[] VALUES = values();

  public static State valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  // The proto wire number backing this constant (-1 for UNRECOGNIZED).
  private final int value;

  private State(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.cloud.documentai.v1beta1.OperationMetadata.State)
}
// Field number and raw wire value backing the "state" enum field.
public static final int STATE_FIELD_NUMBER = 1;
private int state_;
/**
 *
 *
 * <pre>
 * The state of the current batch processing.
 * </pre>
 *
 * <code>.google.cloud.documentai.v1beta1.OperationMetadata.State state = 1;</code>
 *
 * @return The enum numeric value on the wire for state.
 */
@java.lang.Override
public int getStateValue() {
return state_;
}
/**
 *
 *
 * <pre>
 * The state of the current batch processing.
 * </pre>
 *
 * <code>.google.cloud.documentai.v1beta1.OperationMetadata.State state = 1;</code>
 *
 * @return The state, or {@code UNRECOGNIZED} if the wire value is unknown.
 */
@java.lang.Override
public com.google.cloud.documentai.v1beta1.OperationMetadata.State getState() {
// forNumber(int) replaces the deprecated valueOf(int); both return null for
// an unknown wire value, which is normalized to UNRECOGNIZED here, so no
// @SuppressWarnings("deprecation") is needed.
com.google.cloud.documentai.v1beta1.OperationMetadata.State result =
com.google.cloud.documentai.v1beta1.OperationMetadata.State.forNumber(state_);
return result == null
? com.google.cloud.documentai.v1beta1.OperationMetadata.State.UNRECOGNIZED
: result;
}
public static final int STATE_MESSAGE_FIELD_NUMBER = 2;
// Holds either a java.lang.String or a ByteString; the first String access
// decodes the UTF-8 bytes and caches the String (volatile for safe publication).
private volatile java.lang.Object stateMessage_;
/**
 *
 *
 * <pre>
 * A message providing more details about the current state of processing.
 * </pre>
 *
 * <code>string state_message = 2;</code>
 *
 * @return The stateMessage.
 */
@java.lang.Override
public java.lang.String getStateMessage() {
java.lang.Object ref = stateMessage_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Decode once and cache the decoded String for subsequent calls.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
stateMessage_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * A message providing more details about the current state of processing.
 * </pre>
 *
 * <code>string state_message = 2;</code>
 *
 * @return The bytes for stateMessage.
 */
@java.lang.Override
public com.google.protobuf.ByteString getStateMessageBytes() {
java.lang.Object ref = stateMessage_;
if (ref instanceof java.lang.String) {
// Encode once and cache the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
stateMessage_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CREATE_TIME_FIELD_NUMBER = 3;
// null means "field not set"; getters substitute the Timestamp default instance.
private com.google.protobuf.Timestamp createTime_;
/**
 *
 *
 * <pre>
 * The creation time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 3;</code>
 *
 * @return Whether the createTime field is set.
 */
@java.lang.Override
public boolean hasCreateTime() {
return createTime_ != null;
}
/**
 *
 *
 * <pre>
 * The creation time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 3;</code>
 *
 * @return The createTime, or the default instance if unset.
 */
@java.lang.Override
public com.google.protobuf.Timestamp getCreateTime() {
return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_;
}
/**
 *
 *
 * <pre>
 * The creation time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 3;</code>
 */
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() {
return getCreateTime();
}
public static final int UPDATE_TIME_FIELD_NUMBER = 4;
// null means "field not set"; getters substitute the Timestamp default instance.
private com.google.protobuf.Timestamp updateTime_;
/**
 *
 *
 * <pre>
 * The last update time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 4;</code>
 *
 * @return Whether the updateTime field is set.
 */
@java.lang.Override
public boolean hasUpdateTime() {
return updateTime_ != null;
}
/**
 *
 *
 * <pre>
 * The last update time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 4;</code>
 *
 * @return The updateTime, or the default instance if unset.
 */
@java.lang.Override
public com.google.protobuf.Timestamp getUpdateTime() {
return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_;
}
/**
 *
 *
 * <pre>
 * The last update time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 4;</code>
 */
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() {
return getUpdateTime();
}
// -1 = not computed yet, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
/**
 * This message has no required fields, so initialization always succeeds;
 * the result is memoized after the first call.
 */
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
/**
 * Serializes this message to {@code output}, emitting only fields that differ
 * from their proto3 defaults, in ascending field-number order, followed by any
 * unknown fields preserved from parsing.
 */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (state_
!= com.google.cloud.documentai.v1beta1.OperationMetadata.State.STATE_UNSPECIFIED
.getNumber()) {
output.writeEnum(1, state_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(stateMessage_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, stateMessage_);
}
if (createTime_ != null) {
output.writeMessage(3, getCreateTime());
}
if (updateTime_ != null) {
output.writeMessage(4, getUpdateTime());
}
unknownFields.writeTo(output);
}
/**
 * Computes the serialized byte size of this message. Mirrors {@code writeTo}:
 * only non-default fields contribute. The result is memoized in
 * {@code memoizedSize} (inherited field), so repeated calls are cheap.
 */
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (state_
!= com.google.cloud.documentai.v1beta1.OperationMetadata.State.STATE_UNSPECIFIED
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, state_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(stateMessage_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, stateMessage_);
}
if (createTime_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getCreateTime());
}
if (updateTime_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getUpdateTime());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
/**
 * Field-by-field equality: state, stateMessage, the presence and value of the
 * two Timestamp fields, and the unknown-field set must all match.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.documentai.v1beta1.OperationMetadata)) {
return super.equals(obj);
}
com.google.cloud.documentai.v1beta1.OperationMetadata other =
(com.google.cloud.documentai.v1beta1.OperationMetadata) obj;
if (state_ != other.state_) return false;
if (!getStateMessage().equals(other.getStateMessage())) return false;
if (hasCreateTime() != other.hasCreateTime()) return false;
if (hasCreateTime()) {
if (!getCreateTime().equals(other.getCreateTime())) return false;
}
if (hasUpdateTime() != other.hasUpdateTime()) return false;
if (hasUpdateTime()) {
if (!getUpdateTime().equals(other.getUpdateTime())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
/**
 * Hash over the same fields {@code equals} compares, memoized in
 * {@code memoizedHashCode}. The multiplier/field-number constants follow the
 * fixed protoc-generated scheme and must not be altered, or hashes would
 * disagree with other generated copies of this class.
 */
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + STATE_FIELD_NUMBER;
hash = (53 * hash) + state_;
hash = (37 * hash) + STATE_MESSAGE_FIELD_NUMBER;
hash = (53 * hash) + getStateMessage().hashCode();
if (hasCreateTime()) {
hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER;
hash = (53 * hash) + getCreateTime().hashCode();
}
if (hasUpdateTime()) {
hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER;
hash = (53 * hash) + getUpdateTime().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard protobuf parse entry points for every supported input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), with and
// without an extension registry. All delegate to PARSER or the
// GeneratedMessageV3 IO helpers.
public static com.google.cloud.documentai.v1beta1.OperationMetadata parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.documentai.v1beta1.OperationMetadata parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.documentai.v1beta1.OperationMetadata parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.documentai.v1beta1.OperationMetadata parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.documentai.v1beta1.OperationMetadata parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.documentai.v1beta1.OperationMetadata parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.documentai.v1beta1.OperationMetadata parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.documentai.v1beta1.OperationMetadata parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.documentai.v1beta1.OperationMetadata parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.documentai.v1beta1.OperationMetadata parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.documentai.v1beta1.OperationMetadata parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.documentai.v1beta1.OperationMetadata parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods. Builders created from DEFAULT_INSTANCE or an
// existing prototype; toBuilder() avoids a redundant mergeFrom on the default.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.documentai.v1beta1.OperationMetadata prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
// NOTE(review): protoc-generated code — regenerate from the .proto rather than
// hand-editing; local changes will be lost on the next code generation.
/**
 *
 *
 * <pre>
 * Contains metadata for the BatchProcessDocuments operation.
 * </pre>
 *
 * Protobuf type {@code google.cloud.documentai.v1beta1.OperationMetadata}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.documentai.v1beta1.OperationMetadata)
com.google.cloud.documentai.v1beta1.OperationMetadataOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.documentai.v1beta1.DocumentAiProto
.internal_static_google_cloud_documentai_v1beta1_OperationMetadata_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.documentai.v1beta1.DocumentAiProto
.internal_static_google_cloud_documentai_v1beta1_OperationMetadata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.documentai.v1beta1.OperationMetadata.class,
com.google.cloud.documentai.v1beta1.OperationMetadata.Builder.class);
}
// Construct using com.google.cloud.documentai.v1beta1.OperationMetadata.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Hook for eagerly creating nested field builders; currently a no-op even
// when alwaysUseFieldBuilders is set (empty body below).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
state_ = 0;
stateMessage_ = "";
if (createTimeBuilder_ == null) {
createTime_ = null;
} else {
createTime_ = null;
createTimeBuilder_ = null;
}
if (updateTimeBuilder_ == null) {
updateTime_ = null;
} else {
updateTime_ = null;
updateTimeBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.documentai.v1beta1.DocumentAiProto
.internal_static_google_cloud_documentai_v1beta1_OperationMetadata_descriptor;
}
@java.lang.Override
public com.google.cloud.documentai.v1beta1.OperationMetadata getDefaultInstanceForType() {
return com.google.cloud.documentai.v1beta1.OperationMetadata.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.documentai.v1beta1.OperationMetadata build() {
com.google.cloud.documentai.v1beta1.OperationMetadata result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message; nested builders are built if present.
@java.lang.Override
public com.google.cloud.documentai.v1beta1.OperationMetadata buildPartial() {
com.google.cloud.documentai.v1beta1.OperationMetadata result =
new com.google.cloud.documentai.v1beta1.OperationMetadata(this);
result.state_ = state_;
result.stateMessage_ = stateMessage_;
if (createTimeBuilder_ == null) {
result.createTime_ = createTime_;
} else {
result.createTime_ = createTimeBuilder_.build();
}
if (updateTimeBuilder_ == null) {
result.updateTime_ = updateTime_;
} else {
result.updateTime_ = updateTimeBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.documentai.v1beta1.OperationMetadata) {
return mergeFrom((com.google.cloud.documentai.v1beta1.OperationMetadata) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges only the fields of `other` that are set (non-default).
public Builder mergeFrom(com.google.cloud.documentai.v1beta1.OperationMetadata other) {
if (other == com.google.cloud.documentai.v1beta1.OperationMetadata.getDefaultInstance())
return this;
if (other.state_ != 0) {
setStateValue(other.getStateValue());
}
if (!other.getStateMessage().isEmpty()) {
stateMessage_ = other.stateMessage_;
onChanged();
}
if (other.hasCreateTime()) {
mergeCreateTime(other.getCreateTime());
}
if (other.hasUpdateTime()) {
mergeUpdateTime(other.getUpdateTime());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Streams in a serialized message; note the finally-block merges whatever was
// parsed before a failure, so partial data is preserved on error.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.documentai.v1beta1.OperationMetadata parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.documentai.v1beta1.OperationMetadata) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int state_ = 0;
/**
 *
 *
 * <pre>
 * The state of the current batch processing.
 * </pre>
 *
 * <code>.google.cloud.documentai.v1beta1.OperationMetadata.State state = 1;</code>
 *
 * @return The enum numeric value on the wire for state.
 */
@java.lang.Override
public int getStateValue() {
return state_;
}
/**
 *
 *
 * <pre>
 * The state of the current batch processing.
 * </pre>
 *
 * <code>.google.cloud.documentai.v1beta1.OperationMetadata.State state = 1;</code>
 *
 * @param value The enum numeric value on the wire for state to set.
 * @return This builder for chaining.
 */
public Builder setStateValue(int value) {
state_ = value;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * The state of the current batch processing.
 * </pre>
 *
 * <code>.google.cloud.documentai.v1beta1.OperationMetadata.State state = 1;</code>
 *
 * @return The state.
 */
@java.lang.Override
public com.google.cloud.documentai.v1beta1.OperationMetadata.State getState() {
@SuppressWarnings("deprecation")
com.google.cloud.documentai.v1beta1.OperationMetadata.State result =
com.google.cloud.documentai.v1beta1.OperationMetadata.State.valueOf(state_);
return result == null
? com.google.cloud.documentai.v1beta1.OperationMetadata.State.UNRECOGNIZED
: result;
}
/**
 *
 *
 * <pre>
 * The state of the current batch processing.
 * </pre>
 *
 * <code>.google.cloud.documentai.v1beta1.OperationMetadata.State state = 1;</code>
 *
 * @param value The state to set.
 * @return This builder for chaining.
 */
public Builder setState(com.google.cloud.documentai.v1beta1.OperationMetadata.State value) {
if (value == null) {
throw new NullPointerException();
}
state_ = value.getNumber();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * The state of the current batch processing.
 * </pre>
 *
 * <code>.google.cloud.documentai.v1beta1.OperationMetadata.State state = 1;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearState() {
state_ = 0;
onChanged();
return this;
}
// Like the message field, holds either a String or a ByteString and converts
// lazily on access.
private java.lang.Object stateMessage_ = "";
/**
 *
 *
 * <pre>
 * A message providing more details about the current state of processing.
 * </pre>
 *
 * <code>string state_message = 2;</code>
 *
 * @return The stateMessage.
 */
public java.lang.String getStateMessage() {
java.lang.Object ref = stateMessage_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
stateMessage_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * A message providing more details about the current state of processing.
 * </pre>
 *
 * <code>string state_message = 2;</code>
 *
 * @return The bytes for stateMessage.
 */
public com.google.protobuf.ByteString getStateMessageBytes() {
java.lang.Object ref = stateMessage_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
stateMessage_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * A message providing more details about the current state of processing.
 * </pre>
 *
 * <code>string state_message = 2;</code>
 *
 * @param value The stateMessage to set.
 * @return This builder for chaining.
 */
public Builder setStateMessage(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
stateMessage_ = value;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * A message providing more details about the current state of processing.
 * </pre>
 *
 * <code>string state_message = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearStateMessage() {
stateMessage_ = getDefaultInstance().getStateMessage();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * A message providing more details about the current state of processing.
 * </pre>
 *
 * <code>string state_message = 2;</code>
 *
 * @param value The bytes for stateMessage to set.
 * @return This builder for chaining.
 */
public Builder setStateMessageBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
stateMessage_ = value;
onChanged();
return this;
}
private com.google.protobuf.Timestamp createTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
createTimeBuilder_;
/**
 *
 *
 * <pre>
 * The creation time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 3;</code>
 *
 * @return Whether the createTime field is set.
 */
public boolean hasCreateTime() {
return createTimeBuilder_ != null || createTime_ != null;
}
/**
 *
 *
 * <pre>
 * The creation time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 3;</code>
 *
 * @return The createTime.
 */
public com.google.protobuf.Timestamp getCreateTime() {
if (createTimeBuilder_ == null) {
return createTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: createTime_;
} else {
return createTimeBuilder_.getMessage();
}
}
/**
 *
 *
 * <pre>
 * The creation time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 3;</code>
 */
public Builder setCreateTime(com.google.protobuf.Timestamp value) {
if (createTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
createTime_ = value;
onChanged();
} else {
createTimeBuilder_.setMessage(value);
}
return this;
}
/**
 *
 *
 * <pre>
 * The creation time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 3;</code>
 */
public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (createTimeBuilder_ == null) {
createTime_ = builderForValue.build();
onChanged();
} else {
createTimeBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
 *
 *
 * <pre>
 * The creation time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 3;</code>
 */
public Builder mergeCreateTime(com.google.protobuf.Timestamp value) {
if (createTimeBuilder_ == null) {
if (createTime_ != null) {
createTime_ =
com.google.protobuf.Timestamp.newBuilder(createTime_).mergeFrom(value).buildPartial();
} else {
createTime_ = value;
}
onChanged();
} else {
createTimeBuilder_.mergeFrom(value);
}
return this;
}
/**
 *
 *
 * <pre>
 * The creation time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 3;</code>
 */
public Builder clearCreateTime() {
if (createTimeBuilder_ == null) {
createTime_ = null;
onChanged();
} else {
createTime_ = null;
createTimeBuilder_ = null;
}
return this;
}
/**
 *
 *
 * <pre>
 * The creation time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 3;</code>
 */
public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() {
onChanged();
return getCreateTimeFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * The creation time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 3;</code>
 */
public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() {
if (createTimeBuilder_ != null) {
return createTimeBuilder_.getMessageOrBuilder();
} else {
return createTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: createTime_;
}
}
/**
 *
 *
 * <pre>
 * The creation time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 3;</code>
 */
// Lazily creates the nested field builder; once created, createTime_ is
// nulled out and the builder becomes the single source of truth.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
getCreateTimeFieldBuilder() {
if (createTimeBuilder_ == null) {
createTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>(
getCreateTime(), getParentForChildren(), isClean());
createTime_ = null;
}
return createTimeBuilder_;
}
private com.google.protobuf.Timestamp updateTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
updateTimeBuilder_;
/**
 *
 *
 * <pre>
 * The last update time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 4;</code>
 *
 * @return Whether the updateTime field is set.
 */
public boolean hasUpdateTime() {
return updateTimeBuilder_ != null || updateTime_ != null;
}
/**
 *
 *
 * <pre>
 * The last update time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 4;</code>
 *
 * @return The updateTime.
 */
public com.google.protobuf.Timestamp getUpdateTime() {
if (updateTimeBuilder_ == null) {
return updateTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: updateTime_;
} else {
return updateTimeBuilder_.getMessage();
}
}
/**
 *
 *
 * <pre>
 * The last update time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 4;</code>
 */
public Builder setUpdateTime(com.google.protobuf.Timestamp value) {
if (updateTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateTime_ = value;
onChanged();
} else {
updateTimeBuilder_.setMessage(value);
}
return this;
}
/**
 *
 *
 * <pre>
 * The last update time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 4;</code>
 */
public Builder setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (updateTimeBuilder_ == null) {
updateTime_ = builderForValue.build();
onChanged();
} else {
updateTimeBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
 *
 *
 * <pre>
 * The last update time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 4;</code>
 */
public Builder mergeUpdateTime(com.google.protobuf.Timestamp value) {
if (updateTimeBuilder_ == null) {
if (updateTime_ != null) {
updateTime_ =
com.google.protobuf.Timestamp.newBuilder(updateTime_).mergeFrom(value).buildPartial();
} else {
updateTime_ = value;
}
onChanged();
} else {
updateTimeBuilder_.mergeFrom(value);
}
return this;
}
/**
 *
 *
 * <pre>
 * The last update time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 4;</code>
 */
public Builder clearUpdateTime() {
if (updateTimeBuilder_ == null) {
updateTime_ = null;
onChanged();
} else {
updateTime_ = null;
updateTimeBuilder_ = null;
}
return this;
}
/**
 *
 *
 * <pre>
 * The last update time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 4;</code>
 */
public com.google.protobuf.Timestamp.Builder getUpdateTimeBuilder() {
onChanged();
return getUpdateTimeFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * The last update time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 4;</code>
 */
public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() {
if (updateTimeBuilder_ != null) {
return updateTimeBuilder_.getMessageOrBuilder();
} else {
return updateTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: updateTime_;
}
}
/**
 *
 *
 * <pre>
 * The last update time of the operation.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 4;</code>
 */
// Lazily creates the nested field builder; once created, updateTime_ is
// nulled out and the builder becomes the single source of truth.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
getUpdateTimeFieldBuilder() {
if (updateTimeBuilder_ == null) {
updateTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>(
getUpdateTime(), getParentForChildren(), isClean());
updateTime_ = null;
}
return updateTimeBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.documentai.v1beta1.OperationMetadata)
}
// @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.OperationMetadata)
// Singleton default instance shared by all callers; also the source of new builders.
private static final com.google.cloud.documentai.v1beta1.OperationMetadata DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.documentai.v1beta1.OperationMetadata();
}
public static com.google.cloud.documentai.v1beta1.OperationMetadata getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless parser shared by all parseFrom overloads.
private static final com.google.protobuf.Parser<OperationMetadata> PARSER =
new com.google.protobuf.AbstractParser<OperationMetadata>() {
@java.lang.Override
public OperationMetadata parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new OperationMetadata(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<OperationMetadata> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<OperationMetadata> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.documentai.v1beta1.OperationMetadata getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/*
* Copyright (c) 2003 by Cosylab d.o.o.
*
* The full license specifying the redistribution, modification, usage and other
* rights and obligations is included with the distribution of this project in
* the file license.html. If the license is not included you may find a copy at
* http://www.cosylab.com/legal/abeans_license.htm or may write to Cosylab, d.o.o.
*
* THIS SOFTWARE IS PROVIDED AS-IS WITHOUT WARRANTY OF ANY KIND, NOT EVEN THE
* IMPLIED WARRANTY OF MERCHANTABILITY. THE AUTHOR OF THIS SOFTWARE, ASSUMES
* _NO_ RESPONSIBILITY FOR ANY CONSEQUENCE RESULTING FROM THE USE, MODIFICATION,
* OR REDISTRIBUTION OF THIS SOFTWARE.
*/
package xal.extension.widgets.smf;
import xal.smf.Accelerator;
import xal.smf.AcceleratorNode;
import xal.smf.AcceleratorSeq;
import xal.smf.data.XMLDataManager;
import xal.smf.impl.*;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Insets;
import java.awt.event.MouseEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import javax.swing.JFrame;
import javax.swing.JPanel;
/**
 * <code>XALSynopticPanel</code> is a simple panel that shows a synoptic layout of
 * the selected XAL sequence. The synoptic is drawn in the horizontal direction. To
 * define the right or left distance of the drawing from the edge of the panel, use
 * margins. To define which part of the sequence should be drawn, use the start and
 * end positions.
*
* @author <a href="mailto:igor.kriznar@cosylab.com">Igor Kriznar</a>
* @since Aug 29, 2003.
*
* TODO Fire events for mouse selection of elements
 * TODO Add an element selection dialog for choosing a single element from a multiple selection
* TODO Implement SNS official color code
*/
public class XALSynopticPanel extends JPanel {
    /** serialization ID */
    private static final long serialVersionUID = 1L;

    /** position (in sequence coordinates) from which elements are drawn */
    private double startPosition;

    /** position (in sequence coordinates) up to which elements are drawn */
    private double endPosition;

    /** the sequence being displayed; may be null, in which case nothing is drawn */
    private AcceleratorSeq acceleratorSequence;

    /** elements drawn as boxes spanning their physical length (bends, quadrupoles, RF gaps/cavities) */
    private ArrayList<AcceleratorNode> thick = new ArrayList<AcceleratorNode>();

    /** elements drawn as thin vertical markers (correctors, BPMs, profile monitors, markers) */
    private ArrayList<AcceleratorNode> thin = new ArrayList<AcceleratorNode>();

    /** margin around the drawing, applied in addition to any border */
    private Insets margin;

    /** per-pixel element names recorded while painting and consulted for tooltips; one slot per horizontal drawing pixel */
    private String[] labels = new String[0];

    private double _wrapShift; // relevant for rings; it specifies the shift in wrap location where negative numbers start

    /**
     * Default constructor.
     */
    public XALSynopticPanel()
    {
        super();
        _wrapShift = 0.0;
        setBackground(Color.white);
        margin = new Insets(30, 20, 30, 20);
        setOpaque(true);
        // setting a non-null tooltip registers this component with the ToolTipManager,
        // which then calls getToolTipText(MouseEvent) for per-pixel element names
        setToolTipText("XAL Synoptics");
    }

    /**
     * This runs a simple demonstration frame showing the default accelerator's MEBT sequence.
     *
     * @param args CMD args (unused)
     */
    public static void main(String[] args)
    {
        try {
            XALSynopticPanel pane = new XALSynopticPanel();
            Accelerator acc = XMLDataManager.loadDefaultAccelerator();
            pane.setAcceleratorSequence(acc.getSequence("MEBT"));
            JFrame frame = new JFrame();
            frame.setSize(300, 200);
            frame.getContentPane().add(pane);
            frame.addWindowListener(new WindowAdapter() {
                public void windowClosing(WindowEvent e)
                {
                    System.exit(0);
                }
            });
            frame.setVisible( true );
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Get the accelerator sequence, which is displayed from first to last by synoptic defined element.
     * @return the accelerator sequence, which is displayed from first to last by synoptic defined element
     */
    public AcceleratorSeq getAcceleratorSequence()
    {
        return acceleratorSequence;
    }

    /**
     * Sets the accelerator sequence. Note that this method might take some
     * time to finish if the sequence contains a lot of elements.
     * Uses the beginning and end of this sequence for the initial start and end positions.
     *
     * @param seq the sequence to display
     */
    public void setAcceleratorSequence(AcceleratorSeq seq) {
        if ( seq != acceleratorSequence ) { // need to check to avoid side effects of resetting the start end breaking synchronization with plot
            setAcceleratorSequence( seq, 0.0, 0.0 );
        }
    }

    /**
     * Sets the accelerator sequence and the initial start and end positions in the sequence.
     * Note that this method might take some
     * time to finish if the sequence contains a lot of elements.
     *
     * @param seq the sequence to display; null is tolerated and clears the display
     * @param start initial start position; swapped with {@code end} if it is the larger of the two
     * @param end initial end position; passing {@code start == end} makes
     *            {@link #updateSequence()} derive the range from the sequence extent
     */
    public void setAcceleratorSequence(AcceleratorSeq seq, double start, double end)
    {
        // a null sequence is allowed here: updateSequence() clears the element lists and repaints empty
        acceleratorSequence = seq;
        if (start<end) {
            startPosition = start;
            endPosition = end;
        } else {
            // normalize swapped arguments so that startPosition <= endPosition always holds
            startPosition = end;
            endPosition = start;
        }
        updateSequence();
    }

    /**
     * Set the shift in the location where the wrapping occurs (relevant only for rings)
     * @param shift the shift (meters) in location along the ring where positions are measured in positive versus negative numbers relative to the origin
     */
    public void setWrapShift( final double shift ) {
        _wrapShift = shift;
        repaint();
    }

    /** Rebuilds the thick/thin element lists from the current sequence and schedules a repaint. */
    private void updateSequence()
    {
        thick.clear();
        thin.clear();
        if (acceleratorSequence == null) {
            repaint();
            return;
        }
        final List<AcceleratorNode> list = acceleratorSequence.getAllNodes();
        // set initial size from sequence
        if (list.size() > 0 && startPosition==endPosition) {
            // positions refer to element centers, so pad by half the end elements' lengths
            startPosition = acceleratorSequence.getPosition( list.get(0) ) - list.get(0).getLength() / 2.0;
            endPosition = acceleratorSequence.getPosition( list.get( list.size() - 1 ) ) + list.get( list.size() - 1 ).getLength() / 2.0;
        }
        final ArrayList<AcceleratorNode> newThick = new ArrayList<AcceleratorNode>(list.size());
        final ArrayList<AcceleratorNode> newThin = new ArrayList<AcceleratorNode>(list.size());
        for ( final AcceleratorNode el : list ) {
            double pos = acceleratorSequence.getPosition(el);
            if (pos >= startPosition && pos <= endPosition) {
                // magnets and RF elements get a length-proportional box; everything else a thin marker
                if ( el instanceof Bend || el instanceof Quadrupole || el instanceof PermQuadrupole || el instanceof RfGap || el instanceof RfCavity ) {
                    newThick.add(el);
                } else {
                    newThin.add(el);
                }
            }
        }
        thick = newThick;
        thin = newThin;
        repaint();
    }

    /**
     * Get the position in sequence up to which elements are drawn.
     * @return the position in sequence up to which elements are drawn
     */
    public double getEndPosition()
    {
        return endPosition;
    }

    /**
     * Get the position in sequence from which elements are drawn.
     * @return the position in the sequence from which elements are drawn
     */
    public double getStartPosition()
    {
        return startPosition;
    }

    /**
     * Sets position in sequence up to which elements are drawn. Must be more
     * than the start position.
     *
     * @param d the new end position
     *
     * @throws IllegalArgumentException if the new end is less than the start position
     */
    public void setEndPosition(double d)
    {
        if (d < startPosition) {
            throw new IllegalArgumentException("New end position (" + d
                + ") is less than start position (" + startPosition + ").");
        }
        endPosition = d;
        repaint();
    }

    /**
     * Sets position in sequence from which elements are drawn. Must be less
     * than the end position.
     *
     * @param d the new start position
     *
     * @throws IllegalArgumentException if the new start is more than the end position
     */
    public void setStartPosition(double d)
    {
        if (d > endPosition) {
            throw new IllegalArgumentException("New start position (" + d
                + ") is more than end position (" + endPosition + ").");
        }
        startPosition = d;
        repaint();
    }

    /* (non-Javadoc)
     * @see javax.swing.JComponent#paintComponent(java.awt.Graphics)
     *
     * Paints the beam axis plus every visible element, and as a side effect fills the
     * per-pixel "labels" array that getToolTipText(MouseEvent) reads.
     */
    protected void paintComponent(Graphics g)
    {
        super.paintComponent(g);
        double scale = endPosition - startPosition;  // meters spanned by the drawable width
        int width = getWidth() - margin.right - margin.left;
        int height = getHeight() - margin.top - margin.bottom;
        if (width <= 0 || height <= 0) {
            return;
        }
        // resize and clear the per-pixel label map to match the current drawable width
        if (width != labels.length) {
            labels = new String[width];
        }
        Arrays.fill(labels, null);
        int x = margin.left;
        int y = margin.top + (int)(height / 2.0);
        // draw line (horizontal beam axis with short ticks at both ends)
        g.setColor(Color.black);
        g.drawLine(x, y, x + width - 1, y);
        g.drawLine(x, y - 3, x, y + 3);
        g.drawLine(x + width - 1, y - 3, x + width - 1, y + 3);
        if (thin == null || thick == null) {
            return;
        }
        // for rings, positions beyond the wrap location are mapped to negative values
        final double sequenceLength = acceleratorSequence != null ? acceleratorSequence.getLength() : 0.0;
        final double wrapLocation = sequenceLength - _wrapShift;
        // first draw thick elements
        for ( final AcceleratorNode el : thick ) {
            String name = el.getId();
            double pos = acceleratorSequence.getPosition(el);
            final double wrappedPosition = pos <= wrapLocation ? pos : pos - sequenceLength;
            if (wrappedPosition+el.getLength()<startPosition || wrappedPosition-el.getLength()>endPosition) continue;
            int ex = x + (int)((wrappedPosition - el.getLength() / 2.0 - startPosition) / scale * (width - 1));
            int l = (int)(el.getLength() / scale * (width - 1));
            if (l < 2) {
                l = 2;  // ensure every element is at least 2 px wide so it remains visible
            }
            if (el instanceof Dipole) {
                // dipoles: yellow filled box
                g.setColor(Color.yellow);
                g.fillRect(ex, margin.top, l, height);
                for (int i = ex - x; i < ex - x + l; i++) {
                    addLabel(i, name);
                }
            }
            else if (el instanceof Quadrupole || el instanceof PermQuadrupole) {
                // quadrupoles: red filled box
                g.setColor(Color.red);
                g.fillRect(ex, margin.top, l, height);
                for (int i = ex - x; i < ex - x + l; i++) {
                    addLabel(i, name);
                }
            }
            else if (el instanceof Solenoid) {
                // solenoids: green filled box
                g.setColor(Color.green);
                g.fillRect(ex, margin.top, l, height);
                for (int i = ex - x; i < ex - x + l; i++) {
                    addLabel(i, name);
                }
            }
            else if (el instanceof RfGap) {
                // RF gaps: gray filled (rounded when wide enough to show the corners)
                g.setColor(Color.gray);
                if (l < 3) {
                    g.fillRect(ex, margin.top, l, height);
                } else {
                    g.fillRoundRect(ex, margin.top, l, height, height / 5,
                        height / 5);
                }
                for (int i = ex - x; i < ex - x + l; i++) {
                    addLabel(i, name);
                }
            }
            else if (el instanceof RfCavity) {
                // RF cavities: gray outline only, so the gaps inside stay visible
                g.setColor(Color.gray);
                if (l < 3) {
                    g.drawRect(ex, margin.top - 1, l, height + 1);
                } else {
                    g.drawRoundRect(ex, margin.top - 1, l, height + 1,
                        height / 10, height / 10);
                }
                for (int i = ex - x; i < ex - x + l; i++) {
                    addLabel(i, name);
                }
            }
        }
        // Draw thin elements
        for ( final AcceleratorNode el : thin ) {
            String name = el.getId();
            double pos = acceleratorSequence.getPosition(el);
            final double wrappedPosition = pos <= wrapLocation ? pos : pos - sequenceLength;
            if (wrappedPosition+el.getLength()<startPosition || wrappedPosition-el.getLength()>endPosition) continue;
            if (el instanceof HDipoleCorr || el instanceof VDipoleCorr) {
                // dipole correctors: 2 px blue bar
                int ex = x + (int)((wrappedPosition - startPosition) / scale * (width - 1));
                g.setColor(Color.blue);
                g.fillRect(ex, margin.top, 2, height);
                for (int i = ex - x; i < ex - x + 2; i++) {
                    addLabel(i, name);
                }
            } else if (el instanceof BPM || el instanceof ProfileMonitor || el instanceof Marker ) {
                // diagnostics: 1 px bar, color keyed by device type
                int ex = x + (int)((wrappedPosition - startPosition) / scale * (width - 1));
                if (el instanceof BPM)
                    g.setColor(Color.cyan);
                else if ( el instanceof ProfileMonitor )
                    g.setColor(Color.green);
                else
                    g.setColor( Color.GRAY );
                g.fillRect(ex, margin.top, 1, height);
                for (int i = ex - x; i < ex - x + 1; i++) {
                    addLabel(i, name);
                }
            }
        }
    }

    /**
     * Get the margin around the drawing.
     * @return the margin around the drawing
     */
    public Insets getMargin()
    {
        return margin;
    }

    /**
     * Sets the margin around the drawing; takes effect regardless of the border
     * margins.
     *
     * @param insets the new margin
     */
    public void setMargin(Insets insets)
    {
        margin = insets;
        repaint();
    }

    /**
     * Records an element name for the given horizontal pixel; names of overlapping
     * elements are appended, comma-separated. Out-of-range indices are ignored.
     */
    private void addLabel(int index, String label)
    {
        if (index >= labels.length || index < 0) {
            return;
        }
        if (labels[index] == null) {
            labels[index] = label;
        } else {
            labels[index] = labels[index] + ", " + label;
        }
    }

    /* (non-Javadoc)
     * @see javax.swing.JComponent#getToolTipText(java.awt.event.MouseEvent)
     *
     * Returns the name(s) of the element nearest the cursor, using the per-pixel label
     * map that the most recent paintComponent(Graphics) call populated.
     */
    public String getToolTipText(MouseEvent event)
    {
        int i = event.getPoint().x - margin.left;
        if (i >= 0 && i < labels.length) {
            // scan outward in both directions for the nearest labeled pixel
            int i_upper = i;
            int i_down = i;
            while(i_upper < (labels.length-1) && labels[i_upper] == null){
                i_upper += 1;
            }
            while(i_down > 0 && labels[i_down] == null){
                i_down -= 1;
            }
            // return the closer of the two candidates (may be null when no element is labeled)
            if(Math.abs(i- i_down) < Math.abs(i_upper -i)){
                return labels[i_down];
            }
            else{
                return labels[i_upper];
            }
        } else {
            return super.getToolTipText(event);
        }
    }
}
/* __oOo__ */
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.greengrassv2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Contains information about a deployment.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/greengrassv2-2020-11-30/Deployment" target="_top">AWS API
* Documentation</a>
*/
// NOTE(review): generated model class (see @Generated below) — hand edits here are
// presumably overwritten on SDK regeneration; confirm against the build process.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Deployment implements Serializable, Cloneable, StructuredPojo {
    /**
     * <p>
     * The <a href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html">ARN</a> of the target
     * IoT thing or thing group.
     * </p>
     */
    private String targetArn;
    /**
     * <p>
     * The revision number of the deployment.
     * </p>
     */
    private String revisionId;
    /**
     * <p>
     * The ID of the deployment.
     * </p>
     */
    private String deploymentId;
    /**
     * <p>
     * The name of the deployment.
     * </p>
     */
    private String deploymentName;
    /**
     * <p>
     * The time at which the deployment was created, expressed in ISO 8601 format.
     * </p>
     */
    private java.util.Date creationTimestamp;
    /**
     * <p>
     * The status of the deployment.
     * </p>
     */
    private String deploymentStatus;
    /**
     * <p>
     * Whether or not the deployment is the latest revision for its target.
     * </p>
     */
    private Boolean isLatestForTarget;

    /**
     * <p>
     * The <a href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html">ARN</a> of the target
     * IoT thing or thing group.
     * </p>
     *
     * @param targetArn
     *        The <a href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html">ARN</a> of the
     *        target IoT thing or thing group.
     */
    public void setTargetArn(String targetArn) {
        this.targetArn = targetArn;
    }

    /**
     * <p>
     * The <a href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html">ARN</a> of the target
     * IoT thing or thing group.
     * </p>
     *
     * @return The <a href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html">ARN</a> of the
     *         target IoT thing or thing group.
     */
    public String getTargetArn() {
        return this.targetArn;
    }

    /**
     * <p>
     * The <a href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html">ARN</a> of the target
     * IoT thing or thing group.
     * </p>
     *
     * @param targetArn
     *        The <a href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html">ARN</a> of the
     *        target IoT thing or thing group.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Deployment withTargetArn(String targetArn) {
        setTargetArn(targetArn);
        return this;
    }

    /**
     * <p>
     * The revision number of the deployment.
     * </p>
     *
     * @param revisionId
     *        The revision number of the deployment.
     */
    public void setRevisionId(String revisionId) {
        this.revisionId = revisionId;
    }

    /**
     * <p>
     * The revision number of the deployment.
     * </p>
     *
     * @return The revision number of the deployment.
     */
    public String getRevisionId() {
        return this.revisionId;
    }

    /**
     * <p>
     * The revision number of the deployment.
     * </p>
     *
     * @param revisionId
     *        The revision number of the deployment.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Deployment withRevisionId(String revisionId) {
        setRevisionId(revisionId);
        return this;
    }

    /**
     * <p>
     * The ID of the deployment.
     * </p>
     *
     * @param deploymentId
     *        The ID of the deployment.
     */
    public void setDeploymentId(String deploymentId) {
        this.deploymentId = deploymentId;
    }

    /**
     * <p>
     * The ID of the deployment.
     * </p>
     *
     * @return The ID of the deployment.
     */
    public String getDeploymentId() {
        return this.deploymentId;
    }

    /**
     * <p>
     * The ID of the deployment.
     * </p>
     *
     * @param deploymentId
     *        The ID of the deployment.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Deployment withDeploymentId(String deploymentId) {
        setDeploymentId(deploymentId);
        return this;
    }

    /**
     * <p>
     * The name of the deployment.
     * </p>
     *
     * @param deploymentName
     *        The name of the deployment.
     */
    public void setDeploymentName(String deploymentName) {
        this.deploymentName = deploymentName;
    }

    /**
     * <p>
     * The name of the deployment.
     * </p>
     *
     * @return The name of the deployment.
     */
    public String getDeploymentName() {
        return this.deploymentName;
    }

    /**
     * <p>
     * The name of the deployment.
     * </p>
     *
     * @param deploymentName
     *        The name of the deployment.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Deployment withDeploymentName(String deploymentName) {
        setDeploymentName(deploymentName);
        return this;
    }

    /**
     * <p>
     * The time at which the deployment was created, expressed in ISO 8601 format.
     * </p>
     *
     * @param creationTimestamp
     *        The time at which the deployment was created, expressed in ISO 8601 format.
     */
    public void setCreationTimestamp(java.util.Date creationTimestamp) {
        this.creationTimestamp = creationTimestamp;
    }

    /**
     * <p>
     * The time at which the deployment was created, expressed in ISO 8601 format.
     * </p>
     *
     * @return The time at which the deployment was created, expressed in ISO 8601 format.
     */
    public java.util.Date getCreationTimestamp() {
        return this.creationTimestamp;
    }

    /**
     * <p>
     * The time at which the deployment was created, expressed in ISO 8601 format.
     * </p>
     *
     * @param creationTimestamp
     *        The time at which the deployment was created, expressed in ISO 8601 format.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Deployment withCreationTimestamp(java.util.Date creationTimestamp) {
        setCreationTimestamp(creationTimestamp);
        return this;
    }

    /**
     * <p>
     * The status of the deployment.
     * </p>
     *
     * @param deploymentStatus
     *        The status of the deployment.
     * @see DeploymentStatus
     */
    public void setDeploymentStatus(String deploymentStatus) {
        this.deploymentStatus = deploymentStatus;
    }

    /**
     * <p>
     * The status of the deployment.
     * </p>
     *
     * @return The status of the deployment.
     * @see DeploymentStatus
     */
    public String getDeploymentStatus() {
        return this.deploymentStatus;
    }

    /**
     * <p>
     * The status of the deployment.
     * </p>
     *
     * @param deploymentStatus
     *        The status of the deployment.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see DeploymentStatus
     */
    public Deployment withDeploymentStatus(String deploymentStatus) {
        setDeploymentStatus(deploymentStatus);
        return this;
    }

    /**
     * <p>
     * The status of the deployment.
     * </p>
     *
     * @param deploymentStatus
     *        The status of the deployment.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see DeploymentStatus
     */
    public Deployment withDeploymentStatus(DeploymentStatus deploymentStatus) {
        // enum overload stores the enum's string form; only the String field is kept
        this.deploymentStatus = deploymentStatus.toString();
        return this;
    }

    /**
     * <p>
     * Whether or not the deployment is the latest revision for its target.
     * </p>
     *
     * @param isLatestForTarget
     *        Whether or not the deployment is the latest revision for its target.
     */
    public void setIsLatestForTarget(Boolean isLatestForTarget) {
        this.isLatestForTarget = isLatestForTarget;
    }

    /**
     * <p>
     * Whether or not the deployment is the latest revision for its target.
     * </p>
     *
     * @return Whether or not the deployment is the latest revision for its target.
     */
    public Boolean getIsLatestForTarget() {
        return this.isLatestForTarget;
    }

    /**
     * <p>
     * Whether or not the deployment is the latest revision for its target.
     * </p>
     *
     * @param isLatestForTarget
     *        Whether or not the deployment is the latest revision for its target.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Deployment withIsLatestForTarget(Boolean isLatestForTarget) {
        setIsLatestForTarget(isLatestForTarget);
        return this;
    }

    /**
     * <p>
     * Whether or not the deployment is the latest revision for its target.
     * </p>
     *
     * @return Whether or not the deployment is the latest revision for its target.
     */
    public Boolean isLatestForTarget() {
        return this.isLatestForTarget;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getTargetArn() != null)
            sb.append("TargetArn: ").append(getTargetArn()).append(",");
        if (getRevisionId() != null)
            sb.append("RevisionId: ").append(getRevisionId()).append(",");
        if (getDeploymentId() != null)
            sb.append("DeploymentId: ").append(getDeploymentId()).append(",");
        if (getDeploymentName() != null)
            sb.append("DeploymentName: ").append(getDeploymentName()).append(",");
        if (getCreationTimestamp() != null)
            sb.append("CreationTimestamp: ").append(getCreationTimestamp()).append(",");
        if (getDeploymentStatus() != null)
            sb.append("DeploymentStatus: ").append(getDeploymentStatus()).append(",");
        if (getIsLatestForTarget() != null)
            sb.append("IsLatestForTarget: ").append(getIsLatestForTarget());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof Deployment == false)
            return false;
        Deployment other = (Deployment) obj;
        // generated idiom: "a == null ^ b == null" is true when exactly one side is null → unequal
        if (other.getTargetArn() == null ^ this.getTargetArn() == null)
            return false;
        if (other.getTargetArn() != null && other.getTargetArn().equals(this.getTargetArn()) == false)
            return false;
        if (other.getRevisionId() == null ^ this.getRevisionId() == null)
            return false;
        if (other.getRevisionId() != null && other.getRevisionId().equals(this.getRevisionId()) == false)
            return false;
        if (other.getDeploymentId() == null ^ this.getDeploymentId() == null)
            return false;
        if (other.getDeploymentId() != null && other.getDeploymentId().equals(this.getDeploymentId()) == false)
            return false;
        if (other.getDeploymentName() == null ^ this.getDeploymentName() == null)
            return false;
        if (other.getDeploymentName() != null && other.getDeploymentName().equals(this.getDeploymentName()) == false)
            return false;
        if (other.getCreationTimestamp() == null ^ this.getCreationTimestamp() == null)
            return false;
        if (other.getCreationTimestamp() != null && other.getCreationTimestamp().equals(this.getCreationTimestamp()) == false)
            return false;
        if (other.getDeploymentStatus() == null ^ this.getDeploymentStatus() == null)
            return false;
        if (other.getDeploymentStatus() != null && other.getDeploymentStatus().equals(this.getDeploymentStatus()) == false)
            return false;
        if (other.getIsLatestForTarget() == null ^ this.getIsLatestForTarget() == null)
            return false;
        if (other.getIsLatestForTarget() != null && other.getIsLatestForTarget().equals(this.getIsLatestForTarget()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        // standard 31-based accumulation over the same properties compared in equals()
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getTargetArn() == null) ? 0 : getTargetArn().hashCode());
        hashCode = prime * hashCode + ((getRevisionId() == null) ? 0 : getRevisionId().hashCode());
        hashCode = prime * hashCode + ((getDeploymentId() == null) ? 0 : getDeploymentId().hashCode());
        hashCode = prime * hashCode + ((getDeploymentName() == null) ? 0 : getDeploymentName().hashCode());
        hashCode = prime * hashCode + ((getCreationTimestamp() == null) ? 0 : getCreationTimestamp().hashCode());
        hashCode = prime * hashCode + ((getDeploymentStatus() == null) ? 0 : getDeploymentStatus().hashCode());
        hashCode = prime * hashCode + ((getIsLatestForTarget() == null) ? 0 : getIsLatestForTarget().hashCode());
        return hashCode;
    }

    @Override
    public Deployment clone() {
        try {
            // shallow copy is sufficient: all fields are immutable or treated as value-like by the SDK
            return (Deployment) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.greengrassv2.model.transform.DeploymentMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package com.facebook.watchman;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import com.facebook.watchman.bser.BserDeserializer;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableMap;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.Mockito;
public class WatchmanClientTest extends WatchmanTestBase {
// Client under test; recreated for every test by setClient().
private WatchmanClient mClient;
// Capability flag read lazily via a Supplier, so tests can flip it before sending a command.
private Boolean withWatchProject;
@Rule
public ExpectedException thrown = ExpectedException.none();
/* @Before methods in subclasses are run AFTER @Before methods of the base class */
  @Before
  public void setClient() throws IOException {
    // default to advertising watch-project support; watchRequestTest flips this off
    withWatchProject = true;
    mClient = new WatchmanClientImpl(
        mIncomingMessageGetter,
        mOutgoingMessageStream,
        new Supplier<Boolean>() {
          @Override
          public Boolean get() {
            // read on every query so per-test changes to withWatchProject take effect
            return withWatchProject;
          }
        });
    mClient.start();
  }
  /**
   * Verifies that a unilateral subscription update delivered by Watchman is forwarded
   * to the listener callback registered via subscribe().
   */
  @SuppressWarnings("unchecked")
  @Test
  public void subscribeTriggersListenerTest() throws InterruptedException {
    // canned reply to the subscribe command itself
    Map<String, Object> subscriptionReply = new HashMap<>();
    subscriptionReply.put("version", "1.2.3");
    // "subscribe" value should be private to WatchmanClient so we could use any mock string
    subscriptionReply.put("subscribe", "sub-0");
    mObjectQueue.put(subscriptionReply);
    // unilateral event that should be routed to the listener
    Map<String, Object> subscribeEvent = new HashMap<>();
    subscribeEvent.put("version", "1.2.3");
    subscribeEvent.put("clock", "c:123:1234");
    subscribeEvent.put("files", Arrays.asList("/foo/bar", "/foo/baz"));
    subscribeEvent.put("root", "/foo");
    subscribeEvent.put("subscription", "sub-0");
    mObjectQueue.put(subscribeEvent);
    final CountDownLatch latch = new CountDownLatch(1);
    final AtomicReference<Map<String, Object>> result = new AtomicReference<>();
    mClient.subscribe(Paths.get("/foo"), null, new Callback() {
      @Override
      public void call(Map<String, Object> event) {
        result.set(event);
        latch.countDown();
      }
    });
    // bounded wait: fail instead of hanging forever if the event never arrives
    if (! latch.await(10, TimeUnit.SECONDS)) {
      Assert.fail();
    }
    deepObjectEquals(subscribeEvent, result.get());
  }
/**
* Test the case when we get a unilateral message from Watchman (a subscription update event)
* before the answer to the command we have just sent. We expect that the response to the watch
* request is delivered, and not the subscription update event.
*/
  @SuppressWarnings("unchecked")
  @Test
  public void watchProjectWithUnilateralTest() throws ExecutionException, InterruptedException {
    // subscription-style event queued FIRST: it must be skipped when resolving the command future
    Map<String, Object> dummyUnilateralMessage = new HashMap<>();
    dummyUnilateralMessage.put("version", "1.2.3");
    dummyUnilateralMessage.put("clock", "c:123:1234");
    dummyUnilateralMessage.put("files", Arrays.asList("/foo/bar", "/foo/baz"));
    dummyUnilateralMessage.put("root", "/foo");
    dummyUnilateralMessage.put("subscription", "sub-0");
    mObjectQueue.put(dummyUnilateralMessage);
    // the actual reply to the watch command, queued second
    Map<String, Object> mockResponse = new HashMap<>();
    mockResponse.put("version", "1.2.3");
    mockResponse.put("watch", "/foo/bar");
    mockResponse.put("relative_path", "/foo");
    mObjectQueue.put(mockResponse);
    // the future must resolve to the command reply, not the unilateral event
    Map<String, Object> receivedResponse = mClient.watch(Paths.get("/foo/bar")).get();
    deepObjectEquals(mockResponse, receivedResponse);
  }
/**
* Test that the watch-project request sent by WatchmanClient respects the interface of Watchman
*/
@SuppressWarnings("unchecked")
@Test
public void watchProjectRequestTest() throws IOException, ExecutionException, InterruptedException {
String PATH = "/foo/bar";
mObjectQueue.put(new HashMap<String, Object>()); // response irrelevant
mClient.watch(Paths.get(PATH)).get();
ByteArrayInputStream in = new ByteArrayInputStream(mOutgoingMessageStream.toByteArray());
BserDeserializer deserializer = new BserDeserializer(BserDeserializer.KeyOrdering.UNSORTED);
List<Object> request = (List<Object>) deserializer.deserializeBserValue(in);
//noinspection RedundantCast
deepObjectEquals(
Arrays.<Object>asList("watch-project", PATH),
request);
}
/**
* Test that the watch-del request sent by WatchmanClient respects the interface of Watchman
*/
@SuppressWarnings("unchecked")
@Test
public void watchDelRequestTest() throws IOException, ExecutionException, InterruptedException {
String PATH = "/foo/bar";
mObjectQueue.put(new HashMap<String, Object>()); // response irrelevant
mClient.watchDel(Paths.get(PATH)).get();
ByteArrayInputStream in = new ByteArrayInputStream(mOutgoingMessageStream.toByteArray());
BserDeserializer deserializer = new BserDeserializer(BserDeserializer.KeyOrdering.UNSORTED);
List<Object> request = (List<Object>) deserializer.deserializeBserValue(in);
//noinspection RedundantCast
deepObjectEquals(
Arrays.<Object>asList("watch-del", PATH),
request);
}
/**
* Test that requesting a watch when watch-project is unavailable throws a WatchmanException whose
* message mentions upgrading Watchman.
*/
  @SuppressWarnings("unchecked")
  @Test
  public void watchRequestTest() throws IOException, ExecutionException, InterruptedException {
    String PATH = "/foo/bar";
    // expect the WatchmanException (mentioning an upgrade) wrapped in ExecutionException by the future
    thrown.expect(ExecutionException.class);
    thrown.expectCause(Matchers.allOf(
        Matchers.isA(WatchmanException.class),
        Matchers.hasToString(
            Matchers.containsString("upgrade"))));
    // simulate an old Watchman without watch-project support
    withWatchProject = false;
    mClient.watch(Paths.get(PATH)).get(); // throws
  }
/**
* Test that the subscribe request sent by WatchmanClient respects the interface of Watchman
*/
  @SuppressWarnings("unchecked")
  @Test
  public void subscribeRequestTest() throws ExecutionException, InterruptedException, IOException {
    final String PATH = "/foo/bar";
    // NAME matches the client's generated subscription id ("sub-" + counter) — TODO confirm naming scheme
    final String NAME = "sub-0";
    Callback mockListener = Mockito.mock(Callback.class);
    Map<String, Object> response = new HashMap<>();
    response.put("subscribe", "name");
    mObjectQueue.put(response); // response irrelevant
    mClient.subscribe(
        Paths.get(PATH),
        new HashMap<String, Object>(),
        mockListener).get();
    // decode the bytes the client wrote and compare against the expected command tuple
    ByteArrayInputStream in = new ByteArrayInputStream(mOutgoingMessageStream.toByteArray());
    BserDeserializer deserializer = new BserDeserializer(BserDeserializer.KeyOrdering.UNSORTED);
    List<Object> request = (List<Object>) deserializer.deserializeBserValue(in);
    deepObjectEquals(
        Arrays.<Object>asList("subscribe", PATH, NAME, new HashMap<String, Object>()),
        request);
  }
/**
* Test that the unsubscribe request sent by WatchmanClient respects the interface of Watchman
*/
  @SuppressWarnings("unchecked")
  @Test
  public void unsubscribeRequestTest()
      throws ExecutionException, InterruptedException, IOException {
    final String PATH = "/foo/bar";
    final String NAME = "sub-0";
    Map<String, Object> response = new HashMap<>();
    response.put("deleted", true);
    // two replies queued: one for subscribe, one for unsubscribe
    mObjectQueue.put(response); // response irrelevant
    mObjectQueue.put(response); // response irrelevant
    Callback mockListener = Mockito.mock(Callback.class);
    WatchmanClient.SubscriptionDescriptor descriptor = mClient.subscribe(
        Paths.get(PATH),
        new HashMap<String, Object>(),
        mockListener).get();
    mOutgoingMessageStream.reset(); // ignore the subscribe command
    mClient.unsubscribe(descriptor).get();
    // only the unsubscribe command remains in the outgoing stream after the reset above
    ByteArrayInputStream in = new ByteArrayInputStream(mOutgoingMessageStream.toByteArray());
    BserDeserializer deserializer = new BserDeserializer(BserDeserializer.KeyOrdering.UNSORTED);
    List<Object> request = (List<Object>) deserializer.deserializeBserValue(in);
    deepObjectEquals(
        Arrays.<Object>asList("unsubscribe", PATH, NAME),
        request);
  }
/**
* Test that the clock request sent by WatchmanClient respects the interface of Watchman
*/
@SuppressWarnings("unchecked")
@Test
public void clockRequestWithoutTimeoutTest() throws ExecutionException, InterruptedException, IOException {
final String PATH = "/foo/bar";
Map<String, Object> response = new HashMap<>();
response.put("clock", "some value");
mObjectQueue.put(response); // response irrelevant
mClient.clock(Paths.get(PATH)).get();
ByteArrayInputStream in = new ByteArrayInputStream(mOutgoingMessageStream.toByteArray());
BserDeserializer deserializer = new BserDeserializer(BserDeserializer.KeyOrdering.UNSORTED);
List<Object> request = (List<Object>) deserializer.deserializeBserValue(in);
deepObjectEquals(
Arrays.<Object>asList("clock", PATH),
request);
}
/**
 * Test that the clock request sent by WatchmanClient respects the interface of
 * Watchman when a sync_timeout is supplied: the request is
 * ["clock", root, {"sync_timeout": timeout}].
 */
@SuppressWarnings("unchecked")
@Test
public void clockRequestWithTimeoutTest() throws ExecutionException, InterruptedException, IOException {
    final String watchRoot = "/foo/bar";
    // Kept as Short so the serialized value matches what the client sends.
    final Short syncTimeout = 1500;
    // Canned reply; only the outgoing request bytes are asserted on.
    Map<String, Object> cannedReply = new HashMap<>();
    cannedReply.put("clock", "some value");
    mObjectQueue.put(cannedReply);
    mClient.clock(Paths.get(watchRoot), syncTimeout).get();
    // Decode what was written to the socket and compare with the protocol shape.
    ByteArrayInputStream sentBytes = new ByteArrayInputStream(mOutgoingMessageStream.toByteArray());
    BserDeserializer bser = new BserDeserializer(BserDeserializer.KeyOrdering.UNSORTED);
    List<Object> decodedRequest = (List<Object>) bser.deserializeBserValue(sentBytes);
    deepObjectEquals(
        Arrays.<Object>asList(
            "clock",
            watchRoot,
            ImmutableMap.<String, Object>of("sync_timeout", syncTimeout)),
        decodedRequest);
}
/**
 * Test that the version request sent by WatchmanClient respects the interface
 * of Watchman when no capabilities are requested: the request is ["version"].
 */
@SuppressWarnings("unchecked")
@Test
public void versionRequestTestNoCapabilities()
    throws ExecutionException, InterruptedException, IOException {
    // Canned reply; only the outgoing request bytes are asserted on.
    Map<String, Object> cannedReply = new HashMap<>();
    cannedReply.put("version", "1.2.3");
    mObjectQueue.put(cannedReply);
    mClient.version().get();
    // Decode what was written to the socket and compare with the protocol shape.
    ByteArrayInputStream sentBytes = new ByteArrayInputStream(mOutgoingMessageStream.toByteArray());
    BserDeserializer bser = new BserDeserializer(BserDeserializer.KeyOrdering.UNSORTED);
    List<Object> decodedRequest = (List<Object>) bser.deserializeBserValue(sentBytes);
    deepObjectEquals(
        Collections.<Object>singletonList("version"),
        decodedRequest);
}
/**
 * Test that the version request sent by WatchmanClient respects the interface
 * of Watchman when optional and required capabilities are requested: the
 * request is ["version", {"optional": [...], "required": [...]}].
 */
@SuppressWarnings("unchecked")
@Test
public void versionRequestTestWithCapabilities()
    throws ExecutionException, InterruptedException, IOException {
    // Canned reply; only the outgoing request bytes are asserted on.
    Map<String, Object> cannedReply = new HashMap<>();
    cannedReply.put("version", "1.2.3");
    mObjectQueue.put(cannedReply);
    List<String> optional = Collections.singletonList("optional1");
    List<String> required = Arrays.asList("required1", "required2");
    mClient.version(optional, required).get();
    // The capabilities map the client is expected to serialize.
    Map<String, Object> expectedCapabilities = new HashMap<>();
    expectedCapabilities.put("optional", optional);
    expectedCapabilities.put("required", required);
    // Decode what was written to the socket and compare with the protocol shape.
    ByteArrayInputStream sentBytes = new ByteArrayInputStream(mOutgoingMessageStream.toByteArray());
    BserDeserializer bser = new BserDeserializer(BserDeserializer.KeyOrdering.UNSORTED);
    List<Object> decodedRequest = (List<Object>) bser.deserializeBserValue(sentBytes);
    deepObjectEquals(
        Arrays.<Object>asList("version", expectedCapabilities),
        decodedRequest);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.pen;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.Collection;
import java.util.List;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POCounter;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLoad;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.PORank;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POStore;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POFilter;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POCollectedGroup;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLocalRearrange;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPackage;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPackageLite;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POCombinerPackage;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POMultiQueryPackage;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POForEach;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POUnion;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.PODemux;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.PODistinct;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POSort;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POSplit;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POProject;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.GreaterThanExpr;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.LessThanExpr;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.GTOrEqualToExpr;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.LTOrEqualToExpr;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.EqualToExpr;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.NotEqualToExpr;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.PORegexp;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POIsNull;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POAnd;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POOr;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.PONot;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POBinCond;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.PONegative;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POUserFunc;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POUserComparisonFunc;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POMapLookUp;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POJoinPackage;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POCast;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLimit;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POFRJoin;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POMergeJoin;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POMergeCogroup;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POStream;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POSkewedJoin;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPartitionRearrange;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POOptimizedForEach;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPreCombinerLocalRearrange;
import org.apache.pig.data.DataBag;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.plan.PlanWalker;
import org.apache.pig.impl.plan.VisitorException;
import org.apache.pig.impl.util.IdentityHashSet;
import org.apache.pig.data.Tuple;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhyPlanVisitor;
import org.apache.pig.pen.util.LineageTracer;
import org.apache.pig.impl.plan.DepthFirstWalker;
import org.apache.pig.newplan.logical.relational.LogicalSchema;
/**
 * The class used to (re)attach illustrators to physical operators.
 *
 * An {@link Illustrator} records example data and equivalence classes of
 * tuples per operator; this visitor walks a physical plan and installs an
 * illustrator on each operator it knows how to handle. When {@link #revisit}
 * is used on an MR-compiled plan, operators that already carry an
 * illustrator are left untouched.
 */
public class IllustratorAttacher extends PhyPlanVisitor {
    PigContext pigContext;
    // Tracks tuple lineage across operators for example-data generation.
    LineageTracer lineage;
    // Equivalence classes of tuples registered per operator.
    HashMap<PhysicalOperator, Collection<IdentityHashSet<Tuple>>> poToEqclassesMap;
    // Example data bag produced by each operator's illustrator.
    private HashMap<PhysicalOperator, DataBag> poToDataMap;
    // Record cap handed to the load-side illustrator (see visitLoad).
    private int maxRecords;
    // True while re-traversing an already-illustrated (MR-compiled) plan.
    private boolean revisit = false;
    // Sub-expression result slots collected while inside a POFilter's inner
    // plan; null outside of a filter visit (see visitFilter).
    private ArrayList<Boolean[]> subExpResults = null;
    // Logical schema per POLoad, used to seed the load-side illustrator.
    private final Map<POLoad, LogicalSchema> poloadToSchemaMap;

    /**
     * Creates an attacher that walks {@code plan} depth-first.
     *
     * @param plan the physical plan whose operators get illustrators
     * @param lineage shared lineage tracer
     * @param maxRecords cap on records kept per load illustrator
     * @param poLoadToSchemaMap schema lookup for load operators
     * @param hadoopPigContext pig context passed to each illustrator
     */
    public IllustratorAttacher(PhysicalPlan plan, LineageTracer lineage, int maxRecords,
            Map<POLoad, LogicalSchema> poLoadToSchemaMap, PigContext hadoopPigContext) throws VisitorException {
        super(plan, new DepthFirstWalker<PhysicalOperator, PhysicalPlan>(plan));
        pigContext = hadoopPigContext;
        this.lineage = lineage;
        poToEqclassesMap = new HashMap<PhysicalOperator, Collection<IdentityHashSet<Tuple>>>();
        poToDataMap = new HashMap<PhysicalOperator, DataBag>();
        this.maxRecords = maxRecords;
        this.poloadToSchemaMap = poLoadToSchemaMap;
    }

    /**
     * Revisit an enhanced physical plan from MR compilation. Operators that
     * already have an illustrator are skipped by the visit methods while the
     * {@code revisit} flag is set.
     *
     * @param plan a physical plan to be traversed
     * @throws VisitorException if the traversal fails
     */
    public void revisit(PhysicalPlan plan) throws VisitorException {
        pushWalker(new DepthFirstWalker<PhysicalOperator, PhysicalPlan>(plan));
        revisit = true;
        // Temporarily swap the plan under visit, restore afterwards.
        PhysicalPlan oriPlan = mPlan;
        mPlan = plan;
        visit();
        mPlan = oriPlan;
        popWalker();
    }

    // Installs an illustrator on po with nEqClasses freshly created
    // (empty) equivalence classes.
    private void setIllustrator(PhysicalOperator po, int nEqClasses) {
        if (revisit && po.getIllustrator() != null)
            return;
        LinkedList<IdentityHashSet<Tuple>> eqClasses = new LinkedList<IdentityHashSet<Tuple>>();
        poToEqclassesMap.put(po, eqClasses);
        for (int i = 0; i < nEqClasses; ++i)
        {
            IdentityHashSet<Tuple> eqClass = new IdentityHashSet<Tuple>();
            eqClasses.add(eqClass);
        }
        Illustrator illustrator = new Illustrator(lineage, eqClasses, this, pigContext);
        po.setIllustrator(illustrator);
        poToDataMap.put(po, illustrator.getData());
    }

    // Installs an illustrator on po that reuses the given equivalence
    // classes (possibly null, in which case none are registered).
    private void setIllustrator(PhysicalOperator po, LinkedList<IdentityHashSet<Tuple>> eqClasses) {
        if (revisit && po.getIllustrator() != null)
            return;
        Illustrator illustrator = new Illustrator(lineage, eqClasses, this, pigContext);
        po.setIllustrator(illustrator);
        if (eqClasses != null)
            poToEqclassesMap.put(po, eqClasses);
        poToDataMap.put(po, illustrator.getData());
    }

    // Installs an illustrator on po with exactly one (empty) equivalence class.
    void setIllustrator(PhysicalOperator po) {
        if (revisit && po.getIllustrator() != null)
            return;
        LinkedList<IdentityHashSet<Tuple>> eqClasses = new LinkedList<IdentityHashSet<Tuple>>();
        IdentityHashSet<Tuple> eqClass = new IdentityHashSet<Tuple>();
        eqClasses.add(eqClass);
        Illustrator illustrator = new Illustrator(lineage, eqClasses, this, pigContext);
        po.setIllustrator(illustrator);
        poToEqclassesMap.put(po, eqClasses);
        poToDataMap.put(po, illustrator.getData());
    }

    /** @return the per-operator example data bags collected so far */
    public Map<PhysicalOperator, DataBag> getDataMap() {
        return poToDataMap;
    }

    @Override
    public void visitLoad(POLoad ld) throws VisitorException{
        // LOAD from temporary files need no illustrator
        if (revisit)
            return;
        LinkedList<IdentityHashSet<Tuple>> eqClasses = new LinkedList<IdentityHashSet<Tuple>>();
        poToEqclassesMap.put(ld, eqClasses);
        IdentityHashSet<Tuple> eqClass = new IdentityHashSet<Tuple>();
        eqClasses.add(eqClass);
        Illustrator illustrator;
        // Load illustrators additionally get the record cap and the load schema.
        illustrator = new Illustrator(lineage, eqClasses, maxRecords, this, poloadToSchemaMap.get(ld), pigContext);
        ld.setIllustrator(illustrator);
        poToDataMap.put(ld, illustrator.getData());
    }

    @Override
    public void visitStore(POStore st) throws VisitorException{
        setIllustrator(st, 1);
    }

    @Override
    public void visitFilter(POFilter fl) throws VisitorException{
        setIllustrator(fl, 0);
        // Expose the filter's sub-expression result slots so the expression
        // visitors below can register into them while the inner plan is walked.
        subExpResults = fl.getIllustrator().getSubExpResults();
        innerPlanAttach(fl, fl.getPlan());
        subExpResults = null;
    }

    @Override
    public void visitLocalRearrange(POLocalRearrange lr) throws VisitorException{
        super.visitLocalRearrange(lr);
        setIllustrator(lr);
    }

    @Override
    public void visitPackage(POPackage pkg) throws VisitorException{
        // A distinct (non-lite) package gets one eq class; otherwise none are
        // created here (they are set elsewhere, see visitPOForEach).
        if (!(pkg instanceof POPackageLite) && pkg.isDistinct())
            setIllustrator(pkg, 1);
        else
            setIllustrator(pkg, null);
    }

    @Override
    public void visitCombinerPackage(POCombinerPackage pkg) throws VisitorException{
        setIllustrator(pkg);
    }

    @Override
    public void visitMultiQueryPackage(POMultiQueryPackage pkg) throws VisitorException{
        setIllustrator(pkg);
    }

    @Override
    public void visitPOForEach(POForEach nfe) throws VisitorException {
        if (revisit && nfe.getIllustrator() != null)
            return;
        // Attach illustrators inside each inner plan first.
        List<PhysicalPlan> innerPlans = nfe.getInputPlans();
        for (PhysicalPlan innerPlan : innerPlans)
            innerPlanAttach(nfe, innerPlan);
        List<PhysicalOperator> preds = mPlan.getPredecessors(nfe);
        if (preds != null && preds.size() == 1 &&
            preds.get(0) instanceof POPackage &&
            !(preds.get(0) instanceof POPackageLite) &&
            ((POPackage) preds.get(0)).isDistinct()) {
            // equivalence class of POPackage for DISTINCT needs to be used
            //instead of the succeeding POForEach's equivalence class
            setIllustrator(nfe, preds.get(0).getIllustrator().getEquivalenceClasses());
            nfe.getIllustrator().setEqClassesShared();
        } else
            setIllustrator(nfe, 1);
    }

    @Override
    public void visitUnion(POUnion un) throws VisitorException{
        if (revisit && un.getIllustrator() != null)
            return;
        setIllustrator(un, null);
    }

    @Override
    public void visitSplit(POSplit spl) throws VisitorException{
        if (revisit && spl.getIllustrator() != null)
            return;
        for (PhysicalPlan poPlan : spl.getPlans())
            innerPlanAttach(spl, poPlan);
        setIllustrator(spl);
    }

    @Override
    public void visitDemux(PODemux demux) throws VisitorException{
        if (revisit && demux.getIllustrator() != null)
            return;
        List<PhysicalPlan> innerPlans = demux.getPlans();
        for (PhysicalPlan innerPlan : innerPlans)
            innerPlanAttach(demux, innerPlan);
        setIllustrator(demux);
    }

    @Override
    public void visitDistinct(PODistinct distinct) throws VisitorException {
        setIllustrator(distinct, 1);
    }

    @Override
    public void visitSort(POSort sort) throws VisitorException {
        setIllustrator(sort, 1);
    }

    @Override
    public void visitRank(PORank rank) throws VisitorException {
        // NOTE(review): rank uses 3 equivalence classes; the rationale is not
        // visible here — presumably one per rank comparison outcome.
        setIllustrator(rank, 3);
    }

    @Override
    public void visitCounter(POCounter counter) throws VisitorException {
        setIllustrator(counter, 1);
    }

    @Override
    public void visitProject(POProject proj) throws VisitorException{
        // Projections need no illustrator of their own.
    }

    @Override
    public void visitGreaterThan(GreaterThanExpr grt) throws VisitorException{
        setIllustrator(grt, 0);
        // Register this comparison's result slot with the enclosing filter.
        if (!revisit && subExpResults != null)
            subExpResults.add(grt.getIllustrator().getSubExpResult());
    }

    @Override
    public void visitLessThan(LessThanExpr lt) throws VisitorException{
        setIllustrator(lt, 0);
        if (!revisit && subExpResults != null)
            subExpResults.add(lt.getIllustrator().getSubExpResult());
    }

    @Override
    public void visitGTOrEqual(GTOrEqualToExpr gte) throws VisitorException{
        setIllustrator(gte, 0);
        if (!revisit && subExpResults != null)
            subExpResults.add(gte.getIllustrator().getSubExpResult());
    }

    @Override
    public void visitLTOrEqual(LTOrEqualToExpr lte) throws VisitorException{
        setIllustrator(lte, 0);
        if (!revisit && subExpResults != null)
            subExpResults.add(lte.getIllustrator().getSubExpResult());
    }

    @Override
    public void visitEqualTo(EqualToExpr eq) throws VisitorException{
        setIllustrator(eq, 0);
        if (!revisit && subExpResults != null)
            subExpResults.add(eq.getIllustrator().getSubExpResult());
    }

    @Override
    public void visitNotEqualTo(NotEqualToExpr eq) throws VisitorException{
        setIllustrator(eq, 0);
        if (!revisit && subExpResults != null)
            subExpResults.add(eq.getIllustrator().getSubExpResult());
    }

    @Override
    public void visitRegexp(PORegexp re) throws VisitorException{
        setIllustrator(re, 0);
        if (!revisit && subExpResults != null)
            subExpResults.add(re.getIllustrator().getSubExpResult());
    }

    @Override
    public void visitIsNull(POIsNull isNull) throws VisitorException {
        setIllustrator(isNull, 0);
        if (!revisit && subExpResults != null)
            subExpResults.add(isNull.getIllustrator().getSubExpResult());
    }

    @Override
    public void visitAnd(POAnd and) throws VisitorException {
        // AND/OR do not contribute their own sub-expression slot; their
        // operands already registered theirs.
        setIllustrator(and, 0);
    }

    @Override
    public void visitOr(POOr or) throws VisitorException {
        setIllustrator(or, 0);
    }

    @Override
    public void visitNot(PONot not) throws VisitorException {
        setIllustrator(not, 0);
        if (!revisit && subExpResults != null)
            subExpResults.add(not.getIllustrator().getSubExpResult());
    }

    @Override
    public void visitBinCond(POBinCond binCond) {
        // No illustrator attached.
    }

    @Override
    public void visitNegative(PONegative negative) {
        setIllustrator(negative, 1);
    }

    @Override
    public void visitUserFunc(POUserFunc userFunc) throws VisitorException {
        // No illustrator attached.
    }

    @Override
    public void visitComparisonFunc(POUserComparisonFunc compFunc) throws VisitorException {
        // one each for >, ==, and <
        setIllustrator(compFunc, 3);
    }

    @Override
    public void visitMapLookUp(POMapLookUp mapLookUp) {
        setIllustrator(mapLookUp, 1);
    }

    @Override
    public void visitJoinPackage(POJoinPackage joinPackage) throws VisitorException{
        if (revisit && joinPackage.getIllustrator() != null)
            return;
        setIllustrator(joinPackage);
        // The embedded foreach shares the package's illustrator.
        joinPackage.getForEach().setIllustrator(joinPackage.getIllustrator());
    }

    @Override
    public void visitCast(POCast cast) {
        // No illustrator attached.
    }

    @Override
    public void visitLimit(POLimit lim) throws VisitorException {
        setIllustrator(lim, 1);
    }

    @Override
    public void visitStream(POStream stream) throws VisitorException {
        setIllustrator(stream, 1);
    }

    /**
     * Delegates to {@link #visitPOForEach} since the optimized variant is
     * illustrated the same way.
     *
     * @param optimizedForEach
     */
    @Override
    public void visitPOOptimizedForEach(POOptimizedForEach optimizedForEach) throws VisitorException {
        visitPOForEach(optimizedForEach);
    }

    // Walks an operator's inner plan with a child walker, then — for a filter
    // currently collecting sub-expression results — creates one equivalence
    // class per combination of sub-expression outcomes (2^n).
    private void innerPlanAttach(PhysicalOperator po, PhysicalPlan plan) throws VisitorException {
        PlanWalker<PhysicalOperator, PhysicalPlan> childWalker =
            mCurrentWalker.spawnChildWalker(plan);
        pushWalker(childWalker);
        childWalker.walk(this);
        popWalker();
        LinkedList<IdentityHashSet<Tuple>> eqClasses = new LinkedList<IdentityHashSet<Tuple>>();
        if (subExpResults != null && !revisit) {
            // 2^n classes: one per truth-assignment of the n sub-expressions.
            int size = 1 << subExpResults.size();
            for (int i = 0; i < size; ++i) {
                eqClasses.add(new IdentityHashSet<Tuple>());
            }
            po.getIllustrator().setEquivalenceClasses(eqClasses, po);
        }
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.impl.bpmn.helper;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.flowable.bpmn.model.BoundaryEvent;
import org.flowable.bpmn.model.BpmnModel;
import org.flowable.bpmn.model.CallActivity;
import org.flowable.bpmn.model.ErrorEventDefinition;
import org.flowable.bpmn.model.Event;
import org.flowable.bpmn.model.EventDefinition;
import org.flowable.bpmn.model.EventSubProcess;
import org.flowable.bpmn.model.FlowElement;
import org.flowable.bpmn.model.FlowElementsContainer;
import org.flowable.bpmn.model.MapExceptionEntry;
import org.flowable.bpmn.model.Process;
import org.flowable.bpmn.model.StartEvent;
import org.flowable.common.engine.api.FlowableException;
import org.flowable.common.engine.api.delegate.event.FlowableEngineEventType;
import org.flowable.common.engine.api.delegate.event.FlowableEventDispatcher;
import org.flowable.common.engine.impl.util.CollectionUtil;
import org.flowable.common.engine.impl.util.ReflectUtil;
import org.flowable.engine.delegate.BpmnError;
import org.flowable.engine.delegate.DelegateExecution;
import org.flowable.engine.delegate.event.impl.FlowableEventBuilder;
import org.flowable.engine.impl.cfg.ProcessEngineConfigurationImpl;
import org.flowable.engine.impl.persistence.entity.ExecutionEntity;
import org.flowable.engine.impl.persistence.entity.ExecutionEntityManager;
import org.flowable.engine.impl.util.CommandContextUtil;
import org.flowable.engine.impl.util.ProcessDefinitionUtil;
/**
* This class is responsible for finding and executing error handlers for BPMN Errors.
*
* Possible error handlers include Error Intermediate Events and Error Event Sub-Processes.
*
* @author Tijs Rademakers
* @author Saeid Mirzaei
*/
public class ErrorPropagation {
/**
 * Propagates a {@link BpmnError} by delegating to the String-based overload
 * with the error's code.
 */
public static void propagateError(BpmnError error, DelegateExecution execution) {
    String errorCode = error.getErrorCode();
    propagateError(errorCode, execution);
}
/**
 * Propagates an error code upwards: collects candidate error handlers from
 * the current process definition and from every calling (parent) process
 * definition, then executes the best match. If no handler exists anywhere,
 * the error is re-thrown as a {@link BpmnError}.
 */
public static void propagateError(String errorCode, DelegateExecution execution) {
    // Gather the process definition ids of all calling processes so handlers
    // declared in parent processes are considered too.
    Set<String> ancestorProcessDefinitionIds = new HashSet<>();
    if (!execution.getProcessInstanceId().equals(execution.getRootProcessInstanceId())) {
        ExecutionEntity ancestor = (ExecutionEntity) execution;
        while (ancestor.getParentId() != null || ancestor.getSuperExecutionId() != null) {
            if (ancestor.getParentId() == null) {
                // Cross a call-activity boundary into the calling process.
                ancestor = ancestor.getSuperExecution();
                ancestorProcessDefinitionIds.add(ancestor.getProcessDefinitionId());
            } else {
                ancestor = ancestor.getParent();
            }
        }
    }
    Map<String, List<Event>> eventMap = new HashMap<>();
    for (String processDefinitionId : ancestorProcessDefinitionIds) {
        eventMap.putAll(findCatchingEventsForProcess(processDefinitionId, errorCode));
    }
    eventMap.putAll(findCatchingEventsForProcess(execution.getProcessDefinitionId(), errorCode));
    if (eventMap.isEmpty()) {
        throw new BpmnError(errorCode, "No catching boundary event found for error with errorCode '" + errorCode + "', neither in same process nor in parent process");
    }
    executeCatch(eventMap, execution, errorCode);
}
/**
 * Walks up from the throwing execution to find the closest matching handler
 * registered in {@code eventMap} (keys are "activityId#processDefinitionId")
 * and executes it. Call-activity boundaries are crossed when the current
 * process has no handler; the abandoned sub-process instances are deleted
 * before the handler runs. Throws a FlowableException when nothing matches.
 */
protected static void executeCatch(Map<String, List<Event>> eventMap, DelegateExecution delegateExecution, String errorId) {
    // Process instances left behind while crossing call-activity boundaries.
    Set<String> toDeleteProcessInstanceIds = new HashSet<>();
    Event matchingEvent = null;
    ExecutionEntity currentExecution = (ExecutionEntity) delegateExecution;
    ExecutionEntity parentExecution = null;
    // Direct hit: a handler is attached to the throwing activity itself.
    if (eventMap.containsKey(currentExecution.getActivityId() + "#" + currentExecution.getProcessDefinitionId())) {
        // Check for multi instance
        if (currentExecution.getParentId() != null && currentExecution.getParent().isMultiInstanceRoot()) {
            parentExecution = currentExecution.getParent();
        } else {
            parentExecution = currentExecution;
        }
        matchingEvent = getCatchEventFromList(eventMap.get(currentExecution.getActivityId() +
            "#" + currentExecution.getProcessDefinitionId()), parentExecution);
    } else {
        parentExecution = currentExecution.getParent();
        // Traverse parents until one is found that is a scope and matches the activity the boundary event is defined on
        while (matchingEvent == null && parentExecution != null) {
            FlowElementsContainer currentContainer = null;
            if (parentExecution.getCurrentFlowElement() instanceof FlowElementsContainer) {
                currentContainer = (FlowElementsContainer) parentExecution.getCurrentFlowElement();
            } else if (parentExecution.getId().equals(parentExecution.getProcessInstanceId())) {
                // A process-instance execution scopes the whole process model.
                currentContainer = ProcessDefinitionUtil.getProcess(parentExecution.getProcessDefinitionId());
            }
            if (currentContainer != null) {
                // First look for an event sub-process start event declared in
                // this scope (eventMap values starting with a StartEvent).
                for (String refId : eventMap.keySet()) {
                    List<Event> events = eventMap.get(refId);
                    if (CollectionUtil.isNotEmpty(events) && events.get(0) instanceof StartEvent) {
                        String refActivityId = refId.substring(0, refId.indexOf('#'));
                        String refProcessDefinitionId = refId.substring(refId.indexOf('#') + 1);
                        if (parentExecution.getProcessDefinitionId().equals(refProcessDefinitionId) &&
                            currentContainer.getFlowElement(refActivityId) != null) {
                            matchingEvent = getCatchEventFromList(events, parentExecution);
                            String errorCode = getErrorCodeFromErrorEventDefinition(matchingEvent);
                            // Stop scanning only when the match declares a
                            // concrete error code.
                            if (StringUtils.isNotEmpty(errorCode)) {
                                break;
                            }
                        }
                    }
                }
            }
            if (matchingEvent == null) {
                // Otherwise check boundary events attached to this parent.
                if (eventMap.containsKey(parentExecution.getActivityId() + "#" + parentExecution.getProcessDefinitionId())) {
                    // Check for multi instance
                    if (parentExecution.getParentId() != null && parentExecution.getParent().isMultiInstanceRoot()) {
                        parentExecution = parentExecution.getParent();
                    }
                    matchingEvent = getCatchEventFromList(eventMap.get(parentExecution.getActivityId() +
                        "#" + parentExecution.getProcessDefinitionId()), parentExecution);
                } else if (StringUtils.isNotEmpty(parentExecution.getParentId())) {
                    parentExecution = parentExecution.getParent();
                } else {
                    // Top of this process instance: cross into the calling
                    // process (if any) and mark this instance for deletion.
                    if (parentExecution.getProcessInstanceId().equals(parentExecution.getRootProcessInstanceId()) == false) {
                        toDeleteProcessInstanceIds.add(parentExecution.getProcessInstanceId());
                        parentExecution = parentExecution.getSuperExecution();
                    } else {
                        parentExecution = null;
                    }
                }
            }
        }
    }
    if (matchingEvent != null && parentExecution != null) {
        // Delete every sub-process instance that was crossed on the way up,
        // dispatching a completion-with-error event for each.
        for (String processInstanceId : toDeleteProcessInstanceIds) {
            ExecutionEntityManager executionEntityManager = CommandContextUtil.getExecutionEntityManager();
            ExecutionEntity processInstanceEntity = executionEntityManager.findById(processInstanceId);
            // Delete
            executionEntityManager.deleteProcessInstanceExecutionEntity(processInstanceEntity.getId(),
                currentExecution.getCurrentFlowElement() != null ? currentExecution.getCurrentFlowElement().getId() : null,
                "ERROR_EVENT " + errorId, false, false, false);
            // Event
            ProcessEngineConfigurationImpl processEngineConfiguration = CommandContextUtil.getProcessEngineConfiguration();
            FlowableEventDispatcher eventDispatcher = null;
            if (processEngineConfiguration != null) {
                eventDispatcher = processEngineConfiguration.getEventDispatcher();
            }
            if (eventDispatcher != null && eventDispatcher.isEnabled()) {
                processEngineConfiguration.getEventDispatcher()
                    .dispatchEvent(FlowableEventBuilder.createEntityEvent(FlowableEngineEventType.PROCESS_COMPLETED_WITH_ERROR_END_EVENT, processInstanceEntity),
                        processEngineConfiguration.getEngineCfgKey());
            }
        }
        executeEventHandler(matchingEvent, parentExecution, currentExecution, errorId);
    } else {
        throw new FlowableException("No matching parent execution for error code " + errorId + " found");
    }
}
/**
 * Executes the matched error handler: dispatches an ACTIVITY_ERROR_RECEIVED
 * event, then either starts the event sub-process (when the handler is a
 * StartEvent) or triggers the boundary-event execution.
 */
protected static void executeEventHandler(Event event, ExecutionEntity parentExecution, ExecutionEntity currentExecution, String errorId) {
    ProcessEngineConfigurationImpl processEngineConfiguration = CommandContextUtil.getProcessEngineConfiguration();
    FlowableEventDispatcher eventDispatcher = null;
    // Resolve the error reference to the error code declared in the model,
    // falling back to the raw id when the model declares none.
    String errorCode = errorId;
    BpmnModel bpmnModel = ProcessDefinitionUtil.getBpmnModel(parentExecution.getProcessDefinitionId());
    if (bpmnModel != null) {
        String modelError = bpmnModel.getErrors().get(errorId);
        if (modelError != null) {
            errorCode = modelError;
        }
    }
    if (processEngineConfiguration != null) {
        eventDispatcher = processEngineConfiguration.getEventDispatcher();
    }
    if (eventDispatcher != null && eventDispatcher.isEnabled()) {
        processEngineConfiguration.getEventDispatcher().dispatchEvent(
            FlowableEventBuilder.createErrorEvent(FlowableEngineEventType.ACTIVITY_ERROR_RECEIVED, event.getId(), errorId, errorCode, parentExecution.getId(),
                parentExecution.getProcessInstanceId(), parentExecution.getProcessDefinitionId()),
            processEngineConfiguration.getEngineCfgKey());
    }
    if (event instanceof StartEvent) {
        // Handler is an event sub-process start event: clean up the scope
        // being aborted, then create executions to run the sub-process.
        ExecutionEntityManager executionEntityManager = CommandContextUtil.getExecutionEntityManager();
        if (parentExecution.isProcessInstanceType()) {
            executionEntityManager.deleteChildExecutions(parentExecution, null, true);
        } else if (!currentExecution.getParentId().equals(parentExecution.getId())) {
            CommandContextUtil.getAgenda().planDestroyScopeOperation(currentExecution);
        } else {
            executionEntityManager.deleteExecutionAndRelatedData(currentExecution, null, false);
        }
        ExecutionEntity eventSubProcessExecution = executionEntityManager.createChildExecution(parentExecution);
        injectErrorContext(event, eventSubProcessExecution, errorCode);
        if (event.getSubProcess() != null) {
            // Enter the sub-process scope first, then continue from its start event.
            eventSubProcessExecution.setCurrentFlowElement(event.getSubProcess());
            CommandContextUtil.getActivityInstanceEntityManager().recordActivityStart(eventSubProcessExecution);
            ExecutionEntity subProcessStartEventExecution = executionEntityManager.createChildExecution(eventSubProcessExecution);
            subProcessStartEventExecution.setCurrentFlowElement(event);
            CommandContextUtil.getAgenda().planContinueProcessOperation(subProcessStartEventExecution);
        } else {
            eventSubProcessExecution.setCurrentFlowElement(event);
            CommandContextUtil.getAgenda().planContinueProcessOperation(eventSubProcessExecution);
        }
    } else {
        // Handler is a boundary event: find the child execution waiting on
        // that boundary event (matched by activity id) and trigger it.
        ExecutionEntity boundaryExecution = null;
        List<? extends ExecutionEntity> childExecutions = parentExecution.getExecutions();
        for (ExecutionEntity childExecution : childExecutions) {
            if (childExecution != null
                && childExecution.getActivityId() != null
                && childExecution.getActivityId().equals(event.getId())) {
                boundaryExecution = childExecution;
            }
        }
        injectErrorContext(event, boundaryExecution, errorCode);
        CommandContextUtil.getAgenda().planTriggerExecutionOperation(boundaryExecution);
    }
}
/**
 * Collects every error handler in the given process definition that can
 * catch {@code errorCode}: error start events of event sub-processes and
 * error boundary events attached to activities. Returned map keys are
 * "activityId#processDefinitionId"; an event with no error code, or a null
 * compare code, matches everything.
 */
protected static Map<String, List<Event>> findCatchingEventsForProcess(String processDefinitionId, String errorCode) {
    Map<String, List<Event>> eventMap = new HashMap<>();
    Process process = ProcessDefinitionUtil.getProcess(processDefinitionId);
    BpmnModel bpmnModel = ProcessDefinitionUtil.getBpmnModel(processDefinitionId);
    String compareErrorCode = retrieveErrorCode(bpmnModel, errorCode);

    // Error start events of event sub-processes.
    for (EventSubProcess eventSubProcess : process.findFlowElementsOfType(EventSubProcess.class, true)) {
        for (FlowElement flowElement : eventSubProcess.getFlowElements()) {
            if (!(flowElement instanceof StartEvent)) {
                continue;
            }
            StartEvent startEvent = (StartEvent) flowElement;
            if (!CollectionUtil.isNotEmpty(startEvent.getEventDefinitions())
                    || !(startEvent.getEventDefinitions().get(0) instanceof ErrorEventDefinition)) {
                continue;
            }
            ErrorEventDefinition errorEventDef = (ErrorEventDefinition) startEvent.getEventDefinitions().get(0);
            String eventErrorCode = retrieveErrorCode(bpmnModel, errorEventDef.getErrorCode());
            if (eventErrorCode == null || compareErrorCode == null || eventErrorCode.equals(compareErrorCode)) {
                List<Event> startEvents = new ArrayList<>();
                startEvents.add(startEvent);
                eventMap.put(eventSubProcess.getId() + "#" + processDefinitionId, startEvents);
            }
        }
    }

    // Error boundary events (but not those attached to event sub-processes).
    for (BoundaryEvent boundaryEvent : process.findFlowElementsOfType(BoundaryEvent.class, true)) {
        if (boundaryEvent.getAttachedToRefId() != null && CollectionUtil.isNotEmpty(boundaryEvent.getEventDefinitions())
                && boundaryEvent.getEventDefinitions().get(0) instanceof ErrorEventDefinition
                && !(boundaryEvent.getAttachedToRef() instanceof EventSubProcess)) {
            ErrorEventDefinition errorEventDef = (ErrorEventDefinition) boundaryEvent.getEventDefinitions().get(0);
            String eventErrorCode = retrieveErrorCode(bpmnModel, errorEventDef.getErrorCode());
            if (eventErrorCode == null || compareErrorCode == null || eventErrorCode.equals(compareErrorCode)) {
                // Several boundary events may share one attached activity:
                // get-or-create the list for that key.
                String key = boundaryEvent.getAttachedToRefId() + "#" + processDefinitionId;
                List<Event> elementBoundaryEvents = eventMap.get(key);
                if (elementBoundaryEvents == null) {
                    elementBoundaryEvents = new ArrayList<>();
                    eventMap.put(key, elementBoundaryEvents);
                }
                elementBoundaryEvents.add(boundaryEvent);
            }
        }
    }
    return eventMap;
}
/**
 * Attempts to map the given exception onto a BPMN error code: first via the supplied
 * mapping list, then via the map-exception entries of an enclosing call activity
 * (when this execution lives inside a called sub-process).
 *
 * @param e            the exception to map
 * @param execution    the execution on which the mapped error is propagated
 * @param exceptionMap exception-to-error-code mappings from the process model
 * @return {@code true} when an error was mapped and propagated, {@code false} otherwise
 */
public static boolean mapException(Exception e, ExecutionEntity execution, List<MapExceptionEntry> exceptionMap) {
    String mappedErrorCode = findMatchingExceptionMapping(e, exceptionMap);
    if (mappedErrorCode != null) {
        propagateError(mappedErrorCode, execution);
        return true;
    }

    // No direct mapping: climb to the process-instance root execution and hop to its
    // super execution, which points at the call activity (if any) that started us.
    ExecutionEntity callActivityExecution = null;
    ExecutionEntity ancestor = execution.getParent();
    while (ancestor != null) {
        if (ancestor.getId().equals(ancestor.getProcessInstanceId())) {
            callActivityExecution = ancestor.getSuperExecution();
            break;
        }
        ancestor = ancestor.getParent();
    }

    if (callActivityExecution != null) {
        CallActivity callActivity = (CallActivity) callActivityExecution.getCurrentFlowElement();
        if (CollectionUtil.isNotEmpty(callActivity.getMapExceptions())) {
            mappedErrorCode = findMatchingExceptionMapping(e, callActivity.getMapExceptions());
            if (mappedErrorCode != null) {
                // Propagate on the call activity's execution so the error surfaces
                // in the calling process.
                propagateError(mappedErrorCode, callActivityExecution);
                return true;
            }
        }
    }
    return false;
}
/**
 * Resolves the error code configured for the given exception.
 * <p>
 * Matching rules, in order of precedence:
 * <ul>
 *   <li>an entry whose class name equals the exception's class (optionally further
 *       filtered by a matching root cause) wins immediately;</li>
 *   <li>an entry with {@code andChildren} set also matches subclasses of its class;</li>
 *   <li>the first entry with an error code but no class name is remembered as the
 *       default mapping and returned only when nothing more specific matched.</li>
 * </ul>
 * NOTE(review): this assumes {@code ExceptionUtils.getRootCause(e)} never returns
 * null for a non-null exception (true for commons-lang3; older commons-lang 2.x
 * returned null when there is no cause) — confirm the library version in use.
 *
 * @param e            the exception to look up
 * @param exceptionMap mapping entries from the process model
 * @return the matching error code, the default mapping, or {@code null} when none applies
 */
public static String findMatchingExceptionMapping(Exception e, List<MapExceptionEntry> exceptionMap) {
    String defaultExceptionMapping = null;
    for (MapExceptionEntry me : exceptionMap) {
        String exceptionClass = me.getClassName();
        String errorCode = me.getErrorCode();
        String rootCause = me.getRootCause();
        // save the first mapping with no exception class as default map
        if (StringUtils.isNotEmpty(errorCode) && StringUtils.isEmpty(exceptionClass) && defaultExceptionMapping == null) {
            // if rootCause is set, check if it matches the exception
            if (StringUtils.isNotEmpty(rootCause)) {
                if (ExceptionUtils.getRootCause(e).getClass().getName().equals(rootCause)) {
                    defaultExceptionMapping = errorCode;
                    continue;
                }
            } else {
                defaultExceptionMapping = errorCode;
                continue;
            }
        }
        // ignore if error code or class are not defined
        if (StringUtils.isEmpty(errorCode) || StringUtils.isEmpty(exceptionClass)) {
            continue;
        }
        // exact class-name match; a configured root cause must also match
        if (e.getClass().getName().equals(exceptionClass)) {
            if (StringUtils.isNotEmpty(rootCause)) {
                if (ExceptionUtils.getRootCause(e).getClass().getName().equals(rootCause)) {
                    return errorCode;
                }
                continue;
            }
            return errorCode;
        }
        // subclass match, only when the entry opted in via andChildren
        if (me.isAndChildren()) {
            Class<?> exceptionClassClass = ReflectUtil.loadClass(exceptionClass);
            if (exceptionClassClass.isAssignableFrom(e.getClass())) {
                if (StringUtils.isNotEmpty(rootCause)) {
                    if (ExceptionUtils.getRootCause(e).getClass().getName().equals(rootCause)) {
                        return errorCode;
                    }
                } else {
                    return errorCode;
                }
            }
        }
    }
    return defaultExceptionMapping;
}
/**
 * Chooses the catch event to handle an error with. Among the candidates, the first
 * event is kept unless a later one declares a concrete error code while the current
 * choice does not — i.e. events with explicit error codes are preferred.
 *
 * @param events          candidate catch events
 * @param parentExecution execution whose process definition supplies the BPMN model
 * @return the selected event, or {@code null} when the list is empty
 */
protected static Event getCatchEventFromList(List<Event> events, ExecutionEntity parentExecution) {
    BpmnModel bpmnModel = ProcessDefinitionUtil.getBpmnModel(parentExecution.getProcessDefinitionId());
    Event chosen = null;
    String chosenErrorCode = null;
    for (Event candidate : events) {
        String candidateErrorCode = getErrorCodeFromErrorEventDefinition(candidate);
        if (bpmnModel != null) {
            // Resolve an error reference id to the actual error code when possible.
            candidateErrorCode = retrieveErrorCode(bpmnModel, candidateErrorCode);
        }
        boolean upgrade = chosen == null
                || (StringUtils.isEmpty(chosenErrorCode) && StringUtils.isNotEmpty(candidateErrorCode));
        if (upgrade) {
            chosen = candidate;
            chosenErrorCode = candidateErrorCode;
        }
    }
    return chosen;
}
/**
 * Returns the error code of the first {@link ErrorEventDefinition} attached to the
 * event, or {@code null} when the event carries no error event definition.
 */
protected static String getErrorCodeFromErrorEventDefinition(Event event) {
    return event.getEventDefinitions().stream()
            .filter(definition -> definition instanceof ErrorEventDefinition)
            .map(definition -> ((ErrorEventDefinition) definition).getErrorCode())
            .findFirst()
            .orElse(null);
}
/**
 * Resolves an error reference to its actual error code. When the model declares an
 * error under this reference id, the declared code is returned; otherwise the input
 * is assumed to already be the error code (possibly {@code null}) and is returned
 * unchanged.
 */
protected static String retrieveErrorCode(BpmnModel bpmnModel, String errorCode) {
    if (errorCode != null && bpmnModel.containsErrorRef(errorCode)) {
        return bpmnModel.getErrors().get(errorCode);
    }
    return errorCode;
}
/**
 * Walks the cause chain of {@code exc} looking for either a {@link BpmnError} to
 * propagate or an exception matching an entry in {@code exceptionMap}. When a
 * mapping matches, the mapped error is propagated and this method returns normally;
 * when a {@code BpmnError} is found, it is propagated; otherwise the original
 * throwable is rethrown unchanged (sneaky-throw via the unchecked cast to E).
 *
 * @param exc          the caught throwable whose cause chain is inspected
 * @param execution    the execution on which errors are propagated
 * @param exceptionMap exception-to-error-code mappings from the process model
 * @throws E the original throwable when no BpmnError and no mapping applies
 */
@SuppressWarnings("unchecked")
public static <E extends Throwable> void handleException(Throwable exc, ExecutionEntity execution, List<MapExceptionEntry> exceptionMap) throws E {
    Throwable cause = exc;
    BpmnError error = null;
    while (cause != null) {
        if (cause instanceof BpmnError) {
            error = (BpmnError) cause;
            break;
        } else if (cause instanceof Exception) {
            // A successful mapping already propagated the error; nothing left to do.
            // (The redundant cast of 'execution' was removed: the parameter is
            // already an ExecutionEntity.)
            if (ErrorPropagation.mapException((Exception) cause, execution, exceptionMap)) {
                return;
            }
        }
        cause = cause.getCause();
    }
    if (error != null) {
        ErrorPropagation.propagateError(error, execution);
    } else {
        // No BPMN-level handling found: rethrow the original throwable as-is.
        throw (E) exc;
    }
}
/**
 * Stores the error code in a process variable for every error event definition on
 * the event that names an error variable. The definition's transient and local-scope
 * flags select which of the four variable-setter variants is used.
 */
protected static void injectErrorContext(Event event, ExecutionEntity execution, String errorCode) {
    for (EventDefinition eventDefinition : event.getEventDefinitions()) {
        if (!(eventDefinition instanceof ErrorEventDefinition)) {
            continue;
        }
        ErrorEventDefinition errorDefinition = (ErrorEventDefinition) eventDefinition;
        String variableName = errorDefinition.getErrorVariableName();
        if (variableName == null || variableName.isEmpty()) {
            continue; // no variable requested for this definition
        }
        // Boxed Booleans may be null; treat null as false.
        boolean transientVariable = Boolean.TRUE.equals(errorDefinition.getErrorVariableTransient());
        boolean localScope = Boolean.TRUE.equals(errorDefinition.getErrorVariableLocalScope());
        if (transientVariable && localScope) {
            execution.setTransientVariableLocal(variableName, errorCode);
        } else if (transientVariable) {
            execution.setTransientVariable(variableName, errorCode);
        } else if (localScope) {
            execution.setVariableLocal(variableName, errorCode);
        } else {
            execution.setVariable(variableName, errorCode);
        }
    }
}
}
| |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.spring;
import com.hazelcast.config.MemberAddressProviderConfig;
import com.hazelcast.config.AwsConfig;
import com.hazelcast.config.CacheDeserializedValues;
import com.hazelcast.config.CacheSimpleConfig;
import com.hazelcast.config.Config;
import com.hazelcast.config.DiscoveryConfig;
import com.hazelcast.config.DiscoveryStrategyConfig;
import com.hazelcast.config.DurableExecutorConfig;
import com.hazelcast.config.EntryListenerConfig;
import com.hazelcast.config.EventJournalConfig;
import com.hazelcast.config.EvictionConfig;
import com.hazelcast.config.EvictionPolicy;
import com.hazelcast.config.ExecutorConfig;
import com.hazelcast.config.GlobalSerializerConfig;
import com.hazelcast.config.GroupConfig;
import com.hazelcast.config.HotRestartPersistenceConfig;
import com.hazelcast.config.InMemoryFormat;
import com.hazelcast.config.ItemListenerConfig;
import com.hazelcast.config.ListConfig;
import com.hazelcast.config.ListenerConfig;
import com.hazelcast.config.LockConfig;
import com.hazelcast.config.ManagementCenterConfig;
import com.hazelcast.config.MapAttributeConfig;
import com.hazelcast.config.MapConfig;
import com.hazelcast.config.MapIndexConfig;
import com.hazelcast.config.MapPartitionLostListenerConfig;
import com.hazelcast.config.MapStoreConfig;
import com.hazelcast.config.MaxSizeConfig;
import com.hazelcast.config.MemberAttributeConfig;
import com.hazelcast.config.MemberGroupConfig;
import com.hazelcast.config.MultiMapConfig;
import com.hazelcast.config.NativeMemoryConfig;
import com.hazelcast.config.NearCacheConfig;
import com.hazelcast.config.NetworkConfig;
import com.hazelcast.config.PartitionGroupConfig;
import com.hazelcast.config.QueryCacheConfig;
import com.hazelcast.config.QueueConfig;
import com.hazelcast.config.QueueStoreConfig;
import com.hazelcast.config.QuorumConfig;
import com.hazelcast.config.ReliableTopicConfig;
import com.hazelcast.config.ReplicatedMapConfig;
import com.hazelcast.config.RingbufferConfig;
import com.hazelcast.config.RingbufferStoreConfig;
import com.hazelcast.config.SSLConfig;
import com.hazelcast.config.ScheduledExecutorConfig;
import com.hazelcast.config.SemaphoreConfig;
import com.hazelcast.config.SerializationConfig;
import com.hazelcast.config.SerializerConfig;
import com.hazelcast.config.ServiceConfig;
import com.hazelcast.config.SetConfig;
import com.hazelcast.config.SocketInterceptorConfig;
import com.hazelcast.config.TcpIpConfig;
import com.hazelcast.config.TopicConfig;
import com.hazelcast.config.WANQueueFullBehavior;
import com.hazelcast.config.WanAcknowledgeType;
import com.hazelcast.config.WanConsumerConfig;
import com.hazelcast.config.WanPublisherConfig;
import com.hazelcast.config.WanReplicationConfig;
import com.hazelcast.config.WanReplicationRef;
import com.hazelcast.core.EntryListener;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IAtomicLong;
import com.hazelcast.core.IAtomicReference;
import com.hazelcast.core.ICountDownLatch;
import com.hazelcast.core.IList;
import com.hazelcast.core.ILock;
import com.hazelcast.core.IMap;
import com.hazelcast.core.IQueue;
import com.hazelcast.core.ISemaphore;
import com.hazelcast.core.ISet;
import com.hazelcast.core.ITopic;
import com.hazelcast.core.IdGenerator;
import com.hazelcast.core.MapStore;
import com.hazelcast.core.MapStoreFactory;
import com.hazelcast.core.Member;
import com.hazelcast.core.MembershipListener;
import com.hazelcast.core.MultiMap;
import com.hazelcast.core.QueueStore;
import com.hazelcast.core.QueueStoreFactory;
import com.hazelcast.core.ReplicatedMap;
import com.hazelcast.core.RingbufferStore;
import com.hazelcast.core.RingbufferStoreFactory;
import com.hazelcast.memory.MemoryUnit;
import com.hazelcast.nio.SocketInterceptor;
import com.hazelcast.nio.serialization.DataSerializableFactory;
import com.hazelcast.nio.serialization.PortableFactory;
import com.hazelcast.nio.serialization.StreamSerializer;
import com.hazelcast.nio.ssl.SSLContextFactory;
import com.hazelcast.quorum.QuorumType;
import com.hazelcast.spring.serialization.DummyDataSerializableFactory;
import com.hazelcast.spring.serialization.DummyPortableFactory;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.annotation.QuickTest;
import com.hazelcast.topic.TopicOverloadPolicy;
import com.hazelcast.wan.WanReplicationEndpoint;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import javax.annotation.Resource;
import java.io.File;
import java.net.InetSocketAddress;
import java.nio.ByteOrder;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import static com.hazelcast.config.HotRestartClusterDataRecoveryPolicy.PARTIAL_RECOVERY_MOST_COMPLETE;
import static com.hazelcast.spi.properties.GroupProperty.MERGE_FIRST_RUN_DELAY_SECONDS;
import static com.hazelcast.spi.properties.GroupProperty.MERGE_NEXT_RUN_DELAY_SECONDS;
import static com.hazelcast.spi.properties.GroupProperty.PARTITION_COUNT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@RunWith(CustomSpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {"fullConfig-applicationContext-hazelcast.xml"})
@Category(QuickTest.class)
public class TestFullApplicationContext extends HazelcastTestSupport {
// Effective cluster configuration, captured from the running instance in before().
private Config config;

// Spring-managed Hazelcast instance and the distributed structures it defines.
@Resource(name = "instance")
private HazelcastInstance instance;
@Resource(name = "map1")
private IMap<Object, Object> map1;
@Resource(name = "map2")
private IMap<Object, Object> map2;
@Resource(name = "multiMap")
private MultiMap multiMap;
@Resource(name = "replicatedMap")
private ReplicatedMap replicatedMap;
@Resource(name = "queue")
private IQueue queue;
@Resource(name = "topic")
private ITopic topic;
@Resource(name = "set")
private ISet set;
@Resource(name = "list")
private IList list;
@Resource(name = "executorService")
private ExecutorService executorService;
@Resource(name = "idGenerator")
private IdGenerator idGenerator;
@Resource(name = "atomicLong")
private IAtomicLong atomicLong;
@Resource(name = "atomicReference")
private IAtomicReference atomicReference;
@Resource(name = "countDownLatch")
private ICountDownLatch countDownLatch;
@Resource(name = "semaphore")
private ISemaphore semaphore;
@Resource(name = "lock")
private ILock lock;

// Dummy store/factory beans referenced by map, queue and ringbuffer configs.
@Resource(name = "dummyMapStore")
private MapStore dummyMapStore;
@Autowired
private MapStoreFactory dummyMapStoreFactory;
@Resource(name = "dummyQueueStore")
private QueueStore dummyQueueStore;
@Autowired
private QueueStoreFactory dummyQueueStoreFactory;
@Resource(name = "dummyRingbufferStore")
private RingbufferStore dummyRingbufferStore;
@Autowired
private RingbufferStoreFactory dummyRingbufferStoreFactory;

// Listener/endpoint/serialization beans the XML config wires by reference.
@Autowired
private WanReplicationEndpoint wanReplication;
@Autowired
private MembershipListener membershipListener;
@Autowired
private EntryListener entryListener;
@Resource
private SSLContextFactory sslContextFactory;
@Resource
private SocketInterceptor socketInterceptor;
@Resource
private StreamSerializer dummySerializer;
/**
 * Runs both before and after this test class: shuts down every Hazelcast instance
 * so the Spring context starts from — and leaves behind — a clean slate.
 */
@BeforeClass
@AfterClass
public static void start() {
    Hazelcast.shutdownAll();
}
/** Captures the effective configuration of the Spring-managed instance for each test. */
@Before
public void before() {
    config = instance.getConfig();
}
@Test
public void testCacheConfig() {
    // Exactly one cache is declared in the context.
    assertNotNull(config);
    assertEquals(1, config.getCacheConfigs().size());

    CacheSimpleConfig testCache = config.getCacheConfig("testCache");
    assertEquals("testCache", testCache.getName());
    assertTrue(testCache.isDisablePerEntryInvalidationEvents());

    // Hot-restart persistence is on, with fsync.
    assertTrue(testCache.getHotRestartConfig().isEnabled());
    assertTrue(testCache.getHotRestartConfig().isFsync());

    // WAN replication reference: single filter, republishing disabled.
    WanReplicationRef wanReplicationRef = testCache.getWanReplicationRef();
    assertEquals("testWan", wanReplicationRef.getName());
    assertEquals("PUT_IF_ABSENT", wanReplicationRef.getMergePolicy());
    assertEquals(1, wanReplicationRef.getFilters().size());
    assertEquals("com.example.SampleFilter", wanReplicationRef.getFilters().get(0));
    assertFalse(wanReplicationRef.isRepublishingEnabled());
}
/** Verifies every map configuration declared in fullConfig-applicationContext-hazelcast.xml. */
@Test
public void testMapConfig() {
    assertNotNull(config);
    assertEquals(26, config.getMapConfigs().size());
    // "testMap": eviction, hot-restart, indexes, attributes and quorum settings.
    MapConfig testMapConfig = config.getMapConfig("testMap");
    assertNotNull(testMapConfig);
    assertEquals("testMap", testMapConfig.getName());
    assertEquals(2, testMapConfig.getBackupCount());
    assertEquals(EvictionPolicy.NONE, testMapConfig.getEvictionPolicy());
    assertEquals(Integer.MAX_VALUE, testMapConfig.getMaxSizeConfig().getSize());
    assertEquals(30, testMapConfig.getEvictionPercentage());
    assertEquals(0, testMapConfig.getTimeToLiveSeconds());
    assertTrue(testMapConfig.getHotRestartConfig().isEnabled());
    assertTrue(testMapConfig.getHotRestartConfig().isFsync());
    assertEquals(1000, testMapConfig.getMinEvictionCheckMillis());
    assertEquals("PUT_IF_ABSENT", testMapConfig.getMergePolicy());
    assertTrue(testMapConfig.isReadBackupData());
    // Two indexes: unordered "name", ordered "age".
    assertEquals(2, testMapConfig.getMapIndexConfigs().size());
    for (MapIndexConfig index : testMapConfig.getMapIndexConfigs()) {
        if ("name".equals(index.getAttribute())) {
            assertFalse(index.isOrdered());
        } else if ("age".equals(index.getAttribute())) {
            assertTrue(index.isOrdered());
        } else {
            fail("unknown index!");
        }
    }
    // Two custom attributes backed by extractor classes.
    assertEquals(2, testMapConfig.getMapAttributeConfigs().size());
    for (MapAttributeConfig attribute : testMapConfig.getMapAttributeConfigs()) {
        if ("power".equals(attribute.getName())) {
            assertEquals("com.car.PowerExtractor", attribute.getExtractor());
        } else if ("weight".equals(attribute.getName())) {
            assertEquals("com.car.WeightExtractor", attribute.getExtractor());
        } else {
            fail("unknown attribute!");
        }
    }
    assertEquals("my-quorum", testMapConfig.getQuorumName());
    // test that the testMapConfig has a mapStoreConfig and it is correct
    MapStoreConfig testMapStoreConfig = testMapConfig.getMapStoreConfig();
    assertNotNull(testMapStoreConfig);
    assertEquals("com.hazelcast.spring.DummyStore", testMapStoreConfig.getClassName());
    assertTrue(testMapStoreConfig.isEnabled());
    assertEquals(0, testMapStoreConfig.getWriteDelaySeconds());
    assertEquals(10, testMapStoreConfig.getWriteBatchSize());
    assertTrue(testMapStoreConfig.isWriteCoalescing());
    assertEquals(MapStoreConfig.InitialLoadMode.EAGER, testMapStoreConfig.getInitialLoadMode());
    // test that the testMapConfig has a nearCacheConfig and it is correct
    NearCacheConfig testNearCacheConfig = testMapConfig.getNearCacheConfig();
    assertNotNull(testNearCacheConfig);
    assertEquals(0, testNearCacheConfig.getTimeToLiveSeconds());
    assertEquals(60, testNearCacheConfig.getMaxIdleSeconds());
    assertEquals(EvictionPolicy.LRU, testNearCacheConfig.getEvictionConfig().getEvictionPolicy());
    assertEquals(5000, testNearCacheConfig.getEvictionConfig().getSize());
    assertTrue(testNearCacheConfig.isInvalidateOnChange());
    assertFalse(testNearCacheConfig.isSerializeKeys());
    // test that the testMapConfig2's mapStoreConfig implementation
    MapConfig testMapConfig2 = config.getMapConfig("testMap2");
    assertNotNull(testMapConfig2.getMapStoreConfig().getImplementation());
    assertEquals(dummyMapStore, testMapConfig2.getMapStoreConfig().getImplementation());
    assertEquals(MapStoreConfig.InitialLoadMode.LAZY, testMapConfig2.getMapStoreConfig().getInitialLoadMode());
    // test testMapConfig2's WanReplicationConfig
    WanReplicationRef wanReplicationRef = testMapConfig2.getWanReplicationRef();
    assertEquals("testWan", wanReplicationRef.getName());
    assertEquals("PUT_IF_ABSENT", wanReplicationRef.getMergePolicy());
    assertTrue(wanReplicationRef.isRepublishingEnabled());
    assertEquals(1000, testMapConfig2.getMaxSizeConfig().getSize());
    assertEquals(MaxSizeConfig.MaxSizePolicy.PER_NODE, testMapConfig2.getMaxSizeConfig().getMaxSizePolicy());
    // One listener wired by class name, one by implementation bean.
    assertEquals(2, testMapConfig2.getEntryListenerConfigs().size());
    for (EntryListenerConfig listener : testMapConfig2.getEntryListenerConfigs()) {
        if (listener.getClassName() != null) {
            assertNull(listener.getImplementation());
            assertTrue(listener.isIncludeValue());
            assertFalse(listener.isLocal());
        } else {
            assertNotNull(listener.getImplementation());
            assertEquals(entryListener, listener.getImplementation());
            assertTrue(listener.isLocal());
            assertTrue(listener.isIncludeValue());
        }
    }
    // "simpleMap": basic eviction/backup settings, no near cache.
    MapConfig simpleMapConfig = config.getMapConfig("simpleMap");
    assertNotNull(simpleMapConfig);
    assertEquals("simpleMap", simpleMapConfig.getName());
    assertEquals(3, simpleMapConfig.getBackupCount());
    assertEquals(1, simpleMapConfig.getAsyncBackupCount());
    assertEquals(EvictionPolicy.LRU, simpleMapConfig.getEvictionPolicy());
    assertEquals(10, simpleMapConfig.getMaxSizeConfig().getSize());
    assertEquals(50, simpleMapConfig.getEvictionPercentage());
    assertEquals(1, simpleMapConfig.getTimeToLiveSeconds());
    assertEquals("LATEST_UPDATE", simpleMapConfig.getMergePolicy());
    // test that the simpleMapConfig does NOT have a nearCacheConfig
    assertNull(simpleMapConfig.getNearCacheConfig());
    // "testMap3"/"testMap4": map store wired via factory class name / factory bean.
    MapConfig testMapConfig3 = config.getMapConfig("testMap3");
    assertEquals("com.hazelcast.spring.DummyStoreFactory", testMapConfig3.getMapStoreConfig().getFactoryClassName());
    assertFalse(testMapConfig3.getMapStoreConfig().getProperties().isEmpty());
    assertEquals(testMapConfig3.getMapStoreConfig().getProperty("dummy.property"), "value");
    MapConfig testMapConfig4 = config.getMapConfig("testMap4");
    assertEquals(dummyMapStoreFactory, testMapConfig4.getMapStoreConfig().getFactoryImplementation());
    // Cache-deserialized-values variants (ALWAYS / NEVER / INDEX_ONLY / default).
    MapConfig mapWithOptimizedQueriesConfig = config.getMapConfig("mapWithOptimizedQueries");
    assertEquals(CacheDeserializedValues.ALWAYS, mapWithOptimizedQueriesConfig.getCacheDeserializedValues());
    MapConfig mapWithValueCachingSetToNever = config.getMapConfig("mapWithValueCachingSetToNever");
    assertEquals(CacheDeserializedValues.NEVER, mapWithValueCachingSetToNever.getCacheDeserializedValues());
    MapConfig mapWithValueCachingSetToAlways = config.getMapConfig("mapWithValueCachingSetToAlways");
    assertEquals(CacheDeserializedValues.ALWAYS, mapWithValueCachingSetToAlways.getCacheDeserializedValues());
    MapConfig mapWithNotOptimizedQueriesConfig = config.getMapConfig("mapWithNotOptimizedQueries");
    assertEquals(CacheDeserializedValues.INDEX_ONLY, mapWithNotOptimizedQueriesConfig.getCacheDeserializedValues());
    MapConfig mapWithDefaultOptimizedQueriesConfig = config.getMapConfig("mapWithDefaultOptimizedQueries");
    assertEquals(CacheDeserializedValues.INDEX_ONLY, mapWithDefaultOptimizedQueriesConfig.getCacheDeserializedValues());
    // Partition-lost listener and custom partitioning strategy.
    MapConfig testMapWithPartitionLostListenerConfig = config.getMapConfig("mapWithPartitionLostListener");
    List<MapPartitionLostListenerConfig> partitionLostListenerConfigs
            = testMapWithPartitionLostListenerConfig.getPartitionLostListenerConfigs();
    assertEquals(1, partitionLostListenerConfigs.size());
    assertEquals("DummyMapPartitionLostListenerImpl", partitionLostListenerConfigs.get(0).getClassName());
    MapConfig testMapWithPartitionStrategyConfig = config.getMapConfig("mapWithPartitionStrategy");
    assertEquals("com.hazelcast.spring.DummyPartitionStrategy",
            testMapWithPartitionStrategyConfig.getPartitioningStrategyConfig().getPartitioningStrategyClass());
}
/** Verifies the queue configurations, including all four queue-store wiring variants. */
@Test
public void testQueueConfig() {
    // "testQ": bounded queue with an item listener and statistics enabled.
    QueueConfig testQConfig = config.getQueueConfig("testQ");
    assertNotNull(testQConfig);
    assertEquals("testQ", testQConfig.getName());
    assertEquals(1000, testQConfig.getMaxSize());
    assertEquals(1, testQConfig.getItemListenerConfigs().size());
    assertTrue(testQConfig.isStatisticsEnabled());
    ItemListenerConfig listenerConfig = testQConfig.getItemListenerConfigs().get(0);
    assertEquals("com.hazelcast.spring.DummyItemListener", listenerConfig.getClassName());
    assertTrue(listenerConfig.isIncludeValue());
    // "q": statistics disabled, empty-queue TTL and quorum protection set.
    QueueConfig qConfig = config.getQueueConfig("q");
    assertNotNull(qConfig);
    assertEquals("q", qConfig.getName());
    assertEquals(2500, qConfig.getMaxSize());
    assertFalse(qConfig.isStatisticsEnabled());
    assertEquals(100, qConfig.getEmptyQueueTtl());
    assertEquals("my-quorum", qConfig.getQuorumName());
    // Store wired by class name.
    QueueConfig queueWithStore1 = config.getQueueConfig("queueWithStore1");
    assertNotNull(queueWithStore1);
    QueueStoreConfig storeConfig1 = queueWithStore1.getQueueStoreConfig();
    assertNotNull(storeConfig1);
    assertEquals(DummyQueueStore.class.getName(), storeConfig1.getClassName());
    // Store wired by factory class name.
    QueueConfig queueWithStore2 = config.getQueueConfig("queueWithStore2");
    assertNotNull(queueWithStore2);
    QueueStoreConfig storeConfig2 = queueWithStore2.getQueueStoreConfig();
    assertNotNull(storeConfig2);
    assertEquals(DummyQueueStoreFactory.class.getName(), storeConfig2.getFactoryClassName());
    // Store wired by implementation bean.
    QueueConfig queueWithStore3 = config.getQueueConfig("queueWithStore3");
    assertNotNull(queueWithStore3);
    QueueStoreConfig storeConfig3 = queueWithStore3.getQueueStoreConfig();
    assertNotNull(storeConfig3);
    assertEquals(dummyQueueStore, storeConfig3.getStoreImplementation());
    // Store wired by factory implementation bean.
    QueueConfig queueWithStore4 = config.getQueueConfig("queueWithStore4");
    assertNotNull(queueWithStore4);
    QueueStoreConfig storeConfig4 = queueWithStore4.getQueueStoreConfig();
    assertNotNull(storeConfig4);
    assertEquals(dummyQueueStoreFactory, storeConfig4.getFactoryImplementation());
}
@Test
public void testLockConfig() {
    // "lock" must exist and be guarded by the "my-quorum" quorum.
    LockConfig lockCfg = config.getLockConfig("lock");
    assertNotNull(lockCfg);
    assertEquals("lock", lockCfg.getName());
    assertEquals("my-quorum", lockCfg.getQuorumName());
}
/** Verifies the ringbuffer configurations, including all four ringbuffer-store wiring variants. */
@Test
public void testRingbufferConfig() {
    // "testRingbuffer": OBJECT format, capacity/backup/TTL settings, store by class name.
    RingbufferConfig testRingbuffer = config.getRingbufferConfig("testRingbuffer");
    assertNotNull(testRingbuffer);
    assertEquals("testRingbuffer", testRingbuffer.getName());
    assertEquals(InMemoryFormat.OBJECT, testRingbuffer.getInMemoryFormat());
    assertEquals(100, testRingbuffer.getCapacity());
    assertEquals(1, testRingbuffer.getBackupCount());
    assertEquals(1, testRingbuffer.getAsyncBackupCount());
    assertEquals(20, testRingbuffer.getTimeToLiveSeconds());
    RingbufferStoreConfig store1 = testRingbuffer.getRingbufferStoreConfig();
    assertNotNull(store1);
    assertEquals(DummyRingbufferStore.class.getName(), store1.getClassName());
    // Store wired by factory class name, with properties.
    RingbufferConfig testRingbuffer2 = config.getRingbufferConfig("testRingbuffer2");
    assertNotNull(testRingbuffer2);
    RingbufferStoreConfig store2 = testRingbuffer2.getRingbufferStoreConfig();
    assertNotNull(store2);
    assertEquals(DummyRingbufferStoreFactory.class.getName(), store2.getFactoryClassName());
    assertFalse(store2.getProperties().isEmpty());
    assertEquals("value", store2.getProperty("dummy.property"));
    assertEquals("value2", store2.getProperty("dummy.property.2"));
    // Store wired by implementation bean.
    RingbufferConfig testRingbuffer3 = config.getRingbufferConfig("testRingbuffer3");
    assertNotNull(testRingbuffer3);
    RingbufferStoreConfig store3 = testRingbuffer3.getRingbufferStoreConfig();
    assertNotNull(store3);
    assertEquals(dummyRingbufferStore, store3.getStoreImplementation());
    // Store wired by factory implementation bean.
    RingbufferConfig testRingbuffer4 = config.getRingbufferConfig("testRingbuffer4");
    assertNotNull(testRingbuffer4);
    RingbufferStoreConfig store4 = testRingbuffer4.getRingbufferStoreConfig();
    assertNotNull(store4);
    assertEquals(dummyRingbufferStoreFactory, store4.getFactoryImplementation());
}
@Test
public void testSemaphoreConfig() {
    // "testSemaphore": one sync + one async backup, 10 initial permits.
    SemaphoreConfig semaphoreCfg = config.getSemaphoreConfig("testSemaphore");
    assertNotNull(semaphoreCfg);
    assertEquals("testSemaphore", semaphoreCfg.getName());
    assertEquals(1, semaphoreCfg.getBackupCount());
    assertEquals(1, semaphoreCfg.getAsyncBackupCount());
    assertEquals(10, semaphoreCfg.getInitialPermits());
}
@Test
public void testReliableTopicConfig() {
    ReliableTopicConfig reliableTopicCfg = config.getReliableTopicConfig("testReliableTopic");
    assertNotNull(reliableTopicCfg);
    assertEquals("testReliableTopic", reliableTopicCfg.getName());

    // Exactly one message listener, statistics disabled.
    assertEquals(1, reliableTopicCfg.getMessageListenerConfigs().size());
    assertFalse(reliableTopicCfg.isStatisticsEnabled());
    ListenerConfig messageListener = reliableTopicCfg.getMessageListenerConfigs().get(0);
    assertEquals("com.hazelcast.spring.DummyMessageListener", messageListener.getClassName());

    // Batching and overload behavior.
    assertEquals(10, reliableTopicCfg.getReadBatchSize());
    assertEquals(TopicOverloadPolicy.BLOCK, reliableTopicCfg.getTopicOverloadPolicy());
}
/** Verifies the "testMultimap" configuration and its two entry listeners. */
@Test
public void testMultimapConfig() {
    MultiMapConfig testMultiMapConfig = config.getMultiMapConfig("testMultimap");
    assertEquals(MultiMapConfig.ValueCollectionType.LIST, testMultiMapConfig.getValueCollectionType());
    assertEquals(2, testMultiMapConfig.getEntryListenerConfigs().size());
    assertFalse(testMultiMapConfig.isBinary());
    assertFalse(testMultiMapConfig.isStatisticsEnabled());
    // One listener is wired by class name, the other by implementation bean.
    for (EntryListenerConfig listener : testMultiMapConfig.getEntryListenerConfigs()) {
        if (listener.getClassName() != null) {
            assertNull(listener.getImplementation());
            assertTrue(listener.isIncludeValue());
            assertFalse(listener.isLocal());
        } else {
            assertNotNull(listener.getImplementation());
            assertEquals(entryListener, listener.getImplementation());
            assertTrue(listener.isLocal());
            assertTrue(listener.isIncludeValue());
        }
    }
}
@Test
public void testListConfig() {
    // "testList": max size 9999, one sync + one async backup, statistics off.
    ListConfig listCfg = config.getListConfig("testList");
    assertNotNull(listCfg);
    assertEquals("testList", listCfg.getName());
    assertEquals(9999, listCfg.getMaxSize());
    assertEquals(1, listCfg.getBackupCount());
    assertEquals(1, listCfg.getAsyncBackupCount());
    assertFalse(listCfg.isStatisticsEnabled());
}
@Test
public void testSetConfig() {
    // "testSet": max size 7777, no backups, statistics off.
    SetConfig setCfg = config.getSetConfig("testSet");
    assertNotNull(setCfg);
    assertEquals("testSet", setCfg.getName());
    assertEquals(7777, setCfg.getMaxSize());
    assertEquals(0, setCfg.getBackupCount());
    assertEquals(0, setCfg.getAsyncBackupCount());
    assertFalse(setCfg.isStatisticsEnabled());
}
@Test
public void testTopicConfig() {
    TopicConfig topicCfg = config.getTopicConfig("testTopic");
    assertNotNull(topicCfg);
    assertEquals("testTopic", topicCfg.getName());

    // Global ordering on, statistics off, one message listener by class name.
    assertEquals(1, topicCfg.getMessageListenerConfigs().size());
    assertTrue(topicCfg.isGlobalOrderingEnabled());
    assertFalse(topicCfg.isStatisticsEnabled());
    ListenerConfig messageListener = topicCfg.getMessageListenerConfigs().get(0);
    assertEquals("com.hazelcast.spring.DummyMessageListener", messageListener.getClassName());
}
/** Verifies the custom "my-service" user service: class, properties, config object and bean. */
@Test
public void testServiceConfig() {
    ServiceConfig serviceConfig = config.getServicesConfig().getServiceConfig("my-service");
    assertEquals("com.hazelcast.spring.MyService", serviceConfig.getClassName());
    assertEquals("prop1-value", serviceConfig.getProperties().getProperty("prop1"));
    assertEquals("prop2-value", serviceConfig.getProperties().getProperty("prop2"));
    // The nested config object is populated from the XML attributes.
    MyServiceConfig configObject = (MyServiceConfig) serviceConfig.getConfigObject();
    assertNotNull(configObject);
    assertEquals("prop1", configObject.stringProp);
    assertEquals(123, configObject.intProp);
    assertTrue(configObject.boolProp);
    // The service implementation itself is a Spring bean of type MyService.
    Object impl = serviceConfig.getImplementation();
    assertNotNull(impl);
    assertTrue("expected service of class com.hazelcast.spring.MyService but it is " + impl.getClass().getName(), impl instanceof MyService);
}
@Test
public void testGroupConfig() {
    // Cluster group name/password as declared in the XML context.
    GroupConfig group = config.getGroupConfig();
    assertNotNull(group);
    assertEquals("spring-group", group.getName());
    assertEquals("spring-group-pass", group.getPassword());
}
@Test
public void testExecutorConfig() {
    // "testExec": pool of 2, queue capacity 100, statistics on.
    ExecutorConfig execCfg = config.getExecutorConfig("testExec");
    assertNotNull(execCfg);
    assertEquals("testExec", execCfg.getName());
    assertEquals(2, execCfg.getPoolSize());
    assertEquals(100, execCfg.getQueueCapacity());
    assertTrue(execCfg.isStatisticsEnabled());

    // "testExec2": pool of 5, queue capacity 300, statistics off.
    ExecutorConfig execCfg2 = config.getExecutorConfig("testExec2");
    assertNotNull(execCfg2);
    assertEquals("testExec2", execCfg2.getName());
    assertEquals(5, execCfg2.getPoolSize());
    assertEquals(300, execCfg2.getQueueCapacity());
    assertFalse(execCfg2.isStatisticsEnabled());
}
@Test
public void testDurableExecutorConfig() {
    // "durableExec": pool of 10, durability 5, capacity 200.
    DurableExecutorConfig durableExecCfg = config.getDurableExecutorConfig("durableExec");
    assertNotNull(durableExecCfg);
    assertEquals("durableExec", durableExecCfg.getName());
    assertEquals(10, durableExecCfg.getPoolSize());
    assertEquals(5, durableExecCfg.getDurability());
    assertEquals(200, durableExecCfg.getCapacity());
}
@Test
public void testScheduledExecutorConfig() {
    // "scheduledExec": pool of 10, durability 5.
    ScheduledExecutorConfig scheduledExecCfg = config.getScheduledExecutorConfig("scheduledExec");
    assertNotNull(scheduledExecCfg);
    assertEquals("scheduledExec", scheduledExecCfg.getName());
    assertEquals(10, scheduledExecCfg.getPoolSize());
    assertEquals(5, scheduledExecCfg.getDurability());
}
@Test
public void testNetworkConfig() {
    NetworkConfig networkConfig = config.getNetworkConfig();
    assertNotNull(networkConfig);
    // Fixed port, no auto-increment.
    assertEquals(5700, networkConfig.getPort());
    assertFalse(networkConfig.isPortAutoIncrement());
    Collection<String> allowedPorts = networkConfig.getOutboundPortDefinitions();
    assertEquals(2, allowedPorts.size());
    // Typed iterator instead of the former raw Iterator (avoids unchecked access).
    Iterator<String> portIter = allowedPorts.iterator();
    assertEquals("35000-35100", portIter.next());
    assertEquals("36000,36100", portIter.next());
    assertFalse(networkConfig.getJoin().getMulticastConfig().isEnabled());
    // Expected value first: the original had the arguments swapped, which produces
    // misleading "expected/actual" failure messages.
    assertEquals(8, networkConfig.getJoin().getMulticastConfig().getMulticastTimeoutSeconds());
    assertEquals(16, networkConfig.getJoin().getMulticastConfig().getMulticastTimeToLive());
    assertFalse(networkConfig.getJoin().getMulticastConfig().isLoopbackModeEnabled());
    Set<String> tis = networkConfig.getJoin().getMulticastConfig().getTrustedInterfaces();
    assertEquals(1, tis.size());
    assertEquals("10.10.10.*", tis.iterator().next());
    assertFalse(networkConfig.getInterfaces().isEnabled());
    assertEquals(1, networkConfig.getInterfaces().getInterfaces().size());
    assertEquals("10.10.1.*", networkConfig.getInterfaces().getInterfaces().iterator().next());
    // TCP/IP join is the active join mechanism.
    TcpIpConfig tcp = networkConfig.getJoin().getTcpIpConfig();
    assertNotNull(tcp);
    assertTrue(tcp.isEnabled());
    assertTrue(networkConfig.getSymmetricEncryptionConfig().isEnabled());
    List<String> members = tcp.getMembers();
    assertEquals(members.toString(), 2, members.size());
    assertEquals("127.0.0.1:5700", members.get(0));
    assertEquals("127.0.0.1:5701", members.get(1));
    assertEquals("127.0.0.1:5700", tcp.getRequiredMember());
    assertAwsConfig(networkConfig.getJoin().getAwsConfig());
    assertTrue("reuse-address", networkConfig.isReuseAddress());
    assertDiscoveryConfig(networkConfig.getJoin().getDiscoveryConfig());
    // Member address provider: declared (with properties) but disabled.
    final MemberAddressProviderConfig memberAddressProviderConfig = networkConfig.getMemberAddressProviderConfig();
    assertFalse(memberAddressProviderConfig.isEnabled());
    assertEquals("com.hazelcast.spring.DummyMemberAddressProvider", memberAddressProviderConfig.getClassName());
    assertFalse(memberAddressProviderConfig.getProperties().isEmpty());
    assertEquals("value", memberAddressProviderConfig.getProperties().getProperty("dummy.property"));
    assertEquals("value2", memberAddressProviderConfig.getProperties().getProperty("dummy.property.2"));
}
/** Verifies the sample AWS join settings: declared but disabled, all values parsed. */
private void assertAwsConfig(AwsConfig awsConfig) {
    assertFalse(awsConfig.isEnabled());
    assertEquals("sample-access-key", awsConfig.getAccessKey());
    assertEquals("sample-secret-key", awsConfig.getSecretKey());
    assertEquals("sample-region", awsConfig.getRegion());
    assertEquals("sample-header", awsConfig.getHostHeader());
    assertEquals("sample-group", awsConfig.getSecurityGroupName());
    assertEquals("sample-tag-key", awsConfig.getTagKey());
    assertEquals("sample-tag-value", awsConfig.getTagValue());
    assertEquals("sample-role", awsConfig.getIamRole());
}
/** Verifies the discovery SPI wiring: provider/filter beans plus two strategies. */
private void assertDiscoveryConfig(DiscoveryConfig discoveryConfig) {
    assertTrue(discoveryConfig.getDiscoveryServiceProvider() instanceof DummyDiscoveryServiceProvider);
    assertTrue(discoveryConfig.getNodeFilter() instanceof DummyNodeFilter);
    List<DiscoveryStrategyConfig> strategies
            = (List<DiscoveryStrategyConfig>) discoveryConfig.getDiscoveryStrategyConfigs();
    assertEquals(2, strategies.size());
    // First strategy: factory bean plus three properties (all exposed as strings).
    DiscoveryStrategyConfig first = strategies.get(0);
    assertTrue(first.getDiscoveryStrategyFactory() instanceof DummyDiscoveryStrategyFactory);
    assertEquals(3, first.getProperties().size());
    assertEquals("foo", first.getProperties().get("key-string"));
    assertEquals("123", first.getProperties().get("key-int"));
    assertEquals("true", first.getProperties().get("key-boolean"));
    // Second strategy: declared by class name with a single property.
    DiscoveryStrategyConfig second = strategies.get(1);
    assertEquals(DummyDiscoveryStrategy.class.getName(), second.getClassName());
    assertEquals(1, second.getProperties().size());
    assertEquals("foo2", second.getProperties().get("key-string"));
}
@Test
public void testProperties() {
    // The declarative config and the running instance must expose the same properties.
    Properties declared = config.getProperties();
    assertNotNull(declared);
    assertEquals("5", declared.get(MERGE_FIRST_RUN_DELAY_SECONDS.getName()));
    assertEquals("5", declared.get(MERGE_NEXT_RUN_DELAY_SECONDS.getName()));
    assertEquals("277", declared.get(PARTITION_COUNT.getName()));
    Properties live = instance.getConfig().getProperties();
    assertNotNull(live);
    assertEquals("5", live.get(MERGE_FIRST_RUN_DELAY_SECONDS.getName()));
    assertEquals("5", live.get(MERGE_NEXT_RUN_DELAY_SECONDS.getName()));
    assertEquals("277", live.get(PARTITION_COUNT.getName()));
}
@Test
public void testInstance() {
    assertNotNull(instance);
    // Single-member cluster listening on the configured port.
    Set<Member> members = instance.getCluster().getMembers();
    assertEquals(1, members.size());
    InetSocketAddress address = members.iterator().next().getSocketAddress();
    assertEquals(5700, address.getPort());
    assertEquals("test-instance", config.getInstanceName());
    assertEquals("HAZELCAST_ENTERPRISE_LICENSE_KEY", config.getLicenseKey());
    // Partition count must match the PARTITION_COUNT property asserted elsewhere (277).
    assertEquals(277, instance.getPartitionService().getPartitions().size());
}
@Test
public void testHazelcastInstances() {
    // Every injected distributed object must exist and carry its configured name.
    // (Asserts grouped per bean rather than all-notNull first, as each pair is independent.)
    assertNotNull(map1);
    assertEquals("map1", map1.getName());
    assertNotNull(map2);
    assertEquals("map2", map2.getName());
    assertNotNull(multiMap);
    assertEquals("testMultimap", multiMap.getName());
    assertNotNull(replicatedMap);
    assertEquals("replicatedMap", replicatedMap.getName());
    assertNotNull(queue);
    assertEquals("testQ", queue.getName());
    assertNotNull(topic);
    assertEquals("testTopic", topic.getName());
    assertNotNull(set);
    assertEquals("set", set.getName());
    assertNotNull(list);
    assertEquals("list", list.getName());
    assertNotNull(executorService);
    assertNotNull(idGenerator);
    assertEquals("idGenerator", idGenerator.getName());
    assertNotNull(atomicLong);
    assertEquals("atomicLong", atomicLong.getName());
    assertNotNull(atomicReference);
    assertEquals("atomicReference", atomicReference.getName());
    assertNotNull(countDownLatch);
    assertEquals("countDownLatch", countDownLatch.getName());
    assertNotNull(semaphore);
    assertEquals("semaphore", semaphore.getName());
    assertNotNull(lock);
}
@Test
public void testWanReplicationConfig() {
    // WAN replication "testWan" declares three publishers and one consumer.
    WanReplicationConfig wcfg = config.getWanReplicationConfig("testWan");
    assertNotNull(wcfg);
    // Publisher 0: built-in batch replication towards group "tokyo".
    WanPublisherConfig publisherConfig = wcfg.getWanPublisherConfigs().get(0);
    assertEquals("tokyo", publisherConfig.getGroupName());
    assertEquals("com.hazelcast.enterprise.wan.replication.WanBatchReplication", publisherConfig.getClassName());
    assertEquals(WANQueueFullBehavior.THROW_EXCEPTION, publisherConfig.getQueueFullBehavior());
    assertEquals(1000, publisherConfig.getQueueCapacity());
    // Publisher properties are parsed as strings regardless of their numeric/boolean content.
    Map<String, Comparable> publisherProps = publisherConfig.getProperties();
    assertEquals("50", publisherProps.get("batch.size"));
    assertEquals("3000", publisherProps.get("batch.max.delay.millis"));
    assertEquals("false", publisherProps.get("snapshot.enabled"));
    assertEquals("5000", publisherProps.get("response.timeout.millis"));
    assertEquals(WanAcknowledgeType.ACK_ON_OPERATION_COMPLETE.name(), publisherProps.get("ack.type"));
    assertEquals("pass", publisherProps.get("group.password"));
    // Publisher 1: custom publisher class with its own AWS/discovery sub-configs.
    WanPublisherConfig customPublisher = wcfg.getWanPublisherConfigs().get(1);
    assertEquals("istanbul", customPublisher.getGroupName());
    assertEquals("com.hazelcast.wan.custom.CustomPublisher", customPublisher.getClassName());
    assertEquals(WANQueueFullBehavior.THROW_EXCEPTION_ONLY_IF_REPLICATION_ACTIVE, customPublisher.getQueueFullBehavior());
    Map<String, Comparable> customPublisherProps = customPublisher.getProperties();
    assertEquals("prop.publisher", customPublisherProps.get("custom.prop.publisher"));
    assertEquals("5", customPublisherProps.get("discovery.period"));
    assertEquals("2", customPublisherProps.get("maxEndpoints"));
    assertAwsConfig(customPublisher.getAwsConfig());
    assertDiscoveryConfig(customPublisher.getDiscoveryConfig());
    // Publisher 2: queue capacity resolved from a property placeholder.
    WanPublisherConfig publisherPlaceHolderConfig = wcfg.getWanPublisherConfigs().get(2);
    assertEquals(5000, publisherPlaceHolderConfig.getQueueCapacity());
    WanConsumerConfig consumerConfig = wcfg.getWanConsumerConfig();
    assertEquals("com.hazelcast.wan.custom.WanConsumer", consumerConfig.getClassName());
    Map<String, Comparable> consumerProps = consumerConfig.getProperties();
    assertEquals("prop.consumer", consumerProps.get("custom.prop.consumer"));
    // Second WAN config: consumer wired as a bean reference.
    WanReplicationConfig config2 = config.getWanReplicationConfig("testWan2");
    WanConsumerConfig consumerConfig2 = config2.getWanConsumerConfig();
    // NOTE(review): the test injects the properties it then asserts on below —
    // this exercises the setter wiring rather than XML parsing; confirm intent.
    consumerConfig2.setProperties(consumerProps);
    assertInstanceOf(DummyWanConsumer.class, consumerConfig2.getImplementation());
    assertEquals("prop.consumer", consumerConfig2.getProperties().get("custom.prop.consumer"));
}
@Test
public void testConfigListeners() {
    assertNotNull(membershipListener);
    List<ListenerConfig> listenerConfigs = config.getListenerConfigs();
    assertEquals(2, listenerConfigs.size());
    for (ListenerConfig listenerConfig : listenerConfigs) {
        // One listener is declared by class name, the other by bean reference;
        // exactly one of className/implementation is set for each.
        if (listenerConfig.getClassName() == null) {
            assertNotNull(listenerConfig.getImplementation());
            assertEquals(membershipListener, listenerConfig.getImplementation());
        } else {
            assertNull(listenerConfig.getImplementation());
            assertEquals(DummyMembershipListener.class.getName(), listenerConfig.getClassName());
        }
    }
}
@Test
public void testPartitionGroupConfig() {
    PartitionGroupConfig partitionGroup = config.getPartitionGroupConfig();
    assertTrue(partitionGroup.isEnabled());
    assertEquals(PartitionGroupConfig.MemberGroupType.CUSTOM, partitionGroup.getGroupType());
    assertEquals(2, partitionGroup.getMemberGroupConfigs().size());
    // Each custom member group spans two interfaces.
    for (MemberGroupConfig memberGroup : partitionGroup.getMemberGroupConfigs()) {
        assertEquals(2, memberGroup.getInterfaces().size());
    }
}
@Test
public void testSSLConfig() {
    // SSL is declared (factory class + bean) but left disabled.
    SSLConfig ssl = config.getNetworkConfig().getSSLConfig();
    assertNotNull(ssl);
    assertFalse(ssl.isEnabled());
    assertEquals(DummySSLContextFactory.class.getName(), ssl.getFactoryClassName());
    assertEquals(sslContextFactory, ssl.getFactoryImplementation());
}
@Test
public void testSocketInterceptorConfig() {
    // Socket interceptor is declared (class + bean) but left disabled.
    SocketInterceptorConfig interceptorConfig = config.getNetworkConfig().getSocketInterceptorConfig();
    assertNotNull(interceptorConfig);
    assertFalse(interceptorConfig.isEnabled());
    assertEquals(DummySocketInterceptor.class.getName(), interceptorConfig.getClassName());
    assertEquals(socketInterceptor, interceptorConfig.getImplementation());
}
@Test
public void testManagementCenterConfig() {
    ManagementCenterConfig mcConfig = config.getManagementCenterConfig();
    assertNotNull(mcConfig);
    assertTrue(mcConfig.isEnabled());
    assertEquals("myserver:80", mcConfig.getUrl());
    assertEquals(2, mcConfig.getUpdateInterval());
    // Mutual TLS towards the management center is on, with one factory property.
    assertTrue(mcConfig.getMutualAuthConfig().isEnabled());
    assertEquals(1, mcConfig.getMutualAuthConfig().getProperties().size());
    assertEquals("who.let.the.cat.out.class", mcConfig.getMutualAuthConfig().getFactoryClassName());
}
@Test
public void testMemberAttributesConfig() {
    MemberAttributeConfig memberAttributeConfig = config.getMemberAttributeConfig();
    assertNotNull(memberAttributeConfig);
    assertEquals("spring-group", memberAttributeConfig.getStringAttribute("cluster.group.name"));
    // valueOf / cast instead of the wrapper constructors (new Integer(..) etc.),
    // which are deprecated since Java 9; equals() semantics are unchanged.
    assertEquals(Integer.valueOf(5700), memberAttributeConfig.getIntAttribute("cluster.port.int"));
    assertEquals(Long.valueOf(5700), memberAttributeConfig.getLongAttribute("cluster.port.long"));
    assertEquals(Short.valueOf((short) 5700), memberAttributeConfig.getShortAttribute("cluster.port.short"));
    assertEquals(Byte.valueOf((byte) 111), memberAttributeConfig.getByteAttribute("attribute.byte"));
    assertTrue(memberAttributeConfig.getBooleanAttribute("attribute.boolean"));
    assertEquals(0.0d, memberAttributeConfig.getDoubleAttribute("attribute.double"), 0.0001d);
    assertEquals(1234.5678, memberAttributeConfig.getFloatAttribute("attribute.float"), 0.0001);
}
@Test
public void testSerializationConfig() {
    SerializationConfig serialization = config.getSerializationConfig();
    assertEquals(ByteOrder.BIG_ENDIAN, serialization.getByteOrder());
    assertFalse(serialization.isCheckClassDefErrors());
    assertEquals(13, serialization.getPortableVersion());
    // DataSerializable factories: id 1 registered by class name, id 2 by bean reference.
    Map<Integer, String> dsFactoryClassNames = serialization.getDataSerializableFactoryClasses();
    assertFalse(dsFactoryClassNames.isEmpty());
    assertEquals(DummyDataSerializableFactory.class.getName(), dsFactoryClassNames.get(1));
    Map<Integer, DataSerializableFactory> dsFactories = serialization.getDataSerializableFactories();
    assertFalse(dsFactories.isEmpty());
    assertEquals(DummyDataSerializableFactory.class, dsFactories.get(2).getClass());
    // Portable factories follow the same class-name/bean split.
    Map<Integer, String> portableFactoryClassNames = serialization.getPortableFactoryClasses();
    assertFalse(portableFactoryClassNames.isEmpty());
    assertEquals(DummyPortableFactory.class.getName(), portableFactoryClassNames.get(1));
    Map<Integer, PortableFactory> portableFactories = serialization.getPortableFactories();
    assertFalse(portableFactories.isEmpty());
    assertEquals(DummyPortableFactory.class, portableFactories.get(2).getClass());
    Collection<SerializerConfig> serializers = serialization.getSerializerConfigs();
    assertFalse(serializers.isEmpty());
    // The global serializer is wired as a bean reference.
    GlobalSerializerConfig globalSerializer = serialization.getGlobalSerializerConfig();
    assertNotNull(globalSerializer);
    assertEquals(dummySerializer, globalSerializer.getImplementation());
}
@Test
public void testNativeMemoryConfig() {
    // Native memory: declared as 256 MB pooled allocator, but not enabled.
    NativeMemoryConfig nativeMemory = config.getNativeMemoryConfig();
    assertFalse(nativeMemory.isEnabled());
    assertEquals(MemoryUnit.MEGABYTES, nativeMemory.getSize().getUnit());
    assertEquals(256, nativeMemory.getSize().getValue());
    assertEquals(20, nativeMemory.getPageSize());
    assertEquals(NativeMemoryConfig.MemoryAllocatorType.POOLED, nativeMemory.getAllocatorType());
    assertEquals(10.2, nativeMemory.getMetadataSpacePercentage(), 0.1);
    assertEquals(10, nativeMemory.getMinBlockSize());
}
@Test
public void testReplicatedMapConfig() {
    assertNotNull(config);
    assertEquals(1, config.getReplicatedMapConfigs().size());
    ReplicatedMapConfig replicatedMapConfig = config.getReplicatedMapConfig("replicatedMap");
    assertNotNull(replicatedMapConfig);
    assertEquals("replicatedMap", replicatedMapConfig.getName());
    assertEquals(200, replicatedMapConfig.getReplicationDelayMillis());
    assertEquals(16, replicatedMapConfig.getConcurrencyLevel());
    assertEquals(InMemoryFormat.OBJECT, replicatedMapConfig.getInMemoryFormat());
    assertFalse(replicatedMapConfig.isStatisticsEnabled());
    assertFalse(replicatedMapConfig.isAsyncFillup());
    // (Removed a stray no-op call to getListenerConfigs() whose result was discarded.)
    for (ListenerConfig listener : replicatedMapConfig.getListenerConfigs()) {
        // One listener is registered by class name, the other by bean reference.
        if (listener.getClassName() != null) {
            assertNull(listener.getImplementation());
            assertTrue(listener.isIncludeValue());
            assertFalse(listener.isLocal());
        } else {
            assertNotNull(listener.getImplementation());
            assertEquals(entryListener, listener.getImplementation());
            assertTrue(listener.isLocal());
            assertTrue(listener.isIncludeValue());
        }
    }
}
@Test
public void testQuorumConfig() {
    assertNotNull(config);
    assertEquals(1, config.getQuorumConfigs().size());
    QuorumConfig quorum = config.getQuorumConfig("my-quorum");
    assertNotNull(quorum);
    assertEquals("my-quorum", quorum.getName());
    assertEquals("com.hazelcast.spring.DummyQuorumFunction", quorum.getQuorumFunctionClassName());
    assertTrue(quorum.isEnabled());
    assertEquals(2, quorum.getSize());
    // Two listeners: index 0 declared by class name, index 1 injected as a bean.
    assertEquals(2, quorum.getListenerConfigs().size());
    assertEquals(QuorumType.READ, quorum.getType());
    assertEquals("com.hazelcast.spring.DummyQuorumListener", quorum.getListenerConfigs().get(0).getClassName());
    assertNotNull(quorum.getListenerConfigs().get(1).getImplementation());
}
@Test
public void testFullQueryCacheConfig() {
    QueryCacheConfig queryCache = config.getMapConfig("map-with-query-cache").getQueryCacheConfigs().get(0);
    // Entry listener: declared by class name, includes values, not local-only.
    EntryListenerConfig listener = queryCache.getEntryListenerConfigs().get(0);
    assertTrue(listener.isIncludeValue());
    assertFalse(listener.isLocal());
    assertEquals("com.hazelcast.spring.DummyEntryListener", listener.getClassName());
    assertFalse(queryCache.isIncludeValue());
    assertEquals("my-query-cache-1", queryCache.getName());
    assertEquals(12, queryCache.getBatchSize());
    assertEquals(33, queryCache.getBufferSize());
    assertEquals(12, queryCache.getDelaySeconds());
    assertEquals(InMemoryFormat.OBJECT, queryCache.getInMemoryFormat());
    assertTrue(queryCache.isCoalesce());
    assertFalse(queryCache.isPopulate());
    assertIndexesEqual(queryCache);
    assertEquals("__key > 12", queryCache.getPredicateConfig().getSql());
    // Eviction: LRU bounded by an entry count of 111.
    assertEquals(EvictionPolicy.LRU, queryCache.getEvictionConfig().getEvictionPolicy());
    assertEquals(EvictionConfig.MaxSizePolicy.ENTRY_COUNT, queryCache.getEvictionConfig().getMaximumSizePolicy());
    assertEquals(111, queryCache.getEvictionConfig().getSize());
}
/** Every configured index must be the unordered "name" attribute index. */
private void assertIndexesEqual(QueryCacheConfig queryCacheConfig) {
    for (MapIndexConfig indexConfig : queryCacheConfig.getIndexConfigs()) {
        assertEquals("name", indexConfig.getAttribute());
        assertFalse(indexConfig.isOrdered());
    }
}
@Test
public void testMapNativeMaxSizePolicy() {
    // The map declares a native-memory based max-size policy.
    MaxSizeConfig maxSize = config.getMapConfig("map-with-native-max-size-policy").getMaxSizeConfig();
    assertEquals(MaxSizeConfig.MaxSizePolicy.USED_NATIVE_MEMORY_PERCENTAGE, maxSize.getMaxSizePolicy());
}
@Test
public void testHotRestart() {
    File baseDir = new File("/mnt/hot-restart/");
    File backupDir = new File("/mnt/hot-backup/");
    HotRestartPersistenceConfig hotRestart = config.getHotRestartPersistenceConfig();
    assertTrue(hotRestart.isEnabled());
    // Compare absolute paths so relative vs. absolute notation cannot skew the result.
    assertEquals(baseDir.getAbsolutePath(), hotRestart.getBaseDir().getAbsolutePath());
    assertEquals(backupDir.getAbsolutePath(), hotRestart.getBackupDir().getAbsolutePath());
    assertEquals(1111, hotRestart.getValidationTimeoutSeconds());
    assertEquals(2222, hotRestart.getDataLoadTimeoutSeconds());
    assertEquals(PARTIAL_RECOVERY_MOST_COMPLETE, hotRestart.getClusterDataRecoveryPolicy());
}
@Test
public void testMapEvictionPolicies() {
    // Each map in the context declares a distinct eviction policy.
    MapConfig lfuMap = config.getMapConfig("lfuEvictionMap");
    MapConfig lruMap = config.getMapConfig("lruEvictionMap");
    MapConfig noneMap = config.getMapConfig("noneEvictionMap");
    MapConfig randomMap = config.getMapConfig("randomEvictionMap");
    assertEquals(EvictionPolicy.LFU, lfuMap.getEvictionPolicy());
    assertEquals(EvictionPolicy.LRU, lruMap.getEvictionPolicy());
    assertEquals(EvictionPolicy.NONE, noneMap.getEvictionPolicy());
    assertEquals(EvictionPolicy.RANDOM, randomMap.getEvictionPolicy());
}
@Test
public void testMemberNearCacheEvictionPolicies() {
    // Near-cache eviction policy per map, as configured in the Spring context.
    String[] mapNames = {
            "lfuNearCacheEvictionMap", "lruNearCacheEvictionMap",
            "noneNearCacheEvictionMap", "randomNearCacheEvictionMap"
    };
    EvictionPolicy[] expectedPolicies = {
            EvictionPolicy.LFU, EvictionPolicy.LRU, EvictionPolicy.NONE, EvictionPolicy.RANDOM
    };
    for (int i = 0; i < mapNames.length; i++) {
        assertEquals(expectedPolicies[i], getNearCacheEvictionPolicy(mapNames[i], config));
    }
}
/** Drills down from a map's config to its near-cache eviction policy. */
private EvictionPolicy getNearCacheEvictionPolicy(String mapName, Config config) {
    MapConfig mapConfig = config.getMapConfig(mapName);
    return mapConfig.getNearCacheConfig().getEvictionConfig().getEvictionPolicy();
}
@Test
public void testMapEvictionPolicyClassName() {
    // The policy is configured by class name; compare the resolved class's FQN.
    MapConfig mapConfig = config.getMapConfig("mapWithMapEvictionPolicyClassName");
    String actualClassName = mapConfig.getMapEvictionPolicy().getClass().getName();
    assertEquals("com.hazelcast.map.eviction.LRUEvictionPolicy", actualClassName);
}
@Test
public void testMapEvictionPolicyImpl() {
    // Configured via a bean reference, so the concrete class must match exactly.
    Object policy = config.getMapConfig("mapWithMapEvictionPolicyImpl").getMapEvictionPolicy();
    assertEquals(DummyMapEvictionPolicy.class, policy.getClass());
}
@Test
public void testWhenBothMapEvictionPolicyClassNameAndEvictionPolicySet() {
    // When both settings are present, the explicit policy class name wins.
    MapConfig mapConfig = config.getMapConfig("mapBothMapEvictionPolicyClassNameAndEvictionPolicy");
    String actualClassName = mapConfig.getMapEvictionPolicy().getClass().getName();
    assertEquals("com.hazelcast.map.eviction.LRUEvictionPolicy", actualClassName);
}
@Test
public void testMapEventJournalConfigIsWellParsed() {
    // Map event journal "mapName": enabled, capacity 123, TTL 321 seconds.
    final EventJournalConfig journal = config.getMapEventJournalConfig("mapName");
    assertTrue(journal.isEnabled());
    assertEquals(123, journal.getCapacity());
    assertEquals(321, journal.getTimeToLiveSeconds());
}
@Test
public void testCacheEventJournalConfigIsWellParsed() {
    // Cache event journal "cacheName": same settings as the map journal.
    final EventJournalConfig journal = config.getCacheEventJournalConfig("cacheName");
    assertTrue(journal.isEnabled());
    assertEquals(123, journal.getCapacity());
    assertEquals(321, journal.getTimeToLiveSeconds());
}
@Test
public void testExplicitPortCountConfiguration() {
    // port-count is set explicitly rather than relying on the default.
    assertEquals(42, instance.getConfig().getNetworkConfig().getPortCount());
}
}
| |
package biz.paluch.logging.gelf.jul;
import static biz.paluch.logging.gelf.LogMessageField.NamedLogField.*;
import java.util.Collections;
import java.util.logging.*;
import biz.paluch.logging.RuntimeContainer;
import biz.paluch.logging.gelf.GelfMessageAssembler;
import biz.paluch.logging.gelf.LogMessageField;
import biz.paluch.logging.gelf.PropertyProvider;
import biz.paluch.logging.gelf.intern.*;
/**
* Logging-Handler for GELF (Graylog Extended Logging Format). This Java-Util-Logging Handler creates GELF Messages and posts
* them using UDP (default) or TCP. Following parameters are supported/needed:
* <ul>
* <li>host (Mandatory): Hostname/IP-Address of the Logstash Host
* <ul>
* <li>(the host) for UDP, e.g. 127.0.0.1 or some.host.com</li>
* <li>See docs for more details</li>
* </ul>
* </li>
* <li>port (Optional): Port, default 12201</li>
* <li>version (Optional): GELF Version 1.0 or 1.1, default 1.0</li>
* <li>originHost (Optional): Originating Hostname, default FQDN Hostname</li>
* <li>extractStackTrace (Optional): Post Stack-Trace to StackTrace field (true/false/throwable reference [0 = throwable, 1 =
* throwable.cause, -1 = root cause]), default false</li>
* <li>filterStackTrace (Optional): Perform Stack-Trace filtering (true/false), default false</li>
* <li>includeLogMessageParameters (Optional): Include message parameters from the log event (see
* {@link LogRecord#getParameters()}, default true</li>
* <li>includeLocation (Optional): Include source code location, default true</li>
* <li>mdcProfiling (Optional): Perform Profiling (Call-Duration) based on MDC Data. See <a href="#mdcProfiling">MDC
* Profiling</a>, default false</li>
* <li>facility (Optional): Name of the Facility, default gelf-java</li>
* <li>level (Optional): Log-Level, default INFO</li>
* <li>filter (Optional): Class-Name of a Log-Filter, default none</li>
* <li>additionalField.(number) (Optional): Post additional fields. Eg. .GelfLogHandler.additionalField.0=fieldName=Value</li>
* </ul>
*
* The {@link #publish(LogRecord)} method is thread-safe and may be called by different threads at any time.
*
* @author Mark Paluch
*/
public class GelfLogHandler extends Handler implements ErrorReporter {

    /**
     * Sender used to transport GELF messages. Created lazily on the first
     * {@link #publish(LogRecord)} call; {@code volatile} so the double-checked
     * locking in {@code publish} is safe.
     */
    protected volatile GelfSender gelfSender;

    /** Builds GELF messages from JUL {@link LogRecord}s; configured once in the constructor. */
    protected GelfMessageAssembler gelfMessageAssembler;

    /** Routes sender errors back through {@link #reportError(String, Exception)}. */
    private final ErrorReporter errorReporter = new MessagePostprocessingErrorReporter(this);

    /**
     * Creates the handler and configures it from JUL logging properties
     * (keys prefixed with this class name, resolved via {@link JulPropertyProvider}).
     */
    public GelfLogHandler() {
        super();
        RuntimeContainer.initialize(errorReporter);
        gelfMessageAssembler = createGelfMessageAssembler();
        initializeDefaultFields();
        JulPropertyProvider propertyProvider = new JulPropertyProvider(GelfLogHandler.class);
        gelfMessageAssembler.initialize(propertyProvider);

        // Log level: configured value, INFO as fallback.
        String level = propertyProvider.getProperty(PropertyProvider.PROPERTY_LEVEL);
        if (null != level) {
            setLevel(Level.parse(level.trim()));
        } else {
            setLevel(Level.INFO);
        }

        String additionalFields = propertyProvider.getProperty(PropertyProvider.PROPERTY_ADDITIONAL_FIELDS);
        if (null != additionalFields) {
            setAdditionalFields(additionalFields);
        }

        String additionalFieldTypes = propertyProvider.getProperty(PropertyProvider.PROPERTY_ADDITIONAL_FIELD_TYPES);
        if (null != additionalFieldTypes) {
            setAdditionalFieldTypes(additionalFieldTypes);
        }

        String includeLocation = propertyProvider.getProperty(PropertyProvider.PROPERTY_INCLUDE_LOCATION);
        if (null != includeLocation) {
            // parseBoolean avoids the needless boxing of Boolean.valueOf(..).
            setIncludeLocation(Boolean.parseBoolean(includeLocation));
        }

        // Optional log filter, instantiated reflectively from its configured class name.
        String filter = propertyProvider.getProperty(PropertyProvider.PROPERTY_FILTER);
        try {
            if (null != filter) {
                // Class<?> instead of the former raw Class type.
                final Class<?> clazz = ClassLoader.getSystemClassLoader().loadClass(filter);
                setFilter((Filter) clazz.getDeclaredConstructor().newInstance());
            }
        } catch (final Exception ignored) {
            // Best effort: a misconfigured filter class must not prevent handler creation.
        }
    }

    /** Registers the default GELF field mapping on the message assembler. */
    protected void initializeDefaultFields() {
        gelfMessageAssembler.addFields(LogMessageField.getDefaultMapping(Time, Severity, ThreadName, SourceClassName,
                SourceMethodName, SourceSimpleClassName, LoggerName));
    }

    /** Factory hook; subclasses may supply a customized assembler. */
    protected GelfMessageAssembler createGelfMessageAssembler() {
        return new GelfMessageAssembler();
    }

    @Override
    public void flush() {
        // Messages are sent immediately in publish(); there is nothing to flush.
    }

    /**
     * Converts {@code record} into a GELF message and sends it. Thread-safe: the
     * sender is created once via double-checked locking on the volatile field.
     */
    @Override
    public void publish(final LogRecord record) {
        if (!isLoggable(record)) {
            return;
        }
        try {
            if (null == gelfSender) {
                synchronized (this) {
                    if (null == gelfSender) {
                        gelfSender = createGelfSender();
                    }
                }
            }
        } catch (Exception e) {
            reportError("Could not send GELF message: " + e.getMessage(), e, ErrorManager.OPEN_FAILURE);
            return;
        }
        try {
            GelfMessage message = createGelfMessage(record);
            if (!message.isValid()) {
                reportError("GELF Message is invalid: " + message.toJson(), null, ErrorManager.WRITE_FAILURE);
                return;
            }
            // Re-check the sender: close() may have nulled it concurrently.
            if (null == gelfSender || !gelfSender.sendMessage(message)) {
                reportError("Could not send GELF message", null, ErrorManager.WRITE_FAILURE);
            }
        } catch (Exception e) {
            reportError("Could not send GELF message: " + e.getMessage(), e, ErrorManager.FORMAT_FAILURE);
        }
    }

    /** Factory hook for the transport; default uses the configured assembler settings. */
    protected GelfSender createGelfSender() {
        return GelfSenderFactory.createSender(gelfMessageAssembler, errorReporter, Collections.<String, Object>emptyMap());
    }

    @Override
    public void reportError(String message, Exception e) {
        reportError(message, e, ErrorManager.GENERIC_FAILURE);
    }

    /** Closes and discards the sender; a later publish() will lazily recreate it. */
    @Override
    public void close() {
        if (null != gelfSender) {
            Closer.close(gelfSender);
            gelfSender = null;
        }
    }

    protected GelfMessage createGelfMessage(final LogRecord record) {
        return gelfMessageAssembler.createGelfMessage(new JulLogEvent(record));
    }

    public void setAdditionalFields(String spec) {
        ConfigurationSupport.setAdditionalFields(spec, gelfMessageAssembler);
    }

    public void setAdditionalFieldTypes(String spec) {
        ConfigurationSupport.setAdditionalFieldTypes(spec, gelfMessageAssembler);
    }

    public void setMdcFields(String spec) {
        ConfigurationSupport.setMdcFields(spec, gelfMessageAssembler);
    }

    public void setDynamicMdcFields(String spec) {
        ConfigurationSupport.setDynamicMdcFields(spec, gelfMessageAssembler);
    }

    public void setDynamicMdcFieldTypes(String spec) {
        ConfigurationSupport.setDynamicMdcFieldTypes(spec, gelfMessageAssembler);
    }

    // "graylogHost"/"graylogPort" are legacy aliases for "host"/"port"; both pairs
    // delegate to the same assembler properties.
    public String getGraylogHost() {
        return gelfMessageAssembler.getHost();
    }

    public void setGraylogHost(String graylogHost) {
        gelfMessageAssembler.setHost(graylogHost);
    }

    public String getOriginHost() {
        return gelfMessageAssembler.getOriginHost();
    }

    public void setOriginHost(String originHost) {
        gelfMessageAssembler.setOriginHost(originHost);
    }

    public String getHost() {
        return gelfMessageAssembler.getHost();
    }

    public void setHost(String host) {
        gelfMessageAssembler.setHost(host);
    }

    public int getPort() {
        return gelfMessageAssembler.getPort();
    }

    public void setPort(int port) {
        gelfMessageAssembler.setPort(port);
    }

    public int getGraylogPort() {
        return gelfMessageAssembler.getPort();
    }

    public void setGraylogPort(int graylogPort) {
        gelfMessageAssembler.setPort(graylogPort);
    }

    public String getFacility() {
        return gelfMessageAssembler.getFacility();
    }

    public void setFacility(String facility) {
        gelfMessageAssembler.setFacility(facility);
    }

    public String getExtractStackTrace() {
        return gelfMessageAssembler.getExtractStackTrace();
    }

    public void setExtractStackTrace(String extractStacktrace) {
        gelfMessageAssembler.setExtractStackTrace(extractStacktrace);
    }

    public boolean isFilterStackTrace() {
        return gelfMessageAssembler.isFilterStackTrace();
    }

    public void setFilterStackTrace(boolean filterStackTrace) {
        gelfMessageAssembler.setFilterStackTrace(filterStackTrace);
    }

    public boolean isIncludeLogMessageParameters() {
        return gelfMessageAssembler.isIncludeLogMessageParameters();
    }

    public void setIncludeLogMessageParameters(boolean includeLogMessageParameters) {
        gelfMessageAssembler.setIncludeLogMessageParameters(includeLogMessageParameters);
    }

    public boolean isIncludeLocation() {
        return gelfMessageAssembler.isIncludeLocation();
    }

    public void setIncludeLocation(boolean includeLocation) {
        gelfMessageAssembler.setIncludeLocation(includeLocation);
    }

    public String getTimestampPattern() {
        return gelfMessageAssembler.getTimestampPattern();
    }

    public void setTimestampPattern(String timestampPattern) {
        gelfMessageAssembler.setTimestampPattern(timestampPattern);
    }

    public int getMaximumMessageSize() {
        return gelfMessageAssembler.getMaximumMessageSize();
    }

    public void setMaximumMessageSize(int maximumMessageSize) {
        gelfMessageAssembler.setMaximumMessageSize(maximumMessageSize);
    }

    public String getVersion() {
        return gelfMessageAssembler.getVersion();
    }

    public void setVersion(String version) {
        gelfMessageAssembler.setVersion(version);
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.engine.test.bpmn.event.timer;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.activiti.engine.impl.test.PluggableFlowableTestCase;
import org.flowable.engine.common.runtime.Clock;
import org.flowable.engine.delegate.DelegateExecution;
import org.flowable.engine.delegate.ExecutionListener;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.test.Deployment;
import org.flowable.job.api.Job;
import org.flowable.job.api.JobQuery;
import org.flowable.job.api.TimerJobQuery;
/**
* @author Joram Barrez
*/
public class BoundaryTimerEventTest extends PluggableFlowableTestCase {
private static boolean listenerExecutedStartEvent;
private static boolean listenerExecutedEndEvent;
/** Records which lifecycle events were observed, for later assertions via the static flags. */
public static class MyExecutionListener implements ExecutionListener {
    private static final long serialVersionUID = 1L;

    public void notify(DelegateExecution execution) {
        String eventName = execution.getEventName();
        if ("start".equals(eventName)) {
            listenerExecutedStartEvent = true;
        } else if ("end".equals(eventName)) {
            listenerExecutedEndEvent = true;
        }
    }
}
/*
* Test for when multiple boundary timer events are defined on the same user task
*
* Configuration: - timer 1 -> 2 hours -> secondTask - timer 2 -> 1 hour -> thirdTask - timer 3 -> 3 hours -> fourthTask
*
* See process image next to the process xml resource
*/
@Deployment
public void testMultipleTimersOnUserTask() {
    // Pin the engine clock so timer due-dates are computed from a known instant.
    Clock clock = processEngineConfiguration.getClock();
    // Set the clock fixed
    clock.reset();
    Date startTime = clock.getCurrentTime();
    processEngineConfiguration.setClock(clock);
    // After process start, there should be 3 timers created (one per boundary timer).
    ProcessInstance pi = runtimeService.startProcessInstanceByKey("multipleTimersOnUserTask");
    TimerJobQuery jobQuery = managementService.createTimerJobQuery().processInstanceId(pi.getId());
    List<Job> jobs = jobQuery.list();
    assertEquals(3, jobs.size());
    // After setting the clock to time '1 hour and 5 seconds', the second timer should fire
    // (it is the 1-hour timer; the 5s buffer avoids boundary flakiness).
    clock.setCurrentTime(new Date(startTime.getTime() + ((60 * 60 * 1000) + 5000)));
    processEngineConfiguration.setClock(clock);
    waitForJobExecutorToProcessAllJobs(5000L, 25L);
    // Firing one boundary timer cancels the task and its sibling timers.
    assertEquals(0L, jobQuery.count());
    // which means that the third task is reached
    org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult();
    assertEquals("Third Task", task.getName());
    // Restore real time so later tests are unaffected.
    processEngineConfiguration.resetClock();
}
@Deployment
public void testTimerOnNestingOfSubprocesses() {
Date testStartTime = processEngineConfiguration.getClock().getCurrentTime();
runtimeService.startProcessInstanceByKey("timerOnNestedSubprocesses");
List<org.flowable.task.api.Task> tasks = taskService.createTaskQuery().orderByTaskName().asc().list();
assertEquals(2, tasks.size());
assertEquals("Inner subprocess task 1", tasks.get(0).getName());
assertEquals("Inner subprocess task 2", tasks.get(1).getName());
// Timer will fire in 2 hours
processEngineConfiguration.getClock().setCurrentTime(new Date(testStartTime.getTime() + ((2 * 60 * 60 * 1000) + 5000)));
Job timer = managementService.createTimerJobQuery().singleResult();
managementService.moveTimerToExecutableJob(timer.getId());
managementService.executeJob(timer.getId());
org.flowable.task.api.Task task = taskService.createTaskQuery().singleResult();
assertEquals("task outside subprocess", task.getName());
}
@Deployment
public void testExpressionOnTimer() {
Clock clock = processEngineConfiguration.getClock();
// Set the clock fixed
clock.reset();
Date startTime = clock.getCurrentTime();
processEngineConfiguration.setClock(clock);
HashMap<String, Object> variables = new HashMap<String, Object>();
variables.put("duration", "PT1H");
// After process start, there should be a timer created
ProcessInstance pi = runtimeService.startProcessInstanceByKey("testExpressionOnTimer", variables);
TimerJobQuery jobQuery = managementService.createTimerJobQuery().processInstanceId(pi.getId());
List<Job> jobs = jobQuery.list();
assertEquals(1, jobs.size());
// After setting the clock to time '1 hour and 5 seconds', the second timer should fire
clock.setCurrentTime(new Date(startTime.getTime() + ((60 * 60 * 1000) + 5000)));
processEngineConfiguration.setClock(clock);
waitForJobExecutorToProcessAllJobsAndExecutableTimerJobs(5000L, 200L);
assertEquals(0L, jobQuery.count());
// start execution listener is not executed
assertFalse(listenerExecutedStartEvent);
assertTrue(listenerExecutedEndEvent);
// which means the process has ended
assertProcessEnded(pi.getId());
processEngineConfiguration.resetClock();
}
@Deployment
public void testNullExpressionOnTimer() {
HashMap<String, Object> variables = new HashMap<String, Object>();
variables.put("duration", null);
// After process start, there should be a timer created
ProcessInstance pi = runtimeService.startProcessInstanceByKey("testNullExpressionOnTimer", variables);
// NO job scheduled as null expression set
JobQuery jobQuery = managementService.createJobQuery().processInstanceId(pi.getId());
List<Job> jobs = jobQuery.list();
assertEquals(0, jobs.size());
// which means the process is still running waiting for human task input.
ProcessInstance processInstance = processEngine
.getRuntimeService()
.createProcessInstanceQuery()
.processInstanceId(pi.getId())
.singleResult();
assertNotNull(processInstance);
}
@Deployment
public void testTimerInSingleTransactionProcess() {
// make sure that if a PI completes in single transaction, JobEntities associated with the execution are deleted.
// broken before 5.10, see ACT-1133
runtimeService.startProcessInstanceByKey("timerOnSubprocesses");
assertEquals(0, managementService.createJobQuery().count());
}
@Deployment
public void testRepeatingTimerWithCancelActivity() {
runtimeService.startProcessInstanceByKey("repeatingTimerAndCallActivity");
assertEquals(1, managementService.createTimerJobQuery().count());
assertEquals(1, taskService.createTaskQuery().count());
// Firing job should cancel the user task, destroy the scope,
// re-enter the task and recreate the task. A new timer should also be created.
// This didn't happen before 5.11 (new jobs kept being created). See ACT-1427
Job job = managementService.createTimerJobQuery().singleResult();
managementService.moveTimerToExecutableJob(job.getId());
managementService.executeJob(job.getId());
assertEquals(1, managementService.createTimerJobQuery().count());
assertEquals(1, taskService.createTaskQuery().count());
}
@Deployment
public void testRepeatTimerDuration() throws Exception {
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyyy.MM.dd hh:mm");
Date currentTime = simpleDateFormat.parse("2015.10.01 11:01");
processEngineConfiguration.getClock().setCurrentTime(currentTime);
runtimeService.startProcessInstanceByKey("repeattimertest");
long twentyFourHours = 24L * 60L * 60L * 1000L;
Date previousDueDate = null;
// Move clock, job should fire
for (int i = 0; i < 3; i++) {
Job job = managementService.createTimerJobQuery().singleResult();
// Verify due date
if (previousDueDate != null) {
assertTrue(job.getDuedate().getTime() - previousDueDate.getTime() >= twentyFourHours);
}
previousDueDate = job.getDuedate();
currentTime = new Date(currentTime.getTime() + twentyFourHours + (60 * 1000));
processEngineConfiguration.getClock().setCurrentTime(currentTime);
managementService.moveTimerToExecutableJob(job.getId());
managementService.executeJob(job.getId());
}
}
@Deployment
public void testBoundaryTimerEvent() throws Exception {
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyyy.MM.dd hh:mm");
Date currentTime = simpleDateFormat.parse("2015.10.01 11:01");
processEngineConfiguration.getClock().setCurrentTime(currentTime);
Map<String, Object> vars = new HashMap<String, Object>();
vars.put("patient", "kermit");
runtimeService.startProcessInstanceByKey("process1", vars);
// just wait for 2 seconds to run any job if it's the case
try {
waitForJobExecutorToProcessAllJobs(2000, 200);
} catch (Exception ex) {
// expected exception because the boundary timer event created a timer job to be executed after 10 minutes
}
// there should be a userTask waiting for user input
List<org.flowable.task.api.Task> tasks = taskService.createTaskQuery().list();
assertEquals(1, tasks.size());
assertEquals("First Task", tasks.get(0).getName());
List<Job> jobList = managementService.createTimerJobQuery().list();
assertEquals(1, jobList.size());
// let's see what's happening after 2 minutes
// nothing should change since the timer have to executed after 10 minutes
long twoMinutes = 2L * 60L * 1000L;
currentTime = new Date(currentTime.getTime() + twoMinutes + 1000L);
processEngineConfiguration.getClock().setCurrentTime(currentTime);
try {
waitForJobExecutorToProcessAllJobsAndExecutableTimerJobs(2000, 200);
} catch (Exception ex) {
// expected exception because the boundary timer event created a timer job to be executed after 10 minutes
}
tasks = taskService.createTaskQuery().list();
assertEquals(1, tasks.size());
assertEquals("First Task", tasks.get(0).getName());
jobList = managementService.createTimerJobQuery().list();
assertEquals(1, jobList.size());
// after another 8 minutes (the timer will have to execute because it wasa set to be executed @ 10 minutes after process start)
long tenMinutes = 8L * 60L * 1000L;
currentTime = new Date(currentTime.getTime() + tenMinutes);
processEngineConfiguration.getClock().setCurrentTime(currentTime);
try {
waitForJobExecutorToProcessAllJobsAndExecutableTimerJobs(2000, 200);
} catch (Exception ex) {
ex.getCause();
// expected exception because a new job is prepared
}
// there should be only one userTask and it should be the one triggered by the boundary timer event.
// after the boundary event is triggered there should be no active job.
tasks = taskService.createTaskQuery().list();
assertEquals(1, tasks.size());
assertEquals("Second Task", tasks.get(0).getName());
jobList = managementService.createJobQuery().list();
assertEquals(0, jobList.size());
jobList = managementService.createTimerJobQuery().list();
assertEquals(0, jobList.size());
}
@Deployment
public void testBoundaryTimerEvent2() throws Exception {
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyyy.MM.dd hh:mm");
Date currentTime = simpleDateFormat.parse("2015.10.01 11:01");
processEngineConfiguration.getClock().setCurrentTime(currentTime);
runtimeService.startProcessInstanceByKey("timerprocess");
// just wait for 2 seconds to run any job if it's the case
try {
waitForJobExecutorToProcessAllJobs(2000, 200);
} catch (Exception ex) {
// expected exception because the boundary timer event created a timer job to be executed after 10 minutes
}
// there should be a userTask waiting for user input
List<org.flowable.task.api.Task> tasks = taskService.createTaskQuery().list();
assertEquals(1, tasks.size());
assertEquals("Start", tasks.get(0).getName());
List<Job> jobList = managementService.createTimerJobQuery().list();
assertEquals(1, jobList.size());
// after another 2 minutes
long tenMinutes = 2L * 60L * 1000L;
currentTime = new Date(currentTime.getTime() + tenMinutes);
processEngineConfiguration.getClock().setCurrentTime(currentTime);
try {
waitForJobExecutorToProcessAllJobsAndExecutableTimerJobs(2000, 200);
} catch (Exception ex) {
ex.getCause();
// expected exception because a new job is prepared
}
// there should be no userTask
tasks = taskService.createTaskQuery().list();
assertEquals(0, tasks.size());
jobList = managementService.createJobQuery().list();
assertEquals(0, jobList.size());
jobList = managementService.createTimerJobQuery().list();
assertEquals(0, jobList.size());
}
}
| |
package pipe.gui.widget;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import pipe.reachability.algorithm.ExplorerUtilities;
import pipe.reachability.algorithm.StateSpaceExplorer;
import pipe.reachability.algorithm.TimelessTrapException;
import pipe.reachability.algorithm.VanishingExplorer;
import pipe.reachability.algorithm.parallel.MassiveParallelStateSpaceExplorer;
import pipe.reachability.algorithm.sequential.SequentialStateSpaceExplorer;
import uk.ac.imperial.io.*;
import uk.ac.imperial.pipe.exceptions.InvalidRateException;
import uk.ac.imperial.pipe.io.PetriNetIOImpl;
import uk.ac.imperial.pipe.io.PetriNetReader;
import uk.ac.imperial.pipe.models.petrinet.PetriNet;
import uk.ac.imperial.state.ClassifiedState;
import uk.ac.imperial.state.Record;
import javax.swing.*;
import javax.xml.bind.JAXBException;
import java.awt.FileDialog;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* JPanel used to load the state space exploration results from Petri nets and binary state space results.
*/
public class StateSpaceLoader {

    /**
     * Class logger
     */
    private static final Logger LOGGER = Logger.getLogger(StateSpaceLoader.class.getName());

    /**
     * Number of states to explore per thread before reducing and writing out
     */
    private static final int STATES_PER_THREAD = 100;

    /**
     * For loading Petri nets to explore
     */
    private final FileDialog loadDialog;

    /**
     * Enabled if a Petri net is specified, disabled otherwise
     */
    private JRadioButton useExistingPetriNetRadioButton;

    /**
     * Name label for the loaded Petri net
     */
    private JTextField petriNetNameLabel;

    /**
     * Load from file radio, always enabled. Clicking on this
     * brings up the file loader
     */
    private JRadioButton loadPetriNetFromFileRadioButton;

    /**
     * Main display panel with all the loading options
     */
    private JPanel mainPanel;

    /**
     * Binary field label displaying the name of the binary state file loaded
     */
    private JTextField stateFieldLabel;

    /**
     * Binary field label displaying the name of the binary transitions file loaded
     */
    private JTextField transitionFieldLabel;

    /**
     * Load from binaries radio, always enabled. Clicking on this will
     * bring up the file loader twice, once for the states binary and
     * once for the transitions binary.
     */
    private JRadioButton loadFromBinariesRadio;

    /**
     * Default Petri net, used when "use current Petri net" is selected
     */
    private PetriNet defaultPetriNet;

    /**
     * Temporary transitions file for generating results into
     */
    private Path temporaryTransitions;

    /**
     * Temporary states file for generating results into
     */
    private Path temporaryStates;

    /**
     * Last loaded Petri net via the load dialog
     */
    private PetriNet lastLoadedPetriNet;

    /**
     * Binary transitions loaded when binary transitions radio check box is selected
     */
    private Path binaryTransitions;

    /**
     * Binary states loaded when binary transitions radio check box is selected
     */
    private Path binaryStates;

    /**
     * Sets up the load Petri net options with the "use current Petri net" disabled
     *
     * @param loadDialog dialog
     */
    public StateSpaceLoader(FileDialog loadDialog) {
        this.loadDialog = loadDialog;
        useExistingPetriNetRadioButton.setEnabled(false);
        setUp();
    }

    /**
     * Sets up the load Petri net options with "use current Petri net" set to
     * the petriNet parameter
     *
     * @param petriNet   current Petri net
     * @param loadDialog dialog
     */
    public StateSpaceLoader(PetriNet petriNet, FileDialog loadDialog) {
        defaultPetriNet = petriNet;
        this.loadDialog = loadDialog;
        setUp();
    }

    /**
     * Set up the binary radio button action listeners.
     * Clicking on the load Petri net radio brings up a single xml file loader.
     * Clicking on the load from binaries radio brings up two loaders, one for
     * the states binaries and one for the transition binaries.
     */
    private void setUp() {
        loadPetriNetFromFileRadioButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                loadData();
            }
        });
        loadFromBinariesRadio.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                loadBinaryFiles();
            }
        });
    }

    /**
     * Opens the file dialog and saves the selected Petri net into lastLoadedPetriNet
     * for use when calculating the state space exploration
     */
    private void loadData() {
        loadDialog.setMode(FileDialog.LOAD);
        loadDialog.setTitle("Select petri net");
        loadDialog.setVisible(true);
        File[] files = loadDialog.getFiles();
        if (files.length > 0) {
            File path = files[0];
            try {
                petriNetNameLabel.setText(path.getName());
                PetriNetReader petriNetIO = new PetriNetIOImpl();
                lastLoadedPetriNet = petriNetIO.read(path.getAbsolutePath());
            } catch (JAXBException | FileNotFoundException e) {
                // Pass the exception itself so the stack trace is not lost
                LOGGER.log(Level.SEVERE, e.getMessage(), e);
            }
        }
    }

    /**
     * Loads the transition and state binary files into the member variables.
     * If the transitions dialog is cancelled nothing is loaded; if only the
     * states dialog is cancelled the previously loaded states (if any) remain.
     */
    private void loadBinaryFiles() {
        loadDialog.setMode(FileDialog.LOAD);
        loadDialog.setTitle("Load transitions file");
        loadDialog.setVisible(true);
        File[] files = loadDialog.getFiles();
        if (files.length > 0) {
            File file = files[0];
            binaryTransitions = Paths.get(file.toURI());
            transitionFieldLabel.setText(file.getName());
        } else {
            // No transitions chosen; do not prompt for states
            return;
        }

        loadDialog.setTitle("Load states file");
        loadDialog.setVisible(true);
        File[] statesFiles = loadDialog.getFiles();
        if (statesFiles.length > 0) {
            File file = statesFiles[0];
            binaryStates = Paths.get(file.toURI());
            stateFieldLabel.setText(file.getName());
        } else {
            LOGGER.log(Level.INFO, "No file loaded");
        }
    }

    /**
     * @return true if the user has chosen to load results from binary files
     */
    public boolean isBinaryLoadChecked() {
        return loadFromBinariesRadio.isSelected();
    }

    /**
     * @return the Petri net to analyse: the default Petri net if "use existing"
     *         is selected, otherwise the last Petri net loaded via the dialog
     */
    public PetriNet getPetriNet() {
        return useExistingPetriNetRadioButton.isSelected() ? defaultPetriNet : lastLoadedPetriNet;
    }

    /**
     * @return the panel containing all the loading options
     */
    public JPanel getMainPanel() {
        return mainPanel;
    }

    /**
     * Calculates the steady state exploration of a Petri net and stores its results
     * in a temporary file.
     * <p>
     * These results are then read in and turned into a graphical representation using mxGraph
     * which is displayed to the user
     * </p>
     * @param creator explorer creator
     * @param vanishingCreator vanishing creator
     * @param threads number of threads across which to spread work
     * @return state space explorer results
     * @throws TimelessTrapException unable to exit cyclic vanishing state
     * @throws InterruptedException thread interrupted
     * @throws ExecutionException task aborted due to exception
     * @throws IOException error doing IO
     * @throws InvalidRateException functional rate expression invalid
     * @throws StateSpaceLoaderException if error during loading from binaries
     */
    public StateSpaceExplorer.StateSpaceExplorerResults calculateResults(ExplorerCreator creator,
                                                                         VanishingExplorerCreator vanishingCreator, int threads)
            throws IOException, InterruptedException, ExecutionException, InvalidRateException, TimelessTrapException,
            StateSpaceLoaderException {
        if (loadFromBinariesRadio.isSelected()) {
            return loadFromBinaries();
        }
        KryoStateIO stateWriter = new KryoStateIO();
        temporaryTransitions = getTransitionsPath();
        temporaryStates = getStatesPath();
        // Reuse the accessor rather than duplicating the radio-button ternary
        PetriNet petriNet = getPetriNet();
        if (petriNet == null) {
            String message;
            if (useExistingPetriNetRadioButton.isSelected()) {
                message = "Error cannot calculate analysis metrics. Please load a Petri net/binaries.";
            } else {
                message = "Error in loaded Petri net, could not read PNML file.";
            }
            throw new StateSpaceLoaderException(message);
        }
        ExplorerUtilities explorerUtils = creator.create(petriNet);
        VanishingExplorer vanishingExplorer = vanishingCreator.create(explorerUtils);
        return generateStateSpace(stateWriter, temporaryTransitions, temporaryStates, petriNet, explorerUtils,
                vanishingExplorer, threads);
    }

    /**
     * Loads the transitions and states from binaries
     *
     * @return state space explorer results
     * @throws IOException if IO error
     * @throws StateSpaceLoaderException if error during loading from binaries
     */
    private StateSpaceExplorer.StateSpaceExplorerResults loadFromBinaries()
            throws IOException, StateSpaceLoaderException {
        // Guard against the user having cancelled one (or both) of the binary
        // file dialogs; without this the Paths below would be null
        if (binaryTransitions == null || binaryStates == null) {
            throw new StateSpaceLoaderException("Please load both the transitions and states binary files.");
        }
        StateReader stateReader = new KryoStateIO();
        temporaryTransitions = getTransitionsPath();
        temporaryStates = getStatesPath();
        return processBinaryResults(stateReader, temporaryTransitions);
    }

    /**
     * @return Path for state space transitions
     * @throws IOException if IO error occurs
     */
    private Path getTransitionsPath() throws IOException {
        return loadFromBinariesRadio.isSelected() ? binaryTransitions : Files.createTempFile("transitions", ".tmp");
    }

    /**
     * @return Path for state space states
     * @throws IOException if IO error occurs
     */
    private Path getStatesPath() throws IOException {
        return loadFromBinariesRadio.isSelected() ? binaryStates : Files.createTempFile("states", ".tmp");
    }

    /**
     * Writes the state space into transitions and states
     *
     * @param stateWriter writer
     * @param transitions to write
     * @param states to write
     * @param petriNet net whose state space is generated
     * @param explorerUtils explorer utilities
     * @param vanishingExplorer vanishing state explorer
     * @param threads number of worker threads to use
     * @return state space explorer results
     * @throws TimelessTrapException unable to exit cyclic vanishing state
     * @throws InterruptedException thread interrupted
     * @throws ExecutionException task aborted due to exception
     * @throws IOException error doing IO
     * @throws InvalidRateException functional rate expression invalid
     */
    private StateSpaceExplorer.StateSpaceExplorerResults generateStateSpace(StateWriter stateWriter, Path transitions,
                                                                            Path states, PetriNet petriNet,
                                                                            ExplorerUtilities explorerUtils,
                                                                            VanishingExplorer vanishingExplorer,
                                                                            int threads)
            throws IOException, TimelessTrapException, ExecutionException, InvalidRateException, InterruptedException {
        try (OutputStream transitionStream = Files.newOutputStream(transitions);
             OutputStream stateStream = Files.newOutputStream(states)) {
            try (Output transitionOutput = new Output(transitionStream);
                 Output stateOutput = new Output(stateStream)) {
                return writeStateSpace(stateWriter, transitionOutput, stateOutput, petriNet, explorerUtils,
                        vanishingExplorer, threads);
            }
        }
    }

    /**
     * Processes the binary results and returns their state space
     *
     * @param stateReader reader
     * @param transitions to process
     * @return state space explorer results
     * @throws IOException if IO error
     * @throws StateSpaceLoaderException if error during loading from binaries
     */
    private StateSpaceExplorer.StateSpaceExplorerResults processBinaryResults(StateReader stateReader, Path transitions)
            throws IOException, StateSpaceLoaderException {
        try (InputStream inputStream = Files.newInputStream(transitions);
             Input transitionInput = new Input(inputStream)) {
            try {
                Collection<Record> records = readResults(stateReader, transitionInput);
                int transitionCount = getTransitionCount(records);
                return new StateSpaceExplorer.StateSpaceExplorerResults(transitionCount, records.size());
            } catch (IOException e) {
                throw new StateSpaceLoaderException(
                        "Could not parse binaries.\nAre you sure they were generated using the PIPE 5 state space explorer module?",
                        e);
            }
        }
    }

    /**
     * Writes the petriNet state space out to a temporary file which is referenced by the objectOutputStream
     *
     * @param stateWriter format in which to write the results to
     * @param transitionOutput stream to write state space to
     * @param stateOutput stream to write state integer mappings to
     * @param petriNet net whose state space is written
     * @param explorerUtilities explorer utilities
     * @param vanishingExplorer vanishing state explorer
     * @param threads number of worker threads to use
     * @return state space explorer results
     * @throws TimelessTrapException unable to exit cyclic vanishing state
     * @throws InterruptedException thread interrupted
     * @throws ExecutionException task aborted due to exception
     * @throws IOException error doing IO
     * @throws InvalidRateException functional rate expression invalid
     */
    private StateSpaceExplorer.StateSpaceExplorerResults writeStateSpace(StateWriter stateWriter,
                                                                         Output transitionOutput, Output stateOutput,
                                                                         PetriNet petriNet,
                                                                         ExplorerUtilities explorerUtilities,
                                                                         VanishingExplorer vanishingExplorer, int threads)
            throws TimelessTrapException, ExecutionException, InterruptedException, IOException, InvalidRateException {
        StateProcessor processor = new StateIOProcessor(stateWriter, transitionOutput, stateOutput);
        StateSpaceExplorer stateSpaceExplorer = getStateSpaceExplorer(explorerUtilities, vanishingExplorer, processor, threads);
        return stateSpaceExplorer.generate(explorerUtilities.getCurrentState());
    }

    /**
     * @param explorerUtilities explorer utilities
     * @param vanishingExplorer vanishing state explorer
     * @param stateProcessor processor the results are fed into
     * @param threads number of worker threads to use
     * @return sequential explorer for a single thread, massive parallel explorer otherwise
     */
    private StateSpaceExplorer getStateSpaceExplorer(ExplorerUtilities explorerUtilities,
                                                     VanishingExplorer vanishingExplorer,
                                                     StateProcessor stateProcessor, int threads) {
        if (threads == 1) {
            return new SequentialStateSpaceExplorer(explorerUtilities, vanishingExplorer, stateProcessor);
        }
        return new MassiveParallelStateSpaceExplorer(explorerUtilities, vanishingExplorer, stateProcessor, threads, STATES_PER_THREAD);
    }

    /**
     * Reads results of steady state exploration into a collection of records
     *
     * @param stateReader reader
     * @param input to process
     * @return state transitions with rates
     * @throws IOException error doing IO
     */
    private Collection<Record> readResults(StateReader stateReader, Input input) throws IOException {
        MultiStateReader reader = new EntireStateReader(stateReader);
        return reader.readRecords(input);
    }

    /**
     * @param records to process
     * @return the number of transitions in the state space
     */
    private int getTransitionCount(Iterable<Record> records) {
        int sum = 0;
        for (Record record : records) {
            sum += record.successors.size();
        }
        return sum;
    }

    /**
     * Loads and processes state space
     *
     * @return results
     * @throws IOException error doing IO
     * @throws StateSpaceLoaderException if error during loading from binaries
     */
    public Results loadStateSpace() throws StateSpaceLoaderException, IOException {
        KryoStateIO stateReader = new KryoStateIO();
        try (InputStream inputStream = Files.newInputStream(temporaryTransitions);
             InputStream stateInputStream = Files.newInputStream(temporaryStates);
             Input transitionInput = new Input(inputStream);
             Input stateInput = new Input(stateInputStream)) {
            Collection<Record> records = readResults(stateReader, transitionInput);
            Map<Integer, ClassifiedState> stateMap = readMappings(stateReader, stateInput);
            return new Results(records, stateMap);
        }
    }

    /**
     * Reads results of the mapping of an integer state representation to
     * the Classified State it represents
     *
     * @param stateReader reader
     * @param input to process
     * @return state mappings
     * @throws IOException error doing IO
     */
    private Map<Integer, ClassifiedState> readMappings(StateReader stateReader, Input input) throws IOException {
        MultiStateReader reader = new EntireStateReader(stateReader);
        return reader.readStates(input);
    }

    /**
     * Prompts the user for locations to save copies of the temporary
     * transitions and states binaries, if results have been generated.
     */
    public void saveBinaryFiles() {
        if (temporaryStates != null && temporaryTransitions != null) {
            copyFile(temporaryTransitions, "Select location for temporary transitions");
            copyFile(temporaryStates, "Select location for temporary states");
        }
    }

    /**
     * @param temporary path to copy to new location
     * @param message displayed message in save file dialog pop up
     */
    private void copyFile(Path temporary, String message) {
        loadDialog.setMode(FileDialog.SAVE);
        loadDialog.setTitle(message);
        loadDialog.setVisible(true);
        File[] files = loadDialog.getFiles();
        if (files.length > 0) {
            File file = files[0];
            Path path = Paths.get(file.toURI());
            try {
                Files.copy(temporary, path, StandardCopyOption.REPLACE_EXISTING);
            } catch (IOException e) {
                // Pass the exception itself so the stack trace is not lost
                LOGGER.log(Level.SEVERE, e.getMessage(), e);
            }
        }
    }

    /**
     * Registers a listener on both Petri net source radio buttons
     *
     * @param listener listener to register
     */
    public void addPetriNetRadioListener(ActionListener listener) {
        loadPetriNetFromFileRadioButton.addActionListener(listener);
        useExistingPetriNetRadioButton.addActionListener(listener);
    }

    /**
     * Registers a listener on the load-from-binaries radio button
     *
     * @param listener listener to register
     */
    public void addBinariesListener(ActionListener listener) {
        loadFromBinariesRadio.addActionListener(listener);
    }

    /**
     * Used in place of a lambda to create the explorer utilities needed for generating the
     * state space from a Petri net
     */
    public interface ExplorerCreator {
        ExplorerUtilities create(PetriNet petriNet);
    }

    /**
     * Used in place of a lambda to create the vanishing utilities needed for
     * generating the state space from a Petri net
     */
    public interface VanishingExplorerCreator {
        VanishingExplorer create(ExplorerUtilities utils);
    }

    /**
     * State space exploration results.
     * Static nested class: it does not use the enclosing loader's state, so it
     * should not hold a hidden reference to it.
     */
    public static class Results {
        /**
         * Transition records
         */
        public final Collection<Record> records;

        /**
         * Classified state mappings
         */
        public final Map<Integer, ClassifiedState> stateMappings;

        /**
         * Constructor
         *
         * @param records of results
         * @param stateMappings state mappings
         */
        public Results(Collection<Record> records, Map<Integer, ClassifiedState> stateMappings) {
            this.records = records;
            this.stateMappings = stateMappings;
        }
    }
}
| |
/*******************************************************************************
* Copyright (c) 2012 GigaSpaces Technologies Ltd. All rights reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package org.openspaces.admin.pu.config;
import com.gigaspaces.grid.zone.ZoneHelper;
import com.gigaspaces.security.directory.UserDetails;
import org.openspaces.admin.internal.pu.dependency.DefaultProcessingUnitDependencies;
import org.openspaces.admin.internal.pu.dependency.DefaultProcessingUnitDeploymentDependencies;
import org.openspaces.admin.internal.pu.dependency.InternalProcessingUnitDependencies;
import org.openspaces.admin.internal.pu.dependency.InternalProcessingUnitDependency;
import org.openspaces.admin.pu.ProcessingUnitDeployment;
import org.openspaces.admin.pu.dependency.ProcessingUnitDependencies;
import org.openspaces.admin.pu.dependency.ProcessingUnitDependency;
import org.openspaces.admin.pu.topology.ProcessingUnitConfigHolder;
import org.openspaces.pu.container.support.CommandLineParser.Parameter;
import org.springframework.beans.factory.annotation.Required;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import java.util.*;
/**
* @author itaif
* @since 9.0.1
*/
@XmlRootElement(name = "pu")
public class ProcessingUnitConfig implements ProcessingUnitConfigHolder {
// Name/location of the processing unit to deploy; required (see setProcessingUnit).
private String processingUnit;
// Optional override name for the deployed processing unit.
private String name;
// Cluster schema to deploy with (see ProcessingUnitDeployment#clusterSchema).
private String clusterSchema;
// Number of instances; null means the deployment default is used.
private Integer numberOfInstances;
// Number of backups; null means the deployment default is used.
private Integer numberOfBackups;
// Isolation flag (see ProcessingUnitDeployment#requiresIsolation).
private Boolean requiresIsolation;
// Cap on instances per VM; null means unlimited/default.
private Integer maxInstancesPerVM;
// Cap on instances per machine; null means unlimited/default.
private Integer maxInstancesPerMachine;
// Per-zone instance caps, keyed by zone name.
private Map<String, Integer> maxInstancesPerZone = new HashMap<String, Integer>();
// Deployment zones. NOTE(review): accessors not visible in this chunk.
private List<String> zones = new ArrayList<String>();
// Primary zone name. NOTE(review): accessors not visible in this chunk.
private String primaryZone;
// Context (Spring) properties passed to the processing unit.
private Map<String,String> contextProperties = new HashMap<String,String>();
// Security credentials used for the deployment, if any.
private UserDetailsConfig userDetails;
// Location of an SLA definition (see ProcessingUnitDeployment#slaLocation).
private String slaLocation;
// Secured-deployment flag (see ProcessingUnitDeployment#secured).
private Boolean secured;
// Elastic deployment properties.
private Map<String,String> elasticProperties = new HashMap<String,String>();
// Deployment dependencies; defaults to an empty dependency set.
private ProcessingUnitDependencies<ProcessingUnitDependency> dependencies = new DefaultProcessingUnitDependencies();
// Returns the name/location of the processing unit to deploy.
public String getProcessingUnit() {
return processingUnit;
}
/**
 * Sets the processing unit to deploy; required for a valid config.
 * @see ProcessingUnitDeployment#ProcessingUnitDeployment(String)
 */
@XmlAttribute
@Required
public void setProcessingUnit(String processingUnit) {
this.processingUnit = processingUnit;
}
// Returns the override name for the deployment, or null if none was set.
@Override
public String getName() {
return name;
}
/**
 * Sets an override name for the deployed processing unit.
 * @see ProcessingUnitDeployment#name(String)
 */
@Override
public void setName(String name) {
this.name = name;
}
// Returns the cluster schema, or null if the default should be used.
public String getClusterSchema() {
return clusterSchema;
}
/**
 * Sets the cluster schema to deploy with.
 * @see ProcessingUnitDeployment#clusterSchema(String)
 */
public void setClusterSchema(String clusterSchema) {
this.clusterSchema = clusterSchema;
}
// Returns the configured number of instances, or null for the default.
public Integer getNumberOfInstances() {
return numberOfInstances;
}
/**
 * Sets the number of instances to deploy.
 * @see ProcessingUnitDeployment#numberOfInstances(int)
 */
public void setNumberOfInstances(Integer numberOfInstances) {
this.numberOfInstances = numberOfInstances;
}
// Returns the configured number of backups, or null for the default.
public Integer getNumberOfBackups() {
return numberOfBackups;
}
/**
 * Sets the number of backups per instance.
 * @see ProcessingUnitDeployment#numberOfBackups(int)
 */
public void setNumberOfBackups(Integer numberOfBackups) {
this.numberOfBackups = numberOfBackups;
}
// Returns the isolation flag, or null if not explicitly configured.
public Boolean isRequiresIsolation() {
return requiresIsolation;
}
/**
 * Sets whether the processing unit requires isolation.
 * Mapped to the "requires-isolation" XML attribute.
 * @see ProcessingUnitDeployment#requiresIsolation(boolean)
 */
@XmlAttribute(name="requires-isolation")
public void setRequiresIsolation(Boolean requiresIsolation) {
this.requiresIsolation = requiresIsolation;
}
// Returns the per-VM instance cap, or null if not explicitly configured.
public Integer getMaxInstancesPerVM() {
return maxInstancesPerVM;
}
/**
 * Sets the maximum number of instances per VM.
 * Mapped to the "max-instances-per-vm" XML attribute.
 * @see ProcessingUnitDeployment#maxInstancesPerVM(int)
 */
@XmlAttribute(name="max-instances-per-vm")
public void setMaxInstancesPerVM(Integer maxInstancesPerVM) {
this.maxInstancesPerVM = maxInstancesPerVM;
}
// Returns the per-machine instance cap, or null if not explicitly configured.
public Integer getMaxInstancesPerMachine() {
return maxInstancesPerMachine;
}
/**
 * Sets the maximum number of instances per machine.
 * @see ProcessingUnitDeployment#maxInstancesPerMachine(int)
 */
public void setMaxInstancesPerMachine(Integer maxInstancesPerMachine) {
this.maxInstancesPerMachine = maxInstancesPerMachine;
}
// Returns the live context-properties map (not a defensive copy).
public Map<String,String> getContextProperties() {
return contextProperties;
}
// Returns the configured security credentials, or null if none were set.
public UserDetailsConfig getUserDetails() {
return userDetails;
}
/**
 * Sets the security credentials for the deployment.
 * Mapped to the "user-details" XML element.
 * @see ProcessingUnitDeployment#userDetails(UserDetails)
 */
@XmlElement(name="user-details")
public void setUserDetails(UserDetailsConfig userDetails) {
this.userDetails = userDetails;
}
// Returns the SLA definition location, or null if none was set.
public String getSlaLocation() {
return slaLocation;
}
/**
 * Sets the location of the SLA definition.
 * @see ProcessingUnitDeployment#slaLocation(String)
 */
public void setSlaLocation(String slaLocation) {
this.slaLocation = slaLocation;
}
/**
 * Returns the secured flag, or null if not explicitly configured.
 * @see ProcessingUnitDeployment#secured(boolean)
 */
public Boolean getSecured() {
return secured;
}
/**
 * Sets whether the deployment is secured.
 * @see ProcessingUnitDeployment#secured(boolean)
 */
public void setSecured(Boolean secured) {
this.secured = secured;
}
public Map<String,String> getElasticProperties() {
return elasticProperties;
}
public Map<String, Integer> getMaxInstancesPerZone() {
return maxInstancesPerZone;
}
/**
* @see ProcessingUnitDeployment#maxInstancesPerZone(String, int)
*/
@XmlTransient
public void setMaxInstancesPerZone(Map<String, Integer> maxInstancesPerZone) {
this.maxInstancesPerZone = maxInstancesPerZone;
}
/**
* @see ProcessingUnitDeployment#maxInstancesPerZone(String, int)
*/
@XmlTransient
public void setMaxInstancesPerZone(String zone, int maxInstancesPerZone) {
getMaxInstancesPerZone().put(zone,maxInstancesPerZone);
}
@XmlElement(name="max-instances-per-zone")
public void setMaxInstancesPerZoneConfig(MaxInstancesPerZoneConfig maxInstancesPerZoneConfig) {
setMaxInstancesPerZone(maxInstancesPerZoneConfig.getZone(), maxInstancesPerZoneConfig.getMaxNumberOfInstances());
}
/**
* @see ProcessingUnitDeployment#setContextProperty(String, String)
*/
public void setContextProperties(Map<String,String> contextProperties) {
this.contextProperties = contextProperties;
}
/**
* @see ProcessingUnitDeployment#setContextProperty(String, String)
*/
@XmlTransient
public void setContextProperty(String key, String value) {
contextProperties.put(key, value);
}
@XmlElement(name="context-property")
public void setContextPropertyConfig(ContextPropertyConfig propertyConfig) {
setContextProperty(propertyConfig.getKey(), propertyConfig.getValue());
}
/**
* @see ProcessingUnitDeployment#setElasticProperty(String, String)
*/
public void setElasticProperties(Map<String,String> elasticProperties) {
this.elasticProperties = elasticProperties;
}
/**
* Translates this configuration into the string-array form consumed by the deploy
* machinery, ending with the processing unit name/location itself. Option order and
* spelling mirror the deploy command line and must not be changed casually.
* @see ProcessingUnitDeployment#getDeploymentOptions()
*/
public String[] toDeploymentOptions() {
List<String> deployOptions = new ArrayList<String>();
if (name != null) {
deployOptions.add("-override-name");
deployOptions.add(name);
}
if (slaLocation != null) {
deployOptions.add("-sla");
deployOptions.add(slaLocation);
}
// A single "-cluster" option carries schema and/or member counts.
if (clusterSchema != null || numberOfInstances != null || numberOfBackups != null) {
deployOptions.add("-cluster");
if (clusterSchema != null) {
deployOptions.add("schema=" + clusterSchema);
}
if (numberOfInstances != null) {
// Format: total_members=<instances>[,<backups>]. Note numberOfBackups is
// only emitted when numberOfInstances is also set.
String totalMembers = "total_members=" + numberOfInstances;
if (numberOfBackups != null) {
totalMembers += "," + numberOfBackups;
}
deployOptions.add(totalMembers);
}
}
if (requiresIsolation != null) {
deployOptions.add("-requires-isolation");
deployOptions.add(requiresIsolation.toString());
}
if (maxInstancesPerVM != null) {
deployOptions.add("-max-instances-per-vm");
deployOptions.add(maxInstancesPerVM.toString());
}
if (maxInstancesPerMachine != null) {
deployOptions.add("-max-instances-per-machine");
deployOptions.add(maxInstancesPerMachine.toString());
}
if (!maxInstancesPerZone.isEmpty()) {
deployOptions.add("-max-instances-per-zone");
deployOptions.add(ZoneHelper.unparse(maxInstancesPerZone));
}
if (!zones.isEmpty()) {
deployOptions.add("-zones");
for (String requiredZone : zones) {
deployOptions.add(requiredZone);
}
}
if (primaryZone != null) {
deployOptions.add("-primary-zone");
deployOptions.add(primaryZone);
}
if (!elasticProperties.isEmpty()){
deployOptions.add("-elastic-properties");
for (Map.Entry<String, String> elasticProp : elasticProperties.entrySet()){
deployOptions.add(elasticProp.getKey() + "=" + elasticProp.getValue());
}
}
// Each context property becomes its own "-properties" option; the "embed://"
// prefix presumably marks an inline value rather than a file location — confirm
// against the deploy CLI documentation.
for (Map.Entry<String,String> entry : contextProperties.entrySet()) {
deployOptions.add("-properties");
deployOptions.add("embed://" + entry.getKey() + "=" + entry.getValue());
}
// Append dependency-derived options supplied by the internal dependencies object.
for (Parameter parameter : ((InternalProcessingUnitDependencies<?,?>)getDependencies()).toCommandLineParameters()) {
deployOptions.add("-"+parameter.getName());
for (String arg : parameter.getArguments()) {
deployOptions.add(arg);
}
}
// The processing unit itself is always the final, positional argument.
deployOptions.add(getProcessingUnit());
return deployOptions.toArray(new String[deployOptions.size()]);
}
/** Returns a defensive array copy of the required deployment zones. */
public String[] getZones() {
return zones.toArray(new String[zones.size()]);
}
/**
* Replaces the full zone list (the array is copied, not retained).
* @see ProcessingUnitDeployment#addZone(String)
*/
public void setZones(String[] zones) {
this.zones = new ArrayList<String>(Arrays.asList(zones));
}
/**
* @see ProcessingUnitDeployment#addZone(String)
*/
@XmlTransient
public void addZone(String zone) {
zones.add(zone);
}
/**
* @see ProcessingUnitDeployment#addDependencies(org.openspaces.admin.internal.pu.dependency.ProcessingUnitDetailedDependencies)
*/
@Override
public ProcessingUnitDependencies<ProcessingUnitDependency> getDependencies() {
return dependencies;
}
// @XmlTransient: dependencies are unmarshalled via setDeploymentDependencies instead.
@XmlTransient
@Override
public void setDependencies(ProcessingUnitDependencies<ProcessingUnitDependency> dependencies) {
this.dependencies = dependencies;
}
/**
* A helper method for setting conditions for processing unit deployment.
* Wraps the given dependencies in a DefaultProcessingUnitDeploymentDependencies and
* installs them on the internal dependencies object (hence the unchecked cast).
*/
@SuppressWarnings("unchecked")
@XmlElement(type = ProcessingUnitDependency.class)
public void setDeploymentDependencies(ProcessingUnitDependency[] dependencies) {
DefaultProcessingUnitDeploymentDependencies deploymentDependencies = new DefaultProcessingUnitDeploymentDependencies();
for (ProcessingUnitDependency dependency : dependencies) {
deploymentDependencies.addDependency(dependency);
}
((InternalProcessingUnitDependencies<ProcessingUnitDependency,InternalProcessingUnitDependency>)this.getDependencies()).setDeploymentDependencies(deploymentDependencies);
}
/** Returns the deployment dependencies, resolved by required processing unit name. */
public ProcessingUnitDependency[] getDeploymentDependencies() {
List<ProcessingUnitDependency> dependenciesAsList = new ArrayList<ProcessingUnitDependency>();
for (String name : this.getDependencies().getDeploymentDependencies().getRequiredProcessingUnitsNames()) {
dependenciesAsList.add(this.getDependencies().getDeploymentDependencies().getDependencyByName(name));
}
return dependenciesAsList.toArray(new ProcessingUnitDependency[dependenciesAsList.size()]);
}
/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
// Folds in every field that equals(Object) compares, including primaryZone,
// which equals() checks but was previously missing here. Objects that differ
// only in primaryZone now hash differently, keeping the two methods in step.
final int prime = 31;
int result = 1;
result = prime * result + ((clusterSchema == null) ? 0 : clusterSchema.hashCode());
result = prime * result + ((contextProperties == null) ? 0 : contextProperties.hashCode());
result = prime * result + ((dependencies == null) ? 0 : dependencies.hashCode());
result = prime * result + ((elasticProperties == null) ? 0 : elasticProperties.hashCode());
result = prime * result + ((maxInstancesPerMachine == null) ? 0 : maxInstancesPerMachine.hashCode());
result = prime * result + ((maxInstancesPerVM == null) ? 0 : maxInstancesPerVM.hashCode());
result = prime * result + ((requiresIsolation == null) ? 0 : requiresIsolation.hashCode());
result = prime * result + ((maxInstancesPerZone == null) ? 0 : maxInstancesPerZone.hashCode());
result = prime * result + ((name == null) ? 0 : name.hashCode());
result = prime * result + ((numberOfBackups == null) ? 0 : numberOfBackups.hashCode());
result = prime * result + ((numberOfInstances == null) ? 0 : numberOfInstances.hashCode());
result = prime * result + ((processingUnit == null) ? 0 : processingUnit.hashCode());
result = prime * result + ((secured == null) ? 0 : secured.hashCode());
result = prime * result + ((slaLocation == null) ? 0 : slaLocation.hashCode());
result = prime * result + ((userDetails == null) ? 0 : userDetails.hashCode());
result = prime * result + ((zones == null) ? 0 : zones.hashCode());
result = prime * result + ((primaryZone == null) ? 0 : primaryZone.hashCode());
return result;
}
/* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
// Identity and exact-class checks first, then a null-safe field-by-field
// comparison in the same order as before (short-circuits on first mismatch).
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
ProcessingUnitConfig other = (ProcessingUnitConfig) obj;
return fieldEquals(clusterSchema, other.clusterSchema)
&& fieldEquals(contextProperties, other.contextProperties)
&& fieldEquals(dependencies, other.dependencies)
&& fieldEquals(elasticProperties, other.elasticProperties)
&& fieldEquals(maxInstancesPerMachine, other.maxInstancesPerMachine)
&& fieldEquals(requiresIsolation, other.requiresIsolation)
&& fieldEquals(maxInstancesPerVM, other.maxInstancesPerVM)
&& fieldEquals(maxInstancesPerZone, other.maxInstancesPerZone)
&& fieldEquals(name, other.name)
&& fieldEquals(numberOfBackups, other.numberOfBackups)
&& fieldEquals(numberOfInstances, other.numberOfInstances)
&& fieldEquals(processingUnit, other.processingUnit)
&& fieldEquals(secured, other.secured)
&& fieldEquals(slaLocation, other.slaLocation)
&& fieldEquals(userDetails, other.userDetails)
&& fieldEquals(zones, other.zones)
&& fieldEquals(primaryZone, other.primaryZone);
}
/** Null-safe equality: true when both are {@code null} or {@code a.equals(b)}. */
private static boolean fieldEquals(Object a, Object b) {
return (a == null) ? (b == null) : a.equals(b);
}
/** Debug dump listing only the fields that are non-null, in declaration order. */
@Override
public String toString() {
return "ProcessingUnitConfig [" + (processingUnit != null ? "processingUnit=" + processingUnit + ", " : "")
+ (name != null ? "name=" + name + ", " : "")
+ (clusterSchema != null ? "clusterSchema=" + clusterSchema + ", " : "")
+ (numberOfInstances != null ? "numberOfInstances=" + numberOfInstances + ", " : "")
+ (numberOfBackups != null ? "numberOfBackups=" + numberOfBackups + ", " : "")
+ (requiresIsolation != null ? "requiresIsolation=" + requiresIsolation + ", " : "")
+ (maxInstancesPerVM != null ? "maxInstancesPerVM=" + maxInstancesPerVM + ", " : "")
+ (maxInstancesPerMachine != null ? "maxInstancesPerMachine=" + maxInstancesPerMachine + ", " : "")
+ (maxInstancesPerZone != null ? "maxInstancesPerZone=" + maxInstancesPerZone + ", " : "")
+ (zones != null ? "zones=" + zones + ", " : "")
+ (primaryZone != null ? "primaryZone=" + primaryZone + ", " : "")
+ (contextProperties != null ? "contextProperties=" + contextProperties + ", " : "")
+ (userDetails != null ? "userDetails=" + userDetails + ", " : "")
+ (slaLocation != null ? "slaLocation=" + slaLocation + ", " : "")
+ (secured != null ? "secured=" + secured + ", " : "")
+ (elasticProperties != null ? "elasticProperties=" + elasticProperties + ", " : "")
// dependencies is last and intentionally has no trailing ", ".
+ (dependencies != null ? "dependencies=" + dependencies : "") + "]";
}
/** Returns the primary deployment zone, or {@code null} if not set. */
public String getPrimaryZone()
{
return primaryZone;
}
/** Sets the primary deployment zone. */
public void setPrimaryZone(String primaryZone)
{
this.primaryZone = primaryZone;
}
// This object is already a ProcessingUnitConfig, so the conversion is the identity.
@Override
public ProcessingUnitConfig toProcessingUnitConfig() {
return this;
}
}
| |
package com.bluelinelabs.conductor.demo.controllers;
import android.content.Intent;
import android.graphics.PorterDuff.Mode;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.ColorRes;
import android.support.annotation.NonNull;
import android.support.v4.content.ContextCompat;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.SpannableString;
import android.text.SpannableStringBuilder;
import android.text.Spanned;
import android.text.style.AbsoluteSizeSpan;
import android.text.style.URLSpan;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import com.bluelinelabs.conductor.ControllerChangeHandler;
import com.bluelinelabs.conductor.ControllerChangeType;
import com.bluelinelabs.conductor.RouterTransaction;
import com.bluelinelabs.conductor.changehandler.FadeChangeHandler;
import com.bluelinelabs.conductor.changehandler.HorizontalChangeHandler;
import com.bluelinelabs.conductor.changehandler.TransitionChangeHandlerCompat;
import com.bluelinelabs.conductor.demo.R;
import com.bluelinelabs.conductor.demo.changehandler.ArcFadeMoveChangeHandler;
import com.bluelinelabs.conductor.demo.changehandler.FabToDialogTransitionChangeHandler;
import com.bluelinelabs.conductor.demo.controllers.NavigationDemoController.DisplayUpMode;
import com.bluelinelabs.conductor.demo.controllers.base.BaseController;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
/**
* Landing controller of the Conductor demo app: shows a list of demo categories in a
* RecyclerView and routes each row click to the corresponding demo controller. Also
* hosts the FAB / "about" action that pushes an about dialog.
*/
public class HomeController extends BaseController {
// One entry per demo category: the row title plus the color of its dot.
private enum DemoModel {
NAVIGATION("Navigation Demos", R.color.red_300),
TRANSITIONS("Transition Demos", R.color.blue_grey_300),
SHARED_ELEMENT_TRANSITIONS("Shared Element Demos", R.color.purple_300),
CHILD_CONTROLLERS("Child Controllers", R.color.orange_300),
VIEW_PAGER("ViewPager", R.color.green_300),
TARGET_CONTROLLER("Target Controller", R.color.pink_300),
MULTIPLE_CHILD_ROUTERS("Multiple Child Routers", R.color.deep_orange_300),
MASTER_DETAIL("Master Detail", R.color.grey_300),
DRAG_DISMISS("Drag Dismiss", R.color.lime_300),
EXTERNAL_MODULES("Bonus Modules", R.color.teal_300);
String title;
@ColorRes int color;
DemoModel(String title, @ColorRes int color) {
this.title = title;
this.color = color;
}
}
// View-state key for persisting the FAB's visibility across view destruction.
private static final String KEY_FAB_VISIBILITY = "HomeController.fabVisibility";
@BindView(R.id.recycler_view) RecyclerView recyclerView;
@BindView(R.id.fab) View fab;
public HomeController() {
// Opt in to options-menu callbacks for this controller.
setHasOptionsMenu(true);
}
@NonNull
@Override
protected View inflateView(@NonNull LayoutInflater inflater, @NonNull ViewGroup container) {
return inflater.inflate(R.layout.controller_home, container, false);
}
@Override
protected void onViewBound(@NonNull View view) {
super.onViewBound(view);
// Static row list with fixed-size rows: enable the RecyclerView optimization.
recyclerView.setHasFixedSize(true);
recyclerView.setLayoutManager(new LinearLayoutManager(view.getContext()));
recyclerView.setAdapter(new HomeAdapter(LayoutInflater.from(view.getContext()), DemoModel.values()));
}
@Override
protected void onSaveViewState(@NonNull View view, @NonNull Bundle outState) {
super.onSaveViewState(view, outState);
// The FAB may be hidden by the dialog transition; remember its visibility.
outState.putInt(KEY_FAB_VISIBILITY, fab.getVisibility());
}
@Override
protected void onRestoreViewState(@NonNull View view, @NonNull Bundle savedViewState) {
super.onRestoreViewState(view, savedViewState);
//noinspection WrongConstant
fab.setVisibility(savedViewState.getInt(KEY_FAB_VISIBILITY));
}
@Override
public void onCreateOptionsMenu(@NonNull Menu menu, @NonNull MenuInflater inflater) {
super.onCreateOptionsMenu(menu, inflater);
inflater.inflate(R.menu.home, menu);
}
@Override
protected void onChangeStarted(@NonNull ControllerChangeHandler changeHandler, @NonNull ControllerChangeType changeType) {
// Hide this controller's menu while it is being swapped out; restore the
// toolbar title when it is being swapped back in.
// NOTE(review): no super.onChangeStarted() call — presumably intentional; confirm
// against BaseController.
setOptionsMenuHidden(!changeType.isEnter);
if (changeType.isEnter) {
setTitle();
}
}
@Override
public boolean onOptionsItemSelected(@NonNull MenuItem item) {
if (item.getItemId() == R.id.about) {
// Menu path into the about dialog: skip the FAB-specific transition.
onFabClicked(false);
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
protected String getTitle() {
return "Conductor Demos";
}
@OnClick(R.id.fab)
public void onFabClicked() {
onFabClicked(true);
}
// Pushes the about DialogController. When triggered from the FAB, a
// FAB-to-dialog shared transition is used (with a fade fallback pre-Lollipop via
// TransitionChangeHandlerCompat); otherwise a plain fade.
private void onFabClicked(boolean fromFab) {
SpannableString details = new SpannableString("A small, yet full-featured framework that allows building View-based Android applications");
details.setSpan(new AbsoluteSizeSpan(16, true), 0, details.length(), Spanned.SPAN_INCLUSIVE_INCLUSIVE);
final String url = "https://github.com/bluelinelabs/Conductor";
SpannableString link = new SpannableString(url);
link.setSpan(new URLSpan(url) {
@Override
public void onClick(View widget) {
startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(url)));
}
}, 0, link.length(), Spanned.SPAN_INCLUSIVE_INCLUSIVE);
SpannableStringBuilder description = new SpannableStringBuilder();
description.append(details);
description.append("\n\n");
description.append(link);
// removesFromViewOnPush=false keeps this view visible behind the dialog.
ControllerChangeHandler pushHandler = fromFab ? new TransitionChangeHandlerCompat(new FabToDialogTransitionChangeHandler(), new FadeChangeHandler(false)) : new FadeChangeHandler(false);
ControllerChangeHandler popHandler = fromFab ? new TransitionChangeHandlerCompat(new FabToDialogTransitionChangeHandler(), new FadeChangeHandler()) : new FadeChangeHandler();
getRouter()
.pushController(RouterTransaction.with(new DialogController("Conductor", description))
.pushChangeHandler(pushHandler)
.popChangeHandler(popHandler));
}
// Routes a tapped row to its demo controller; position is forwarded only where the
// destination needs it for shared-element transition naming.
void onModelRowClick(DemoModel model, int position) {
switch (model) {
case NAVIGATION:
getRouter().pushController(RouterTransaction.with(new NavigationDemoController(0, DisplayUpMode.SHOW_FOR_CHILDREN_ONLY))
.pushChangeHandler(new FadeChangeHandler())
.popChangeHandler(new FadeChangeHandler())
.tag(NavigationDemoController.TAG_UP_TRANSACTION)
);
break;
case TRANSITIONS:
getRouter().pushController(TransitionDemoController.getRouterTransaction(0, this));
break;
case TARGET_CONTROLLER:
getRouter().pushController(
RouterTransaction.with(new TargetDisplayController())
.pushChangeHandler(new FadeChangeHandler())
.popChangeHandler(new FadeChangeHandler()));
break;
case VIEW_PAGER:
getRouter().pushController(RouterTransaction.with(new PagerController())
.pushChangeHandler(new FadeChangeHandler())
.popChangeHandler(new FadeChangeHandler()));
break;
case CHILD_CONTROLLERS:
getRouter().pushController(RouterTransaction.with(new ParentController())
.pushChangeHandler(new FadeChangeHandler())
.popChangeHandler(new FadeChangeHandler()));
break;
case SHARED_ELEMENT_TRANSITIONS:
getRouter().pushController(RouterTransaction.with(new CityGridController(model.title, model.color, position))
.pushChangeHandler(new TransitionChangeHandlerCompat(new ArcFadeMoveChangeHandler(), new FadeChangeHandler()))
.popChangeHandler(new TransitionChangeHandlerCompat(new ArcFadeMoveChangeHandler(), new FadeChangeHandler())));
break;
case DRAG_DISMISS:
getRouter().pushController(RouterTransaction.with(new DragDismissController())
.pushChangeHandler(new FadeChangeHandler(false))
.popChangeHandler(new FadeChangeHandler()));
break;
case EXTERNAL_MODULES:
getRouter().pushController(RouterTransaction.with(new ExternalModulesController())
.pushChangeHandler(new HorizontalChangeHandler())
.popChangeHandler(new HorizontalChangeHandler()));
break;
case MULTIPLE_CHILD_ROUTERS:
getRouter().pushController(RouterTransaction.with(new MultipleChildRouterController())
.pushChangeHandler(new FadeChangeHandler())
.popChangeHandler(new FadeChangeHandler()));
break;
case MASTER_DETAIL:
getRouter().pushController(RouterTransaction.with(new MasterDetailListController())
.pushChangeHandler(new FadeChangeHandler())
.popChangeHandler(new FadeChangeHandler()));
break;
}
}
// Inner (non-static) adapter: rows call back into the enclosing controller's
// onModelRowClick, which is why it holds an implicit reference to it.
class HomeAdapter extends RecyclerView.Adapter<HomeAdapter.ViewHolder> {
private final LayoutInflater inflater;
private final DemoModel[] items;
public HomeAdapter(LayoutInflater inflater, DemoModel[] items) {
this.inflater = inflater;
this.items = items;
}
@Override
public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
return new ViewHolder(inflater.inflate(R.layout.row_home, parent, false));
}
@Override
public void onBindViewHolder(ViewHolder holder, int position) {
holder.bind(position, items[position]);
}
@Override
public int getItemCount() {
return items.length;
}
class ViewHolder extends RecyclerView.ViewHolder {
@BindView(R.id.tv_title) TextView tvTitle;
@BindView(R.id.img_dot) ImageView imgDot;
private DemoModel model;
private int position;
public ViewHolder(View itemView) {
super(itemView);
ButterKnife.bind(this, itemView);
}
void bind(int position, DemoModel item) {
model = item;
tvTitle.setText(item.title);
imgDot.getDrawable().setColorFilter(ContextCompat.getColor(getActivity(), item.color), Mode.SRC_ATOP);
this.position = position;
// Position-indexed transition names so shared-element transitions can match
// the tapped row's views on Lollipop and above.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
tvTitle.setTransitionName(getResources().getString(R.string.transition_tag_title_indexed, position));
imgDot.setTransitionName(getResources().getString(R.string.transition_tag_dot_indexed, position));
}
}
@OnClick(R.id.row_root)
void onRowClick() {
onModelRowClick(model, position);
}
}
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner.iterative.rule.test;
import com.facebook.presto.connector.ConnectorId;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.metadata.Signature;
import com.facebook.presto.metadata.TableHandle;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.ExpressionUtils;
import com.facebook.presto.sql.analyzer.TypeSignatureProvider;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.planner.Partitioning;
import com.facebook.presto.sql.planner.PartitioningScheme;
import com.facebook.presto.sql.planner.PlanNodeIdAllocator;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.TestingTableHandle;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.AggregationNode.Aggregation;
import com.facebook.presto.sql.planner.plan.AggregationNode.Step;
import com.facebook.presto.sql.planner.plan.ApplyNode;
import com.facebook.presto.sql.planner.plan.Assignments;
import com.facebook.presto.sql.planner.plan.DeleteNode;
import com.facebook.presto.sql.planner.plan.ExchangeNode;
import com.facebook.presto.sql.planner.plan.FilterNode;
import com.facebook.presto.sql.planner.plan.LimitNode;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.ProjectNode;
import com.facebook.presto.sql.planner.plan.SampleNode;
import com.facebook.presto.sql.planner.plan.TableFinishNode;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.sql.planner.plan.TableWriterNode;
import com.facebook.presto.sql.planner.plan.ValuesNode;
import com.facebook.presto.sql.planner.plan.WindowNode;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.FunctionCall;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.stream.Stream;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.FIXED_HASH_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SINGLE_DISTRIBUTION;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static java.lang.String.format;
public class PlanBuilder
{
private final PlanNodeIdAllocator idAllocator;
private final Metadata metadata;
private final Map<Symbol, Type> symbols = new HashMap<>();
public PlanBuilder(PlanNodeIdAllocator idAllocator, Metadata metadata)
{
this.idAllocator = idAllocator;
this.metadata = metadata;
}
public ValuesNode values(Symbol... columns)
{
return new ValuesNode(
idAllocator.getNextId(),
ImmutableList.copyOf(columns),
ImmutableList.of());
}
public ValuesNode values(List<Symbol> columns, List<List<Expression>> rows)
{
return new ValuesNode(idAllocator.getNextId(), columns, rows);
}
public LimitNode limit(long limit, PlanNode source)
{
return new LimitNode(idAllocator.getNextId(), source, limit, false);
}
public SampleNode sample(double sampleRatio, SampleNode.Type type, PlanNode source)
{
return new SampleNode(idAllocator.getNextId(), source, sampleRatio, type);
}
public ProjectNode project(Assignments assignments, PlanNode source)
{
return new ProjectNode(idAllocator.getNextId(), source, assignments);
}
public FilterNode filter(Expression predicate, PlanNode source)
{
return new FilterNode(idAllocator.getNextId(), source, predicate);
}
public AggregationNode aggregation(Consumer<AggregationBuilder> aggregationBuilderConsumer)
{
AggregationBuilder aggregationBuilder = new AggregationBuilder();
aggregationBuilderConsumer.accept(aggregationBuilder);
return aggregationBuilder.build();
}
public class AggregationBuilder
{
private PlanNode source;
private Map<Symbol, Aggregation> assignments = new HashMap<>();
private List<List<Symbol>> groupingSets;
private Step step;
private Optional<Symbol> hashSymbol = Optional.empty();
private Optional<Symbol> groupIdSymbol = Optional.empty();
public AggregationBuilder source(PlanNode source)
{
this.source = source;
return this;
}
public AggregationBuilder addAggregation(Symbol output, Expression expression, List<Type> inputTypes)
{
checkArgument(expression instanceof FunctionCall);
FunctionCall aggregation = (FunctionCall) expression;
Signature signature = metadata.getFunctionRegistry().resolveFunction(aggregation.getName(), TypeSignatureProvider.fromTypes(inputTypes));
return addAggregation(output, new Aggregation(aggregation, signature, Optional.empty()));
}
public AggregationBuilder addAggregation(Symbol output, Aggregation aggregation)
{
assignments.put(output, aggregation);
return this;
}
public AggregationBuilder globalGrouping()
{
return groupingSets(ImmutableList.of(ImmutableList.of()));
}
public AggregationBuilder groupingSets(List<List<Symbol>> groupingSets)
{
this.groupingSets = ImmutableList.copyOf(groupingSets);
return this;
}
public AggregationBuilder step(Step step)
{
this.step = step;
return this;
}
public AggregationBuilder hashSymbol(Symbol hashSymbol)
{
this.hashSymbol = Optional.of(hashSymbol);
return this;
}
public AggregationBuilder groupIdSymbol(Symbol groupIdSymbol)
{
this.groupIdSymbol = Optional.of(groupIdSymbol);
return this;
}
protected AggregationNode build()
{
return new AggregationNode(
idAllocator.getNextId(),
source,
assignments,
groupingSets,
step,
hashSymbol,
groupIdSymbol);
}
}
public ApplyNode apply(Assignments subqueryAssignments, List<Symbol> correlation, PlanNode input, PlanNode subquery)
{
return new ApplyNode(idAllocator.getNextId(), input, subquery, subqueryAssignments, correlation);
}
public TableScanNode tableScan(List<Symbol> symbols, Map<Symbol, ColumnHandle> assignments)
{
Expression originalConstraint = null;
return new TableScanNode(
idAllocator.getNextId(),
new TableHandle(
new ConnectorId("testConnector"),
new TestingTableHandle()),
symbols,
assignments,
Optional.empty(),
TupleDomain.all(),
originalConstraint
);
}
public TableFinishNode tableDelete(SchemaTableName schemaTableName, PlanNode deleteSource, Symbol deleteRowId)
{
TableWriterNode.DeleteHandle deleteHandle = new TableWriterNode.DeleteHandle(
new TableHandle(
new ConnectorId("testConnector"),
new TestingTableHandle()),
schemaTableName
);
return new TableFinishNode(
idAllocator.getNextId(),
exchange(e -> e
.addSource(new DeleteNode(
idAllocator.getNextId(),
deleteSource,
deleteHandle,
deleteRowId,
ImmutableList.of(deleteRowId)
))
.addInputsSet(deleteRowId)
.singleDistributionPartitioningScheme(deleteRowId)
),
deleteHandle,
ImmutableList.of(deleteRowId)
);
}
public ExchangeNode gatheringExchange(ExchangeNode.Scope scope, PlanNode child)
{
return exchange(builder -> builder.type(ExchangeNode.Type.GATHER)
.scope(scope)
.singleDistributionPartitioningScheme(child.getOutputSymbols())
.addSource(child)
.addInputsSet(child.getOutputSymbols()));
}
public ExchangeNode exchange(Consumer<ExchangeBuilder> exchangeBuilderConsumer)
{
ExchangeBuilder exchangeBuilder = new ExchangeBuilder();
exchangeBuilderConsumer.accept(exchangeBuilder);
return exchangeBuilder.build();
}
public class ExchangeBuilder
{
private ExchangeNode.Type type = ExchangeNode.Type.GATHER;
private ExchangeNode.Scope scope = ExchangeNode.Scope.REMOTE;
private PartitioningScheme partitioningScheme;
private List<PlanNode> sources = new ArrayList<>();
private List<List<Symbol>> inputs = new ArrayList<>();
public ExchangeBuilder type(ExchangeNode.Type type)
{
this.type = type;
return this;
}
public ExchangeBuilder scope(ExchangeNode.Scope scope)
{
this.scope = scope;
return this;
}
public ExchangeBuilder singleDistributionPartitioningScheme(Symbol... outputSymbols)
{
return singleDistributionPartitioningScheme(Arrays.asList(outputSymbols));
}
public ExchangeBuilder singleDistributionPartitioningScheme(List<Symbol> outputSymbols)
{
return partitioningScheme(new PartitioningScheme(Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()), outputSymbols));
}
public ExchangeBuilder fixedHashDistributionParitioningScheme(List<Symbol> outputSymbols, List<Symbol> partitioningSymbols)
{
return partitioningScheme(new PartitioningScheme(Partitioning.create(FIXED_HASH_DISTRIBUTION, ImmutableList.copyOf(partitioningSymbols)), ImmutableList.copyOf(outputSymbols)));
}
public ExchangeBuilder fixedHashDistributionParitioningScheme(List<Symbol> outputSymbols, List<Symbol> partitioningSymbols, Symbol hashSymbol)
{
return partitioningScheme(new PartitioningScheme(Partitioning.create(FIXED_HASH_DISTRIBUTION, ImmutableList.copyOf(partitioningSymbols)), ImmutableList.copyOf(outputSymbols), Optional.of(hashSymbol)));
}
public ExchangeBuilder partitioningScheme(PartitioningScheme partitioningScheme)
{
this.partitioningScheme = partitioningScheme;
return this;
}
public ExchangeBuilder addSource(PlanNode source)
{
this.sources.add(source);
return this;
}
public ExchangeBuilder addInputsSet(Symbol... inputs)
{
return addInputsSet(Arrays.asList(inputs));
}
public ExchangeBuilder addInputsSet(List<Symbol> inputs)
{
this.inputs.add(inputs);
return this;
}
protected ExchangeNode build()
{
return new ExchangeNode(idAllocator.getNextId(), type, scope, partitioningScheme, sources, inputs);
}
}
public Symbol symbol(String name, Type type)
{
Symbol symbol = new Symbol(name);
Type old = symbols.get(symbol);
if (old != null && !old.equals(type)) {
throw new IllegalArgumentException(format("Symbol '%s' already registered with type '%s'", name, old));
}
if (old == null) {
symbols.put(symbol, type);
}
return symbol;
}
public WindowNode window(WindowNode.Specification specification, Map<Symbol, WindowNode.Function> functions, PlanNode source)
{
return new WindowNode(
idAllocator.getNextId(),
source,
specification,
ImmutableMap.copyOf(functions),
Optional.empty(),
ImmutableSet.of(),
0);
}
public static Expression expression(String sql)
{
return ExpressionUtils.rewriteIdentifiersToSymbolReferences(new SqlParser().createExpression(sql));
}
/**
 * Parses each SQL fragment via {@link #expression(String)} and returns the parsed
 * expressions as an immutable list, preserving argument order.
 */
public static List<Expression> expressions(String... expressions)
{
    ImmutableList.Builder<Expression> parsed = ImmutableList.builder();
    for (String sql : expressions) {
        parsed.add(expression(sql));
    }
    return parsed.build();
}
/**
 * Returns an unmodifiable view of all symbols registered so far, mapped to their types.
 */
public Map<Symbol, Type> getSymbols()
{
    return Collections.unmodifiableMap(symbols);
}
}
| |
/*
* Copyright 2013 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.rendering.logic;
import com.bulletphysics.linearmath.Transform;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Maps;
import com.google.common.collect.SetMultimap;
import org.lwjgl.BufferUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.config.Config;
import org.terasology.entitySystem.entity.EntityRef;
import org.terasology.entitySystem.entity.lifecycleEvents.BeforeDeactivateComponent;
import org.terasology.entitySystem.entity.lifecycleEvents.OnActivatedComponent;
import org.terasology.entitySystem.entity.lifecycleEvents.OnChangedComponent;
import org.terasology.entitySystem.event.ReceiveEvent;
import org.terasology.entitySystem.systems.BaseComponentSystem;
import org.terasology.entitySystem.systems.RegisterMode;
import org.terasology.entitySystem.systems.RegisterSystem;
import org.terasology.entitySystem.systems.RenderSystem;
import org.terasology.logic.characters.CharacterComponent;
import org.terasology.logic.location.LocationComponent;
import org.terasology.logic.players.LocalPlayer;
import org.terasology.math.AABB;
import org.terasology.math.MatrixUtils;
import org.terasology.math.VecMath;
import org.terasology.math.geom.Matrix4f;
import org.terasology.math.geom.Quat4f;
import org.terasology.math.geom.Vector3f;
import org.terasology.network.ClientComponent;
import org.terasology.network.NetworkSystem;
import org.terasology.registry.In;
import org.terasology.rendering.assets.material.Material;
import org.terasology.rendering.opengl.OpenGLMesh;
import org.terasology.rendering.world.WorldRenderer;
import org.terasology.world.WorldProvider;
import java.nio.FloatBuffer;
import java.util.Arrays;
import java.util.Map;
import java.util.Set;
/**
* TODO: This should be made generic (no explicit shader or mesh) and ported directly into WorldRenderer? Later note: some GelCube functionality moved to a module
*
* @author Immortius
*/
@RegisterSystem(RegisterMode.CLIENT)
public class MeshRenderer extends BaseComponentSystem implements RenderSystem {
private static final Logger logger = LoggerFactory.getLogger(MeshRenderer.class);
@In
private NetworkSystem network;
@In
private LocalPlayer localPlayer;
@In
private Config config;
@In
private WorldRenderer worldRenderer;
@In
private WorldProvider worldProvider;
private SetMultimap<Material, EntityRef> opaqueMesh = HashMultimap.create();
private SetMultimap<Material, EntityRef> translucentMesh = HashMultimap.create();
private Map<EntityRef, Material> opaqueEntities = Maps.newHashMap();
private Map<EntityRef, Material> translucentEntities = Maps.newHashMap();
private NearestSortingList opaqueMeshSorter = new NearestSortingList();
private NearestSortingList translucentMeshSorter = new NearestSortingList();
private int lastRendered;
@Override
public void initialise() {
opaqueMeshSorter.initialise(worldRenderer.getActiveCamera());
translucentMeshSorter.initialise(worldRenderer.getActiveCamera());
}
@Override
public void shutdown() {
opaqueMeshSorter.stop();
translucentMeshSorter.stop();
}
@ReceiveEvent(components = {MeshComponent.class, LocationComponent.class})
public void onNewMesh(OnActivatedComponent event, EntityRef entity) {
addMesh(entity);
}
private boolean isHidden(EntityRef entity, MeshComponent mesh) {
if (!mesh.hideFromOwner) {
return false;
}
ClientComponent owner = network.getOwnerEntity(entity).getComponent(ClientComponent.class);
return (owner != null && owner.local);
}
private void addMesh(EntityRef entity) {
MeshComponent meshComp = entity.getComponent(MeshComponent.class);
if (meshComp.material != null) {
if (meshComp.translucent) {
translucentMesh.put(meshComp.material, entity);
translucentEntities.put(entity, meshComp.material);
translucentMeshSorter.add(entity);
} else {
opaqueMesh.put(meshComp.material, entity);
opaqueEntities.put(entity, meshComp.material);
opaqueMeshSorter.add(entity);
}
}
}
@ReceiveEvent(components = {CharacterComponent.class, MeshComponent.class})
public void onLocalMesh(OnChangedComponent event, EntityRef entity) {
removeMesh(entity);
addMesh(entity);
}
@ReceiveEvent(components = {MeshComponent.class})
public void onChangeMesh(OnChangedComponent event, EntityRef entity) {
removeMesh(entity);
addMesh(entity);
}
private void removeMesh(EntityRef entity) {
Material mat = opaqueEntities.remove(entity);
if (mat != null) {
opaqueMesh.remove(mat, entity);
opaqueMeshSorter.remove(entity);
} else {
mat = translucentEntities.remove(entity);
if (mat != null) {
translucentMesh.remove(mat, entity);
translucentMeshSorter.remove(entity);
}
}
}
@ReceiveEvent(components = {MeshComponent.class, LocationComponent.class})
public void onDestroyMesh(BeforeDeactivateComponent event, EntityRef entity) {
removeMesh(entity);
}
@Override
public void renderAlphaBlend() {
if (config.getRendering().isRenderNearest()) {
renderAlphaBlend(Arrays.asList(translucentMeshSorter.getNearest(config.getRendering().getMeshLimit())));
} else {
renderAlphaBlend(translucentEntities.keySet());
}
}
private void renderAlphaBlend(Iterable<EntityRef> entityRefs) {
Vector3f cameraPosition = worldRenderer.getActiveCamera().getPosition();
FloatBuffer tempMatrixBuffer44 = BufferUtils.createFloatBuffer(16);
FloatBuffer tempMatrixBuffer33 = BufferUtils.createFloatBuffer(12);
for (EntityRef entity : entityRefs) {
MeshComponent meshComp = entity.getComponent(MeshComponent.class);
if (meshComp.material.isRenderable()) {
meshComp.material.enable();
LocationComponent location = entity.getComponent(LocationComponent.class);
if (location == null) {
continue;
}
if (isHidden(entity, meshComp)) {
continue;
}
Quat4f worldRot = location.getWorldRotation();
Vector3f worldPos = location.getWorldPosition();
float worldScale = location.getWorldScale();
AABB aabb = meshComp.mesh.getAABB().transform(worldRot, worldPos, worldScale);
if (worldRenderer.getActiveCamera().hasInSight(aabb)) {
Vector3f worldPositionCameraSpace = new Vector3f();
worldPositionCameraSpace.sub(worldPos, cameraPosition);
Matrix4f matrixCameraSpace = new Matrix4f(worldRot, worldPositionCameraSpace, worldScale);
Matrix4f modelViewMatrix = MatrixUtils.calcModelViewMatrix(worldRenderer.getActiveCamera().getViewMatrix(), matrixCameraSpace);
MatrixUtils.matrixToFloatBuffer(modelViewMatrix, tempMatrixBuffer44);
meshComp.material.setMatrix4("projectionMatrix", worldRenderer.getActiveCamera().getProjectionMatrix());
meshComp.material.setMatrix4("worldViewMatrix", tempMatrixBuffer44, true);
MatrixUtils.matrixToFloatBuffer(MatrixUtils.calcNormalMatrix(modelViewMatrix), tempMatrixBuffer33);
meshComp.material.setMatrix3("normalMatrix", tempMatrixBuffer33, true);
meshComp.material.setFloat4("colorOffset", meshComp.color.rf(), meshComp.color.gf(), meshComp.color.bf(), meshComp.color.af(), true);
meshComp.material.setFloat("light", worldRenderer.getRenderingLightValueAt(worldPos), true);
meshComp.material.setFloat("sunlight", worldRenderer.getSunlightValueAt(worldPos), true);
OpenGLMesh mesh = (OpenGLMesh) meshComp.mesh;
meshComp.material.bindTextures();
mesh.render();
}
}
}
}
@Override
public void renderOpaque() {
if (config.getRendering().isRenderNearest()) {
SetMultimap<Material, EntityRef> entitiesToRender = HashMultimap.create();
for (EntityRef entity : Arrays.asList(opaqueMeshSorter.getNearest(config.getRendering().getMeshLimit()))) {
MeshComponent meshComp = entity.getComponent(MeshComponent.class);
if (meshComp != null && meshComp.material != null) {
entitiesToRender.put(meshComp.material, entity);
}
}
renderOpaque(entitiesToRender);
} else {
renderOpaque(opaqueMesh);
}
}
private void renderOpaque(SetMultimap<Material, EntityRef> meshByMaterial) {
Vector3f cameraPosition = worldRenderer.getActiveCamera().getPosition();
Quat4f worldRot = new Quat4f();
Vector3f worldPos = new Vector3f();
Transform transWorldSpace = new Transform();
FloatBuffer tempMatrixBuffer44 = BufferUtils.createFloatBuffer(16);
FloatBuffer tempMatrixBuffer33 = BufferUtils.createFloatBuffer(12);
for (Material material : meshByMaterial.keySet()) {
if (material.isRenderable()) {
OpenGLMesh lastMesh = null;
material.enable();
material.setFloat("sunlight", 1.0f);
material.setFloat("blockLight", 1.0f);
material.setMatrix4("projectionMatrix", worldRenderer.getActiveCamera().getProjectionMatrix());
material.bindTextures();
Set<EntityRef> entities = meshByMaterial.get(material);
lastRendered = entities.size();
for (EntityRef entity : entities) {
MeshComponent meshComp = entity.getComponent(MeshComponent.class);
LocationComponent location = entity.getComponent(LocationComponent.class);
if (isHidden(entity, meshComp) || location == null || meshComp.mesh == null
|| !worldProvider.isBlockRelevant(location.getWorldPosition())) {
continue;
}
if (meshComp.mesh.isDisposed()) {
logger.error("Attempted to render disposed mesh");
continue;
}
location.getWorldRotation(worldRot);
location.getWorldPosition(worldPos);
float worldScale = location.getWorldScale();
javax.vecmath.Matrix4f matrixWorldSpace = new javax.vecmath.Matrix4f(VecMath.to(worldRot), VecMath.to(worldPos), worldScale);
transWorldSpace.set(matrixWorldSpace);
Vector3f worldPositionCameraSpace = new Vector3f();
worldPositionCameraSpace.sub(worldPos, cameraPosition);
Matrix4f matrixCameraSpace = new Matrix4f(worldRot, worldPositionCameraSpace, worldScale);
AABB aabb = meshComp.mesh.getAABB().transform(transWorldSpace);
if (worldRenderer.getActiveCamera().hasInSight(aabb)) {
if (meshComp.mesh != lastMesh) {
if (lastMesh != null) {
lastMesh.postRender();
}
lastMesh = (OpenGLMesh) meshComp.mesh;
lastMesh.preRender();
}
Matrix4f modelViewMatrix = MatrixUtils.calcModelViewMatrix(worldRenderer.getActiveCamera().getViewMatrix(), matrixCameraSpace);
MatrixUtils.matrixToFloatBuffer(modelViewMatrix, tempMatrixBuffer44);
material.setMatrix4("worldViewMatrix", tempMatrixBuffer44, true);
MatrixUtils.matrixToFloatBuffer(MatrixUtils.calcNormalMatrix(modelViewMatrix), tempMatrixBuffer33);
material.setMatrix3("normalMatrix", tempMatrixBuffer33, true);
material.setFloat3("colorOffset", meshComp.color.rf(), meshComp.color.gf(), meshComp.color.bf(), true);
material.setFloat("sunlight", worldRenderer.getSunlightValueAt(worldPos), true);
material.setFloat("blockLight", worldRenderer.getBlockLightValueAt(worldPos), true);
lastMesh.doRender();
}
}
if (lastMesh != null) {
lastMesh.postRender();
}
}
}
}
@Override
public void renderOverlay() {
}
@Override
public void renderFirstPerson() {
}
@Override
public void renderShadows() {
}
public int getLastRendered() {
return lastRendered;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.flink;
import junit.framework.TestCase;
import net.jodah.concurrentunit.Waiter;
import org.apache.commons.io.FileUtils;
import org.apache.zeppelin.display.AngularObjectRegistry;
import org.apache.zeppelin.display.ui.CheckBox;
import org.apache.zeppelin.display.ui.Select;
import org.apache.zeppelin.display.ui.TextBox;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterException;
import org.apache.zeppelin.interpreter.InterpreterGroup;
import org.apache.zeppelin.interpreter.InterpreterOutput;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResultMessage;
import org.apache.zeppelin.interpreter.remote.RemoteInterpreterEventClient;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.TimeoutException;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
/**
 * Integration tests for {@link FlinkInterpreter} running Flink in local mode.
 * Each test gets a freshly opened interpreter (see {@link #setUp()}).
 */
public class FlinkInterpreterTest {

    private static final Logger LOGGER = LoggerFactory.getLogger(FlinkInterpreterTest.class);

    private FlinkInterpreter interpreter;
    private AngularObjectRegistry angularObjectRegistry;

    /** Opens a local-mode Flink interpreter (4 task managers) before each test. */
    @Before
    public void setUp() throws InterpreterException {
        Properties p = new Properties();
        p.setProperty("zeppelin.flink.printREPLOutput", "true");
        p.setProperty("zeppelin.flink.scala.color", "false");
        p.setProperty("flink.execution.mode", "local");
        p.setProperty("local.number-taskmanager", "4");
        interpreter = new FlinkInterpreter(p);
        InterpreterGroup intpGroup = new InterpreterGroup();
        interpreter.setInterpreterGroup(intpGroup);
        interpreter.open();
        angularObjectRegistry = new AngularObjectRegistry("flink", null);
    }

    @After
    public void tearDown() throws InterpreterException {
        if (interpreter != null) {
            interpreter.close();
        }
    }

    /**
     * Exercises basic Scala REPL behaviour: value definitions, stdout/stderr capture,
     * incomplete/erroneous statements, comments, functions, companion objects, and the
     * z.input/z.checkbox/z.select dynamic forms.
     */
    @Test
    public void testScalaBasic() throws InterpreterException, IOException {
        InterpreterContext context = getInterpreterContext();
        InterpreterResult result = interpreter.interpret("val a=\"hello world\"", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
        assertEquals(InterpreterResult.Type.TEXT, resultMessages.get(0).getType());
        assertEquals("a: String = hello world\n", resultMessages.get(0).getData());

        context = getInterpreterContext();
        result = interpreter.interpret("print(a)", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        resultMessages = context.out.toInterpreterResultMessage();
        assertEquals(InterpreterResult.Type.TEXT, resultMessages.get(0).getType());
        assertEquals("hello world", resultMessages.get(0).getData());

        // java stdout
        context = getInterpreterContext();
        result = interpreter.interpret("System.out.print(a)", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        resultMessages = context.out.toInterpreterResultMessage();
        assertEquals(InterpreterResult.Type.TEXT, resultMessages.get(0).getType());
        assertEquals("hello world", resultMessages.get(0).getData());

        // java stderr
        context = getInterpreterContext();
        result = interpreter.interpret("System.err.print(a)", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        resultMessages = context.out.toInterpreterResultMessage();
        assertEquals(InterpreterResult.Type.TEXT, resultMessages.get(0).getType());
        assertEquals("hello world", resultMessages.get(0).getData());

        // incomplete statement
        result = interpreter.interpret("println(a", getInterpreterContext());
        assertEquals(InterpreterResult.Code.INCOMPLETE, result.code());

        // syntax error (undefined value)
        context = getInterpreterContext();
        result = interpreter.interpret("println(b)", context);
        assertEquals(InterpreterResult.Code.ERROR, result.code());
        resultMessages = context.out.toInterpreterResultMessage();
        assertEquals(InterpreterResult.Type.TEXT, resultMessages.get(0).getType());
        assertTrue(resultMessages.get(0).getData(),
                resultMessages.get(0).getData().contains("not found: value b"));

        // statement spanning multiple lines
        context = getInterpreterContext();
        result = interpreter.interpret("\"123\".\ntoInt", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());

        // block comment on a single line
        context = getInterpreterContext();
        result = interpreter.interpret("/*comment here*/", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());

        context = getInterpreterContext();
        result = interpreter.interpret("/*comment here*/\nprint(\"hello world\")",
                context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());

        // multiple line comment
        context = getInterpreterContext();
        result = interpreter.interpret("/*line 1 \n line 2*/", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());

        // function definition and invocation
        context = getInterpreterContext();
        result = interpreter.interpret("def add(x:Int, y:Int)\n{ return x+y }", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        result = interpreter.interpret("print(add(1,2))", getInterpreterContext());
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        result = interpreter.interpret("/*line 1 \n line 2*/print(\"hello world\")",
                getInterpreterContext());
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());

        // companion object
        result = interpreter.interpret("class Counter {\n " +
                "var value: Long = 0} \n" +
                "object Counter {\n def apply(x: Long) = new Counter()\n}", getInterpreterContext());
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());

        // case class — disabled, kept for reference
        // context = getInterpreterContext();result = interpreter.interpret(
        //         "case class WC(word: String, count: Int)\n" +
        //                 "val wordCounts = benv.fromElements(\n" +
        //                 "WC(\"hello\", 1),\n" +
        //                 "WC(\"world\", 2),\n" +
        //                 "WC(\"world\", 8))\n" +
        //                 "wordCounts.collect()",
        //         context);
        // assertEquals(InterpreterResult.Code.SUCCESS, result.code());

        // z.input text box form
        context = getInterpreterContext();
        result = interpreter.interpret("z.input(\"name\", \"default_name\")",
                context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, context.getGui().getForms().size());
        assertTrue(context.getGui().getForms().get("name") instanceof TextBox);
        TextBox textBox = (TextBox) context.getGui().getForms().get("name");
        assertEquals("name", textBox.getName());
        assertEquals("default_name", textBox.getDefaultValue());

        // z.checkbox form
        context = getInterpreterContext();
        result = interpreter.interpret("z.checkbox(\"checkbox_1\", " +
                "Seq(\"value_2\"), Seq((\"value_1\", \"name_1\"), (\"value_2\", \"name_2\")))", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, context.getGui().getForms().size());
        assertTrue(context.getGui().getForms().get("checkbox_1") instanceof CheckBox);
        CheckBox checkBox = (CheckBox) context.getGui().getForms().get("checkbox_1");
        assertEquals("checkbox_1", checkBox.getName());
        assertEquals(1, checkBox.getDefaultValue().length);
        assertEquals("value_2", checkBox.getDefaultValue()[0]);
        assertEquals(2, checkBox.getOptions().length);
        assertEquals("value_1", checkBox.getOptions()[0].getValue());
        assertEquals("name_1", checkBox.getOptions()[0].getDisplayName());
        assertEquals("value_2", checkBox.getOptions()[1].getValue());
        assertEquals("name_2", checkBox.getOptions()[1].getDisplayName());

        // z.select form
        context = getInterpreterContext();
        result = interpreter.interpret("z.select(\"select_1\", Seq(\"value_2\"), " +
                "Seq((\"value_1\", \"name_1\"), (\"value_2\", \"name_2\")))", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        assertEquals(1, context.getGui().getForms().size());
        assertTrue(context.getGui().getForms().get("select_1") instanceof Select);
        Select select = (Select) context.getGui().getForms().get("select_1");
        assertEquals("select_1", select.getName());
        // TODO(zjffdu) it seems a bug of GUI, the default value should be 'value_2',
        // but it is List(value_2)
        // assertEquals("value_2", select.getDefaultValue());
        assertEquals(2, select.getOptions().length);
        assertEquals("value_1", select.getOptions()[0].getValue());
        assertEquals("name_1", select.getOptions()[0].getDisplayName());
        assertEquals("value_2", select.getOptions()[1].getValue());
        assertEquals("name_2", select.getOptions()[1].getDisplayName());
    }

    /** Verifies z.show on a batch DataSet (unsupported after Flink 1.14). */
    @Test
    public void testZShow() throws InterpreterException, IOException {
        // show dataset
        InterpreterContext context = getInterpreterContext();
        InterpreterResult result = interpreter.interpret(
                "val data = benv.fromElements((1, \"jeff\"), (2, \"andy\"), (3, \"james\"))",
                context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());

        context = getInterpreterContext();
        result = interpreter.interpret("z.show(data)", context);
        assertEquals(context.out.toString(), InterpreterResult.Code.SUCCESS, result.code());
        List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
        if (interpreter.getFlinkVersion().isAfterFlink114()) {
            assertEquals(InterpreterResult.Type.TEXT, resultMessages.get(0).getType());
            assertEquals("z.show(DataSet) is not supported after Flink 1.14", resultMessages.get(0).getData());
        } else {
            assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType());
            assertEquals("_1\t_2\n1\tjeff\n2\tandy\n3\tjames\n", resultMessages.get(0).getData());
        }
    }

    /** Checks code completion on a user-defined value and on the benv environment. */
    @Test
    public void testCompletion() throws InterpreterException {
        InterpreterContext context = getInterpreterContext();
        InterpreterResult result = interpreter.interpret("val a=\"hello world\"", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        List<InterpreterCompletion> completions = interpreter.completion("a.", 2,
                getInterpreterContext());
        assertTrue(completions.size() > 0);

        completions = interpreter.completion("benv.", 5, getInterpreterContext());
        assertTrue(completions.size() > 0);
    }

    /** Runs a batch word count and verifies the printed (word, count) pairs. */
    @Test
    public void testBatchWordCount() throws InterpreterException, IOException {
        InterpreterContext context = getInterpreterContext();
        InterpreterResult result = interpreter.interpret(
                "val data = benv.fromElements(\"hello world\", \"hello flink\", \"hello hadoop\")",
                context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());

        context = getInterpreterContext();
        result = interpreter.interpret(
                "data.flatMap(line => line.split(\"\\\\s\"))\n" +
                        "  .map(w => (w, 1))\n" +
                        "  .groupBy(0)\n" +
                        "  .sum(1)\n" +
                        "  .print()", context);
        assertEquals(context.out.toString(), InterpreterResult.Code.SUCCESS, result.code());
        // Output order is nondeterministic; compare as sorted arrays.
        String[] expectedCounts = {"(hello,3)", "(world,1)", "(flink,1)", "(hadoop,1)"};
        Arrays.sort(expectedCounts);
        String[] counts = context.out.toInterpreterResultMessage().get(0).getData().split("\n");
        Arrays.sort(counts);
        assertArrayEquals(expectedCounts, counts);
    }

    /** Runs a streaming word count and verifies each expected pair appears in the output. */
    @Test
    public void testStreamWordCount() throws InterpreterException, IOException {
        InterpreterContext context = getInterpreterContext();
        InterpreterResult result = interpreter.interpret(
                "val data = senv.fromElements(\"hello world\", \"hello flink\", \"hello hadoop\")",
                context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());

        context = getInterpreterContext();
        result = interpreter.interpret(
                "data.flatMap(line => line.split(\"\\\\s\"))\n" +
                        "  .map(w => (w, 1))\n" +
                        "  .keyBy(0)\n" +
                        "  .sum(1)\n" +
                        "  .print()\n" +
                        "senv.execute()", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        String[] expectedCounts = {"(hello,3)", "(world,1)", "(flink,1)", "(hadoop,1)"};
        String output = context.out.toInterpreterResultMessage().get(0).getData();
        for (String expectedCount : expectedCounts) {
            assertTrue(output, output.contains(expectedCount));
        }
    }

    /**
     * Starts a streaming SQL query on a background thread, cancels it, then resumes the
     * same query and verifies it completes successfully with table output.
     */
    @Test
    public void testCancelStreamSql() throws IOException, InterpreterException, InterruptedException, TimeoutException {
        String initStreamScalaScript = FlinkStreamSqlInterpreterTest.getInitStreamScript(1000);
        InterpreterResult result = interpreter.interpret(initStreamScalaScript,
                getInterpreterContext());
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());

        final Waiter waiter = new Waiter();
        Thread thread = new Thread(() -> {
            try {
                InterpreterContext context = getInterpreterContext();
                InterpreterResult result2 = interpreter.interpret(
                        "val table = stenv.sqlQuery(\"select url, count(1) as pv from " +
                                "log group by url\")\nz.show(table, streamType=\"update\")", context);
                LOGGER.info("---------------" + context.out.toString());
                LOGGER.info("---------------" + result2);
                waiter.assertTrue(context.out.toString().contains("Job was cancelled"));
                waiter.assertEquals(InterpreterResult.Code.ERROR, result2.code());
            } catch (Exception e) {
                e.printStackTrace();
                waiter.fail("Should not fail here");
            }
            waiter.resume();
        });
        thread.start();

        // the streaming job will run for 20 seconds. check init_stream.scala
        // sleep 20 seconds to make sure the job is started but not finished
        Thread.sleep(20 * 1000);

        InterpreterContext context = getInterpreterContext();
        interpreter.cancel(context);
        waiter.await(10 * 1000);
        // resume job
        // FIX: capture and assert the result of the resumed query. Previously the stale
        // `result` from the init script was re-asserted and the resume result was dropped.
        result = interpreter.interpret("val table = stenv.sqlQuery(\"select url, count(1) as pv from " +
                "log group by url\")\nz.show(table, streamType=\"update\")", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
        assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType());
        TestCase.assertTrue(resultMessages.toString(),
                resultMessages.get(0).getData().contains("url\tpv\n"));
    }

    // TODO(zjffdu) flaky test
    // @Test
    public void testResumeStreamSqlFromSavePoint() throws IOException, InterpreterException, InterruptedException, TimeoutException {
        String initStreamScalaScript = FlinkStreamSqlInterpreterTest.getInitStreamScript(1000);
        InterpreterResult result = interpreter.interpret(initStreamScalaScript,
                getInterpreterContext());
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());

        File savePointDir = FileUtils.getTempDirectory();
        final Waiter waiter = new Waiter();
        Thread thread = new Thread(() -> {
            try {
                InterpreterContext context = getInterpreterContext();
                context.getLocalProperties().put("savePointDir", savePointDir.getAbsolutePath());
                context.getLocalProperties().put("parallelism", "1");
                context.getLocalProperties().put("maxParallelism", "10");
                InterpreterResult result2 = interpreter.interpret(
                        "val table = stenv.sqlQuery(\"select url, count(1) as pv from " +
                                "log group by url\")\nz.show(table, streamType=\"update\")", context);
                System.out.println("------------" + context.out.toString());
                System.out.println("------------" + result2);
                waiter.assertTrue(context.out.toString().contains("url\tpv\n"));
                waiter.assertEquals(InterpreterResult.Code.SUCCESS, result2.code());
            } catch (Exception e) {
                e.printStackTrace();
                waiter.fail("Should not fail here");
            }
            waiter.resume();
        });
        thread.start();

        // the streaming job will run for 60 seconds. check init_stream.scala
        // sleep 20 seconds to make sure the job is started but not finished
        Thread.sleep(20 * 1000);

        InterpreterContext context = getInterpreterContext();
        context.getLocalProperties().put("savePointDir", savePointDir.getAbsolutePath());
        context.getLocalProperties().put("parallelism", "2");
        context.getLocalProperties().put("maxParallelism", "10");
        interpreter.cancel(context);
        waiter.await(20 * 1000);
        // resume job from savepoint
        // FIX: capture and assert the result of the resumed query (previously the stale
        // `result` from the init script was re-asserted).
        result = interpreter.interpret(
                "val table = stenv.sqlQuery(\"select url, count(1) as pv from " +
                        "log group by url\")\nz.show(table, streamType=\"update\")", context);
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
        List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
        assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType());
        TestCase.assertTrue(resultMessages.toString(),
                resultMessages.get(0).getData().contains("url\tpv\n"));
    }

    // Builds a fresh InterpreterContext with a mocked event client and registers it as
    // the thread-local current context.
    private InterpreterContext getInterpreterContext() {
        InterpreterContext context = InterpreterContext.builder()
                .setParagraphId("paragraphId")
                .setInterpreterOut(new InterpreterOutput())
                .setAngularObjectRegistry(angularObjectRegistry)
                .setIntpEventClient(mock(RemoteInterpreterEventClient.class))
                .build();
        InterpreterContext.set(context);
        return context;
    }
}
| |
/*
* Copyright 2020 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.client.circuitbreaker;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import com.linecorp.armeria.client.ClientRequestContext;
import com.linecorp.armeria.common.HttpResponse;
import com.linecorp.armeria.common.HttpResponseDuplicator;
import com.linecorp.armeria.common.Response;
import com.linecorp.armeria.common.annotation.Nullable;
final class CircuitBreakerRuleUtil {
// Shared pre-completed decision futures. They double as sentinels: the orElse()
// combinators below compare returned futures by identity (==) to short-circuit
// without registering a completion callback.
static final CompletableFuture<CircuitBreakerDecision> SUCCESS_DECISION =
        CompletableFuture.completedFuture(CircuitBreakerDecision.success());
static final CompletableFuture<CircuitBreakerDecision> FAILURE_DECISION =
        CompletableFuture.completedFuture(CircuitBreakerDecision.failure());
static final CompletableFuture<CircuitBreakerDecision> IGNORE_DECISION =
        CompletableFuture.completedFuture(CircuitBreakerDecision.ignore());
static final CompletableFuture<CircuitBreakerDecision> NEXT_DECISION =
        CompletableFuture.completedFuture(CircuitBreakerDecision.next());
/**
 * Adapts a content-agnostic {@link CircuitBreakerRule} into a
 * {@link CircuitBreakerRuleWithContent}. The response argument is ignored; decisions are
 * delegated to the wrapped rule using only the context and cause.
 */
static <T extends Response> CircuitBreakerRuleWithContent<T> fromCircuitBreakerRule(
        CircuitBreakerRule circuitBreakerRule) {
    return new CircuitBreakerRuleWithContent<T>() {
        @Override
        public CompletionStage<CircuitBreakerDecision> shouldReportAsSuccess(ClientRequestContext ctx,
                                                                             @Nullable T response,
                                                                             @Nullable Throwable cause) {
            // The response content is intentionally unused by a plain CircuitBreakerRule.
            return circuitBreakerRule.shouldReportAsSuccess(ctx, cause);
        }

        @Override
        public boolean requiresResponseTrailers() {
            return circuitBreakerRule.requiresResponseTrailers();
        }
    };
}
/**
 * Adapts a {@link CircuitBreakerRuleWithContent} into a content-agnostic
 * {@link CircuitBreakerRule} by always passing {@code null} as the response, so only the
 * context and cause influence the decision.
 */
static <T extends Response> CircuitBreakerRule fromCircuitBreakerRuleWithContent(
        CircuitBreakerRuleWithContent<T> circuitBreakerRuleWithContent) {
    return new CircuitBreakerRule() {
        @Override
        public CompletionStage<CircuitBreakerDecision> shouldReportAsSuccess(ClientRequestContext ctx,
                                                                             @Nullable Throwable cause) {
            // No response available in this direction of the adaptation.
            return circuitBreakerRuleWithContent.shouldReportAsSuccess(ctx, null, cause);
        }

        @Override
        public boolean requiresResponseTrailers() {
            return circuitBreakerRuleWithContent.requiresResponseTrailers();
        }
    };
}
/**
 * Returns a rule that evaluates {@code first} and falls back to {@code second} only when
 * {@code first} decides {@linkplain CircuitBreakerDecision#next() next}.
 */
static CircuitBreakerRule orElse(CircuitBreakerRule first, CircuitBreakerRule second) {
    // The combined rule needs response trailers if either delegate does.
    final boolean requiresResponseTrailers = first.requiresResponseTrailers() ||
                                             second.requiresResponseTrailers();
    return new CircuitBreakerRule() {
        @Override
        public CompletionStage<CircuitBreakerDecision> shouldReportAsSuccess(ClientRequestContext ctx,
                                                                             @Nullable Throwable cause) {
            final CompletionStage<CircuitBreakerDecision> decisionFuture =
                    first.shouldReportAsSuccess(ctx, cause);
            // Fast path: identity-compare against the shared pre-completed futures to
            // avoid registering a callback when the delegate returned a singleton.
            if (decisionFuture == SUCCESS_DECISION ||
                decisionFuture == FAILURE_DECISION ||
                decisionFuture == IGNORE_DECISION) {
                return decisionFuture;
            }
            if (decisionFuture == NEXT_DECISION) {
                return second.shouldReportAsSuccess(ctx, cause);
            }
            // Slow path: wait for the first decision and delegate to the second rule
            // only when it decided 'next'. NOTE: the != comparison assumes
            // CircuitBreakerDecision.next() returns a canonical instance — confirm.
            return decisionFuture.thenCompose(decision -> {
                if (decision != CircuitBreakerDecision.next()) {
                    return decisionFuture;
                } else {
                    return second.shouldReportAsSuccess(ctx, cause);
                }
            });
        }

        @Override
        public boolean requiresResponseTrailers() {
            return requiresResponseTrailers;
        }
    };
}
/**
 * Returns a content-aware rule that evaluates {@code first} (content-agnostic) and falls back
 * to {@code second} on a {@linkplain CircuitBreakerDecision#next() next} decision. When the
 * response is an {@link HttpResponse}, {@code second} is wrapped so it receives a fresh
 * duplicate of the body instead of the caller's (possibly consumed) response.
 */
static <T extends Response> CircuitBreakerRuleWithContent<T> orElse(
        CircuitBreakerRule first, CircuitBreakerRuleWithContent<T> second) {
    final boolean requiresResponseTrailers = first.requiresResponseTrailers() ||
                                             second.requiresResponseTrailers();
    return new CircuitBreakerRuleWithContent<T>() {
        @Override
        public CompletionStage<CircuitBreakerDecision> shouldReportAsSuccess(ClientRequestContext ctx,
                                                                             @Nullable T response,
                                                                             @Nullable Throwable cause) {
            if (response instanceof HttpResponse) {
                // NOTE(review): the duplicator is closed when handle(...) returns; if the second
                // rule is only reached after an asynchronous first decision completes, confirm
                // duplicate() is still legal at that point (see withDuplicator()).
                try (HttpResponseDuplicator duplicator = ((HttpResponse) response).toDuplicator()) {
                    @SuppressWarnings("unchecked")
                    final CircuitBreakerRuleWithContent<T> duplicatedSecond =
                            (CircuitBreakerRuleWithContent<T>) withDuplicator(
                                    (CircuitBreakerRuleWithContent<HttpResponse>) second, duplicator);
                    return handle(ctx, response, cause, fromCircuitBreakerRule(first), duplicatedSecond);
                }
            } else {
                return handle(ctx, response, cause, fromCircuitBreakerRule(first), second);
            }
        }
        @Override
        public boolean requiresResponseTrailers() {
            return requiresResponseTrailers;
        }
    };
}
/**
 * Returns a content-aware rule that evaluates {@code first} (content-aware) and falls back to
 * {@code second} (content-agnostic) on a {@linkplain CircuitBreakerDecision#next() next}
 * decision. {@link HttpResponse}s are duplicated so {@code first} can consume the body without
 * exhausting the caller's response.
 */
static <T extends Response> CircuitBreakerRuleWithContent<T> orElse(CircuitBreakerRuleWithContent<T> first,
                                                                    CircuitBreakerRule second) {
    final boolean requiresResponseTrailers = first.requiresResponseTrailers() ||
                                             second.requiresResponseTrailers();
    return new CircuitBreakerRuleWithContent<T>() {
        @Override
        public CompletionStage<CircuitBreakerDecision> shouldReportAsSuccess(ClientRequestContext ctx,
                                                                             @Nullable T response,
                                                                             @Nullable Throwable cause) {
            if (response instanceof HttpResponse) {
                try (HttpResponseDuplicator duplicator = ((HttpResponse) response).toDuplicator()) {
                    @SuppressWarnings("unchecked")
                    final CircuitBreakerRuleWithContent<T> duplicatedFirst =
                            (CircuitBreakerRuleWithContent<T>) withDuplicator(
                                    (CircuitBreakerRuleWithContent<HttpResponse>) first, duplicator);
                    return handle(ctx, response, cause, duplicatedFirst, fromCircuitBreakerRule(second));
                }
            } else {
                return handle(ctx, response, cause, first, fromCircuitBreakerRule(second));
            }
        }
        @Override
        public boolean requiresResponseTrailers() {
            return requiresResponseTrailers;
        }
    };
}
/**
 * Returns a content-aware rule that evaluates {@code first} and falls back to {@code second} on
 * a {@linkplain CircuitBreakerDecision#next() next} decision. Both rules may consume the body,
 * so each gets its own duplicate of an {@link HttpResponse}.
 */
@SuppressWarnings("unchecked")
static <T extends Response> CircuitBreakerRuleWithContent<T> orElse(
        CircuitBreakerRuleWithContent<T> first, CircuitBreakerRuleWithContent<T> second) {
    final boolean requiresResponseTrailers = first.requiresResponseTrailers() ||
                                             second.requiresResponseTrailers();
    return new CircuitBreakerRuleWithContent<T>() {
        @Override
        public CompletionStage<CircuitBreakerDecision> shouldReportAsSuccess(ClientRequestContext ctx,
                                                                             @Nullable T response,
                                                                             @Nullable Throwable cause) {
            if (response instanceof HttpResponse) {
                try (HttpResponseDuplicator duplicator = ((HttpResponse) response).toDuplicator()) {
                    final CircuitBreakerRuleWithContent<T> duplicatedFirst =
                            (CircuitBreakerRuleWithContent<T>) withDuplicator(
                                    (CircuitBreakerRuleWithContent<HttpResponse>) first, duplicator);
                    final CircuitBreakerRuleWithContent<T> duplicatedSecond =
                            (CircuitBreakerRuleWithContent<T>) withDuplicator(
                                    (CircuitBreakerRuleWithContent<HttpResponse>) second, duplicator);
                    return handle(ctx, response, cause, duplicatedFirst, duplicatedSecond);
                }
            } else {
                return handle(ctx, response, cause, first, second);
            }
        }
        @Override
        public boolean requiresResponseTrailers() {
            return requiresResponseTrailers;
        }
    };
}
/**
 * Common fallback logic shared by the {@code orElse} overloads: evaluates {@code first}; if its
 * decision is {@linkplain CircuitBreakerDecision#next() next}, evaluates {@code second} instead.
 * Identity checks against the cached pre-completed futures (SUCCESS/FAILURE/IGNORE/NEXT,
 * declared on this class) short-circuit the common path without allocating continuations.
 */
private static <T extends Response> CompletionStage<CircuitBreakerDecision> handle(
        ClientRequestContext ctx, @Nullable T response, @Nullable Throwable cause,
        CircuitBreakerRuleWithContent<T> first, CircuitBreakerRuleWithContent<T> second) {
    final CompletionStage<CircuitBreakerDecision> decisionFuture =
            first.shouldReportAsSuccess(ctx, response, cause);
    if (decisionFuture == SUCCESS_DECISION ||
        decisionFuture == FAILURE_DECISION ||
        decisionFuture == IGNORE_DECISION) {
        return decisionFuture;
    }
    if (decisionFuture == NEXT_DECISION) {
        return second.shouldReportAsSuccess(ctx, response, cause);
    }
    // Custom future from the first rule: compare its completed value instead.
    // (Identity compare — assumes CircuitBreakerDecision.next() returns a canonical instance.)
    return decisionFuture.thenCompose(decision -> {
        if (decision != CircuitBreakerDecision.next()) {
            return decisionFuture;
        } else {
            return second.shouldReportAsSuccess(ctx, response, cause);
        }
    });
}
/**
 * Wraps a content-aware rule so that it always receives a fresh duplicate of the response from
 * {@code duplicator}, ignoring the {@code response} argument supplied by the caller.
 */
private static CircuitBreakerRuleWithContent<HttpResponse>
        withDuplicator(CircuitBreakerRuleWithContent<HttpResponse> ruleWithContent,
                       HttpResponseDuplicator duplicator) {
    return new CircuitBreakerRuleWithContent<HttpResponse>() {
        @Override
        public CompletionStage<CircuitBreakerDecision> shouldReportAsSuccess(
                ClientRequestContext ctx, @Nullable HttpResponse response, @Nullable Throwable cause) {
            // 'response' is deliberately unused; the duplicate keeps the original consumable.
            return ruleWithContent.shouldReportAsSuccess(ctx, duplicator.duplicate(), cause);
        }
        @Override
        public boolean requiresResponseTrailers() {
            return ruleWithContent.requiresResponseTrailers();
        }
    };
}
/** Static utility methods only; not instantiable. */
private CircuitBreakerRuleUtil() {}
}
| |
/*
* Copyright (c) 2004 - 2012 Eike Stepper (Berlin, Germany) and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Eike Stepper - initial API and implementation
*/
package org.eclipse.net4j.util.container.delegate;
import org.eclipse.net4j.util.container.ContainerEvent;
import org.eclipse.net4j.util.container.IContainerDelta;
import org.eclipse.net4j.util.event.IListener;
import java.util.Collection;
import java.util.List;
import java.util.ListIterator;
/**
 * A default implementation of a {@link IContainerList container list}: a {@link List} decorator
 * that forwards every operation to a backing delegate list and fires container
 * (ADDED/REMOVED) events on mutating operations.
 *
 * @author Eike Stepper
 * @apiviz.exclude
 */
public class ContainerList<E> extends ContainerCollection<E> implements IContainerList<E>
{
  public ContainerList(List<E> delegate)
  {
    super(delegate);
  }

  /** Narrows the inherited delegate to its {@link List} type. */
  @Override
  public List<E> getDelegate()
  {
    return (List<E>)super.getDelegate();
  }

  /**
   * Inserts the element at the given index and fires an ADDED event for it.
   *
   * @category WRITE
   */
  public void add(int index, E element)
  {
    getDelegate().add(index, element);
    fireAddedEvent(element);
  }

  /**
   * @category WRITE
   */
  public boolean addAll(int index, Collection<? extends E> c)
  {
    // NOTE(review): the event is built from getDelegate() — the *pre-insertion* contents —
    // rather than from the added collection 'c'. Confirm the semantics of
    // ContainerCollection.createEvent(); an ADDED event over 'c' may have been intended.
    ContainerEvent<E> event = createEvent(getDelegate(), IContainerDelta.Kind.ADDED);
    getDelegate().addAll(index, c);
    return dispatchEvent(event);
  }

  /**
   * @category READ
   */
  public E get(int index)
  {
    return getDelegate().get(index);
  }

  /**
   * @category READ
   */
  public int indexOf(Object o)
  {
    return getDelegate().indexOf(o);
  }

  /**
   * @category READ
   */
  public int lastIndexOf(Object o)
  {
    return getDelegate().lastIndexOf(o);
  }

  /**
   * Returns an event-firing iterator over the delegate list.
   *
   * @category READ
   */
  public ListIterator<E> listIterator()
  {
    return new DelegatingListIterator(getDelegate().listIterator());
  }

  /**
   * @category READ
   */
  public ListIterator<E> listIterator(int index)
  {
    return new DelegatingListIterator(getDelegate().listIterator(index));
  }

  /**
   * Removes the element at the given index and fires a REMOVED event.
   * Note: no event is fired when the stored element was {@code null}.
   *
   * @category WRITE
   */
  public E remove(int index)
  {
    E removed = getDelegate().remove(index);
    if (removed != null)
    {
      fireRemovedEvent(removed);
    }
    return removed;
  }

  /**
   * Replaces the element at the given index and fires one event carrying both the REMOVED
   * delta for the old element and the ADDED delta for the new one (only if listeners exist).
   *
   * @category WRITE
   */
  public E set(int index, E element)
  {
    E removed = getDelegate().set(index, element);
    ContainerEvent<E> event = new ContainerEvent<E>(ContainerList.this);
    event.addDelta(removed, IContainerDelta.Kind.REMOVED);
    event.addDelta(element, IContainerDelta.Kind.ADDED);
    IListener[] listeners = getListeners();
    if (listeners != null)
    {
      fireEvent(event, listeners);
    }
    return removed;
  }

  /**
   * @category READ
   */
  // NOTE(review): returns the delegate's sub-list directly — mutations through it bypass
  // event firing. Confirm whether that is acceptable for callers.
  public List<E> subList(int fromIndex, int toIndex)
  {
    return getDelegate().subList(fromIndex, toIndex);
  }

  /**
   * A delegating {@link ListIterator list iterator} that fires container events for the
   * iterator's mutating operations ({@link #add(Object)} and {@link #set(Object)}).
   *
   * @author Eike Stepper
   * @apiviz.exclude
   */
  public class DelegatingListIterator extends DelegatingIterator implements ListIterator<E>
  {
    public DelegatingListIterator(ListIterator<E> delegate)
    {
      super(delegate);
    }

    @Override
    public ListIterator<E> getDelegate()
    {
      return (ListIterator<E>)super.getDelegate();
    }

    /**
     * Inserts the element via the delegate iterator and fires an ADDED event.
     *
     * @category WRITE
     */
    public void add(E o)
    {
      getDelegate().add(o);
      fireAddedEvent(o);
      // 'last' is inherited from DelegatingIterator — presumably it tracks the element most
      // recently returned/written so set() can report it as REMOVED; verify there.
      last = o;
    }

    /**
     * Replaces the last element via the delegate iterator, firing one event with a REMOVED
     * delta for the previous element and an ADDED delta for the new one.
     *
     * @category WRITE
     */
    public void set(E o)
    {
      getDelegate().set(o);
      ContainerEvent<E> event = new ContainerEvent<E>(ContainerList.this);
      event.addDelta(last, IContainerDelta.Kind.REMOVED);
      event.addDelta(o, IContainerDelta.Kind.ADDED);
      IListener[] listeners = getListeners();
      if (listeners != null)
      {
        fireEvent(event, listeners);
      }
      last = o;
    }

    /**
     * @category READ
     */
    public boolean hasPrevious()
    {
      return getDelegate().hasPrevious();
    }

    /**
     * @category READ
     */
    public int nextIndex()
    {
      return getDelegate().nextIndex();
    }

    /**
     * @category READ
     */
    public E previous()
    {
      return getDelegate().previous();
    }

    /**
     * @category READ
     */
    public int previousIndex()
    {
      return getDelegate().previousIndex();
    }
  }
}
| |
/*
* Copyright 2015 LG CNS.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scouter.agent.netio.request.handle;
import java.io.File;
import java.io.FileFilter;
import java.io.RandomAccessFile;
import java.lang.management.ManagementFactory;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import javax.management.MBeanServer;
import scouter.agent.netio.request.anotation.RequestHandler;
import scouter.io.DataInputX;
import scouter.io.DataOutputX;
import scouter.lang.pack.MapPack;
import scouter.lang.pack.Pack;
import scouter.lang.value.BooleanValue;
import scouter.lang.value.ListValue;
import scouter.net.RequestCmd;
import scouter.net.TcpFlag;
import scouter.util.DateUtil;
import scouter.util.SystemUtil;
/**
 * Request handlers that create, list, download and delete JVM heap dumps on the agent side.
 * Dumps are written as {@code .hprof} files to the {@code heapdump/} folder relative to the
 * agent's working directory.
 */
public class AgentHeapDump {
	private String folderName = "heapdump";
	private String fileExt = ".hprof";
	// Timestamp of the last dump request; throttles requests to one per 10 seconds.
	// NOTE: not synchronized — concurrent requests may both pass the throttle check.
	private long lastCallTime = 0;

	/**
	 * Triggers a heap dump. Rejects requests arriving within 10 seconds of the previous one
	 * and on pre-1.6 VMs that lack {@code HotSpotDiagnosticMXBean}.
	 *
	 * @param param a {@link MapPack} carrying "time" (dump timestamp) and "fName" (name suffix)
	 * @return a {@link MapPack} with a "success" flag and a human-readable "msg"
	 */
	@RequestHandler(RequestCmd.OBJECT_CALL_HEAP_DUMP)
	public Pack callHeapDump(Pack param) {
		long curTime = System.currentTimeMillis();
		if (curTime - lastCallTime < 10000) {
			return result(false, "please wait 10 sec. from last request...");
		}
		lastCallTime = curTime;
		long time = ((MapPack) param).getLong("time");
		String yyyymmdd = DateUtil.yyyymmdd(time);
		String hhmmss = DateUtil.hhmmss(time);
		// '/' is illegal in a file name; replace it so the name cannot escape the dump directory.
		String fName = yyyymmdd + "-" + hhmmss + ((MapPack) param).getText("fName").replaceAll("/", "_") + fileExt;
		File hprofDir = new File(folderName);
		if (hprofDir.exists() == false) {
			hprofDir.mkdirs();
		}
		// Remove a stale dump of the same name; HotSpot refuses to overwrite existing files.
		File hprofFile = new File(hprofDir, fName);
		if (hprofFile.exists()) {
			hprofFile.delete();
		}
		// dumpHeap only exists on HotSpot 1.6+.
		if (SystemUtil.IS_JAVA_1_5) {
			return result(false, "dumpHeap only works on a Sun Java 1.6+ VM");
		}
		String error = dumpHeap(folderName + "/" + fName);
		if (error == null) {
			return result(true, "Successfully request heap dump...");
		}
		return result(false, error);
	}

	/** Builds the standard success/message response pack shared by all handlers. */
	private static MapPack result(boolean success, String msg) {
		MapPack p = new MapPack();
		p.put("success", new BooleanValue(success));
		p.put("msg", msg);
		return p;
	}

	/**
	 * Dumps the heap to the given file via HotSpot's {@code HotSpotDiagnosticMXBean}, invoked
	 * reflectively so the agent still loads on VMs that lack the class.
	 *
	 * @param fileName target file path for the hprof dump
	 * @return {@code null} on success, otherwise an error description
	 */
	public String dumpHeap(String fileName) {
		Class<?> clazz;
		try {
			clazz = Class.forName("com.sun.management.HotSpotDiagnosticMXBean");
		} catch (ClassNotFoundException e) {
			return "ERROR: dumpHeap only works on a Sun Java 1.6+ VM containing "
					+ "the class com.sun.management.HotSpotDiagnosticMXBean";
		}
		Object hotspotMBean = null;
		try {
			MBeanServer server = ManagementFactory.getPlatformMBeanServer();
			hotspotMBean = ManagementFactory.newPlatformMXBeanProxy(server,
					"com.sun.management:type=HotSpotDiagnostic", clazz);
		} catch (Throwable e) {
			return "ERROR: dumpHeap was unable to obtain the HotSpotDiagnosticMXBean: " + e.getMessage();
		}
		try {
			// dumpHeap(String outputFile, boolean live) — live=true dumps only reachable objects.
			Method method = hotspotMBean.getClass().getMethod("dumpHeap", String.class, Boolean.TYPE);
			method.invoke(hotspotMBean, fileName, true);
		} catch (InvocationTargetException e) {
			// Unwrap to report the real failure thrown by dumpHeap itself.
			Throwable t = e.getCause() != null ? e.getCause() : e;
			return "ERROR: dumpHeap threw a InvocationTargetException: " + t;
		} catch (Throwable e) {
			return "ERROR: dumpHeap threw a Throwable: " + e;
		}
		return null;
	}

	/**
	 * Deletes a previously created heap dump file named by the "delfileName" parameter.
	 *
	 * @return a {@link MapPack} with a "success" flag and a human-readable "msg"
	 */
	@RequestHandler(RequestCmd.OBJECT_DELETE_HEAP_DUMP)
	public Pack deleteHeapDump(Pack param) {
		File delFile = new File(folderName + "/" + ((MapPack) param).getText("delfileName"));
		if (delFile.exists() == false) {
			return result(false, "file \'" + delFile.getName() + "\' is not exist...");
		}
		// Fix: the original ignored delete()'s result and always reported success.
		if (delFile.delete()) {
			return result(true, "Successfully deleted...");
		}
		return result(false, "file \'" + delFile.getName() + "\' could not be deleted...");
	}

	/**
	 * Lists the names and sizes of all {@code .hprof} files in the dump directory.
	 *
	 * @return a {@link MapPack} with parallel "name"/"size" lists,
	 *         or {@code null} if the dump directory does not exist
	 */
	@RequestHandler(RequestCmd.OBJECT_LIST_HEAP_DUMP)
	public Pack listHeapDump(Pack param) {
		MapPack p = new MapPack();
		ListValue nameLv = p.newList("name");
		ListValue sizeLv = p.newList("size");
		File hprofDir = new File(folderName);
		if (hprofDir.exists() == false) {
			return null;
		}
		File[] fileList = hprofDir.listFiles(new ContentFilter());
		if (fileList != null) {
			for (int i = 0; i < fileList.length; i++) {
				File f = fileList[i];
				if (f.getName().endsWith(fileExt)) {
					nameLv.add(f.getName());
					sizeLv.add(f.length());
				}
			}
		}
		return p;
	}

	/** Accepts regular, non-hidden files (dump candidates). */
	public class ContentFilter implements FileFilter {
		public boolean accept(File file) {
			// Fix: the original used '||', which also accepted directories and hidden files
			// that happened to be regular files; the commented-out "f.isFile() &&" guard in
			// the original listing code shows '&&' was the intent.
			return file.isFile() && !file.getName().startsWith(".");
		}
	}

	/**
	 * Streams a heap dump file back to the requester in 2 MB blobs, each prefixed with
	 * {@link TcpFlag#HasNEXT}.
	 *
	 * @return always {@code null}; the payload is written directly to {@code out}
	 */
	@RequestHandler(RequestCmd.OBJECT_DOWNLOAD_HEAP_DUMP)
	public Pack downloadHeapDump(Pack param, DataInputX in, DataOutputX out) {
		int buff = 2 * 1024 * 1024;
		File downloadFile = new File(folderName + "/" + ((MapPack) param).getText("fileName"));
		RandomAccessFile raf = null;
		try {
			raf = new RandomAccessFile(downloadFile, "r");
			byte[] buffer = new byte[buff];
			long remain = downloadFile.length();
			while (remain > 0) {
				int unitsize = (remain >= buff) ? buff : (int) remain;
				// Fix: the original ignored read()'s return value; a short read would have
				// sent stale buffer bytes. Send only the bytes actually read.
				int n = raf.read(buffer, 0, unitsize);
				if (n <= 0) {
					break;
				}
				out.writeByte(TcpFlag.HasNEXT);
				out.writeBlob(buffer, 0, n);
				remain -= n;
			}
		} catch (Throwable e) {
			e.printStackTrace();
		} finally {
			// Fix: the original leaked the RandomAccessFile whenever writing threw.
			if (raf != null) {
				try {
					raf.close();
				} catch (Throwable ignored) {
					// best-effort close; nothing useful to do here
				}
			}
		}
		return null;
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.jdbc.thin;
import java.io.Serializable;
import java.sql.DriverPropertyInfo;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Properties;
import java.util.StringTokenizer;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.configuration.ClientConnectorConfiguration;
import org.apache.ignite.internal.processors.odbc.SqlStateCode;
import org.apache.ignite.internal.processors.query.NestedTxMode;
import org.apache.ignite.internal.util.HostAndPortRange;
import org.apache.ignite.internal.util.typedef.F;
import org.jetbrains.annotations.Nullable;
/**
* Holds JDBC connection properties.
*/
public class ConnectionPropertiesImpl implements ConnectionProperties, Serializable {
    /** Serial version UID. */
    private static final long serialVersionUID = 0L;

    /** Prefix for property names (applied when reading from {@link Properties}). */
    public static final String PROP_PREFIX = "ignite.jdbc.";

    /** Default socket buffer size. */
    private static final int DFLT_SOCK_BUFFER_SIZE = 64 * 1024;

    /** Property: schema. */
    private static final String PROP_SCHEMA = "schema";

    /** Connection URL. */
    private String url;

    /** Addresses. */
    private HostAndPortRange [] addrs;

    /** Schema name. Hidden property. Is used to set default schema name part of the URL. */
    private StringProperty schema = new StringProperty(PROP_SCHEMA,
        "Schema name of the connection", "PUBLIC", null, false, null);

    /** Distributed joins property. */
    private BooleanProperty distributedJoins = new BooleanProperty(
        "distributedJoins", "Enable distributed joins", false, false);

    /** Enforce join order property. */
    private BooleanProperty enforceJoinOrder = new BooleanProperty(
        "enforceJoinOrder", "Enable enforce join order", false, false);

    /** Collocated property. */
    private BooleanProperty collocated = new BooleanProperty(
        "collocated", "Enable collocated query", false, false);

    /** Replicated only property. */
    private BooleanProperty replicatedOnly = new BooleanProperty(
        "replicatedOnly", "Specify if the all queries contain only replicated tables", false, false);

    /** Auto close server cursor property. */
    private BooleanProperty autoCloseServerCursor = new BooleanProperty(
        "autoCloseServerCursor", "Enable auto close server cursors when last piece of result set is retrieved. " +
        "If the server-side cursor is already closed, you may get an exception when trying to call " +
        "`ResultSet.getMetadata()` method.", false, false);

    /** TCP no delay property. */
    private BooleanProperty tcpNoDelay = new BooleanProperty(
        "tcpNoDelay", "TCP no delay flag", true, false);

    /** Lazy query execution property. */
    private BooleanProperty lazy = new BooleanProperty(
        "lazy", "Enable lazy query execution", false, false);
/** Socket send buffer size property. */
private IntegerProperty socketSendBuffer = new IntegerProperty(
"socketSendBuffer", "Socket send buffer size",
DFLT_SOCK_BUFFER_SIZE, false, 0, Integer.MAX_VALUE);
/** Socket receive buffer size property. */
private IntegerProperty socketReceiveBuffer = new IntegerProperty(
"socketReceiveBuffer", "Socket send buffer size",
DFLT_SOCK_BUFFER_SIZE, false, 0, Integer.MAX_VALUE);
    /** Executes update queries on ignite server nodes flag. */
    private BooleanProperty skipReducerOnUpdate = new BooleanProperty(
        "skipReducerOnUpdate", "Enable execution update queries on ignite server nodes", false, false);

    /** Nested transactions handling strategy. */
    private StringProperty nestedTxMode = new StringProperty(
        "nestedTransactionsMode", "Way to handle nested transactions", NestedTxMode.ERROR.name(),
        new String[] { NestedTxMode.COMMIT.name(), NestedTxMode.ERROR.name(), NestedTxMode.IGNORE.name() },
        false, new PropertyValidator() {
            private static final long serialVersionUID = 0L;

            /** Rejects values that are not valid {@link NestedTxMode} names (case-insensitive). */
            @Override public void validate(String mode) throws SQLException {
                if (!F.isEmpty(mode)) {
                    try {
                        NestedTxMode.valueOf(mode.toUpperCase());
                    }
                    catch (IllegalArgumentException e) {
                        throw new SQLException("Invalid nested transactions handling mode, allowed values: " +
                            Arrays.toString(nestedTxMode.choices), SqlStateCode.CLIENT_CONNECTION_FAILED);
                    }
                }
            }
        });

    /** SSL: Use SSL connection to Ignite node. */
    private StringProperty sslMode = new StringProperty("sslMode",
        "The SSL mode of the connection", SSL_MODE_DISABLE,
        new String[] {SSL_MODE_DISABLE, SSL_MODE_REQUIRE}, false, null);

    /** SSL: Protocol name. */
    private StringProperty sslProtocol = new StringProperty("sslProtocol",
        "SSL protocol name", null, null, false, null);

    /** SSL: Supported SSL cipher suites. */
    private StringProperty sslCipherSuites = new StringProperty("sslCipherSuites",
        "Supported SSL ciphers", null,
        null, false, null);

    /** SSL: Key algorithm name. */
    private StringProperty sslKeyAlgorithm = new StringProperty("sslKeyAlgorithm",
        "SSL key algorithm name", "SunX509", null, false, null);

    /** SSL: Client certificate key store url. */
    private StringProperty sslClientCertificateKeyStoreUrl =
        new StringProperty("sslClientCertificateKeyStoreUrl",
            "Client certificate key store URL",
            null, null, false, null);

    /** SSL: Client certificate key store password. */
    private StringProperty sslClientCertificateKeyStorePassword =
        new StringProperty("sslClientCertificateKeyStorePassword",
            "Client certificate key store password",
            null, null, false, null);

    /** SSL: Client certificate key store type. */
    private StringProperty sslClientCertificateKeyStoreType =
        new StringProperty("sslClientCertificateKeyStoreType",
            "Client certificate key store type",
            null, null, false, null);

    /** SSL: Trusted certificate key store url. */
    private StringProperty sslTrustCertificateKeyStoreUrl =
        new StringProperty("sslTrustCertificateKeyStoreUrl",
            "Trusted certificate key store URL", null, null, false, null);

    /** SSL: Trusted certificate key store password. */
    private StringProperty sslTrustCertificateKeyStorePassword =
        new StringProperty("sslTrustCertificateKeyStorePassword",
            "Trusted certificate key store password", null, null, false, null);

    /** SSL: Trusted certificate key store type. */
    private StringProperty sslTrustCertificateKeyStoreType =
        new StringProperty("sslTrustCertificateKeyStoreType",
            "Trusted certificate key store type",
            null, null, false, null);

    /** SSL: Trust all certificates. */
    private BooleanProperty sslTrustAll = new BooleanProperty("sslTrustAll",
        "Trust all certificates", false, false);

    /** SSL: Custom class name that implements Factory<SSLSocketFactory>. */
    private StringProperty sslFactory = new StringProperty("sslFactory",
        "Custom class name that implements Factory<SSLSocketFactory>", null, null, false, null);

    /** User name to authenticate the client on the server side. */
    private StringProperty user = new StringProperty(
        "user", "User name to authenticate the client on the server side", null, null, false, null);

    /** User's password. */
    private StringProperty passwd = new StringProperty(
        "password", "User's password", null, null, false, null);

    /** Data page scan flag. Tri-state: {@code null} means "use the server default". */
    private BooleanProperty dataPageScanEnabled = new BooleanProperty("dataPageScanEnabled",
        "Whether data page scan for queries is allowed. If not specified, server defines the default behaviour.",
        null, false);

    /** Partition awareness flag. */
    private BooleanProperty partitionAwareness = new BooleanProperty(
        "partitionAwareness",
        "Whether jdbc thin partition awareness is enabled.",
        false, false);
/** Update batch size (the size of internal batches are used for INSERT/UPDATE/DELETE operation). */
private IntegerProperty updateBatchSize = new IntegerProperty("updateBatchSize",
"Update bach size (the size of internal batches are used for INSERT/UPDATE/DELETE operation). " +
"Set to 1 to prevent deadlock on update where keys sequence are different " +
"in several concurrent updates.", null, false, 1, Integer.MAX_VALUE);
    /** Partition awareness SQL cache size. */
    private IntegerProperty partitionAwarenessSQLCacheSize = new IntegerProperty("partitionAwarenessSQLCacheSize",
        "The size of sql cache that is used within partition awareness optimization.",
        1_000, false, 1, Integer.MAX_VALUE);

    /** Partition awareness partition distributions cache size. */
    private IntegerProperty partitionAwarenessPartDistributionsCacheSize = new IntegerProperty(
        "partitionAwarenessPartitionDistributionsCacheSize",
        "The size of partition distributions cache that is used within partition awareness optimization.",
        1_000, false, 1, Integer.MAX_VALUE);

    /** Query timeout (seconds; 0 = unlimited). */
    private IntegerProperty qryTimeout = new IntegerProperty("queryTimeout",
        "Sets the number of seconds the driver will wait for a <code>Statement</code> object to execute." +
        " Zero means there is no limits.",
        0, false, 0, Integer.MAX_VALUE);

    /** JDBC connection timeout (milliseconds; 0 = unlimited). */
    private IntegerProperty connTimeout = new IntegerProperty("connectionTimeout",
        "Sets the number of milliseconds JDBC client will waits for server to response." +
        " Zero means there is no limits.",
        0, false, 0, Integer.MAX_VALUE);

    /** Properties array: every configurable property exposed via {@link DriverPropertyInfo}. */
    private final ConnectionProperty [] propsArray = {
        distributedJoins, enforceJoinOrder, collocated, replicatedOnly, autoCloseServerCursor,
        tcpNoDelay, lazy, socketSendBuffer, socketReceiveBuffer, skipReducerOnUpdate, nestedTxMode,
        sslMode, sslCipherSuites, sslProtocol, sslKeyAlgorithm,
        sslClientCertificateKeyStoreUrl, sslClientCertificateKeyStorePassword, sslClientCertificateKeyStoreType,
        sslTrustCertificateKeyStoreUrl, sslTrustCertificateKeyStorePassword, sslTrustCertificateKeyStoreType,
        sslTrustAll, sslFactory,
        user, passwd,
        dataPageScanEnabled,
        partitionAwareness,
        updateBatchSize,
        partitionAwarenessSQLCacheSize,
        partitionAwarenessPartDistributionsCacheSize,
        qryTimeout,
        connTimeout
    };
    // --- Schema accessors ---

    /** {@inheritDoc} */
    @Override public String getSchema() {
        return schema.value();
    }

    /** {@inheritDoc} */
    @Override public void setSchema(String schema) {
        this.schema.setValue(schema);
    }
/** {@inheritDoc} */
@Override public String getUrl() {
if (url != null)
return url;
else {
if (F.isEmpty(getAddresses()))
return null;
StringBuilder sbUrl = new StringBuilder(JdbcThinUtils.URL_PREFIX);
HostAndPortRange [] addrs = getAddresses();
for (int i = 0; i < addrs.length; i++) {
if (i > 0)
sbUrl.append(',');
sbUrl.append(addrs[i].toString());
}
if (!F.isEmpty(getSchema()))
sbUrl.append('/').append(getSchema());
return sbUrl.toString();
}
}
    // --- URL / address accessors ---

    /** {@inheritDoc} */
    @Override public void setUrl(String url) throws SQLException {
        this.url = url;

        // Re-parse the URL so that derived properties (addresses, schema, ...) stay in sync.
        init(url, new Properties());
    }

    /** {@inheritDoc} */
    @Override public HostAndPortRange[] getAddresses() {
        return addrs;
    }

    /** {@inheritDoc} */
    @Override public void setAddresses(HostAndPortRange[] addrs) {
        this.addrs = addrs;
    }
    // --- Query execution flag accessors (trivial delegations to the property objects) ---

    /** {@inheritDoc} */
    @Override public boolean isDistributedJoins() {
        return distributedJoins.value();
    }

    /** {@inheritDoc} */
    @Override public void setDistributedJoins(boolean val) {
        distributedJoins.setValue(val);
    }

    /** {@inheritDoc} */
    @Override public boolean isEnforceJoinOrder() {
        return enforceJoinOrder.value();
    }

    /** {@inheritDoc} */
    @Override public void setEnforceJoinOrder(boolean val) {
        enforceJoinOrder.setValue(val);
    }

    /** {@inheritDoc} */
    @Override public boolean isCollocated() {
        return collocated.value();
    }

    /** {@inheritDoc} */
    @Override public void setCollocated(boolean val) {
        collocated.setValue(val);
    }

    /** {@inheritDoc} */
    @Override public boolean isReplicatedOnly() {
        return replicatedOnly.value();
    }

    /** {@inheritDoc} */
    @Override public void setReplicatedOnly(boolean val) {
        replicatedOnly.setValue(val);
    }

    /** {@inheritDoc} */
    @Override public boolean isAutoCloseServerCursor() {
        return autoCloseServerCursor.value();
    }

    /** {@inheritDoc} */
    @Override public void setAutoCloseServerCursor(boolean val) {
        autoCloseServerCursor.setValue(val);
    }

    /** {@inheritDoc} */
    @Override public int getSocketSendBuffer() {
        return socketSendBuffer.value();
    }

    /** {@inheritDoc} */
    @Override public void setSocketSendBuffer(int size) throws SQLException {
        socketSendBuffer.setValue(size);
    }

    /** {@inheritDoc} */
    @Override public int getSocketReceiveBuffer() {
        return socketReceiveBuffer.value();
    }

    /** {@inheritDoc} */
    @Override public void setSocketReceiveBuffer(int size) throws SQLException {
        socketReceiveBuffer.setValue(size);
    }

    /** {@inheritDoc} */
    @Override public boolean isTcpNoDelay() {
        return tcpNoDelay.value();
    }

    /** {@inheritDoc} */
    @Override public void setTcpNoDelay(boolean val) {
        tcpNoDelay.setValue(val);
    }

    /** {@inheritDoc} */
    @Override public boolean isLazy() {
        return lazy.value();
    }

    /** {@inheritDoc} */
    @Override public void setLazy(boolean val) {
        lazy.setValue(val);
    }

    /** {@inheritDoc} */
    @Override public boolean isSkipReducerOnUpdate() {
        return skipReducerOnUpdate.value();
    }

    /** {@inheritDoc} */
    @Override public void setSkipReducerOnUpdate(boolean val) {
        skipReducerOnUpdate.setValue(val);
    }
    // --- SSL accessors (trivial delegations to the property objects) ---

    /** {@inheritDoc} */
    @Override public String getSslMode() {
        return sslMode.value();
    }

    /** {@inheritDoc} */
    @Override public void setSslMode(String mode) {
        sslMode.setValue(mode);
    }

    /** {@inheritDoc} */
    @Override public String getSslProtocol() {
        return sslProtocol.value();
    }

    /** {@inheritDoc} */
    @Override public void setSslProtocol(String sslProtocol) {
        this.sslProtocol.setValue(sslProtocol);
    }

    /** {@inheritDoc} */
    @Override public String getSslCipherSuites() {
        return sslCipherSuites.value();
    }

    /** {@inheritDoc} */
    @Override public void setSslCipherSuites(String sslCipherSuites) {
        this.sslCipherSuites.setValue(sslCipherSuites);
    }

    /** {@inheritDoc} */
    @Override public String getSslKeyAlgorithm() {
        return sslKeyAlgorithm.value();
    }

    /** {@inheritDoc} */
    @Override public void setSslKeyAlgorithm(String keyAlgorithm) {
        sslKeyAlgorithm.setValue(keyAlgorithm);
    }

    /** {@inheritDoc} */
    @Override public String getSslClientCertificateKeyStoreUrl() {
        return sslClientCertificateKeyStoreUrl.value();
    }

    /** {@inheritDoc} */
    @Override public void setSslClientCertificateKeyStoreUrl(String url) {
        sslClientCertificateKeyStoreUrl.setValue(url);
    }

    /** {@inheritDoc} */
    @Override public String getSslClientCertificateKeyStorePassword() {
        return sslClientCertificateKeyStorePassword.value();
    }

    /** {@inheritDoc} */
    @Override public void setSslClientCertificateKeyStorePassword(String passwd) {
        sslClientCertificateKeyStorePassword.setValue(passwd);
    }

    /** {@inheritDoc} */
    @Override public String getSslClientCertificateKeyStoreType() {
        return sslClientCertificateKeyStoreType.value();
    }

    /** {@inheritDoc} */
    @Override public void setSslClientCertificateKeyStoreType(String ksType) {
        sslClientCertificateKeyStoreType.setValue(ksType);
    }

    /** {@inheritDoc} */
    @Override public String getSslTrustCertificateKeyStoreUrl() {
        return sslTrustCertificateKeyStoreUrl.value();
    }

    /** {@inheritDoc} */
    @Override public void setSslTrustCertificateKeyStoreUrl(String url) {
        sslTrustCertificateKeyStoreUrl.setValue(url);
    }

    /** {@inheritDoc} */
    @Override public String getSslTrustCertificateKeyStorePassword() {
        return sslTrustCertificateKeyStorePassword.value();
    }

    /** {@inheritDoc} */
    @Override public void setSslTrustCertificateKeyStorePassword(String passwd) {
        sslTrustCertificateKeyStorePassword.setValue(passwd);
    }

    /** {@inheritDoc} */
    @Override public String getSslTrustCertificateKeyStoreType() {
        return sslTrustCertificateKeyStoreType.value();
    }

    /** {@inheritDoc} */
    @Override public void setSslTrustCertificateKeyStoreType(String ksType) {
        sslTrustCertificateKeyStoreType.setValue(ksType);
    }

    /** {@inheritDoc} */
    @Override public boolean isSslTrustAll() {
        return sslTrustAll.value();
    }

    /** {@inheritDoc} */
    @Override public void setSslTrustAll(boolean trustAll) {
        this.sslTrustAll.setValue(trustAll);
    }

    /** {@inheritDoc} */
    @Override public String getSslFactory() {
        return sslFactory.value();
    }

    /** {@inheritDoc} */
    @Override public void setSslFactory(String sslFactory) {
        this.sslFactory.setValue(sslFactory);
    }
    // --- Transaction mode, authentication and data-page-scan accessors ---

    /** {@inheritDoc} */
    @Override public String nestedTxMode() {
        return nestedTxMode.value();
    }

    /** {@inheritDoc} */
    @Override public void nestedTxMode(String val) {
        nestedTxMode.setValue(val);
    }

    /** {@inheritDoc} */
    @Override public void setUsername(String name) {
        user.setValue(name);
    }

    /** {@inheritDoc} */
    @Override public String getUsername() {
        return user.value();
    }

    /** {@inheritDoc} */
    @Override public void setPassword(String passwd) {
        this.passwd.setValue(passwd);
    }

    /** {@inheritDoc} */
    @Override public String getPassword() {
        return passwd.value();
    }

    /** {@inheritDoc} */
    @Override public @Nullable Boolean isDataPageScanEnabled() {
        return dataPageScanEnabled.value();
    }

    /** {@inheritDoc} */
    @Override public void setDataPageScanEnabled(@Nullable Boolean dataPageScanEnabled) {
        this.dataPageScanEnabled.setValue(dataPageScanEnabled);
    }
/** {@inheritDoc} */
@Override public boolean isPartitionAwareness() {
return partitionAwareness.value();
}
/** {@inheritDoc} */
@Override public void setPartitionAwareness(boolean partitionAwareness) {
this.partitionAwareness.setValue(partitionAwareness);
}
/** {@inheritDoc} */
@Override public @Nullable Integer getUpdateBatchSize() {
return updateBatchSize.value();
}
/** {@inheritDoc} */
@Override public void setUpdateBatchSize(@Nullable Integer updateBatchSize) throws SQLException {
this.updateBatchSize.setValue(updateBatchSize);
}
// --- Partition awareness cache sizes and query/connection timeout accessors. ---
/** {@inheritDoc} */
@Override public int getPartitionAwarenessSqlCacheSize() {
return partitionAwarenessSQLCacheSize.value();
}
/** {@inheritDoc} */
@Override public void setPartitionAwarenessSqlCacheSize(int partitionAwarenessSqlCacheSize)
throws SQLException {
this.partitionAwarenessSQLCacheSize.setValue(partitionAwarenessSqlCacheSize);
}
/** {@inheritDoc} */
@Override public int getPartitionAwarenessPartitionDistributionsCacheSize() {
return partitionAwarenessPartDistributionsCacheSize.value();
}
/** {@inheritDoc} */
@Override public void setPartitionAwarenessPartitionDistributionsCacheSize(
int partitionAwarenessPartDistributionsCacheSize) throws SQLException {
this.partitionAwarenessPartDistributionsCacheSize.setValue(
partitionAwarenessPartDistributionsCacheSize);
}
/** {@inheritDoc} */
@Override public Integer getQueryTimeout() {
return qryTimeout.value();
}
/** {@inheritDoc} */
@Override public void setQueryTimeout(@Nullable Integer timeout) throws SQLException {
qryTimeout.setValue(timeout);
}
/** {@inheritDoc} */
// NOTE(review): unboxes connTimeout.value() to int - assumes the property has a non-null default; confirm.
@Override public int getConnectionTimeout() {
return connTimeout.value();
}
/** {@inheritDoc} */
@Override public void setConnectionTimeout(@Nullable Integer timeout) throws SQLException {
connTimeout.setValue(timeout);
}
/**
 * Applies connection settings taken from a URL and/or a property set.
 *
 * @param url URL connection.
 * @param props Environment properties.
 * @throws SQLException On error.
 */
public void init(String url, Properties props) throws SQLException {
    // Work on a copy so URL parsing does not mutate the caller's property set.
    Properties parsed = (Properties)props.clone();

    if (!F.isEmpty(url))
        parseUrl(url, parsed);

    for (ConnectionProperty connProp : propsArray)
        connProp.init(parsed);

    // Explicit "user"/"password" entries take precedence over URL parameters.
    String userName = props.getProperty("user");

    if (!F.isEmpty(userName)) {
        setUsername(userName);
        setPassword(props.getProperty("password"));
    }
}
/**
 * Validates the connection URL prefix and hands the remainder over for parsing.
 *
 * @param url URL.
 * @param props Properties.
 * @throws SQLException If the URL is empty or lacks the expected prefix.
 */
private void parseUrl(String url, Properties props) throws SQLException {
    if (F.isEmpty(url))
        throw new SQLException("URL cannot be null or empty.");

    if (!url.startsWith(JdbcThinUtils.URL_PREFIX))
        throw new SQLException("URL must start with \"" + JdbcThinUtils.URL_PREFIX + "\"");

    // Strip the scheme prefix and parse the remaining endpoint/parameter part.
    parseUrl0(url.substring(JdbcThinUtils.URL_PREFIX.length()).trim(), props);
}
/**
 * Parses a URL with the scheme prefix already removed, dispatching to either
 * semicolon-delimited or query-string parameter parsing.
 *
 * @param url Naked URL.
 * @param props Properties.
 * @throws SQLException If failed.
 */
private void parseUrl0(String url, Properties props) throws SQLException {
    int semicolonPos = url.indexOf(';');
    int slashPos = url.indexOf('/');
    int queryPos = url.indexOf('?');

    boolean semicolonMode;

    if (semicolonPos == -1)
        // Without ';' use semicolon mode only when '/' and '?' are absent as well
        // (no special character at all -> either mode works, semicolon is simplest).
        semicolonMode = slashPos == -1 && queryPos == -1;
    else
        // ';' wins when it appears before both '/' and '?'.
        semicolonMode = (slashPos == -1 || semicolonPos < slashPos)
            && (queryPos == -1 || semicolonPos < queryPos);

    if (semicolonMode)
        parseUrlWithSemicolon(url, props);
    else
        parseUrlWithQuery(url, props);
}
/**
 * Parses a naked URL whose parameters are separated by semicolons:
 * {@code host:port[;key=val[;...]]}.
 *
 * @param url Naked URL
 * @param props Properties.
 * @throws SQLException If failed.
 */
private void parseUrlWithSemicolon(String url, Properties props) throws SQLException {
    int sepPos = url.indexOf(';');

    if (sepPos == -1)
        sepPos = url.length();

    // Parameters exist only when the path part is non-empty and a ';' is present.
    String paramPart = sepPos > 0 && sepPos < url.length() ? url.substring(sepPos + 1) : null;

    parseEndpoints(url.substring(0, sepPos));

    if (!F.isEmpty(paramPart))
        parseParameters(paramPart, props, ";");
}
/**
 * Parses a naked URL in query mode: {@code host:port[/schema][?key=val[&...]]}.
 *
 * @param url Naked URL
 * @param props Properties.
 * @throws SQLException If failed.
 */
private void parseUrlWithQuery(String url, Properties props) throws SQLException {
    int sepPos = url.indexOf('?');

    if (sepPos == -1)
        sepPos = url.length();

    // Parameters exist only when the path part is non-empty and a '?' is present.
    String paramPart = sepPos > 0 && sepPos < url.length() ? url.substring(sepPos + 1) : null;

    String[] pathParts = url.substring(0, sepPos).split("/");

    parseEndpoints(pathParts[0]);

    // At most one path segment (the schema name) may follow the endpoint list.
    if (pathParts.length > 2) {
        throw new SQLException("Invalid URL format (only schema name is allowed in URL path parameter " +
            "'host:port[/schemaName]'): " + this.url, SqlStateCode.CLIENT_CONNECTION_FAILED);
    }

    setSchema(pathParts.length == 2 ? pathParts[1] : null);

    if (!F.isEmpty(paramPart))
        parseParameters(paramPart, props, "&");
}
/**
 * Parses the comma-separated endpoint list into {@code addrs}.
 *
 * @param endpointStr Endpoint string.
 * @throws SQLException If an endpoint is malformed or the first host is empty.
 */
private void parseEndpoints(String endpointStr) throws SQLException {
    String[] endpoints = endpointStr.split(",");

    if (endpoints.length > 0)
        addrs = new HostAndPortRange[endpoints.length];

    int idx = 0;

    for (String endpoint : endpoints) {
        try {
            addrs[idx++] = HostAndPortRange.parse(endpoint,
                ClientConnectorConfiguration.DFLT_PORT, ClientConnectorConfiguration.DFLT_PORT,
                "Invalid endpoint format (should be \"host[:portRangeFrom[..portRangeTo]]\")");
        }
        catch (IgniteCheckedException e) {
            throw new SQLException(e.getMessage(), SqlStateCode.CLIENT_CONNECTION_FAILED, e);
        }
    }

    if (F.isEmpty(addrs) || F.isEmpty(addrs[0].host()))
        throw new SQLException("Host name is empty", SqlStateCode.CLIENT_CONNECTION_FAILED);
}
/**
 * Validates and parses URL parameters ({@code key=value} pairs separated by {@code delimChar}).
 * A value may be wrapped in braces ({@code key={value}}) so it can contain the delimiter;
 * brace characters cannot otherwise appear in a value.
 *
 * @param paramStr Parameters string.
 * @param props Properties.
 * @param delimChar Delimiter character.
 * @throws SQLException If failed.
 */
private void parseParameters(String paramStr, Properties props, String delimChar) throws SQLException {
StringTokenizer st = new StringTokenizer(paramStr, delimChar);
// Tracks whether we are inside a brace-wrapped value that spans several tokens.
boolean insideBrace = false;
String key = null;
String val = null;
while (st.hasMoreTokens()) {
String token = st.nextToken();
if (!insideBrace) {
int eqSymPos = token.indexOf('=');
if (eqSymPos < 0) {
throw new SQLException("Invalid parameter format (should be \"key1=val1" + delimChar +
"key2=val2" + delimChar + "...\"): " + token);
}
// NOTE(review): indexOf never returns token.length(), so this guard is unreachable;
// the empty key/value case is actually caught by the isEmpty() check below. Confirm intent.
if (eqSymPos == token.length())
throw new SQLException("Invalid parameter format (key and value cannot be empty): " + token);
key = token.substring(0, eqSymPos);
val = token.substring(eqSymPos + 1, token.length());
if (val.startsWith("{")) {
// Opening brace: strip it and start accumulating tokens into one value.
val = val.substring(1);
insideBrace = true;
}
}
else
// Still inside braces: the delimiter was part of the value, so re-join the tokens.
val += delimChar + token;
if (val.endsWith("}")) {
insideBrace = false;
val = val.substring(0, val.length() - 1);
}
// After brace handling, no brace characters may remain anywhere in the value.
if (val.contains("{") || val.contains("}")) {
throw new SQLException("Braces cannot be escaped in the value. " +
"Please use the connection Properties for such values. [property=" + key + ']');
}
if (!insideBrace) {
if (key.isEmpty() || val.isEmpty())
throw new SQLException("Invalid parameter format (key and value cannot be empty): " + token);
// The schema parameter is applied directly; all others are stored under the driver prefix.
if (PROP_SCHEMA.equalsIgnoreCase(key))
setSchema(val);
else
props.setProperty(PROP_PREFIX + key, val);
}
}
}
/**
 * @return Driver's properties info array, one entry per known connection property.
 */
public DriverPropertyInfo[] getDriverPropertyInfo() {
    DriverPropertyInfo[] infos = new DriverPropertyInfo[propsArray.length];

    int idx = 0;

    for (ConnectionProperty prop : propsArray)
        infos[idx++] = prop.getDriverPropertyInfo();

    return infos;
}
/**
 * @return Properties set containing every connection parameter that currently has a value.
 */
public Properties storeToProperties() {
    Properties res = new Properties();

    for (ConnectionProperty prop : propsArray) {
        String strVal = prop.valueObject();

        if (strVal != null)
            res.setProperty(PROP_PREFIX + prop.getName(), strVal);
    }

    return res;
}
/**
 * Validator applied to the raw string value of a connection property before it is parsed.
 */
private interface PropertyValidator extends Serializable {
/**
 * @param val String representation of the property value to validate.
 * @throws SQLException On validation fails.
 */
void validate(String val) throws SQLException;
}
/**
 * Base class for a single typed connection property: holds the name, description,
 * default value, optional choice list and optional validator, and knows how to
 * initialize itself from a {@link Properties} set and describe itself to JDBC tooling.
 */
private abstract static class ConnectionProperty implements Serializable {
/** */
private static final long serialVersionUID = 0L;
/** Name. */
protected String name;
/** Property description. */
protected String desc;
/** Default value. */
protected Object dfltVal;
/**
 * An array of possible values if the value may be selected
 * from a particular set of values; otherwise null.
 */
protected String [] choices;
/** Required flag. */
protected boolean required;
/** Property validator. */
protected PropertyValidator validator;
/**
 * @param name Name.
 * @param desc Description.
 * @param dfltVal Default value.
 * @param choices Possible values.
 * @param required {@code true} if the property is required.
 */
ConnectionProperty(String name, String desc, Object dfltVal, String[] choices, boolean required) {
this.name = name;
this.desc = desc;
this.dfltVal = dfltVal;
this.choices = choices;
this.required = required;
}
/**
 * @param name Name.
 * @param desc Description.
 * @param dfltVal Default value.
 * @param choices Possible values.
 * @param required {@code true} if the property is required.
 * @param validator Property validator.
 */
ConnectionProperty(String name, String desc, Object dfltVal, String[] choices, boolean required,
PropertyValidator validator) {
this.name = name;
this.desc = desc;
this.dfltVal = dfltVal;
this.choices = choices;
this.required = required;
this.validator = validator;
}
/**
 * @return Default value.
 */
Object getDfltVal() {
return dfltVal;
}
/**
 * @return Property name.
 */
String getName() {
return name;
}
/**
 * @return Array of possible values if the value may be selected
 * from a particular set of values; otherwise null
 */
String[] choices() {
return choices;
}
/**
 * Initializes this property from the given property set: validates, checks choices,
 * then parses the string value (or falls back to the default when absent).
 *
 * @param props Properties.
 * @throws SQLException On error.
 */
void init(Properties props) throws SQLException {
// Property values are looked up under the driver's prefixed key.
String strVal = props.getProperty(PROP_PREFIX + name);
if (required && strVal == null) {
throw new SQLException("Property '" + name + "' is required but not defined",
SqlStateCode.CLIENT_CONNECTION_FAILED);
}
if (validator != null)
validator.validate(strVal);
checkChoices(strVal);
// NOTE(review): the value was read under PROP_PREFIX + name, but the un-prefixed key
// is removed here - confirm whether this should be props.remove(PROP_PREFIX + name).
props.remove(name);
init(strVal);
}
/**
 * Verifies the value is one of the allowed choices (case-insensitive); {@code null} passes.
 *
 * @param strVal Checked value.
 * @throws SQLException On check error.
 */
protected void checkChoices(String strVal) throws SQLException {
if (strVal == null)
return;
if (choices != null) {
for (String ch : choices) {
if (ch.equalsIgnoreCase(strVal))
return;
}
throw new SQLException("Invalid property value. [name=" + name + ", val=" + strVal
+ ", choices=" + Arrays.toString(choices) + ']', SqlStateCode.CLIENT_CONNECTION_FAILED);
}
}
/**
 * Parses the string representation of the property value; {@code null} means "use the default".
 *
 * @param str String representation of the property value.
 * @throws SQLException on error.
 */
abstract void init(String str) throws SQLException;
/**
 * @return String representation of the property value, or {@code null} when unset.
 */
abstract String valueObject();
/**
 * @return JDBC property info object describing this property.
 */
DriverPropertyInfo getDriverPropertyInfo() {
DriverPropertyInfo dpi = new DriverPropertyInfo(name, valueObject());
dpi.choices = choices();
dpi.required = required;
dpi.description = desc;
return dpi;
}
}
/**
 * Connection property holding a {@link Boolean} value ("true"/"false", case-insensitive).
 */
private static class BooleanProperty extends ConnectionProperty {
    /** */
    private static final long serialVersionUID = 0L;

    /** Allowed string representations. */
    private static final String [] boolChoices = new String[] {Boolean.TRUE.toString(), Boolean.FALSE.toString()};

    /** Current value. */
    private Boolean val;

    /**
     * @param name Name.
     * @param desc Description.
     * @param dfltVal Default value.
     * @param required {@code true} if the property is required.
     */
    BooleanProperty(String name, String desc, @Nullable Boolean dfltVal, boolean required) {
        super(name, desc, dfltVal, boolChoices, required);

        val = dfltVal;
    }

    /**
     * @return Property value.
     */
    @Nullable Boolean value() {
        return val;
    }

    /** {@inheritDoc} */
    @Override void init(String str) throws SQLException {
        if (str == null) {
            // Absent -> fall back to the default.
            val = (Boolean)dfltVal;

            return;
        }

        if (Boolean.TRUE.toString().equalsIgnoreCase(str))
            val = true;
        else if (Boolean.FALSE.toString().equalsIgnoreCase(str))
            val = false;
        else
            throw new SQLException("Failed to parse boolean property [name=" + name +
                ", value=" + str + ']', SqlStateCode.CLIENT_CONNECTION_FAILED);
    }

    /** {@inheritDoc} */
    @Override String valueObject() {
        return val == null ? null : Boolean.toString(val);
    }

    /**
     * @param val Property value to set.
     */
    void setValue(Boolean val) {
        this.val = val;
    }
}
/**
 * Base class for numeric connection properties with an optional allowed [min, max] range.
 */
private abstract static class NumberProperty extends ConnectionProperty {
    /** */
    private static final long serialVersionUID = 0L;

    /** Current value. */
    protected Number val;

    /** Allowed value range: element 0 is the lower bound, element 1 the upper bound. */
    private Number [] range;

    /**
     * @param name Name.
     * @param desc Description.
     * @param dfltVal Default value.
     * @param required {@code true} if the property is required.
     * @param min Lower bound of allowed range.
     * @param max Upper bound of allowed range.
     */
    NumberProperty(String name, String desc, Number dfltVal, boolean required, Number min, Number max) {
        super(name, desc, dfltVal, null, required);

        val = dfltVal;
        range = new Number[] {min, max};
    }

    /** {@inheritDoc} */
    @Override void init(String str) throws SQLException {
        if (str == null) {
            // Absent -> fall back to the default (which may itself be null).
            val = dfltVal != null ? (Number)dfltVal : null;

            return;
        }

        try {
            setValue(parse(str));
        }
        catch (NumberFormatException e) {
            throw new SQLException("Failed to parse int property [name=" + name +
                ", value=" + str + ']', SqlStateCode.CLIENT_CONNECTION_FAILED);
        }
    }

    /**
     * @param str String value.
     * @return Number value.
     * @throws NumberFormatException On parse error.
     */
    protected abstract Number parse(String str) throws NumberFormatException;

    /** {@inheritDoc} */
    @Override String valueObject() {
        return val == null ? null : String.valueOf(val);
    }

    /**
     * Validates the value against the allowed range and stores it.
     *
     * @param val Property value.
     * @throws SQLException If the value lies outside the allowed range.
     */
    void setValue(Number val) throws SQLException {
        if (range != null) {
            if (val.doubleValue() < range[0].doubleValue()) {
                throw new SQLException("Property cannot be lower than " + range[0].toString() + " [name=" + name +
                    ", value=" + val.toString() + ']', SqlStateCode.CLIENT_CONNECTION_FAILED);
            }

            if (val.doubleValue() > range[1].doubleValue()) {
                throw new SQLException("Property cannot be upper than " + range[1].toString() + " [name=" + name +
                    ", value=" + val.toString() + ']', SqlStateCode.CLIENT_CONNECTION_FAILED);
            }
        }

        this.val = val;
    }
}
/**
 * Connection property holding an {@link Integer} value.
 */
private static class IntegerProperty extends NumberProperty {
    /** */
    private static final long serialVersionUID = 0L;

    /**
     * @param name Name.
     * @param desc Description.
     * @param dfltVal Default value.
     * @param required {@code true} if the property is required.
     * @param min Lower bound of allowed range.
     * @param max Upper bound of allowed range.
     */
    IntegerProperty(String name, String desc, Number dfltVal, boolean required, int min, int max) {
        super(name, desc, dfltVal, required, min, max);
    }

    /** {@inheritDoc} */
    @Override protected Number parse(String str) throws NumberFormatException {
        return Integer.parseInt(str);
    }

    /**
     * @return Property value, or {@code null} if unset.
     */
    Integer value() {
        return val == null ? null : val.intValue();
    }
}
/**
 * Connection property holding a {@link String} value.
 */
private static class StringProperty extends ConnectionProperty {
    /** */
    private static final long serialVersionUID = 0L;

    /** Current value. */
    private String val;

    /**
     * @param name Name.
     * @param desc Description.
     * @param dfltVal Default value.
     * @param choices Possible values.
     * @param required {@code true} if the property is required.
     * @param validator Property value validator.
     */
    StringProperty(String name, String desc, String dfltVal, String[] choices, boolean required,
        PropertyValidator validator) {
        super(name, desc, dfltVal, choices, required, validator);

        val = dfltVal;
    }

    /**
     * @param val Property value.
     */
    void setValue(String val) {
        this.val = val;
    }

    /**
     * @return Property value.
     */
    String value() {
        return val;
    }

    /** {@inheritDoc} */
    @Override void init(String str) throws SQLException {
        if (validator != null)
            validator.validate(str);

        // Absent -> fall back to the default; otherwise keep the raw string.
        val = str == null ? (String)dfltVal : str;
    }

    /** {@inheritDoc} */
    @Override String valueObject() {
        return val;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.entitlement.proxy.soap.authenticationAdmin;
import org.apache.axis2.AxisFault;
import org.apache.axis2.client.Options;
import org.apache.axis2.client.ServiceClient;
import org.apache.axis2.client.Stub;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.axis2.context.ConfigurationContextFactory;
import org.apache.axis2.description.TransportOutDescription;
import org.wso2.carbon.identity.entitlement.proxy.AbstractEntitlementServiceClient;
import org.wso2.carbon.identity.entitlement.proxy.Attribute;
import org.wso2.carbon.identity.entitlement.proxy.ProxyConstants;
import org.wso2.carbon.identity.entitlement.proxy.XACMLRequetBuilder;
import org.wso2.carbon.identity.entitlement.stub.EntitlementPolicyAdminServiceStub;
import org.wso2.carbon.identity.entitlement.stub.EntitlementServiceStub;
import org.wso2.carbon.identity.entitlement.stub.dto.EntitledAttributesDTO;
import org.wso2.carbon.identity.entitlement.stub.dto.EntitledResultSetDTO;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * SOAP (Axis2) implementation of the entitlement service client. Evaluates XACML 3.0
 * requests against a remote WSO2 Identity Server PDP, caching service stubs and
 * authenticated sessions per server URL.
 */
public class SOAPEntitlementServiceClient extends AbstractEntitlementServiceClient {
// Cached EntitlementService stubs, keyed by server URL.
private Map<String, EntitlementServiceStub> entitlementStub = new ConcurrentHashMap<String, EntitlementServiceStub>();
// Cached EntitlementPolicyAdminService stubs, keyed by server URL.
private Map<String, EntitlementPolicyAdminServiceStub> policyAdminStub = new ConcurrentHashMap<String, EntitlementPolicyAdminServiceStub>();
// Cached authenticators (session holders), keyed by server URL; consulted only when reuseSession is true.
private Map<String, Authenticator> authenticators = new ConcurrentHashMap<String, Authenticator>();
// Base service URL; service names are appended directly, so it is presumably expected to end with '/' - TODO confirm.
private String serverUrl;
private String userName;
private String password;
// When true, the authenticated session (cookie) is cached and reused across calls.
private boolean reuseSession = true;
/**
 * @param serverUrl Base URL of the identity server's services.
 * @param username Admin user name used to authenticate.
 * @param password Admin password.
 * @param reuseSession Whether to cache and reuse the authenticated session.
 */
public SOAPEntitlementServiceClient(String serverUrl, String username, String password, boolean reuseSession) {
this.serverUrl = serverUrl;
this.userName = username;
this.password = password;
this.reuseSession = reuseSession;
}
/**
 * Evaluates the given attributes as a XACML 3.0 request and returns the raw decision XML.
 * The appId parameter is unused in this implementation.
 */
@Override
public String getDecision(Attribute[] attributes, String appId) throws Exception {
String xacmlRequest = XACMLRequetBuilder.buildXACML3Request(attributes);
EntitlementServiceStub stub = getEntitlementStub(serverUrl);
Authenticator authenticator = getAuthenticator(serverUrl, userName, password);
String result = getDecision(xacmlRequest, stub, authenticator);
// Release the HTTP connection back to Axis2's pool.
stub._getServiceClient().cleanupTransport();
return result;
}
/**
 * Convenience check: builds a four-attribute XACML request (subject, action, resource,
 * environment) and reports whether the decision contains "Permit".
 * NOTE(review): a substring check on the response XML, not a parsed decision - a response
 * mentioning "Permit" in any context would match; confirm this is acceptable.
 */
@Override
public boolean subjectCanActOnResource(String subjectType, String alias, String actionId,
String resourceId, String domainId, String appId) throws Exception {
Attribute subjectAttribute = new Attribute("urn:oasis:names:tc:xacml:1.0:subject-category:access-subject", subjectType, ProxyConstants.DEFAULT_DATA_TYPE, alias);
Attribute actionAttribute = new Attribute("urn:oasis:names:tc:xacml:3.0:attribute-category:action", "urn:oasis:names:tc:xacml:1.0:action:action-id", ProxyConstants.DEFAULT_DATA_TYPE, actionId);
Attribute resourceAttribute = new Attribute("urn:oasis:names:tc:xacml:3.0:attribute-category:resource", "urn:oasis:names:tc:xacml:1.0:resource:resource-id", ProxyConstants.DEFAULT_DATA_TYPE, resourceId);
Attribute environmentAttribute = new Attribute("urn:oasis:names:tc:xacml:3.0:attribute-category:environment", "urn:oasis:names:tc:xacml:1.0:environment:environment-id", ProxyConstants.DEFAULT_DATA_TYPE, domainId);
Attribute[] tempArr = {subjectAttribute, actionAttribute, resourceAttribute, environmentAttribute};
String xacmlRequest = XACMLRequetBuilder.buildXACML3Request(tempArr);
EntitlementServiceStub stub = getEntitlementStub(serverUrl);
Authenticator authenticator = getAuthenticator(serverUrl, userName, password);
String result = getDecision(xacmlRequest, stub, authenticator);
stub._getServiceClient().cleanupTransport();
return (result.contains("Permit"));
}
/**
 * Same as the simpler overload, but includes additional caller-supplied subject attributes
 * between the subject (index 0) and the trailing action/resource/environment attributes.
 */
@Override
public boolean subjectCanActOnResource(String subjectType, String alias, String actionId,
String resourceId, Attribute[] attributes, String domainId, String appId)
throws Exception {
Attribute[] attrs = new Attribute[attributes.length + 4];
attrs[0] = new Attribute("urn:oasis:names:tc:xacml:1.0:subject-category:access-subject", subjectType, ProxyConstants.DEFAULT_DATA_TYPE, alias);
for (int i = 0; i < attributes.length; i++) {
// NOTE(review): getType()/getId() argument order here differs from the
// (category, id, dataType, value) pattern used elsewhere in this class -
// verify against the Attribute constructor signature.
attrs[i + 1] = new Attribute("urn:oasis:names:tc:xacml:1.0:subject-category:access-subject", attributes[i].getType(),
attributes[i].getId(), attributes[i].getValue());
}
attrs[attrs.length - 3] = new Attribute("urn:oasis:names:tc:xacml:3.0:attribute-category:action", "urn:oasis:names:tc:xacml:1.0:action:action-id", ProxyConstants.DEFAULT_DATA_TYPE, actionId);
attrs[attrs.length - 2] = new Attribute("urn:oasis:names:tc:xacml:3.0:attribute-category:resource", "urn:oasis:names:tc:xacml:1.0:resource:resource-id", ProxyConstants.DEFAULT_DATA_TYPE, resourceId);
attrs[attrs.length - 1] = new Attribute("urn:oasis:names:tc:xacml:3.0:attribute-category:environment", "urn:oasis:names:tc:xacml:1.0:environment:environment-id", ProxyConstants.DEFAULT_DATA_TYPE, domainId);
String xacmlRequest = XACMLRequetBuilder.buildXACML3Request(attrs);
EntitlementServiceStub stub = getEntitlementStub(serverUrl);
Authenticator authenticator = getAuthenticator(serverUrl, userName, password);
String result = getDecision(xacmlRequest, stub, authenticator);
stub._getServiceClient().cleanupTransport();
return (result.contains("Permit"));
}
/** Lists entitled resource names for the given alias (no child search). */
@Override
public List<String> getResourcesForAlias(String alias, String appId) throws Exception {
EntitlementServiceStub stub = getEntitlementStub(serverUrl);
Authenticator authenticator = getAuthenticator(serverUrl, userName, password);
List<String> results = getResources(getEntitledAttributes(alias, null,
ProxyConstants.SUBJECT_ID, null, false, stub, authenticator));
stub._getServiceClient().cleanupTransport();
return results;
}
/** Lists entitled resource names for the given alias, including child resources. */
@Override
public List<String> getActionableResourcesForAlias(String alias, String appId) throws Exception {
EntitlementServiceStub stub = getEntitlementStub(serverUrl);
Authenticator authenticator = getAuthenticator(serverUrl, userName, password);
List<String> results = getResources(getEntitledAttributes(alias, null,
ProxyConstants.SUBJECT_ID, null, true, stub, authenticator));
stub._getServiceClient().cleanupTransport();
return results;
}
/** Lists the actions the alias is entitled to perform on the given resource. */
@Override
public List<String> getActionsForResource(String alias, String resource, String appId)
throws Exception {
EntitlementServiceStub stub = getEntitlementStub(serverUrl);
Authenticator authenticator = getAuthenticator(serverUrl, userName, password);
List<String> results = getActions(getEntitledAttributes(alias, resource,
ProxyConstants.SUBJECT_ID, null, false, stub, authenticator));
stub._getServiceClient().cleanupTransport();
return results;
}
/** Lists child resources under parentResource on which the alias may perform the action. */
@Override
public List<String> getActionableChildResourcesForAlias(String alias, String parentResource,
String action, String appId) throws Exception {
EntitlementServiceStub stub = getEntitlementStub(serverUrl);
Authenticator authenticator = getAuthenticator(serverUrl, userName,
password);
List<String> results = getResources(getEntitledAttributes(alias, parentResource,
ProxyConstants.SUBJECT_ID, action, true, stub, authenticator));
stub._getServiceClient().cleanupTransport();
return results;
}
/**
 * Returns the (possibly cached) authenticator for the server and primes the session
 * cookie on both cached stubs. When reuseSession is false a new authenticator is
 * created on every call and still stored in the cache (overwriting any prior entry).
 */
private Authenticator getAuthenticator(String serverUrl, String userName, String password)
throws Exception {
if (reuseSession) {
if (authenticators.containsKey(serverUrl)) {
return authenticators.get(serverUrl);
}
}
Authenticator authenticator = new Authenticator(userName, password, serverUrl + "AuthenticationAdmin");
setAuthCookie(false, getEntitlementStub(serverUrl), authenticator);
setAuthCookie(false, getEntitlementAdminStub(serverUrl), authenticator);
authenticators.put(serverUrl, authenticator);
return authenticator;
}
/**
 * Returns the cached EntitlementService stub for the URL, creating and caching it on first use.
 * NOTE(review): check-then-put on the ConcurrentHashMap is not atomic - concurrent first
 * calls may each build a stub and one will silently replace the other; confirm acceptable.
 */
private EntitlementServiceStub getEntitlementStub(String serverUrl) throws Exception {
if (entitlementStub.containsKey(serverUrl)) {
return entitlementStub.get(serverUrl);
}
EntitlementServiceStub stub;
ConfigurationContext configurationContext = ConfigurationContextFactory.createDefaultConfigurationContext();
HashMap<String, TransportOutDescription> transportsOut = configurationContext
.getAxisConfiguration().getTransportsOut();
// Initialize every outgoing transport sender against the fresh configuration context.
for (TransportOutDescription transportOutDescription : transportsOut.values()) {
transportOutDescription.getSender().init(configurationContext, transportOutDescription);
}
stub = new EntitlementServiceStub(configurationContext, serverUrl + "EntitlementService");
entitlementStub.put(serverUrl, stub);
return stub;
}
/**
 * Returns the cached EntitlementPolicyAdminService stub for the URL, creating and caching it
 * on first use. Same non-atomic check-then-put caveat as getEntitlementStub.
 */
private EntitlementPolicyAdminServiceStub getEntitlementAdminStub(String serverUrl)
throws Exception {
if (policyAdminStub.containsKey(serverUrl)) {
return policyAdminStub.get(serverUrl);
}
EntitlementPolicyAdminServiceStub stub;
ConfigurationContext configurationContext = ConfigurationContextFactory.createDefaultConfigurationContext();
HashMap<String, TransportOutDescription> transportsOut = configurationContext
.getAxisConfiguration().getTransportsOut();
for (TransportOutDescription transportOutDescription : transportsOut.values()) {
transportOutDescription.getSender().init(configurationContext, transportOutDescription);
}
stub = new EntitlementPolicyAdminServiceStub(configurationContext, serverUrl
+ "EntitlementPolicyAdminService");
policyAdminStub.put(serverUrl, stub);
return stub;
}
/**
 * Evaluates the XACML request; on a session-timeout fault the session cookie is refreshed
 * once and the call retried.
 */
private String getDecision(String request, EntitlementServiceStub stub,
Authenticator authenticator) throws Exception {
try {
return stub.getDecision(request);
} catch (AxisFault e) {
if (ProxyConstants.SESSION_TIME_OUT.equals(e.getFaultCode().getLocalPart())) {
// Expired session: re-authenticate and retry exactly once.
setAuthCookie(true, stub, authenticator);
return stub.getDecision(request);
} else {
throw e;
}
}
}
/**
 * Fetches entitled attributes from the PDP, with the same single retry on session timeout
 * as getDecision.
 */
private EntitledAttributesDTO[] getEntitledAttributes(String subjectName, String resourceName,
String subjectId, String action, boolean enableChildSearch,
EntitlementServiceStub stub, Authenticator authenticator) throws Exception {
EntitledResultSetDTO results;
try {
results = stub.getEntitledAttributes(subjectName, resourceName, subjectId, action,
enableChildSearch);
} catch (AxisFault e) {
if (ProxyConstants.SESSION_TIME_OUT.equals(e.getFaultCode().getLocalPart())) {
setAuthCookie(true, stub, authenticator);
results = stub.getEntitledAttributes(subjectName, resourceName, subjectId, action,
enableChildSearch);
} else {
throw e;
}
}
return results.getEntitledAttributesDTOs();
}
/** Extracts the resource names from the DTOs; returns an empty list for null input. */
private List<String> getResources(EntitledAttributesDTO[] entitledAttrs) {
List<String> list = new ArrayList<String>();
if (entitledAttrs != null) {
for (EntitledAttributesDTO dto : entitledAttrs) {
list.add(dto.getResourceName());
}
}
return list;
}
/** Extracts the action names from the DTOs; returns an empty list for null input. */
private List<String> getActions(EntitledAttributesDTO[] entitledAttrs) {
List<String> list = new ArrayList<String>();
if (entitledAttrs != null) {
for (EntitledAttributesDTO dto : entitledAttrs) {
list.add(dto.getAction());
}
}
return list;
}
/**
 * Attaches the authenticator's session cookie to the stub's service client.
 *
 * @param isExpired When true, forces the authenticator to obtain a fresh cookie.
 */
private void setAuthCookie(boolean isExpired, Stub stub, Authenticator authenticator)
throws Exception {
ServiceClient client = stub._getServiceClient();
Options option = client.getOptions();
option.setManageSession(true);
option.setProperty(org.apache.axis2.transport.http.HTTPConstants.COOKIE_STRING,
authenticator.getCookie(isExpired));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.hive;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.orc.CompressionKind;
import org.apache.hadoop.hive.ql.io.orc.OrcFlowFileWriter;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.SideEffectFree;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.util.hive.HiveJdbcCommon;
import org.apache.nifi.util.hive.HiveUtils;
import org.apache.hadoop.hive.ql.io.orc.NiFiOrcUtils;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
/**
 * The ConvertAvroToORC processor takes an Avro-formatted flow file as input and converts it into ORC format.
 * <p>
 * The conversion happens streaming inside {@code session.write(...)}: Avro records are read one at a time
 * and appended to an ORC writer that flushes into the outgoing flow file content.
 */
@SideEffectFree
@SupportsBatching
@Tags({"avro", "orc", "hive", "convert"})
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
@CapabilityDescription("Converts an Avro record into ORC file format. This processor provides a direct mapping of an Avro record to an ORC record, such "
        + "that the resulting ORC file will have the same hierarchical structure as the Avro document. If an incoming FlowFile contains a stream of "
        + "multiple Avro records, the resultant FlowFile will contain a ORC file containing all of the Avro records. If an incoming FlowFile does "
        + "not contain any records, an empty ORC file is the output.")
@WritesAttributes({
        @WritesAttribute(attribute = "mime.type", description = "Sets the mime type to application/octet-stream"),
        @WritesAttribute(attribute = "filename", description = "Sets the filename to the existing filename with the extension replaced by / added to by .orc"),
        @WritesAttribute(attribute = "record.count", description = "Sets the number of records in the ORC file."),
        @WritesAttribute(attribute = "hive.ddl", description = "Creates a partial Hive DDL statement for creating a table in Hive from this ORC file. "
                + "This can be used in ReplaceText for setting the content to the DDL. To make it valid DDL, add \"LOCATION '<path_to_orc_file_in_hdfs>'\", where "
                + "the path is the directory that contains this ORC file on HDFS. For example, ConvertAvroToORC can send flow files to a PutHDFS processor to send the file to "
                + "HDFS, then to a ReplaceText to set the content to this DDL (plus the LOCATION clause as described), then to PutHiveQL processor to create the table "
                + "if it doesn't exist.")
})
public class ConvertAvroToORC extends AbstractProcessor {

    // Attributes written onto outgoing flow files
    public static final String ORC_MIME_TYPE = "application/octet-stream";
    public static final String HIVE_DDL_ATTRIBUTE = "hive.ddl";
    public static final String RECORD_COUNT_ATTRIBUTE = "record.count";

    // Properties
    public static final PropertyDescriptor ORC_CONFIGURATION_RESOURCES = new PropertyDescriptor.Builder()
            .name("orc-config-resources")
            .displayName("ORC Configuration Resources")
            .description("A file or comma separated list of files which contains the ORC configuration (hive-site.xml, e.g.). Without this, Hadoop "
                    + "will search the classpath for a 'hive-site.xml' file or will revert to a default configuration. Please see the ORC documentation for more details.")
            .required(false).addValidator(HiveUtils.createMultipleFilesExistValidator()).build();

    public static final PropertyDescriptor STRIPE_SIZE = new PropertyDescriptor.Builder()
            .name("orc-stripe-size")
            .displayName("Stripe Size")
            .description("The size of the memory buffer (in bytes) for writing stripes to an ORC file")
            .required(true)
            .addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
            .defaultValue("64 MB")
            .build();

    public static final PropertyDescriptor BUFFER_SIZE = new PropertyDescriptor.Builder()
            .name("orc-buffer-size")
            .displayName("Buffer Size")
            .description("The maximum size of the memory buffers (in bytes) used for compressing and storing a stripe in memory. This is a hint to the ORC writer, "
                    + "which may choose to use a smaller buffer size based on stripe size and number of columns for efficient stripe writing and memory utilization.")
            .required(true)
            .addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
            .defaultValue("10 KB")
            .build();

    public static final PropertyDescriptor COMPRESSION_TYPE = new PropertyDescriptor.Builder()
            .name("orc-compression-type")
            .displayName("Compression Type")
            // FIX: this property previously had no description, leaving an empty entry in the generated
            // processor documentation (all the other properties document themselves).
            .description("The compression algorithm (if any) used to compress stripes when writing the ORC file.")
            .required(true)
            .allowableValues("NONE", "ZLIB", "SNAPPY", "LZO")
            .defaultValue("NONE")
            .build();

    public static final PropertyDescriptor HIVE_TABLE_NAME = new PropertyDescriptor.Builder()
            .name("orc-hive-table-name")
            .displayName("Hive Table Name")
            .description("An optional table name to insert into the hive.ddl attribute. The generated DDL can be used by "
                    + "a PutHiveQL processor (presumably after a PutHDFS processor) to create a table backed by the converted ORC file. "
                    + "If this property is not provided, the full name (including namespace) of the incoming Avro record will be normalized "
                    + "and used as the table name.")
            .required(false)
            .expressionLanguageSupported(true)
            .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
            .build();

    // Relationships
    static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("A FlowFile is routed to this relationship after it has been converted to ORC format.")
            .build();

    static final Relationship REL_FAILURE = new Relationship.Builder()
            .name("failure")
            .description("A FlowFile is routed to this relationship if it cannot be parsed as Avro or cannot be converted to ORC for any reason")
            .build();

    private final static List<PropertyDescriptor> propertyDescriptors;
    private final static Set<Relationship> relationships;

    // Hadoop configuration derived from the optional config-resources property; volatile because it is
    // written from @OnScheduled / onTrigger and read on processor threads.
    private volatile Configuration orcConfig;

    /*
     * Will ensure that the list of property descriptors is built only once.
     * Will also create a Set of relationships
     */
    static {
        List<PropertyDescriptor> _propertyDescriptors = new ArrayList<>();
        _propertyDescriptors.add(ORC_CONFIGURATION_RESOURCES);
        _propertyDescriptors.add(STRIPE_SIZE);
        _propertyDescriptors.add(BUFFER_SIZE);
        _propertyDescriptors.add(COMPRESSION_TYPE);
        _propertyDescriptors.add(HIVE_TABLE_NAME);
        propertyDescriptors = Collections.unmodifiableList(_propertyDescriptors);

        Set<Relationship> _relationships = new HashSet<>();
        _relationships.add(REL_SUCCESS);
        _relationships.add(REL_FAILURE);
        relationships = Collections.unmodifiableSet(_relationships);
    }

    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return propertyDescriptors;
    }

    @Override
    public Set<Relationship> getRelationships() {
        return relationships;
    }

    /**
     * Loads the Hadoop configuration from the ORC Configuration Resources property, if set.
     */
    @OnScheduled
    public void setup(ProcessContext context) {
        boolean confFileProvided = context.getProperty(ORC_CONFIGURATION_RESOURCES).isSet();
        if (confFileProvided) {
            final String configFiles = context.getProperty(ORC_CONFIGURATION_RESOURCES).getValue();
            orcConfig = HiveJdbcCommon.getConfigurationFromFiles(configFiles);
        } else {
            // FIX: previously a stale file-based configuration survived a re-schedule after the
            // property was cleared; reset so onTrigger falls back to a default Configuration.
            orcConfig = null;
        }
    }

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        FlowFile flowFile = session.get();
        if (flowFile == null) {
            return;
        }

        try {
            long startTime = System.currentTimeMillis();
            final long stripeSize = context.getProperty(STRIPE_SIZE).asDataSize(DataUnit.B).longValue();
            final int bufferSize = context.getProperty(BUFFER_SIZE).asDataSize(DataUnit.B).intValue();
            final CompressionKind compressionType = CompressionKind.valueOf(context.getProperty(COMPRESSION_TYPE).getValue());
            // Captured inside the streaming callback, read afterwards for the hive.ddl / record.count attributes
            final AtomicReference<Schema> hiveAvroSchema = new AtomicReference<>(null);
            final AtomicInteger totalRecordCount = new AtomicInteger(0);
            final String fileName = flowFile.getAttribute(CoreAttributes.FILENAME.key());

            flowFile = session.write(flowFile, (rawIn, rawOut) -> {
                try (final InputStream in = new BufferedInputStream(rawIn);
                     final OutputStream out = new BufferedOutputStream(rawOut);
                     final DataFileStream<GenericRecord> reader = new DataFileStream<>(in, new GenericDatumReader<>())) {

                    // Create ORC schema from Avro schema
                    Schema avroSchema = reader.getSchema();
                    TypeInfo orcSchema = NiFiOrcUtils.getOrcField(avroSchema);

                    if (orcConfig == null) {
                        orcConfig = new Configuration();
                    }

                    OrcFlowFileWriter orcWriter = NiFiOrcUtils.createWriter(
                            out,
                            new Path(fileName),
                            orcConfig,
                            orcSchema,
                            stripeSize,
                            compressionType,
                            bufferSize);
                    try {
                        int recordCount = 0;
                        GenericRecord currRecord = null;
                        while (reader.hasNext()) {
                            currRecord = reader.next(currRecord);
                            List<Schema.Field> fields = currRecord.getSchema().getFields();
                            if (fields != null) {
                                Object[] row = new Object[fields.size()];
                                for (int i = 0; i < fields.size(); i++) {
                                    Schema.Field field = fields.get(i);
                                    Schema fieldSchema = field.schema();
                                    Object o = currRecord.get(field.name());
                                    try {
                                        row[i] = NiFiOrcUtils.convertToORCObject(NiFiOrcUtils.getOrcField(fieldSchema), o);
                                    } catch (ArrayIndexOutOfBoundsException aioobe) {
                                        // FIX: was o.toString(), which threw NullPointerException for null field
                                        // values and masked the original conversion error; let the logger format it.
                                        getLogger().error("Index out of bounds at record {} for column {}, type {}, and object {}",
                                                new Object[]{recordCount, i, fieldSchema.getType().getName(), o},
                                                aioobe);
                                        throw new IOException(aioobe);
                                    }
                                }
                                orcWriter.addRow(NiFiOrcUtils.createOrcStruct(orcSchema, row));
                                recordCount++;
                            }
                        }
                        hiveAvroSchema.set(avroSchema);
                        totalRecordCount.set(recordCount);
                    } finally {
                        // finished writing this record, close the writer (which will flush to the flow file)
                        orcWriter.close();
                    }
                }
            });

            // Resolve the Hive table name: explicit property (with EL) wins, else normalize the Avro record's full name
            final String hiveTableName = context.getProperty(HIVE_TABLE_NAME).isSet()
                    ? context.getProperty(HIVE_TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue()
                    : NiFiOrcUtils.normalizeHiveTableName(hiveAvroSchema.get().getFullName());
            String hiveDDL = NiFiOrcUtils.generateHiveDDL(hiveAvroSchema.get(), hiveTableName);

            // Add attributes and transfer to success
            flowFile = session.putAttribute(flowFile, RECORD_COUNT_ATTRIBUTE, Integer.toString(totalRecordCount.get()));
            flowFile = session.putAttribute(flowFile, HIVE_DDL_ATTRIBUTE, hiveDDL);

            // Replace (or append) the filename extension with .orc
            StringBuilder newFilename = new StringBuilder();
            int extensionIndex = fileName.lastIndexOf(".");
            if (extensionIndex != -1) {
                newFilename.append(fileName.substring(0, extensionIndex));
            } else {
                newFilename.append(fileName);
            }
            newFilename.append(".orc");
            flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), ORC_MIME_TYPE);
            flowFile = session.putAttribute(flowFile, CoreAttributes.FILENAME.key(), newFilename.toString());
            session.transfer(flowFile, REL_SUCCESS);
            session.getProvenanceReporter().modifyContent(flowFile, "Converted "+totalRecordCount.get()+" records", System.currentTimeMillis() - startTime);
        } catch (final ProcessException pe) {
            getLogger().error("Failed to convert {} from Avro to ORC due to {}; transferring to failure", new Object[]{flowFile, pe});
            session.transfer(flowFile, REL_FAILURE);
        }
    }
}
| |
/*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.test.database.auto;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.exception.ORecordNotFoundException;
import com.orientechnologies.orient.core.exception.OSecurityException;
import com.orientechnologies.orient.core.metadata.security.OSecurityShared;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.OCommandSQL;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
@Test(groups = "security")
public class RestrictedTest {
    // Document database under test; opened per-test with different users to exercise record-level security.
    private ODatabaseDocumentTx database;
    // Record created by the "admin" user; other users should not be able to see or modify it.
    private ODocument adminRecord;
    // Record created by the "writer" user; permissions on it are granted/revoked during the test chain.
    private ODocument writerRecord;

    /**
     * Creates the test against the database URL supplied by the TestNG suite.
     *
     * @param iURL the database connection URL
     */
    @Parameters(value = "url")
    public RestrictedTest(String iURL) {
        database = new ODatabaseDocumentTx(iURL);
    }

    /** Closes the database after every test method; each test re-opens it with the user it needs. */
    @AfterMethod
    protected void closeDb() {
        database.close();
    }

    /**
     * Creates the CMSDocument class as a subclass of the built-in ORestricted class
     * (which enables per-record security) and saves a record as admin.
     */
    @Test
    public void testCreateRestrictedClass() {
        database.open("admin", "admin");
        database.getMetadata().getSchema().createClass("CMSDocument", database.getMetadata().getSchema().getClass("ORestricted"));
        adminRecord = new ODocument("CMSDocument").field("user", "admin").save();
        adminRecord.reload();
    }

    /** A writer querying the restricted class must not see the admin-owned record. */
    @Test(dependsOnMethods = "testCreateRestrictedClass")
    public void testFilteredQuery() throws IOException {
        database.open("writer", "writer");
        List<?> result = database.query(new OSQLSynchQuery<Object>("select from CMSDocument"));
        Assert.assertTrue(result.isEmpty());
    }

    /** The writer creates its own record in the restricted class. */
    @Test(dependsOnMethods = "testFilteredQuery")
    public void testCreateAsWriter() throws IOException {
        database.open("writer", "writer");
        writerRecord = new ODocument("CMSDocument").field("user", "writer").save();
        writerRecord.reload();
    }

    /** A reader sees neither the admin record nor the writer record. */
    @Test(dependsOnMethods = "testCreateAsWriter")
    public void testFilteredQueryAsReader() throws IOException {
        database.open("reader", "reader");
        List<OIdentifiable> result = database.query(new OSQLSynchQuery<Object>("select from CMSDocument"));
        Assert.assertEquals(result.size(), 0);
    }

    /** Admin can see the writer-owned record. */
    @Test(dependsOnMethods = "testFilteredQueryAsReader")
    public void testFilteredQueryAsAdmin() throws IOException {
        database.open("admin", "admin");
        List<OIdentifiable> result = database.query(new OSQLSynchQuery<Object>("select from CMSDocument where user = 'writer'"));
        Assert.assertEquals(result.size(), 1);
    }

    /** The writer can see exactly its own record. */
    @Test(dependsOnMethods = "testFilteredQueryAsAdmin")
    public void testFilteredQueryAsWriter() throws IOException {
        database.open("writer", "writer");
        List<OIdentifiable> result = database.query(new OSQLSynchQuery<Object>("select from CMSDocument"));
        Assert.assertEquals(result.size(), 1);
    }

    /** Direct load of the admin record as writer must return null (filtered out). */
    @Test(dependsOnMethods = "testFilteredQueryAsWriter")
    public void testFilteredDirectReadAsWriter() throws IOException {
        database.open("writer", "writer");
        Assert.assertNull(database.load(adminRecord));
    }

    /**
     * Attempting to update the admin record as writer must fail; afterwards the record
     * is verified (as admin) to be unchanged.
     */
    @Test(dependsOnMethods = "testFilteredDirectReadAsWriter")
    public void testFilteredDirectUpdateAsWriter() throws IOException {
        database.open("writer", "writer");
        adminRecord.field("user", "writer-hacker");
        try {
            adminRecord.save();
        } catch (OSecurityException e) {
            // OK AS EXCEPTION
        } catch (ORecordNotFoundException e) {
            // OK AS EXCEPTION
        }
        database.close();
        database.open("admin", "admin");
        Assert.assertEquals(((ODocument) adminRecord.reload()).field("user"), "admin");
    }

    /** Attempting to delete the admin record as writer must fail; record verified intact as admin. */
    @Test(dependsOnMethods = "testFilteredDirectUpdateAsWriter")
    public void testFilteredDirectDeleteAsWriter() throws IOException {
        database.open("writer", "writer");
        try {
            adminRecord.delete();
        } catch (OSecurityException e) {
            // OK AS EXCEPTION
        } catch (ORecordNotFoundException e) {
            // OK AS EXCEPTION
        }
        database.close();
        database.open("admin", "admin");
        adminRecord.reload();
        Assert.assertEquals(adminRecord.field("user"), "admin");
    }

    /**
     * The writer tries to grant itself access by tampering with the record's _allow field;
     * this must be rejected by the security layer.
     */
    @Test(dependsOnMethods = "testFilteredDirectDeleteAsWriter")
    public void testFilteredHackingAllowFieldAsWriter() throws IOException {
        database.open("writer", "writer");
        try {
            // FORCE LOADING
            Set<OIdentifiable> allows = adminRecord.field(OSecurityShared.ALLOW_ALL_FIELD);
            allows.add(database.getMetadata().getSecurity().getUser(database.getUser().getName()).getDocument().getIdentity());
            adminRecord.save();
        } catch (OSecurityException e) {
            // OK AS EXCEPTION
        } catch (ORecordNotFoundException e) {
            // OK AS EXCEPTION
        }
        database.close();
        database.open("admin", "admin");
        adminRecord.reload();
    }

    /** The writer grants the "reader" role full access to its own record (allowed, since it owns it). */
    @Test(dependsOnMethods = "testFilteredHackingAllowFieldAsWriter")
    public void testAddReaderAsRole() throws IOException {
        database.open("writer", "writer");
        Set<OIdentifiable> allows = ((ODocument) writerRecord.reload()).field(OSecurityShared.ALLOW_ALL_FIELD);
        allows.add(database.getMetadata().getSecurity().getRole("reader").getDocument().getIdentity());
        writerRecord.save();
    }

    /** After the grant above, the reader can load the writer's record. */
    @Test(dependsOnMethods = "testAddReaderAsRole")
    public void testReaderCanSeeWriterDocumentAfterPermission() throws IOException {
        database.open("reader", "reader");
        Assert.assertNotNull(database.load(writerRecord));
    }

    /** The writer revokes the reader role again via the security API. */
    @Test(dependsOnMethods = "testReaderCanSeeWriterDocumentAfterPermission")
    public void testWriterRoleCanRemoveReader() throws IOException {
        database.open("writer", "writer");
        Assert.assertEquals(((Collection<?>) writerRecord.field(OSecurityShared.ALLOW_ALL_FIELD)).size(), 2);
        database.getMetadata().getSecurity().disallowRole(writerRecord, OSecurityShared.ALLOW_ALL_FIELD, "reader");
        Assert.assertEquals(((Collection<?>) writerRecord.field(OSecurityShared.ALLOW_ALL_FIELD)).size(), 1);
        writerRecord.save();
    }

    /** With the role revoked, the reader can no longer load the record. */
    @Test(dependsOnMethods = "testWriterRoleCanRemoveReader")
    public void testReaderCannotSeeWriterDocument() throws IOException {
        database.open("reader", "reader");
        Assert.assertNull(database.load(writerRecord.getIdentity()));
    }

    /** The writer grants the "reader" user read-only access. */
    @Test(dependsOnMethods = "testReaderCannotSeeWriterDocument")
    public void testWriterAddReaderUserOnlyForRead() throws IOException {
        database.open("writer", "writer");
        database.getMetadata().getSecurity().allowUser(writerRecord, OSecurityShared.ALLOW_READ_FIELD, "reader");
        writerRecord.save();
    }

    /** With read permission granted, the reader can load the record again. */
    @Test(dependsOnMethods = "testWriterAddReaderUserOnlyForRead")
    public void testReaderCanSeeWriterDocument() throws IOException {
        database.open("reader", "reader");
        Assert.assertNotNull(database.load(writerRecord.getIdentity()));
    }

    /** Truncating a restricted class must be rejected, even for admin. */
    @Test(dependsOnMethods = "testWriterAddReaderUserOnlyForRead", expectedExceptions = OSecurityException.class)
    public void testTruncateClass() {
        database.open("admin", "admin");
        database.command(new OCommandSQL("truncate class CMSDocument")).execute();
    }

    /** Truncating the underlying cluster of a restricted class must also be rejected. */
    @Test(dependsOnMethods = "testTruncateClass", expectedExceptions = OSecurityException.class)
    public void testTruncateUnderlyingCluster() {
        database.open("admin", "admin");
        database.command(new OCommandSQL("truncate cluster CMSDocument")).execute();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
*
*/
package org.apache.lens.doc;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.testng.annotations.Test;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
/**
 * The Class TestGenerateConfigDoc.
 * <p>
 * Parses each Lens default-configuration XML file and renders its properties as an
 * APT (Almost Plain Text) table used by the project documentation site.
 */
public class TestGenerateConfigDoc {

    /** The Constant SERVER_CONF_FILE. */
    public static final String SERVER_CONF_FILE = "../lens-server/src/main/resources/lensserver-default.xml";

    /** The Constant SESSION_CONF_FILE. */
    public static final String SESSION_CONF_FILE = "../lens-server/src/main/resources/lenssession-default.xml";

    /** The Constant HIVE_DRIVER_CONF_FILE. */
    public static final String HIVE_DRIVER_CONF_FILE = "../lens-driver-hive/src/main/resources/hivedriver-default.xml";

    /** The Constant JDBC_DRIVER_CONF_FILE. */
    public static final String JDBC_DRIVER_CONF_FILE = "../lens-driver-jdbc/src/main/resources/jdbcdriver-default.xml";

    /** The Constant CLIENT_CONF_FILE. */
    public static final String CLIENT_CONF_FILE = "../lens-client/src/main/resources/lens-client-default.xml";

    /** The Constant CUBE_QUERY_CONF_FILE. */
    public static final String CUBE_QUERY_CONF_FILE = "../lens-cube/src/main/resources/olap-query-conf.xml";

    /** The Constant APT_FILE. */
    public static final String APT_FILE = "../src/site/apt/admin/config.apt";

    /** The Constant SESSION_APT_FILE. */
    public static final String SESSION_APT_FILE = "../src/site/apt/admin/session-config.apt";

    /** The Constant HIVE_DRIVER_APT_FILE. */
    public static final String HIVE_DRIVER_APT_FILE = "../src/site/apt/admin/hivedriver-config.apt";

    /** The Constant JDBC_DRIVER_APT_FILE. */
    public static final String JDBC_DRIVER_APT_FILE = "../src/site/apt/admin/jdbcdriver-config.apt";

    /** The Constant CLIENT_APT_FILE. */
    public static final String CLIENT_APT_FILE = "../src/site/apt/user/client-config.apt";

    /** The Constant CUBE_QUERY_CONF_APT_FILE. */
    public static final String CUBE_QUERY_CONF_APT_FILE = "../src/site/apt/user/olap-query-conf.apt";

    /**
     * Generate server config doc.
     *
     * @throws Exception the exception
     */
    @Test
    public void generateServerConfigDoc() throws Exception {
        ConfigPrinter printer = new ConfigPrinter(SERVER_CONF_FILE, APT_FILE);
        printer.generateDoc("Lens server configuration");
    }

    /**
     * Generate session config doc.
     *
     * @throws Exception the exception
     */
    @Test
    public void generateSessionConfigDoc() throws Exception {
        ConfigPrinter printer = new ConfigPrinter(SESSION_CONF_FILE, SESSION_APT_FILE);
        printer.generateDoc("Lens session configuration");
    }

    /**
     * Generate hivedriver config doc.
     *
     * @throws Exception the exception
     */
    @Test
    public void generateHivedriverConfigDoc() throws Exception {
        ConfigPrinter printer = new ConfigPrinter(HIVE_DRIVER_CONF_FILE, HIVE_DRIVER_APT_FILE);
        printer.generateDoc("Hive driver configuration");
    }

    /**
     * Generate jdbcdriver config doc.
     *
     * @throws Exception the exception
     */
    @Test
    public void generateJdbcdriverConfigDoc() throws Exception {
        ConfigPrinter printer = new ConfigPrinter(JDBC_DRIVER_CONF_FILE, JDBC_DRIVER_APT_FILE);
        printer.generateDoc("Jdbc driver configuration");
    }

    /**
     * Generate client config doc.
     *
     * @throws Exception the exception
     */
    @Test
    public void generateClientConfigDoc() throws Exception {
        ConfigPrinter printer = new ConfigPrinter(CLIENT_CONF_FILE, CLIENT_APT_FILE);
        printer.generateDoc("Lens client configuration");
    }

    /**
     * Generate olap query config doc.
     *
     * @throws Exception the exception
     */
    @Test
    public void generateOLAPQueryConfigDoc() throws Exception {
        ConfigPrinter printer = new ConfigPrinter(CUBE_QUERY_CONF_FILE, CUBE_QUERY_CONF_APT_FILE);
        printer.generateDoc("OLAP query configuration");
    }

    /**
     * One {@code <property>} element parsed from a configuration file.
     * <p>
     * FIX: made static — neither nested class uses the enclosing test instance, so the
     * implicit outer-instance reference was pure overhead.
     */
    static class ConfigEntry {

        /** The name. */
        private String name;

        /** The value. */
        private String value;

        /** The description. */
        private String description;

        /**
         * Validates that the parsed entry has a name and a description.
         *
         * @throws IllegalArgumentException if name or description is missing
         */
        public void validate() throws IllegalArgumentException {
            if (name == null || name.isEmpty()) {
                throw new IllegalArgumentException("Name cannot be empty");
            }
            if (description == null || description.isEmpty()) {
                throw new IllegalArgumentException("Description cannot be empty for property: " + name);
            }
        }

        /*
         * (non-Javadoc)
         *
         * @see java.lang.Object#toString()
         */
        public String toString() {
            return name + ":" + value + ":" + description;
        }
    }

    /**
     * SAX handler that collects {@link ConfigEntry} items from a configuration XML file
     * and prints them as an APT table.
     */
    static class ConfigPrinter extends DefaultHandler {

        /** The config file. */
        private final String configFile;

        /** The output apt file. */
        private final String outputAPTFile;

        /** Character buffer for the element currently being parsed. */
        private StringBuilder buf;

        /** The entry currently being populated (inside a property element). */
        private ConfigEntry entry;

        /** All entries parsed so far. */
        private List<ConfigEntry> entries;

        /** True while inside a property element. */
        boolean inProperty;

        /**
         * Instantiates a new config printer.
         *
         * @param confFile      the conf file
         * @param outputAPTFile the output apt file
         */
        public ConfigPrinter(String confFile, String outputAPTFile) {
            configFile = confFile;
            this.outputAPTFile = outputAPTFile;
            entries = new ArrayList<ConfigEntry>();
        }

        /**
         * Parses the configuration file, populating {@link #entries}.
         *
         * @throws IOException                  Signals that an I/O exception has occurred.
         * @throws ParserConfigurationException the parser configuration exception
         * @throws SAXException                 the SAX exception
         */
        public void readConfigFile() throws IOException, ParserConfigurationException, SAXException {
            SAXParserFactory factory = SAXParserFactory.newInstance();
            SAXParser parser = factory.newSAXParser();
            parser.parse(new File(configFile), this);
        }

        @Override
        public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
            if ("property".equalsIgnoreCase(qName)) {
                inProperty = true;
                entry = new ConfigEntry();
            }
            // Reset the character buffer for every element; characters() appends into it.
            buf = new StringBuilder();
        }

        @Override
        public void endElement(String uri, String localName, String qName) throws SAXException {
            if (inProperty && "name".equalsIgnoreCase(qName)) {
                entry.name = buf.toString();
            } else if (inProperty && "value".equalsIgnoreCase(qName)) {
                entry.value = buf.toString();
                // An empty value would collapse the APT table cell; use a single space instead.
                if (entry.value.isEmpty()) {
                    entry.value = " ";
                }
            } else if (inProperty && "description".equalsIgnoreCase(qName)) {
                // replace new lines with space
                entry.description = buf.toString().replaceAll("\\r|\\n", "");
            } else if (inProperty && "property".equalsIgnoreCase(qName)) {
                entry.validate();
                entries.add(entry);
                inProperty = false;
            }
        }

        @Override
        public void characters(char[] ch, int start, int length) throws SAXException {
            buf.append(ch, start, length);
        }

        /**
         * Prints the parsed entries as an APT table, sorted by property name.
         *
         * @param heading the heading
         * @throws IOException Signals that an I/O exception has occurred.
         */
        public void printAPT(String heading) throws IOException {
            PrintWriter apt = null;
            try {
                apt = new PrintWriter(new FileWriter(outputAPTFile));
                Collections.sort(entries, new Comparator<ConfigEntry>() {
                    @Override
                    public int compare(ConfigEntry e1, ConfigEntry e2) {
                        return e1.name.compareTo(e2.name);
                    }
                });
                // Add license
                apt.println("~~");
                apt.println("~~ Licensed to the Apache Software Foundation (ASF) under one");
                apt.println("~~ or more contributor license agreements. See the NOTICE file");
                apt.println("~~ distributed with this work for additional information");
                apt.println("~~ regarding copyright ownership. The ASF licenses this file");
                apt.println("~~ to you under the Apache License, Version 2.0 (the");
                apt.println("~~ \"License\"); you may not use this file except in compliance");
                apt.println("~~ with the License. You may obtain a copy of the License at");
                apt.println("~~");
                apt.println("~~ http://www.apache.org/licenses/LICENSE-2.0");
                apt.println("~~");
                apt.println("~~ Unless required by applicable law or agreed to in writing,");
                apt.println("~~ software distributed under the License is distributed on an");
                apt.println("~~ \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY");
                apt.println("~~ KIND, either express or implied. See the License for the");
                apt.println("~~ specific language governing permissions and limitations");
                apt.println("~~ under the License.");
                apt.println("~~");
                apt.println("");

                // Print header
                apt.println(heading);
                apt.println();
                apt.println("===");
                apt.println();

                // Print config entries
                int i = 1;
                apt.println("*--+--+---+--+");
                apt.println("|<<No.>>|<<Property Name>>|<<Default Value>>|<<Description>>|");
                for (ConfigEntry entry : entries) {
                    apt.println("*--+--+---+--+");
                    apt.print("|");
                    apt.print(i++);
                    apt.print("|");
                    apt.print(entry.name);
                    apt.print("|");
                    apt.print(entry.value);
                    apt.print("|");
                    apt.print(entry.description == null ? "" : entry.description);
                    apt.println("|");
                }
                apt.println("*--+--+---+--+");
                apt.println("The configuration parameters and their default values");
                apt.flush();
                // FIX: PrintWriter swallows I/O errors; without this check a failed write
                // silently produced a truncated/empty doc. checkError() surfaces it.
                if (apt.checkError()) {
                    throw new IOException("Error writing APT file: " + outputAPTFile);
                }
            } finally {
                if (apt != null) {
                    apt.close();
                }
            }
        }

        /**
         * Generate doc: parse the config file, then print the APT table.
         *
         * @param heading the heading
         * @throws IOException                  Signals that an I/O exception has occurred.
         * @throws ParserConfigurationException the parser configuration exception
         * @throws SAXException                 the SAX exception
         */
        public void generateDoc(String heading) throws IOException, ParserConfigurationException, SAXException {
            readConfigFile();
            printAPT(heading);
        }
    }
}
| |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.buffer;
import org.junit.Test;
import java.io.EOFException;
import java.nio.charset.Charset;
import static io.netty.util.internal.EmptyArrays.*;
import static org.junit.Assert.*;
/**
 * Tests channel buffer streams
 */
public class ByteBufStreamTest {

    @Test
    public void testAll() throws Exception {
        ByteBuf buf = Unpooled.buffer(0, 65536);

        try {
            new ByteBufOutputStream(null);
            fail();
        } catch (NullPointerException e) {
            // Expected
        }

        // Write a known sequence of values of every primitive width, then read it back below.
        ByteBufOutputStream out = new ByteBufOutputStream(buf);
        try {
            assertSame(buf, out.buffer());
            out.writeBoolean(true);
            out.writeBoolean(false);
            out.writeByte(42);
            out.writeByte(224);
            out.writeBytes("Hello, World!");
            out.writeChars("Hello, World");
            out.writeChar('!');
            out.writeDouble(42.0);
            out.writeFloat(42.0f);
            out.writeInt(42);
            out.writeLong(42);
            out.writeShort(42);
            out.writeShort(49152);
            out.writeUTF("Hello, World!");
            out.writeBytes("The first line\r\r\n");
            out.write(EMPTY_BYTES);
            out.write(new byte[]{1, 2, 3, 4});
            out.write(new byte[]{1, 3, 3, 4}, 0, 0);
        } finally {
            out.close();
        }

        try {
            new ByteBufInputStream(null, true);
            fail();
        } catch (NullPointerException e) {
            // Expected
        }

        try {
            new ByteBufInputStream(null, 0, true);
            fail();
        } catch (NullPointerException e) {
            // Expected
        }

        try {
            new ByteBufInputStream(buf.retain(), -1, false);
            // FIX: fail() was missing — without it, a ByteBufInputStream that stopped
            // rejecting a negative length would let this test pass silently.
            fail();
        } catch (IllegalArgumentException e) {
            // Expected
        }

        try {
            new ByteBufInputStream(buf.retain(), buf.capacity() + 1, false);
            // FIX: fail() was missing here as well (length beyond capacity must be rejected).
            fail();
        } catch (IndexOutOfBoundsException e) {
            // Expected
        }

        ByteBufInputStream in = new ByteBufInputStream(buf, true);
        try {
            assertTrue(in.markSupported());
            in.mark(Integer.MAX_VALUE);

            assertEquals(buf.writerIndex(), in.skip(Long.MAX_VALUE));
            assertFalse(buf.isReadable());

            in.reset();
            assertEquals(0, buf.readerIndex());

            assertEquals(4, in.skip(4));
            assertEquals(4, buf.readerIndex());
            in.reset();

            // Read everything back in the exact order it was written above.
            assertTrue(in.readBoolean());
            assertFalse(in.readBoolean());
            assertEquals(42, in.readByte());
            assertEquals(224, in.readUnsignedByte());

            byte[] tmp = new byte[13];
            in.readFully(tmp);
            assertEquals("Hello, World!", new String(tmp, "ISO-8859-1"));

            assertEquals('H', in.readChar());
            assertEquals('e', in.readChar());
            assertEquals('l', in.readChar());
            assertEquals('l', in.readChar());
            assertEquals('o', in.readChar());
            assertEquals(',', in.readChar());
            assertEquals(' ', in.readChar());
            assertEquals('W', in.readChar());
            assertEquals('o', in.readChar());
            assertEquals('r', in.readChar());
            assertEquals('l', in.readChar());
            assertEquals('d', in.readChar());
            assertEquals('!', in.readChar());

            assertEquals(42.0, in.readDouble(), 0.0);
            assertEquals(42.0f, in.readFloat(), 0.0);
            assertEquals(42, in.readInt());
            assertEquals(42, in.readLong());
            assertEquals(42, in.readShort());
            assertEquals(49152, in.readUnsignedShort());

            assertEquals("Hello, World!", in.readUTF());
            // "The first line\r\r\n" yields one full line, then an empty line for the trailing "\r\n".
            assertEquals("The first line", in.readLine());
            assertEquals("", in.readLine());

            assertEquals(4, in.read(tmp));
            assertEquals(1, tmp[0]);
            assertEquals(2, tmp[1]);
            assertEquals(3, tmp[2]);
            assertEquals(4, tmp[3]);

            // Stream is exhausted: reads report EOF.
            assertEquals(-1, in.read());
            assertEquals(-1, in.read(tmp));

            try {
                in.readByte();
                fail();
            } catch (EOFException e) {
                // Expected
            }

            try {
                in.readFully(tmp, 0, -1);
                fail();
            } catch (IndexOutOfBoundsException e) {
                // Expected
            }

            try {
                in.readFully(tmp);
                fail();
            } catch (EOFException e) {
                // Expected
            }
        } finally {
            // Ownership was transferred to the ByteBufOutputStream, before we close we must retain the underlying
            // buffer.
            buf.retain();
            in.close();
        }

        assertEquals(buf.readerIndex(), in.readBytes());

        buf.release();
    }

    @Test
    public void testReadLine() throws Exception {
        Charset utf8 = Charset.forName("UTF-8");
        ByteBuf buf = Unpooled.buffer();
        ByteBufInputStream in = new ByteBufInputStream(buf, true);

        // readLine() on an empty buffer returns null.
        String s = in.readLine();
        assertNull(s);

        int charCount = 5; //total chars in the string below without new line characters
        byte[] abc = "a\nb\r\nc\nd\ne".getBytes(utf8);
        buf.writeBytes(abc);
        in.mark(charCount);
        assertEquals("a", in.readLine());
        assertEquals("b", in.readLine());
        assertEquals("c", in.readLine());
        assertEquals("d", in.readLine());
        assertEquals("e", in.readLine());
        assertNull(in.readLine());
        in.reset();
        int count = 0;
        while (in.readLine() != null) {
            ++count;
            if (count > charCount) {
                fail("readLine() should have returned null");
            }
        }
        assertEquals(charCount, count);
        in.close();
    }
}
| |
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.profiler.sender;
import com.navercorp.pinpoint.profiler.context.Span;
import com.navercorp.pinpoint.profiler.context.SpanChunk;
import com.navercorp.pinpoint.profiler.sender.planer.SendDataPlaner;
import com.navercorp.pinpoint.profiler.sender.planer.SpanChunkStreamSendDataPlaner;
import com.navercorp.pinpoint.profiler.util.ByteBufferUtils;
import com.navercorp.pinpoint.profiler.util.ObjectPool;
import com.navercorp.pinpoint.thrift.io.HeaderTBaseSerializer;
import org.apache.thrift.TBase;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.DatagramChannel;
import java.util.Iterator;
/**
 * UDP-based data sender that serializes {@link Span} and {@link SpanChunk}
 * messages into a span-stream format and writes them over a connected
 * {@link DatagramChannel}. Partially filled send buffers are parked on a
 * standby worker so several spans can share a single UDP packet before being
 * flushed.
 *
 * @author Taejin Koo
 */
public class SpanStreamUdpSender extends AbstractDataSender {

    protected final Logger logger = LoggerFactory.getLogger(this.getClass());

    /** Socket timeout in milliseconds (5 seconds). */
    public static final int SOCKET_TIMEOUT = 1000 * 5;

    /** Default OS-level send buffer size in bytes (1 MiB). */
    public static final int SEND_BUFFER_SIZE = 1024 * 64 * 16;

    /** Default serialization buffer size in bytes (16 KiB). */
    public static final int DEFAULT_BUFFER_SIZE = 1024 * 16;

    /** Maximum UDP payload length: 65535 - 8 (UDP header) - 20 (IP header). */
    public static final int UDP_MAX_PACKET_LENGTH = 65507;

    private final SpanStreamSendDataFactory spanStreamSendDataFactory;

    private final DatagramChannel udpChannel;
    private final AsyncQueueingExecutor<Object> executor;

    private final ObjectPool<HeaderTBaseSerializer> serializerPool;

    private final SpanStreamSendDataSerializer spanStreamSendDataSerializer;

    private final StandbySpanStreamDataSendWorker standbySpanStreamDataSendWorker;

    public SpanStreamUdpSender(String host, int port, String threadName, int queueSize) {
        this(host, port, threadName, queueSize, SOCKET_TIMEOUT, SEND_BUFFER_SIZE);
    }

    public SpanStreamUdpSender(String host, int port, String threadName, int queueSize, int timeout, int sendBufferSize) {
        // BUG FIX: the caller-supplied sendBufferSize used to be ignored here
        // (the SEND_BUFFER_SIZE constant was forwarded instead).
        this(host, port, threadName, queueSize, timeout, sendBufferSize, DEFAULT_BUFFER_SIZE);
    }

    /**
     * Creates the sender, opens and connects the UDP channel, and starts the
     * standby flush worker and the async executor.
     *
     * @param host           collector host; must not be null
     * @param port           collector port
     * @param threadName     name for the async executor thread; must not be null
     * @param queueSize      async queue capacity; must be positive
     * @param timeout        socket timeout in milliseconds; must be positive
     * @param sendBufferSize OS send buffer size in bytes; must be positive
     * @param dataBufferSize serialization buffer size in bytes; must be positive
     * @throws IllegalStateException if the DatagramChannel cannot be created
     */
    public SpanStreamUdpSender(String host, int port, String threadName, int queueSize, int timeout, int sendBufferSize, int dataBufferSize) {
        if (host == null) {
            throw new NullPointerException("host must not be null");
        }
        if (threadName == null) {
            throw new NullPointerException("threadName must not be null");
        }
        if (queueSize <= 0) {
            throw new IllegalArgumentException("queueSize");
        }
        if (timeout <= 0) {
            throw new IllegalArgumentException("timeout");
        }
        if (sendBufferSize <= 0) {
            throw new IllegalArgumentException("sendBufferSize");
        }
        if (dataBufferSize <= 0) {
            // Consistency fix: validate dataBufferSize like the other sizes.
            throw new IllegalArgumentException("dataBufferSize");
        }

        // TODO If fail to create socket, stop agent start
        logger.info("UdpDataSender initialized. host={}, port={}", host, port);

        this.udpChannel = createChannel(host, port, timeout, sendBufferSize);

        HeaderTBaseSerializerPoolFactory headerTBaseSerializerPoolFactory = new HeaderTBaseSerializerPoolFactory(false, dataBufferSize, true);
        this.serializerPool = new ObjectPool<HeaderTBaseSerializer>(headerTBaseSerializerPoolFactory, 16);

        this.spanStreamSendDataSerializer = new SpanStreamSendDataSerializer();
        this.spanStreamSendDataFactory = new SpanStreamSendDataFactory(dataBufferSize, 16, serializerPool);

        this.standbySpanStreamDataSendWorker = new StandbySpanStreamDataSendWorker(new FlushHandler(), new StandbySpanStreamDataStorage());
        this.standbySpanStreamDataSendWorker.start();

        this.executor = createAsyncQueueingExecutor(queueSize, threadName);
    }

    /**
     * Opens a DatagramChannel connected to host:port, applying the socket
     * timeout and send buffer size. Cleans up on failure.
     */
    private DatagramChannel createChannel(String host, int port, int timeout, int sendBufferSize) {
        DatagramChannel datagramChannel = null;
        DatagramSocket socket = null;
        try {
            datagramChannel = DatagramChannel.open();
            socket = datagramChannel.socket();
            socket.setSoTimeout(timeout);
            socket.setSendBufferSize(sendBufferSize);

            // The OS may clamp the requested send buffer size; warn if so.
            if (logger.isWarnEnabled()) {
                final int checkSendBufferSize = socket.getSendBufferSize();
                if (sendBufferSize != checkSendBufferSize) {
                    logger.warn("DatagramChannel.setSendBufferSize() error. {}!={}", sendBufferSize, checkSendBufferSize);
                }
            }

            InetSocketAddress serverAddress = new InetSocketAddress(host, port);
            datagramChannel.connect(serverAddress);

            return datagramChannel;
        } catch (IOException e) {
            // Release whatever was opened before re-throwing.
            if (socket != null) {
                socket.close();
            }
            if (datagramChannel != null) {
                try {
                    datagramChannel.close();
                } catch (IOException ignored) {
                    // Best-effort cleanup; the original IOException is what matters.
                }
            }
            // Message fix: add a separator between "Cause" and the detail text.
            throw new IllegalStateException("DatagramChannel create fail. Cause:" + e.getMessage(), e);
        }
    }

    @Override
    public boolean send(TBase<?, ?> data) {
        // Hand the message off to the async executor; actual serialization and
        // I/O happen on the executor thread via sendPacket().
        return executor.execute(data);
    }

    @Override
    public void stop() {
        try {
            standbySpanStreamDataSendWorker.stop();
        } catch (Exception e) {
            logger.debug("Failed to stop standbySpanStreamDataSendWorker.", e);
        }

        try {
            udpChannel.close();
        } catch (IOException e) {
            logger.debug("Failed to close udp channel.", e);
        }

        executor.stop();
    }

    /**
     * Dispatches a queued message to the matching handler. Only Span and
     * SpanChunk are supported; anything else is logged and dropped.
     */
    @Override
    protected void sendPacket(Object message) {
        if (logger.isDebugEnabled()) {
            logger.debug("sendPacket message:{}", message);
        }

        if (message instanceof Span) {
            handleSpan((Span) message);
        } else if (message instanceof SpanChunk) {
            handleSpanChunk((SpanChunk) message);
        } else {
            logger.info("sendPacket fail. invalid type:{}", messageToString(message));
        }
    }

    private String messageToString(Object message) {
        if (message == null) {
            return null;
        }
        return message.getClass().toString();
    }

    private void handleSpan(Span span) {
        if (span == null) {
            return;
        }

        HeaderTBaseSerializer serializer = serializerPool.getObject();
        PartitionedByteBufferLocator partitionedByteBufferLocator = spanStreamSendDataSerializer.serializeSpanStream(serializer, span);
        if (partitionedByteBufferLocator == null) {
            // Serialization failed: return the serializer to the pool and drop.
            serializerPool.returnObject(serializer);
            return;
        }
        doAddAndFlush(partitionedByteBufferLocator, serializer);
    }

    // streaming
    private void handleSpanChunk(SpanChunk spanChunk) {
        if (spanChunk == null) {
            return;
        }

        HeaderTBaseSerializer serializer = serializerPool.getObject();
        PartitionedByteBufferLocator partitionedByteBufferLocator = spanStreamSendDataSerializer.serializeSpanChunkStream(serializer, spanChunk);
        if (partitionedByteBufferLocator == null) {
            // Serialization failed: return the serializer to the pool and drop.
            serializerPool.returnObject(serializer);
            return;
        }
        doAddAndFlush(partitionedByteBufferLocator, serializer);
    }

    /**
     * Adds the serialized buffers to the current (possibly standby) send data.
     * If they do not fit, a planer splits them into packet-sized chunks which
     * are either flushed immediately or parked on the standby worker.
     */
    private void doAddAndFlush(PartitionedByteBufferLocator partitionedByteBufferLocator, HeaderTBaseSerializer serializer) {
        logger.debug("PartitionedByteBufferLocator {}.", partitionedByteBufferLocator);

        SpanStreamSendData currentSpanStreamSendData = standbySpanStreamDataSendWorker.getStandbySpanStreamSendData();
        if (currentSpanStreamSendData == null) {
            currentSpanStreamSendData = spanStreamSendDataFactory.create();
        }

        try {
            if (!currentSpanStreamSendData.addBuffer(partitionedByteBufferLocator.getByteBuffer())) {
                // Data doesn't fit into the current send buffer: plan it across
                // several SpanStreamSendData instances.
                SendDataPlaner sendDataPlaner = new SpanChunkStreamSendDataPlaner(partitionedByteBufferLocator, spanStreamSendDataFactory);
                Iterator<SpanStreamSendData> sendDataIterator = sendDataPlaner.getSendDataIterator(currentSpanStreamSendData, serializer);
                while (sendDataIterator.hasNext()) {
                    SpanStreamSendData sendData = sendDataIterator.next();

                    if (sendData.getFlushMode() == SpanStreamSendDataMode.FLUSH) {
                        flush(sendData);
                    } else if (sendData.getFlushMode() == SpanStreamSendDataMode.WAIT_BUFFER) {
                        // Park partially filled buffers; flush if parking fails.
                        boolean isAdded = standbySpanStreamDataSendWorker.addStandbySpanStreamData(sendData);
                        if (!isAdded) {
                            flush(sendData);
                        }
                    }
                }
            } else {
                boolean isAdded = standbySpanStreamDataSendWorker.addStandbySpanStreamData(currentSpanStreamSendData);
                if (!isAdded) {
                    flush(currentSpanStreamSendData);
                }
            }
        } catch (IOException e) {
            logger.warn("UDPChannel write fail.", e);
        }
    }

    /**
     * Writes the send data's buffers to the UDP channel and releases the data.
     */
    private void flush(SpanStreamSendData spanStreamSendData) throws IOException {
        if (spanStreamSendData == null) {
            return;
        }

        ByteBuffer[] byteBuffers = spanStreamSendData.getSendBuffers();
        int remainingLength = ByteBufferUtils.getRemaining(byteBuffers);

        try {
            if (remainingLength != 0) {
                long sentBufferSize = udpChannel.write(byteBuffers);
                if (remainingLength != sentBufferSize) {
                    logger.warn("sent buffer {}/{}.", sentBufferSize, remainingLength);
                } else {
                    // BUG FIX: the format string had two placeholders but only
                    // one argument was supplied.
                    logger.debug("Data sent. size:{}", sentBufferSize);
                }
            }
        } finally {
            spanStreamSendData.done();
        }
    }

    /**
     * Flush callback invoked by the standby worker when parked send data must
     * be written out (e.g. on timeout or buffer pressure).
     */
    class FlushHandler implements StandbySpanStreamDataFlushHandler {

        @Override
        public void handleFlush(SpanStreamSendData spanStreamSendData) {
            if (spanStreamSendData == null) {
                return;
            }

            try {
                ByteBuffer[] byteBuffers = spanStreamSendData.getSendBuffers();
                int remainingLength = ByteBufferUtils.getRemaining(byteBuffers);

                if (remainingLength != 0) {
                    long sentBufferSize = udpChannel.write(byteBuffers);
                    if (remainingLength != sentBufferSize) {
                        logger.warn("sent buffer {}/{}.", sentBufferSize, remainingLength);
                    } else {
                        // TODO need check ????
                    }
                }
            } catch (IOException e) {
                logger.warn("Failed to flush span stream data.", e);
            } finally {
                spanStreamSendData.done();
            }
        }

        @Override
        public void exceptionCaught(SpanStreamSendData spanStreamSendData, Throwable e) {
            logger.warn("Failed to flush span stream data.", e);
        }
    }

}
| |
/*******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.webservices;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.net.UnknownHostException;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.httpclient.Credentials;
import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpException;
import org.apache.commons.httpclient.URI;
import org.apache.commons.httpclient.URIException;
import org.apache.commons.httpclient.UsernamePasswordCredentials;
import org.apache.commons.httpclient.auth.AuthScope;
import org.apache.commons.httpclient.methods.ByteArrayRequestEntity;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.RequestEntity;
import org.pentaho.di.cluster.SlaveConnectionManager;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.trans.steps.webservices.wsdl.Wsdl;
import org.pentaho.di.trans.steps.webservices.wsdl.WsdlOpParameter;
import org.pentaho.di.trans.steps.webservices.wsdl.WsdlOpParameterList;
import org.pentaho.di.trans.steps.webservices.wsdl.WsdlOperation;
import org.pentaho.di.trans.steps.webservices.wsdl.XsdType;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import com.ctc.wstx.exc.WstxParsingException;
public class WebService extends BaseStep implements StepInterface
{
private static Class<?> PKG = WebServiceMeta.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$

/** Namespace prefix used for every generated SOAP request element. */
public static final String NS_PREFIX = "ns";

private WebServiceData data;
private WebServiceMeta meta;

// Number of input rows buffered so far; drives call-step batching in processRow().
private int nbRowProcess;

// Cumulative time spent in SOAP calls, measured with Const.nanoTime().
protected long requestTime;

// NOTE(review): SimpleDateFormat is not thread-safe; these are instance fields,
// which is fine only if each step copy runs single-threaded — confirm.
private SimpleDateFormat timeFormat = new SimpleDateFormat("HH:mm:ss");
private SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
private SimpleDateFormat dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");

// Two-digit formatter used when rendering xsd:time hour/minute/second parts.
private DecimalFormat decFormat = new DecimalFormat("00");

// Reference date ("00:00:00") used to turn a time value into a duration.
private Date dateRef;
public WebService(StepMeta aStepMeta, StepDataInterface aStepData, int value, TransMeta aTransMeta, Trans aTrans) {
    super(aStepMeta, aStepData, value, aTransMeta, aTrans);

    // Parse the zero reference time ("00:00:00") used later to format
    // durations that may exceed 24 hours (e.g. "36:12:12").
    try {
        dateRef = timeFormat.parse("00:00:00");
    } catch (ParseException e) {
        // Should never happen for a constant input; abort the step if it does.
        logError("Unexpected error in WebService constructor: ", e);
        setErrors(1);
        stopAll();
    }
}
/**
 * Buffers each incoming row and fires a SOAP request whenever a full batch of
 * meta.getCallStep() rows has been collected, when input ends with a partial
 * batch pending, or when the step has no input fields at all (lookup-style).
 *
 * @param metaInterface the step metadata (a WebServiceMeta)
 * @param dataInterface the step data (a WebServiceData)
 * @return true while more rows are expected, false once processing is done
 * @throws KettleException if the URL or operation is missing, or the call fails
 */
public boolean processRow(StepMetaInterface metaInterface, StepDataInterface dataInterface)
    throws KettleException
{
    meta = (WebServiceMeta) metaInterface;

    // if a URL is not specified, throw an exception
    if (Const.isEmpty(meta.getUrl())) {
        throw new KettleStepException(BaseMessages.getString(PKG, "WebServices.ERROR0014.urlNotSpecified", getStepname()));
    }

    // if an operation is not specified, throw an exception
    if (Const.isEmpty(meta.getOperationName())) {
        throw new KettleStepException(BaseMessages.getString(PKG, "WebServices.ERROR0015.OperationNotSelected", getStepname()));
    }

    data = (WebServiceData) dataInterface;

    Object[] vCurrentRow = getRow();

    if (first)
    {
        first = false;

        // Build the output layout: input layout (if any) plus the fields the
        // web service adds, then resolve input/output field indexes once.
        if (getInputRowMeta() != null) {
            data.outputRowMeta = getInputRowMeta().clone();
        } else {
            data.outputRowMeta = new RowMeta();
        }
        meta.getFields(data.outputRowMeta, getStepname(), null, null, this);

        defineIndexList(getInputRowMeta(), vCurrentRow);
    }
    else
    {
        // Input from previous steps, no longer getting any rows, call it a day...
        //
        if (vCurrentRow == null) {
            setOutputDone();
            return false;
        }
    }

    if (vCurrentRow != null)
    {
        nbRowProcess++;
        data.argumentRows.add(vCurrentRow);
    }

    // Fire the SOAP call when: (a) input ended with a partial batch pending,
    // (b) a full batch of getCallStep() rows was just completed, or
    // (c) there is no input at all and the service takes no input fields.
    if ((vCurrentRow == null && (nbRowProcess % meta.getCallStep() != 0)) || (vCurrentRow != null && ((nbRowProcess > 0 && nbRowProcess % meta.getCallStep() == 0)))
        || (vCurrentRow == null && (!meta.hasFieldsIn())))
    {
        requestSOAP(vCurrentRow, getInputRowMeta());
    }

    // No input received, this one lookup execution is all we're going to do.
    //
    if (vCurrentRow == null)
    {
        setOutputDone();
    }

    return vCurrentRow != null;
}
private List<Integer> indexList;
/**
 * Resolves the input-field indexes used to build requests, and maps each web
 * service output field name to its index in the output row.
 */
private void defineIndexList(RowMetaInterface rowMeta, Object[] vCurrentRow) throws KettleException
{
    indexList = new ArrayList<Integer>();

    // Resolve every configured input field against the incoming row layout;
    // a missing field is a configuration error.
    if (rowMeta != null) {
        for (WebServiceField inputField : meta.getFieldsIn()) {
            int fieldIndex = rowMeta.indexOfValue(inputField.getName());
            if (fieldIndex < 0) {
                throw new KettleException("Required input field [" + inputField.getName() + "] couldn't be found in the step input");
            }
            indexList.add(fieldIndex);
        }
    }

    // Map each web service result name to the index of its target output field,
    // so response parsing can populate rows directly.
    for (WebServiceField outputField : meta.getFieldsOut()) {
        int fieldIndex = data.outputRowMeta.indexOfValue(outputField.getName());
        if (fieldIndex >= 0) {
            data.indexMap.put(outputField.getWsName(), fieldIndex);
        }
    }
}
/**
 * Builds the SOAP envelope for the given operation from the buffered argument
 * rows: header parameters, then the request operation element wrapping the
 * optional container element and the body parameters.
 */
private String getRequestXML(WsdlOperation operation, boolean qualifyWSField) throws KettleException
{
    WsdlOpParameterList parameters = operation.getParameters();
    String requestOperation = Const.NVL(meta.getOperationRequestName(), meta.getOperationName());

    // Collect the local names of all body parameters of the operation.
    List<String> bodyNames = new ArrayList<String>();
    for (Iterator<WsdlOpParameter> it = parameters.iterator(); it.hasNext();) {
        bodyNames.add(it.next().getName().getLocalPart());
    }
    List<String> headerNames = new ArrayList<String>(parameters.getHeaderNames());

    // TODO We only manage one name space for all the elements. See in the
    // future how to manage multiple name spaces
    //
    StringBuffer xml = new StringBuffer();
    xml.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
    xml.append("<soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:").append(NS_PREFIX).append("=\"");
    xml.append(meta.getOperationNamespace()).append("\">\n");

    // SOAP header parameters.
    xml.append(" <soapenv:Header>\n");
    addParametersToXML(xml, headerNames, qualifyWSField);
    xml.append(" </soapenv:Header>\n");

    // SOAP body: open the request operation element.
    xml.append(" <soapenv:Body>\n");
    xml.append(" <").append(NS_PREFIX).append(":").append(requestOperation).append(">\n"); // OPEN request operation

    String containerName = meta.getInFieldContainerName();
    if (containerName != null)
    {
        xml.append(" <").append(NS_PREFIX).append(":").append(containerName).append(">\n");
    }

    addParametersToXML(xml, bodyNames, qualifyWSField);

    if (containerName != null)
    {
        xml.append(" </").append(NS_PREFIX).append(":").append(containerName).append(">\n");
    }

    xml.append(" </").append(NS_PREFIX).append(":").append(requestOperation).append(">\n"); // CLOSE request operation
    xml.append(" </soapenv:Body>\n");
    xml.append("</soapenv:Envelope>\n");

    return xml.toString();
}
/**
 * Appends one element per buffered argument row to the request XML, writing
 * each configured input field as a child element typed per its XSD type.
 *
 * @param xml            the XML this method is appending to
 * @param names          the WS parameter names allowed in this section (header or body)
 * @param qualifyWSField indicates if we are to use the namespace prefix when writing the WS field name
 * @throws KettleException if a value cannot be converted
 */
private void addParametersToXML(StringBuffer xml, List<String> names, boolean qualifyWSField) throws KettleException {
    // Add the row parameters...
    //
    for (Object[] vCurrentRow : data.argumentRows) {
        // Optional wrapper element around each row's arguments.
        if (meta.getInFieldArgumentName() != null)
        {
            xml.append(" <" + NS_PREFIX + ":").append(meta.getInFieldArgumentName()).append(">\n");
        }

        for (Integer index : indexList)
        {
            ValueMetaInterface vCurrentValue = getInputRowMeta().getValueMeta(index);
            Object data = vCurrentRow[index];  // shadows the 'data' field within this loop

            WebServiceField field = meta.getFieldInFromName(vCurrentValue.getName());
            // Only write fields that belong to this section (header vs. body).
            if (field != null && names.contains(field.getWsName()))
            {
                if (!vCurrentValue.isNull(data))
                {
                    xml.append(" <");
                    if (qualifyWSField) {
                        xml.append(NS_PREFIX).append(":");
                    }
                    xml.append(field.getWsName()).append(">");

                    // Render the value according to the declared XSD type.
                    if (XsdType.TIME.equals(field.getXsdType()))
                    {
                        // Allow to deal with hours like 36:12:12 (> 24h)
                        long millis = vCurrentValue.getDate(data).getTime() - dateRef.getTime();
                        xml.append(decFormat.format(millis / 3600000) + ":"
                                   + decFormat.format((millis % 3600000) / 60000)
                                   + ":"
                                   + decFormat.format(((millis % 60000) / 1000)));
                    }
                    else if (XsdType.DATE.equals(field.getXsdType()))
                    {
                        xml.append(dateFormat.format(vCurrentValue.getDate(data)));
                    }
                    else if (XsdType.BOOLEAN.equals(field.getXsdType()))
                    {
                        xml.append(vCurrentValue.getBoolean(data) ? "true" : "false");
                    }
                    else if (XsdType.DATE_TIME.equals(field.getXsdType()))
                    {
                        xml.append(dateTimeFormat.format(vCurrentValue.getDate(data)));
                    }
                    else if (vCurrentValue.isNumber())
                    {
                        // TODO: To Fix !! This is very bad coding...
                        //
                        xml.append(vCurrentValue.getString(data).trim().replace(',', '.'));
                    }
                    else
                    {
                        xml.append(Const.trim(vCurrentValue.getString(data)));
                    }

                    xml.append("</");
                    if (qualifyWSField) {
                        xml.append(NS_PREFIX).append(":");
                    }
                    xml.append(field.getWsName()).append(">\n");
                }
                else
                {
                    // Null values become explicit xsi:nil elements.
                    xml.append(" <").append(NS_PREFIX).append(":").append(field.getWsName()).append(" xsi:nil=\"true\"/>\n");
                }
            }
        }
        if (meta.getInFieldArgumentName() != null)
        {
            xml.append(" </" + NS_PREFIX + ":").append(meta.getInFieldArgumentName()).append(">\n");
        }
    }
}
/**
 * Loads the WSDL, builds the SOAP request from the buffered argument rows and
 * POSTs it to the service endpoint, then parses the response into output rows.
 * Synchronized: uses and clears the shared data.argumentRows batch.
 *
 * @param rowData the current (possibly null) input row, forwarded to response parsing
 * @param rowMeta the input row layout
 * @throws KettleException on WSDL, HTTP, or response errors
 */
private synchronized void requestSOAP(Object[] rowData, RowMetaInterface rowMeta) throws KettleException
{
    Wsdl wsdl;
    try{
        wsdl = new Wsdl(new java.net.URI(data.realUrl), null, null, meta.getHttpLogin(), meta.getHttpPassword());
    }
    catch(Exception e){
        throw new KettleStepException(BaseMessages.getString(PKG, "WebServices.ERROR0013.ExceptionLoadingWSDL"), e);
    }

    // The actual endpoint comes from the WSDL, not from the configured URL.
    String vURLService = wsdl.getServiceEndpoint();
    HttpClient vHttpClient = SlaveConnectionManager.getInstance().createHttpClient();
    PostMethod vHttpMethod = new PostMethod(vURLService);
    HostConfiguration vHostConfiguration = new HostConfiguration();

    // Optional preemptive HTTP basic authentication.
    String httpLogin = environmentSubstitute(meta.getHttpLogin());
    if (httpLogin != null && !"".equals(httpLogin))
    {
        vHttpClient.getParams().setAuthenticationPreemptive(true);
        Credentials defaultcreds = new UsernamePasswordCredentials(httpLogin, environmentSubstitute(meta.getHttpPassword()));
        vHttpClient.getState().setCredentials(AuthScope.ANY, defaultcreds);
    }

    // Optional proxy (default port 8080).
    String proxyHost = environmentSubstitute(meta.getProxyHost());
    if (proxyHost != null && !"".equals(proxyHost))
    {
        vHostConfiguration.setProxy(proxyHost, Const.toInt(environmentSubstitute(meta.getProxyPort()), 8080));
    }

    try
    {
        // Generate the XML to send over, determine the correct name for the request...
        //
        WsdlOperation operation = wsdl.getOperation(meta.getOperationName());
        if (operation == null) {
            throw new KettleException(BaseMessages.getString(PKG, "WebServices.Exception.OperarationNotSupported", meta.getOperationName(), meta.getUrl()));
        }
        String xml = getRequestXML(operation, wsdl.getWsdlTypes().isElementFormQualified(wsdl.getTargetNamespace()));

        if (log.isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "WebServices.Log.SOAPEnvelope"));
            logDetailed(xml);
        }

        data.argumentRows.clear(); // ready for the next batch.

        URI uri = new URI(vURLService, false);
        vHttpMethod.setURI(uri);
        vHttpMethod.setRequestHeader("Content-Type", "text/xml;charset=UTF-8");

        // SOAPAction header: namespace + "/" + operation, wrapped in quotes.
        String soapAction = "\"" + meta.getOperationNamespace();
        if (!meta.getOperationNamespace().endsWith("/")) {
            soapAction += "/";
        }
        soapAction+=meta.getOperationName()+"\"";
        logDetailed(BaseMessages.getString(PKG, "WebServices.Log.UsingRequestHeaderSOAPAction", soapAction));
        vHttpMethod.setRequestHeader("SOAPAction", soapAction);

        RequestEntity requestEntity = new ByteArrayRequestEntity(xml.toString().getBytes("UTF-8"), "UTF-8");
        vHttpMethod.setRequestEntity(requestEntity);

        long currentRequestTime = Const.nanoTime();
        int responseCode = vHttpClient.executeMethod(vHostConfiguration, vHttpMethod);
        if (responseCode == 200)
        {
            processRows(vHttpMethod.getResponseBodyAsStream(), rowData, rowMeta, wsdl.getWsdlTypes().isElementFormQualified(wsdl.getTargetNamespace()), vHttpMethod.getResponseCharSet());
        }
        else if (responseCode == 401)
        {
            throw new KettleStepException(BaseMessages.getString(PKG, "WebServices.ERROR0011.Authentication", vURLService));
        }
        else if (responseCode == 404)
        {
            throw new KettleStepException(BaseMessages.getString(PKG, "WebServices.ERROR0012.NotFound", vURLService));
        }
        else
        {
            throw new KettleStepException(BaseMessages.getString(PKG, "WebServices.ERROR0001.ServerError", Integer.toString(responseCode), Const.NVL(new String(vHttpMethod.getResponseBody()), ""), vURLService) );
        }

        // Track cumulative time spent in the service call itself.
        requestTime += Const.nanoTime() - currentRequestTime;
    }
    catch (URIException e)
    {
        throw new KettleStepException(BaseMessages.getString(PKG, "WebServices.ERROR0002.InvalidURI", vURLService), e);
    }
    catch (UnsupportedEncodingException e)
    {
        throw new KettleStepException(BaseMessages.getString(PKG, "WebServices.ERROR0003.UnsupportedEncoding", vURLService), e);
    }
    catch (HttpException e)
    {
        throw new KettleStepException(BaseMessages.getString(PKG, "WebServices.ERROR0004.HttpException", vURLService), e);
    }
    catch (UnknownHostException e)
    {
        throw new KettleStepException(BaseMessages.getString(PKG, "WebServices.ERROR0013.UnknownHost", vURLService), e);
    }
    catch (IOException e)
    {
        throw new KettleStepException(BaseMessages.getString(PKG, "WebServices.ERROR0005.IOException", vURLService), e);
    }
    finally
    {
        // Always release the HTTP connection back to the manager.
        vHttpMethod.releaseConnection();
    }
}
/**
 * Initializes the step: prepares the output-field index map and resolves
 * variables in the configured service URL once, up front.
 */
public boolean init(StepMetaInterface smi, StepDataInterface sdi)
{
    meta = (WebServiceMeta) smi;
    data = (WebServiceData) sdi;

    data.indexMap = new Hashtable<String,Integer>();
    data.realUrl = environmentSubstitute(meta.getUrl());

    return super.init(smi, sdi);
}
/** Releases step resources; delegates cleanup to the base class. */
public void dispose(StepMetaInterface smi, StepDataInterface sdi)
{
    meta = (WebServiceMeta) smi;
    data = (WebServiceData) sdi;

    super.dispose(smi, sdi);
}
/**
 * Reads the whole input stream into a String using the given encoding
 * (falling back to UTF-8), closing the stream when done.
 */
private String readStringFromInputStream(InputStream is, String encoding) throws KettleStepException {
    try {
        final String charset = encoding != null ? encoding : "UTF-8";
        // is.available() is only a sizing hint; the builder grows as needed.
        final StringBuilder content = new StringBuilder(Math.max(16, is.available()));
        final char[] chunk = new char[4096];
        try {
            final InputStreamReader reader = new InputStreamReader(is, charset);
            int read;
            while ((read = reader.read(chunk)) > 0) {
                content.append(chunk, 0, read);
            }
        } finally {
            is.close();
        }
        return content.toString();
    }
    catch (Exception e)
    {
        throw new KettleStepException("Unable to read web service response data from input stream", e);
    }
}
/**
 * Parses the SOAP response document and emits output rows: either the whole
 * reply as one XML string, a single result row, or one row per repeating
 * result element.
 *
 * @param anXml                the response body stream (consumed and closed here)
 * @param rowData              the current (possibly null) input row to extend
 * @param rowMeta              the input row layout
 * @param ignoreNamespacePrefix whether to match WS field names ignoring prefixes
 * @param encoding             the response character encoding (may be null → UTF-8)
 * @throws KettleException if the response cannot be parsed
 */
private void processRows(InputStream anXml, Object[] rowData, RowMetaInterface rowMeta, boolean ignoreNamespacePrefix, String encoding) throws KettleException
{
    // Just to make sure the old transformations keep working...
    //
    if (meta.isCompatible()) {
        compatibleProcessRows(anXml, rowData, rowMeta, ignoreNamespacePrefix, encoding);
        return;
    }

    // First we should get the complete string
    // The problem is that the string can contain XML or any other format such as HTML saying the service is no longer available.
    // We're talking about a WEB service here.
    // As such, to keep the original parsing scheme, we first read the content.
    // Then we create an input stream from the content again.
    // It's elaborate, but that way we can report on the failure more correctly.
    //
    String response = readStringFromInputStream(anXml, encoding);

    try {
        // What is the expected response object for the operation?
        //
        // NOTE(review): the parser does not disable DTDs/external entities
        // (XXE); confirm the response source is trusted.
        DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
        documentBuilderFactory.setNamespaceAware(true);
        DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();

        Document doc = documentBuilder.parse(new InputSource(new StringReader(response)));

        Node envelopeNode = doc.getFirstChild();
        String nsPrefix = envelopeNode.getPrefix();
        Node bodyNode = XMLHandler.getSubNode(envelopeNode, nsPrefix + ":Body");
        if (bodyNode == null) {
            // BUG FIX: the lower-case retry result used to be discarded
            // (the return value was never assigned), making the retry a no-op.
            bodyNode = XMLHandler.getSubNode(envelopeNode, nsPrefix + ":body"); // retry, just in case!
        }

        // Create a few objects to help do the layout of XML snippets we find along the way
        //
        Transformer transformer = null;
        try {
            // Prefer Xalan when it is on the classpath; otherwise fall back to
            // the JAXP default TransformerFactory.
            Class<?> clazz = Class.forName("org.apache.xalan.processor.TransformerFactoryImpl");
            TransformerFactory transformerFactory = (TransformerFactory) clazz.newInstance();
            transformer = transformerFactory.newTransformer();
        } catch (Throwable t) {
            TransformerFactory transformerFactory = TransformerFactory.newInstance();
            transformer = transformerFactory.newTransformer();
        }
        transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
        transformer.setOutputProperty(OutputKeys.INDENT, "yes");

        if (log.isDetailed()) {
            StringWriter bodyXML = new StringWriter();
            transformer.transform(new DOMSource(bodyNode), new StreamResult(bodyXML));
            logDetailed(bodyXML.toString());
        }

        // The node directly below the body is the response node
        // It's apparently a hassle to get the name in a consistent way, but we know it's the first element node
        //
        Node responseNode = null;
        NodeList nodeList = null;
        if (!Const.isEmpty(meta.getRepeatingElementName())) {

            // We have specified the repeating element name : use it
            //
            nodeList = ((Element) bodyNode).getElementsByTagName(meta.getRepeatingElementName());

        } else {

            if (meta.isReturningReplyAsString()) {

                // Just return the body node as an XML string...
                //
                StringWriter nodeXML = new StringWriter();
                transformer.transform(new DOMSource(bodyNode), new StreamResult(nodeXML));
                String xml = response; // nodeXML.toString();
                Object[] outputRowData = createNewRow(rowData);
                int index = rowData == null ? 0 : getInputRowMeta().size();
                outputRowData[index++] = xml;
                putRow(data.outputRowMeta, outputRowData);

            } else {

                // We just grab the list of nodes from the children of the body
                // Look for the first element node (first real child) and take that one.
                // For that child-element, we consider all the children below
                //
                NodeList responseChildren = bodyNode.getChildNodes();
                for (int i = 0; i < responseChildren.getLength(); i++) {
                    Node responseChild = responseChildren.item(i);
                    if (responseChild.getNodeType() == Node.ELEMENT_NODE) {
                        responseNode = responseChild;
                        break;
                    }
                }

                // See if we want the whole block returned as XML...
                //
                if (meta.getFieldsOut().size() == 1) {
                    WebServiceField field = meta.getFieldsOut().get(0);
                    // ROBUSTNESS FIX: guard against a body with no element
                    // children (responseNode == null) before dereferencing.
                    if (responseNode != null && field.getWsName().equals(responseNode.getNodeName())) {
                        // Pass the data as XML
                        //
                        StringWriter nodeXML = new StringWriter();
                        transformer.transform(new DOMSource(responseNode), new StreamResult(nodeXML));
                        String xml = nodeXML.toString();

                        Object[] outputRowData = createNewRow(rowData);
                        int index = rowData == null ? 0 : getInputRowMeta().size();
                        outputRowData[index++] = xml;
                        putRow(data.outputRowMeta, outputRowData);
                    } else {
                        if (responseNode != null) {
                            nodeList = responseNode.getChildNodes();
                        }
                    }
                } else {
                    if (responseNode != null) {
                        nodeList = responseNode.getChildNodes();
                    }
                }
            }
        }

        // The section below is just for repeating nodes. If we don't have those it ends here.
        //
        if (nodeList == null || meta.isReturningReplyAsString()) return;

        // Allocate a result row in case we are dealing with a single result row
        //
        Object[] outputRowData = createNewRow(rowData);

        // Now loop over the node list found above...
        //
        boolean singleRow = false;
        int fieldsFound = 0;
        for (int i = 0; i < nodeList.getLength(); i++) {
            Node node = nodeList.item(i);

            if (meta.isReturningReplyAsString()) {

                // Just return the body node as an XML string...
                //
                StringWriter nodeXML = new StringWriter();
                transformer.transform(new DOMSource(bodyNode), new StreamResult(nodeXML));
                String xml = nodeXML.toString();
                outputRowData = createNewRow(rowData);
                int index = rowData == null ? 0 : getInputRowMeta().size();
                outputRowData[index++] = xml;
                putRow(data.outputRowMeta, outputRowData);

            } else {

                // This node either contains the data for a single row or it contains the first element of a single result response
                // If we find the node name in out output result fields list, we are going to consider it a single row result.
                //
                WebServiceField field = meta.getFieldOutFromWsName(node.getNodeName(), ignoreNamespacePrefix);
                if (field != null) {
                    if (getNodeValue(outputRowData, node, field, transformer, true)) {
                        // We found a match.
                        // This means that we are dealing with a single row
                        // It also means that we need to update the output index pointer
                        //
                        singleRow = true;
                        fieldsFound++;
                    }
                } else {
                    // If we didn't already get data in the previous block we'll assume multiple rows coming back.
                    //
                    if (!singleRow) {
                        // Sticking with the multiple-results scenario...
                        //
                        // TODO: remove next 2 lines, added for debug reasons.
                        //
                        if (log.isDetailed()) {
                            StringWriter nodeXML = new StringWriter();
                            transformer.transform(new DOMSource(node), new StreamResult(nodeXML));
                            logDetailed(BaseMessages.getString(PKG, "WebServices.Log.ResultRowDataFound", nodeXML.toString()));
                        }

                        // Allocate a new row...
                        //
                        outputRowData = createNewRow(rowData);

                        // Let's see what's in there...
                        //
                        NodeList childNodes = node.getChildNodes();
                        for (int j = 0; j < childNodes.getLength(); j++) {
                            Node childNode = childNodes.item(j);

                            field = meta.getFieldOutFromWsName(childNode.getNodeName(), ignoreNamespacePrefix);
                            if (field != null) {
                                if (getNodeValue(outputRowData, childNode, field, transformer, false)) {
                                    // We found a match.
                                    // This means that we are dealing with a single row
                                    // It also means that we need to update the output index pointer
                                    //
                                    fieldsFound++;
                                }
                            }
                        }

                        // Prevent empty rows from being sent out.
                        //
                        if (fieldsFound > 0) {
                            // Send a row in a series of rows on its way.
                            //
                            putRow(data.outputRowMeta, outputRowData);
                        }
                    }
                }
            }
        }

        if (singleRow && fieldsFound > 0) {
            // Send the single row on its way.
            //
            putRow(data.outputRowMeta, outputRowData);
        }
    }
    catch (Exception e)
    {
        throw new KettleStepException(BaseMessages.getString(PKG, "WebServices.ERROR0010.OutputParsingError", response.toString()), e);
    }
}
private Object[] createNewRow(Object[] inputRowData) {
return inputRowData==null ? RowDataUtil.allocateRowData(data.outputRowMeta.size()) : RowDataUtil.createResizedCopy(inputRowData, data.outputRowMeta.size());
}
private void compatibleProcessRows(InputStream anXml, Object[] rowData, RowMetaInterface rowMeta, boolean ignoreNamespacePrefix, String encoding) throws KettleException {
// First we should get the complete string
// The problem is that the string can contain XML or any other format such as HTML saying the service is no longer available.
// We're talking about a WEB service here.
// As such, to keep the original parsing scheme, we first read the content.
// Then we create an input stream from the content again.
// It's elaborate, but that way we can report on the failure more correctly.
//
String response = readStringFromInputStream(anXml, encoding);
// Create a new reader to feed into the XML Input Factory below...
//
StringReader stringReader = new StringReader(response.toString());
// TODO Very empirical : see if we can do something better here
try {
XMLInputFactory vFactory = XMLInputFactory.newInstance();
XMLStreamReader vReader = vFactory.createXMLStreamReader(stringReader);
Object[] outputRowData = RowDataUtil.allocateRowData(data.outputRowMeta.size());
int outputIndex = 0;
boolean processing = false;
boolean oneValueRowProcessing = false;
for (int event = vReader.next(); vReader.hasNext(); event = vReader.next()) {
switch (event) {
case XMLStreamConstants.START_ELEMENT:
// Start new code
//START_ELEMENT= 1
//
if (log.isRowLevel())
logRowlevel("START_ELEMENT / " + vReader.getAttributeCount() + " / " + vReader.getNamespaceCount());
// If we start the xml element named like the return type,
// we start a new row
//
if (log.isRowLevel())
logRowlevel("vReader.getLocalName = " + vReader.getLocalName());
if (Const.isEmpty(meta.getOutFieldArgumentName())) {
//getOutFieldArgumentName() == null
if (oneValueRowProcessing) {
WebServiceField field = meta.getFieldOutFromWsName(vReader.getLocalName(), ignoreNamespacePrefix);
if (field != null) {
outputRowData[outputIndex++] = getValue(vReader.getElementText(), field);
putRow(data.outputRowMeta, outputRowData);
oneValueRowProcessing = false;
} else {
if (meta.getOutFieldContainerName().equals(vReader.getLocalName())) {
// meta.getOutFieldContainerName() = vReader.getLocalName()
if (log.isRowLevel())
logRowlevel("OutFieldContainerName = " + meta.getOutFieldContainerName());
oneValueRowProcessing = true;
}
}
}
} else {
//getOutFieldArgumentName() != null
if (log.isRowLevel())
logRowlevel("OutFieldArgumentName = " + meta.getOutFieldArgumentName());
if (meta.getOutFieldArgumentName().equals(vReader.getLocalName())) {
if (log.isRowLevel())
logRowlevel("vReader.getLocalName = " + vReader.getLocalName());
if (log.isRowLevel())
logRowlevel("OutFieldArgumentName = ");
if (processing) {
WebServiceField field = meta.getFieldOutFromWsName(vReader.getLocalName(), ignoreNamespacePrefix);
if (field != null) {
int index = data.outputRowMeta.indexOfValue(field.getName());
if (index >= 0) {
outputRowData[index] = getValue(vReader.getElementText(), field);
}
}
processing = false;
} else {
WebServiceField field = meta.getFieldOutFromWsName(vReader.getLocalName(), ignoreNamespacePrefix);
if (meta.getFieldsOut().size() == 1 && field != null) {
// This can be either a simple return element, or a complex type...
//
try {
if(meta.isPassingInputData()){
for(int i = 0; i<rowMeta.getValueMetaList().size(); i++){
ValueMetaInterface valueMeta = getInputRowMeta().getValueMeta( i );
outputRowData[outputIndex++] = valueMeta.cloneValueData(rowData[i]);
}
}
outputRowData[outputIndex++] = getValue(vReader.getElementText(), field);
putRow(data.outputRowMeta, outputRowData);
} catch (WstxParsingException e) {
throw new KettleStepException("Unable to get value for field [" + field.getName() + "]. Verify that this is not a complex data type by looking at the response XML.", e);
}
} else {
for (WebServiceField curField : meta.getFieldsOut()) {
if (!Const.isEmpty(curField.getName())) {
outputRowData[outputIndex++] = getValue(vReader.getElementText(), curField);
}
}
processing = true;
}
}
} else {
if (log.isRowLevel())
logRowlevel("vReader.getLocalName = " + vReader.getLocalName());
if (log.isRowLevel())
logRowlevel("OutFieldArgumentName = " + meta.getOutFieldArgumentName());
}
}
break;
case XMLStreamConstants.END_ELEMENT:
//END_ELEMENT= 2
if (log.isRowLevel())
logRowlevel("END_ELEMENT");
// If we end the xml element named as the return type, we
// finish a row
if ((meta.getOutFieldArgumentName() == null && meta.getOperationName().equals(vReader.getLocalName()))) {
oneValueRowProcessing = false;
} else if (meta.getOutFieldArgumentName() != null && meta.getOutFieldArgumentName().equals(vReader.getLocalName())) {
putRow(data.outputRowMeta, outputRowData);
processing = false;
}
break;
case XMLStreamConstants.PROCESSING_INSTRUCTION:
//PROCESSING_INSTRUCTION= 3
if (log.isRowLevel())
logRowlevel("PROCESSING_INSTRUCTION");
break;
case XMLStreamConstants.CHARACTERS:
//CHARACTERS= 4
if (log.isRowLevel())
logRowlevel("CHARACTERS");
break;
case XMLStreamConstants.COMMENT:
//COMMENT= 5
if (log.isRowLevel())
logRowlevel("COMMENT");
break;
case XMLStreamConstants.SPACE:
//PROCESSING_INSTRUCTION= 6
if (log.isRowLevel())
logRowlevel("PROCESSING_INSTRUCTION");
break;
case XMLStreamConstants.START_DOCUMENT:
//START_DOCUMENT= 7
if (log.isRowLevel())
logRowlevel("START_DOCUMENT");
if (log.isRowLevel())
logRowlevel(vReader.getText());
break;
case XMLStreamConstants.END_DOCUMENT:
//END_DOCUMENT= 8
if (log.isRowLevel())
logRowlevel("END_DOCUMENT");
break;
case XMLStreamConstants.ENTITY_REFERENCE:
//ENTITY_REFERENCE= 9
if (log.isRowLevel())
logRowlevel("ENTITY_REFERENCE");
break;
case XMLStreamConstants.ATTRIBUTE:
//ATTRIBUTE= 10
if (log.isRowLevel())
logRowlevel("ATTRIBUTE");
break;
case XMLStreamConstants.DTD:
//DTD= 11
if (log.isRowLevel())
logRowlevel("DTD");
break;
case XMLStreamConstants.CDATA:
//CDATA= 12
if (log.isRowLevel())
logRowlevel("CDATA");
break;
case XMLStreamConstants.NAMESPACE:
//NAMESPACE= 13
if (log.isRowLevel())
logRowlevel("NAMESPACE");
break;
case XMLStreamConstants.NOTATION_DECLARATION:
//NOTATION_DECLARATION= 14
if (log.isRowLevel())
logRowlevel("NOTATION_DECLARATION");
break;
case XMLStreamConstants.ENTITY_DECLARATION:
//ENTITY_DECLARATION= 15
if (log.isRowLevel())
logRowlevel("ENTITY_DECLARATION");
break;
default:
break;
}
}
} catch (Exception e) {
throw new KettleStepException(BaseMessages.getString(PKG, "WebServices.ERROR0010.OutputParsingError", response.toString()), e);
}
}
private boolean getNodeValue(Object[] outputRowData, Node node, WebServiceField field, Transformer transformer, boolean singleRowScenario) throws KettleException {
Integer outputIndex = data.indexMap.get(field.getWsName());
if (outputIndex==null) {
// Unknown field : don't look any further, it's not a field we want to use.
//
return false;
}
// if it's a text node or if we recognize the field type, we just grab the value
//
if (node.getNodeType()==Node.TEXT_NODE || !field.isComplex()) {
Object rowValue = null;
// See if this is a node we expect as a return value...
//
String textContent = node.getTextContent();
try {
rowValue = getValue(textContent, field);
outputRowData[outputIndex] = rowValue;
return true;
}
catch(Exception e) {
throw new KettleException("Unable to convert value ["+textContent+"] for field ["+field.getWsName()+"], type ["+field.getXsdType()+"]", e);
}
} else if (node.getNodeType()==Node.ELEMENT_NODE) {
// Perhaps we're dealing with complex data types.
// Perhaps we can just ship the XML snippet over to the next steps.
//
try {
StringWriter childNodeXML = new StringWriter();
transformer.transform(new DOMSource(node), new StreamResult(childNodeXML));
outputRowData[outputIndex] = childNodeXML.toString();
return true;
}
catch(Exception e) {
throw new KettleException("Unable to transform DOM node with name ["+node.getNodeName()+"] to XML", e);
}
}
// Nothing found, return false
//
return false;
}
private Object getValue(String vNodeValue, WebServiceField field) throws XMLStreamException, ParseException
{
if (vNodeValue == null)
{
return null;
}
else
{
if (XsdType.BOOLEAN.equals(field.getXsdType()))
{
return Boolean.valueOf(vNodeValue);
}
else if (XsdType.DATE.equals(field.getXsdType()))
{
try
{
return dateFormat.parse(vNodeValue);
}
catch (ParseException e)
{
logError(Const.getStackTracker(e));
setErrors(1);
stopAll();
return null;
}
}
else if (XsdType.TIME.equals(field.getXsdType()))
{
try
{
return timeFormat.parse(vNodeValue);
}
catch (ParseException e)
{
logError(Const.getStackTracker(e));
setErrors(1);
stopAll();
return null;
}
}
else if (XsdType.DATE_TIME.equals(field.getXsdType()))
{
try
{
return dateTimeFormat.parse(vNodeValue);
}
catch (ParseException e)
{
logError(Const.getStackTracker(e));
setErrors(1);
stopAll();
return null;
}
}
else if (XsdType.INTEGER.equals(field.getXsdType()) || XsdType.SHORT.equals(field.getXsdType()) || XsdType.INTEGER_DESC.equals(field.getXsdType()))
{
try
{
return Long.parseLong(vNodeValue);
}
catch (NumberFormatException e)
{
logError(Const.getStackTracker(e));
setErrors(1);
stopAll();
return null;
}
}
else if (XsdType.FLOAT.equals(field.getXsdType()) || XsdType.DOUBLE.equals(field.getXsdType()))
{
try
{
return Double.parseDouble(vNodeValue);
}
catch (NumberFormatException e)
{
logError(Const.getStackTracker(e));
setErrors(1);
stopAll();
return null;
}
}
else if (XsdType.BINARY.equals(field.getXsdType()))
{
return Base64.decodeBase64(vNodeValue.getBytes());
}
else if (XsdType.DECIMAL.equals(field.getXsdType()))
{
return new BigDecimal(vNodeValue);
}
else
{
return vNodeValue;
}
}
}
}
| |
package gov.usgs.cida.gcmrcservices.nude;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Range;
import gov.usgs.cida.nude.column.Column;
import gov.usgs.cida.nude.column.ColumnGrouping;
import gov.usgs.cida.nude.column.SimpleColumn;
import gov.usgs.cida.nude.resultset.inmemory.PeekingResultSet;
import gov.usgs.cida.nude.resultset.inmemory.TableRow;
/**
*
* @author dmsibley
*/
public class BedSedAverageResultSet extends PeekingResultSet {
	private static final Logger log = LoggerFactory.getLogger(BedSedAverageResultSet.class);
	// Samples with a mass below this value (grams, per the field name) are excluded from averaging.
	protected static final BigDecimal cutoffMassInGrams = new BigDecimal("20.000");
	// Validity rules keyed on sample-set size: a set of N samples needs at least the
	// given number of valid (non-null value, heavy-enough) samples to produce an average.
	//   size == 1  -> at least 1 valid
	//   size 2..3  -> at least 2 valid
	//   size >= 4  -> at least 3 valid
	protected static final Set<SampleSetRule> rules = new HashSet<>(Arrays.asList(new SampleSetRule[] {
		new SampleSetRule(Range.singleton(1), Range.atLeast(1)),
		new SampleSetRule(Range.closed(2, 3), Range.atLeast(2)),
		new SampleSetRule(Range.atLeast(4), Range.atLeast(3))
	}));
	// Wrapped upstream result set the rows are read from.
	protected final ResultSet in;
	// Rows read from 'in' but not yet collapsed into an averaged output row.
	protected final LinkedList<TableRow> queuedRows;
	protected final Column timeColumn;
	// Column whose value groups consecutive rows into a sample set.
	protected final Column sampleSetColumn;
	protected final Column valueColumn;
	protected final Column sampleMassColumn;
	// Output column receiving the standard error of the averaged value.
	protected final Column errorColumn;
	// Output column receiving the 95% confidence interval of the averaged value.
	protected final Column conf95Column;
	/**
	 * Wraps a result set so that consecutive rows sharing a sample-set value are
	 * emitted as one averaged row.
	 *
	 * @param in              upstream rows, assumed ordered so that rows of a sample set are adjacent
	 * @param colGroup        column grouping exposed by this result set
	 * @param timeColumn      time column (primary key of the cumulative grouping)
	 * @param sampleSetColumn column identifying the sample set of each row
	 * @param valueColumn     measured value to average
	 * @param sampleMassColumn sample mass, compared against {@link #cutoffMassInGrams}
	 * @param errorColumn     output column for the standard error
	 * @param conf95Column    output column for the 95% confidence interval
	 */
	public BedSedAverageResultSet(ResultSet in, ColumnGrouping colGroup,
			Column timeColumn, Column sampleSetColumn, Column valueColumn, Column sampleMassColumn, Column errorColumn, Column conf95Column) {
		this.in = in;
		this.columns = colGroup;
		this.queuedRows = new LinkedList<>();
		this.timeColumn = timeColumn;
		this.sampleSetColumn = sampleSetColumn;
		this.valueColumn = valueColumn;
		this.sampleMassColumn = sampleMassColumn;
		this.errorColumn = errorColumn;
		this.conf95Column = conf95Column;
	}
	/**
	 * Pulls rows from the wrapped result set until a complete sample set has been seen,
	 * averages it, and queues the averaged row for the superclass to hand out.
	 * Rows with a null sample-set value are skipped entirely.
	 */
	@Override
	protected void addNextRow() throws SQLException {
		LinkedList<TableRow> result = new LinkedList<>();
		String sampleSet = null;
		// Keep reading until we have produced at least one output row or run out of input.
		while(0 >= result.size() && in.next() && !in.isAfterLast()) {
			TableRow now = TableRow.buildTableRow(in);
			if (null != now.getValue(sampleSetColumn)) { // skip over null samplesets
				queuedRows.add(now);
				if (null == sampleSet) {
					sampleSet = queuedRows.peek().getValue(sampleSetColumn);
				}
				// A row with a different sample-set value signals the previous set is complete.
				if (null != sampleSet &&
						(queuedRows.size() > 1 && !sampleSet.equals(queuedRows.peekLast().getValue(sampleSetColumn)))) {
					TableRow averagedRow = flushNextSampleSet();
					if (null != averagedRow) {
						result.add(averagedRow);
					}
					sampleSet = null;
				}
			}
		}
		if(0 >= result.size()) {
			while (in.isAfterLast() && queuedRows.size() > 0) {
				//If we're at the end and we have queued rows, flush
				TableRow averagedRow = flushNextSampleSet();
				if (null != averagedRow) {
					result.add(averagedRow);
				}
			}
		}
		this.nextRows.addAll(result);
	}
	/**
	 * Averages the oldest complete sample set sitting at the head of {@link #queuedRows}
	 * and removes its rows from the queue.
	 *
	 * @return the averaged row, or null when the set did not satisfy the validity rules
	 */
	protected TableRow flushNextSampleSet() {
		TableRow result = null;
		LinkedList<TableRow> groupedSampleSet = groupSampleSet(this.queuedRows, this.sampleSetColumn);
		String sampleSet = groupedSampleSet.peekFirst().getValue(this.sampleSetColumn);
		result = averageRow(groupedSampleSet, timeColumn, valueColumn, sampleMassColumn, errorColumn, conf95Column);
		// groupSampleSet already polled the rows off the queue; this removes any
		// stragglers of the same set that may still be at the head.
		dropSampleSet(this.queuedRows, sampleSet);
		return result;
	}
	/**
	 * Removes and returns the run of rows at the head of the queue that share the
	 * head row's sample-set value. A head row with a null sample set is returned alone.
	 * NOTE: mutates the passed-in queue.
	 */
	public static LinkedList<TableRow> groupSampleSet(LinkedList<TableRow> queuedRows, Column sampleSetColumn) {
		LinkedList<TableRow> result = new LinkedList<>();
		if (null != queuedRows && !queuedRows.isEmpty()) {
			String sampleSet = queuedRows.peekFirst().getValue(sampleSetColumn);
			if (null == sampleSet) {
				result.add(queuedRows.pollFirst());
			} else {
				while (!queuedRows.isEmpty() && sampleSet.equals(queuedRows.peekFirst().getValue(sampleSetColumn))) {
					result.add(queuedRows.pollFirst());
				}
			}
		}
		return result;
	}
	/**
	 * Collapses one sample set into a single averaged row.
	 *
	 * Samples are "valid" when they have a non-null value and a parseable mass of at
	 * least {@link #cutoffMassInGrams}. The set must satisfy one of {@link #rules}
	 * (minimum valid samples for its size), otherwise null is returned.
	 *
	 * For each valid sample, a cumulative moving average is maintained for the
	 * time/value/mass columns, plus a running (Welford-style) standard deviation,
	 * standard error, and 95% confidence interval for the value column. Intermediate
	 * results are kept in modMap under prefixed synthetic column names.
	 *
	 * @return the averaged row, or null when the sample set is not valid
	 */
	public static TableRow averageRow(LinkedList<TableRow> groupedSampleSet, Column timeColumn,
			Column valueColumn, Column sampleMassColumn, Column errorColumn, Column conf95Column) {
		TableRow result = null;
		LinkedList<TableRow> validSamples = new LinkedList<>();
		for (TableRow sample : groupedSampleSet) {
			try {
				BigDecimal sampleMass = new BigDecimal(sample.getValue(sampleMassColumn));
				if (null != sample.getValue(valueColumn) && cutoffMassInGrams.compareTo(sampleMass) <= 0) {
					validSamples.add(sample);
				}
			} catch (Exception e) {
				// Unparseable/missing mass: the sample simply doesn't count as valid.
				log.trace("can't work with sample mass");
			}
		}
		int sampleSetSize = groupedSampleSet.size();
		boolean isValid = false;
		for (SampleSetRule rule : rules) {
			if (rule.sampleSetSize.contains(sampleSetSize)
					&& rule.minValidSamples.contains(validSamples.size())) {
				isValid = true;
			}
		}
		if (!isValid) {
			// Too few valid samples for a set of this size: emit nothing.
			validSamples.clear();
		}
		if (0 < validSamples.size()) {
			ColumnGrouping inColGroup = validSamples.peek().getColumns();
			ColumnGrouping addedColGroup = new ColumnGrouping(inColGroup.getPrimaryKey(), Arrays.asList(new Column[] {
				inColGroup.getPrimaryKey(),
				errorColumn
			}));
			// Output columns = input columns plus the error column.
			ColumnGrouping outColGroup = ColumnGrouping.join(Arrays.asList(new ColumnGrouping[] {inColGroup, addedColGroup}));
			// Columns for which a cumulative moving average is computed.
			ColumnGrouping cumulColGroup = new ColumnGrouping(timeColumn, Arrays.asList(new Column[] {
				timeColumn,
				valueColumn,
				sampleMassColumn
			}));
			// Working map: raw column values plus prefixed intermediate statistics.
			Map<Column, String> modMap = new HashMap<>();
			String cuMeanColPrefix = "CUMEAN_";
			String lastCuMeanColPrefix = "LASTCUMEAN_";
			String stDevColPrefix = "STDEV_";
			String lastQColPrefix = "LASTQ_";
			String stErrColPrefix = "STERR_";
			String conf95ColPrefix = "CONF95_";
			int n = 0;
			for (TableRow sample : validSamples) {
				// Later samples overwrite earlier raw values; averaged columns are fixed up below.
				for (Column col : inColGroup) {
					modMap.put(col, sample.getValue(col));
				}
				n++;
				// Largest decimal scale seen in this sample's averaged columns
				// (NOTE: reset for every sample, not across the whole set).
				int largestScale = 0;
				for (Column col : cumulColGroup) {
					//CUMULATIVE MOVING AVERAGE
					//http://en.wikipedia.org/wiki/Moving_average#Cumulative_moving_average
					Column lastCuMeanColumn = new SimpleColumn(lastCuMeanColPrefix + col.getName());
					Column cuMeanColumn = new SimpleColumn(cuMeanColPrefix + col.getName());
					String lastCumulativeValue = null;
					String cumulativeValue = modMap.get(cuMeanColumn);
					try {
						BigDecimal x1 = new BigDecimal(sample.getValue(col));
						largestScale = x1.scale() > largestScale ? x1.scale() : largestScale;
						BigDecimal cmaN = BigDecimal.ZERO;
						if (null != cumulativeValue) {
							cmaN = new BigDecimal(cumulativeValue);
						}
						lastCumulativeValue = cmaN.toPlainString();
						// CMA(n+1) = CMA(n) + (x - CMA(n)) / (n+1)
						BigDecimal top = x1.subtract(cmaN);
						BigDecimal cmaN1 = cmaN.add(top.divide(new BigDecimal(n), RoundingMode.HALF_EVEN));
						cumulativeValue = cmaN1.toPlainString();
					} catch (Exception e) {
						log.trace("could not average value", e);
					}
					modMap.put(cuMeanColumn, cumulativeValue);
					modMap.put(lastCuMeanColumn, lastCumulativeValue);
				}
				//Widen the scale by 2 beyond the largest input scale so the derived
				//error statistics keep enough precision.
				largestScale+=2;
				//STANDARD DEVIATION
				//http://en.wikipedia.org/wiki/Standard_deviation#Corrected_sample_standard_deviation
				//http://en.wikipedia.org/wiki/Standard_deviation#Rapid_calculation_methods
				try {
					Column lastCuMeanColumn = new SimpleColumn(lastCuMeanColPrefix + valueColumn.getName());
					Column cuMeanColumn = new SimpleColumn(cuMeanColPrefix + valueColumn.getName());
					Column lastQColumn = new SimpleColumn(lastQColPrefix + valueColumn.getName());
					Column stdDevColumn = new SimpleColumn(stDevColPrefix + valueColumn.getName());
					BigDecimal lastCuMeanValue = BigDecimal.ZERO;
					if (null != modMap.get(lastCuMeanColumn)) {
						lastCuMeanValue = new BigDecimal(modMap.get(lastCuMeanColumn));
					}
					BigDecimal cuMeanValue = null;
					if (null != modMap.get(cuMeanColumn)) {
						cuMeanValue = new BigDecimal(modMap.get(cuMeanColumn));
					}
					BigDecimal lastQValue = BigDecimal.ZERO;
					if (null != modMap.get(lastQColumn)) {
						lastQValue = new BigDecimal(modMap.get(lastQColumn));
					}
					BigDecimal stdDevValue = null;
					if (null != modMap.get(stdDevColumn)) {
						stdDevValue = new BigDecimal(modMap.get(stdDevColumn));
					}
					BigDecimal currVal = null;
					if (null != sample.getValue(valueColumn)) {
						currVal = new BigDecimal(sample.getValue(valueColumn));
					}
					BigDecimal qValue = null;
					if (null != currVal) {
						// Welford recurrence: Q(k) = Q(k-1) + (x - M(k-1)) * (x - M(k)).
						BigDecimal xAk1 = currVal.subtract(lastCuMeanValue);
						BigDecimal xAk = currVal.subtract(cuMeanValue);
						qValue = lastQValue.add(xAk1.multiply(xAk));
						// When n == 1 this divides by zero; the exception lands in the
						// catch below and the stddev stays unset for that iteration.
						BigDecimal sampleVariance = qValue.divide(new BigDecimal(n - 1), largestScale, RoundingMode.HALF_EVEN).setScale(largestScale, BigDecimal.ROUND_HALF_EVEN);
						stdDevValue = new BigDecimal(Math.sqrt(sampleVariance.doubleValue())).setScale(largestScale, BigDecimal.ROUND_HALF_EVEN);
					} else {
						// Valid samples were filtered on non-null value, so this shouldn't happen.
						log.error("BAD THINGS! We should never have a null value in this area!");
					}
					String qResult = null;
					if (null != qValue) {
						qResult = qValue.toPlainString();
					}
					modMap.put(lastQColumn, qResult);
					String stDevResult = null;
					if (null != stdDevValue) {
						stDevResult = stdDevValue.toPlainString();
					}
					modMap.put(stdDevColumn, stDevResult);
				} catch (Exception e) {
					log.trace("could not calculate Standard Deviation", e);
				}
				//STANDARD ERROR
				//http://en.wikipedia.org/wiki/Standard_error#Standard_error_of_the_mean
				try {
					Column stdDevColumn = new SimpleColumn(stDevColPrefix + valueColumn.getName());
					Column stdErrColumn = new SimpleColumn(stErrColPrefix + valueColumn.getName());
					BigDecimal stdDevValue = null;
					if (null != modMap.get(stdDevColumn)) {
						stdDevValue = new BigDecimal(modMap.get(stdDevColumn));
					}
					// SE = stddev / sqrt(n). A null stddev NPEs here and is swallowed below.
					BigDecimal stdErrValue = stdDevValue.divide(new BigDecimal(Math.sqrt(n)), largestScale, RoundingMode.HALF_EVEN).setScale(largestScale, BigDecimal.ROUND_HALF_EVEN);
					String stdErrResult = null;
					if (null != stdErrValue) {
						stdErrResult = stdErrValue.toPlainString();
					}
					modMap.put(stdErrColumn, stdErrResult);
				} catch (Exception e) {
					log.trace("could not calculate Standard Error", e);
				}
				// 95% CONFIDENCE INTERVAL: 1.96 * standard error.
				try {
					Column stdErrColumn = new SimpleColumn(stErrColPrefix + valueColumn.getName());
					Column conf95Col = new SimpleColumn(conf95ColPrefix + valueColumn.getName());
					BigDecimal stdErrValue = null;
					if (null != modMap.get(stdErrColumn)) {
						stdErrValue = new BigDecimal(modMap.get(stdErrColumn));
					}
					BigDecimal confidenceInterval = new BigDecimal(1.96);
					// A null stderr NPEs here and is swallowed below.
					BigDecimal conf95Value = confidenceInterval.multiply(stdErrValue).setScale(largestScale, BigDecimal.ROUND_HALF_EVEN);
					String conf95Result = null;
					if (null != conf95Value) {
						conf95Result = conf95Value.toPlainString();
					}
					modMap.put(conf95Col, conf95Result);
				} catch (Exception e) {
					log.trace("could not calculate 95% confidence interval");
				}
			}
			// Replace each output column's raw value with its cumulative mean where one exists.
			for (Column col : outColGroup) {
				String val = modMap.get(new SimpleColumn(cuMeanColPrefix + col.getName()));
				if (null == val) {
					val = modMap.get(col);
				}
				modMap.put(col, val);
			}
			// Surface the final stderr / 95% CI under their public output columns.
			modMap.put(errorColumn, modMap.get(new SimpleColumn(stErrColPrefix + valueColumn.getName())));
			modMap.put(conf95Column, modMap.get(new SimpleColumn(conf95ColPrefix + valueColumn.getName())));
			result = new TableRow(outColGroup, modMap);
		}
		return result;
	}
	/**
	 * WARNING SIDE-EFFECTS
	 * Removes from the head of the queue every row whose sample-set value equals the
	 * given sample set. Stops at the first non-matching row.
	 */
	private void dropSampleSet(LinkedList<TableRow> queuedRows, String sampleSet) {
		if (null != queuedRows && null != sampleSet) {
			while (queuedRows.size() > 0
					&& sampleSet.equals(queuedRows.peekFirst().getValue(sampleSetColumn))) {
				queuedRows.pollFirst();
			}
		}
	}
	// Delegates to the wrapped result set.
	@Override
	public String getCursorName() throws SQLException {
		return this.in.getCursorName();
	}
	/**
	 * A validity rule: a sample set whose size falls in {@code sampleSetSize} needs
	 * {@code minValidSamples} valid samples to be averaged. Value object with
	 * equals/hashCode over both ranges.
	 */
	public static final class SampleSetRule {
		public final Range<Integer> sampleSetSize;
		public final Range<Integer> minValidSamples;
		public SampleSetRule(Range<Integer> sampleSetSize, Range<Integer> minValidSamples) {
			this.sampleSetSize = sampleSetSize;
			this.minValidSamples = minValidSamples;
		}
		@Override
		public boolean equals(Object obj) {
			if (obj == null) { return false; }
			if (obj == this) { return true; }
			if (obj instanceof SampleSetRule) {
				SampleSetRule rhs = (SampleSetRule) obj;
				return new EqualsBuilder()
						.append(this.sampleSetSize, rhs.sampleSetSize)
						.append(this.minValidSamples, rhs.minValidSamples)
						.isEquals();
			}
			return false;
		}
		@Override
		public int hashCode() {
			return new HashCodeBuilder()
					.append(this.sampleSetSize)
					.append(this.minValidSamples)
					.toHashCode();
		}
	}
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Yahoo! Inc., Seiji Sogabe,
* Andrew Bayer
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.model;
import hudson.PluginWrapper;
import hudson.PluginManager;
import hudson.model.UpdateCenter.UpdateCenterJob;
import hudson.lifecycle.Lifecycle;
import hudson.util.IOUtils;
import hudson.util.TextFile;
import hudson.util.VersionNumber;
import static hudson.util.TimeUnit2.DAYS;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.jvnet.hudson.crypto.CertificateUtil;
import org.jvnet.hudson.crypto.SignatureOutputStream;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.commons.io.output.TeeOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.ByteArrayInputStream;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.HashMap;
import java.util.Set;
import java.util.concurrent.Future;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.security.GeneralSecurityException;
import java.security.MessageDigest;
import java.security.DigestOutputStream;
import java.security.Signature;
import java.security.cert.X509Certificate;
import java.security.cert.CertificateFactory;
import java.security.cert.TrustAnchor;
import com.trilead.ssh2.crypto.Base64;
import javax.servlet.ServletContext;
/**
* Source of the update center information, like "http://jenkins-ci.org/update-center.json"
*
* <p>
* Jenkins can have multiple {@link UpdateSite}s registered in the system, so that it can pick up plugins
* from different locations.
*
* @author Andrew Bayer
* @author Kohsuke Kawaguchi
* @since 1.333
*/
public class UpdateSite {
/**
* What's the time stamp of data file?
*/
private transient long dataTimestamp = -1;
/**
* When was the last time we asked a browser to check the data for us?
*
* <p>
* There's normally some delay between when we send HTML that includes the check code,
* until we get the data back, so this variable is used to avoid asking too many browseres
* all at once.
*/
private transient volatile long lastAttempt = -1;
/**
* ID string for this update source.
*/
private final String id;
/**
* Path to <tt>update-center.json</tt>, like <tt>http://jenkins-ci.org/update-center.json</tt>.
*/
private final String url;
    /**
     * Creates an update site.
     *
     * @param id  unique ID string for this update source.
     * @param url URL of the <tt>update-center.json</tt> file this site serves.
     */
    public UpdateSite(String id, String url) {
        this.id = id;
        this.url = url;
    }
/**
* When read back from XML, initialize them back to -1.
*/
private Object readResolve() {
dataTimestamp = lastAttempt = -1;
return this;
}
    /**
     * Gets the ID string that uniquely identifies this update source.
     */
    public String getId() {
        return id;
    }
    /**
     * Returns the time (milliseconds since the epoch) the update center data file was
     * last obtained, or -1 if it has not been fetched in this session.
     */
    public long getDataTimestamp() {
        return dataTimestamp;
    }
/**
* This is the endpoint that receives the update center data file from the browser.
*/
public void doPostBack(StaplerRequest req, StaplerResponse rsp) throws IOException, GeneralSecurityException {
dataTimestamp = System.currentTimeMillis();
String json = IOUtils.toString(req.getInputStream(),"UTF-8");
JSONObject o = JSONObject.fromObject(json);
int v = o.getInt("updateCenterVersion");
if(v !=1) {
LOGGER.warning("Unrecognized update center version: "+v);
return;
}
if (signatureCheck)
verifySignature(o);
LOGGER.info("Obtained the latest update center data file for UpdateSource "+ id);
getDataFile().write(json);
rsp.setContentType("text/plain"); // So browser won't try to parse response
}
    /**
     * Verifies the signature in the update center data file.
     *
     * The "signature" block is removed from the JSON (NOTE: this mutates the argument),
     * the certificate chain it carries is validated, and the canonical form of the
     * remaining JSON is both digested (SHA-1, sanity check against c14n bugs) and
     * signature-verified (SHA1withRSA against the first certificate).
     *
     * @return true if both the digest and the signature check out; false otherwise
     *         (the reason is logged at SEVERE).
     */
    private boolean verifySignature(JSONObject o) throws GeneralSecurityException, IOException {
        JSONObject signature = o.getJSONObject("signature");
        if (signature.isNullObject()) {
            LOGGER.severe("No signature block found");
            return false;
        }
        // The signature block must not take part in the canonicalization below.
        o.remove("signature");

        List<X509Certificate> certs = new ArrayList<X509Certificate>();
        {// load and verify certificates
            CertificateFactory cf = CertificateFactory.getInstance("X509");
            for (Object cert : o.getJSONArray("certificates")) {
                X509Certificate c = (X509Certificate) cf.generateCertificate(new ByteArrayInputStream(Base64.decode(cert.toString().toCharArray())));
                c.checkValidity();
                certs.add(c);
            }

            // all default root CAs in JVM are trusted, plus certs bundled in Jenkins
            Set<TrustAnchor> anchors = CertificateUtil.getDefaultRootCAs();
            ServletContext context = Hudson.getInstance().servletContext;
            for (String cert : (Set<String>) context.getResourcePaths("/WEB-INF/update-center-rootCAs")) {
                if (cert.endsWith(".txt")) continue; // skip text files that are meant to be documentation
                anchors.add(new TrustAnchor((X509Certificate)cf.generateCertificate(context.getResourceAsStream(cert)),null));
            }
            // NOTE(review): 'anchors' is built but never passed to validatePath —
            // confirm CertificateUtil.validatePath(List) applies the default trust
            // anchors internally, otherwise the bundled root CAs above are unused.
            CertificateUtil.validatePath(certs);
        }

        // this is for computing a digest to check sanity
        MessageDigest sha1 = MessageDigest.getInstance("SHA1");
        DigestOutputStream dos = new DigestOutputStream(new NullOutputStream(),sha1);

        // this is for computing a signature
        Signature sig = Signature.getInstance("SHA1withRSA");
        sig.initVerify(certs.get(0));
        SignatureOutputStream sos = new SignatureOutputStream(sig);

        // Feed the canonical JSON into the digest and the signature streams at once.
        o.writeCanonical(new OutputStreamWriter(new TeeOutputStream(dos,sos),"UTF-8"));

        // did the digest match? this is not a part of the signature validation, but if we have a bug in the c14n
        // (which is more likely than someone tampering with update center), we can tell
        String computedDigest = new String(Base64.encode(sha1.digest()));
        String providedDigest = signature.getString("digest");
        if (!computedDigest.equalsIgnoreCase(providedDigest)) {
            LOGGER.severe("Digest mismatch: "+computedDigest+" vs "+providedDigest);
            return false;
        }

        if (!sig.verify(Base64.decode(signature.getString("signature").toCharArray()))) {
            LOGGER.severe("Signature in the update center doesn't match with the certificate");
            return false;
        }

        return true;
    }
    /**
     * Returns true if it's time for us to check for new version.
     *
     * Due means: updates are not disabled, the cached data is older than one day
     * (DAY — constant defined elsewhere in this file; presumably one day in
     * milliseconds — TODO confirm), and no other browser was asked within the last
     * 15 seconds. Calling this when due also records the attempt time, throttling
     * concurrent browsers.
     */
    public boolean isDue() {
        if(neverUpdate) return false;
        if(dataTimestamp==-1)
            // Fall back to the on-disk file's modification time on first call.
            dataTimestamp = getDataFile().file.lastModified();
        long now = System.currentTimeMillis();
        boolean due = now - dataTimestamp > DAY && now - lastAttempt > 15000;
        if(due) lastAttempt = now;
        return due;
    }
/**
* Loads the update center data, if any.
*
* @return null if no data is available.
*/
public Data getData() {
TextFile df = getDataFile();
if(df.exists()) {
try {
return new Data(JSONObject.fromObject(df.read()));
} catch (IOException e) {
LOGGER.log(Level.SEVERE,"Failed to parse "+df,e);
df.delete(); // if we keep this file, it will cause repeated failures
return null;
}
} else {
return null;
}
}
/**
* Returns a list of plugins that should be shown in the "available" tab.
* These are "all plugins - installed plugins".
*/
public List<Plugin> getAvailables() {
List<Plugin> r = new ArrayList<Plugin>();
Data data = getData();
if(data==null) return Collections.emptyList();
for (Plugin p : data.plugins.values()) {
if(p.getInstalled()==null)
r.add(p);
}
return r;
}
/**
* Gets the information about a specific plugin.
*
* @param artifactId
* The short name of the plugin. Corresponds to {@link PluginWrapper#getShortName()}.
*
* @return
* null if no such information is found.
*/
public Plugin getPlugin(String artifactId) {
Data dt = getData();
if(dt==null) return null;
return dt.plugins.get(artifactId);
}
/**
* Returns an "always up" server for Internet connectivity testing, or null if we are going to skip the test.
*/
public String getConnectionCheckUrl() {
Data dt = getData();
if(dt==null) return "http://www.google.com/";
return dt.connectionCheckUrl;
}
    /**
     * This is where we store the update center data.
     * The file lives under the Jenkins root directory at <tt>updates/&lt;id&gt;.json</tt>.
     */
    private TextFile getDataFile() {
        return new TextFile(new File(Hudson.getInstance().getRootDir(),
                                     "updates/" + getId()+".json"));
    }
/**
* Returns the list of plugins that are updates to currently installed ones.
*
* @return
* can be empty but never null.
*/
public List<Plugin> getUpdates() {
Data data = getData();
if(data==null) return Collections.emptyList(); // fail to determine
List<Plugin> r = new ArrayList<Plugin>();
for (PluginWrapper pw : Hudson.getInstance().getPluginManager().getPlugins()) {
Plugin p = pw.getUpdateInfo();
if(p!=null) r.add(p);
}
return r;
}
/**
* Does any of the plugin has updates?
*/
public boolean hasUpdates() {
Data data = getData();
if(data==null) return false;
for (PluginWrapper pw : Hudson.getInstance().getPluginManager().getPlugins()) {
if(!pw.isBundled() && pw.getUpdateInfo()!=null)
// do not advertize updates to bundled plugins, since we generally want users to get them
// as a part of jenkins.war updates. This also avoids unnecessary pinning of plugins.
return true;
}
return false;
}
/**
* Exposed to get rid of hardcoding of the URL that serves up update-center.json
* in Javascript.
*/
public String getUrl() {
return url;
}
/**
* Is this the legacy default update center site?
*/
public boolean isLegacyDefault() {
return id.equals("default") && url.startsWith("http://hudson-ci.org/") || url.startsWith("http://updates.hudson-labs.org/");
}
    /**
     * In-memory representation of the update center data.
     */
    public final class Data {
        /**
         * The {@link UpdateSite} ID.
         */
        public final String sourceId;
        /**
         * The latest jenkins.war.
         */
        public final Entry core;
        /**
         * Plugins in the repository, keyed by their artifact IDs.
         */
        public final Map<String,Plugin> plugins = new TreeMap<String,Plugin>(String.CASE_INSENSITIVE_ORDER);
        /**
         * If this is non-null, Jenkins is going to check the connectivity to this URL to make sure
         * the network connection is up. Null to skip the check.
         */
        public final String connectionCheckUrl;

        /**
         * Parses the update center JSON payload.
         * NOTE(review): throws NPE if the payload carries no "id" entry -- presumably the
         * update center always emits one; confirm against the update-center.json schema.
         */
        Data(JSONObject o) {
            this.sourceId = (String)o.get("id");
            // only the default update site carries information about the core (jenkins.war)
            if (sourceId.equals("default")) {
                core = new Entry(sourceId, o.getJSONObject("core"));
            }
            else {
                core = null;
            }
            // unchecked: json-lib's entrySet() is raw; each value is expected to be a JSONObject
            for(Map.Entry<String,JSONObject> e : (Set<Map.Entry<String,JSONObject>>)o.getJSONObject("plugins").entrySet()) {
                plugins.put(e.getKey(),new Plugin(sourceId, e.getValue()));
            }
            connectionCheckUrl = (String)o.get("connectionCheckUrl");
        }
        /**
         * Is there a new version of the core?
         */
        public boolean hasCoreUpdates() {
            return core != null && core.isNewerThan(Hudson.VERSION);
        }
        /**
         * Do we support upgrade?
         */
        public boolean canUpgrade() {
            return Lifecycle.get().canRewriteHudsonWar();
        }
    }
public static class Entry {
/**
* {@link UpdateSite} ID.
*/
public final String sourceId;
/**
* Artifact ID.
*/
public final String name;
/**
* The version.
*/
public final String version;
/**
* Download URL.
*/
public final String url;
public Entry(String sourceId, JSONObject o) {
this.sourceId = sourceId;
this.name = o.getString("name");
this.version = o.getString("version");
this.url = o.getString("url");
}
/**
* Checks if the specified "current version" is older than the version of this entry.
*
* @param currentVersion
* The string that represents the version number to be compared.
* @return
* true if the version listed in this entry is newer.
* false otherwise, including the situation where the strings couldn't be parsed as version numbers.
*/
public boolean isNewerThan(String currentVersion) {
try {
return new VersionNumber(currentVersion).compareTo(new VersionNumber(version)) < 0;
} catch (IllegalArgumentException e) {
// couldn't parse as the version number.
return false;
}
}
}
public final class Plugin extends Entry {
/**
* Optional URL to the Wiki page that discusses this plugin.
*/
public final String wiki;
/**
* Human readable title of the plugin, taken from Wiki page.
* Can be null.
*
* <p>
* beware of XSS vulnerability since this data comes from Wiki
*/
public final String title;
/**
* Optional excerpt string.
*/
public final String excerpt;
/**
* Optional version # from which this plugin release is configuration-compatible.
*/
public final String compatibleSinceVersion;
/**
* Version of Jenkins core this plugin was compiled against.
*/
public final String requiredCore;
/**
* Categories for grouping plugins, taken from labels assigned to wiki page.
* Can be null.
*/
public final String[] categories;
/**
* Dependencies of this plugin.
*/
public final Map<String,String> dependencies = new HashMap<String,String>();
@DataBoundConstructor
public Plugin(String sourceId, JSONObject o) {
super(sourceId, o);
this.wiki = get(o,"wiki");
this.title = get(o,"title");
this.excerpt = get(o,"excerpt");
this.compatibleSinceVersion = get(o,"compatibleSinceVersion");
this.requiredCore = get(o,"requiredCore");
this.categories = o.has("labels") ? (String[])o.getJSONArray("labels").toArray(new String[0]) : null;
for(Object jo : o.getJSONArray("dependencies")) {
JSONObject depObj = (JSONObject) jo;
// Make sure there's a name attribute, that that name isn't maven-plugin - we ignore that one -
// and that the optional value isn't true.
if (get(depObj,"name")!=null
&& !get(depObj,"name").equals("maven-plugin")
&& get(depObj,"optional").equals("false")) {
dependencies.put(get(depObj,"name"), get(depObj,"version"));
}
}
}
private String get(JSONObject o, String prop) {
if(o.has(prop))
return o.getString(prop);
else
return null;
}
public String getDisplayName() {
if(title!=null) return title;
return name;
}
/**
* If some version of this plugin is currently installed, return {@link PluginWrapper}.
* Otherwise null.
*/
public PluginWrapper getInstalled() {
PluginManager pm = Hudson.getInstance().getPluginManager();
return pm.getPlugin(name);
}
/**
* If the plugin is already installed, and the new version of the plugin has a "compatibleSinceVersion"
* value (i.e., it's only directly compatible with that version or later), this will check to
* see if the installed version is older than the compatible-since version. If it is older, it'll return false.
* If it's not older, or it's not installed, or it's installed but there's no compatibleSinceVersion
* specified, it'll return true.
*/
public boolean isCompatibleWithInstalledVersion() {
PluginWrapper installedVersion = getInstalled();
if (installedVersion != null) {
if (compatibleSinceVersion != null) {
if (new VersionNumber(installedVersion.getVersion())
.isOlderThan(new VersionNumber(compatibleSinceVersion))) {
return false;
}
}
}
return true;
}
/**
* Returns a list of dependent plugins which need to be installed or upgraded for this plugin to work.
*/
public List<Plugin> getNeededDependencies() {
List<Plugin> deps = new ArrayList<Plugin>();
for(Map.Entry<String,String> e : dependencies.entrySet()) {
Plugin depPlugin = Hudson.getInstance().getUpdateCenter().getPlugin(e.getKey());
VersionNumber requiredVersion = new VersionNumber(e.getValue());
// Is the plugin installed already? If not, add it.
PluginWrapper current = depPlugin.getInstalled();
if (current ==null) {
deps.add(depPlugin);
}
// If the dependency plugin is installed, is the version we depend on newer than
// what's installed? If so, upgrade.
else if (current.isOlderThan(requiredVersion)) {
deps.add(depPlugin);
}
}
return deps;
}
public boolean isForNewerHudson() {
try {
return requiredCore!=null && new VersionNumber(requiredCore).isNewerThan(
new VersionNumber(Hudson.VERSION.replaceFirst("SHOT *\\(private.*\\)", "SHOT")));
} catch (NumberFormatException nfe) {
return true; // If unable to parse version
}
}
/**
* @deprecated as of 1.326
* Use {@link #deploy()}.
*/
public void install() {
deploy();
}
/**
* Schedules the installation of this plugin.
*
* <p>
* This is mainly intended to be called from the UI. The actual installation work happens
* asynchronously in another thread.
*/
public Future<UpdateCenterJob> deploy() {
Hudson.getInstance().checkPermission(Hudson.ADMINISTER);
UpdateCenter uc = Hudson.getInstance().getUpdateCenter();
for (Plugin dep : getNeededDependencies()) {
LOGGER.log(Level.WARNING, "Adding dependent install of " + dep.name + " for plugin " + name);
dep.deploy();
}
return uc.addJob(uc.new InstallationJob(this, UpdateSite.this, Hudson.getAuthentication()));
}
/**
* Schedules the downgrade of this plugin.
*/
public Future<UpdateCenterJob> deployBackup() {
Hudson.getInstance().checkPermission(Hudson.ADMINISTER);
UpdateCenter uc = Hudson.getInstance().getUpdateCenter();
return uc.addJob(uc.new PluginDowngradeJob(this, UpdateSite.this, Hudson.getAuthentication()));
}
/**
* Making the installation web bound.
*/
public void doInstall(StaplerResponse rsp) throws IOException {
deploy();
rsp.sendRedirect2("../..");
}
/**
* Performs the downgrade of the plugin.
*/
public void doDowngrade(StaplerResponse rsp) throws IOException {
deployBackup();
rsp.sendRedirect2("../..");
}
}
    // cached update-center data older than this is considered stale; see isDue()
    private static final long DAY = DAYS.toMillis(1);

    private static final Logger LOGGER = Logger.getLogger(UpdateSite.class.getName());

    // The name uses UpdateCenter for compatibility reason.
    // Escape hatch: setting the system property "<UpdateCenter FQCN>.never" to true
    // disables periodic update checks entirely (isDue() then always returns false).
    public static boolean neverUpdate = Boolean.getBoolean(UpdateCenter.class.getName()+".never");

    /**
     * Off by default until we know this is reasonably working.
     */
    public static boolean signatureCheck = Boolean.getBoolean(UpdateCenter.class.getName()+".signatureCheck");
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices.recovery;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFormatTooNewException;
import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.util.CancellableThreads;
import org.elasticsearch.common.util.concurrent.AbstractRefCounted;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.MapperException;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardNotRecoveringException;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.store.StoreFileMetaData;
import org.elasticsearch.index.translog.Translog;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import java.util.function.LongConsumer;
/**
 * Represents a recovery where the current node is the target node of the recovery. To track recoveries in a central place, instances of
 * this class are created through {@link RecoveriesCollection}.
 */
public class RecoveryTarget extends AbstractRefCounted implements RecoveryTargetHandler {
    private final Logger logger;
    // hands out a node-wide unique id per recovery attempt; see recoveryId()
    private static final AtomicLong idGenerator = new AtomicLong();
    private static final String RECOVERY_PREFIX = "recovery.";
    private final ShardId shardId;
    private final long recoveryId;
    private final IndexShard indexShard;
    private final DiscoveryNode sourceNode;
    // prefix ("recovery.<uuid>.") prepended to every file received during this recovery
    private final String tempFilePrefix;
    private final Store store;
    private final PeerRecoveryTargetService.RecoveryListener listener;
    private final LongConsumer ensureClusterStateVersionCallback;
    // flips to true exactly once (markAsDone/fail/cancel/resetRecovery); guards release of the initial ref
    private final AtomicBoolean finished = new AtomicBoolean();
    // outputs currently being written, keyed by original (non-temporary) file name
    private final ConcurrentMap<String, IndexOutput> openIndexOutputs = ConcurrentCollections.newConcurrentMap();
    private final CancellableThreads cancellableThreads;
    // last time this status was accessed
    private volatile long lastAccessTime = System.nanoTime();
    // latch that can be used to blockingly wait for RecoveryTarget to be closed
    private final CountDownLatch closedLatch = new CountDownLatch(1);
    // maps temporary file name to final file name; emptied by renameAllTempFiles()
    private final Map<String, String> tempFileNames = ConcurrentCollections.newConcurrentMap();

    /**
     * Creates a new recovery target object that represents a recovery to the provided shard.
     *
     * @param indexShard                        local shard where we want to recover to
     * @param sourceNode                        source node of the recovery where we recover from
     * @param listener                          called when recovery is completed/failed
     * @param ensureClusterStateVersionCallback callback to ensure that the current node is at least on a cluster state with the provided
     *                                          version; necessary for primary relocation so that new primary knows about all other ongoing
     *                                          replica recoveries when replicating documents (see {@link RecoverySourceHandler})
     */
    public RecoveryTarget(final IndexShard indexShard,
                          final DiscoveryNode sourceNode,
                          final PeerRecoveryTargetService.RecoveryListener listener,
                          final LongConsumer ensureClusterStateVersionCallback) {
        super("recovery_status");
        this.cancellableThreads = new CancellableThreads();
        this.recoveryId = idGenerator.incrementAndGet();
        this.listener = listener;
        this.logger = Loggers.getLogger(getClass(), indexShard.indexSettings().getSettings(), indexShard.shardId());
        this.indexShard = indexShard;
        this.sourceNode = sourceNode;
        this.shardId = indexShard.shardId();
        this.tempFilePrefix = RECOVERY_PREFIX + UUIDs.base64UUID() + ".";
        this.store = indexShard.store();
        this.ensureClusterStateVersionCallback = ensureClusterStateVersionCallback;
        // make sure the store is not released until we are done.
        store.incRef();
        indexShard.recoveryStats().incCurrentAsTarget();
    }

    /**
     * Returns a fresh recovery target to retry recovery from the same source node onto the same shard and using the same listener.
     *
     * @return a copy of this recovery target
     */
    public RecoveryTarget retryCopy() {
        return new RecoveryTarget(indexShard, sourceNode, listener, ensureClusterStateVersionCallback);
    }

    /** the id that uniquely identifies this recovery attempt on this node */
    public long recoveryId() {
        return recoveryId;
    }

    public ShardId shardId() {
        return shardId;
    }

    /** the shard being recovered into; fails if this target has already been closed */
    public IndexShard indexShard() {
        ensureRefCount();
        return indexShard;
    }

    /** the node we are recovering from */
    public DiscoveryNode sourceNode() {
        return this.sourceNode;
    }

    /** the recovery state of the shard being recovered */
    public RecoveryState state() {
        return indexShard.recoveryState();
    }

    public CancellableThreads cancellableThreads() {
        return cancellableThreads;
    }

    /** return the last time this RecoveryStatus was used (based on System.nanoTime() */
    public long lastAccessTime() {
        return lastAccessTime;
    }

    /** sets the lasAccessTime flag to now */
    public void setLastAccessTime() {
        lastAccessTime = System.nanoTime();
    }

    /** the shard's store; fails if this target has already been closed */
    public Store store() {
        ensureRefCount();
        return store;
    }

    /** current stage of the shard's recovery state */
    public RecoveryState.Stage stage() {
        return state().getStage();
    }

    /** renames all temporary files to their true name, potentially overriding existing files */
    public void renameAllTempFiles() throws IOException {
        ensureRefCount();
        store.renameTempFilesSafe(tempFileNames);
    }

    /**
     * Closes the current recovery target and waits up to a certain timeout for resources to be freed.
     * Returns true if resetting the recovery was successful, false if the recovery target is already cancelled / failed or marked as done.
     */
    boolean resetRecovery(CancellableThreads newTargetCancellableThreads) throws IOException {
        if (finished.compareAndSet(false, true)) {
            try {
                logger.debug("reset of recovery with shard {} and id [{}]", shardId, recoveryId);
            } finally {
                // release the initial reference. recovery files will be cleaned as soon as ref count goes to zero, potentially now.
                decRef();
            }
            try {
                // wait (cancellably, on behalf of the new target) until closeInternal() has run
                newTargetCancellableThreads.execute(closedLatch::await);
            } catch (CancellableThreads.ExecutionCancelledException e) {
                logger.trace("new recovery target cancelled for shard {} while waiting on old recovery target with id [{}] to close",
                    shardId, recoveryId);
                return false;
            }
            RecoveryState.Stage stage = indexShard.recoveryState().getStage();
            if (indexShard.recoveryState().getPrimary() && (stage == RecoveryState.Stage.FINALIZE || stage == RecoveryState.Stage.DONE)) {
                // once primary relocation has moved past the finalization step, the relocation source can be moved to RELOCATED state
                // and start indexing as primary into the target shard (see TransportReplicationAction). Resetting the target shard in this
                // state could mean that indexing is halted until the recovery retry attempt is completed and could also destroy existing
                // documents indexed and acknowledged before the reset.
                assert stage != RecoveryState.Stage.DONE : "recovery should not have completed when it's being reset";
                throw new IllegalStateException("cannot reset recovery as previous attempt made it past finalization step");
            }
            indexShard.performRecoveryRestart();
            return true;
        }
        return false;
    }

    /**
     * cancel the recovery. calling this method will clean temporary files and release the store
     * unless this object is in use (in which case it will be cleaned once all ongoing users call
     * {@link #decRef()}
     * <p>
     * if {@link #cancellableThreads()} was used, the threads will be interrupted.
     */
    public void cancel(String reason) {
        if (finished.compareAndSet(false, true)) {
            try {
                logger.debug("recovery canceled (reason: [{}])", reason);
                cancellableThreads.cancel(reason);
            } finally {
                // release the initial reference. recovery files will be cleaned as soon as ref count goes to zero, potentially now
                decRef();
            }
        }
    }

    /**
     * fail the recovery and call listener
     *
     * @param e                exception that encapsulating the failure
     * @param sendShardFailure indicates whether to notify the master of the shard failure
     */
    public void fail(RecoveryFailedException e, boolean sendShardFailure) {
        if (finished.compareAndSet(false, true)) {
            try {
                notifyListener(e, sendShardFailure);
            } finally {
                try {
                    cancellableThreads.cancel("failed recovery [" + ExceptionsHelper.stackTrace(e) + "]");
                } finally {
                    // release the initial reference. recovery files will be cleaned as soon as ref count goes to zero, potentially now
                    decRef();
                }
            }
        }
    }

    /** forwards the failure to the registered recovery listener */
    public void notifyListener(RecoveryFailedException e, boolean sendShardFailure) {
        listener.onRecoveryFailure(state(), e, sendShardFailure);
    }

    /** mark the current recovery as done */
    public void markAsDone() {
        if (finished.compareAndSet(false, true)) {
            assert tempFileNames.isEmpty() : "not all temporary files are renamed";
            try {
                // this might still throw an exception ie. if the shard is CLOSED due to some other event.
                // it's safer to decrement the reference in a try finally here.
                indexShard.postRecovery("peer recovery done");
            } finally {
                // release the initial reference. recovery files will be cleaned as soon as ref count goes to zero, potentially now
                decRef();
            }
            listener.onRecoveryDone(state());
        }
    }

    /** Get a temporary name for the provided file name. */
    public String getTempNameForFile(String origFile) {
        return tempFilePrefix + origFile;
    }

    public IndexOutput getOpenIndexOutput(String key) {
        ensureRefCount();
        return openIndexOutputs.get(key);
    }

    /** remove and {@link org.apache.lucene.store.IndexOutput} for a given file. It is the caller's responsibility to close it */
    public IndexOutput removeOpenIndexOutputs(String name) {
        ensureRefCount();
        return openIndexOutputs.remove(name);
    }

    /**
     * Creates an {@link org.apache.lucene.store.IndexOutput} for the given file name. Note that the
     * IndexOutput actually point at a temporary file.
     * <p>
     * Note: You can use {@link #getOpenIndexOutput(String)} with the same filename to retrieve the same IndexOutput
     * at a later stage
     */
    public IndexOutput openAndPutIndexOutput(String fileName, StoreFileMetaData metaData, Store store) throws IOException {
        ensureRefCount();
        String tempFileName = getTempNameForFile(fileName);
        if (tempFileNames.containsKey(tempFileName)) {
            throw new IllegalStateException("output for file [" + fileName + "] has already been created");
        }
        // add first, before it's created
        tempFileNames.put(tempFileName, fileName);
        IndexOutput indexOutput = store.createVerifyingOutput(tempFileName, metaData, IOContext.DEFAULT);
        openIndexOutputs.put(fileName, indexOutput);
        return indexOutput;
    }

    /** Invoked by {@link AbstractRefCounted} when the ref count reaches zero: closes outputs, deletes temp files, releases the store. */
    @Override
    protected void closeInternal() {
        try {
            // clean open index outputs
            Iterator<Entry<String, IndexOutput>> iterator = openIndexOutputs.entrySet().iterator();
            while (iterator.hasNext()) {
                Map.Entry<String, IndexOutput> entry = iterator.next();
                logger.trace("closing IndexOutput file [{}]", entry.getValue());
                try {
                    entry.getValue().close();
                } catch (Exception e) {
                    logger.debug(
                        (Supplier<?>) () -> new ParameterizedMessage("error while closing recovery output [{}]", entry.getValue()), e);
                }
                iterator.remove();
            }
            // trash temporary files
            for (String file : tempFileNames.keySet()) {
                logger.trace("cleaning temporary file [{}]", file);
                store.deleteQuiet(file);
            }
        } finally {
            // free store. increment happens in constructor
            store.decRef();
            indexShard.recoveryStats().decCurrentAsTarget();
            closedLatch.countDown();
        }
    }

    @Override
    public String toString() {
        return shardId + " [" + recoveryId + "]";
    }

    private void ensureRefCount() {
        if (refCount() <= 0) {
            throw new ElasticsearchException("RecoveryStatus is used but it's refcount is 0. Probably a mismatch between incRef/decRef " +
                "calls");
        }
    }

    /*** Implementation of {@link RecoveryTargetHandler } */

    @Override
    public void prepareForTranslogOperations(int totalTranslogOps) throws IOException {
        state().getTranslog().totalOperations(totalTranslogOps);
        indexShard().skipTranslogRecovery();
    }

    @Override
    public void finalizeRecovery(final long globalCheckpoint) {
        indexShard().updateGlobalCheckpointOnReplica(globalCheckpoint);
        final IndexShard indexShard = indexShard();
        indexShard.finalizeRecovery();
    }

    @Override
    public void ensureClusterStateVersion(long clusterStateVersion) {
        ensureClusterStateVersionCallback.accept(clusterStateVersion);
    }

    @Override
    public long indexTranslogOperations(List<Translog.Operation> operations, int totalTranslogOps) throws MapperException, IOException {
        final RecoveryState.Translog translog = state().getTranslog();
        translog.totalOperations(totalTranslogOps);
        assert indexShard().recoveryState() == state();
        if (indexShard().state() != IndexShardState.RECOVERING) {
            throw new IndexShardNotRecoveringException(shardId, indexShard().state());
        }
        // first convert all translog operations to engine operations to check for mapping updates
        List<Engine.Operation> engineOps = operations.stream().map(
            op -> {
                Engine.Operation engineOp = indexShard().convertToEngineOp(op, Engine.Operation.Origin.PEER_RECOVERY);
                if (engineOp instanceof Engine.Index && ((Engine.Index) engineOp).parsedDoc().dynamicMappingsUpdate() != null) {
                    throw new MapperException("mapping updates are not allowed (type: [" + engineOp.type() + "], id: [" +
                        ((Engine.Index) engineOp).id() + "])");
                }
                return engineOp;
            }
        ).collect(Collectors.toList());
        // actually apply engine operations
        for (Engine.Operation engineOp : engineOps) {
            indexShard().applyOperation(engineOp);
            translog.incrementRecoveredOperations();
        }
        indexShard().sync();
        return indexShard().getLocalCheckpoint();
    }

    @Override
    public void receiveFileInfo(List<String> phase1FileNames,
                                List<Long> phase1FileSizes,
                                List<String> phase1ExistingFileNames,
                                List<Long> phase1ExistingFileSizes,
                                int totalTranslogOps) {
        final RecoveryState.Index index = state().getIndex();
        // files already present locally ("reused" = true) ...
        for (int i = 0; i < phase1ExistingFileNames.size(); i++) {
            index.addFileDetail(phase1ExistingFileNames.get(i), phase1ExistingFileSizes.get(i), true);
        }
        // ... and files that will be transferred from the source ("reused" = false)
        for (int i = 0; i < phase1FileNames.size(); i++) {
            index.addFileDetail(phase1FileNames.get(i), phase1FileSizes.get(i), false);
        }
        state().getTranslog().totalOperations(totalTranslogOps);
        state().getTranslog().totalOperationsOnStart(totalTranslogOps);
    }

    @Override
    public void cleanFiles(int totalTranslogOps, Store.MetadataSnapshot sourceMetaData) throws IOException {
        state().getTranslog().totalOperations(totalTranslogOps);
        // first, we go and move files that were created with the recovery id suffix to
        // the actual names, its ok if we have a corrupted index here, since we have replicas
        // to recover from in case of a full cluster shutdown just when this code executes...
        renameAllTempFiles();
        final Store store = store();
        try {
            store.cleanupAndVerify("recovery CleanFilesRequestHandler", sourceMetaData);
        } catch (CorruptIndexException | IndexFormatTooNewException | IndexFormatTooOldException ex) {
            // this is a fatal exception at this stage.
            // this means we transferred files from the remote that have not be checksummed and they are
            // broken. We have to clean up this shard entirely, remove all files and bubble it up to the
            // source shard since this index might be broken there as well? The Source can handle this and checks
            // its content on disk if possible.
            try {
                try {
                    store.removeCorruptionMarker();
                } finally {
                    Lucene.cleanLuceneIndex(store.directory()); // clean up and delete all files
                }
            } catch (Exception e) {
                logger.debug("Failed to clean lucene index", e);
                ex.addSuppressed(e);
            }
            RecoveryFailedException rfe = new RecoveryFailedException(state(), "failed to clean after recovery", ex);
            fail(rfe, true);
            throw rfe;
        } catch (Exception ex) {
            RecoveryFailedException rfe = new RecoveryFailedException(state(), "failed to clean after recovery", ex);
            fail(rfe, true);
            throw rfe;
        }
    }

    @Override
    public void writeFileChunk(StoreFileMetaData fileMetaData, long position, BytesReference content,
                               boolean lastChunk, int totalTranslogOps) throws IOException {
        final Store store = store();
        final String name = fileMetaData.name();
        state().getTranslog().totalOperations(totalTranslogOps);
        final RecoveryState.Index indexState = state().getIndex();
        IndexOutput indexOutput;
        // position == 0 means the first chunk of this file: create the (temporary) output now
        if (position == 0) {
            indexOutput = openAndPutIndexOutput(name, fileMetaData, store);
        } else {
            indexOutput = getOpenIndexOutput(name);
        }
        BytesRefIterator iterator = content.iterator();
        BytesRef scratch;
        while((scratch = iterator.next()) != null) { // we iterate over all pages - this is a 0-copy for all core impls
            indexOutput.writeBytes(scratch.bytes, scratch.offset, scratch.length);
        }
        indexState.addRecoveredBytesToFile(name, content.length());
        if (indexOutput.getFilePointer() >= fileMetaData.length() || lastChunk) {
            try {
                Store.verify(indexOutput);
            } finally {
                // we are done
                indexOutput.close();
            }
            final String temporaryFileName = getTempNameForFile(name);
            assert Arrays.asList(store.directory().listAll()).contains(temporaryFileName) :
                "expected: [" + temporaryFileName + "] in " + Arrays.toString(store.directory().listAll());
            store.directory().sync(Collections.singleton(temporaryFileName));
            IndexOutput remove = removeOpenIndexOutputs(name);
            assert remove == null || remove == indexOutput; // remove maybe null if we got finished
        }
    }

    Path translogLocation() {
        return indexShard().shardPath().resolveTranslog();
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.util;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.cache.recycler.MockBigArrays;
import org.elasticsearch.test.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.Before;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Arrays;
public class BigArraysTests extends ElasticsearchTestCase {
public static BigArrays randombigArrays() {
final PageCacheRecycler recycler = randomBoolean() ? null : new MockPageCacheRecycler(ImmutableSettings.EMPTY, new ThreadPool("BigArraysTests"));
return new MockBigArrays(ImmutableSettings.EMPTY, recycler);
}
private BigArrays bigArrays;
@Before
public void init() {
bigArrays = randombigArrays();
}
public void testByteArrayGrowth() {
final int totalLen = randomIntBetween(1, 4000000);
final int startLen = randomIntBetween(1, randomBoolean() ? 1000 : totalLen);
ByteArray array = bigArrays.newByteArray(startLen, randomBoolean());
byte[] ref = new byte[totalLen];
for (int i = 0; i < totalLen; ++i) {
ref[i] = randomByte();
array = bigArrays.grow(array, i + 1);
array.set(i, ref[i]);
}
for (int i = 0; i < totalLen; ++i) {
assertEquals(ref[i], array.get(i));
}
array.close();
}
// Verifies IntArray grows on demand via BigArrays.grow() and that values
// written before each reallocation survive the growth.
public void testIntArrayGrowth() {
    final int totalLen = randomIntBetween(1, 1000000);
    final int startLen = randomIntBetween(1, randomBoolean() ? 1000 : totalLen);
    IntArray array = bigArrays.newIntArray(startLen, randomBoolean());
    int[] ref = new int[totalLen];
    for (int i = 0; i < totalLen; ++i) {
        ref[i] = randomInt();
        // grow() may return a different instance; always reassign before set().
        array = bigArrays.grow(array, i + 1);
        array.set(i, ref[i]);
    }
    for (int i = 0; i < totalLen; ++i) {
        assertEquals(ref[i], array.get(i));
    }
    array.close();
}
// Same growth contract as above, for LongArray.
public void testLongArrayGrowth() {
    final int totalLen = randomIntBetween(1, 1000000);
    final int startLen = randomIntBetween(1, randomBoolean() ? 1000 : totalLen);
    LongArray array = bigArrays.newLongArray(startLen, randomBoolean());
    long[] ref = new long[totalLen];
    for (int i = 0; i < totalLen; ++i) {
        ref[i] = randomLong();
        array = bigArrays.grow(array, i + 1);
        array.set(i, ref[i]);
    }
    for (int i = 0; i < totalLen; ++i) {
        assertEquals(ref[i], array.get(i));
    }
    array.close();
}
// Same growth contract, for FloatArray (delta-based assert for floats).
public void testFloatArrayGrowth() {
    final int totalLen = randomIntBetween(1, 1000000);
    final int startLen = randomIntBetween(1, randomBoolean() ? 1000 : totalLen);
    FloatArray array = bigArrays.newFloatArray(startLen, randomBoolean());
    float[] ref = new float[totalLen];
    for (int i = 0; i < totalLen; ++i) {
        ref[i] = randomFloat();
        array = bigArrays.grow(array, i + 1);
        array.set(i, ref[i]);
    }
    for (int i = 0; i < totalLen; ++i) {
        assertEquals(ref[i], array.get(i), 0.001d);
    }
    array.close();
}
// Same growth contract, for DoubleArray.
public void testDoubleArrayGrowth() {
    final int totalLen = randomIntBetween(1, 1000000);
    final int startLen = randomIntBetween(1, randomBoolean() ? 1000 : totalLen);
    DoubleArray array = bigArrays.newDoubleArray(startLen, randomBoolean());
    double[] ref = new double[totalLen];
    for (int i = 0; i < totalLen; ++i) {
        ref[i] = randomDouble();
        array = bigArrays.grow(array, i + 1);
        array.set(i, ref[i]);
    }
    for (int i = 0; i < totalLen; ++i) {
        assertEquals(ref[i], array.get(i), 0.001d);
    }
    array.close();
}
// Same growth contract, for ObjectArray; uses assertSame because object
// identity (not equals) must be preserved by growth.
public void testObjectArrayGrowth() {
    final int totalLen = randomIntBetween(1, 1000000);
    final int startLen = randomIntBetween(1, randomBoolean() ? 1000 : totalLen);
    ObjectArray<Object> array = bigArrays.newObjectArray(startLen);
    final Object[] pool = new Object[100];
    for (int i = 0; i < pool.length; ++i) {
        pool[i] = new Object();
    }
    Object[] ref = new Object[totalLen];
    for (int i = 0; i < totalLen; ++i) {
        ref[i] = randomFrom(pool);
        array = bigArrays.grow(array, i + 1);
        array.set(i, ref[i]);
    }
    for (int i = 0; i < totalLen; ++i) {
        assertSame(ref[i], array.get(i));
    }
    array.close();
}
// Verifies that ByteArray.fill() matches Arrays.fill() semantics over both a
// single-page range and a likely page-spanning [fromIndex, toIndex) range.
public void testByteArrayFill() {
    final int len = randomIntBetween(1, 100000);
    final int fromIndex = randomIntBetween(0, len - 1);
    final int toIndex = randomBoolean()
        ? Math.min(fromIndex + randomInt(100), len) // single page
        : randomIntBetween(fromIndex, len); // likely multiple pages
    final ByteArray array2 = bigArrays.newByteArray(len, randomBoolean());
    final byte[] array1 = new byte[len];
    for (int i = 0; i < len; ++i) {
        array1[i] = randomByte();
        array2.set(i, array1[i]);
    }
    final byte rand = randomByte();
    Arrays.fill(array1, fromIndex, toIndex, rand);
    array2.fill(fromIndex, toIndex, rand);
    for (int i = 0; i < len; ++i) {
        // Bytes are exact values: compare directly rather than widening to the
        // floating-point assertEquals(expected, actual, delta) overload.
        assertEquals(array1[i], array2.get(i));
    }
    array2.close();
}
// Verifies FloatArray.fill() matches Arrays.fill() over single- and
// multi-page ranges.
public void testFloatArrayFill() {
    final int len = randomIntBetween(1, 100000);
    final int fromIndex = randomIntBetween(0, len - 1);
    final int toIndex = randomBoolean()
        ? Math.min(fromIndex + randomInt(100), len) // single page
        : randomIntBetween(fromIndex, len); // likely multiple pages
    final FloatArray array2 = bigArrays.newFloatArray(len, randomBoolean());
    final float[] array1 = new float[len];
    for (int i = 0; i < len; ++i) {
        array1[i] = randomFloat();
        array2.set(i, array1[i]);
    }
    final float rand = randomFloat();
    Arrays.fill(array1, fromIndex, toIndex, rand);
    array2.fill(fromIndex, toIndex, rand);
    for (int i = 0; i < len; ++i) {
        assertEquals(array1[i], array2.get(i), 0.001d);
    }
    array2.close();
}
// Same fill contract, for DoubleArray.
public void testDoubleArrayFill() {
    final int len = randomIntBetween(1, 100000);
    final int fromIndex = randomIntBetween(0, len - 1);
    final int toIndex = randomBoolean()
        ? Math.min(fromIndex + randomInt(100), len) // single page
        : randomIntBetween(fromIndex, len); // likely multiple pages
    final DoubleArray array2 = bigArrays.newDoubleArray(len, randomBoolean());
    final double[] array1 = new double[len];
    for (int i = 0; i < len; ++i) {
        array1[i] = randomDouble();
        array2.set(i, array1[i]);
    }
    final double rand = randomDouble();
    Arrays.fill(array1, fromIndex, toIndex, rand);
    array2.fill(fromIndex, toIndex, rand);
    for (int i = 0; i < len; ++i) {
        assertEquals(array1[i], array2.get(i), 0.001d);
    }
    array2.close();
}
// Same fill contract, for LongArray (exact comparison, no delta).
public void testLongArrayFill() {
    final int len = randomIntBetween(1, 100000);
    final int fromIndex = randomIntBetween(0, len - 1);
    final int toIndex = randomBoolean()
        ? Math.min(fromIndex + randomInt(100), len) // single page
        : randomIntBetween(fromIndex, len); // likely multiple pages
    final LongArray array2 = bigArrays.newLongArray(len, randomBoolean());
    final long[] array1 = new long[len];
    for (int i = 0; i < len; ++i) {
        array1[i] = randomLong();
        array2.set(i, array1[i]);
    }
    final long rand = randomLong();
    Arrays.fill(array1, fromIndex, toIndex, rand);
    array2.fill(fromIndex, toIndex, rand);
    for (int i = 0; i < len; ++i) {
        assertEquals(array1[i], array2.get(i));
    }
    array2.close();
}
// Verifies bulk get into a BytesRef returns the same slice as the backing
// byte[] for 1000 random (offset, len) pairs, including page-spanning reads.
public void testByteArrayBulkGet() {
    final byte[] array1 = new byte[randomIntBetween(1, 4000000)];
    getRandom().nextBytes(array1);
    final ByteArray array2 = bigArrays.newByteArray(array1.length, randomBoolean());
    for (int i = 0; i < array1.length; ++i) {
        array2.set(i, array1[i]);
    }
    final BytesRef ref = new BytesRef();
    for (int i = 0; i < 1000; ++i) {
        final int offset = randomInt(array1.length - 1);
        // randomBoolean() biases between short reads and reads up to the end.
        final int len = randomInt(Math.min(randomBoolean() ? 10 : Integer.MAX_VALUE, array1.length - offset));
        array2.get(offset, len, ref);
        assertEquals(new BytesRef(array1, offset, len), ref);
    }
    array2.close();
}
// Verifies bulk set in random-length chunks (some larger than a page)
// reproduces the source byte[] exactly.
public void testByteArrayBulkSet() {
    final byte[] array1 = new byte[randomIntBetween(1, 4000000)];
    getRandom().nextBytes(array1);
    final ByteArray array2 = bigArrays.newByteArray(array1.length, randomBoolean());
    for (int i = 0; i < array1.length; ) {
        // Chunk sizes up to 3 pages exercise the page-crossing copy path.
        final int len = Math.min(array1.length - i, randomBoolean() ? randomInt(10) : randomInt(3 * BigArrays.BYTE_PAGE_SIZE));
        array2.set(i, array1, i, len);
        i += len;
    }
    for (int i = 0; i < array1.length; ++i) {
        assertEquals(array1[i], array2.get(i));
    }
    array2.close();
}
// Exercises BigArrays.equals(ByteArray, ByteArray): identity, empty-vs-empty,
// and inequality when contents differ at the start, middle, or end.
public void testByteArrayEquals() {
    final ByteArray empty1 = byteArrayWithBytes(BytesRef.EMPTY_BYTES);
    final ByteArray empty2 = byteArrayWithBytes(BytesRef.EMPTY_BYTES);
    // identity = equality
    assertTrue(bigArrays.equals(empty1, empty1));
    // equality: both empty
    assertTrue(bigArrays.equals(empty1, empty2));
    empty1.close();
    empty2.close();
    // not equal: single-byte contents differ
    final ByteArray a1 = byteArrayWithBytes(new byte[]{0});
    final ByteArray a2 = byteArrayWithBytes(new byte[]{1});
    assertFalse(bigArrays.equals(a1, a2));
    a1.close();
    a2.close();
    // not equal: contents differ in the middle
    final ByteArray a3 = byteArrayWithBytes(new byte[]{1,2,3});
    final ByteArray a4 = byteArrayWithBytes(new byte[]{1,1,3});
    assertFalse(bigArrays.equals(a3, a4));
    a3.close();
    a4.close();
    // not equal: contents differ at the end
    final ByteArray a5 = byteArrayWithBytes(new byte[]{1,2,3});
    final ByteArray a6 = byteArrayWithBytes(new byte[]{1,2,4});
    assertFalse(bigArrays.equals(a5, a6));
    a5.close();
    a6.close();
}
// Verifies BigArrays.hashCode(ByteArray) agrees with Arrays.hashCode(byte[])
// for null, empty, and large random inputs.
public void testByteArrayHashCode() {
    // null arg has hashCode 0
    assertEquals(0, bigArrays.hashCode(null));
    // empty array should have equal hash
    final int emptyHash = Arrays.hashCode(BytesRef.EMPTY_BYTES);
    final ByteArray emptyByteArray = byteArrayWithBytes(BytesRef.EMPTY_BYTES);
    final int emptyByteArrayHash = bigArrays.hashCode(emptyByteArray);
    assertEquals(emptyHash, emptyByteArrayHash);
    emptyByteArray.close();
    // FUN FACT: Arrays.hashCode() and BytesReference.bytesHashCode() are inconsistent for empty byte[]
    // final int emptyHash3 = new BytesArray(BytesRef.EMPTY_BYTES).hashCode();
    // assertEquals(emptyHash, emptyHash3); -> fail (1 vs. 0)
    // large arrays: ByteArray hash must match the plain byte[] hash
    final byte[] array1 = new byte[randomIntBetween(1, 4000000)];
    getRandom().nextBytes(array1);
    final int array1Hash = Arrays.hashCode(array1);
    final ByteArray array2 = byteArrayWithBytes(array1);
    final int array2Hash = bigArrays.hashCode(array2);
    assertEquals(array1Hash, array2Hash);
    array2.close();
}
/**
 * Copies {@code bytes} into a freshly allocated {@link ByteArray} of the
 * same length. Caller is responsible for closing the returned array.
 */
private ByteArray byteArrayWithBytes(byte[] bytes) {
    final ByteArray result = bigArrays.newByteArray(bytes.length);
    int index = 0;
    for (byte b : bytes) {
        result.set(index++, b);
    }
    return result;
}
// For each primitive/object BigArray flavor: the breaker accounting
// (sizeInBytes) must track ramBytesUsed() through creation, resize, and
// return to zero on close. Uses reflection to cover all newXxxArray/resize
// overloads uniformly.
public void testByteAccounting() throws Exception {
    for (String type : Arrays.asList("Byte", "Int", "Long", "Float", "Double", "Object")) {
        // Unlimited budget so no allocation can trip the limit in this test.
        BigArrays bigArrays = new BigArrays(ImmutableSettings.builder().put(BigArrays.MAX_SIZE_IN_BYTES_SETTING, Long.MAX_VALUE).build(), null);
        Method create = BigArrays.class.getMethod("new" + type + "Array", long.class);
        final int size = scaledRandomIntBetween(5, 1 << 16);
        BigArray array = (BigArray) create.invoke(bigArrays, size);
        assertEquals(array.ramBytesUsed(), bigArrays.sizeInBytes());
        // resize() is resolved against the array's interface type (e.g. IntArray).
        Method resize = BigArrays.class.getMethod("resize", array.getClass().getInterfaces()[0], long.class);
        int newSize = scaledRandomIntBetween(5, 1 << 16);
        array = (BigArray) resize.invoke(bigArrays, array, newSize);
        assertEquals(array.ramBytesUsed(), bigArrays.sizeInBytes());
        array.close();
        assertEquals(0, bigArrays.sizeInBytes());
    }
}
// Allocating an array larger than the configured byte budget must fail with
// ElasticsearchIllegalStateException and leave the accounting at zero.
public void testMaxSizeExceededOnNew() throws Exception {
    final int size = scaledRandomIntBetween(5, 1 << 22);
    for (String type : Arrays.asList("Byte", "Int", "Long", "Float", "Double", "Object")) {
        // Budget is strictly smaller than (or equal to) the requested size.
        BigArrays bigArrays = new BigArrays(ImmutableSettings.builder().put(BigArrays.MAX_SIZE_IN_BYTES_SETTING, randomIntBetween(1, size)).build(), null);
        Method create = BigArrays.class.getMethod("new" + type + "Array", long.class);
        try {
            create.invoke(bigArrays, size);
            fail("expected an exception on " + create);
        } catch (InvocationTargetException e) {
            // Reflection wraps the real failure; unwrap and check its type.
            assertTrue(e.getCause() instanceof ElasticsearchIllegalStateException);
        }
        // Failed allocation must not leak accounted bytes.
        assertEquals(0, bigArrays.sizeInBytes());
    }
}
// Repeatedly doubling an array must eventually exceed the byte budget; the
// failed resize must leave both the array and the accounting consistent.
public void testMaxSizeExceededOnResize() throws Exception {
    for (String type : Arrays.asList("Byte", "Int", "Long", "Float", "Double", "Object")) {
        final long maxSize = randomIntBetween(1 << 10, 1 << 22);
        BigArrays bigArrays = new BigArrays(ImmutableSettings.builder().put(BigArrays.MAX_SIZE_IN_BYTES_SETTING, maxSize).build(), null);
        Method create = BigArrays.class.getMethod("new" + type + "Array", long.class);
        final int size = scaledRandomIntBetween(1, 20);
        BigArray array = (BigArray) create.invoke(bigArrays, size);
        Method resize = BigArrays.class.getMethod("resize", array.getClass().getInterfaces()[0], long.class);
        while (true) {
            long newSize = array.size() * 2;
            // Invariant holds before every resize attempt.
            assertEquals(array.ramBytesUsed(), bigArrays.sizeInBytes());
            try {
                array = (BigArray) resize.invoke(bigArrays, array, newSize);
            } catch (InvocationTargetException e) {
                assertTrue(e.getCause() instanceof ElasticsearchIllegalStateException);
                break;
            }
        }
        // After the failed resize the original array is still accounted for.
        assertEquals(array.ramBytesUsed(), bigArrays.sizeInBytes());
        array.close();
        assertEquals(0, bigArrays.sizeInBytes());
    }
}
}
| |
/*
* TouchImageView.java
* By: Michael Ortiz
* Updated By: Patrick Lackemacher
* Updated By: Babay88
* Updated By: @ipsilondev
* Updated By: hank-cp
* Updated By: singpolyma
* -------------------
* Extends Android ImageView to include pinch zooming, panning, fling and double tap zoom.
*/
package com.njlabs.showjava.utils;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.PointF;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Build;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.AttributeSet;
import android.util.Log;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.View;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.widget.ImageView;
import android.widget.OverScroller;
import android.widget.Scroller;
@SuppressWarnings({"unused", "JavaDoc", "SuspiciousNameCombination"})
public class TouchImageView extends ImageView {
private static final String DEBUG = "DEBUG";
//
// SuperMin and SuperMax multipliers. Determine how much the image can be
// zoomed below or above the zoom boundaries, before animating back to the
// min/max zoom boundary.
//
private static final float SUPER_MIN_MULTIPLIER = .75f;
private static final float SUPER_MAX_MULTIPLIER = 1.25f;
//
// Scale of image ranges from minScale to maxScale, where minScale == 1
// when the image is stretched to fit view.
//
private float normalizedScale;
//
// Matrix applied to image. MSCALE_X and MSCALE_Y should always be equal.
// MTRANS_X and MTRANS_Y are the other values used. prevMatrix is the matrix
// saved prior to the screen rotating.
//
private Matrix matrix, prevMatrix;
private State state;
private float minScale;
private float maxScale;
private float superMinScale;
private float superMaxScale;
private float[] m;
private Context context;
private Fling fling;
private ScaleType mScaleType;
private boolean imageRenderedAtLeastOnce;
private boolean onDrawReady;
private ZoomVariables delayedZoomVariables;
//
// Size of view and previous view size (ie before rotation)
//
private int viewWidth, viewHeight, prevViewWidth, prevViewHeight;
//
// Size of image when it is stretched to fit view. Before and After rotation.
//
private float matchViewWidth, matchViewHeight, prevMatchViewWidth, prevMatchViewHeight;
private ScaleGestureDetector mScaleDetector;
private GestureDetector mGestureDetector;
private GestureDetector.OnDoubleTapListener doubleTapListener = null;
private OnTouchListener userTouchListener = null;
private OnTouchImageViewListener touchImageViewListener = null;
/** Creates the view programmatically. */
public TouchImageView(Context context) {
    super(context);
    sharedConstructing(context);
}
/** Creates the view from XML layout inflation. */
public TouchImageView(Context context, AttributeSet attrs) {
    super(context, attrs);
    sharedConstructing(context);
}
/** Creates the view from XML with a style attribute. */
public TouchImageView(Context context, AttributeSet attrs, int defStyle) {
    super(context, attrs, defStyle);
    sharedConstructing(context);
}
/**
 * Common initialization for all constructors: wires up scale/gesture
 * detectors, resets the transform matrices and zoom bounds, and installs
 * the internal touch listener.
 */
private void sharedConstructing(Context context) {
    super.setClickable(true);
    this.context = context;
    mScaleDetector = new ScaleGestureDetector(context, new ScaleListener());
    mGestureDetector = new GestureDetector(context, new GestureListener());
    matrix = new Matrix();
    prevMatrix = new Matrix();
    m = new float[9];
    normalizedScale = 1;
    if (mScaleType == null) {
        mScaleType = ScaleType.FIT_CENTER;
    }
    minScale = 1;
    maxScale = 3;
    superMinScale = SUPER_MIN_MULTIPLIER * minScale;
    superMaxScale = SUPER_MAX_MULTIPLIER * maxScale;
    setImageMatrix(matrix);
    // The ImageView itself always runs in MATRIX mode; mScaleType only
    // records the user-requested fit behavior.
    setScaleType(ScaleType.MATRIX);
    setState(State.NONE);
    onDrawReady = false;
    // Install our own listener; user listeners are chained via
    // userTouchListener (see setOnTouchListener override below).
    super.setOnTouchListener(new PrivateOnTouchListener());
}
/**
 * Stores the user's touch listener instead of replacing the internal one;
 * the internal listener forwards events to it after handling pan/zoom.
 */
@Override
public void setOnTouchListener(View.OnTouchListener l) {
    userTouchListener = l;
}
/** Registers a listener notified whenever the image transform changes. */
public void setOnTouchImageViewListener(OnTouchImageViewListener l) {
    touchImageViewListener = l;
}
/** Registers a double-tap listener consulted before the built-in zoom. */
public void setOnDoubleTapListener(GestureDetector.OnDoubleTapListener l) {
    doubleTapListener = l;
}
// Each setImage* override snapshots the current transform and re-fits the
// new drawable so a previously applied zoom survives the image change.
@Override
public void setImageResource(int resId) {
    super.setImageResource(resId);
    savePreviousImageValues();
    fitImageToView();
}
@Override
public void setImageBitmap(Bitmap bm) {
    super.setImageBitmap(bm);
    savePreviousImageValues();
    fitImageToView();
}
@Override
public void setImageDrawable(Drawable drawable) {
    super.setImageDrawable(drawable);
    savePreviousImageValues();
    fitImageToView();
}
@Override
public void setImageURI(Uri uri) {
    super.setImageURI(uri);
    savePreviousImageValues();
    fitImageToView();
}
/** Returns the user-requested scale type (not the internal MATRIX mode). */
@Override
public ScaleType getScaleType() {
    return mScaleType;
}
/**
 * Sets the requested fit behavior. FIT_START/FIT_END are rejected; MATRIX
 * is passed straight to the superclass (it is the internal operating mode).
 * Other types are stored and, if the view has already drawn, applied
 * immediately by re-zooming.
 */
@Override
public void setScaleType(ScaleType type) {
    if (type == ScaleType.FIT_START || type == ScaleType.FIT_END) {
        throw new UnsupportedOperationException("TouchImageView does not support FIT_START or FIT_END");
    }
    if (type == ScaleType.MATRIX) {
        super.setScaleType(ScaleType.MATRIX);
    } else {
        mScaleType = type;
        if (onDrawReady) {
            //
            // If the image is already rendered, scaleType has been called programmatically
            // and the TouchImageView should be updated with the new scaleType.
            //
            setZoom(this);
        }
    }
}
/**
 * Returns whether the image has been zoomed away from its initial state.
 *
 * @return true if the image is zoomed (normalized scale != 1); false otherwise
 */
public boolean isZoomed() {
    return normalizedScale != 1;
}
/**
 * Returns the visible portion of the image as a RectF of fractions of the
 * drawable's intrinsic size (each coordinate in [0, 1]).
 *
 * @return rect representing the zoomed (visible) region of the image
 * @throws UnsupportedOperationException for FIT_XY, where fractional
 *         coordinates are not meaningful
 */
public RectF getZoomedRect() {
    if (mScaleType == ScaleType.FIT_XY) {
        throw new UnsupportedOperationException("getZoomedRect() not supported with FIT_XY");
    }
    // Map the view's corners into bitmap coordinates, clipped to the bitmap.
    PointF topLeft = transformCoordTouchToBitmap(0, 0, true);
    PointF bottomRight = transformCoordTouchToBitmap(viewWidth, viewHeight, true);
    float w = getDrawable().getIntrinsicWidth();
    float h = getDrawable().getIntrinsicHeight();
    return new RectF(topLeft.x / w, topLeft.y / h, bottomRight.x / w, bottomRight.y / h);
}
/**
 * Save the current matrix and view dimensions in the prevMatrix and
 * prevView variables, so the transform can be reconstructed after a
 * rotation or image change. No-op before the view has been measured.
 */
private void savePreviousImageValues() {
    if (matrix != null && viewHeight != 0 && viewWidth != 0) {
        matrix.getValues(m);
        prevMatrix.setValues(m);
        prevMatchViewHeight = matchViewHeight;
        prevMatchViewWidth = matchViewWidth;
        prevViewHeight = viewHeight;
        prevViewWidth = viewWidth;
    }
}
/** Persists zoom state (scale, matrix, view dimensions) across config changes. */
@Override
public Parcelable onSaveInstanceState() {
    Bundle bundle = new Bundle();
    bundle.putParcelable("instanceState", super.onSaveInstanceState());
    bundle.putFloat("saveScale", normalizedScale);
    bundle.putFloat("matchViewHeight", matchViewHeight);
    bundle.putFloat("matchViewWidth", matchViewWidth);
    bundle.putInt("viewWidth", viewWidth);
    bundle.putInt("viewHeight", viewHeight);
    matrix.getValues(m);
    bundle.putFloatArray("matrix", m);
    bundle.putBoolean("imageRendered", imageRenderedAtLeastOnce);
    return bundle;
}
/**
 * Restores saved zoom state into the prev* fields; fitImageToView() later
 * translates them into the new view geometry.
 */
@Override
public void onRestoreInstanceState(Parcelable state) {
    if (state instanceof Bundle) {
        Bundle bundle = (Bundle) state;
        normalizedScale = bundle.getFloat("saveScale");
        m = bundle.getFloatArray("matrix");
        prevMatrix.setValues(m);
        prevMatchViewHeight = bundle.getFloat("matchViewHeight");
        prevMatchViewWidth = bundle.getFloat("matchViewWidth");
        prevViewHeight = bundle.getInt("viewHeight");
        prevViewWidth = bundle.getInt("viewWidth");
        imageRenderedAtLeastOnce = bundle.getBoolean("imageRendered");
        super.onRestoreInstanceState(bundle.getParcelable("instanceState"));
        return;
    }
    super.onRestoreInstanceState(state);
}
/**
 * Marks the view as measured/rendered and applies any setZoom() request
 * that arrived before measurement (see delayedZoomVariables).
 */
@Override
protected void onDraw(Canvas canvas) {
    onDrawReady = true;
    imageRenderedAtLeastOnce = true;
    if (delayedZoomVariables != null) {
        setZoom(delayedZoomVariables.scale, delayedZoomVariables.focusX, delayedZoomVariables.focusY, delayedZoomVariables.scaleType);
        delayedZoomVariables = null;
    }
    super.onDraw(canvas);
}
/** Snapshot the transform so it can be restored after rotation. */
@Override
public void onConfigurationChanged(Configuration newConfig) {
    super.onConfigurationChanged(newConfig);
    savePreviousImageValues();
}
/**
 * Get the max zoom multiplier.
 *
 * @return max zoom multiplier.
 */
public float getMaxZoom() {
    return maxScale;
}
/**
 * Set the max zoom multiplier. Default value: 3. Also recomputes the
 * "super" bound used while over-zooming during a pinch.
 *
 * @param max max zoom multiplier.
 */
public void setMaxZoom(float max) {
    maxScale = max;
    superMaxScale = SUPER_MAX_MULTIPLIER * maxScale;
}
/**
 * Get the min zoom multiplier.
 *
 * @return min zoom multiplier.
 */
public float getMinZoom() {
    return minScale;
}
/**
 * Set the min zoom multiplier. Default value: 1. Also recomputes the
 * "super" bound used while under-zooming during a pinch.
 *
 * @param min min zoom multiplier.
 */
public void setMinZoom(float min) {
    minScale = min;
    superMinScale = SUPER_MIN_MULTIPLIER * minScale;
}
/**
 * Get the current zoom. This is the zoom relative to the initial
 * scale, not the original resource.
 *
 * @return current zoom multiplier.
 */
public float getCurrentZoom() {
    return normalizedScale;
}
/**
 * Reset zoom and translation to initial (fit-to-view) state.
 */
public void resetZoom() {
    normalizedScale = 1;
    fitImageToView();
}
/**
 * Set zoom to the specified scale. Image will be centered by default
 * (focus point 0.5, 0.5).
 *
 * @param scale zoom multiplier relative to the fitted image
 */
public void setZoom(float scale) {
    setZoom(scale, 0.5f, 0.5f);
}
/**
 * Set zoom to the specified scale. Image will be centered around the point
 * (focusX, focusY). These floats range from 0 to 1 and denote the focus point
 * as a fraction from the left and top of the view. For example, the top left
 * corner of the image would be (0, 0). And the bottom right corner would be (1, 1).
 *
 * @param scale zoom multiplier relative to the fitted image
 * @param focusX horizontal focus fraction in [0, 1]
 * @param focusY vertical focus fraction in [0, 1]
 */
public void setZoom(float scale, float focusX, float focusY) {
    setZoom(scale, focusX, focusY, mScaleType);
}
/**
 * Set zoom to the specified scale. Image will be centered around the point
 * (focusX, focusY). These floats range from 0 to 1 and denote the focus point
 * as a fraction from the left and top of the view. For example, the top left
 * corner of the image would be (0, 0). And the bottom right corner would be (1, 1).
 *
 * @param scale zoom multiplier relative to the fitted image
 * @param focusX horizontal focus fraction in [0, 1]
 * @param focusY vertical focus fraction in [0, 1]
 * @param scaleType fit behavior to apply before zooming
 */
public void setZoom(float scale, float focusX, float focusY, ScaleType scaleType) {
    //
    // setZoom can be called before the image is on the screen, but at this point,
    // image and view sizes have not yet been calculated in onMeasure. Thus, we should
    // delay calling setZoom until the view has been measured (see onDraw).
    //
    if (!onDrawReady) {
        delayedZoomVariables = new ZoomVariables(scale, focusX, focusY, scaleType);
        return;
    }
    if (scaleType != mScaleType) {
        setScaleType(scaleType);
    }
    // Start from the fitted state, scale about the view center, then shift
    // the matrix so the requested focus fraction lands at the view center.
    resetZoom();
    scaleImage(scale, viewWidth / 2, viewHeight / 2, true);
    matrix.getValues(m);
    m[Matrix.MTRANS_X] = -((focusX * getImageWidth()) - (viewWidth * 0.5f));
    m[Matrix.MTRANS_Y] = -((focusY * getImageHeight()) - (viewHeight * 0.5f));
    matrix.setValues(m);
    fixTrans();
    setImageMatrix(matrix);
}
/**
 * Set zoom parameters equal to another TouchImageView. Including scale,
 * position, and ScaleType.
 *
 * @param img TouchImageView to copy zoom state from
 */
public void setZoom(TouchImageView img) {
    PointF center = img.getScrollPosition();
    setZoom(img.getCurrentZoom(), center.x, center.y, img.getScaleType());
}
/**
 * Return the point at the center of the zoomed image. The PointF coordinates range
 * in value between 0 and 1 and the focus point is denoted as a fraction from the left
 * and top of the view. For example, the top left corner of the image would be (0, 0).
 * And the bottom right corner would be (1, 1).
 *
 * @return PointF representing the scroll position of the zoomed image, or
 *         null if no drawable is set
 */
public PointF getScrollPosition() {
    Drawable drawable = getDrawable();
    if (drawable == null) {
        return null;
    }
    int drawableWidth = drawable.getIntrinsicWidth();
    int drawableHeight = drawable.getIntrinsicHeight();
    // Map the view center into bitmap pixels, then normalize to [0, 1].
    PointF point = transformCoordTouchToBitmap(viewWidth / 2, viewHeight / 2, true);
    point.x /= drawableWidth;
    point.y /= drawableHeight;
    return point;
}
/**
 * Set the focus point of the zoomed image. The focus points are denoted as a fraction from the
 * left and top of the view. The focus points can range in value between 0 and 1.
 *
 * @param focusX horizontal focus fraction in [0, 1]
 * @param focusY vertical focus fraction in [0, 1]
 */
public void setScrollPosition(float focusX, float focusY) {
    setZoom(normalizedScale, focusX, focusY);
}
/**
 * Performs boundary checking and fixes the image matrix if it
 * is out of bounds (i.e. the image has been panned past its edges).
 */
private void fixTrans() {
    matrix.getValues(m);
    float transX = m[Matrix.MTRANS_X];
    float transY = m[Matrix.MTRANS_Y];
    float fixTransX = getFixTrans(transX, viewWidth, getImageWidth());
    float fixTransY = getFixTrans(transY, viewHeight, getImageHeight());
    if (fixTransX != 0 || fixTransY != 0) {
        matrix.postTranslate(fixTransX, fixTransY);
    }
}
/**
 * When transitioning from zooming from focus to zoom from center (or vice versa)
 * the image can become unaligned within the view. This is apparent when zooming
 * quickly. When the content size is less than the view size, the content will often
 * be centered incorrectly within the view. fixScaleTrans first calls fixTrans() and
 * then makes sure the image is centered correctly within the view.
 */
private void fixScaleTrans() {
    fixTrans();
    matrix.getValues(m);
    // Force centering along any axis where the image is smaller than the view.
    if (getImageWidth() < viewWidth) {
        m[Matrix.MTRANS_X] = (viewWidth - getImageWidth()) / 2;
    }
    if (getImageHeight() < viewHeight) {
        m[Matrix.MTRANS_Y] = (viewHeight - getImageHeight()) / 2;
    }
    matrix.setValues(m);
}
/**
 * Computes the correction to add to a translation so it stays within the
 * legal range for the given view/content sizes. Returns 0 when the
 * translation is already in bounds.
 *
 * @param trans current translation along one axis
 * @param viewSize view width or height
 * @param contentSize scaled image width or height
 * @return amount to add to {@code trans} to bring it back in range
 */
private float getFixTrans(float trans, float viewSize, float contentSize) {
    // When content fits the view, valid range is [0, view - content];
    // when content overflows, it is [view - content, 0].
    final boolean fits = contentSize <= viewSize;
    final float minTrans = fits ? 0 : viewSize - contentSize;
    final float maxTrans = fits ? viewSize - contentSize : 0;
    if (trans < minTrans) {
        return minTrans - trans;
    }
    if (trans > maxTrans) {
        return maxTrans - trans;
    }
    return 0;
}
/**
 * During a drag, suppress movement along an axis whose content already fits
 * inside the view; otherwise pass the delta through unchanged.
 */
private float getFixDragTrans(float delta, float viewSize, float contentSize) {
    return contentSize <= viewSize ? 0 : delta;
}
/** Current on-screen image width: fitted width times the zoom factor. */
private float getImageWidth() {
    return matchViewWidth * normalizedScale;
}
/** Current on-screen image height: fitted height times the zoom factor. */
private float getImageHeight() {
    return matchViewHeight * normalizedScale;
}
/**
 * Measures the view against the drawable's intrinsic size and then re-fits
 * the image. A missing or zero-sized drawable yields a 0x0 view.
 */
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    Drawable drawable = getDrawable();
    if (drawable == null || drawable.getIntrinsicWidth() == 0 || drawable.getIntrinsicHeight() == 0) {
        setMeasuredDimension(0, 0);
        return;
    }
    int drawableWidth = drawable.getIntrinsicWidth();
    int drawableHeight = drawable.getIntrinsicHeight();
    int widthSize = MeasureSpec.getSize(widthMeasureSpec);
    int widthMode = MeasureSpec.getMode(widthMeasureSpec);
    int heightSize = MeasureSpec.getSize(heightMeasureSpec);
    int heightMode = MeasureSpec.getMode(heightMeasureSpec);
    viewWidth = setViewSize(widthMode, widthSize, drawableWidth);
    viewHeight = setViewSize(heightMode, heightSize, drawableHeight);
    //
    // Set view dimensions
    //
    setMeasuredDimension(viewWidth, viewHeight);
    //
    // Fit content within view
    //
    fitImageToView();
}
/**
 * If the normalizedScale is equal to 1, then the image is made to fit the screen. Otherwise,
 * it is made to fit the screen according to the dimensions of the previous image matrix. This
 * allows the image to maintain its zoom after rotation.
 */
private void fitImageToView() {
    Drawable drawable = getDrawable();
    if (drawable == null || drawable.getIntrinsicWidth() == 0 || drawable.getIntrinsicHeight() == 0) {
        return;
    }
    if (matrix == null || prevMatrix == null) {
        return;
    }
    int drawableWidth = drawable.getIntrinsicWidth();
    int drawableHeight = drawable.getIntrinsicHeight();
    //
    // Scale image for view
    //
    float scaleX = (float) viewWidth / drawableWidth;
    float scaleY = (float) viewHeight / drawableHeight;
    switch (mScaleType) {
        case CENTER:
            scaleX = scaleY = 1;
            break;
        case CENTER_INSIDE:
            scaleX = scaleY = Math.min(1, Math.min(scaleX, scaleY));
            // NOTE(review): intentional(?) fall-through into FIT_CENTER. It is
            // harmless: scaleX == scaleY here, so min(scaleX, scaleY) is a no-op.
        case FIT_CENTER:
            scaleX = scaleY = Math.min(scaleX, scaleY);
            break;
        case CENTER_CROP:
            scaleX = scaleY = Math.max(scaleX, scaleY);
            break;
        case FIT_XY:
            break;
        default:
            //
            // FIT_START and FIT_END not supported
            //
            throw new UnsupportedOperationException("TouchImageView does not support FIT_START or FIT_END");
    }
    //
    // Center the image
    //
    float redundantXSpace = viewWidth - (scaleX * drawableWidth);
    float redundantYSpace = viewHeight - (scaleY * drawableHeight);
    matchViewWidth = viewWidth - redundantXSpace;
    matchViewHeight = viewHeight - redundantYSpace;
    if (!isZoomed() && !imageRenderedAtLeastOnce) {
        //
        // Stretch and center image to fit view
        //
        matrix.setScale(scaleX, scaleY);
        matrix.postTranslate(redundantXSpace / 2, redundantYSpace / 2);
        normalizedScale = 1;
    } else {
        //
        // These values should never be 0 or we will set viewWidth and viewHeight
        // to NaN in translateMatrixAfterRotate. To avoid this, call savePreviousImageValues
        // to set them equal to the current values.
        //
        if (prevMatchViewWidth == 0 || prevMatchViewHeight == 0) {
            savePreviousImageValues();
        }
        prevMatrix.getValues(m);
        //
        // Rescale Matrix after rotation
        //
        m[Matrix.MSCALE_X] = matchViewWidth / drawableWidth * normalizedScale;
        m[Matrix.MSCALE_Y] = matchViewHeight / drawableHeight * normalizedScale;
        //
        // TransX and TransY from previous matrix
        //
        float transX = m[Matrix.MTRANS_X];
        float transY = m[Matrix.MTRANS_Y];
        //
        // Width
        //
        float prevActualWidth = prevMatchViewWidth * normalizedScale;
        float actualWidth = getImageWidth();
        translateMatrixAfterRotate(Matrix.MTRANS_X, transX, prevActualWidth, actualWidth, prevViewWidth, viewWidth, drawableWidth);
        //
        // Height
        //
        float prevActualHeight = prevMatchViewHeight * normalizedScale;
        float actualHeight = getImageHeight();
        translateMatrixAfterRotate(Matrix.MTRANS_Y, transY, prevActualHeight, actualHeight, prevViewHeight, viewHeight, drawableHeight);
        //
        // Set the matrix to the adjusted scale and translate values.
        //
        matrix.setValues(m);
    }
    fixTrans();
    setImageMatrix(matrix);
}
/**
 * Resolves one view dimension from its MeasureSpec mode, the size offered by
 * the parent, and the drawable's intrinsic size along the same axis.
 *
 * @param mode MeasureSpec mode (EXACTLY, AT_MOST, or UNSPECIFIED)
 * @param size size offered by the parent for this axis
 * @param drawableSize drawable intrinsic width or height for this axis
 * @return resolved view size for this axis
 */
private int setViewSize(int mode, int size, int drawableSize) {
    switch (mode) {
        case MeasureSpec.EXACTLY:
            return size;
        case MeasureSpec.AT_MOST:
            return Math.min(drawableSize, size);
        case MeasureSpec.UNSPECIFIED:
            return drawableSize;
        default:
            return size;
    }
}
/**
 * After rotating, the matrix needs to be translated. This function finds the area of image
 * which was previously centered and adjusts translations so that is again the center, post-rotation.
 *
 * @param axis Matrix.MTRANS_X or Matrix.MTRANS_Y
 * @param trans the value of trans in that axis before the rotation
 * @param prevImageSize the width/height of the image before the rotation
 * @param imageSize width/height of the image after rotation
 * @param prevViewSize width/height of view before rotation
 * @param viewSize width/height of view after rotation
 * @param drawableSize width/height of drawable
 */
private void translateMatrixAfterRotate(int axis, float trans, float prevImageSize, float imageSize, int prevViewSize, int viewSize, int drawableSize) {
    if (imageSize < viewSize) {
        //
        // The width/height of image is less than the view's width/height. Center it.
        // NOTE(review): MSCALE_X is used for both axes — presumably relying on the
        // class invariant that MSCALE_X == MSCALE_Y (uniform zoom); confirm.
        //
        m[axis] = (viewSize - (drawableSize * m[Matrix.MSCALE_X])) * 0.5f;
    } else if (trans > 0) {
        //
        // The image is larger than the view, but was not before rotation. Center it.
        //
        m[axis] = -((imageSize - viewSize) * 0.5f);
    } else {
        //
        // Find the area of the image which was previously centered in the view. Determine its distance
        // from the left/top side of the view as a fraction of the entire image's width/height. Use that percentage
        // to calculate the trans in the new view width/height.
        //
        float percentage = (Math.abs(trans) + (0.5f * prevViewSize)) / prevImageSize;
        m[axis] = -((percentage * imageSize) - (viewSize * 0.5f));
    }
}
/** Records the current interaction state (drag, zoom, fling, ...). */
private void setState(State state) {
    this.state = state;
}
/** Pre-ICS alias kept for callers targeting old APIs. */
public boolean canScrollHorizontallyFroyo(int direction) {
    return canScrollHorizontally(direction);
}
/**
 * Reports whether the image can still pan horizontally in the given
 * direction (negative = toward the left edge, positive = toward the right).
 * Lets parents like ViewPager decide whether to intercept the drag.
 */
@Override
public boolean canScrollHorizontally(int direction) {
    matrix.getValues(m);
    float x = m[Matrix.MTRANS_X];
    if (getImageWidth() < viewWidth) {
        // Image fits inside the view: nothing to scroll.
        return false;
    } else if (x >= -1 && direction < 0) {
        // Already at the left edge (1px tolerance).
        return false;
    } else if (Math.abs(x) + viewWidth + 1 >= getImageWidth() && direction > 0) {
        // Already at the right edge (1px tolerance).
        return false;
    }
    return true;
}
/**
 * Applies an incremental scale about the given focus point, clamping the
 * resulting zoom to [min, max] — or to the wider "super" range while a
 * pinch is in progress (stretchImageToSuper), so the image can over-zoom
 * and later animate back.
 */
private void scaleImage(double deltaScale, float focusX, float focusY, boolean stretchImageToSuper) {
    float lowerScale, upperScale;
    if (stretchImageToSuper) {
        lowerScale = superMinScale;
        upperScale = superMaxScale;
    } else {
        lowerScale = minScale;
        upperScale = maxScale;
    }
    float origScale = normalizedScale;
    normalizedScale *= deltaScale;
    if (normalizedScale > upperScale) {
        normalizedScale = upperScale;
        // Recompute the delta so the matrix lands exactly on the clamp bound.
        deltaScale = upperScale / origScale;
    } else if (normalizedScale < lowerScale) {
        normalizedScale = lowerScale;
        deltaScale = lowerScale / origScale;
    }
    matrix.postScale((float) deltaScale, (float) deltaScale, focusX, focusY);
    fixScaleTrans();
}
/**
 * This function will transform the coordinates in the touch event to the coordinate
 * system of the drawable that the imageview contains.
 *
 * @param x x-coordinate of touch event
 * @param y y-coordinate of touch event
 * @param clipToBitmap Touch event may occur within view, but outside image content. True, to clip return value
 * to the bounds of the bitmap size.
 * @return Coordinates of the point touched, in the coordinate system of the original drawable.
 */
private PointF transformCoordTouchToBitmap(float x, float y, boolean clipToBitmap) {
    matrix.getValues(m);
    float origW = getDrawable().getIntrinsicWidth();
    float origH = getDrawable().getIntrinsicHeight();
    float transX = m[Matrix.MTRANS_X];
    float transY = m[Matrix.MTRANS_Y];
    // Undo translation, then rescale from on-screen pixels to drawable pixels.
    float finalX = ((x - transX) * origW) / getImageWidth();
    float finalY = ((y - transY) * origH) / getImageHeight();
    if (clipToBitmap) {
        finalX = Math.min(Math.max(finalX, 0), origW);
        finalY = Math.min(Math.max(finalY, 0), origH);
    }
    return new PointF(finalX, finalY);
}
/**
 * Inverse of transformCoordTouchToBitmap. This function will transform the coordinates in the
 * drawable's coordinate system to the view's coordinate system.
 *
 * @param bx x-coordinate in original bitmap coordinate system
 * @param by y-coordinate in original bitmap coordinate system
 * @return Coordinates of the point in the view's coordinate system.
 */
private PointF transformCoordBitmapToTouch(float bx, float by) {
    matrix.getValues(m);
    float origW = getDrawable().getIntrinsicWidth();
    float origH = getDrawable().getIntrinsicHeight();
    // Normalize to fractions of the drawable, then map onto the zoomed image.
    float px = bx / origW;
    float py = by / origH;
    float finalX = m[Matrix.MTRANS_X] + getImageWidth() * px;
    float finalY = m[Matrix.MTRANS_Y] + getImageHeight() * py;
    return new PointF(finalX, finalY);
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void compatPostOnAnimation(Runnable runnable) {
if (VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN) {
postOnAnimation(runnable);
} else {
postDelayed(runnable, 1000 / 60);
}
}
/** Logs the current scale and translation components of the image matrix. */
private void printMatrixInfo() {
    float[] values = new float[9];
    matrix.getValues(values);
    Log.d(DEBUG, "Scale: " + values[Matrix.MSCALE_X]
            + " TransX: " + values[Matrix.MTRANS_X]
            + " TransY: " + values[Matrix.MTRANS_Y]);
}
// Gesture/animation states the view moves through while handling touch input.
private enum State {NONE, DRAG, ZOOM, FLING, ANIMATE_ZOOM}
/**
 * Callback invoked whenever the displayed image moves or is rescaled
 * (drag, fling, pinch zoom, or animated zoom frames).
 */
public interface OnTouchImageViewListener {
void onMove();
}
/**
 * Detects single taps, long presses, flings and double taps and forwards
 * them to the view's listeners and animation runnables.
 *
 * @author Ortiz
 */
private class GestureListener extends GestureDetector.SimpleOnGestureListener {
    @Override
    public boolean onSingleTapConfirmed(MotionEvent e) {
        // A registered double-tap listener takes precedence over a plain click.
        return (doubleTapListener != null)
                ? doubleTapListener.onSingleTapConfirmed(e)
                : performClick();
    }

    @Override
    public void onLongPress(MotionEvent e) {
        performLongClick();
    }

    @Override
    public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
        // Cancel any fling already in progress so two flings never run at once.
        if (fling != null) {
            fling.cancelFling();
        }
        fling = new Fling((int) velocityX, (int) velocityY);
        compatPostOnAnimation(fling);
        return super.onFling(e1, e2, velocityX, velocityY);
    }

    @Override
    public boolean onDoubleTap(MotionEvent e) {
        boolean handled = false;
        if (doubleTapListener != null) {
            handled = doubleTapListener.onDoubleTap(e);
        }
        if (state == State.NONE) {
            // Toggle between min and max zoom, centered on the tap position.
            float targetZoom = (normalizedScale == minScale) ? maxScale : minScale;
            compatPostOnAnimation(new DoubleTapZoom(targetZoom, e.getX(), e.getY(), false));
            handled = true;
        }
        return handled;
    }

    @Override
    public boolean onDoubleTapEvent(MotionEvent e) {
        if (doubleTapListener == null) {
            return false;
        }
        return doubleTapListener.onDoubleTapEvent(e);
    }
}
/**
 * Handles all touch events: forwards them to the scale and gesture detectors
 * and performs the heavy lifting for dragging the image.
 *
 * @author Ortiz
 */
private class PrivateOnTouchListener implements OnTouchListener {
    //
    // Remember last point position for dragging
    //
    private PointF last = new PointF();

    @Override
    public boolean onTouch(View v, MotionEvent event) {
        mScaleDetector.onTouchEvent(event);
        mGestureDetector.onTouchEvent(event);
        PointF curr = new PointF(event.getX(), event.getY());
        if (state == State.NONE || state == State.DRAG || state == State.FLING) {
            // BUG FIX: use getActionMasked() rather than getAction(). For
            // multi-pointer events getAction() encodes the pointer index in
            // its high bits, so "case MotionEvent.ACTION_POINTER_UP" could
            // never match when a secondary pointer was lifted.
            switch (event.getActionMasked()) {
                case MotionEvent.ACTION_DOWN:
                    last.set(curr);
                    if (fling != null)
                        fling.cancelFling();
                    setState(State.DRAG);
                    break;
                case MotionEvent.ACTION_MOVE:
                    if (state == State.DRAG) {
                        float deltaX = curr.x - last.x;
                        float deltaY = curr.y - last.y;
                        // Clamp the translation so the image cannot be dragged
                        // past its boundaries.
                        float fixTransX = getFixDragTrans(deltaX, viewWidth, getImageWidth());
                        float fixTransY = getFixDragTrans(deltaY, viewHeight, getImageHeight());
                        matrix.postTranslate(fixTransX, fixTransY);
                        fixTrans();
                        last.set(curr.x, curr.y);
                    }
                    break;
                case MotionEvent.ACTION_UP:
                case MotionEvent.ACTION_POINTER_UP:
                    setState(State.NONE);
                    break;
            }
        }
        setImageMatrix(matrix);
        //
        // User-defined OnTouchListener
        //
        if (userTouchListener != null) {
            userTouchListener.onTouch(v, event);
        }
        //
        // OnTouchImageViewListener is set: TouchImageView dragged by user.
        //
        if (touchImageViewListener != null) {
            touchImageViewListener.onMove();
        }
        //
        // indicate event was handled
        //
        return true;
    }
}
/**
 * Reacts to two-finger pinch gestures by scaling the image, and snaps the
 * zoom back inside [minScale, maxScale] once the gesture ends.
 *
 * @author Ortiz
 */
private class ScaleListener extends ScaleGestureDetector.SimpleOnScaleGestureListener {
    @Override
    public boolean onScaleBegin(ScaleGestureDetector detector) {
        setState(State.ZOOM);
        return true;
    }

    @Override
    public boolean onScale(ScaleGestureDetector detector) {
        scaleImage(detector.getScaleFactor(), detector.getFocusX(), detector.getFocusY(), true);
        // Report the pinch-zoom to the registered listener, if any.
        if (touchImageViewListener != null) {
            touchImageViewListener.onMove();
        }
        return true;
    }

    @Override
    public void onScaleEnd(ScaleGestureDetector detector) {
        super.onScaleEnd(detector);
        setState(State.NONE);
        // If the user zoomed past either bound, animate back to that boundary.
        float targetZoom = normalizedScale;
        boolean needsSnapBack = true;
        if (normalizedScale > maxScale) {
            targetZoom = maxScale;
        } else if (normalizedScale < minScale) {
            targetZoom = minScale;
        } else {
            needsSnapBack = false;
        }
        if (needsSnapBack) {
            compatPostOnAnimation(new DoubleTapZoom(targetZoom, viewWidth / 2, viewHeight / 2, true));
        }
    }
}
/**
 * Runnable that animates a zoom in/out over a fixed duration by repeatedly
 * re-posting itself until the interpolation completes, while translating the
 * touched point towards the view center.
 *
 * @author Ortiz
 */
private class DoubleTapZoom implements Runnable {
    private static final float ZOOM_TIME = 500;
    private final long startTime;
    private final float startZoom;
    private final float targetZoom;
    private final float bitmapX;
    private final float bitmapY;
    private final boolean stretchImageToSuper;
    private final AccelerateDecelerateInterpolator interpolator = new AccelerateDecelerateInterpolator();
    private final PointF startTouch;
    private final PointF endTouch;

    DoubleTapZoom(float targetZoom, float focusX, float focusY, boolean stretchImageToSuper) {
        setState(State.ANIMATE_ZOOM);
        startTime = System.currentTimeMillis();
        this.startZoom = normalizedScale;
        this.targetZoom = targetZoom;
        this.stretchImageToSuper = stretchImageToSuper;
        PointF focusInBitmap = transformCoordTouchToBitmap(focusX, focusY, false);
        this.bitmapX = focusInBitmap.x;
        this.bitmapY = focusInBitmap.y;
        // Remember where the focal point starts on screen and where it should
        // end up (the view center), so the image can be translated while scaling.
        startTouch = transformCoordBitmapToTouch(bitmapX, bitmapY);
        endTouch = new PointF(viewWidth / 2, viewHeight / 2);
    }

    @Override
    public void run() {
        float t = interpolate();
        scaleImage(calculateDeltaScale(t), bitmapX, bitmapY, stretchImageToSuper);
        translateImageToCenterTouchPosition(t);
        fixScaleTrans();
        setImageMatrix(matrix);
        // Notify the listener on every animation frame.
        if (touchImageViewListener != null) {
            touchImageViewListener.onMove();
        }
        if (t < 1f) {
            // Zoom not finished yet: schedule the next frame.
            compatPostOnAnimation(this);
        } else {
            // Animation complete.
            setState(State.NONE);
        }
    }

    /**
     * Translates the image so the touched bitmap point drifts towards the
     * view center as the interpolation parameter advances from 0 to 1.
     *
     * @param t interpolated animation progress in [0, 1]
     */
    private void translateImageToCenterTouchPosition(float t) {
        float targetX = startTouch.x + t * (endTouch.x - startTouch.x);
        float targetY = startTouch.y + t * (endTouch.y - startTouch.y);
        PointF current = transformCoordBitmapToTouch(bitmapX, bitmapY);
        matrix.postTranslate(targetX - current.x, targetY - current.y);
    }

    /**
     * @return eased animation progress in [0, 1], derived from elapsed time.
     */
    private float interpolate() {
        float elapsed = (System.currentTimeMillis() - startTime) / ZOOM_TIME;
        return interpolator.getInterpolation(Math.min(1f, elapsed));
    }

    /**
     * Computes the multiplicative scale step needed to move the current zoom
     * to the interpolated zoom for progress {@code t}.
     *
     * @param t interpolated animation progress in [0, 1]
     * @return ratio of the interpolated zoom to the current zoom
     */
    private double calculateDeltaScale(float t) {
        double interpolatedZoom = startZoom + t * (targetZoom - startZoom);
        return interpolatedZoom / normalizedScale;
    }
}
/**
 * Runnable that applies a fling animation to the image. The per-frame
 * translation values are interpolated by the wrapped scroller.
 *
 * @author Ortiz
 */
private class Fling implements Runnable {
    CompatScroller scroller;
    int currX, currY;

    Fling(int velocityX, int velocityY) {
        setState(State.FLING);
        scroller = new CompatScroller(context);
        matrix.getValues(m);
        int startX = (int) m[Matrix.MTRANS_X];
        int startY = (int) m[Matrix.MTRANS_Y];
        // An image larger than the view may scroll until its far edge meets
        // the view edge; a smaller image cannot scroll on that axis at all.
        int minX;
        int maxX;
        if (getImageWidth() > viewWidth) {
            minX = viewWidth - (int) getImageWidth();
            maxX = 0;
        } else {
            minX = startX;
            maxX = startX;
        }
        int minY;
        int maxY;
        if (getImageHeight() > viewHeight) {
            minY = viewHeight - (int) getImageHeight();
            maxY = 0;
        } else {
            minY = startY;
            maxY = startY;
        }
        scroller.fling(startX, startY, velocityX, velocityY, minX,
                maxX, minY, maxY);
        currX = startX;
        currY = startY;
    }

    public void cancelFling() {
        if (scroller != null) {
            setState(State.NONE);
            scroller.forceFinished(true);
        }
    }

    @Override
    public void run() {
        // Keep the registered listener informed on every frame of the fling.
        if (touchImageViewListener != null) {
            touchImageViewListener.onMove();
        }
        if (scroller.isFinished()) {
            scroller = null;
            return;
        }
        if (scroller.computeScrollOffset()) {
            int newX = scroller.getCurrX();
            int newY = scroller.getCurrY();
            matrix.postTranslate(newX - currX, newY - currY);
            currX = newX;
            currY = newY;
            fixTrans();
            setImageMatrix(matrix);
            compatPostOnAnimation(this);
        }
    }
}
/**
 * Thin wrapper exposing a single scroller API while delegating to
 * {@code Scroller} on pre-Gingerbread devices and {@code OverScroller}
 * on Gingerbread and newer.
 */
@TargetApi(Build.VERSION_CODES.GINGERBREAD)
private class CompatScroller {
    Scroller scroller;
    OverScroller overScroller;
    boolean isPreGingerbread;

    public CompatScroller(Context context) {
        if (VERSION.SDK_INT < VERSION_CODES.GINGERBREAD) {
            isPreGingerbread = true;
            scroller = new Scroller(context);
        } else {
            isPreGingerbread = false;
            overScroller = new OverScroller(context);
        }
    }

    public void fling(int startX, int startY, int velocityX, int velocityY, int minX, int maxX, int minY, int maxY) {
        if (isPreGingerbread) {
            scroller.fling(startX, startY, velocityX, velocityY, minX, maxX, minY, maxY);
        } else {
            overScroller.fling(startX, startY, velocityX, velocityY, minX, maxX, minY, maxY);
        }
    }

    public void forceFinished(boolean finished) {
        if (isPreGingerbread) {
            scroller.forceFinished(finished);
        } else {
            overScroller.forceFinished(finished);
        }
    }

    public boolean isFinished() {
        if (isPreGingerbread) {
            return scroller.isFinished();
        } else {
            return overScroller.isFinished();
        }
    }

    /**
     * Advances the scroller's animation clock.
     *
     * BUG FIX: the OverScroller branch previously invoked
     * computeScrollOffset() twice per frame; the redundant duplicate call
     * is removed so the offset is computed exactly once.
     */
    public boolean computeScrollOffset() {
        if (isPreGingerbread) {
            return scroller.computeScrollOffset();
        } else {
            return overScroller.computeScrollOffset();
        }
    }

    public int getCurrX() {
        if (isPreGingerbread) {
            return scroller.getCurrX();
        } else {
            return overScroller.getCurrX();
        }
    }

    public int getCurrY() {
        if (isPreGingerbread) {
            return scroller.getCurrY();
        } else {
            return overScroller.getCurrY();
        }
    }
}
/**
 * Simple value holder grouping a zoom scale factor, a focus point and a
 * ScaleType into one object.
 */
private class ZoomVariables {
public float scale;
public float focusX;
public float focusY;
public ScaleType scaleType;
public ZoomVariables(float scale, float focusX, float focusY, ScaleType scaleType) {
this.scale = scale;
this.focusX = focusX;
this.focusY = focusY;
this.scaleType = scaleType;
}
}
}
| |
/*
* Copyright 2007 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.annotations.GwtIncompatible;
import com.google.errorprone.annotations.Immutable;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.StaticSourceFile;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * This describes the Google-specific JavaScript coding conventions. Within Google, variable names
 * are semantically significant.
 */
@Immutable
public class GoogleCodingConvention extends CodingConventions.Proxy {
  private static final long serialVersionUID = 1L;
  private static final String OPTIONAL_ARG_PREFIX = "opt_";
  private static final String VAR_ARGS_NAME = "var_args";
  private static final Pattern ENUM_KEY_PATTERN =
      Pattern.compile("[A-Z0-9][A-Z0-9_]*");
  private static final Pattern PACKAGE_WITH_TEST_DIR =
      Pattern.compile("^(.*)/(?:test|tests|testing)/(?:[^/]+)$");
  private static final Pattern GENFILES_DIR = Pattern.compile("-out/.*/(bin|genfiles)/(.*)$");

  /** By default, decorate the ClosureCodingConvention. */
  public GoogleCodingConvention() {
    this(new ClosureCodingConvention());
  }

  /** Decorates a wrapped CodingConvention. */
  public GoogleCodingConvention(CodingConvention convention) {
    super(convention);
  }

  /**
   * {@inheritDoc}
   *
   * <p>This enforces the Google const name convention, that the first character
   * after the last $ must be an upper-case letter and all subsequent letters
   * must be upper case. The name must be at least 2 characters long.
   *
   * <p>Examples:
   * <pre>
   *      aaa          Not constant - lower-case letters in the name
   *      A            Not constant - too short
   *      goog$A       Constant - letters after the $ are upper-case.
   *      AA17         Constant - digits can appear after the first letter
   *      goog$7A      Not constant - first character after the $ must be
   *                   upper case.
   *      $A           Constant - doesn't have to be anything in front of the $
   * </pre>
   */
  @Override
  public boolean isConstant(String name) {
    if (name.length() <= 1) {
      return false;
    }
    // In compiled code, '$' is often a namespace delimiter. To allow inlining
    // of namespaced constants, we strip off any namespaces here.
    int pos = name.lastIndexOf('$');
    if (pos >= 0) {
      name = name.substring(pos + 1);
      if (name.isEmpty()) {
        return false;
      }
    }
    return isConstantKey(name);
  }

  @Override
  public boolean isConstantKey(String name) {
    if (name.isEmpty() || !Character.isUpperCase(name.charAt(0))) {
      return false;
    }
    // hack way of checking that there aren't any lower-case letters
    return name.toUpperCase(Locale.ROOT).equals(name);
  }

  /**
   * {@inheritDoc}
   *
   * <p>This enforces Google's convention about enum key names. They must match
   * the regular expression {@code [A-Z0-9][A-Z0-9_]*}.
   *
   * <p>Examples:
   * <ul>
   * <li>A</li>
   * <li>213</li>
   * <li>FOO_BAR</li>
   * </ul>
   */
  @Override
  public boolean isValidEnumKey(String key) {
    return ENUM_KEY_PATTERN.matcher(key).matches();
  }

  /**
   * {@inheritDoc}
   *
   * <p>In Google code, parameter names beginning with {@code opt_} are
   * treated as optional arguments.
   */
  @Override
  public boolean isOptionalParameter(Node parameter) {
    return super.isOptionalParameter(parameter)
        || (parameter.isName() && parameter.getString().startsWith(OPTIONAL_ARG_PREFIX));
  }

  @Override
  public boolean isVarArgsParameter(Node parameter) {
    return super.isVarArgsParameter(parameter)
        || (parameter.isName() && VAR_ARGS_NAME.equals(parameter.getString()));
  }

  /**
   * {@inheritDoc}
   *
   * <p>In Google code, any global name starting with an underscore is
   * considered exported.
   */
  @Override
  public boolean isExported(String name, boolean local) {
    return super.isExported(name, local) || (!local && name.startsWith("_"));
  }

  @Override
  public boolean isClassFactoryCall(Node callNode) {
    Node callTarget = callNode.getFirstChild();
    // BUG FIX: compare the name node's string value via getString() (as the
    // other checks in this class do). Node#toString() produces a debug
    // representation that can never equal the bare name "Polymer".
    return super.isClassFactoryCall(callNode)
        || (callTarget.isName() && callTarget.getString().equals("Polymer"));
  }

  /**
   * {@inheritDoc}
   *
   * <p>In Google code, the package name of a source file is its file path.
   * Exceptions: if a source file's parent directory is "test", "tests", or
   * "testing", that directory is stripped from the package name.
   * If a file is generated, strip the "genfiles" prefix to try
   * to match the package of the generating file.
   */
  @Override
  @GwtIncompatible // TODO(tdeegan): Remove use of Matcher#group to make this fully GWT compatible.
  public String getPackageName(StaticSourceFile source) {
    String name = source.getName();
    Matcher genfilesMatcher = GENFILES_DIR.matcher(name);
    if (genfilesMatcher.find()) {
      name = genfilesMatcher.group(2);
    }
    Matcher m = PACKAGE_WITH_TEST_DIR.matcher(name);
    if (m.find()) {
      return m.group(1);
    } else {
      int lastSlash = name.lastIndexOf('/');
      return lastSlash == -1 ? "" : name.substring(0, lastSlash);
    }
  }

  /**
   * {@inheritDoc}
   *
   * <p>In Google code, private names end with an underscore, and exported
   * names are never considered private (see {@link #isExported}).
   */
  @Override
  public boolean isPrivate(String name) {
    return name.endsWith("_") && !name.endsWith("__") && !isExported(name);
  }

  @Override
  public boolean hasPrivacyConvention() {
    return true;
  }
}
| |
/**
* The MIT License
* Copyright (c) 2014-2016 Nick Guletskii
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.ng200.openolympus.controller.user;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.List;
import javax.mail.MessagingException;
import javax.validation.Valid;
import org.apache.commons.mail.EmailException;
import org.apache.http.client.ClientProtocolException;
import org.ng200.openolympus.controller.user.RegistrationRestController.RegistrationResponse.Status;
import org.ng200.openolympus.dto.UserDto;
import org.ng200.openolympus.jooq.tables.pojos.User;
import org.ng200.openolympus.services.CaptchaService;
import org.ng200.openolympus.services.UserApprovalService;
import org.ng200.openolympus.services.UserService;
import org.ng200.openolympus.validation.UserDtoValidator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Profile;
import org.springframework.http.HttpStatus;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.validation.BindException;
import org.springframework.validation.BindingResult;
import org.springframework.validation.FieldError;
import org.springframework.validation.ObjectError;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import com.fasterxml.jackson.annotation.JsonFormat;
@RestController
@Profile("web")
public class RegistrationRestController {

    /**
     * JSON payload returned by the registration endpoints: overall status plus
     * any captcha or bean-validation errors.
     */
    public static class RegistrationResponse {
        @JsonFormat(shape = JsonFormat.Shape.STRING)
        public static enum Status {
            OK, RECAPTCHA_ERROR, BINDING_ERROR
        }

        private Status status;
        private List<String> recaptchaErrorCodes;
        private List<FieldError> fieldErrors;
        private List<ObjectError> globalErrors;

        public RegistrationResponse(Status status,
                List<String> recaptchaErrorCodes, List<FieldError> fieldErrors,
                List<ObjectError> globalErrors) {
            this.status = status;
            this.recaptchaErrorCodes = recaptchaErrorCodes;
            this.fieldErrors = fieldErrors;
            this.globalErrors = globalErrors;
        }

        public List<FieldError> getFieldErrors() {
            return this.fieldErrors;
        }

        public List<ObjectError> getGlobalErrors() {
            return this.globalErrors;
        }

        public List<String> getRecaptchaErrorCodes() {
            return this.recaptchaErrorCodes;
        }

        public Status getStatus() {
            return this.status;
        }

        public void setFieldErrors(List<FieldError> fieldErrors) {
            this.fieldErrors = fieldErrors;
        }

        public void setGlobalErrors(List<ObjectError> globalErrors) {
            this.globalErrors = globalErrors;
        }

        public void setRecaptchaErrorCodes(List<String> recaptchaErrorCodes) {
            this.recaptchaErrorCodes = recaptchaErrorCodes;
        }

        public void setStatus(Status status) {
            this.status = status;
        }
    }

    // Injected from configuration; when true, newly registered users are
    // approved immediately without manual review.
    @Value("${openolympus.userApproval.automaticallyApprove:false}")
    public boolean automaticallyApprove = false;

    @Autowired
    private UserDtoValidator userDtoValidator;

    @Autowired
    private PasswordEncoder passwordEncoder;

    @Autowired
    private UserService userService;

    @Autowired
    private UserApprovalService userApprovalService;

    @Autowired
    private CaptchaService captchaService;

    /** Translates bean-validation failures into a BINDING_ERROR response. */
    @ResponseStatus(value = HttpStatus.OK)
    @ExceptionHandler({
            BindException.class
    })
    public RegistrationResponse handleBindException(BindException exception) {
        return new RegistrationResponse(Status.BINDING_ERROR, null,
                exception.getBindingResult().getFieldErrors(),
                exception.getBindingResult().getGlobalErrors());
    }

    /**
     * Registers a new user: checks the captcha, validates the DTO, persists
     * the user with an encoded password, and auto-approves the account when
     * configured to do so.
     */
    @RequestMapping(value = "/api/user/register", method = RequestMethod.POST)
    public RegistrationResponse registerUser(
            @RequestBody @Valid final UserDto userDto,
            final BindingResult bindingResult)
            throws BindException, URISyntaxException,
            ClientProtocolException, IOException, MessagingException,
            EmailException {
        final List<String> recaptchaErrorCodes = this.captchaService
                .checkCaptcha(userDto.getRecaptchaResponse());
        if (recaptchaErrorCodes != null && !recaptchaErrorCodes.isEmpty()) {
            return new RegistrationResponse(Status.RECAPTCHA_ERROR,
                    recaptchaErrorCodes, null, null);
        }
        this.validate(userDto, bindingResult);
        final User user = new User().setAddressCity(userDto.getAddressCity())
                .setAddressCountry(userDto.getAddressCountry())
                .setAddressLine1(userDto.getAddressLine1())
                .setAddressLine2(userDto.getAddressLine2())
                .setAddressState(userDto.getAddressState())
                .setBirthDate(userDto.getDateOfBirth())
                .setEmailAddress(userDto.getEmailAddress())
                .setFirstNameLocalised(userDto.getFirstNameLocalised())
                .setFirstNameMain(userDto.getFirstNameMain())
                .setLandline(userDto.getLandline())
                .setLastNameLocalised(userDto.getLastNameLocalised())
                .setLastNameMain(userDto.getLastNameMain())
                .setMiddleNameLocalised(userDto.getMiddleNameLocalised())
                // BUG FIX: was userDto.getMiddleNameLocalised() — a copy-paste
                // error that stored the localised middle name in the main field.
                .setMiddleNameMain(userDto.getMiddleNameMain())
                .setMobile(userDto.getMobile())
                .setPassword(this.passwordEncoder.encode(userDto.getPassword()))
                .setSchool(userDto.getSchool())
                .setTeacherFirstName(userDto.getTeacherFirstName())
                .setTeacherLastName(userDto.getTeacherLastName())
                .setTeacherMiddleName(userDto.getTeacherMiddleName())
                .setUsername(userDto.getUsername())
                .setSuperuser(false).setApprovalEmailSent(false)
                .setEnabled(true);
        this.userService.insertUser(user);
        if (this.automaticallyApprove) {
            this.userApprovalService.approveUser(user);
        }
        return new RegistrationResponse(Status.OK, null, null, null);
    }

    /**
     * Validates the registration DTO without creating a user; throws
     * BindException (handled above) when validation fails.
     *
     * <p>BUG FIX: was declared {@code private}; Spring MVC handler methods
     * must be public to be reliably invoked through the controller proxy.
     */
    @RequestMapping(value = "/api/user/register/validate", method = RequestMethod.POST)
    public RegistrationResponse validate(
            @RequestBody @Valid final UserDto userDto,
            final BindingResult bindingResult)
            throws BindException {
        this.userDtoValidator.validate(userDto, bindingResult);
        if (bindingResult.hasErrors()) {
            throw new BindException(bindingResult);
        }
        return new RegistrationResponse(Status.OK, null, null, null);
    }
}
| |
package main.java.pl.lodz.p.ftims.poid.exercise1_2;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.model.Histogram;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.model.Image;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.operations.Operations;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.operations.Transformable;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.operations.basic.Brightness;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.operations.basic.Contrast;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.operations.basic.Negative;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.operations.filters.basic.MeanFilter;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.operations.filters.basic.MedianFilter;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.operations.filters.linear.LinearFilter;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.operations.filters.nonlinear.RosenfeldOperator;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.operations.fourier.FourierTransform;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.operations.fourier.filters.FourierFilter;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.operations.fourier.filters.HighPassDetectionEdgeDirectionFilter;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.operations.fourier.segmentation.RegionSegmentation;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.operations.histogram.AbstractFinalProbDensFunction;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.samples.HistogramModification;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.samples.SampleFiles;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.samples.filters.BasicFiltersMasks;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.samples.filters.linear.LinearFilters;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.samples.filters.nonlinear.NonLinearFilters;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.samples.fourier.FourierFilters;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.samples.fourier.FourierHighPassMasks;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.samples.segmentation.Segmentation;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.utils.GnuplotUtil;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.utils.HistogramUtil;
import main.java.pl.lodz.p.ftims.poid.exercise1_2.utils.ImageUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.imageio.ImageIO;
import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.List;
/**
* @author alisowsk
*/
public class MainWindow extends JFrame{
// Display size (pixels) used when scaling images for the preview labels.
private static final int MAX_IMG_WIDTH = 200;
private static final int MAX_IMG_HEIGHT = 200;
private static final Logger LOG = LoggerFactory.getLogger(MainWindow.class);
// menu section
private JMenuBar mainMenuBar;
private JMenu helpMenuSection;
private JMenu fileMenuSection;
// paths section
private JLabel originalImagePathTextLabel;
// private JLabel transformedImagePathTextLabel; // TODO implement
private JTextField originalImagePathTextInput;
// private JTextField transformedImagePathTextInput; // TODO implement
private JButton originalImageFileChooserButton;
private JComboBox originalImageSelectComboBox;
private JFileChooser fileChooser;
// images section (side-by-side original / transformed previews)
private JLabel originalImageIconLabel;
private JLabel transformedImageIconLabel;
private JLabel originalImageTextLabel;
private JLabel transformedImageTextLabel;
// basic operations section
private JLabel basicOperationsTextLabel;
private JCheckBox contrastCheckbox;
private JCheckBox brightnessCheckbox;
private JCheckBox negativeCheckBox;
private JTextField brightnessTextField;
private JTextField contrastTextField;
//basic filters section
private JLabel basicFiltersMaskSizeTextLabel;
private JLabel basicFiltersTextLabel;
private JComboBox filterMaskSizeSelectComboBox;
private JCheckBox meanFilterCheckbox;
private JCheckBox medianFilterCheckbox;
//linear filters section
private JLabel linearFiltersTextField;
private JCheckBox linearFiltersCheckbox;
private JComboBox linearFiltersSelectComboBox;
//non linear filters section
private JLabel nonLinearFiltersTextField;
private JCheckBox nonLinearFiltersCheckbox;
private JComboBox nonLinearFiltersSelectComboBox;
private JTextField rosenfeldOperatorTextField;
private JLabel rosenfeldOperatorLabel;
//histogram operations section
private JLabel histogramOperationsTextLabel;
private JCheckBox histogramOperationsCheckBox;
private JComboBox histogramOperationsSelectComboBox;
private JTextField gMinTextField;
private JTextField gMaxTextField;
private JLabel gMinTextLabel;
private JLabel gMaxTextLabel;
// transform button section
private JButton startTransformButton;
// transform in frequency domain section
private JTextField freqDomMinValTextInput;
private JTextField freqDomMaxValTextInput;
private JCheckBox freqDomCheckbox;
private JCheckBox freqDomFilterCheckBox;
private JComboBox freqDomFilterSelectComboBox;
private JComboBox freqDomHighPassFilterMaskComboBox;
private JLabel freqDomHighPassFilterMaskTextLabel;
private JLabel freqDomMinValTextLabel;
private JLabel freqDomMaxValTextLabel;
private JCheckBox timeDomHannFilterCheckBox;
// segmentation section
private JCheckBox segmentationCheckbox;
private JComboBox segmentationComboBox;
private JTextField segmentationThresholdTexInput;
private JTextField segmentationMinPixForRegionTextInput;
private JLabel segmentationMinPixForRegionTextLabel;
private JLabel segmentationThresholdTextLabel;
// logic components: source/result images and the operation pipeline
private Image sourceImage;
private Image resultImage;
private Operations operations;
/**
 * Builds the main application window: creates the Swing components, wires
 * their listeners, sets up the operation pipeline and shows the frame.
 */
public MainWindow(){
super("Image and Sound Processing - TUL, FTiMS 2015/2016");
// Null layout: components are positioned absolutely by the init methods.
getContentPane().setLayout(null);
initializeGraphicComponents();
initializeListeners();
initializeLogicComponents();
initWindow();
}
/** Creates the (initially empty) pipeline of image operations. */
private void initializeLogicComponents() {
operations = new Operations();
}
/**
 * Builds every UI section in turn; each helper creates and positions the
 * components for one functional area of the window.
 */
private void initializeGraphicComponents() {
initializeMenuComponents();
initializePathSectionComponents();
initializeImagesSection();
initializeBasicOperationsSection();
initializeBasicFiltersSection();
initializeLinearFilterSection();
initializeNonLinearFiltersSection();
initializeHistogramOperationsSection();
initializeTransformButtonSection();
initializeFiltrationFrequencyDomainSection();
initializeSegmentationSection();
initializeHelperGrid();
}
/** Attaches listeners to the interactive components (must run after the components are built). */
private void initializeListeners(){
initializePathSectionListeners();
initializeTransformButtonListeners();
}
private void initializeTransformButtonListeners() {
startTransformButton.addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(MouseEvent e) {
operations.clear();
if (negativeCheckBox.isSelected()) {
operations.addOperation(new Negative());
}
if (brightnessCheckbox.isSelected()) {
int brightnessValue = Integer.parseInt(brightnessTextField.getText());
operations.addOperation(new Brightness(brightnessValue));
}
if (contrastCheckbox.isSelected()) {
float contrastValue = Float.parseFloat(contrastTextField.getText());
operations.addOperation(new Contrast(contrastValue));
}
if (meanFilterCheckbox.isSelected()) {
int maskSize = Integer.parseInt(String.valueOf(BasicFiltersMasks.MASKS.get(filterMaskSizeSelectComboBox.getSelectedItem())));
operations.addOperation(new MeanFilter(maskSize));
}
if (medianFilterCheckbox.isSelected()) {
int maskSize = Integer.parseInt(String.valueOf(BasicFiltersMasks.MASKS.get(filterMaskSizeSelectComboBox.getSelectedItem())));
operations.addOperation(new MedianFilter(maskSize));
}
if (linearFiltersCheckbox.isSelected()) {
for (String linearFilter : LinearFilters.FILTERS.keySet()) {
if (linearFilter.equals(linearFiltersSelectComboBox.getSelectedItem())) {
operations.addOperation(new LinearFilter(LinearFilters.FILTERS.get(linearFilter)));
}
}
}
if (nonLinearFiltersCheckbox.isSelected()) {
Transformable nonLinearFilter = NonLinearFilters.FILTERS.get(nonLinearFiltersSelectComboBox.getSelectedItem());
if (nonLinearFilter instanceof RosenfeldOperator) {
RosenfeldOperator operator = (RosenfeldOperator) nonLinearFilter;
operator.setrCoefficient(Integer.parseInt(rosenfeldOperatorTextField.getText()));
}
operations.addOperation(nonLinearFilter);
}
if (histogramOperationsCheckBox.isSelected()) {
List<Histogram> histograms = HistogramUtil.prepareHistograms(sourceImage);
GnuplotUtil.saveHistograms(histograms);
for (String histogramModification : HistogramModification.VARIANTS.keySet()) {
if (histogramModification.equals(histogramOperationsSelectComboBox.getSelectedItem())) {
AbstractFinalProbDensFunction histogramOperation = HistogramModification.VARIANTS.get(histogramModification);
histogramOperation.setHistograms(histograms);//TODO
histogramOperation.setgMin(Integer.parseInt(gMinTextField.getText()));
histogramOperation.setgMax(Integer.parseInt(gMaxTextField.getText()));
operations.addOperation(histogramOperation);
}
}
}
if (freqDomCheckbox.isSelected()){
if(freqDomFilterCheckBox.isSelected()){
for (String fourierFilter : FourierFilters.FILTERS.keySet()) {
if (fourierFilter.equals(freqDomFilterSelectComboBox.getSelectedItem())) {
FourierFilter filter = FourierFilters.FILTERS.get(fourierFilter);
int minOrK=0;
int maxOrL=0;
if(freqDomMinValTextInput.getText() != null && !freqDomMinValTextInput.getText().isEmpty()){
minOrK = Integer.parseInt(freqDomMinValTextInput.getText());
}
if(freqDomMaxValTextInput.getText() != null && !freqDomMaxValTextInput.getText().isEmpty()){
maxOrL = Integer.parseInt(freqDomMaxValTextInput.getText());
}
if(filter instanceof HighPassDetectionEdgeDirectionFilter){
Image mask = null;
for (String maskName : FourierHighPassMasks.MASKS.keySet()) {
if (maskName.equals(freqDomHighPassFilterMaskComboBox.getSelectedItem())) {
mask = FourierHighPassMasks.MASKS.get(maskName);
}
}
((HighPassDetectionEdgeDirectionFilter) filter).setMask(mask);
}
filter.setMinOrK(minOrK);
filter.setMaxOrL(maxOrL);
FourierTransform fourierTransform = new FourierTransform(filter);
if(timeDomHannFilterCheckBox.isSelected()){
fourierTransform.setApplyHannWindow(true);
}
operations.addOperation(fourierTransform);
}
}
} else {
FourierTransform fourierTransform = new FourierTransform();
if(timeDomHannFilterCheckBox.isSelected()){
fourierTransform.setApplyHannWindow(true);
}
operations.addOperation(fourierTransform);
}
}
if(segmentationCheckbox.isSelected()){
for (String segmentation : Segmentation.SEGMENTATIONS.keySet()) {
if (segmentation.equals(segmentationComboBox.getSelectedItem())) {
RegionSegmentation segm = Segmentation.SEGMENTATIONS.get(segmentation);
segm.setThreshold(Integer.parseInt(segmentationThresholdTexInput.getText()));
segm.setMinimumPixelsForRegion(Integer.parseInt(segmentationMinPixForRegionTextInput.getText()));
operations.addOperation(segm);
}
}
}
resultImage = operations.processImage(sourceImage);
BufferedImage resultBufferedImage = ImageUtil.convertImageToBufferedImage(resultImage);
java.awt.Image resultBufferedScaledImage = resultBufferedImage.getScaledInstance(MAX_IMG_WIDTH, MAX_IMG_HEIGHT, java.awt.Image.SCALE_FAST);
transformedImageIconLabel.setIcon(new ImageIcon(resultBufferedScaledImage));
try {
ImageUtil.saveImageToFile(resultImage);
} catch (IOException e1) {
//TODO ex
}
}
});
}
/** Creates the "Transform!" button that triggers the processing pipeline and places it on the frame. */
private void initializeTransformButtonSection() {
    final JButton button = new JButton("Transform!");
    button.setBounds(808, 563, 155, 82);
    startTransformButton = button;
    getContentPane().add(button);
}
/**
 * Wires the listeners that let the user pick the source image, either through the
 * file-chooser button or the sample-image combo box. Both paths funnel into
 * {@link #loadOriginalImage(File)} so the preview/loading logic is not duplicated.
 */
private void initializePathSectionListeners() {
    originalImageFileChooserButton.addMouseListener(new MouseAdapter() {
        @Override
        public void mouseClicked(MouseEvent e) {
            int returnVal = fileChooser.showOpenDialog(MainWindow.this);
            if (returnVal == JFileChooser.APPROVE_OPTION) {
                loadOriginalImage(fileChooser.getSelectedFile());
            }
        }
    });
    originalImageSelectComboBox.addActionListener(e -> {
        String path = ((SampleFiles) originalImageSelectComboBox.getSelectedItem()).getPath();
        loadOriginalImage(new File(getClass().getClassLoader().getResource(path).getFile()));
    });
}

/**
 * Loads {@code file} as the source image: updates the path text field, shows a
 * 200x200 preview and reads the image into {@code sourceImage}.
 *
 * <p>Fix: the original listeners called {@code getScaledInstance} on the result of
 * {@code ImageIO.read} unconditionally; when the read failed (exception) or returned
 * {@code null} (no registered reader for the format) that threw a
 * NullPointerException. We now log and return early instead. The bare
 * {@code printStackTrace()} on the second read failure is also routed to the logger.
 *
 * @param file image file selected by the user
 */
private void loadOriginalImage(File file) {
    originalImagePathTextInput.setText(file.getPath());
    BufferedImage bufImg = null;
    try {
        bufImg = ImageIO.read(file);
    } catch (Exception ex) {
        LOG.error("An unexpected error while reading image from file has occurred", ex);
    }
    if (bufImg == null) {
        // read failed or no ImageReader could decode the file - nothing to display
        return;
    }
    java.awt.Image scaledImage = bufImg.getScaledInstance(200, 200, java.awt.Image.SCALE_FAST);
    originalImageIconLabel.setIcon(new ImageIcon(scaledImage));
    try {
        sourceImage = ImageUtil.readImageFromFile(file);
    } catch (IOException e1) {
        LOG.error("Could not read source image from file", e1);
    }
}
/**
 * Builds the "Basic operations" section: a negative checkbox plus
 * brightness/contrast checkboxes, each paired with a numeric input field.
 */
private void initializeBasicOperationsSection() {
    final java.awt.Container pane = getContentPane();

    basicOperationsTextLabel = new JLabel("Basic operations");
    basicOperationsTextLabel.setBounds(34, 302, 200, 50);
    pane.add(basicOperationsTextLabel);

    negativeCheckBox = new JCheckBox("Negative");
    negativeCheckBox.setBounds(32, 348, 133, 24);
    pane.add(negativeCheckBox);

    brightnessCheckbox = new JCheckBox("Brightness");
    brightnessCheckbox.setBounds(34, 384, 133, 24);
    pane.add(brightnessCheckbox);

    brightnessTextField = new JTextField();
    brightnessTextField.setBounds(207, 383, 92, 27);
    brightnessTextField.setColumns(10);
    pane.add(brightnessTextField);

    contrastCheckbox = new JCheckBox("Contrast");
    contrastCheckbox.setBounds(34, 421, 133, 24);
    pane.add(contrastCheckbox);

    contrastTextField = new JTextField();
    contrastTextField.setBounds(207, 421, 92, 27);
    pane.add(contrastTextField);
}
/**
 * Builds the "Basic filters" section: mean/median filter checkboxes and the
 * shared mask-size selector (values come from BasicFiltersMasks).
 */
private void initializeBasicFiltersSection() {
    final java.awt.Container pane = getContentPane();

    basicFiltersTextLabel = new JLabel("Basic filters");
    basicFiltersTextLabel.setBounds(34, 477, 200, 50);
    pane.add(basicFiltersTextLabel);

    basicFiltersMaskSizeTextLabel = new JLabel("Mask size");
    basicFiltersMaskSizeTextLabel.setBounds(34, 589, 138, 30);
    pane.add(basicFiltersMaskSizeTextLabel);

    meanFilterCheckbox = new JCheckBox("Mean filter");
    meanFilterCheckbox.setBounds(34, 523, 148, 24);
    pane.add(meanFilterCheckbox);

    medianFilterCheckbox = new JCheckBox("Median filter");
    medianFilterCheckbox.setBounds(34, 554, 148, 24);
    pane.add(medianFilterCheckbox);

    filterMaskSizeSelectComboBox = new JComboBox(BasicFiltersMasks.MASKS.keySet().toArray());
    filterMaskSizeSelectComboBox.setBounds(207, 591, 92, 27);
    pane.add(filterMaskSizeSelectComboBox);
}
/** Builds the "Linear filters" section: an enable checkbox plus the operator selector. */
private void initializeLinearFilterSection() {
    final java.awt.Container pane = getContentPane();

    linearFiltersTextField = new JLabel("Linear filters");
    linearFiltersTextField.setBounds(358, 302, 200, 50);
    pane.add(linearFiltersTextField);

    linearFiltersCheckbox = new JCheckBox("Operator");
    linearFiltersCheckbox.setBounds(370, 348, 148, 24);
    pane.add(linearFiltersCheckbox);

    linearFiltersSelectComboBox = new JComboBox(LinearFilters.FILTERS.keySet().toArray());
    linearFiltersSelectComboBox.setBounds(486, 347, 244, 27);
    pane.add(linearFiltersSelectComboBox);
}
/**
 * Builds the "Non linear filters" section: operator selector plus the
 * R-coefficient input used by the Rosenfeld operator.
 */
private void initializeNonLinearFiltersSection() {
    final java.awt.Container pane = getContentPane();

    nonLinearFiltersTextField = new JLabel("Non linear filters");
    nonLinearFiltersTextField.setBounds(365, 477, 200, 50);
    pane.add(nonLinearFiltersTextField);

    nonLinearFiltersCheckbox = new JCheckBox("Operator");
    nonLinearFiltersCheckbox.setBounds(370, 523, 148, 24);
    pane.add(nonLinearFiltersCheckbox);

    nonLinearFiltersSelectComboBox = new JComboBox(NonLinearFilters.FILTERS.keySet().toArray());
    nonLinearFiltersSelectComboBox.setBounds(486, 522, 177, 27);
    pane.add(nonLinearFiltersSelectComboBox);

    rosenfeldOperatorLabel = new JLabel("Rosenfeld operator R");
    rosenfeldOperatorLabel.setBounds(375, 548, 190, 30);
    pane.add(rosenfeldOperatorLabel);

    rosenfeldOperatorTextField = new JTextField();
    rosenfeldOperatorTextField.setBounds(571, 553, 92, 27);
    rosenfeldOperatorTextField.setColumns(10);
    pane.add(rosenfeldOperatorTextField);
}
/**
 * Builds the "Histogram operations" section: variant selector plus the
 * g-min/g-max bounds used by the selected histogram modification.
 */
private void initializeHistogramOperationsSection() {
    final java.awt.Container pane = getContentPane();

    histogramOperationsTextLabel = new JLabel("Histogram operations");
    histogramOperationsTextLabel.setBounds(760, 302, 200, 50);
    pane.add(histogramOperationsTextLabel);

    histogramOperationsCheckBox = new JCheckBox("Variant");
    histogramOperationsCheckBox.setBounds(760, 348, 148, 24);
    pane.add(histogramOperationsCheckBox);

    histogramOperationsSelectComboBox = new JComboBox(HistogramModification.VARIANTS.keySet().toArray());
    histogramOperationsSelectComboBox.setBounds(753, 383, 233, 27);
    pane.add(histogramOperationsSelectComboBox);

    gMinTextField = new JTextField();
    gMinTextField.setBounds(894, 420, 92, 27);
    gMinTextField.setColumns(10);
    pane.add(gMinTextField);

    gMaxTextField = new JTextField();
    gMaxTextField.setBounds(894, 455, 92, 27);
    gMaxTextField.setColumns(10);
    pane.add(gMaxTextField);

    gMinTextLabel = new JLabel("g min");
    gMinTextLabel.setBounds(763, 418, 118, 30);
    pane.add(gMinTextLabel);

    gMaxTextLabel = new JLabel("g max");
    gMaxTextLabel.setBounds(764, 453, 118, 30);
    pane.add(gMaxTextLabel);
}
/**
 * Creates the static widgets of the image-path section: the path label, the
 * read-only path field, the file-chooser button and the sample-image combo box.
 * Their listeners are attached separately in initializePathSectionListeners().
 */
private void initializePathSectionComponents() {
    final java.awt.Container pane = getContentPane();

    originalImagePathTextLabel = new JLabel("Original image path");
    originalImagePathTextLabel.setBounds(12, 54, 145, 17);
    pane.add(originalImagePathTextLabel);

    // TODO implement the "transformed image path" widgets:
    // transformedImagePathTextLabel = new JLabel("Transformed image path");
    // transformedImagePathTextLabel.setBounds(12, 153, 206, 17);
    // getContentPane().add(transformedImagePathTextLabel);

    originalImagePathTextInput = new JTextField();
    originalImagePathTextInput.setBounds(12, 83, 491, 27);
    originalImagePathTextInput.setEnabled(false); // display-only; filled in by the listeners
    originalImagePathTextInput.setColumns(10);
    pane.add(originalImagePathTextInput);

    // TODO implement:
    // transformedImagePathTextInput = new JTextField();
    // transformedImagePathTextInput.setColumns(10);
    // transformedImagePathTextInput.setBounds(12, 171, 491, 27);
    // transformedImagePathTextInput.setEnabled(false);
    // getContentPane().add(transformedImagePathTextInput);

    originalImageFileChooserButton = new JButton("Choose image");
    originalImageFileChooserButton.setBounds(358, 42, 145, 29);
    pane.add(originalImageFileChooserButton);

    fileChooser = new JFileChooser();

    originalImageSelectComboBox = new JComboBox(SampleFiles.values());
    originalImageSelectComboBox.setToolTipText("Select input image");
    originalImageSelectComboBox.setBounds(148, 49, 198, 27);
    pane.add(originalImageSelectComboBox);
}
/**
 * Creates the two 200x200 preview areas (original and transformed image),
 * each framed with a gray border, together with their captions.
 */
private void initializeImagesSection() {
    final java.awt.Container pane = getContentPane();

    originalImageIconLabel = new JLabel("");
    originalImageIconLabel.setBounds(530, 80, 200, 200);
    originalImageIconLabel.setBorder(BorderFactory.createLineBorder(Color.GRAY));
    pane.add(originalImageIconLabel);

    transformedImageIconLabel = new JLabel("");
    transformedImageIconLabel.setBounds(760, 80, 200, 200);
    transformedImageIconLabel.setBorder(BorderFactory.createLineBorder(Color.GRAY));
    pane.add(transformedImageIconLabel);

    originalImageTextLabel = new JLabel("Original image");
    originalImageTextLabel.setBounds(530, 50, 200, 20);
    pane.add(originalImageTextLabel);

    transformedImageTextLabel = new JLabel("Transformed image");
    transformedImageTextLabel.setBounds(760, 50, 200, 20);
    pane.add(transformedImageTextLabel);
}
/**
 * Builds the "Filtration in frequency domain" section: Fourier filter selector,
 * min/max (k/l) bounds, the high-pass edge-direction mask selector and the
 * optional Hann window checkbox.
 */
private void initializeFiltrationFrequencyDomainSection() {
    final java.awt.Container pane = getContentPane();

    freqDomCheckbox = new JCheckBox("Filtration in frequency domain");
    freqDomCheckbox.setBounds(34, 124, 244, 24);
    pane.add(freqDomCheckbox);

    freqDomFilterCheckBox = new JCheckBox("Filter");
    freqDomFilterCheckBox.setBounds(61, 149, 104, 24);
    pane.add(freqDomFilterCheckBox);

    freqDomFilterSelectComboBox = new JComboBox(FourierFilters.FILTERS.keySet().toArray());
    freqDomFilterSelectComboBox.setBounds(60, 174, 162, 27);
    pane.add(freqDomFilterSelectComboBox);

    freqDomHighPassFilterMaskTextLabel = new JLabel("High-pass filter mask");
    freqDomHighPassFilterMaskTextLabel.setBounds(61, 260, 190, 30);
    pane.add(freqDomHighPassFilterMaskTextLabel);

    freqDomHighPassFilterMaskComboBox = new JComboBox(FourierHighPassMasks.MASKS.keySet().toArray());
    freqDomHighPassFilterMaskComboBox.setBounds(215, 263, 162, 27);
    pane.add(freqDomHighPassFilterMaskComboBox);

    freqDomMinValTextLabel = new JLabel("Min (k)");
    freqDomMinValTextLabel.setBounds(61, 199, 49, 30);
    pane.add(freqDomMinValTextLabel);

    freqDomMinValTextInput = new JTextField();
    freqDomMinValTextInput.setBounds(122, 201, 92, 27);
    freqDomMinValTextInput.setColumns(10);
    pane.add(freqDomMinValTextInput);

    freqDomMaxValTextLabel = new JLabel("Max (l)");
    freqDomMaxValTextLabel.setBounds(61, 234, 49, 30);
    pane.add(freqDomMaxValTextLabel);

    freqDomMaxValTextInput = new JTextField();
    freqDomMaxValTextInput.setBounds(122, 236, 92, 27);
    freqDomMaxValTextInput.setColumns(10);
    pane.add(freqDomMaxValTextInput);

    timeDomHannFilterCheckBox = new JCheckBox("Apply Hann");
    timeDomHannFilterCheckBox.setBounds(169, 148, 104, 24);
    pane.add(timeDomHannFilterCheckBox);
}
/**
 * Builds the "Segmentation" section: algorithm selector plus the threshold
 * and minimum-pixels-per-region inputs.
 */
private void initializeSegmentationSection() {
    final java.awt.Container pane = getContentPane();

    segmentationCheckbox = new JCheckBox("Segmentation");
    segmentationCheckbox.setBounds(341, 122, 162, 24);
    pane.add(segmentationCheckbox);

    segmentationComboBox = new JComboBox(Segmentation.SEGMENTATIONS.keySet().toArray());
    segmentationComboBox.setBounds(341, 148, 162, 27);
    pane.add(segmentationComboBox);

    segmentationThresholdTexInput = new JTextField();
    segmentationThresholdTexInput.setBounds(411, 201, 92, 27);
    segmentationThresholdTexInput.setColumns(10);
    pane.add(segmentationThresholdTexInput);

    segmentationThresholdTextLabel = new JLabel("Threshold");
    segmentationThresholdTextLabel.setBounds(310, 199, 104, 30);
    pane.add(segmentationThresholdTextLabel);

    segmentationMinPixForRegionTextInput = new JTextField();
    segmentationMinPixForRegionTextInput.setBounds(411, 236, 92, 27);
    segmentationMinPixForRegionTextInput.setColumns(10);
    pane.add(segmentationMinPixForRegionTextInput);

    segmentationMinPixForRegionTextLabel = new JLabel("Min pix./reg.");
    segmentationMinPixForRegionTextLabel.setBounds(310, 234, 104, 30);
    pane.add(segmentationMinPixForRegionTextLabel);
}
/**
 * Adds invisible struts that visually partition the null-layout form into
 * sections; the struts are purely cosmetic separators.
 */
private void initializeHelperGrid() {
    final java.awt.Container pane = getContentPane();

    Component topSectionsRowSeparator = Box.createHorizontalStrut(20);
    topSectionsRowSeparator.setBounds(-5, 277, 1003, 10);
    pane.add(topSectionsRowSeparator);

    Component topCenterColumnSeparator = Box.createVerticalStrut(20);
    topCenterColumnSeparator.setBounds(507, 18, 11, 263);
    pane.add(topCenterColumnSeparator);

    Component middleRowSeparator = Box.createHorizontalStrut(20);
    middleRowSeparator.setBounds(-153, 455, 894, 10);
    pane.add(middleRowSeparator);

    Component leftColumnSeparator = Box.createVerticalStrut(20);
    leftColumnSeparator.setBounds(341, 284, 5, 387);
    pane.add(leftColumnSeparator);

    Component rightColumnSeparator = Box.createVerticalStrut(20);
    rightColumnSeparator.setBounds(736, 284, 5, 387);
    pane.add(rightColumnSeparator);

    Component pathSectionSeparator = Box.createHorizontalStrut(20);
    pathSectionSeparator.setBounds(0, 109, 510, 10);
    pane.add(pathSectionSeparator);
}
/** Builds the File/Help menu bar across the top of the frame (menus are currently empty). */
private void initializeMenuComponents() {
    mainMenuBar = new JMenuBar();
    mainMenuBar.setBounds(0, 0, 1000, 30);
    getContentPane().add(mainMenuBar);

    fileMenuSection = new JMenu("File");
    helpMenuSection = new JMenu("Help");
    mainMenuBar.add(fileMenuSection);
    mainMenuBar.add(helpMenuSection);
}
/**
 * Applies the top-level frame settings. Fix: size and default-close-operation
 * are now configured <em>before</em> the frame is made visible, so the window
 * does not first appear at the wrong (zero) size and the close behaviour is
 * active from the first paint. The original called setVisible(true) first.
 */
private void initWindow() {
    this.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
    this.setSize(1000, 700);
    this.setVisible(true);
}
}
| |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.stage.origin.tcp;
import com.google.common.primitives.Bytes;
import com.streamsets.pipeline.api.ErrorCode;
import com.streamsets.pipeline.api.OnRecordError;
import com.streamsets.pipeline.api.Record;
import com.streamsets.pipeline.api.Stage;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.config.DataFormat;
import com.streamsets.pipeline.lib.parser.net.NetTestUtils;
import com.streamsets.pipeline.lib.parser.net.syslog.SyslogFramingMode;
import com.streamsets.pipeline.lib.parser.net.syslog.SyslogMessage;
import com.streamsets.pipeline.lib.parser.text.TextDataParserFactory;
import com.streamsets.pipeline.lib.tls.TlsConfigErrors;
import com.streamsets.pipeline.sdk.PushSourceRunner;
import com.streamsets.pipeline.stage.common.DataFormatErrors;
import com.streamsets.pipeline.stage.util.tls.TLSTestUtils;
import com.streamsets.testing.NetworkUtils;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioSocketChannel;
import org.apache.avro.ipc.NettyTransceiver;
import org.apache.avro.ipc.specific.SpecificRequestor;
import org.apache.commons.io.Charsets;
import org.apache.commons.lang3.StringUtils;
import org.apache.flume.source.avro.AvroFlumeEvent;
import org.apache.flume.source.avro.AvroSourceProtocol;
import org.apache.flume.source.avro.Status;
import org.junit.Assert;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.security.KeyPair;
import java.security.cert.Certificate;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.LinkedBlockingDeque;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.startsWith;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.collection.IsMapContaining.hasKey;
import static com.streamsets.testing.Matchers.fieldWithValue;
public class TestTCPServerSource {
public static final String TEN_DELIMITED_RECORDS = "one\ntwo\nthree\nfour\nfive\nsix\nseven\neight\nnine\nten\n";
public static final String SYSLOG_RECORD = "<42>Mar 24 17:18:10 10.1.2.34 Got an error";
/**
 * Decoder-chain test for syslog mode: a single {@link #SYSLOG_RECORD} must be
 * parsed under both non-transparent framing (separator-terminated) and
 * octet-counting framing (length-prefixed).
 */
@Test
public void syslogRecords() {
    final Charset charset = Charsets.ISO_8859_1;
    final TCPServerSourceConfig configBean = createConfigBean(charset);
    final TCPServerSource source = new TCPServerSource(configBean);
    final List<Stage.ConfigIssue> issues = new LinkedList<>();

    // Non-transparent framing: record terminated by the configured separator.
    EmbeddedChannel nonTransparentChannel =
        new EmbeddedChannel(source.buildByteBufToMessageDecoderChain(issues).toArray(new ChannelHandler[0]));
    nonTransparentChannel.writeInbound(
        Unpooled.copiedBuffer(SYSLOG_RECORD + configBean.nonTransparentFramingSeparatorCharStr, charset));
    assertSyslogRecord(nonTransparentChannel);
    assertFalse(nonTransparentChannel.finishAndReleaseAll());

    // Octet counting: record prefixed by its byte length and a space.
    configBean.syslogFramingMode = SyslogFramingMode.OCTET_COUNTING;
    EmbeddedChannel octetCountingChannel =
        new EmbeddedChannel(source.buildByteBufToMessageDecoderChain(issues).toArray(new ChannelHandler[0]));
    octetCountingChannel.writeInbound(Unpooled.copiedBuffer(SYSLOG_RECORD.length() + " " + SYSLOG_RECORD, charset));
    assertSyslogRecord(octetCountingChannel);
    assertFalse(octetCountingChannel.finishAndReleaseAll());
}
/**
 * Asserts that the channel's next inbound message is the parsed form of
 * {@link #SYSLOG_RECORD}: host 10.1.2.34, priority 42 (facility 5, severity 2)
 * and the trailing free-text message.
 */
private void assertSyslogRecord(EmbeddedChannel ch) {
    final Object inbound = ch.readInbound();
    assertThat(inbound, notNullValue());
    assertThat(inbound, instanceOf(SyslogMessage.class));
    final SyslogMessage message = (SyslogMessage) inbound;
    assertThat(message.getHost(), equalTo("10.1.2.34"));
    assertThat(message.getRemainingMessage(), equalTo("Got an error"));
    assertThat(message.getPriority(), equalTo(42));
    assertThat(message.getFacility(), equalTo(5));
    assertThat(message.getSeverity(), equalTo(2));
}
/**
 * Validates the init-time configuration checks: each deliberately broken
 * mutation of the config bean must surface exactly the expected error code,
 * and each repaired configuration must validate cleanly. The bean is mutated
 * in place, so the order of the steps below matters.
 */
@Test
public void initMethod() throws Exception {
final TCPServerSourceConfig configBean = createConfigBean(Charsets.ISO_8859_1);
// the freshly created bean is valid - no issues expected
initSourceAndValidateIssues(configBean);
// empty ports
configBean.ports = new LinkedList<>();
initSourceAndValidateIssues(configBean, Errors.TCP_02);
// invalid ports
// too large
configBean.ports = Arrays.asList("123456789");
initSourceAndValidateIssues(configBean, Errors.TCP_03);
// not a number
configBean.ports = Arrays.asList("abcd");
initSourceAndValidateIssues(configBean, Errors.TCP_03);
// start TLS config tests
configBean.ports = randomSinglePort();
configBean.tlsConfigBean.tlsEnabled = true;
configBean.tlsConfigBean.keyStoreFilePath = "non-existent-file-path";
initSourceAndValidateIssues(configBean, TlsConfigErrors.TLS_01);
// a file that exists but is not a readable keystore
File blankTempFile = File.createTempFile("blank", "txt");
blankTempFile.deleteOnExit();
configBean.tlsConfigBean.keyStoreFilePath = blankTempFile.getAbsolutePath();
initSourceAndValidateIssues(configBean, TlsConfigErrors.TLS_21);
// now, try with real keystore
String hostname = TLSTestUtils.getHostname();
File testDir = new File("target", UUID.randomUUID().toString()).getAbsoluteFile();
testDir.deleteOnExit();
final File keyStore = new File(testDir, "keystore.jks");
keyStore.deleteOnExit();
Assert.assertTrue(testDir.mkdirs());
final String keyStorePassword = "keystore";
KeyPair keyPair = TLSTestUtils.generateKeyPair();
Certificate cert = TLSTestUtils.generateCertificate("CN=" + hostname, keyPair, 30);
TLSTestUtils.createKeyStore(keyStore.toString(), keyStorePassword, "web", keyPair.getPrivate(), cert);
configBean.tlsConfigBean.keyStoreFilePath = keyStore.getAbsolutePath();
// valid keystore, wrong password
configBean.tlsConfigBean.keyStorePassword = () -> "invalid-password";
initSourceAndValidateIssues(configBean, TlsConfigErrors.TLS_21);
// finally, a valid certificate/config
configBean.tlsConfigBean.keyStorePassword = () -> keyStorePassword;
initSourceAndValidateIssues(configBean);
// ack ELs - malformed expressions must be rejected at init time
configBean.recordProcessedAckMessage = "${invalid EL)";
initSourceAndValidateIssues(configBean, Errors.TCP_30);
configBean.recordProcessedAckMessage = "${time:now()}";
configBean.batchCompletedAckMessage = "${another invalid EL]";
initSourceAndValidateIssues(configBean, Errors.TCP_31);
configBean.batchCompletedAckMessage = "${record:value('/first')}";
// syslog mode: non-transparent framing requires a separator character
configBean.tcpMode = TCPMode.SYSLOG;
configBean.syslogFramingMode = SyslogFramingMode.NON_TRANSPARENT_FRAMING;
configBean.nonTransparentFramingSeparatorCharStr = "";
initSourceAndValidateIssues(configBean, Errors.TCP_40);
configBean.syslogFramingMode = SyslogFramingMode.OCTET_COUNTING;
initSourceAndValidateIssues(configBean);
// separated records: needs a separator and a data format
configBean.tcpMode = TCPMode.DELIMITED_RECORDS;
configBean.dataFormatConfig.charset = Charsets.UTF_8.name();
initSourceAndValidateIssues(configBean, Errors.TCP_41);
configBean.recordSeparatorStr = "";
initSourceAndValidateIssues(configBean, Errors.TCP_40);
configBean.recordSeparatorStr = "x";
initSourceAndValidateIssues(configBean, DataFormatErrors.DATA_FORMAT_12);
configBean.dataFormat = DataFormat.TEXT;
initSourceAndValidateIssues(configBean);
}
/**
 * End-to-end test of delimited text records with per-record and per-batch
 * acknowledgements: a real Netty client sends {@link #TEN_DELIMITED_RECORDS},
 * the source must emit one record per line, and the client must receive one
 * "record_ack_&lt;id&gt;" per record followed by a single "batch_ack_&lt;n&gt;".
 */
@Test
public void runTextRecordsWithAck() throws StageException, IOException, ExecutionException, InterruptedException {
final String recordSeparatorStr = "\n";
final String[] expectedRecords = TEN_DELIMITED_RECORDS.split(recordSeparatorStr);
final int batchSize = expectedRecords.length;
final Charset charset = Charsets.ISO_8859_1;
final TCPServerSourceConfig configBean = createConfigBean(charset);
configBean.dataFormat = DataFormat.TEXT;
configBean.tcpMode = TCPMode.DELIMITED_RECORDS;
configBean.recordSeparatorStr = recordSeparatorStr;
configBean.ports = NetworkUtils.getRandomPorts(1);
configBean.recordProcessedAckMessage = "record_ack_${record:id()}";
configBean.batchCompletedAckMessage = "batch_ack_${batchSize}";
configBean.batchSize = batchSize;
final TCPServerSource source = new TCPServerSource(configBean);
final String outputLane = "lane";
final PushSourceRunner runner = new PushSourceRunner.Builder(TCPServerDSource.class, source)
.addOutputLane(outputLane)
.build();
final List<Record> records = new LinkedList<>();
runner.runInit();
EventLoopGroup workerGroup = new NioEventLoopGroup();
// client connects and (channelActive) writes the payload in random slices
ChannelFuture channelFuture = startTcpClient(
configBean,
workerGroup,
TEN_DELIMITED_RECORDS.getBytes(charset),
true
);
runner.runProduce(new HashMap<>(), batchSize, output -> {
records.addAll(output.getRecords().get(outputLane));
runner.setStop();
});
runner.waitOnProduce();
// collect the acks the server wrote back to the client handler
final Channel channel = channelFuture.channel();
TCPServerSourceClientHandler clientHandler = channel.pipeline().get(TCPServerSourceClientHandler.class);
final List<String> responses = new LinkedList<>();
for (int i = 0; i < batchSize + 1; i++) {
// one for each record, plus one for the batch
responses.add(clientHandler.getResponse());
}
channel.close();
workerGroup.shutdownGracefully();
assertThat(records, hasSize(batchSize));
final List<String> expectedAcks = new LinkedList<>();
for (int i = 0; i < records.size(); i++) {
// validate the output record value
assertThat(records.get(i).get("/text").getValueAsString(), equalTo(expectedRecords[i]));
// validate the record-level ack
expectedAcks.add(String.format("record_ack_%s", records.get(i).getHeader().getSourceId()));
}
// validate the batch-level ack
expectedAcks.add(String.format("batch_ack_%d", batchSize));
// because of the vagaries of TCP, we can't be sure that a single ack is returned in each discrete read
// this is due to the fact that the server can choose to flush the buffer in different ways, and the client
// can choose if/how to buffer on its side when reading from the channel
// therefore, we will simply combine all acks in the expected order into a single String and assert at that
// level, rather than at an individual read/expected ack level
final String combinedAcks = StringUtils.join(responses, "");
assertThat(combinedAcks, startsWith(StringUtils.join(expectedAcks, "")));
}
/**
 * Sends un-parseable JSON and checks each on-record-error policy:
 * TO_ERROR routes the failed record to the error sink, DISCARD drops it
 * silently, and STOP_PIPELINE aborts the pipeline with TCP_06. A fresh random
 * port is picked before each run because the previous server may still be
 * shutting down.
 */
@Test
public void errorHandling() throws StageException, IOException, ExecutionException, InterruptedException {
final Charset charset = Charsets.ISO_8859_1;
final TCPServerSourceConfig configBean = createConfigBean(charset);
configBean.dataFormat = DataFormat.JSON;
configBean.tcpMode = TCPMode.DELIMITED_RECORDS;
configBean.recordSeparatorStr = "\n";
configBean.ports = NetworkUtils.getRandomPorts(1);
final TCPServerSource source = new TCPServerSource(configBean);
final String outputLane = "lane";
// 1) TO_ERROR: the invalid record must appear in the error records
final PushSourceRunner toErrorRunner = new PushSourceRunner.Builder(TCPServerDSource.class, source)
.addOutputLane(outputLane)
.setOnRecordError(OnRecordError.TO_ERROR)
.build();
final List<Record> records = new LinkedList<>();
final List<Record> errorRecords = new LinkedList<>();
runAndCollectRecords(
toErrorRunner,
configBean,
records,
errorRecords,
1,
outputLane,
"{\"invalid_json\": yes}\n".getBytes(charset),
true,
false
);
assertThat(records, empty());
assertThat(errorRecords, hasSize(1));
assertThat(
errorRecords.get(0).getHeader().getErrorCode(),
equalTo(com.streamsets.pipeline.lib.parser.Errors.DATA_PARSER_04.getCode())
);
// 2) DISCARD: neither output nor error records
final PushSourceRunner discardRunner = new PushSourceRunner.Builder(TCPServerDSource.class, source)
.addOutputLane(outputLane)
.setOnRecordError(OnRecordError.DISCARD)
.build();
records.clear();
errorRecords.clear();
configBean.ports = NetworkUtils.getRandomPorts(1);
runAndCollectRecords(
discardRunner,
configBean,
records,
errorRecords,
1,
outputLane,
"{\"invalid_json\": yes}\n".getBytes(charset),
true,
false
);
assertThat(records, empty());
assertThat(errorRecords, empty());
// 3) STOP_PIPELINE: the produce call must fail with TCP_06
configBean.ports = NetworkUtils.getRandomPorts(1);
final PushSourceRunner stopPipelineRunner = new PushSourceRunner.Builder(TCPServerDSource.class, source)
.addOutputLane(outputLane)
.setOnRecordError(OnRecordError.STOP_PIPELINE)
.build();
records.clear();
errorRecords.clear();
try {
runAndCollectRecords(
stopPipelineRunner,
configBean,
records,
errorRecords,
1,
outputLane,
"{\"invalid_json\": yes}\n".getBytes(charset),
true,
true
);
Assert.fail("ExecutionException should have been thrown");
} catch (ExecutionException e) {
// the StageException is wrapped in a RuntimeException by the runner
assertThat(e.getCause(), instanceOf(RuntimeException.class));
final RuntimeException runtimeException = (RuntimeException) e.getCause();
assertThat(runtimeException.getCause(), instanceOf(StageException.class));
final StageException stageException = (StageException) runtimeException.getCause();
assertThat(stageException.getErrorCode().getCode(), equalTo(Errors.TCP_06.getCode()));
}
}
/**
 * Verifies Flume Avro IPC mode end-to-end: an Avro RPC client appends a batch
 * of events, and each event body must surface as a text record in the output
 * lane.
 *
 * <p>Fix: the {@code NettyTransceiver} was never closed, leaking the client
 * connection and its Netty threads for the remainder of the test JVM. It is
 * now managed with try-with-resources so it is torn down even when an
 * assertion fails.
 */
@Test
public void flumeAvroIpc() throws StageException, IOException, ExecutionException, InterruptedException {
    final Charset charset = Charsets.UTF_8;
    final TCPServerSourceConfig configBean = createConfigBean(charset);
    configBean.tcpMode = TCPMode.FLUME_AVRO_IPC;
    configBean.dataFormat = DataFormat.TEXT;
    configBean.bindAddress = "0.0.0.0";
    final int batchSize = 5;
    final String outputLane = "output";
    final TCPServerSource source = new TCPServerSource(configBean);
    final PushSourceRunner runner = new PushSourceRunner.Builder(TCPServerDSource.class, source)
        .addOutputLane(outputLane)
        .setOnRecordError(OnRecordError.TO_ERROR)
        .build();
    runner.runInit();
    runner.runProduce(Collections.emptyMap(), batchSize, out -> {
        final Map<String, List<Record>> outputMap = out.getRecords();
        assertThat(outputMap, hasKey(outputLane));
        final List<Record> records = outputMap.get(outputLane);
        assertThat(records, hasSize(batchSize));
        for (int i = 0; i < batchSize; i++) {
            // each output record's text field carries the corresponding event body
            assertThat(
                records.get(i).get("/" + TextDataParserFactory.TEXT_FIELD_NAME),
                fieldWithValue(getFlumeAvroIpcEventName(i))
            );
        }
        runner.setStop();
    });
    try (NettyTransceiver transceiver = new NettyTransceiver(
        new InetSocketAddress("localhost", Integer.parseInt(configBean.ports.get(0))))) {
        final AvroSourceProtocol client = SpecificRequestor.getClient(AvroSourceProtocol.class, transceiver);
        List<AvroFlumeEvent> events = new LinkedList<>();
        for (int i = 0; i < batchSize; i++) {
            AvroFlumeEvent avroEvent = new AvroFlumeEvent();
            avroEvent.setHeaders(new HashMap<CharSequence, CharSequence>());
            avroEvent.setBody(ByteBuffer.wrap(getFlumeAvroIpcEventName(i).getBytes()));
            events.add(avroEvent);
        }
        Status status = client.appendBatch(events);
        assertThat(status, equalTo(Status.OK));
        runner.waitOnProduce();
    }
}
/** Returns the deterministic event-body text used for the Flume Avro IPC event at {@code index}. */
private static String getFlumeAvroIpcEventName(int index) {
    return String.format("Avro event %d", index);
}
/**
 * Runs the given push-source runner against a real TCP client that sends
 * {@code data}, then gathers output and error records into the supplied lists.
 *
 * @param runner               pre-built runner (not yet initialized); destroyed before returning
 * @param configBean           config whose first port the client connects to
 * @param records              receives the records emitted on {@code outputLane}
 * @param errorRecords         receives the runner's error records
 * @param batchSize            max batch size passed to runProduce
 * @param outputLane           lane to collect output records from
 * @param data                 raw bytes the client writes once connected
 * @param randomlySlice        if true, the payload is written in random slices to simulate TCP fragmentation
 * @param runEmptyProduceAtEnd if true, defers setStop() to a final empty produce call
 *                             (needed so a STOP_PIPELINE failure can propagate out of waitOnProduce)
 */
private void runAndCollectRecords(
PushSourceRunner runner,
TCPServerSourceConfig configBean,
List<Record> records,
List<Record> errorRecords,
int batchSize,
String outputLane,
byte[] data,
boolean randomlySlice,
boolean runEmptyProduceAtEnd
) throws StageException, InterruptedException, ExecutionException {
runner.runInit();
EventLoopGroup workerGroup = new NioEventLoopGroup();
runner.runProduce(new HashMap<>(), batchSize, output -> {
records.addAll(output.getRecords().get(outputLane));
if (!runEmptyProduceAtEnd) {
runner.setStop();
}
});
ChannelFuture channelFuture = startTcpClient(
configBean,
workerGroup,
data,
randomlySlice
);
// Wait until the connection is closed.
channelFuture.channel().closeFuture().sync();
// wait for the push source runner produce to complete
runner.waitOnProduce();
errorRecords.addAll(runner.getErrorRecords());
if (runEmptyProduceAtEnd) {
runner.runProduce(new HashMap<>(), 0, output -> {
runner.setStop();
});
runner.waitOnProduce();
}
runner.runDestroy();
workerGroup.shutdownGracefully();
}
/**
 * Connects a Netty NIO client to localhost on the first configured port and
 * installs a {@link TCPServerSourceClientHandler} that writes {@code data}
 * once the channel becomes active.
 *
 * @return the connect future, already synced (connection established)
 */
private ChannelFuture startTcpClient(
TCPServerSourceConfig configBean,
EventLoopGroup workerGroup,
byte[] data,
boolean randomlySlice
) throws InterruptedException {
    final Bootstrap bootstrap = new Bootstrap()
        .group(workerGroup)
        .channel(NioSocketChannel.class)
        .option(ChannelOption.SO_KEEPALIVE, true)
        .handler(new ChannelInitializer() {
            @Override
            protected void initChannel(Channel ch) throws Exception {
                ch.pipeline().addLast(new TCPServerSourceClientHandler(randomlySlice, data));
            }
        });
    // Start the client and block until the connection is established.
    return bootstrap.connect("localhost", Integer.parseInt(configBean.ports.get(0))).sync();
}
/**
 * Test client handler: writes the given payload to the server once the channel
 * is active (optionally in random slices, to simulate TCP fragmentation) and
 * collects every server response as a UTF-8 string.
 */
private static class TCPServerSourceClientHandler extends ChannelInboundHandlerAdapter {
    private final boolean randomlySlice;
    private final byte[] data;
    // responses in arrival order; consumers block in getResponse()
    private final BlockingQueue<String> responses = new LinkedBlockingDeque<>();

    private TCPServerSourceClientHandler(boolean randomlySlice, byte[] data) {
        this.randomlySlice = randomlySlice;
        this.data = data;
    }

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        ByteBuf buf = (ByteBuf) msg;
        try {
            responses.add(buf.toString(com.google.common.base.Charsets.UTF_8));
        } finally {
            // Fix: inbound ByteBufs are reference-counted; the original handler
            // never released them, leaking pooled buffer memory on every read.
            buf.release();
        }
    }

    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
        super.channelActive(ctx);
        if (randomlySlice) {
            for (List<Byte> slice : NetTestUtils.getRandomByteSlices(data)) {
                ctx.writeAndFlush(Unpooled.copiedBuffer(Bytes.toArray(slice)));
            }
        } else {
            ctx.writeAndFlush(Unpooled.copiedBuffer(data));
        }
    }

    /** Blocks until the next server response is available. */
    private String getResponse() throws InterruptedException {
        return responses.take();
    }
}
/**
 * Validates the given source configuration and asserts that exactly the supplied error
 * codes are reported, in order.
 */
private static void initSourceAndValidateIssues(TCPServerSourceConfig configBean, ErrorCode... errorCodes) throws
    StageException {
  List<Stage.ConfigIssue> issues = initSourceAndGetIssues(configBean);
  assertThat(issues, hasSize(errorCodes.length));
  int position = 0;
  for (ErrorCode expected : errorCodes) {
    assertThat(issues.get(position).toString(), containsString(expected.getCode()));
    position++;
  }
}
/**
 * Builds a {@link TCPServerSource} from the given config and runs config validation,
 * returning whatever issues the runner reports.
 */
private static List<Stage.ConfigIssue> initSourceAndGetIssues(TCPServerSourceConfig configBean) throws
    StageException {
  TCPServerSource tcpSource = new TCPServerSource(configBean);
  PushSourceRunner validationRunner =
      new PushSourceRunner.Builder(TCPServerDSource.class, tcpSource)
          .addOutputLane("lane")
          .setOnRecordError(OnRecordError.TO_ERROR)
          .build();
  return validationRunner.runValidateConfigs();
}
/**
 * Creates a syslog-mode TCP server config bean: TLS disabled, one worker thread, a random
 * free port, non-transparent ("\n"-separated) framing, and the given syslog charset.
 */
protected static TCPServerSourceConfig createConfigBean(Charset charset) {
  TCPServerSourceConfig bean = new TCPServerSourceConfig();
  // Transport / runtime settings.
  bean.tlsConfigBean.tlsEnabled = false;
  bean.ports = randomSinglePort();
  bean.numThreads = 1;
  bean.batchSize = 10;
  bean.maxMessageSize = 4096;
  bean.maxWaitTime = 1000;
  // Syslog parsing settings.
  bean.tcpMode = TCPMode.SYSLOG;
  bean.syslogCharset = charset.name();
  bean.syslogFramingMode = SyslogFramingMode.NON_TRANSPARENT_FRAMING;
  bean.nonTransparentFramingSeparatorCharStr = "\n";
  return bean;
}
/** Returns a single-element list holding a random free port number, rendered as a string. */
private static List<String> randomSinglePort() {
  String port = String.valueOf(NetworkUtils.getRandomPort());
  // Arrays.asList (not singletonList) keeps the original fixed-size-but-settable semantics.
  return Arrays.asList(port);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.BitsFilteredDocIdSet;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.OpenBitSet;
import org.apache.lucene.util.OpenBitSetIterator;
import org.apache.lucene.search.DocIdSetIterator;
/**
* <code>BitDocSet</code> represents an unordered set of Lucene Document Ids
* using a BitSet. A set bit represents inclusion in the set for that document.
*
*
* @since solr 0.9
*/
public class BitDocSet extends DocSetBase {
  // Backing bit set: bit i set <=> doc i is a member.
  final OpenBitSet bits;
  int size; // number of docs in the set (cached for perf); -1 means "not yet computed"

  /** Construct an empty BitDocSet. */
  public BitDocSet() {
    bits = new OpenBitSet();
  }

  /** Construct a BitDocSet.
   * The capacity of the OpenBitSet should be at least maxDoc() */
  public BitDocSet(OpenBitSet bits) {
    this.bits = bits;
    size=-1; // unknown until cardinality() is computed lazily in size()
  }

  /** Construct a BitDocSet, and provides the number of set bits.
   * The capacity of the OpenBitSet should be at least maxDoc()
   */
  public BitDocSet(OpenBitSet bits, int size) {
    this.bits = bits;
    this.size = size;
  }

  /*** DocIterator using nextSetBit()
  public DocIterator iterator() {
    return new DocIterator() {
      int pos=bits.nextSetBit(0);
      public boolean hasNext() {
        return pos>=0;
      }
      public Integer next() {
        return nextDoc();
      }
      public void remove() {
        bits.clear(pos);
      }
      public int nextDoc() {
        int old=pos;
        pos=bits.nextSetBit(old+1);
        return old;
      }
      public float score() {
        return 0.0f;
      }
    };
  }
  ***/

  /** Iterates the set doc ids in ascending order via an OpenBitSetIterator. */
  @Override
  public DocIterator iterator() {
    return new DocIterator() {
      private final OpenBitSetIterator iter = new OpenBitSetIterator(bits);
      // Look-ahead: the next doc id to return, or NO_MORE_DOCS when exhausted.
      private int pos = iter.nextDoc();
      @Override
      public boolean hasNext() {
        return pos != DocIdSetIterator.NO_MORE_DOCS;
      }
      @Override
      public Integer next() {
        return nextDoc();
      }
      @Override
      public void remove() {
        // NOTE: clears the bit in the backing set but does not invalidate the cached size.
        bits.clear(pos);
      }
      @Override
      public int nextDoc() {
        int old=pos;
        pos=iter.nextDoc();
        return old;
      }
      @Override
      public float score() {
        return 0.0f;
      }
    };
  }

  /**
   *
   * @return the <b>internal</b> OpenBitSet that should <b>not</b> be modified.
   */
  @Override
  public OpenBitSet getBits() {
    return bits;
  }

  @Override
  public void add(int doc) {
    bits.set(doc);
    size=-1; // invalidate size
  }

  @Override
  public void addUnique(int doc) {
    bits.set(doc);
    size=-1; // invalidate size
  }

  /** Returns the number of documents in the set, computing and caching it on first call. */
  @Override
  public int size() {
    if (size!=-1) return size;
    return size=(int)bits.cardinality();
  }

  /**
   * The number of set bits - size - is cached. If the bitset is changed externally,
   * this method should be used to invalidate the previously cached size.
   */
  public void invalidateSize() {
    size=-1;
  }

  /** Returns true if the doc exists in the set.
   * Should only be called when doc < OpenBitSet.size()
   */
  @Override
  public boolean exists(int doc) {
    // fastGet skips bounds checking, hence the precondition above.
    return bits.fastGet(doc);
  }

  @Override
  public int intersectionSize(DocSet other) {
    if (other instanceof BitDocSet) {
      return (int)OpenBitSet.intersectionCount(this.bits, ((BitDocSet)other).bits);
    } else {
      // they had better not call us back!
      return other.intersectionSize(this);
    }
  }

  @Override
  public boolean intersects(DocSet other) {
    if (other instanceof BitDocSet) {
      return bits.intersects(((BitDocSet)other).bits);
    } else {
      // they had better not call us back!
      return other.intersects(this);
    }
  }

  @Override
  public int unionSize(DocSet other) {
    if (other instanceof BitDocSet) {
      // if we don't know our current size, this is faster than
      // size + other.size - intersection_size
      return (int)OpenBitSet.unionCount(this.bits, ((BitDocSet)other).bits);
    } else {
      // they had better not call us back!
      return other.unionSize(this);
    }
  }

  @Override
  public int andNotSize(DocSet other) {
    if (other instanceof BitDocSet) {
      // if we don't know our current size, this is faster than
      // size - intersection_size
      return (int)OpenBitSet.andNotCount(this.bits, ((BitDocSet)other).bits);
    } else {
      return super.andNotSize(other);
    }
  }

  /** ORs this set's bits into the given target bit set. */
  @Override
  public void setBitsOn(OpenBitSet target) {
    target.union(bits);
  }

  /** Returns a new set containing this set's docs minus {@code other}'s; this set is unchanged. */
  @Override
  public DocSet andNot(DocSet other) {
    OpenBitSet newbits = (OpenBitSet)(bits.clone());
    if (other instanceof BitDocSet) {
      newbits.andNot(((BitDocSet)other).bits);
    } else {
      // Generic path: clear each of the other set's docs individually.
      DocIterator iter = other.iterator();
      while (iter.hasNext()) newbits.clear(iter.nextDoc());
    }
    return new BitDocSet(newbits);
  }

  /** Returns a new set containing the union of this set and {@code other}; this set is unchanged. */
  @Override
  public DocSet union(DocSet other) {
    OpenBitSet newbits = (OpenBitSet)(bits.clone());
    if (other instanceof BitDocSet) {
      newbits.union(((BitDocSet)other).bits);
    } else {
      DocIterator iter = other.iterator();
      while (iter.hasNext()) newbits.set(iter.nextDoc());
    }
    return new BitDocSet(newbits);
  }

  /** Approximate heap usage: backing long[] in bytes (length * 8) plus fixed overhead. */
  @Override
  public long memSize() {
    return (bits.getBits().length << 3) + 16;
  }

  @Override
  protected BitDocSet clone() {
    return new BitDocSet((OpenBitSet)bits.clone(), size);
  }

  /**
   * Returns a Filter view over this (top-level) doc set. For a top-level reader context the
   * backing bit set is used directly; for a per-segment context, doc ids are translated by
   * the segment's docBase offset.
   */
  @Override
  public Filter getTopFilter() {
    final OpenBitSet bs = bits;
    // TODO: if cardinality isn't cached, do a quick measure of sparseness
    // and return null from bits() if too sparse.
    return new Filter() {
      @Override
      public DocIdSet getDocIdSet(final AtomicReaderContext context, final Bits acceptDocs) {
        AtomicReader reader = context.reader();
        // all Solr DocSets that are used as filters only include live docs
        final Bits acceptDocs2 = acceptDocs == null ? null : (reader.getLiveDocs() == acceptDocs ? null : acceptDocs);
        if (context.isTopLevel) {
          // Top-level reader: doc ids need no translation, use the bit set as-is.
          return BitsFilteredDocIdSet.wrap(bs, acceptDocs);
        }
        final int base = context.docBase;
        final int maxDoc = reader.maxDoc();
        final int max = base + maxDoc; // one past the max doc in this segment.
        return BitsFilteredDocIdSet.wrap(new DocIdSet() {
          @Override
          public DocIdSetIterator iterator() {
            return new DocIdSetIterator() {
              // Position in top-level (global) doc id space; start just before this segment.
              int pos=base-1;
              // Last doc id returned, in segment-local space.
              int adjustedDoc=-1;
              @Override
              public int docID() {
                return adjustedDoc;
              }
              @Override
              public int nextDoc() {
                pos = bs.nextSetBit(pos+1);
                // Translate global -> segment-local; stop once past this segment's range.
                return adjustedDoc = (pos>=0 && pos<max) ? pos-base : NO_MORE_DOCS;
              }
              @Override
              public int advance(int target) {
                if (target==NO_MORE_DOCS) return adjustedDoc=NO_MORE_DOCS;
                pos = bs.nextSetBit(target+base);
                return adjustedDoc = (pos>=0 && pos<max) ? pos-base : NO_MORE_DOCS;
              }
              @Override
              public long cost() {
                // we don't want to actually compute cardinality, but
                // if its already been computed, we use it
                if (size != -1) {
                  return size;
                } else {
                  return bs.capacity();
                }
              }
            };
          }
          @Override
          public boolean isCacheable() {
            return true;
          }
          @Override
          public Bits bits() {
            // Random-access view over this segment's slice of the global bit set.
            return new Bits() {
              @Override
              public boolean get(int index) {
                return bs.fastGet(index + base);
              }
              @Override
              public int length() {
                return maxDoc;
              }
            };
          }
        }, acceptDocs2);
      }
    };
  }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;
import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.Fragment;
import com.google.devtools.build.lib.analysis.config.ConfigRuleClasses.ConfigSettingRule;
import com.google.devtools.build.lib.analysis.config.ConfigurationFragmentFactory;
import com.google.devtools.build.lib.analysis.config.FragmentOptions;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.packages.AspectDefinition;
import com.google.devtools.build.lib.packages.Attribute;
import com.google.devtools.build.lib.packages.ConfigurationFragmentPolicy;
import com.google.devtools.build.lib.packages.DependencyFilter;
import com.google.devtools.build.lib.packages.NoSuchPackageException;
import com.google.devtools.build.lib.packages.NoSuchTargetException;
import com.google.devtools.build.lib.packages.NoSuchThingException;
import com.google.devtools.build.lib.packages.Package;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.packages.RuleClassProvider;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.packages.TargetUtils;
import com.google.devtools.build.lib.skyframe.TransitiveTargetFunction.TransitiveTargetValueBuilder;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import com.google.devtools.build.skyframe.ValueOrException2;
import com.google.devtools.common.options.Option;
import java.lang.reflect.Field;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.annotation.Nullable;
/**
* This class builds transitive Target values such that evaluating a Target value is similar to
* running it through the LabelVisitor.
*/
public class TransitiveTargetFunction
    extends TransitiveBaseTraversalFunction<TransitiveTargetValueBuilder> {

  private final ConfiguredRuleClassProvider ruleClassProvider;

  /**
   * Maps build option names to matching config fragments. This is used to determine correct
   * fragment requirements for config_setting rules, which are unique in that their dependencies
   * are triggered by string representations of option names.
   */
  private final Map<String, Class<? extends Fragment>> optionsToFragmentMap;

  TransitiveTargetFunction(RuleClassProvider ruleClassProvider) {
    this.ruleClassProvider = (ConfiguredRuleClassProvider) ruleClassProvider;
    this.optionsToFragmentMap = computeOptionsToFragmentMap(this.ruleClassProvider);
  }

  /**
   * Computes the option name --> config fragments map. Note that this mapping is technically
   * one-to-many: a single option may be required by multiple fragments (e.g. Java options are
   * used by both JavaConfiguration and Jvm). In such cases, we arbitrarily choose one fragment
   * since that's all that's needed to satisfy the config_setting.
   */
  private static Map<String, Class<? extends Fragment>> computeOptionsToFragmentMap(
      ConfiguredRuleClassProvider ruleClassProvider) {
    Map<String, Class<? extends Fragment>> result = new LinkedHashMap<>();
    // Tracks, per options class, the smallest requiredOptions() count seen so far.
    Map<Class<? extends FragmentOptions>, Integer> visitedOptionsClasses = new HashMap<>();
    for (ConfigurationFragmentFactory factory : ruleClassProvider.getConfigurationFragments()) {
      Set<Class<? extends FragmentOptions>> requiredOpts = factory.requiredOptions();
      for (Class<? extends FragmentOptions> optionsClass : requiredOpts) {
        Integer previousBest = visitedOptionsClasses.get(optionsClass);
        if (previousBest != null && previousBest <= requiredOpts.size()) {
          // Multiple config fragments may require the same options class, but we only need one of
          // them to guarantee that class makes it into the configuration. Pick one that depends
          // on as few options classes as possible (not necessarily unique).
          continue;
        }
        visitedOptionsClasses.put(optionsClass, requiredOpts.size());
        // Map every @Option-annotated field of the options class to this factory's fragment.
        for (Field field : optionsClass.getFields()) {
          if (field.isAnnotationPresent(Option.class)) {
            result.put(field.getAnnotation(Option.class).name(), factory.creates());
          }
        }
      }
    }
    return result;
  }

  /** Returns the Skyframe key for the transitive-target value of the given label. */
  @Override
  SkyKey getKey(Label label) {
    return TransitiveTargetValue.key(label);
  }

  /** Seeds a value builder for the given target before its deps are processed. */
  @Override
  TransitiveTargetValueBuilder processTarget(Label label, TargetAndErrorIfAny targetAndErrorIfAny) {
    Target target = targetAndErrorIfAny.getTarget();
    boolean packageLoadedSuccessfully = targetAndErrorIfAny.isPackageLoadedSuccessfully();
    return new TransitiveTargetValueBuilder(label, target, packageLoadedSuccessfully);
  }

  /**
   * Folds each dependency's transitive-target value into the builder: merges transitive
   * package/target/fragment sets, records root causes for failed deps, and reports
   * missing-edge errors where appropriate.
   */
  @Override
  void processDeps(
      TransitiveTargetValueBuilder builder,
      EventHandler eventHandler,
      TargetAndErrorIfAny targetAndErrorIfAny,
      Iterable<Entry<SkyKey, ValueOrException2<NoSuchPackageException, NoSuchTargetException>>>
          depEntries)
      throws InterruptedException {
    boolean successfulTransitiveLoading = builder.isSuccessfulTransitiveLoading();
    Target target = targetAndErrorIfAny.getTarget();
    NestedSetBuilder<Label> transitiveRootCauses = builder.getTransitiveRootCauses();
    for (Entry<SkyKey, ValueOrException2<NoSuchPackageException, NoSuchTargetException>> entry :
        depEntries) {
      Label depLabel = (Label) entry.getKey().argument();
      TransitiveTargetValue transitiveTargetValue;
      try {
        transitiveTargetValue = (TransitiveTargetValue) entry.getValue().get();
        if (transitiveTargetValue == null) {
          // Dep value not yet computed; Skyframe will restart this function later.
          continue;
        }
      } catch (NoSuchPackageException | NoSuchTargetException e) {
        successfulTransitiveLoading = false;
        transitiveRootCauses.add(depLabel);
        maybeReportErrorAboutMissingEdge(target, depLabel, e, eventHandler);
        continue;
      }
      builder.getTransitiveSuccessfulPkgs().addTransitive(
          transitiveTargetValue.getTransitiveSuccessfulPackages());
      builder.getTransitiveUnsuccessfulPkgs().addTransitive(
          transitiveTargetValue.getTransitiveUnsuccessfulPackages());
      builder.getTransitiveTargets().addTransitive(transitiveTargetValue.getTransitiveTargets());
      NestedSet<Label> rootCauses = transitiveTargetValue.getTransitiveRootCauses();
      if (rootCauses != null) {
        // The dep itself loaded, but something in its transitive closure did not.
        successfulTransitiveLoading = false;
        transitiveRootCauses.addTransitive(rootCauses);
        if (transitiveTargetValue.getErrorLoadingTarget() != null) {
          maybeReportErrorAboutMissingEdge(target, depLabel,
              transitiveTargetValue.getErrorLoadingTarget(), eventHandler);
        }
      }
      NestedSet<Class<? extends Fragment>> depFragments =
          transitiveTargetValue.getTransitiveConfigFragments();
      Collection<Class<? extends Fragment>> depFragmentsAsCollection =
          depFragments.toCollection();
      // The simplest collection technique would be to unconditionally add all deps' nested
      // sets to the current target's nested set. But when there's large overlap between their
      // fragment needs, this produces unnecessarily bloated nested sets and a lot of references
      // that don't contribute anything unique to the required fragment set. So we optimize here
      // by completely skipping sets that don't offer anything new. More fine-tuned optimization
      // is possible, but this offers a good balance between simplicity and practical efficiency.
      Set<Class<? extends Fragment>> addedConfigFragments = builder.getConfigFragmentsFromDeps();
      if (!addedConfigFragments.containsAll(depFragmentsAsCollection)) {
        builder.getTransitiveConfigFragments().addTransitive(depFragments);
        addedConfigFragments.addAll(depFragmentsAsCollection);
      }
    }
    builder.setSuccessfulTransitiveLoading(successfulTransitiveLoading);
  }

  /**
   * Finalizes the value: adds the configuration fragments directly required by this rule
   * (from its rule class policy, late-bound attributes, config_setting flag values, and the
   * universal fragment), then builds the {@link TransitiveTargetValue}.
   */
  @Override
  public SkyValue computeSkyValue(TargetAndErrorIfAny targetAndErrorIfAny,
      TransitiveTargetValueBuilder builder) {
    Target target = targetAndErrorIfAny.getTarget();
    NoSuchTargetException errorLoadingTarget = targetAndErrorIfAny.getErrorLoadingTarget();

    // Get configuration fragments directly required by this rule.
    if (target instanceof Rule) {
      Rule rule = (Rule) target;

      // Declared by the rule class:
      ConfigurationFragmentPolicy configurationFragmentPolicy =
          rule.getRuleClassObject().getConfigurationFragmentPolicy();
      for (ConfigurationFragmentFactory factory : ruleClassProvider.getConfigurationFragments()) {
        Class<? extends Fragment> fragment = factory.creates();
        // isLegalConfigurationFragment considers both natively declared fragments and Skylark
        // (named) fragments.
        if (configurationFragmentPolicy.isLegalConfigurationFragment(fragment)) {
          addFragmentIfNew(builder, fragment.asSubclass(BuildConfiguration.Fragment.class));
        }
      }

      // Declared by late-bound attributes:
      for (Attribute attr : rule.getAttributes()) {
        if (attr.isLateBound()) {
          addFragmentsIfNew(builder,
              attr.getLateBoundDefault().getRequiredConfigurationFragments());
        }
      }

      // config_setting rules have values like {"some_flag": "some_value"} that need the
      // corresponding fragments in their configurations to properly resolve:
      if (rule.getRuleClass().equals(ConfigSettingRule.RULE_NAME)) {
        addFragmentsIfNew(builder,
            ConfigSettingRule.requiresConfigurationFragments(rule, optionsToFragmentMap));
      }

      // Fragments to unconditionally include:
      addFragmentIfNew(builder,
          ruleClassProvider.getUniversalFragment().asSubclass(BuildConfiguration.Fragment.class));
    }

    return builder.build(errorLoadingTarget);
  }

  /** Adds the fragment to the builder unless a dep already contributed it. */
  private void addFragmentIfNew(TransitiveTargetValueBuilder builder,
      Class<? extends Fragment> fragment) {
    // This only checks that the deps don't already use this fragment, not the parent rule itself.
    // So duplicates are still possible. We can further optimize if needed.
    if (!builder.getConfigFragmentsFromDeps().contains(fragment)) {
      builder.getTransitiveConfigFragments().add(fragment);
    }
  }

  /** Adds each fragment class in the iterable, skipping those already contributed by deps. */
  private void addFragmentsIfNew(TransitiveTargetValueBuilder builder, Iterable<?> fragments) {
    // We take Iterable<?> instead of Iterable<Class<?>> or Iterable<Class<? extends Fragment>>
    // because both of the latter are passed as actual parameters and there's no way to consistently
    // cast to one of them. In actuality, all values are Class<? extends Fragment>, but the values
    // coming from Attribute.java don't have access to the Fragment symbol since Attribute is built
    // in a different library.
    for (Object fragment : fragments) {
      addFragmentIfNew(builder, (Class<? extends Fragment>) fragment);
    }
  }

  /**
   * Returns the labels of aspect dependencies that the given attribute edge requires,
   * or an empty list if the target's package is unavailable or in error (those errors are
   * surfaced when the corresponding TransitiveTargetValue is computed).
   */
  @Override
  protected Collection<Label> getAspectLabels(
      Rule fromRule,
      Attribute attr,
      Label toLabel,
      ValueOrException2<NoSuchPackageException, NoSuchTargetException> toVal,
      final Environment env)
      throws InterruptedException {
    SkyKey packageKey = PackageValue.key(toLabel.getPackageIdentifier());
    try {
      PackageValue pkgValue =
          (PackageValue) env.getValueOrThrow(packageKey, NoSuchPackageException.class);
      if (pkgValue == null) {
        // Package value not yet computed; Skyframe will restart.
        return ImmutableList.of();
      }
      Package pkg = pkgValue.getPackage();
      if (pkg.containsErrors()) {
        // Do nothing. This error was handled when we computed the corresponding
        // TransitiveTargetValue.
        return ImmutableList.of();
      }
      Target dependedTarget = pkgValue.getPackage().getTarget(toLabel.getName());
      return AspectDefinition.visitAspectsIfRequired(fromRule, attr, dependedTarget,
          DependencyFilter.ALL_DEPS).values();
    } catch (NoSuchThingException e) {
      // Do nothing. This error was handled when we computed the corresponding
      // TransitiveTargetValue.
      return ImmutableList.of();
    }
  }

  /** Fetches the target-marker value for the key, propagating load errors to the caller. */
  @Override
  TargetMarkerValue getTargetMarkerValue(SkyKey targetMarkerKey, Environment env)
      throws NoSuchTargetException, NoSuchPackageException, InterruptedException {
    return (TargetMarkerValue)
        env.getValueOrThrow(
            targetMarkerKey, NoSuchTargetException.class, NoSuchPackageException.class);
  }

  /**
   * Emits a missing-edge error event, but only when the exception actually refers to the
   * dependency in question (its label, or its package) — otherwise the error belongs to a
   * different node and is reported there.
   */
  private static void maybeReportErrorAboutMissingEdge(
      Target target, Label depLabel, NoSuchThingException e, EventHandler eventHandler)
      throws InterruptedException {
    if (e instanceof NoSuchTargetException) {
      NoSuchTargetException nste = (NoSuchTargetException) e;
      if (depLabel.equals(nste.getLabel())) {
        eventHandler.handle(
            Event.error(
                TargetUtils.getLocationMaybe(target),
                TargetUtils.formatMissingEdge(target, depLabel, e)));
      }
    } else if (e instanceof NoSuchPackageException) {
      NoSuchPackageException nspe = (NoSuchPackageException) e;
      if (nspe.getPackageId().equals(depLabel.getPackageIdentifier())) {
        eventHandler.handle(
            Event.error(
                TargetUtils.getLocationMaybe(target),
                TargetUtils.formatMissingEdge(target, depLabel, e)));
      }
    }
  }

  /**
   * Holds values accumulated across the given target and its transitive dependencies for the
   * purpose of constructing a {@link TransitiveTargetValue}.
   *
   * <p>Note that this class is mutable! The {@code successfulTransitiveLoading} property is
   * initialized with the {@code packageLoadedSuccessfully} constructor parameter, and may be
   * modified if a transitive dependency is found to be in error.
   */
  static class TransitiveTargetValueBuilder {
    private boolean successfulTransitiveLoading;
    private final NestedSetBuilder<PackageIdentifier> transitiveSuccessfulPkgs;
    private final NestedSetBuilder<PackageIdentifier> transitiveUnsuccessfulPkgs;
    private final NestedSetBuilder<Label> transitiveTargets;
    private final NestedSetBuilder<Class<? extends Fragment>> transitiveConfigFragments;
    // Flat view of fragments already contributed by deps, used to skip redundant additions.
    private final Set<Class<? extends Fragment>> configFragmentsFromDeps;
    private final NestedSetBuilder<Label> transitiveRootCauses;

    public TransitiveTargetValueBuilder(Label label, Target target,
        boolean packageLoadedSuccessfully) {
      this.transitiveSuccessfulPkgs = NestedSetBuilder.stableOrder();
      this.transitiveUnsuccessfulPkgs = NestedSetBuilder.stableOrder();
      this.transitiveTargets = NestedSetBuilder.stableOrder();
      this.transitiveConfigFragments = NestedSetBuilder.stableOrder();
      // No need to store directly required fragments that are also required by deps.
      this.configFragmentsFromDeps = new LinkedHashSet<>();
      this.transitiveRootCauses = NestedSetBuilder.stableOrder();

      this.successfulTransitiveLoading = packageLoadedSuccessfully;
      PackageIdentifier packageId = target.getPackage().getPackageIdentifier();
      if (packageLoadedSuccessfully) {
        transitiveSuccessfulPkgs.add(packageId);
      } else {
        // The target's own package failed to load: it is its own root cause.
        transitiveRootCauses.add(label);
        transitiveUnsuccessfulPkgs.add(packageId);
      }
      transitiveTargets.add(target.getLabel());
    }

    public NestedSetBuilder<PackageIdentifier> getTransitiveSuccessfulPkgs() {
      return transitiveSuccessfulPkgs;
    }

    public NestedSetBuilder<PackageIdentifier> getTransitiveUnsuccessfulPkgs() {
      return transitiveUnsuccessfulPkgs;
    }

    public NestedSetBuilder<Label> getTransitiveTargets() {
      return transitiveTargets;
    }

    public NestedSetBuilder<Class<? extends Fragment>> getTransitiveConfigFragments() {
      return transitiveConfigFragments;
    }

    public Set<Class<? extends Fragment>> getConfigFragmentsFromDeps() {
      return configFragmentsFromDeps;
    }

    public NestedSetBuilder<Label> getTransitiveRootCauses() {
      return transitiveRootCauses;
    }

    public boolean isSuccessfulTransitiveLoading() {
      return successfulTransitiveLoading;
    }

    public void setSuccessfulTransitiveLoading(boolean successfulTransitiveLoading) {
      this.successfulTransitiveLoading = successfulTransitiveLoading;
    }

    /** Builds the final value, choosing the successful or unsuccessful variant. */
    public SkyValue build(@Nullable NoSuchTargetException errorLoadingTarget) {
      NestedSet<PackageIdentifier> successfullyLoadedPkgs = transitiveSuccessfulPkgs.build();
      NestedSet<PackageIdentifier> unsuccessfullyLoadedPkgs = transitiveUnsuccessfulPkgs.build();
      NestedSet<Label> loadedTargets = transitiveTargets.build();
      NestedSet<Class<? extends Fragment>> configFragments = transitiveConfigFragments.build();
      return successfulTransitiveLoading
          ? TransitiveTargetValue.successfulTransitiveLoading(successfullyLoadedPkgs,
              unsuccessfullyLoadedPkgs, loadedTargets, configFragments)
          : TransitiveTargetValue.unsuccessfulTransitiveLoading(successfullyLoadedPkgs,
              unsuccessfullyLoadedPkgs, loadedTargets, transitiveRootCauses.build(),
              errorLoadingTarget, configFragments);
    }
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.