gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package com.fuerve.whiteboard.shortestpath;
import java.util.List;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * Implementation of an undirected, unweighted graph keyed by vertex name,
 * with breadth-first-search shortest-path queries between named vertices.
 *
 * @param <T> the type of the datum stored at each vertex
 */
public class Graph<T> {
    /** Number of distinct undirected edges in the graph. */
    private int E;
    /** Vertex name -> vertex record (name + datum). */
    private final Map<String, Vertex<T>> vertices;
    /** Vertex name -> set of names of adjacent vertices. */
    private final Map<String, Set<String>> adjacencies;

    /**
     * Initializes a new, empty undirected graph.
     */
    public Graph() {
        E = 0;
        vertices = new HashMap<String, Vertex<T>>();
        adjacencies = new HashMap<String, Set<String>>();
    }

    /**
     * Gets the number of vertices in the graph.
     * @return The number of vertices.
     */
    public int numberOfVertices() {
        return vertices.size();
    }

    /**
     * Gets the number of edges in the graph.
     * @return The number of edges.
     */
    public int numberOfEdges() {
        return E;
    }

    /**
     * Checks to see whether a vertex of a given name exists in this graph.
     * @param name The name of the vertex.
     * @return True if a vertex of that name exists.
     */
    public boolean contains(final String name) {
        return vertices.containsKey(name);
    }

    /**
     * Adds a vertex to the graph, or updates the datum of an existing
     * vertex of the same name.
     * @param name The name of the vertex.
     * @param value The value of the vertex.
     */
    public void addVertex(final String name, final T value) {
        final Vertex<T> existing = vertices.get(name);
        if (existing != null) {
            existing.datum = value;
        } else {
            vertices.put(name, new Vertex<T>(name, value));
        }
    }

    /**
     * Adds an edge between two vertices. This is an undirected graph,
     * so all edges are bidirectional.
     * @param v The first vertex.
     * @param w The second vertex.
     * @return True if the edge is present afterwards. Asking to add an edge
     *         between vertices that do not exist will result in a false.
     */
    public boolean addEdge(final String v, final String w) {
        if (!vertices.containsKey(v) || !vertices.containsKey(w)) {
            return false;
        }
        final Set<String> vAdj = establishAdjacencyList(v);
        final Set<String> wAdj = establishAdjacencyList(w);
        // Only count the edge when it was not already present in either
        // direction, so duplicates do not inflate E.
        final boolean isNew = !vAdj.contains(w) && !wAdj.contains(v);
        wAdj.add(v);
        vAdj.add(w);
        if (isNew) {
            E++;
        }
        return true;
    }

    /**
     * Gets the names of all vertices in this graph.
     * @return The names of all vertices (a live view of the vertex set).
     */
    public Set<String> getVertices() {
        return vertices.keySet();
    }

    /**
     * Gets the set of adjacencies for a given vertex.
     * @param name The name of the vertex.
     * @return The set of adjacencies, or null when the vertex has no
     *         recorded edges.
     */
    public Set<String> getAdjacencies(final String name) {
        return adjacencies.get(name);
    }

    /**
     * Looks up (creating on demand) the adjacency set for a vertex name.
     * @param name The name of the vertex in question.
     * @return The (possibly freshly created) adjacency set.
     */
    private Set<String> establishAdjacencyList(final String name) {
        Set<String> adj = adjacencies.get(name);
        if (adj == null) {
            adj = new HashSet<String>();
            adjacencies.put(name, adj);
        }
        return adj;
    }

    /**
     * Given a source and a destination, returns the shortest path as a list
     * of vertex names from source to destination, inclusive of both.
     * @param v The source vertex.
     * @param w The destination vertex.
     * @return The shortest path between them, or null when either vertex
     *         does not exist or no path connects them.
     */
    public List<String> shortestPath(final String v, final String w) {
        if (!vertices.containsKey(v) || !vertices.containsKey(w)) {
            // BUG FIX: previously a nonexistent source vertex caused a
            // NullPointerException inside the traversal.
            return null;
        }
        final TraversalContext context = breadthFirstSearch(v);
        if (!context.marked.contains(w)) {
            return null;
        }
        // Walk the predecessor (edgeTo) chain back from the destination;
        // pushing onto the deque reverses the walk so the result reads
        // source -> destination.
        final Deque<String> path = new ArrayDeque<String>();
        String x;
        for (x = w; context.distanceTo.get(x) != 0; x = context.edgeTo.get(x)) {
            path.push(x);
        }
        path.push(x);
        return new ArrayList<String>(path);
    }

    /**
     * Performs a breadth-first traversal of the graph, starting from the
     * source point.
     * @param source The source from which to traverse the graph.
     * @return The traversal context (marks, predecessor edges, distances).
     */
    private TraversalContext breadthFirstSearch(final String source) {
        final Deque<String> queue = new ArrayDeque<String>();
        final TraversalContext context = new TraversalContext(source);
        context.marked.add(source);
        queue.add(source);
        while (!queue.isEmpty()) {
            final String v = queue.remove();
            final Set<String> adjacent = adjacencies.get(v);
            if (adjacent == null) {
                // BUG FIX: vertices with no edges have no adjacency set;
                // previously this dereferenced null and threw an NPE.
                continue;
            }
            for (final String w : adjacent) {
                if (!context.marked.contains(w)) {
                    context.edgeTo.put(w, v);
                    context.distanceTo.put(w, context.distanceTo.get(v) + 1);
                    context.marked.add(w);
                    queue.add(w);
                }
            }
        }
        return context;
    }

    /**
     * Container for a single node on the graph.
     * @param <T> The contained type.
     */
    public static class Vertex<T> {
        public String name;
        public T datum;

        /**
         * Ctor.
         * @param nname The name of the vertex.
         * @param ddatum The value of the vertex.
         */
        public Vertex(final String nname, final T ddatum) {
            name = nname;
            datum = ddatum;
        }
    }

    /**
     * Container for information about a given attempt to determine the
     * shortest path between two vertices.
     */
    private class TraversalContext {
        public String source;
        /** Vertices already visited by the traversal. */
        public Set<String> marked;
        /** For each visited vertex, the vertex we reached it from. */
        public Map<String, String> edgeTo;
        /** Hop count from the source (MAX_VALUE means "not reached"). */
        public Map<String, Integer> distanceTo;

        /**
         * Ctor.
         * @param ssource The intended source vertex from which to traverse
         *                the graph.
         */
        public TraversalContext(final String ssource) {
            source = ssource;
            marked = new HashSet<String>();
            edgeTo = new HashMap<String, String>();
            distanceTo = new HashMap<String, Integer>();
            for (final String name : vertices.keySet()) {
                distanceTo.put(name, Integer.MAX_VALUE);
            }
            distanceTo.put(source, 0);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.entries;
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import org.apache.geode.internal.cache.RegionEntryContext;
import org.apache.geode.internal.cache.Token;
import org.apache.geode.internal.cache.eviction.EvictionController;
import org.apache.geode.internal.cache.eviction.EvictionNode;
import org.apache.geode.internal.cache.persistence.DiskRecoveryStore;
import org.apache.geode.internal.offheap.OffHeapRegionEntryHelper;
import org.apache.geode.internal.offheap.annotations.Released;
import org.apache.geode.internal.offheap.annotations.Retained;
import org.apache.geode.internal.offheap.annotations.Unretained;
import org.apache.geode.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;
/*
* macros whose definition changes this class:
*
* disk: DISK lru: LRU stats: STATS versioned: VERSIONED offheap: OFFHEAP
*
* One of the following key macros must be defined:
*
* key object: KEY_OBJECT key int: KEY_INT key long: KEY_LONG key uuid: KEY_UUID key string1:
* KEY_STRING1 key string2: KEY_STRING2
*/
/**
 * Do not modify this class. It was generated. Instead modify LeafRegionEntry.cpp and then run
 * ./dev-tools/generateRegionEntryClasses.sh (it must be run from the top level directory).
 *
 * <p>NOTE(review): the comments added below are review annotations only; every
 * code token is byte-for-byte the generated output (macros in effect here:
 * lru + offheap + int key, per the macro list above).
 */
public class VMThinLRURegionEntryOffHeapIntKey extends VMThinLRURegionEntryOffHeap {
  // --------------------------------------- common fields ----------------------------------------
  // Atomic access to the plain volatile lastModified field without paying for
  // a per-instance AtomicLong object.
  private static final AtomicLongFieldUpdater<VMThinLRURegionEntryOffHeapIntKey> LAST_MODIFIED_UPDATER =
      AtomicLongFieldUpdater.newUpdater(VMThinLRURegionEntryOffHeapIntKey.class, "lastModified");
  // Backing field for getEntryHash()/setEntryHash().
  protected int hash;
  // Hash-chain link; backing field for getNextEntry()/setNextEntry().
  private HashEntry<Object, Object> nextEntry;
  // Accessed only through LAST_MODIFIED_UPDATER, hence "unused" to the compiler.
  @SuppressWarnings("unused")
  private volatile long lastModified;
  // --------------------------------------- offheap fields ---------------------------------------
  /**
   * All access done using OFF_HEAP_ADDRESS_UPDATER so it is used even though the compiler can not
   * tell it is.
   */
  @SuppressWarnings("unused")
  @Retained
  @Released
  private volatile long offHeapAddress;
  /**
   * I needed to add this because I wanted clear to call setValue which normally can only be called
   * while the re is synced. But if I sync in that code it causes a lock ordering deadlock with the
   * disk regions because they also get a rw lock in clear. Some hardware platforms do not support
   * CAS on a long. If gemfire is run on one of those the AtomicLongFieldUpdater does a sync on the
   * RegionEntry and we will once again be deadlocked. I don't know if we support any of the
   * hardware platforms that do not have a 64bit CAS. If we do then we can expect deadlocks on disk
   * regions.
   */
  private static final AtomicLongFieldUpdater<VMThinLRURegionEntryOffHeapIntKey> OFF_HEAP_ADDRESS_UPDATER =
      AtomicLongFieldUpdater.newUpdater(VMThinLRURegionEntryOffHeapIntKey.class, "offHeapAddress");
  // --------------------------------------- key fields -------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Key inlined as a primitive int (KEY_INT macro); see getKeyForSizing() below.
  private final int key;

  public VMThinLRURegionEntryOffHeapIntKey(final RegionEntryContext context, final int key,
      @Retained final Object value) {
    super(context, value);
    // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
    this.key = key;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Value accessors all delegate to OffHeapRegionEntryHelper, which interprets
  // the offHeapAddress field of this entry.
  @Override
  public Token getValueAsToken() {
    return OffHeapRegionEntryHelper.getValueAsToken(this);
  }

  @Override
  protected Object getValueField() {
    return OffHeapRegionEntryHelper._getValue(this);
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  @Unretained
  protected void setValueField(@Unretained final Object value) {
    OffHeapRegionEntryHelper.setValue(this, value);
  }

  @Override
  @Retained
  public Object getValueRetain(final RegionEntryContext context, final boolean decompress) {
    return OffHeapRegionEntryHelper._getValueRetain(this, decompress, context);
  }

  @Override
  public long getAddress() {
    return OFF_HEAP_ADDRESS_UPDATER.get(this);
  }

  @Override
  public boolean setAddress(final long expectedAddress, long newAddress) {
    return OFF_HEAP_ADDRESS_UPDATER.compareAndSet(this, expectedAddress, newAddress);
  }

  @Override
  @Released
  public void release() {
    OffHeapRegionEntryHelper.releaseEntry(this);
  }

  @Override
  public void returnToPool() {
    // never implemented
  }

  @Override
  protected long getLastModifiedField() {
    return LAST_MODIFIED_UPDATER.get(this);
  }

  @Override
  protected boolean compareAndSetLastModifiedField(final long expectedValue, final long newValue) {
    return LAST_MODIFIED_UPDATER.compareAndSet(this, expectedValue, newValue);
  }

  @Override
  public int getEntryHash() {
    return this.hash;
  }

  @Override
  protected void setEntryHash(final int hash) {
    this.hash = hash;
  }

  @Override
  public HashEntry<Object, Object> getNextEntry() {
    return this.nextEntry;
  }

  @Override
  public void setNextEntry(final HashEntry<Object, Object> nextEntry) {
    this.nextEntry = nextEntry;
  }

  // --------------------------------------- eviction code ----------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public void setDelayedDiskId(final DiskRecoveryStore diskRecoveryStore) {
    // nothing needed for LRUs with no disk
  }

  @Override
  public synchronized int updateEntrySize(final EvictionController evictionController) {
    // OFFHEAP: getValue ok w/o incing refcount because we are synced and only getting the size
    return updateEntrySize(evictionController, getValue());
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Recomputes the cached entry size and returns the delta against the old size.
  @Override
  public synchronized int updateEntrySize(final EvictionController evictionController,
      final Object value) {
    int oldSize = getEntrySize();
    int newSize = evictionController.entrySize(getKeyForSizing(), value);
    setEntrySize(newSize);
    int delta = newSize - oldSize;
    return delta;
  }

  // Recently-used / evicted flags are stored as bits manipulated via the
  // inherited areAnyBitsSet/setBits/clearBits helpers (declared in a superclass
  // not visible here).
  @Override
  public boolean isRecentlyUsed() {
    return areAnyBitsSet(RECENTLY_USED);
  }

  @Override
  public void setRecentlyUsed(RegionEntryContext context) {
    if (!isRecentlyUsed()) {
      setBits(RECENTLY_USED);
      context.incRecentlyUsed();
    }
  }

  @Override
  public void unsetRecentlyUsed() {
    clearBits(~RECENTLY_USED);
  }

  @Override
  public boolean isEvicted() {
    return areAnyBitsSet(EVICTED);
  }

  @Override
  public void setEvicted() {
    setBits(EVICTED);
  }

  @Override
  public void unsetEvicted() {
    clearBits(~EVICTED);
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Intrusive linked-list fields backing the EvictionNode next()/previous()
  // accessors below, plus the cached entry size.
  private EvictionNode nextEvictionNode;
  private EvictionNode previousEvictionNode;
  private int size;

  @Override
  public void setNext(final EvictionNode nextEvictionNode) {
    this.nextEvictionNode = nextEvictionNode;
  }

  @Override
  public EvictionNode next() {
    return this.nextEvictionNode;
  }

  @Override
  public void setPrevious(final EvictionNode previousEvictionNode) {
    this.previousEvictionNode = previousEvictionNode;
  }

  @Override
  public EvictionNode previous() {
    return this.previousEvictionNode;
  }

  @Override
  public int getEntrySize() {
    return this.size;
  }

  protected void setEntrySize(final int size) {
    this.size = size;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public Object getKeyForSizing() {
    // inline keys always report null for sizing since the size comes from the entry size
    return null;
  }

  // ----------------------------------------- key code -------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public Object getKey() {
    // Boxes the inlined primitive key.
    return this.key;
  }

  @Override
  public boolean isKeyEqual(final Object key) {
    // Only Integer keys can ever equal an int-keyed entry.
    if (key instanceof Integer) {
      return ((Integer) key).intValue() == this.key;
    }
    return false;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}
| |
package ua.khpi.oop.pavlova06;
import java.io.Serializable;
import java.util.Arrays;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
* Class includes the rrealization of a new container of strings for work with
* the util from the laboratory work #3
*
* @param MAX_ARRAY_SIZE
* is for capacity control of the container
* @param DEFAULT_CAPACITY
* is for creating a container with a default capacity
* @param EMPTY_ELEMENT_DATA
* is for creating a container with null value in a case of default
* usage
* @param size
* is the size of the array of values
* @param elementData
* is the array of strings that contains all the written in data
*
* @author pavlova-mv
*
*/
public class NewContainerOfStrings implements Iterable<String>, Serializable {
private static final long serialVersionUID = 1L;
private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
private static final int DEFAULT_CAPACITY = 10;
private static final String[] EMPTY_ELEMENT_DATA = {};
private static int size;
transient String[] elementData;
/**
* Default constructor for creating an empty container.
*/
public NewContainerOfStrings() {
this.elementData = EMPTY_ELEMENT_DATA;
}
public NewContainerOfStrings(NewContainerOfStrings collection) {
this();
this.addAll(collection);
}
public boolean addAll(final NewContainerOfStrings collection) {
final Object[] collectionData = collection.toArray();
final int amountOfNewElements = collectionData.length;
this.ensureCapacity(size + amountOfNewElements);
System.arraycopy(collectionData, 0, this.elementData, size, amountOfNewElements);
size += amountOfNewElements;
return amountOfNewElements != 0;
}
/**
* Constructor for creating a container with input capacity.
*
* @param initialCapacity
* is input capacity
*/
public NewContainerOfStrings(int initialCapacity) {
if (initialCapacity < 0)
throw new IllegalArgumentException("Illegal Capacity: " + initialCapacity);
else if (initialCapacity == 0) {
this.elementData = EMPTY_ELEMENT_DATA;
} else {
this.elementData = new String[initialCapacity];
}
}
/**
* Nethod for deleting a chosen element. Container is being refactored to shift
* all the element to right after the element that was removed.
*
* @param index
* is the number of the removing element
*/
private void removeAndRefactor(int index) {
int numMoved = size - index - 1;
if (numMoved > 0)
System.arraycopy(elementData, index + 1, elementData, index, numMoved);
elementData[--size] = null;
}
/**
* Method that controls index of the chosen element not to be out of the bound
*
* @param index
* is the number of the chosen element
*/
private void checkIndex(int index) {
if (index >= size)
throw new IndexOutOfBoundsException(indexExceptionMessage(index));
}
/**
* Method that creates a message for the exception view in a case index is out
* of bound.
*
* @param index
* is the number of the chosen element
* @return message
*/
private String indexExceptionMessage(int index) {
return "Index: " + index + ", Size: " + size;
}
/**
* Method that checks the ensure of capacity
*
* @param newCapacity
* is a new value of capacity
*/
private void checkDefaultCapacityEnsuring(int newCapacity) {
if (elementData == EMPTY_ELEMENT_DATA)
newCapacity = Math.max(DEFAULT_CAPACITY, newCapacity);
checkPermittedCapasityEnsuring(newCapacity);
}
/**
* Method for revising the value of a new capacity not to be smaller than the
* size of array of strings.
*
* @param newCapacity
* is a new value of capacity
*/
private void checkPermittedCapasityEnsuring(int newCapacity) {
if (newCapacity - elementData.length > 0)
expandCapacity(newCapacity);
}
/**
* Method that expands capacity to a chosen value.
*
* @param newCapacity
* is a new value of capacity
*/
private void expandCapacity(int newCapacity) {
int currentCapacity = elementData.length;
int expandedCapacity = currentCapacity + (currentCapacity >> 1);
if (expandedCapacity - newCapacity < 0)
expandedCapacity = newCapacity;
if (expandedCapacity > MAX_ARRAY_SIZE)
expandedCapacity = tooLargeCapacity(newCapacity);
// throw new OutOfMemoryError();
elementData = Arrays.copyOf(elementData, expandedCapacity);
}
/**
* Method for case if the value of new capacity is larger than the
* MAX_ARRAY_SIZE
*
* @param newCapacity
* is a new value of capacity
* @return value of new capacity
*/
private int tooLargeCapacity(int newCapacity) {
if (newCapacity < 0)
throw new OutOfMemoryError();
return (newCapacity > MAX_ARRAY_SIZE) ? Integer.MAX_VALUE : MAX_ARRAY_SIZE;
}
/**
* Method for creating a string from the data of the container.
*
* @return data in a string
*/
public String toString() {
String containerContent = new String();
for (String string : elementData)
containerContent += string + " ";
return containerContent;
}
/**
* Method gor getting index by the string
*
* @param element
* is the string
* @return index
*/
public int getIndex(String element) {
for (int index = 0; index < size; index++)
if (element.equals(elementData[index]))
return index;
return 0;
}
/**
* Method for adding a new element in the end of the array of strings.
*
* @param newElement
* is a new string
*/
public void add(String newElement) {
if (size == elementData.length)
ensureCapacity(size + 1);
elementData[size] = newElement;
size++;
}
/**
* Method for setting data to the element
*
*/
public String set(int index, String element) {
checkIndex(index);
String oldElement = elementData[index];
elementData[index] = element;
return oldElement;
}
/**
* Method for adding a new element by the specific index
*
* @param index
* is a position for adding
* @param newElement
*/
public void add(int index, String newElement) {
checkIndex(index);
ensureCapacity(size + 1);
System.arraycopy(elementData, index, elementData, index + 1, size - index);
elementData[index] = newElement;
size++;
}
/**
* Method for removing all the data from the array of strings.
*/
public void clear() {
for (int i = 0; i < elementData.length; i++) {
elementData[i] = null;
}
}
/**
* Method for removing an element by a specific index.
*
* @param index
* is the number of a chosen element
*/
public void remove(int index) {
checkIndex(index);
elementData[index] = null;
removeAndRefactor(index);
}
/**
* Method for removing a specific element.
*
* @param string
* is a string for removing
* @return is removed?
*/
public boolean remove(String string) {
if (string == null) {
for (int index = 0; index < size; index++) {
if (elementData[index] == null) {
removeAndRefactor(index);
return true;
}
}
} else {
for (int index = 0; index < size; index++)
if (string.equals(elementData[index])) {
removeAndRefactor(index);
return true;
}
}
return false;
}
/**
* Method for creating an array of objects by the array of strings.
*
* @return array of objects
*/
public Object[] toArray() {
Object[] copy = new Object[size];
for (int index = 0; index < size; index++)
copy[index] = elementData[index];
return copy;
}
/**
* Method for getting the size of the array
*
* @return the value of size
*/
public int size() {
return size;
}
/**
* Method for revising if the specific element exists in this container.
*
* @param string
* is the element for revising
* @return is in the container?
*/
public boolean contains(String string) {
return indexOf(string) >= 0;
}
/**
* Method for checking if all the elements of another container exist in this
* container.
*
* @param container
* is another container for revising
* @return is in the container?
*/
public boolean containsAll(NewContainerOfStrings container) {
for (String e : container)
if (!contains(e))
return false;
return true;
}
/**
* Method for getting the index of the specific element.
*
* @param string
* is the chosen element
* @return the number of the element
*/
public int indexOf(String string) {
if (string == null) {
for (int i = 0; i < size; i++)
if (elementData[i] == null)
return i;
} else {
for (int i = 0; i < size; i++)
if (string.equals(elementData[i]))
return i;
}
return -1;
}
/**
* Method for capacity ensuring by the customer.
*
* @param newCapacity
* the value of a new capacity
*/
public void ensureCapacity(int newCapacity) {
checkDefaultCapacityEnsuring(newCapacity);
}
/**
* Method for getting the element by its' index
*
* @param index
* the number of the chosen element
* @return element by the index
*/
public String get(int index) {
checkIndex(index);
return elementData[index];
}
/**
* Method for creating the iterator
*/
public newIterator<String> iterator() {
return new newIterator<String>(elementData);
}
/**
* New iterator for work with the NewContainerOfStrings
*
* @author pavlova-mv
*
* @param <String>
*/
@SuppressWarnings("hiding")
public class newIterator<String> implements Iterator<String> {
private int cursor;
private int end;
/**
* Default constructor
*
* @param array
*/
public newIterator(String[] array) {
this.cursor = -1;
this.end = array.length - 1;
}
/**
* Method for revising does the next element exist.
*/
@Override
public boolean hasNext() {
return this.cursor < end;
}
/**
* Method for getting the next element of the iterable array.
*/
@SuppressWarnings("unchecked")
@Override
public String next() {
if (!this.hasNext()) {
throw new NoSuchElementException();
}
cursor++;
return (String) elementData[cursor];
}
/**
* Method for removing the last element of te array.
*/
public void remove() {
if (end == -1)
throw new IllegalStateException();
try {
NewContainerOfStrings.this.remove(elementData[end]);
if (end < cursor)
cursor--;
end = -1;
} catch (IndexOutOfBoundsException one) {
throw new ConcurrentModificationException();
}
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred.pipes;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.TestMiniMRWithDFS;
import org.apache.hadoop.mapred.Utils;
import org.apache.hadoop.mapred.Counters.Counter;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
/**
 * End-to-end tests for Hadoop Pipes: runs the pre-built C++ word-count
 * example binaries against mini DFS/MR clusters and compares the reduce
 * output with the expected literals below. Skipped entirely unless the
 * build defines {@code compile.c++}.
 */
public class TestPipes extends TestCase {
  private static final Log LOG =
      LogFactory.getLog(TestPipes.class.getName());

  /** Root of the installed C++ examples (-Dinstall.c++.examples). */
  private static Path cppExamples =
      new Path(System.getProperty("install.c++.examples"));
  static Path wordCountSimple =
      new Path(cppExamples, "bin/wordcount-simple");
  static Path wordCountPart =
      new Path(cppExamples, "bin/wordcount-part");
  static Path wordCountNoPipes =
      new Path(cppExamples, "bin/wordcount-nopipe");
  static Path nonPipedOutDir;

  /** Deletes {@code p} recursively and asserts that it is gone. */
  static void cleanup(FileSystem fs, Path p) throws IOException {
    fs.delete(p, true);
    assertFalse("output not cleaned up", fs.exists(p));
  }

  public void testPipes() throws IOException {
    if (System.getProperty("compile.c++") == null) {
      LOG.info("compile.c++ is not defined, so skipping TestPipes");
      return;
    }
    MiniDFSCluster dfs = null;
    MiniMRCluster mr = null;
    Path inputPath = new Path("testing/in");
    Path outputPath = new Path("testing/out");
    try {
      final int numSlaves = 2;
      Configuration conf = new Configuration();
      dfs = new MiniDFSCluster(conf, numSlaves, true, null);
      mr = new MiniMRCluster(numSlaves, dfs.getFileSystem().getName(), 1);
      writeInputFile(dfs.getFileSystem(), inputPath);
      // Three pipes variants: two-way sorted reduce, map-only (no sort),
      // and a fixed partitioner; then the all-C++ record reader/writer run.
      runProgram(mr, dfs, wordCountSimple,
          inputPath, outputPath, 3, 2, twoSplitOutput, null);
      cleanup(dfs.getFileSystem(), outputPath);
      runProgram(mr, dfs, wordCountSimple,
          inputPath, outputPath, 3, 0, noSortOutput, null);
      cleanup(dfs.getFileSystem(), outputPath);
      runProgram(mr, dfs, wordCountPart,
          inputPath, outputPath, 3, 2, fixedPartitionOutput, null);
      runNonPipedProgram(mr, dfs, wordCountNoPipes, null);
      mr.waitUntilIdle();
    } finally {
      // BUG FIX: either cluster may still be null if startup failed, and a
      // failure shutting down the MR cluster must not skip the DFS shutdown.
      try {
        if (mr != null) {
          mr.shutdown();
        }
      } finally {
        if (dfs != null) {
          dfs.shutdown();
        }
      }
    }
  }

  /** Expected output of the two-reducer sorted word count. */
  final static String[] twoSplitOutput = new String[] {
      "`and\t1\na\t1\nand\t1\nbeginning\t1\nbook\t1\nbut\t1\nby\t1\n" +
      "conversation?'\t1\ndo:\t1\nhad\t2\nhaving\t1\nher\t2\nin\t1\nit\t1\n"+
      "it,\t1\nno\t1\nnothing\t1\nof\t3\non\t1\nonce\t1\nor\t3\npeeped\t1\n"+
      "pictures\t2\nthe\t3\nthought\t1\nto\t2\nuse\t1\nwas\t2\n",
      "Alice\t2\n`without\t1\nbank,\t1\nbook,'\t1\nconversations\t1\nget\t1\n" +
      "into\t1\nis\t1\nreading,\t1\nshe\t1\nsister\t2\nsitting\t1\ntired\t1\n" +
      "twice\t1\nvery\t1\nwhat\t1\n"
  };

  /** Expected output of the map-only (zero reducer) run, unsorted. */
  final static String[] noSortOutput = new String[] {
      "it,\t1\n`and\t1\nwhat\t1\nis\t1\nthe\t1\nuse\t1\nof\t1\na\t1\n" +
      "book,'\t1\nthought\t1\nAlice\t1\n`without\t1\npictures\t1\nor\t1\n"+
      "conversation?'\t1\n",
      "Alice\t1\nwas\t1\nbeginning\t1\nto\t1\nget\t1\nvery\t1\ntired\t1\n"+
      "of\t1\nsitting\t1\nby\t1\nher\t1\nsister\t1\non\t1\nthe\t1\nbank,\t1\n"+
      "and\t1\nof\t1\nhaving\t1\nnothing\t1\nto\t1\ndo:\t1\nonce\t1\n",
      "or\t1\ntwice\t1\nshe\t1\nhad\t1\npeeped\t1\ninto\t1\nthe\t1\nbook\t1\n"+
      "her\t1\nsister\t1\nwas\t1\nreading,\t1\nbut\t1\nit\t1\nhad\t1\nno\t1\n"+
      "pictures\t1\nor\t1\nconversations\t1\nin\t1\n"
  };

  /** Expected output of the fixed-partitioner run (everything in part 0). */
  final static String[] fixedPartitionOutput = new String[] {
      "Alice\t2\n`and\t1\n`without\t1\na\t1\nand\t1\nbank,\t1\nbeginning\t1\n" +
      "book\t1\nbook,'\t1\nbut\t1\nby\t1\nconversation?'\t1\nconversations\t1\n"+
      "do:\t1\nget\t1\nhad\t2\nhaving\t1\nher\t2\nin\t1\ninto\t1\nis\t1\n" +
      "it\t1\nit,\t1\nno\t1\nnothing\t1\nof\t3\non\t1\nonce\t1\nor\t3\n" +
      "peeped\t1\npictures\t2\nreading,\t1\nshe\t1\nsister\t2\nsitting\t1\n" +
      "the\t3\nthought\t1\ntired\t1\nto\t2\ntwice\t1\nuse\t1\n" +
      "very\t1\nwas\t2\nwhat\t1\n",
      ""
  };

  /** Writes the fixed Alice-in-Wonderland input split into {@code dir}. */
  static void writeInputFile(FileSystem fs, Path dir) throws IOException {
    DataOutputStream out = fs.create(new Path(dir, "part0"));
    try {
      out.writeBytes("Alice was beginning to get very tired of sitting by her\n");
      out.writeBytes("sister on the bank, and of having nothing to do: once\n");
      out.writeBytes("or twice she had peeped into the book her sister was\n");
      out.writeBytes("reading, but it had no pictures or conversations in\n");
      out.writeBytes("it, `and what is the use of a book,' thought Alice\n");
      out.writeBytes("`without pictures or conversation?'\n");
    } finally {
      // Close even if a write fails, so the stream is never leaked.
      out.close();
    }
  }

  /**
   * Runs one pipes program and verifies its per-reducer output.
   *
   * @param mr the mini MR cluster
   * @param dfs the DFS cluster
   * @param program the C++ executable to ship and run
   * @param inputPath job input directory
   * @param outputPath job output directory
   * @param numMaps number of map tasks
   * @param numReduces number of reduce tasks (0 = map-only, polled manually)
   * @param expectedResults one expected output string per reducer
   * @param conf optional base configuration (null = cluster default)
   * @throws IOException on any job or filesystem failure
   */
  static void runProgram(MiniMRCluster mr, MiniDFSCluster dfs,
                         Path program, Path inputPath, Path outputPath,
                         int numMaps, int numReduces, String[] expectedResults,
                         JobConf conf
                        ) throws IOException {
    Path wordExec = new Path("testing/bin/application");
    JobConf job = null;
    if (conf == null) {
      job = mr.createJobConf();
    } else {
      job = new JobConf(conf);
    }
    job.setNumMapTasks(numMaps);
    job.setNumReduceTasks(numReduces);
    {
      FileSystem fs = dfs.getFileSystem();
      fs.delete(wordExec.getParent(), true);
      fs.copyFromLocalFile(program, wordExec);
      Submitter.setExecutable(job, fs.makeQualified(wordExec).toString());
      Submitter.setIsJavaRecordReader(job, true);
      Submitter.setIsJavaRecordWriter(job, true);
      FileInputFormat.setInputPaths(job, inputPath);
      FileOutputFormat.setOutputPath(job, outputPath);
      RunningJob rJob = null;
      if (numReduces == 0) {
        // Map-only jobs are submitted asynchronously and polled to completion.
        rJob = Submitter.jobSubmit(job);
        while (!rJob.isComplete()) {
          try {
            Thread.sleep(1000);
          } catch (InterruptedException ie) {
            throw new RuntimeException(ie);
          }
        }
      } else {
        rJob = Submitter.runJob(job);
      }
      assertTrue("pipes job failed", rJob.isSuccessful());
      // The C++ programs increment counters in the WORDCOUNT group; make
      // sure at least one made it back to the framework.
      Counters counters = rJob.getCounters();
      Counters.Group wordCountCounters = counters.getGroup("WORDCOUNT");
      int numCounters = 0;
      for (Counter c : wordCountCounters) {
        System.out.println(c);
        ++numCounters;
      }
      assertTrue("No counters found!", (numCounters > 0));
    }
    List<String> results = new ArrayList<String>();
    for (Path p : FileUtil.stat2Paths(dfs.getFileSystem().listStatus(outputPath,
        new Utils.OutputFileUtils.OutputFilesFilter()))) {
      results.add(TestMiniMRWithDFS.readOutput(p, job));
    }
    assertEquals("number of reduces is wrong",
                 expectedResults.length, results.size());
    for (int i = 0; i < results.size(); i++) {
      assertEquals("pipes program " + program + " output " + i + " wrong",
                   expectedResults[i], results.get(i));
    }
  }

  /**
   * Run a map/reduce word count that does all of the map input and reduce
   * output directly rather than sending it back up to Java.
   * @param mr The mini mr cluster
   * @param dfs the dfs cluster
   * @param program the program to run
   * @throws IOException
   */
  static void runNonPipedProgram(MiniMRCluster mr, MiniDFSCluster dfs,
                                 Path program, JobConf conf) throws IOException {
    JobConf job;
    if (conf == null) {
      job = mr.createJobConf();
    } else {
      job = new JobConf(conf);
    }
    job.setInputFormat(WordCountInputFormat.class);
    FileSystem local = FileSystem.getLocal(job);
    Path testDir = new Path("file:" + System.getProperty("test.build.data"),
                            "pipes");
    Path inDir = new Path(testDir, "input");
    nonPipedOutDir = new Path(testDir, "output");
    Path wordExec = new Path("testing/bin/application");
    Path jobXml = new Path(testDir, "job.xml");
    {
      FileSystem fs = dfs.getFileSystem();
      fs.delete(wordExec.getParent(), true);
      fs.copyFromLocalFile(program, wordExec);
    }
    DataOutputStream out = local.create(new Path(inDir, "part0"));
    out.writeBytes("i am a silly test\n");
    out.writeBytes("you are silly\n");
    out.writeBytes("i am a cat test\n");
    out.writeBytes("you is silly\n");
    out.writeBytes("i am a billy test\n");
    out.writeBytes("hello are silly\n");
    out.close();
    out = local.create(new Path(inDir, "part1"));
    out.writeBytes("mall world things drink java\n");
    out.writeBytes("hall silly cats drink java\n");
    out.writeBytes("all dogs bow wow\n");
    out.writeBytes("hello drink java\n");
    // BUG FIX: close the input file as soon as it is fully written; it was
    // previously left open across the unrelated delete/mkdirs calls below.
    out.close();
    local.delete(nonPipedOutDir, true);
    local.mkdirs(nonPipedOutDir, new FsPermission(FsAction.ALL, FsAction.ALL,
                                                  FsAction.ALL));
    out = local.create(jobXml);
    job.writeXml(out);
    out.close();
    System.err.println("About to run: Submitter -conf " + jobXml + " -input "
                       + inDir + " -output " + nonPipedOutDir + " -program "
                       + dfs.getFileSystem().makeQualified(wordExec));
    try {
      int ret = ToolRunner.run(new Submitter(),
                               new String[]{"-conf", jobXml.toString(),
                                            "-input", inDir.toString(),
                                            "-output", nonPipedOutDir.toString(),
                                            "-program",
                                            dfs.getFileSystem().makeQualified(wordExec).toString(),
                                            "-reduces", "2"});
      assertEquals(0, ret);
    } catch (Exception e) {
      assertTrue("got exception: " + StringUtils.stringifyException(e), false);
    }
  }
}
| |
package org.opencb.opencga.analysis.rga;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.StopWatch;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.UpdateResponse;
import org.apache.solr.common.SolrException;
import org.opencb.commons.datastore.core.DataResult;
import org.opencb.commons.datastore.core.FacetField;
import org.opencb.commons.datastore.core.Query;
import org.opencb.commons.datastore.core.QueryOptions;
import org.opencb.commons.datastore.solr.FacetQueryParser;
import org.opencb.commons.datastore.solr.SolrCollection;
import org.opencb.commons.datastore.solr.SolrManager;
import org.opencb.opencga.analysis.rga.exceptions.RgaException;
import org.opencb.opencga.analysis.rga.iterators.RgaIterator;
import org.opencb.opencga.core.config.storage.StorageConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
public class RgaEngine implements Closeable {

    private SolrManager solrManager;
    private RgaQueryParser parser;
    private IndividualRgaConverter individualRgaConverter;
    private GeneRgaConverter geneConverter;
    private VariantRgaConverter variantConverter;
    private StorageConfiguration storageConfiguration;

    // Cache of SolrCollection handles keyed by collection name. Static, so it is
    // shared by every RgaEngine in the JVM; all access is synchronized in
    // getSolrCollection(). NOTE(review): entries are created with whichever
    // engine asked first -- confirm all engines target the same Solr hosts.
    private static Map<String, SolrCollection> solrCollectionMap;

    private Logger logger;

    static {
        solrCollectionMap = new HashMap<>();
    }

    /**
     * Creates a new engine connected to the Solr hosts declared in the RGA
     * section of the storage configuration.
     *
     * @param storageConfiguration Storage configuration providing the Solr
     *                             hosts, mode, timeout and configset for RGA.
     */
    public RgaEngine(StorageConfiguration storageConfiguration) {
        this.individualRgaConverter = new IndividualRgaConverter();
        this.geneConverter = new GeneRgaConverter();
        this.variantConverter = new VariantRgaConverter();
        this.parser = new RgaQueryParser();
        this.storageConfiguration = storageConfiguration;

        this.solrManager = new SolrManager(storageConfiguration.getRga().getHosts(), storageConfiguration.getRga().getMode(),
                storageConfiguration.getRga().getTimeout());

        logger = LoggerFactory.getLogger(RgaEngine.class);
    }

    /**
     * Checks whether the Solr server backing the given collection is reachable.
     *
     * @param collection Collection name.
     * @return true if the server answers the ping.
     */
    public boolean isAlive(String collection) {
        return solrManager.isAlive(collection);
    }

    /**
     * Creates the main RGA collection using the configured RGA configset.
     *
     * @param dbName Collection name to create.
     * @throws RgaException if Solr rejects the creation.
     */
    public void createMainCollection(String dbName) throws RgaException {
        try {
            solrManager.create(dbName, this.storageConfiguration.getRga().getConfigSet());
        } catch (SolrException e) {
            throw new RgaException("Error creating Solr collection '" + dbName + "'", e);
        }
    }

    /**
     * Creates the auxiliary RGA collection. Its configset name is derived from
     * the main one by substituting "-rga-" with "-rga-aux-".
     *
     * @param dbName Collection name to create.
     * @throws RgaException if Solr rejects the creation.
     */
    public void createAuxCollection(String dbName) throws RgaException {
        try {
            solrManager.create(dbName, this.storageConfiguration.getRga().getConfigSet().replace("-rga-", "-rga-aux-"));
        } catch (SolrException e) {
            throw new RgaException("Error creating Solr collection '" + dbName + "'", e);
        }
    }

    /**
     * Creates a collection with an explicit configset.
     *
     * @param dbName    Collection name to create.
     * @param configSet Configset to apply.
     * @throws RgaException if Solr rejects the creation.
     */
    public void create(String dbName, String configSet) throws RgaException {
        try {
            solrManager.create(dbName, configSet);
        } catch (SolrException e) {
            throw new RgaException("Error creating Solr collection '" + dbName + "'", e);
        }
    }

    /**
     * Checks whether a collection (or core, depending on Solr mode) exists.
     *
     * @param dbName Collection name.
     * @return true if it exists.
     * @throws RgaException if Solr cannot be queried.
     */
    public boolean exists(String dbName) throws RgaException {
        try {
            return solrManager.exists(dbName);
        } catch (SolrException e) {
            throw new RgaException("Error asking if Solr collection '" + dbName + "' exists", e);
        }
    }

    /**
     * Checks whether a Solr core exists (standalone mode).
     *
     * @param coreName Core name.
     * @return true if the core exists.
     * @throws RgaException if Solr cannot be queried.
     */
    public boolean existsCore(String coreName) throws RgaException {
        try {
            return solrManager.existsCore(coreName);
        } catch (SolrException e) {
            throw new RgaException("Error asking if Solr core '" + coreName + "' exists", e);
        }
    }

    /**
     * Checks whether a Solr collection exists (cloud mode).
     *
     * @param collectionName Collection name.
     * @return true if the collection exists.
     * @throws RgaException if Solr cannot be queried.
     */
    public boolean existsCollection(String collectionName) throws RgaException {
        try {
            return solrManager.exists(collectionName);
        } catch (SolrException e) {
            throw new RgaException("Error asking if Solr collection '" + collectionName + "' exists", e);
        }
    }

    /**
     * Insert a list of RGA models into the given Solr collection.
     *
     * @param collection   Solr collection where to insert
     * @param rgaModelList List of RGA models to insert
     * @throws IOException         IOException
     * @throws SolrServerException SolrServerException
     */
    public void insert(String collection, List<?> rgaModelList) throws IOException, SolrServerException {
        if (CollectionUtils.isNotEmpty(rgaModelList)) {
            UpdateResponse updateResponse;
            updateResponse = solrManager.getSolrClient().addBeans(collection, rgaModelList);
            // Only commit when the add succeeded (Solr status 0 == OK).
            if (updateResponse.getStatus() == 0) {
                solrManager.getSolrClient().commit(collection);
            }
        }
    }

    /**
     * Return an RgaDataModel iterator given a query. Honours the LIMIT option;
     * when absent, all matching rows are requested.
     *
     * @param collection   Collection name
     * @param query        Query
     * @param queryOptions Query options
     * @return RgaIterator.
     * @throws RgaException RgaException
     */
    public RgaIterator individualQuery(String collection, Query query, QueryOptions queryOptions) throws RgaException {
        SolrQuery solrQuery = parser.parseQuery(query);
        fixIndividualOptions(queryOptions, solrQuery);
        solrQuery.setRows(queryOptions.getInt(QueryOptions.LIMIT, Integer.MAX_VALUE));
        try {
            return new RgaIterator(solrManager.getSolrClient(), collection, solrQuery);
        } catch (SolrServerException e) {
            throw new RgaException("Error executing KnockoutByIndividual query", e);
        }
    }

    // Restricts the returned Solr fields according to INCLUDE (preferred) or,
    // failing that, the complement of EXCLUDE.
    private void fixIndividualOptions(QueryOptions queryOptions, SolrQuery solrQuery) {
        if (queryOptions.containsKey(QueryOptions.INCLUDE)) {
            for (String include : individualRgaConverter.getIncludeFields(queryOptions.getAsStringList(QueryOptions.INCLUDE))) {
                solrQuery.addField(include);
            }
        } else if (queryOptions.containsKey(QueryOptions.EXCLUDE)) {
            for (String include : individualRgaConverter.getIncludeFromExcludeFields(queryOptions.getAsStringList(QueryOptions.EXCLUDE))) {
                solrQuery.addField(include);
            }
        }
    }

    /**
     * Return an RgaDataModel iterator given a query. Always fetches all
     * matching rows (LIMIT is not applied here, unlike individualQuery).
     *
     * @param collection   Collection name
     * @param query        Query
     * @param queryOptions Query options
     * @return RgaIterator.
     * @throws RgaException RgaException
     */
    public RgaIterator geneQuery(String collection, Query query, QueryOptions queryOptions) throws RgaException {
        SolrQuery solrQuery = parser.parseQuery(query);
        fixGeneOptions(queryOptions, solrQuery);
        solrQuery.setRows(Integer.MAX_VALUE);
        try {
            return new RgaIterator(solrManager.getSolrClient(), collection, solrQuery);
        } catch (SolrServerException e) {
            throw new RgaException("Error executing RgaKnockoutByGene query", e);
        }
    }

    // Same field-projection logic as fixIndividualOptions, using the gene converter.
    private void fixGeneOptions(QueryOptions queryOptions, SolrQuery solrQuery) {
        if (queryOptions.containsKey(QueryOptions.INCLUDE)) {
            for (String include : geneConverter.getIncludeFields(queryOptions.getAsStringList(QueryOptions.INCLUDE))) {
                solrQuery.addField(include);
            }
        } else if (queryOptions.containsKey(QueryOptions.EXCLUDE)) {
            for (String include : geneConverter.getIncludeFromExcludeFields(queryOptions.getAsStringList(QueryOptions.EXCLUDE))) {
                solrQuery.addField(include);
            }
        }
    }

    /**
     * Return an RgaDataModel iterator given a query. Always fetches all
     * matching rows (LIMIT is not applied here, unlike individualQuery).
     *
     * @param collection   Collection name
     * @param query        Query
     * @param queryOptions Query options
     * @return RgaIterator object.
     * @throws RgaException RgaException
     */
    public RgaIterator variantQuery(String collection, Query query, QueryOptions queryOptions) throws RgaException {
        SolrQuery solrQuery = parser.parseQuery(query);
        fixVariantOptions(queryOptions, solrQuery);
        solrQuery.setRows(Integer.MAX_VALUE);
        try {
            return new RgaIterator(solrManager.getSolrClient(), collection, solrQuery);
        } catch (SolrServerException e) {
            throw new RgaException("Error executing KnockoutByVariant query", e);
        }
    }

    // Same field-projection logic as fixIndividualOptions, using the variant converter.
    private void fixVariantOptions(QueryOptions queryOptions, SolrQuery solrQuery) {
        if (queryOptions.containsKey(QueryOptions.INCLUDE)) {
            for (String include : variantConverter.getIncludeFields(queryOptions.getAsStringList(QueryOptions.INCLUDE))) {
                solrQuery.addField(include);
            }
        } else if (queryOptions.containsKey(QueryOptions.EXCLUDE)) {
            for (String include : variantConverter.getIncludeFromExcludeFields(queryOptions.getAsStringList(QueryOptions.EXCLUDE))) {
                solrQuery.addField(include);
            }
        }
    }

    /**
     * Count the number of documents matching a query.
     *
     * @param collection Collection name
     * @param query      Query
     * @return Number of results
     * @throws RgaException RgaException
     * @throws IOException  IOException
     */
    public long count(String collection, Query query) throws RgaException, IOException {
        SolrQuery solrQuery = parser.parseQuery(query);
        SolrCollection solrCollection = getSolrCollection(collection);
        try {
            return solrCollection.count(solrQuery).getResults().get(0);
        } catch (SolrServerException e) {
            throw new RgaException("Error executing count for a given query", e);
        }
    }

    /**
     * Performs a facet query from the auxiliar collection by joining other queries that need to run over the main collection.
     *
     * @param collection         Auxiliar collection id.
     * @param externalCollection Main collection id.
     * @param query              Auxiliar collection query.
     * @param externalQuery      Main collection query.
     * @param queryOptions       QueryOptions (facet).
     * @return A DataResult of FacetField.
     * @throws RgaException RgaException.
     * @throws IOException  IOException.
     */
    public DataResult<FacetField> joinFacetQuery(String collection, String externalCollection, Query query, Query externalQuery,
                                                 QueryOptions queryOptions) throws RgaException, IOException {
        SolrQuery mainSolrQuery = parser.parseAuxQuery(query);
        SolrQuery externalSolrQuery = parser.parseQuery(externalQuery);
        if (externalSolrQuery.getFilterQueries() != null && externalSolrQuery.getFilterQueries().length > 0) {
            // Pass the main-collection filters as a query parameter ($v1) to a
            // Solr cross-collection join from VARIANTS to the auxiliary ID field.
            String externalQueryStr = StringUtils.join(externalSolrQuery.getFilterQueries(), " AND ");
            mainSolrQuery.set("v1", externalQueryStr);
            mainSolrQuery.addFilterQuery("{!join from=" + RgaDataModel.VARIANTS + " to=" + AuxiliarRgaDataModel.ID
                    + " fromIndex=" + externalCollection + " v=$v1}");
        }
        return facetedQuery(collection, mainSolrQuery, queryOptions);
    }

    /**
     * Return faceted data from the main RGA Solr core/collection given a query.
     *
     * @param collection   Collection name
     * @param query        Query
     * @param queryOptions Query options (contains the facet and facetRange options)
     * @return List of KnockoutByIndividual objects
     * @throws RgaException RgaException
     * @throws IOException  IOException
     */
    public DataResult<FacetField> facetedQuery(String collection, Query query, QueryOptions queryOptions)
            throws RgaException, IOException {
        SolrQuery solrQuery = parser.parseQuery(query);
        return facetedQuery(collection, solrQuery, queryOptions);
    }

    /**
     * Return faceted data from a Solr core/collection given a query.
     *
     * @param collection   Collection name
     * @param solrQuery    SolrQuery object.
     * @param queryOptions Query options (contains the facet and facetRange options)
     * @return List of KnockoutByIndividual objects
     * @throws IOException IOException
     */
    private DataResult<FacetField> facetedQuery(String collection, SolrQuery solrQuery, QueryOptions queryOptions) throws IOException {
        StopWatch stopWatch = StopWatch.createStarted();
        if (queryOptions.containsKey(QueryOptions.FACET)
                && org.apache.commons.lang3.StringUtils.isNotEmpty(queryOptions.getString(QueryOptions.FACET))) {
            try {
                // Translate the facet expression into Solr's JSON facet API and
                // disable row retrieval: only aggregates are wanted.
                FacetQueryParser facetQueryParser = new FacetQueryParser();
                String facetQuery = parser.parseFacet(queryOptions.getString(QueryOptions.FACET));
                String jsonFacet = facetQueryParser.parse(facetQuery, queryOptions);
                solrQuery.set("json.facet", jsonFacet);
                solrQuery.setRows(0);
                solrQuery.setStart(0);
                solrQuery.setFields();
                logger.debug(">>>>>> Solr Facet: " + solrQuery.toString());
            } catch (Exception e) {
                throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Solr parse exception: " + e.getMessage(), e);
            }
        }
        SolrCollection solrCollection = getSolrCollection(collection);
        DataResult<FacetField> facetResult;
        try {
            facetResult = solrCollection.facet(solrQuery, null);
        } catch (SolrServerException e) {
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e.getMessage(), e);
        }
        logger.info("Facet '{}': {} milliseconds", solrQuery.toString(), stopWatch.getTime(TimeUnit.MILLISECONDS));
        return facetResult;
    }

    @Override
    public void close() throws IOException {
        solrManager.close();
    }

    public SolrManager getSolrManager() {
        return solrManager;
    }

    public RgaEngine setSolrManager(SolrManager solrManager) {
        this.solrManager = solrManager;
        return this;
    }

    // Returns (creating and caching on first use) the SolrCollection handle.
    // Synchronized computeIfAbsent: the previous unsynchronized check-then-act
    // on the shared static HashMap could race when several threads queried
    // concurrently, corrupting the map or creating duplicate handles.
    private SolrCollection getSolrCollection(String collection) {
        synchronized (solrCollectionMap) {
            return solrCollectionMap.computeIfAbsent(collection, solrManager::getCollection);
        }
    }
}
| |
/*
* Licensed to GraphHopper and Peter Karich under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.routing.util;
import com.graphhopper.storage.GraphBuilder;
import com.graphhopper.storage.GraphStorage;
import com.graphhopper.util.EdgeExplorer;
import com.graphhopper.util.GHUtility;
import gnu.trove.list.array.TIntArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.junit.*;
import static org.junit.Assert.*;
/**
 * Tests for PrepareRoutingSubnetworks: finding disconnected subnetworks,
 * keeping only the largest one, and removing dead-end one-way networks.
 * <p>
 * @author Peter Karich
 */
public class PrepareRoutingSubnetworksTest
{
    private final EncodingManager em = new EncodingManager("car");

    GraphStorage createGraph( EncodingManager eman )
    {
        return new GraphBuilder(eman).create();
    }

    // Builds a graph with three disconnected components:
    // "big"   -> nodes 1,2,4,8,11,12,9,15 (8 nodes)
    // "large" -> nodes 0,13,3,7,5         (5 nodes)
    // "small" -> nodes 6,14,10            (3 nodes)
    GraphStorage createSubnetworkTestGraph()
    {
        GraphStorage g = createGraph(em);
        // big network
        g.edge(1, 2, 1, true);
        g.edge(1, 4, 1, false);
        g.edge(1, 8, 1, true);
        g.edge(2, 4, 1, true);
        g.edge(8, 4, 1, false);
        g.edge(8, 11, 1, true);
        g.edge(12, 11, 1, true);
        g.edge(9, 12, 1, false);
        g.edge(9, 15, 1, true);

        // large network
        g.edge(0, 13, 1, true);
        g.edge(0, 3, 1, true);
        g.edge(0, 7, 1, true);
        g.edge(3, 7, 1, true);
        g.edge(3, 5, 1, true);
        g.edge(13, 5, 1, true);

        // small network
        g.edge(6, 14, 1, true);
        g.edge(10, 14, 1, true);
        return g;
    }

    @Test
    public void testFindSubnetworks()
    {
        GraphStorage g = createSubnetworkTestGraph();
        PrepareRoutingSubnetworks instance = new PrepareRoutingSubnetworks(g, em);
        Map<Integer, Integer> map = instance.findSubnetworks();

        // the map is keyed by the component's start node and valued by its size
        assertEquals(3, map.size());
        // start is at 0 => large network
        assertEquals(5, (int) map.get(0));
        // next smallest and unvisited node is 1 => big network
        assertEquals(8, (int) map.get(1));
        assertEquals(3, (int) map.get(6));
    }

    @Test
    public void testKeepLargestNetworks()
    {
        GraphStorage g = createSubnetworkTestGraph();
        PrepareRoutingSubnetworks instance = new PrepareRoutingSubnetworks(g, em);
        Map<Integer, Integer> map = instance.findSubnetworks();
        instance.keepLargeNetworks(map);
        g.optimize();

        // only the 8-node "big" network survives
        assertEquals(8, g.getNodes());
        assertEquals(Arrays.<String>asList(), GHUtility.getProblems(g));

        map = instance.findSubnetworks();
        assertEquals(1, map.size());
        assertEquals(8, (int) map.get(0));
    }

    // Two components of 6 and 3 nodes, connected only by an edge with no
    // access flags set (not usable by CAR).
    GraphStorage createSubnetworkTestGraph2( EncodingManager em )
    {
        GraphStorage g = createGraph(em);
        // large network
        g.edge(0, 1, 1, true);
        g.edge(1, 3, 1, true);
        g.edge(0, 2, 1, true);
        g.edge(2, 3, 1, true);
        g.edge(3, 7, 1, true);
        g.edge(7, 8, 1, true);

        // connecting both but do not allow CAR!
        g.edge(3, 4).setDistance(1);

        // small network
        g.edge(4, 5, 1, true);
        g.edge(5, 6, 1, true);
        g.edge(4, 6, 1, true);
        return g;
    }

    @Test
    public void testRemoveSubnetworkIfOnlyOneVehicle()
    {
        GraphStorage g = createSubnetworkTestGraph2(em);
        PrepareRoutingSubnetworks instance = new PrepareRoutingSubnetworks(g, em);
        instance.setMinNetworkSize(4);
        instance.doWork();
        g.optimize();
        // the 3-node network is below the minimum size and gets removed
        assertEquals(6, g.getNodes());
        assertEquals(Arrays.<String>asList(), GHUtility.getProblems(g));
        EdgeExplorer explorer = g.createEdgeExplorer();
        assertEquals(GHUtility.asSet(2, 1, 5), GHUtility.getNeighbors(explorer.setBaseNode(3)));

        // do not remove because small network is big enough
        g = createSubnetworkTestGraph2(em);
        instance = new PrepareRoutingSubnetworks(g, em);
        instance.setMinNetworkSize(3);
        instance.doWork();
        g.optimize();
        assertEquals(9, g.getNodes());

        // do not remove because there are two vehicles
        EncodingManager em2 = new EncodingManager("CAR,BIKE");
        g = createSubnetworkTestGraph2(em2);
        instance = new PrepareRoutingSubnetworks(g, em2);
        instance.setMinNetworkSize(3);
        instance.doWork();
        g.optimize();
        assertEquals(9, g.getNodes());
    }

    GraphStorage createDeadEndUnvisitedNetworkGraph( EncodingManager em )
    {
        GraphStorage g = createGraph(em);
        // 0 <-> 1 <-> 2 <-> 3 <-> 4 <- 5 <-> 6

        g.edge(0, 1, 1, true);
        g.edge(1, 2, 1, true);
        g.edge(2, 3, 1, true);
        g.edge(3, 4, 1, true);
        g.edge(5, 4, 1, false);
        g.edge(5, 6, 1, true);

        // 7 -> 8 <-> 9 <-> 10
        g.edge(7, 8, 1, false);
        g.edge(8, 9, 1, true);
        g.edge(9, 10, 1, true);
        return g;
    }

    // Directed graph with several strongly connected components, including a
    // self-loop at node 8.
    GraphStorage createTarjanTestGraph()
    {
        GraphStorage g = createGraph(em);

        g.edge(1, 2, 1, false);
        g.edge(2, 3, 1, false);
        g.edge(3, 1, 1, false);
        g.edge(4, 2, 1, false);
        g.edge(4, 3, 1, false);
        g.edge(4, 5, 1, true);
        g.edge(5, 6, 1, false);
        g.edge(6, 3, 1, false);
        g.edge(6, 7, 1, true);
        g.edge(8, 5, 1, false);
        g.edge(8, 7, 1, false);
        g.edge(8, 8, 1, false);
        return g;
    }

    @Test
    public void testRemoveDeadEndUnvisitedNetworks()
    {
        GraphStorage g = createDeadEndUnvisitedNetworkGraph(em);
        assertEquals(11, g.getNodes());

        PrepareRoutingSubnetworks instance = new PrepareRoutingSubnetworks(g, em).
                setMinOneWayNetworkSize(3);
        int removed = instance.removeDeadEndUnvisitedNetworks(em.getEncoder("car"));

        // the 8 <-> 9 <-> 10 tail (reachable only via one-way 7 -> 8) is removed
        assertEquals(3, removed);
        g.optimize();

        assertEquals(8, g.getNodes());
    }

    @Test
    public void testTarjan()
    {
        GraphStorage g = createSubnetworkTestGraph();

        // Requires a single vehicle type, otherwise we throw.
        final FlagEncoder flagEncoder = em.getEncoder("car");
        final EdgeFilter filter = new DefaultEdgeFilter(flagEncoder, false, true);

        TarjansStronglyConnectedComponentsAlgorithm tarjan = new TarjansStronglyConnectedComponentsAlgorithm(g, filter);

        List<TIntArrayList> components = tarjan.findComponents();

        // the undirected test graph decomposes into 4 strongly connected components
        assertEquals(4, components.size());

        assertEquals(new TIntArrayList(new int[]
        {
            13, 5, 3, 7, 0
        }), components.get(0));
        assertEquals(new TIntArrayList(new int[]
        {
            2, 4, 12, 11, 8, 1
        }), components.get(1));
        assertEquals(new TIntArrayList(new int[]
        {
            10, 14, 6
        }), components.get(2));
        assertEquals(new TIntArrayList(new int[]
        {
            15, 9
        }), components.get(3));
    }

    // Previous two-pass implementation failed on 1 -> 2 -> 0
    @Test
    public void testNodeOrderingRegression()
    {
        // 1 -> 2 -> 0
        GraphStorage g = createGraph(em);
        g.edge(1, 2, 1, false);
        g.edge(2, 0, 1, false);

        PrepareRoutingSubnetworks instance = new PrepareRoutingSubnetworks(g, em).
                setMinOneWayNetworkSize(2);
        int removed = instance.removeDeadEndUnvisitedNetworks(em.getEncoder("car"));

        assertEquals(3, removed);
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.test.api.identity;
import java.util.Arrays;
import java.util.List;
import org.flowable.engine.common.api.FlowableException;
import org.flowable.engine.common.api.FlowableIllegalArgumentException;
import org.flowable.engine.common.api.FlowableOptimisticLockingException;
import org.flowable.engine.impl.test.PluggableFlowableTestCase;
import org.flowable.idm.api.Group;
import org.flowable.idm.api.Picture;
import org.flowable.idm.api.User;
import org.flowable.idm.engine.IdmEngineConfiguration;
import org.flowable.idm.engine.IdmEngines;
import org.flowable.idm.engine.impl.authentication.ApacheDigester;
import org.flowable.idm.engine.impl.authentication.ClearTextPasswordEncoder;
/**
* @author Frederik Heremans
*/
public class IdentityServiceTest extends PluggableFlowableTestCase {
    /** User info entries can be set, overwritten, read back and deleted. */
    public void testUserInfo() {
        User user = identityService.newUser("testuser");
        identityService.saveUser(user);
        identityService.setUserInfo("testuser", "myinfo", "myvalue");
        assertEquals("myvalue", identityService.getUserInfo("testuser", "myinfo"));
        identityService.setUserInfo("testuser", "myinfo", "myvalue2");
        assertEquals("myvalue2", identityService.getUserInfo("testuser", "myinfo"));
        identityService.deleteUserInfo("testuser", "myinfo");
        assertNull(identityService.getUserInfo("testuser", "myinfo"));
        identityService.deleteUser(user.getId());
    }
    /** Saving a second user with an already-taken id must fail. */
    public void testCreateExistingUser() {
        User user = identityService.newUser("testuser");
        identityService.saveUser(user);
        try {
            User secondUser = identityService.newUser("testuser");
            identityService.saveUser(secondUser);
            fail("Exception should have been thrown");
        } catch (RuntimeException re) {
            // Expected exception while saving new user with the same name as an
            // existing one.
        }
        identityService.deleteUser(user.getId());
    }
    /** Updates made to a freshly fetched user are persisted by saveUser. */
    public void testUpdateUser() {
        // First, create a new user
        User user = identityService.newUser("johndoe");
        user.setFirstName("John");
        user.setLastName("Doe");
        user.setEmail("johndoe@alfresco.com");
        identityService.saveUser(user);
        // Fetch and update the user
        user = identityService.createUserQuery().userId("johndoe").singleResult();
        user.setEmail("updated@alfresco.com");
        user.setFirstName("Jane");
        user.setLastName("Donnel");
        identityService.saveUser(user);
        // Re-fetch and verify all three updated fields
        user = identityService.createUserQuery().userId("johndoe").singleResult();
        assertEquals("Jane", user.getFirstName());
        assertEquals("Donnel", user.getLastName());
        assertEquals("updated@alfresco.com", user.getEmail());
        identityService.deleteUser(user.getId());
    }
    /** A user picture can be stored, read back, and removed by setting null. */
    public void testUserPicture() {
        // First, create a new user
        User user = identityService.newUser("johndoe");
        identityService.saveUser(user);
        String userId = user.getId();
        Picture picture = new Picture("niceface".getBytes(), "image/string");
        identityService.setUserPicture(userId, picture);
        // read the stored picture back before asserting on it
        picture = identityService.getUserPicture(userId);
        // Fetch and update the user
        user = identityService.createUserQuery().userId("johndoe").singleResult();
        assertTrue("byte arrays differ", Arrays.equals("niceface".getBytes(), picture.getBytes()));
        assertEquals("image/string", picture.getMimeType());
        // interface definition states that setting picture to null should delete it
        identityService.setUserPicture(userId, null);
        assertNull("it should be possible to nullify user picture", identityService.getUserPicture(userId));
        // re-fetching the user and checking again confirms the deletion persisted
        user = identityService.createUserQuery().userId("johndoe").singleResult();
        assertNull("it should be possible to delete user picture", identityService.getUserPicture(userId));
        identityService.deleteUser(user.getId());
    }
    /** A group's name can be updated via saveGroup and is persisted. */
    public void testUpdateGroup() {
        Group group = identityService.newGroup("sales");
        group.setName("Sales");
        identityService.saveGroup(group);
        group = identityService.createGroupQuery().groupId("sales").singleResult();
        group.setName("Updated");
        identityService.saveGroup(group);
        group = identityService.createGroupQuery().groupId("sales").singleResult();
        assertEquals("Updated", group.getName());
        identityService.deleteGroup(group.getId());
    }
    // NOTE(review): missing the "test" method-name prefix, so a JUnit 3 style
    // runner never executes this method — confirm and rename if unintentional.
    public void findUserByUnexistingId() {
        User user = identityService.createUserQuery().userId("unexistinguser").singleResult();
        assertNull(user);
    }
    // NOTE(review): same missing "test" prefix as above.
    public void findGroupByUnexistingId() {
        Group group = identityService.createGroupQuery().groupId("unexistinggroup").singleResult();
        assertNull(group);
    }
    /** Creating a membership towards a non-existing group must fail. */
    public void testCreateMembershipUnexistingGroup() {
        User johndoe = identityService.newUser("johndoe");
        identityService.saveUser(johndoe);
        try {
            identityService.createMembership(johndoe.getId(), "unexistinggroup");
            fail("Expected exception");
        } catch (RuntimeException re) {
            // Exception expected
        }
        identityService.deleteUser(johndoe.getId());
    }
    /** Creating a membership for a non-existing user must fail. */
    public void testCreateMembershipUnexistingUser() {
        Group sales = identityService.newGroup("sales");
        identityService.saveGroup(sales);
        try {
            identityService.createMembership("unexistinguser", sales.getId());
            fail("Expected exception");
        } catch (RuntimeException re) {
            // Exception expected
        }
        identityService.deleteGroup(sales.getId());
    }
public void testCreateMembershipAlreadyExisting() {
Group sales = identityService.newGroup("sales");
identityService.saveGroup(sales);
User johndoe = identityService.newUser("johndoe");
identityService.saveUser(johndoe);
// Create the membership
identityService.createMembership(johndoe.getId(), sales.getId());
try {
identityService.createMembership(johndoe.getId(), sales.getId());
} catch (RuntimeException re) {
// Expected exception, membership already exists
}
identityService.deleteGroup(sales.getId());
identityService.deleteUser(johndoe.getId());
}
    /** saveGroup(null) must raise an illegal-argument exception. */
    public void testSaveGroupNullArgument() {
        try {
            identityService.saveGroup(null);
            fail("ActivitiException expected");
        } catch (FlowableIllegalArgumentException ae) {
            assertTextPresent("group is null", ae.getMessage());
        }
    }
    /** saveUser(null) must raise an illegal-argument exception. */
    public void testSaveUserNullArgument() {
        try {
            identityService.saveUser(null);
            fail("ActivitiException expected");
        } catch (FlowableIllegalArgumentException ae) {
            assertTextPresent("user is null", ae.getMessage());
        }
    }
    /** Querying a group by a null id must raise an illegal-argument exception. */
    public void testFindGroupByIdNullArgument() {
        try {
            identityService.createGroupQuery().groupId(null).singleResult();
            fail("ActivitiException expected");
        } catch (FlowableIllegalArgumentException ae) {
            assertTextPresent("id is null", ae.getMessage());
        }
    }
    /** createMembership must reject a null userId and a null groupId. */
    public void testCreateMembershipNullArguments() {
        try {
            identityService.createMembership(null, "group");
            fail("ActivitiException expected");
        } catch (FlowableIllegalArgumentException ae) {
            assertTextPresent("userId is null", ae.getMessage());
        }
        try {
            identityService.createMembership("userId", null);
            fail("ActivitiException expected");
        // NOTE(review): broader FlowableException here vs
        // FlowableIllegalArgumentException above — confirm this asymmetry
        // reflects the actual service behaviour rather than an oversight.
        } catch (FlowableException ae) {
            assertTextPresent("groupId is null", ae.getMessage());
        }
    }
    /** groupMember(null) must raise an illegal-argument exception. */
    public void testFindGroupsByUserIdNullArguments() {
        try {
            identityService.createGroupQuery().groupMember(null).singleResult();
            fail("ActivitiException expected");
        } catch (FlowableIllegalArgumentException ae) {
            assertTextPresent("userId is null", ae.getMessage());
        }
    }
    /** Querying members of a non-existing group yields an empty, non-null list. */
    public void testFindUsersByGroupUnexistingGroup() {
        List<User> users = identityService.createUserQuery().memberOfGroup("unexistinggroup").list();
        assertNotNull(users);
        assertTrue(users.isEmpty());
    }
    /** deleteGroup(null) must raise an illegal-argument exception. */
    public void testDeleteGroupNullArguments() {
        try {
            identityService.deleteGroup(null);
            fail("ActivitiException expected");
        } catch (FlowableIllegalArgumentException ae) {
            assertTextPresent("groupId is null", ae.getMessage());
        }
    }
    /** A created membership can be deleted again and disappears from queries. */
    public void testDeleteMembership() {
        Group sales = identityService.newGroup("sales");
        identityService.saveGroup(sales);
        User johndoe = identityService.newUser("johndoe");
        identityService.saveUser(johndoe);
        // Add membership
        identityService.createMembership(johndoe.getId(), sales.getId());
        List<Group> groups = identityService.createGroupQuery().groupMember(johndoe.getId()).list();
        assertEquals(1, groups.size());
        assertEquals("sales", groups.get(0).getId());
        // Delete the membership and check members of sales group
        identityService.deleteMembership(johndoe.getId(), sales.getId());
        groups = identityService.createGroupQuery().groupMember(johndoe.getId()).list();
        assertTrue(groups.isEmpty());
        identityService.deleteGroup("sales");
        identityService.deleteUser("johndoe");
    }
    /** Deleting a non-existing membership is silently ignored. */
    public void testDeleteMembershipWhenUserIsNoMember() {
        Group sales = identityService.newGroup("sales");
        identityService.saveGroup(sales);
        User johndoe = identityService.newUser("johndoe");
        identityService.saveUser(johndoe);
        // Delete the membership when the user is no member
        identityService.deleteMembership(johndoe.getId(), sales.getId());
        identityService.deleteGroup("sales");
        identityService.deleteUser("johndoe");
    }
    /** Deleting a membership towards a non-existing group is silently ignored. */
    public void testDeleteMembershipUnexistingGroup() {
        User johndoe = identityService.newUser("johndoe");
        identityService.saveUser(johndoe);
        // No exception should be thrown when group doesn't exist
        identityService.deleteMembership(johndoe.getId(), "unexistinggroup");
        identityService.deleteUser(johndoe.getId());
    }
    /** Deleting a membership of a non-existing user is silently ignored. */
    public void testDeleteMembershipUnexistingUser() {
        Group sales = identityService.newGroup("sales");
        identityService.saveGroup(sales);
        // No exception should be thrown when user doesn't exist
        identityService.deleteMembership("unexistinguser", sales.getId());
        identityService.deleteGroup(sales.getId());
    }
    /**
     * deleteMembership must reject a null userId and a null groupId.
     * NOTE(review): "Memberschip" is a typo in the method name; kept as-is to
     * avoid changing the externally visible test name.
     */
    public void testDeleteMemberschipNullArguments() {
        try {
            identityService.deleteMembership(null, "group");
            fail("ActivitiException expected");
        } catch (FlowableIllegalArgumentException ae) {
            assertTextPresent("userId is null", ae.getMessage());
        }
        try {
            identityService.deleteMembership("user", null);
            fail("ActivitiException expected");
        } catch (FlowableException ae) {
            assertTextPresent("groupId is null", ae.getMessage());
        }
    }
    /** deleteUser(null) must raise an illegal-argument exception. */
    public void testDeleteUserNullArguments() {
        try {
            identityService.deleteUser(null);
            fail("ActivitiException expected");
        } catch (FlowableIllegalArgumentException ae) {
            assertTextPresent("userId is null", ae.getMessage());
        }
    }
    public void testDeleteUserUnexistingUserId() {
        // No exception should be thrown. Deleting an unexisting user should
        // be ignored silently
        identityService.deleteUser("unexistinguser");
    }
    /** checkPassword returns false (rather than throwing) for null arguments. */
    public void testCheckPasswordNullSafe() {
        assertFalse(identityService.checkPassword("userId", null));
        assertFalse(identityService.checkPassword(null, "passwd"));
        assertFalse(identityService.checkPassword(null, null));
    }
/**
 * Verifies password handling with a non-clear-text encoder (MD5 digester):
 * the stored password is encoded, {@code saveUser} does NOT persist a password
 * change, and only {@code updateUserPassword} actually changes it.
 */
public void testChangePassword() {

    IdmEngineConfiguration idmEngineConfiguration = IdmEngines.getDefaultIdmEngine().getIdmEngineConfiguration();
    // Swap in an MD5 digester so stored passwords are no longer clear text.
    idmEngineConfiguration.setPasswordEncoder(new ApacheDigester(ApacheDigester.Digester.MD5));

    try {
        User user = identityService.newUser("johndoe");
        user.setPassword("xxx");
        identityService.saveUser(user);

        user = identityService.createUserQuery().userId("johndoe").list().get(0);
        user.setFirstName("John Doe");
        identityService.saveUser(user);

        User johndoe = identityService.createUserQuery().userId("johndoe").list().get(0);
        // Stored password is the digest, not the raw value...
        assertFalse(johndoe.getPassword().equals("xxx"));
        assertEquals("John Doe", johndoe.getFirstName());
        // ...but checkPassword still validates against the raw value.
        assertTrue(identityService.checkPassword("johndoe", "xxx"));

        user = identityService.createUserQuery().userId("johndoe").list().get(0);
        user.setPassword("yyy");
        identityService.saveUser(user);
        // saveUser must ignore the password change: "xxx" is still the valid password.
        assertTrue(identityService.checkPassword("johndoe", "xxx"));

        user = identityService.createUserQuery().userId("johndoe").list().get(0);
        user.setPassword("yyy");
        identityService.updateUserPassword(user);
        // updateUserPassword is the dedicated API that does persist the new password.
        assertTrue(identityService.checkPassword("johndoe", "yyy"));

        identityService.deleteUser("johndoe");
    } finally {
        // Always restore the default clear-text encoder so other tests are unaffected.
        idmEngineConfiguration.setPasswordEncoder(ClearTextPasswordEncoder.getInstance());
    }
}
/**
 * Two copies of the same user are loaded; after the first copy is saved, saving
 * the now-stale second copy must raise a {@link FlowableOptimisticLockingException}.
 */
public void testUserOptimisticLockingException() {
    User user = identityService.newUser("kermit");
    identityService.saveUser(user);

    // Load two independent snapshots of the same user row.
    User firstCopy = identityService.createUserQuery().singleResult();
    User secondCopy = identityService.createUserQuery().singleResult();

    firstCopy.setFirstName("name one");
    identityService.saveUser(firstCopy);

    try {
        // secondCopy still carries the old revision, so this save must fail.
        secondCopy.setFirstName("name two");
        identityService.saveUser(secondCopy);
        fail("Expected an exception");
    } catch (FlowableOptimisticLockingException expected) {
        // Expected an exception
    }

    identityService.deleteUser(user.getId());
}
/**
 * Two copies of the same group are loaded; after the first copy is saved, saving
 * the now-stale second copy must raise a {@link FlowableOptimisticLockingException}.
 */
public void testGroupOptimisticLockingException() {
    Group group = identityService.newGroup("group");
    identityService.saveGroup(group);

    // Load two independent snapshots of the same group row.
    Group firstCopy = identityService.createGroupQuery().singleResult();
    Group secondCopy = identityService.createGroupQuery().singleResult();

    firstCopy.setName("name one");
    identityService.saveGroup(firstCopy);

    try {
        // secondCopy still carries the old revision, so this save must fail.
        secondCopy.setName("name two");
        identityService.saveGroup(secondCopy);
        fail("Expected an exception");
    } catch (FlowableOptimisticLockingException expected) {
        // Expected an exception
    }

    identityService.deleteGroup(group.getId());
}
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.apple;
import static com.facebook.buck.apple.AppleResources.IS_APPLE_BUNDLE_RESOURCE_NODE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import com.facebook.buck.cxx.CxxDescriptionEnhancer;
import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkable;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.Cell;
import com.facebook.buck.rules.DefaultBuildTargetSourcePath;
import com.facebook.buck.rules.FakeSourcePath;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TargetNode;
import com.facebook.buck.rules.TestBuildRuleCreationContextFactory;
import com.facebook.buck.rules.TestBuildRuleParams;
import com.facebook.buck.rules.TestBuildRuleResolver;
import com.facebook.buck.rules.TestCellBuilder;
import com.facebook.buck.shell.GenruleBuilder;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.testutil.TargetGraphFactory;
import com.facebook.buck.util.types.Either;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import java.util.Collection;
import java.util.Optional;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
/**
 * Tests for AppleBuildRules: Xcode target classification, recursive dependency
 * traversal in its different modes (BUILDING, LINKING, COPYING), and the
 * AppleDependenciesCache.
 *
 * <p>Parameterized on {@code useCache}: every traversal test runs once without a
 * cache and once with an AppleDependenciesCache; with a cache the traversal loop
 * body runs twice so the second pass exercises cache hits.
 */
@RunWith(Parameterized.class)
public class AppleBuildRulesTest {

  /** Runs the whole suite twice: without and with the dependencies cache. */
  @Parameterized.Parameters(name = "useCache: {0}")
  public static Collection<Object[]> data() {
    return ImmutableList.of(new Object[] {false}, new Object[] {true});
  }

  // Injected by the Parameterized runner; controls whether traversal tests pass
  // an AppleDependenciesCache to getRecursiveTargetNodeDependenciesOfTypes.
  @Parameterized.Parameter(0)
  public boolean useCache;

  /** An apple_library target description must be classified as an Xcode target. */
  @Test
  public void testAppleLibraryIsXcodeTargetDescription() {
    Cell rootCell = (new TestCellBuilder()).build();
    BuildTarget libraryTarget = BuildTargetFactory.newInstance(rootCell.getRoot(), "//foo", "lib");
    TargetNode<AppleLibraryDescriptionArg, ?> library =
        AppleLibraryBuilder.createBuilder(libraryTarget).setSrcs(ImmutableSortedSet.of()).build();
    assertTrue(AppleBuildRules.isXcodeTargetDescription(library.getDescription()));
  }

  /** An apple_resource target description must NOT be classified as an Xcode target. */
  @Test
  public void testIosResourceIsNotXcodeTargetDescription() {
    Cell rootCell = (new TestCellBuilder()).build();
    BuildTarget resourceTarget = BuildTargetFactory.newInstance(rootCell.getRoot(), "//foo", "res");
    TargetNode<?, ?> resourceNode =
        AppleResourceBuilder.createBuilder(resourceTarget)
            .setFiles(ImmutableSet.of())
            .setDirs(ImmutableSet.of())
            .build();
    assertFalse(AppleBuildRules.isXcodeTargetDescription(resourceNode.getDescription()));
  }

  /** A build rule created from an apple_test must be recognized as an Xcode test rule. */
  @Test
  public void testAppleTestIsXcodeTargetTestBuildRuleType() {
    BuildTarget target = BuildTargetFactory.newInstance("//foo:xctest#iphoneos-i386");
    // Same target with the sandbox-tree flavor; seeds the resolver's target graph.
    BuildTarget sandboxTarget =
        BuildTargetFactory.newInstance("//foo:xctest#iphoneos-i386")
            .withFlavors(CxxDescriptionEnhancer.SANDBOX_TREE_FLAVOR);
    BuildRuleResolver resolver =
        new TestBuildRuleResolver(
            TargetGraphFactory.newInstance(
                new AppleTestBuilder(sandboxTarget)
                    .setInfoPlist(FakeSourcePath.of("Info.plist"))
                    .build()));
    AppleTestBuilder appleTestBuilder =
        new AppleTestBuilder(target)
            .setContacts(ImmutableSortedSet.of())
            .setLabels(ImmutableSortedSet.of())
            .setDeps(ImmutableSortedSet.of())
            .setInfoPlist(FakeSourcePath.of("Info.plist"));

    TargetNode<?, ?> appleTestNode = appleTestBuilder.build();
    TargetGraph targetGraph = TargetGraphFactory.newInstance(ImmutableSet.of(appleTestNode));

    BuildRule testRule = appleTestBuilder.build(resolver, new FakeProjectFilesystem(), targetGraph);
    assertTrue(AppleBuildRules.isXcodeTargetTestBuildRule(testRule));
  }

  /** A plain apple_library build rule is not an Xcode test rule. */
  @Test
  public void testAppleLibraryIsNotXcodeTargetTestBuildRuleType() {
    ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    BuildTarget buildTarget = BuildTargetFactory.newInstance("//foo:lib");
    BuildRuleParams params = TestBuildRuleParams.create();
    BuildRuleResolver buildRuleResolver = new TestBuildRuleResolver();
    BuildRule libraryRule =
        FakeAppleRuleDescriptions.LIBRARY_DESCRIPTION.createBuildRule(
            TestBuildRuleCreationContextFactory.create(buildRuleResolver, projectFilesystem),
            buildTarget,
            params,
            AppleLibraryDescriptionArg.builder().setName("lib").build());

    assertFalse(AppleBuildRules.isXcodeTargetTestBuildRule(libraryRule));
  }

  /** The .xctest bundle extension identifies a test bundle. */
  @Test
  public void testXctestIsTestBundleExtension() {
    assertTrue(AppleBuildRules.isXcodeTargetTestBundleExtension(AppleBundleExtension.XCTEST));
  }

  /**
   * BUILDING traversal from a root that depends on both a library and a bundle
   * wrapping that library must include both direct deps.
   */
  @Test
  public void testRecursiveTargetsIncludesBundleBinaryFromOutsideBundle() {
    BuildTarget libraryTarget = BuildTargetFactory.newInstance("//foo:lib");
    TargetNode<?, ?> libraryNode = AppleLibraryBuilder.createBuilder(libraryTarget).build();
    BuildTarget bundleTarget = BuildTargetFactory.newInstance("//foo:bundle");
    TargetNode<?, ?> bundleNode =
        AppleBundleBuilder.createBuilder(bundleTarget)
            .setExtension(Either.ofLeft(AppleBundleExtension.XCTEST))
            .setBinary(libraryTarget)
            .setInfoPlist(FakeSourcePath.of("Info.plist"))
            .build();
    BuildTarget rootTarget = BuildTargetFactory.newInstance("//foo:root");
    TargetNode<?, ?> rootNode =
        AppleLibraryBuilder.createBuilder(rootTarget)
            .setDeps(ImmutableSortedSet.of(libraryTarget, bundleTarget))
            .build();

    TargetGraph targetGraph =
        TargetGraphFactory.newInstance(ImmutableSet.of(libraryNode, bundleNode, rootNode));

    Optional<AppleDependenciesCache> cache =
        useCache ? Optional.of(new AppleDependenciesCache(targetGraph)) : Optional.empty();
    // With a cache, iterate twice so the second pass hits cached results.
    for (int i = 0; i < (useCache ? 2 : 1); i++) {
      Iterable<TargetNode<?, ?>> rules =
          AppleBuildRules.getRecursiveTargetNodeDependenciesOfTypes(
              targetGraph,
              cache,
              AppleBuildRules.RecursiveDependenciesMode.BUILDING,
              rootNode,
              Optional.empty());

      assertTrue(Iterables.elementsEqual(ImmutableSortedSet.of(libraryNode, bundleNode), rules));
    }
  }

  /**
   * LINKING traversal must follow a shared library's exported deps through its
   * wrapping framework: root -> bar framework -> (exported) foo framework.
   */
  @Test
  public void exportedDepsOfDylibsAreCollectedForLinking() {
    BuildTarget fooLibTarget =
        BuildTargetFactory.newInstance("//foo:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    TargetNode<?, ?> fooLibNode = AppleLibraryBuilder.createBuilder(fooLibTarget).build();

    BuildTarget fooFrameworkTarget = BuildTargetFactory.newInstance("//foo:framework");
    TargetNode<?, ?> fooFrameworkNode =
        AppleBundleBuilder.createBuilder(fooFrameworkTarget)
            .setExtension(Either.ofLeft(AppleBundleExtension.FRAMEWORK))
            .setBinary(fooLibTarget)
            .setInfoPlist(FakeSourcePath.of("Info.plist"))
            .build();

    BuildTarget barLibTarget =
        BuildTargetFactory.newInstance("//bar:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    // bar's library both depends on and re-exports the foo framework.
    TargetNode<?, ?> barLibNode =
        AppleLibraryBuilder.createBuilder(barLibTarget)
            .setDeps(ImmutableSortedSet.of(fooFrameworkTarget))
            .setExportedDeps(ImmutableSortedSet.of(fooFrameworkTarget))
            .build();

    BuildTarget barFrameworkTarget = BuildTargetFactory.newInstance("//bar:framework");
    TargetNode<?, ?> barFrameworkNode =
        AppleBundleBuilder.createBuilder(barFrameworkTarget)
            .setExtension(Either.ofLeft(AppleBundleExtension.FRAMEWORK))
            .setBinary(barLibTarget)
            .setInfoPlist(FakeSourcePath.of("Info.plist"))
            .build();

    BuildTarget rootTarget = BuildTargetFactory.newInstance("//foo:root");
    TargetNode<?, ?> rootNode =
        AppleLibraryBuilder.createBuilder(rootTarget)
            .setDeps(ImmutableSortedSet.of(barFrameworkTarget))
            .build();

    TargetGraph targetGraph =
        TargetGraphFactory.newInstance(
            ImmutableSet.of(rootNode, fooLibNode, fooFrameworkNode, barLibNode, barFrameworkNode));

    Optional<AppleDependenciesCache> cache =
        useCache ? Optional.of(new AppleDependenciesCache(targetGraph)) : Optional.empty();
    for (int i = 0; i < (useCache ? 2 : 1); i++) {
      Iterable<TargetNode<?, ?>> rules =
          AppleBuildRules.getRecursiveTargetNodeDependenciesOfTypes(
              targetGraph,
              cache,
              AppleBuildRules.RecursiveDependenciesMode.LINKING,
              rootNode,
              Optional.empty());

      // Both frameworks are linked against, thanks to the exported dep.
      assertEquals(
          ImmutableSortedSet.of(barFrameworkNode, fooFrameworkNode),
          ImmutableSortedSet.copyOf(rules));
    }
  }

  /**
   * COPYING traversal: resources reachable only through a dylib/framework belong
   * to that framework's bundle, not to the app bundle that links the dylib.
   * Resources behind a static-linked library DO propagate to the app bundle.
   */
  @Test
  public void resourceDepsOfDylibsAreNotIncludedInMainBundle() {
    BuildTarget sharedResourceTarget = BuildTargetFactory.newInstance("//shared:resource");
    TargetNode<?, ?> sharedResourceNode =
        AppleResourceBuilder.createBuilder(sharedResourceTarget).build();

    // no preferredLinkage, shared flavor
    BuildTarget fooLibTarget =
        BuildTargetFactory.newInstance("//foo:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    TargetNode<?, ?> fooLibNode =
        AppleLibraryBuilder.createBuilder(fooLibTarget)
            .setDeps(ImmutableSortedSet.of(sharedResourceTarget))
            .build();

    // shared preferredLinkage, no flavor
    BuildTarget foo2LibTarget = BuildTargetFactory.newInstance("//foo2:lib");
    TargetNode<?, ?> foo2LibNode =
        AppleLibraryBuilder.createBuilder(foo2LibTarget)
            .setDeps(ImmutableSortedSet.of(sharedResourceTarget))
            .setPreferredLinkage(NativeLinkable.Linkage.SHARED)
            .build();

    BuildTarget fooFrameworkTarget = BuildTargetFactory.newInstance("//foo:framework#default");
    TargetNode<?, ?> fooFrameworkNode =
        AppleBundleBuilder.createBuilder(fooFrameworkTarget)
            .setExtension(Either.ofLeft(AppleBundleExtension.FRAMEWORK))
            .setBinary(fooLibTarget)
            .setInfoPlist(FakeSourcePath.of("Info.plist"))
            .build();

    // shared preferredLinkage overriden by static flavor should still propagate dependencies.
    BuildTarget staticResourceTarget = BuildTargetFactory.newInstance("//static:resource");
    TargetNode<?, ?> staticResourceNode =
        AppleResourceBuilder.createBuilder(staticResourceTarget).build();

    BuildTarget bazLibTarget =
        BuildTargetFactory.newInstance("//baz:lib#" + CxxDescriptionEnhancer.STATIC_FLAVOR);
    TargetNode<?, ?> bazLibNode =
        AppleLibraryBuilder.createBuilder(bazLibTarget)
            .setDeps(ImmutableSortedSet.of(staticResourceTarget))
            .setPreferredLinkage(NativeLinkable.Linkage.SHARED)
            .build();

    BuildTarget barBinaryTarget = BuildTargetFactory.newInstance("//bar:binary");
    TargetNode<?, ?> barBinaryNode =
        AppleBinaryBuilder.createBuilder(barBinaryTarget)
            .setDeps(ImmutableSortedSet.of(fooLibTarget, foo2LibTarget, bazLibTarget))
            .build();

    BuildTarget barAppTarget = BuildTargetFactory.newInstance("//bar:app");
    TargetNode<?, ?> barAppNode =
        AppleBundleBuilder.createBuilder(barAppTarget)
            .setExtension(Either.ofLeft(AppleBundleExtension.APP))
            .setBinary(barBinaryTarget)
            .setDeps(ImmutableSortedSet.of(fooFrameworkTarget))
            .setInfoPlist(FakeSourcePath.of("Info.plist"))
            .build();

    ImmutableSet<TargetNode<?, ?>> targetNodes =
        ImmutableSet.<TargetNode<?, ?>>builder()
            .add(
                sharedResourceNode,
                staticResourceNode,
                fooLibNode,
                foo2LibNode,
                bazLibNode,
                fooFrameworkNode,
                barBinaryNode,
                barAppNode)
            .build();

    TargetGraph targetGraph = TargetGraphFactory.newInstance(targetNodes);

    Optional<AppleDependenciesCache> cache =
        useCache ? Optional.of(new AppleDependenciesCache(targetGraph)) : Optional.empty();
    for (int i = 0; i < (useCache ? 2 : 1); i++) {
      // The framework's own bundle picks up the resource behind its dylib...
      Iterable<TargetNode<?, ?>> rules =
          AppleBuildRules.getRecursiveTargetNodeDependenciesOfTypes(
              targetGraph,
              cache,
              AppleBuildRules.RecursiveDependenciesMode.COPYING,
              fooFrameworkNode,
              IS_APPLE_BUNDLE_RESOURCE_NODE);

      assertEquals(ImmutableSortedSet.of(sharedResourceNode), ImmutableSortedSet.copyOf(rules));

      // ...while the app bundle only picks up the statically-linked resource.
      rules =
          AppleBuildRules.getRecursiveTargetNodeDependenciesOfTypes(
              targetGraph,
              cache,
              AppleBuildRules.RecursiveDependenciesMode.COPYING,
              barAppNode,
              IS_APPLE_BUNDLE_RESOURCE_NODE);

      assertEquals(ImmutableSortedSet.of(staticResourceNode), ImmutableSortedSet.copyOf(rules));
    }
  }

  /**
   * COPYING traversal from a framework must collect frameworks re-exported by
   * intermediate shared libraries (baz -> bar -> exported foo).
   */
  @Test
  public void exportedDepsAreCollectedForCopying() {
    BuildTarget fooLibTarget =
        BuildTargetFactory.newInstance("//foo:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    TargetNode<?, ?> fooLibNode = AppleLibraryBuilder.createBuilder(fooLibTarget).build();

    BuildTarget fooFrameworkTarget = BuildTargetFactory.newInstance("//foo:framework");
    TargetNode<?, ?> fooFrameworkNode =
        AppleBundleBuilder.createBuilder(fooFrameworkTarget)
            .setExtension(Either.ofLeft(AppleBundleExtension.FRAMEWORK))
            .setBinary(fooLibTarget)
            .setInfoPlist(FakeSourcePath.of("Info.plist"))
            .build();

    BuildTarget barLibTarget =
        BuildTargetFactory.newInstance("//bar:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    // bar's library re-exports the foo framework.
    TargetNode<?, ?> barLibNode =
        AppleLibraryBuilder.createBuilder(barLibTarget)
            .setDeps(ImmutableSortedSet.of(fooFrameworkTarget))
            .setExportedDeps(ImmutableSortedSet.of(fooFrameworkTarget))
            .build();

    BuildTarget barFrameworkTarget = BuildTargetFactory.newInstance("//bar:framework");
    TargetNode<?, ?> barFrameworkNode =
        AppleBundleBuilder.createBuilder(barFrameworkTarget)
            .setExtension(Either.ofLeft(AppleBundleExtension.FRAMEWORK))
            .setBinary(barLibTarget)
            .setInfoPlist(FakeSourcePath.of("Info.plist"))
            .build();

    BuildTarget bazLibTarget =
        BuildTargetFactory.newInstance("//baz:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    TargetNode<?, ?> bazLibNode =
        AppleLibraryBuilder.createBuilder(bazLibTarget)
            .setDeps(ImmutableSortedSet.of(barFrameworkTarget))
            .build();

    BuildTarget bazFrameworkTarget = BuildTargetFactory.newInstance("//baz:framework");
    TargetNode<?, ?> bazFrameworkNode =
        AppleBundleBuilder.createBuilder(bazFrameworkTarget)
            .setExtension(Either.ofLeft(AppleBundleExtension.FRAMEWORK))
            .setBinary(bazLibTarget)
            .setInfoPlist(FakeSourcePath.of("Info.plist"))
            .build();

    ImmutableSet<TargetNode<?, ?>> targetNodes =
        ImmutableSet.<TargetNode<?, ?>>builder()
            .add(
                fooLibNode,
                fooFrameworkNode,
                barLibNode,
                barFrameworkNode,
                bazLibNode,
                bazFrameworkNode)
            .build();

    TargetGraph targetGraph = TargetGraphFactory.newInstance(targetNodes);

    Optional<AppleDependenciesCache> cache =
        useCache ? Optional.of(new AppleDependenciesCache(targetGraph)) : Optional.empty();
    for (int i = 0; i < (useCache ? 2 : 1); i++) {
      Iterable<TargetNode<?, ?>> rules =
          AppleBuildRules.getRecursiveTargetNodeDependenciesOfTypes(
              targetGraph,
              cache,
              AppleBuildRules.RecursiveDependenciesMode.COPYING,
              bazFrameworkNode,
              Optional.empty());

      assertEquals(
          ImmutableSortedSet.of(barFrameworkNode, fooFrameworkNode),
          ImmutableSortedSet.copyOf(rules));
    }
  }

  /** LINKING traversal must stop at a genrule dep and not pull in its sources. */
  @Test
  public void linkingStopsAtGenruleDep() {
    // Pass a random static lib in a genrule and make sure a framework
    // depending on the genrule doesn't link against or copy in the static lib.
    BuildTarget fooLibTarget = BuildTargetFactory.newInstance("//foo:lib");
    TargetNode<?, ?> fooLibNode = AppleLibraryBuilder.createBuilder(fooLibTarget).build();

    BuildTarget fooGenruleTarget = BuildTargetFactory.newInstance("//foo:genrule");
    TargetNode<?, ?> fooGenruleNode =
        GenruleBuilder.newGenruleBuilder(fooGenruleTarget)
            .setOut("foo")
            .setCmd("echo hi > $OUT")
            .setSrcs(ImmutableList.of(DefaultBuildTargetSourcePath.of(fooLibTarget)))
            .build();

    BuildTarget barLibTarget =
        BuildTargetFactory.newInstance("//bar:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    TargetNode<?, ?> barLibNode =
        AppleLibraryBuilder.createBuilder(barLibTarget)
            .setDeps(ImmutableSortedSet.of(fooGenruleTarget))
            .build();

    BuildTarget barFrameworkTarget = BuildTargetFactory.newInstance("//bar:framework");
    TargetNode<?, ?> barFrameworkNode =
        AppleBundleBuilder.createBuilder(barFrameworkTarget)
            .setExtension(Either.ofLeft(AppleBundleExtension.FRAMEWORK))
            .setBinary(barLibTarget)
            .setInfoPlist(FakeSourcePath.of("Info.plist"))
            .build();

    ImmutableSet<TargetNode<?, ?>> targetNodes =
        ImmutableSet.<TargetNode<?, ?>>builder()
            .add(fooLibNode, fooGenruleNode, barLibNode, barFrameworkNode)
            .build();

    TargetGraph targetGraph = TargetGraphFactory.newInstance(targetNodes);

    Optional<AppleDependenciesCache> cache =
        useCache ? Optional.of(new AppleDependenciesCache(targetGraph)) : Optional.empty();
    for (int i = 0; i < (useCache ? 2 : 1); i++) {
      Iterable<TargetNode<?, ?>> rules =
          AppleBuildRules.getRecursiveTargetNodeDependenciesOfTypes(
              targetGraph,
              cache,
              AppleBuildRules.RecursiveDependenciesMode.LINKING,
              barFrameworkNode,
              Optional.empty());

      // The genrule itself is a dep, but its source lib is not traversed.
      assertEquals(ImmutableSortedSet.of(fooGenruleNode), ImmutableSortedSet.copyOf(rules));
    }
  }

  /** COPYING traversal must stop at a genrule dep and not pull in its sources. */
  @Test
  public void copyingStopsAtGenruleDep() {
    // Pass a random static lib in a genrule and make sure a framework
    // depending on the genrule doesn't link against or copy in the static lib.
    BuildTarget fooLibTarget = BuildTargetFactory.newInstance("//foo:lib");
    TargetNode<?, ?> fooLibNode = AppleLibraryBuilder.createBuilder(fooLibTarget).build();

    BuildTarget fooGenruleTarget = BuildTargetFactory.newInstance("//foo:genrule");
    TargetNode<?, ?> fooGenruleNode =
        GenruleBuilder.newGenruleBuilder(fooGenruleTarget)
            .setOut("foo")
            .setCmd("echo hi > $OUT")
            .setSrcs(ImmutableList.of(DefaultBuildTargetSourcePath.of(fooLibTarget)))
            .build();

    BuildTarget barLibTarget =
        BuildTargetFactory.newInstance("//bar:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    TargetNode<?, ?> barLibNode =
        AppleLibraryBuilder.createBuilder(barLibTarget)
            .setDeps(ImmutableSortedSet.of(fooGenruleTarget))
            .build();

    BuildTarget barFrameworkTarget = BuildTargetFactory.newInstance("//bar:framework");
    TargetNode<?, ?> barFrameworkNode =
        AppleBundleBuilder.createBuilder(barFrameworkTarget)
            .setExtension(Either.ofLeft(AppleBundleExtension.FRAMEWORK))
            .setBinary(barLibTarget)
            .setInfoPlist(FakeSourcePath.of("Info.plist"))
            .build();

    ImmutableSet<TargetNode<?, ?>> targetNodes =
        ImmutableSet.<TargetNode<?, ?>>builder()
            .add(fooLibNode, fooGenruleNode, barLibNode, barFrameworkNode)
            .build();

    TargetGraph targetGraph = TargetGraphFactory.newInstance(targetNodes);

    Optional<AppleDependenciesCache> cache =
        useCache ? Optional.of(new AppleDependenciesCache(targetGraph)) : Optional.empty();
    for (int i = 0; i < (useCache ? 2 : 1); i++) {
      Iterable<TargetNode<?, ?>> rules =
          AppleBuildRules.getRecursiveTargetNodeDependenciesOfTypes(
              targetGraph,
              cache,
              AppleBuildRules.RecursiveDependenciesMode.COPYING,
              barFrameworkNode,
              Optional.empty());

      // The genrule itself is a dep, but its source lib is not traversed.
      assertEquals(ImmutableSortedSet.of(fooGenruleNode), ImmutableSortedSet.copyOf(rules));
    }
  }

  /**
   * BUILDING traversal must stop at a genrule dep but still traverse through
   * bundle deps (the bar framework and the genrule are reachable; the genrule's
   * source lib is not).
   */
  @Test
  public void buildingStopsAtGenruleDepButNotAtBundleDep() {
    // Pass a random static lib in a genrule and make sure a framework
    // depending on the genrule doesn't build the dependencies of that genrule.
    BuildTarget fooLibTarget = BuildTargetFactory.newInstance("//foo:lib");
    TargetNode<?, ?> fooLibNode = AppleLibraryBuilder.createBuilder(fooLibTarget).build();

    BuildTarget fooGenruleTarget = BuildTargetFactory.newInstance("//foo:genrule");
    TargetNode<?, ?> fooGenruleNode =
        GenruleBuilder.newGenruleBuilder(fooGenruleTarget)
            .setOut("foo")
            .setCmd("echo hi > $OUT")
            .setSrcs(ImmutableList.of(DefaultBuildTargetSourcePath.of(fooLibTarget)))
            .build();

    BuildTarget barLibTarget =
        BuildTargetFactory.newInstance("//bar:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    TargetNode<?, ?> barLibNode =
        AppleLibraryBuilder.createBuilder(barLibTarget)
            .setDeps(ImmutableSortedSet.of(fooGenruleTarget))
            .build();

    BuildTarget barFrameworkTarget = BuildTargetFactory.newInstance("//bar:framework");
    TargetNode<?, ?> barFrameworkNode =
        AppleBundleBuilder.createBuilder(barFrameworkTarget)
            .setExtension(Either.ofLeft(AppleBundleExtension.FRAMEWORK))
            .setBinary(barLibTarget)
            .setInfoPlist(FakeSourcePath.of("Info.plist"))
            .build();

    BuildTarget bazLibTarget =
        BuildTargetFactory.newInstance("//baz:lib#" + CxxDescriptionEnhancer.SHARED_FLAVOR);
    TargetNode<?, ?> bazLibNode =
        AppleLibraryBuilder.createBuilder(bazLibTarget)
            .setDeps(ImmutableSortedSet.of(barFrameworkTarget))
            .build();

    BuildTarget bazFrameworkTarget = BuildTargetFactory.newInstance("//baz:framework");
    TargetNode<?, ?> bazFrameworkNode =
        AppleBundleBuilder.createBuilder(bazFrameworkTarget)
            .setExtension(Either.ofLeft(AppleBundleExtension.FRAMEWORK))
            .setBinary(bazLibTarget)
            .setInfoPlist(FakeSourcePath.of("Info.plist"))
            .build();

    ImmutableSet<TargetNode<?, ?>> targetNodes =
        ImmutableSet.<TargetNode<?, ?>>builder()
            .add(
                fooLibNode,
                fooGenruleNode,
                barLibNode,
                barFrameworkNode,
                bazLibNode,
                bazFrameworkNode)
            .build();

    TargetGraph targetGraph = TargetGraphFactory.newInstance(targetNodes);

    Optional<AppleDependenciesCache> cache =
        useCache ? Optional.of(new AppleDependenciesCache(targetGraph)) : Optional.empty();
    for (int i = 0; i < (useCache ? 2 : 1); i++) {
      Iterable<TargetNode<?, ?>> rules =
          AppleBuildRules.getRecursiveTargetNodeDependenciesOfTypes(
              targetGraph,
              cache,
              AppleBuildRules.RecursiveDependenciesMode.BUILDING,
              bazFrameworkNode,
              Optional.empty());

      assertEquals(
          ImmutableSortedSet.of(barFrameworkNode, fooGenruleNode),
          ImmutableSortedSet.copyOf(rules));
    }
  }

  /**
   * AppleDependenciesCache must return correct default/exported dep sets and
   * memoize them: a second lookup returns the same instances.
   */
  @Test
  public void testDependenciesCache() {
    BuildTarget libraryTarget = BuildTargetFactory.newInstance("//foo:lib");
    TargetNode<?, ?> libraryNode = AppleLibraryBuilder.createBuilder(libraryTarget).build();
    BuildTarget bundleTarget = BuildTargetFactory.newInstance("//foo:bundle");
    TargetNode<?, ?> bundleNode =
        AppleBundleBuilder.createBuilder(bundleTarget)
            .setExtension(Either.ofLeft(AppleBundleExtension.XCTEST))
            .setBinary(libraryTarget)
            .setInfoPlist(FakeSourcePath.of("Info.plist"))
            .build();
    BuildTarget rootTarget = BuildTargetFactory.newInstance("//foo:root");
    TargetNode<?, ?> rootNode =
        AppleLibraryBuilder.createBuilder(rootTarget)
            .setDeps(ImmutableSortedSet.of(libraryTarget, bundleTarget))
            .build();

    TargetGraph targetGraph =
        TargetGraphFactory.newInstance(ImmutableSet.of(libraryNode, bundleNode, rootNode));

    AppleDependenciesCache cache = new AppleDependenciesCache(targetGraph);
    ImmutableSortedSet<TargetNode<?, ?>> cachedDefaultDeps = cache.getDefaultDeps(rootNode);
    ImmutableSortedSet<TargetNode<?, ?>> cachedExportedDeps = cache.getExportedDeps(rootNode);
    assertEquals(cachedDefaultDeps, ImmutableSortedSet.of(bundleNode, libraryNode));
    assertEquals(cachedExportedDeps, ImmutableSortedSet.of());

    // Second lookup must hit the cache: identical (same-instance) results.
    ImmutableSortedSet<TargetNode<?, ?>> defaultDeps = cache.getDefaultDeps(rootNode);
    ImmutableSortedSet<TargetNode<?, ?>> exportedDeps = cache.getExportedDeps(rootNode);
    assertSame(cachedDefaultDeps, defaultDeps);
    assertSame(cachedExportedDeps, exportedDeps);
  }
}
| |
/**
* Copyright 2007-2008 University Of Southern California
*
* <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.isi.pegasus.planner.selector.replica;
import edu.isi.pegasus.common.logging.LogManager;
import edu.isi.pegasus.common.logging.LogManagerFactory;
import edu.isi.pegasus.common.util.PegasusURL;
import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry;
import edu.isi.pegasus.planner.classes.ReplicaLocation;
import edu.isi.pegasus.planner.common.PegRandom;
import edu.isi.pegasus.planner.common.PegasusProperties;
import edu.isi.pegasus.planner.selector.ReplicaSelector;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
/**
 * The default replica selector that is used if none is specified by the user. It gives preference
 * to a replica residing on the same site as the site to which the file needs to be staged. If
 * there is no such replica, then a random replica is selected.
*
* <p>In order to use the replica selector implemented by this class,
*
* <pre>
* - the property pegasus.selector.replica must be set to value Default, or
* the property should be left undefined in the properties.
* </pre>
*
* @author Karan Vahi
* @version $Revision$
*/
public class Default implements ReplicaSelector {
/** A short description of the replica selector. */
private static String mDescription = "Default";
/** The handle to the logging object that is used to log the various debug messages. */
protected LogManager mLogger;
/** The properties object containing the properties passed to the planner. */
protected PegasusProperties mProps;
// priority values for different types of URL sets
private static final String FILE_URLS_PRIORITY_KEY = "100";
private static final String PREFERRED_SITE_PRIORITY_KEY = "50";
private static final String NON_PREFERRED_SITE_PRIORITY_KEY = "10";
/**
* The overloaded constructor, that is called by load method.
*
* @param properties the <code>PegasusProperties</code> object containing all the properties
* required by Pegasus.
*/
/**
 * The overloaded constructor, invoked by the selector factory's load method.
 *
 * @param properties the <code>PegasusProperties</code> object containing all the properties
 *     required by Pegasus.
 */
public Default(PegasusProperties properties) {
    // The two initializations are independent of each other.
    mLogger = LogManagerFactory.loadSingletonInstance(properties);
    mProps = properties;
}
/**
* This chooses a location amongst all the locations returned by the replica location service.
 * If a location is found whose resource handle attribute matches the preferred site, it is taken. Else a
* random location is selected and returned. If more than one location for the lfn is found at
* the preference pool, then also a random location amongst the ones at the preference pool is
* selected.
*
* @param candidates the <code>ReplicaLocation</code> object containing all the pfn's associated
* with that LFN.
 * @param preferredSite the preferred site for picking up the replicas.
* @param allowLocalFileURLs indicates whether Replica Selector can select a replica on the
* local site / submit host.
* @return <code>ReplicaCatalogEntry</code> corresponding to the location selected.
*/
/**
 * Chooses one replica amongst all the candidate locations.
 *
 * <p>Selection policy (matching the original behavior exactly):
 * a file URL on the preferred site is returned immediately; otherwise a random
 * replica on the preferred site is picked; otherwise a random replica amongst
 * the remaining usable locations is picked. Disallowed file URLs are dropped
 * from consideration.
 *
 * <p>Fixes over the previous revision: the raw {@code ArrayList} is now a typed
 * {@code List<ReplicaCatalogEntry>} (no unchecked warnings), the local
 * {@code StringBuffer} is replaced by {@code StringBuilder} (no synchronization
 * needed for a method-local builder), and dead commented-out code is removed.
 *
 * @param candidates the <code>ReplicaLocation</code> object containing all the pfn's associated
 *     with that LFN.
 * @param preferredSite the preferred site for picking up the replicas.
 * @param allowLocalFileURLs indicates whether Replica Selector can select a replica on the
 *     local site / submit host.
 * @return <code>ReplicaCatalogEntry</code> corresponding to the location selected.
 * @throws RuntimeException if no usable replica remains after filtering.
 */
public ReplicaCatalogEntry selectReplica(
        ReplicaLocation candidates, String preferredSite, boolean allowLocalFileURLs) {

    // Replicas that reside on the preferred site.
    List<ReplicaCatalogEntry> prefPFNs = new ArrayList<ReplicaCatalogEntry>();

    // Work on a shallow clone, since unusable file URLs are removed below via
    // Iterator.remove() and the caller's candidate list must stay intact.
    ReplicaLocation rl = (ReplicaLocation) candidates.clone();

    mLogger.log(
            "Selecting a pfn for lfn " + rl.getLFN() + "\n amongst" + rl.getPFNList(),
            LogManager.DEBUG_MESSAGE_LEVEL);

    for (Iterator it = rl.pfnIterator(); it.hasNext(); ) {
        ReplicaCatalogEntry rce = (ReplicaCatalogEntry) it.next();
        String site = rce.getResourceHandle();

        // check if equal to the execution pool
        if (site != null && site.equals(preferredSite)) {
            prefPFNs.add(rce);

            // return the one with file url for ligo stuff
            // is temporary till new api coded
            if (rce.getPFN().startsWith(PegasusURL.FILE_URL_SCHEME)) {
                // best possible choice: a file URL on the preferred site
                return rce;
            }
        }
        // check if we need to remove a file url or not
        else if (removeFileURL(rce, preferredSite, allowLocalFileURLs)) {
            it.remove();
        }
    }

    int noOfLocs = rl.getPFNCount();
    if (noOfLocs == 0) {
        // in all likelihood all the urls were file urls and none
        // were associated with the preference site.
        StringBuilder error = new StringBuilder();
        error.append(
                        "Unable to select a Physical Filename (PFN) for file with logical filename (LFN) as ")
                .append(rl.getLFN())
                .append(" for staging to site ")
                .append(preferredSite)
                .append(" amongst ")
                .append(candidates.getPFNList());
        throw new RuntimeException(error.toString());
    }

    ReplicaCatalogEntry rce;
    int locSelected;
    if (prefPFNs.isEmpty()) {
        // select a random location from all the matching locations
        locSelected = PegRandom.getInteger(noOfLocs - 1);
        rce = (ReplicaCatalogEntry) rl.getPFN(locSelected);
    } else {
        // select a random location amongst the ones on the preferred site
        int length = prefPFNs.size();
        locSelected = PegRandom.getInteger(length - 1);
        rce = prefPFNs.get(locSelected);
    }

    return rce;
}
/**
* This orders all valid location amongst all the locations returned by the Replica Mechanism.
* The following ordering mechanism is employed
*
* <p>- valid file URL's - all URL's from preferred site - all other URL's
*
* @param rl the <code>ReplicaLocation</code> object containing all the pfn's associated with
* that LFN.
 * @param preferredSite the preferred site for picking up the replicas.
* @param allowLocalFileURLs indicates whether Replica Selector can select a replica on the
* local site / submit host.
* @return <code>ReplicaLocation</code> corresponding to the replicas selected
*/
/**
 * Orders all valid locations returned by the Replica Mechanism by priority:
 * file URLs first (highest), then replicas on the preferred site, then all
 * other replicas. Disallowed file URLs are skipped (with a warning), and
 * replicas without a site attribute are logged and dropped.
 *
 * <p>Fixes over the previous revision: removed the unused local {@code lfn},
 * typed the previously-raw {@code new LinkedList()} allocations, removed the
 * redundant cast on an already-typed iterator, and corrected the misspelled
 * local variable name.
 *
 * @param rl the <code>ReplicaLocation</code> object containing all the pfn's associated with
 *     that LFN.
 * @param preferredSite the preferred site for picking up the replicas.
 * @param allowLocalFileURLs indicates whether Replica Selector can select a replica on the
 *     local site / submit host.
 * @return <code>ReplicaLocation</code> corresponding to the replicas selected
 */
public ReplicaLocation selectAndOrderReplicas(
        ReplicaLocation rl, String preferredSite, boolean allowLocalFileURLs) {

    ReplicaLocation result = new ReplicaLocation();
    result.setLFN(rl.getLFN());

    // Buckets for the two lower-priority groups; file URLs are appended to
    // the result directly so they come first.
    List<ReplicaCatalogEntry> preferredSiteReplicas = new LinkedList<ReplicaCatalogEntry>();
    List<ReplicaCatalogEntry> nonPreferredSiteReplicas = new LinkedList<ReplicaCatalogEntry>();

    for (Iterator<ReplicaCatalogEntry> it = rl.pfnIterator(); it.hasNext(); ) {
        ReplicaCatalogEntry rce = it.next();
        String site = rce.getResourceHandle();

        // check if a File URL is allowable or not
        if (removeFileURL(rce, preferredSite, allowLocalFileURLs)) {
            this.warnForFileURL(rce, preferredSite, allowLocalFileURLs);
            continue;
        }

        if (rce.getPFN().startsWith(PegasusURL.FILE_URL_SCHEME)) {
            // file URL's have highest priority
            rce.addAttribute(ReplicaSelector.PRIORITY_KEY, FILE_URLS_PRIORITY_KEY);
            result.addPFN(rce);
        } else if (site != null && site.equals(preferredSite)) {
            rce.addAttribute(ReplicaSelector.PRIORITY_KEY, PREFERRED_SITE_PRIORITY_KEY);
            preferredSiteReplicas.add(rce);
        } else if (site == null) {
            // cannot rank a replica whose site is unknown; warn and drop it
            mLogger.log(
                    " site attribute not specified for the location objects"
                            + " in the Replica Catalog",
                    LogManager.WARNING_MESSAGE_LEVEL);
        } else {
            rce.addAttribute(ReplicaSelector.PRIORITY_KEY, NON_PREFERRED_SITE_PRIORITY_KEY);
            nonPreferredSiteReplicas.add(rce);
        }
    }

    // add the preferred and then the non preferred replicas, in that order
    for (ReplicaCatalogEntry replica : preferredSiteReplicas) {
        result.addPFN(replica);
    }
    for (ReplicaCatalogEntry replica : nonPreferredSiteReplicas) {
        result.addPFN(replica);
    }

    return result;
}
/**
 * A convenience function that determines whether we should be removing a file URL from replica
 * selection or not. The file urls make sense only
 *
 * <pre>
 *  - if associated with the preference site or
 *  - if local File URL are allowed and rce is associated
 *    with local site
 * </pre>
 *
 * @param rce the ReplicaCatalogEntry object.
 * @param preferredSite the preferred site.
 * @param allowLocalFileURLs indicates whether Replica Selector can select a replica on the
 *     local site / submit host.
 * @return boolean
 */
public boolean removeFileURL(
        ReplicaCatalogEntry rce, String preferredSite, boolean allowLocalFileURLs) {
    // unpack the catalog entry and delegate to the string based variant
    String pfn = rce.getPFN();
    String site = rce.getResourceHandle();
    return this.removeFileURL(pfn, site, preferredSite, allowLocalFileURLs);
}
/**
 * A convenience function that determines whether we should be removing a file URL from replica
 * selection or not. The file urls make sense only
 *
 * <pre>
 *  - if associated with the preference site or
 *  - if local File URL are allowed and rce is associated
 *    with local site
 * </pre>
 *
 * @param pfn the file url
 * @param site the site associated with the pfn.
 * @param preferredSite the preferred site.
 * @param allowLocalFileURLs indicates whether Replica Selector can select a replica on the
 *     local site / submit host.
 * @return boolean
 */
protected boolean removeFileURL(
        String pfn, String site, String preferredSite, boolean allowLocalFileURLs) {
    if (!pfn.startsWith(PegasusURL.FILE_URL_SCHEME)) {
        // not a file url. dont remove
        return false;
    }
    if (site == null) {
        // no site information available - nothing can be done, remove the url
        return true;
    }
    if (site.equalsIgnoreCase(preferredSite)) {
        // a file URL on the preferred site is usable - keep it
        return false;
    }
    // the URL is not from the preferred site. we can still use it if local
    // file urls are allowed and the url is from the local site.
    // NOTE(review): the preferred-site comparison above is case insensitive
    // while the local-site comparison here is case sensitive - confirm intended.
    return !(allowLocalFileURLs && site.equals(LOCAL_SITE_HANDLE));
}
/**
 * Returns a short description of the replica selector.
 *
 * @return string corresponding to the description.
 */
public String description() {
    // mDescription is a field declared elsewhere in this class;
    // presumably set at construction time - confirm in the full source
    return mDescription;
}
/**
 * Warn for a file URL not being selected because of mismatch in site attributes.
 *
 * @param rce ReplicaCatalogEntry
 * @param destinationSite destinationSite
 * @param allowLocalFileURLs boolean
 */
protected void warnForFileURL(
        ReplicaCatalogEntry rce, String destinationSite, boolean allowLocalFileURLs) {
    // assemble the warning text; concatenation of the same fragments in the
    // same order, then hand the message off to the logger
    String message =
            "File URL "
                    + rce
                    + " not included as the site attribute ("
                    + rce.getResourceHandle()
                    + ") is a mismatch to the destination site for transfer ("
                    + destinationSite
                    + "). allowLocalFileURLs: "
                    + allowLocalFileURLs;
    mLogger.log(message, LogManager.WARNING_MESSAGE_LEVEL);
}
}
| |
/**
* Generated with Acceleo
*/
package org.wso2.developerstudio.eclipse.gmf.esb.parts.forms;
// Start of user code for imports
import org.eclipse.emf.common.util.Enumerator;
import org.eclipse.emf.ecore.util.EcoreAdapterFactory;
import org.eclipse.emf.edit.ui.provider.AdapterFactoryLabelProvider;
import org.eclipse.emf.eef.runtime.EEFRuntimePlugin;
import org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent;
import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart;
import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.part.impl.SectionPropertiesEditingPart;
import org.eclipse.emf.eef.runtime.ui.parts.PartComposer;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.BindingCompositionSequence;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionSequence;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionStep;
import org.eclipse.emf.eef.runtime.ui.utils.EditingUtils;
import org.eclipse.emf.eef.runtime.ui.widgets.EMFComboViewer;
import org.eclipse.emf.eef.runtime.ui.widgets.FormUtils;
import org.eclipse.jface.viewers.ArrayContentProvider;
import org.eclipse.jface.viewers.ISelectionChangedListener;
import org.eclipse.jface.viewers.SelectionChangedEvent;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.FocusAdapter;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.KeyAdapter;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.forms.widgets.Form;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.forms.widgets.ScrolledForm;
import org.eclipse.ui.forms.widgets.Section;
import org.wso2.developerstudio.eclipse.gmf.esb.NamespacedProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.EndPointPropertyPropertiesEditionPart;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository;
import org.wso2.developerstudio.eclipse.gmf.esb.providers.EsbMessages;
// End of user code
/**
 * Form-based properties editing part for the <code>EndPointProperty</code> model element.
 * Generated with Acceleo; hand-written customisations live between the
 * "Start of user code" / "End of user code" markers.
 */
public class EndPointPropertyPropertiesEditionPartForm extends SectionPropertiesEditingPart implements IFormPropertiesEditionPart, EndPointPropertyPropertiesEditionPart {
// text widget for the property name
protected Text name;
// text widget for the property value
protected Text value;
// combo viewer for the scope enumeration
protected EMFComboViewer scope;
// combo viewer for the value type enumeration
protected EMFComboViewer valueType;
// Start of user code for valueExpression widgets declarations
// End of user code
/**
 * For {@link ISection} use only.
 */
public EndPointPropertyPropertiesEditionPartForm() { super(); }
/**
 * Default constructor
 * @param editionComponent the {@link IPropertiesEditionComponent} that manage this part
 *
 */
public EndPointPropertyPropertiesEditionPartForm(IPropertiesEditionComponent editionComponent) {
super(editionComponent);
}
/**
 * {@inheritDoc}
 *
 * @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart#
 * createFigure(org.eclipse.swt.widgets.Composite, org.eclipse.ui.forms.widgets.FormToolkit)
 *
 */
public Composite createFigure(final Composite parent, final FormToolkit widgetFactory) {
ScrolledForm scrolledForm = widgetFactory.createScrolledForm(parent);
Form form = scrolledForm.getForm();
view = form.getBody();
GridLayout layout = new GridLayout();
// three columns: description label, editing widget, help button
layout.numColumns = 3;
view.setLayout(layout);
createControls(widgetFactory, view);
return scrolledForm;
}
/**
 * {@inheritDoc}
 *
 * @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart#
 * createControls(org.eclipse.ui.forms.widgets.FormToolkit, org.eclipse.swt.widgets.Composite)
 *
 */
public void createControls(final FormToolkit widgetFactory, Composite view) {
// declarative composition: register each editing step, then let the
// PartComposer call back into addToPart to build the matching widget
CompositionSequence endPointPropertyStep = new BindingCompositionSequence(propertiesEditionComponent);
CompositionStep propertiesStep = endPointPropertyStep.addStep(EsbViewsRepository.EndPointProperty.Properties.class);
propertiesStep.addStep(EsbViewsRepository.EndPointProperty.Properties.name);
propertiesStep.addStep(EsbViewsRepository.EndPointProperty.Properties.value);
propertiesStep.addStep(EsbViewsRepository.EndPointProperty.Properties.scope);
propertiesStep.addStep(EsbViewsRepository.EndPointProperty.Properties.valueType);
propertiesStep.addStep(EsbViewsRepository.EndPointProperty.Properties.valueExpression);
composer = new PartComposer(endPointPropertyStep) {
@Override
public Composite addToPart(Composite parent, Object key) {
if (key == EsbViewsRepository.EndPointProperty.Properties.class) {
return createPropertiesGroup(widgetFactory, parent);
}
if (key == EsbViewsRepository.EndPointProperty.Properties.name) {
return createNameText(widgetFactory, parent);
}
if (key == EsbViewsRepository.EndPointProperty.Properties.value) {
return createValueText(widgetFactory, parent);
}
if (key == EsbViewsRepository.EndPointProperty.Properties.scope) {
return createScopeEMFComboViewer(widgetFactory, parent);
}
if (key == EsbViewsRepository.EndPointProperty.Properties.valueType) {
return createValueTypeEMFComboViewer(widgetFactory, parent);
}
// Start of user code for valueExpression addToPart creation
// End of user code
return parent;
}
};
composer.compose(view);
}
/**
 * Creates the collapsible "Properties" section that hosts all editing widgets.
 */
protected Composite createPropertiesGroup(FormToolkit widgetFactory, final Composite parent) {
Section propertiesSection = widgetFactory.createSection(parent, Section.TITLE_BAR | Section.TWISTIE | Section.EXPANDED);
propertiesSection.setText(EsbMessages.EndPointPropertyPropertiesEditionPart_PropertiesGroupLabel);
GridData propertiesSectionData = new GridData(GridData.FILL_HORIZONTAL);
propertiesSectionData.horizontalSpan = 3;
propertiesSection.setLayoutData(propertiesSectionData);
Composite propertiesGroup = widgetFactory.createComposite(propertiesSection);
GridLayout propertiesGroupLayout = new GridLayout();
propertiesGroupLayout.numColumns = 3;
propertiesGroup.setLayout(propertiesGroupLayout);
propertiesSection.setClient(propertiesGroup);
return propertiesGroup;
}
// Creates the text widget (plus help button) for the name property; commits
// the value to the edition component on focus loss and on Enter.
protected Composite createNameText(FormToolkit widgetFactory, Composite parent) {
createDescription(parent, EsbViewsRepository.EndPointProperty.Properties.name, EsbMessages.EndPointPropertyPropertiesEditionPart_NameLabel);
name = widgetFactory.createText(parent, ""); //$NON-NLS-1$
name.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
widgetFactory.paintBordersFor(parent);
GridData nameData = new GridData(GridData.FILL_HORIZONTAL);
name.setLayoutData(nameData);
name.addFocusListener(new FocusAdapter() {
/**
 * @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent)
 *
 */
@Override
@SuppressWarnings("synthetic-access")
public void focusLost(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
EndPointPropertyPropertiesEditionPartForm.this,
EsbViewsRepository.EndPointProperty.Properties.name,
PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, name.getText()));
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
EndPointPropertyPropertiesEditionPartForm.this,
EsbViewsRepository.EndPointProperty.Properties.name,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
null, name.getText()));
}
}
/**
 * @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent)
 */
@Override
public void focusGained(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
EndPointPropertyPropertiesEditionPartForm.this,
null,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
null, null));
}
}
});
name.addKeyListener(new KeyAdapter() {
/**
 * @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent)
 *
 */
@Override
@SuppressWarnings("synthetic-access")
public void keyPressed(KeyEvent e) {
if (e.character == SWT.CR) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EndPointPropertyPropertiesEditionPartForm.this, EsbViewsRepository.EndPointProperty.Properties.name, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, name.getText()));
}
}
});
EditingUtils.setID(name, EsbViewsRepository.EndPointProperty.Properties.name);
EditingUtils.setEEFtype(name, "eef::Text"); //$NON-NLS-1$
FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.EndPointProperty.Properties.name, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createNameText
// End of user code
return parent;
}
// Creates the text widget (plus help button) for the value property; same
// commit behavior as the name widget.
protected Composite createValueText(FormToolkit widgetFactory, Composite parent) {
createDescription(parent, EsbViewsRepository.EndPointProperty.Properties.value, EsbMessages.EndPointPropertyPropertiesEditionPart_ValueLabel);
value = widgetFactory.createText(parent, ""); //$NON-NLS-1$
value.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
widgetFactory.paintBordersFor(parent);
GridData valueData = new GridData(GridData.FILL_HORIZONTAL);
value.setLayoutData(valueData);
value.addFocusListener(new FocusAdapter() {
/**
 * @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent)
 *
 */
@Override
@SuppressWarnings("synthetic-access")
public void focusLost(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
EndPointPropertyPropertiesEditionPartForm.this,
EsbViewsRepository.EndPointProperty.Properties.value,
PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, value.getText()));
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
EndPointPropertyPropertiesEditionPartForm.this,
EsbViewsRepository.EndPointProperty.Properties.value,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
null, value.getText()));
}
}
/**
 * @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent)
 */
@Override
public void focusGained(FocusEvent e) {
if (propertiesEditionComponent != null) {
propertiesEditionComponent
.firePropertiesChanged(new PropertiesEditionEvent(
EndPointPropertyPropertiesEditionPartForm.this,
null,
PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
null, null));
}
}
});
value.addKeyListener(new KeyAdapter() {
/**
 * @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent)
 *
 */
@Override
@SuppressWarnings("synthetic-access")
public void keyPressed(KeyEvent e) {
if (e.character == SWT.CR) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EndPointPropertyPropertiesEditionPartForm.this, EsbViewsRepository.EndPointProperty.Properties.value, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, value.getText()));
}
}
});
EditingUtils.setID(value, EsbViewsRepository.EndPointProperty.Properties.value);
EditingUtils.setEEFtype(value, "eef::Text"); //$NON-NLS-1$
FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.EndPointProperty.Properties.value, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createValueText
// End of user code
return parent;
}
/**
 * Creates the combo viewer (plus help button) for the scope enumeration.
 * Mouse-wheel scrolling over the combo is suppressed to avoid accidental edits.
 *
 * @generated NOT
 */
protected Composite createScopeEMFComboViewer(FormToolkit widgetFactory, Composite parent) {
createDescription(parent, EsbViewsRepository.EndPointProperty.Properties.scope, EsbMessages.EndPointPropertyPropertiesEditionPart_ScopeLabel);
scope = new EMFComboViewer(parent);
scope.setContentProvider(new ArrayContentProvider());
scope.setLabelProvider(new AdapterFactoryLabelProvider(EEFRuntimePlugin.getDefault().getAdapterFactory()));
GridData scopeData = new GridData(GridData.FILL_HORIZONTAL);
scope.getCombo().setLayoutData(scopeData);
scope.getCombo().addListener(SWT.MouseVerticalWheel, new Listener() {
@Override
public void handleEvent(Event arg0) {
arg0.doit = false;
}
});
scope.addSelectionChangedListener(new ISelectionChangedListener() {
/**
 * {@inheritDoc}
 *
 * @see org.eclipse.jface.viewers.ISelectionChangedListener#selectionChanged(org.eclipse.jface.viewers.SelectionChangedEvent)
 *
 */
public void selectionChanged(SelectionChangedEvent event) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EndPointPropertyPropertiesEditionPartForm.this, EsbViewsRepository.EndPointProperty.Properties.scope, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, getScope()));
}
});
scope.setID(EsbViewsRepository.EndPointProperty.Properties.scope);
FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.EndPointProperty.Properties.scope, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createScopeEMFComboViewer
// End of user code
return parent;
}
/**
 * Creates the combo viewer (plus help button) for the value type enumeration.
 * Mouse-wheel scrolling over the combo is suppressed to avoid accidental edits.
 *
 * @generated NOT
 */
protected Composite createValueTypeEMFComboViewer(FormToolkit widgetFactory, Composite parent) {
createDescription(parent, EsbViewsRepository.EndPointProperty.Properties.valueType, EsbMessages.EndPointPropertyPropertiesEditionPart_ValueTypeLabel);
valueType = new EMFComboViewer(parent);
valueType.setContentProvider(new ArrayContentProvider());
valueType.setLabelProvider(new AdapterFactoryLabelProvider(EEFRuntimePlugin.getDefault().getAdapterFactory()));
GridData valueTypeData = new GridData(GridData.FILL_HORIZONTAL);
valueType.getCombo().setLayoutData(valueTypeData);
valueType.getCombo().addListener(SWT.MouseVerticalWheel, new Listener() {
@Override
public void handleEvent(Event arg0) {
arg0.doit = false;
}
});
valueType.addSelectionChangedListener(new ISelectionChangedListener() {
/**
 * {@inheritDoc}
 *
 * @see org.eclipse.jface.viewers.ISelectionChangedListener#selectionChanged(org.eclipse.jface.viewers.SelectionChangedEvent)
 *
 */
public void selectionChanged(SelectionChangedEvent event) {
if (propertiesEditionComponent != null)
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EndPointPropertyPropertiesEditionPartForm.this, EsbViewsRepository.EndPointProperty.Properties.valueType, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, getValueType()));
}
});
valueType.setID(EsbViewsRepository.EndPointProperty.Properties.valueType);
FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.EndPointProperty.Properties.valueType, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
// Start of user code for createValueTypeEMFComboViewer
// End of user code
return parent;
}
/**
 * {@inheritDoc}
 *
 * @see org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionListener#firePropertiesChanged(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
 *
 */
public void firePropertiesChanged(IPropertiesEditionEvent event) {
// Start of user code for tab synchronization
// End of user code
}
/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.EndPointPropertyPropertiesEditionPart#getName()
 *
 */
public String getName() {
return name.getText();
}
/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.EndPointPropertyPropertiesEditionPart#setName(String newValue)
 *
 */
public void setName(String newValue) {
if (newValue != null) {
name.setText(newValue);
} else {
name.setText(""); //$NON-NLS-1$
}
// mirror the repository's read-only state onto the widget
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.EndPointProperty.Properties.name);
if (eefElementEditorReadOnlyState && name.isEnabled()) {
name.setEnabled(false);
name.setToolTipText(EsbMessages.EndPointProperty_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !name.isEnabled()) {
name.setEnabled(true);
}
}
/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.EndPointPropertyPropertiesEditionPart#getValue()
 *
 */
public String getValue() {
return value.getText();
}
/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.EndPointPropertyPropertiesEditionPart#setValue(String newValue)
 *
 */
public void setValue(String newValue) {
if (newValue != null) {
value.setText(newValue);
} else {
value.setText(""); //$NON-NLS-1$
}
// mirror the repository's read-only state onto the widget
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.EndPointProperty.Properties.value);
if (eefElementEditorReadOnlyState && value.isEnabled()) {
value.setEnabled(false);
value.setToolTipText(EsbMessages.EndPointProperty_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !value.isEnabled()) {
value.setEnabled(true);
}
}
/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.EndPointPropertyPropertiesEditionPart#getScope()
 *
 */
public Enumerator getScope() {
Enumerator selection = (Enumerator) ((StructuredSelection) scope.getSelection()).getFirstElement();
return selection;
}
/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.EndPointPropertyPropertiesEditionPart#initScope(Object input, Enumerator current)
 */
public void initScope(Object input, Enumerator current) {
scope.setInput(input);
scope.modelUpdating(new StructuredSelection(current));
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.EndPointProperty.Properties.scope);
if (eefElementEditorReadOnlyState && scope.isEnabled()) {
scope.setEnabled(false);
scope.setToolTipText(EsbMessages.EndPointProperty_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !scope.isEnabled()) {
scope.setEnabled(true);
}
}
/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.EndPointPropertyPropertiesEditionPart#setScope(Enumerator newValue)
 *
 */
public void setScope(Enumerator newValue) {
scope.modelUpdating(new StructuredSelection(newValue));
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.EndPointProperty.Properties.scope);
if (eefElementEditorReadOnlyState && scope.isEnabled()) {
scope.setEnabled(false);
scope.setToolTipText(EsbMessages.EndPointProperty_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !scope.isEnabled()) {
scope.setEnabled(true);
}
}
/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.EndPointPropertyPropertiesEditionPart#getValueType()
 *
 */
public Enumerator getValueType() {
Enumerator selection = (Enumerator) ((StructuredSelection) valueType.getSelection()).getFirstElement();
return selection;
}
/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.EndPointPropertyPropertiesEditionPart#initValueType(Object input, Enumerator current)
 */
public void initValueType(Object input, Enumerator current) {
valueType.setInput(input);
valueType.modelUpdating(new StructuredSelection(current));
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.EndPointProperty.Properties.valueType);
if (eefElementEditorReadOnlyState && valueType.isEnabled()) {
valueType.setEnabled(false);
valueType.setToolTipText(EsbMessages.EndPointProperty_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !valueType.isEnabled()) {
valueType.setEnabled(true);
}
}
/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.EndPointPropertyPropertiesEditionPart#setValueType(Enumerator newValue)
 *
 */
public void setValueType(Enumerator newValue) {
valueType.modelUpdating(new StructuredSelection(newValue));
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.EndPointProperty.Properties.valueType);
if (eefElementEditorReadOnlyState && valueType.isEnabled()) {
valueType.setEnabled(false);
valueType.setToolTipText(EsbMessages.EndPointProperty_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !valueType.isEnabled()) {
valueType.setEnabled(true);
}
}
// Start of user code for valueExpression specific getters and setters implementation
// NOTE(review): valueExpression accessors are unimplemented stubs - the
// getter always returns null and the setter is a no-op; confirm whether
// this property is meant to be editable from this form.
@Override
public NamespacedProperty getValueExpression() {
// TODO Auto-generated method stub
return null;
}
@Override
public void setValueExpression(NamespacedProperty namespacedProperty) {
// TODO Auto-generated method stub
}
// End of user code
/**
 * {@inheritDoc}
 *
 * @see org.eclipse.emf.eef.runtime.api.parts.IPropertiesEditionPart#getTitle()
 *
 */
public String getTitle() {
return EsbMessages.EndPointProperty_Part_Title;
}
// Start of user code additional methods
// End of user code
}
| |
import java.util.ArrayList;
import static org.junit.Assert.*;
import org.junit.Test;
/**
 * JUnit tests for the Table inheritance hierarchy (Table, Desk, HochTable,
 * SuperFriendlyHochTable). The expected values below encode the intended
 * behavior of each subclass: per these tests, HochTable (and its subclass)
 * grows chairCount on demand when full, while Desk/Table refuse new people.
 */
public class TableInheritTester {
@Test
// Constructor tests with acceptable input
public void test_ConstructorDesk() {
// default Desk has 3 chairs and starts empty
Table t1 = new Desk();
assertEquals(3, t1.chairCount);
assertEquals(0, t1.personCount);
assertEquals(0, t1.people.size());
Table t2 = new Desk(100);
assertEquals(100, t2.chairCount);
assertEquals(0, t2.personCount);
assertEquals(0, t2.people.size());
}
@Test
// Constructor tests with acceptable input
public void test_ConstructorHochTable() {
// default HochTable has 6 chairs and starts empty
Table t1 = new HochTable();
assertEquals(6, t1.chairCount);
assertEquals(0, t1.personCount);
assertEquals(0, t1.people.size());
Table t2 = new HochTable(100);
assertEquals(100, t2.chairCount);
assertEquals(0, t2.personCount);
assertEquals(0, t2.people.size());
}
@Test
// Constructor tests with acceptable input
public void test_ConstructorSuperFriendlyHochTable() {
// default SuperFriendlyHochTable has 10 chairs and starts empty
Table t1 = new SuperFriendlyHochTable();
assertEquals(10, t1.chairCount);
assertEquals(0, t1.personCount);
assertEquals(0, t1.people.size());
Table t2 = new SuperFriendlyHochTable(100);
assertEquals(100, t2.chairCount);
assertEquals(0, t2.personCount);
assertEquals(0, t2.people.size());
}
@Test
// Constructor test with unacceptable input: negative chair count must throw
public void test_ConstructorErrorDesk(){
try{
Table t1 = new Desk(-5);
// it is an error (failure) if it executes the next line.
fail();
}
catch(IllegalArgumentException e){
// expected - negative chair count rejected
}
}
@Test
// Constructor test with unacceptable input: negative chair count must throw
public void test_ConstructorErrorHochTable(){
try{
Table t1 = new HochTable(-5);
// it is an error (failure) if it executes the next line.
fail();
}
catch(IllegalArgumentException e){
// expected - negative chair count rejected
}
}
@Test
// Constructor test with unacceptable input: negative chair count must throw
public void test_ConstructorErrorSuperFriendlyHochTable(){
try{
Table t1 = new SuperFriendlyHochTable(-5);
// it is an error (failure) if it executes the next line.
fail();
}
catch(IllegalArgumentException e){
// expected - negative chair count rejected
}
}
@Test
// emptySeat test: a plain Table with no chairs has no empty seat
public void test_EmptySeatTable(){
Table t0 = new Table(0);
assertFalse(t0.emptySeat());
Table t1 = new Table(1);
assertTrue(t1.emptySeat());
}
@Test
// emptySeat test: a HochTable is expected to always report an empty seat,
// even with zero chairs (it can add chairs - see test_AddPersonHochTable)
public void test_EmptySeatHochTable(){
Table t0 = new HochTable(0);
assertTrue(t0.emptySeat());
Table t1 = new HochTable(1);
assertTrue(t1.emptySeat());
}
@Test
// emptySeat test: same always-room expectation as HochTable
public void test_EmptySeatSuperFriendlyHochTable(){
Table t0 = new SuperFriendlyHochTable(0);
assertTrue(t0.emptySeat());
Table t1 = new SuperFriendlyHochTable(1);
assertTrue(t1.emptySeat());
}
@Test
// emptySeat test: a Desk behaves like a plain Table here
public void test_EmptySeatDesk(){
Table t0 = new Desk(0);
assertFalse(t0.emptySeat());
Table t1 = new Desk(1);
assertTrue(t1.emptySeat());
}
@Test
// addPerson(String name) test: a full Desk turns people away
public void test_AddPersonDesk(){
Table t1 = new Desk(2);
String salutation;
salutation = t1.addPerson("Beth");
assertEquals("Welcome Beth", salutation);
salutation = t1.addPerson("Colleen");
assertEquals("Welcome Colleen", salutation);
salutation = t1.addPerson("Wally");
assertEquals("Sorry - there is no space for you Wally", salutation);
assertEquals(2, t1.personCount);
assertEquals("[Beth, Colleen]", t1.people.toString());
}
@Test
// addPerson(String name) test: a full HochTable adds a chair instead of refusing
public void test_AddPersonHochTable(){
Table t1 = new HochTable(2);
assertEquals(2, t1.chairCount);
String salutation;
salutation = t1.addPerson("Beth");
assertEquals("Welcome Beth!", salutation);
salutation = t1.addPerson("Colleen");
assertEquals("Welcome Colleen!", salutation);
salutation = t1.addPerson("Wally");
assertEquals("Welcome Wally!", salutation);
assertEquals(3, t1.personCount);
// chairCount grew from 2 to 3 to make room for the third person
assertEquals(3, t1.chairCount);
assertEquals("[Beth, Colleen, Wally]", t1.people.toString());
}
@Test
// addPerson(String name) test: greeting lists everyone currently seated
public void test_AddPersonSuperFriendlyHochTable(){
Table t1 = new SuperFriendlyHochTable(2);
assertEquals(2, t1.chairCount);
String salutation;
salutation = t1.addPerson("Beth");
assertEquals("Hello Beth!!!!!", salutation);
salutation = t1.addPerson("Colleen");
assertEquals("Hello Beth, Colleen!!!!!", salutation);
salutation = t1.addPerson("Wally");
assertEquals("Hello Beth, Colleen, Wally!!!!!", salutation);
assertEquals(3, t1.personCount);
// chairCount grew from 2 to 3 to make room for the third person
assertEquals(3, t1.chairCount);
assertEquals("[Beth, Colleen, Wally]", t1.people.toString());
}
@Test
// removePerson(String name) test
public void test_RemovePersonDesk(){
Table t1 = new Desk(3);
String salutation;
t1.addPerson("Beth");
salutation = t1.removePerson("Alien");
assertEquals("Weird! Alien was never here!", salutation);
t1.addPerson("Colleen");
t1.addPerson("Wally");
salutation = t1.removePerson("Colleen");
assertEquals("Bye Colleen", salutation);
salutation = t1.removePerson("Colleen");
assertEquals("Weird! Colleen was never here!", salutation);
t1.removePerson("Beth");
// removing the last person leaves no one to say goodbye
salutation = t1.removePerson("Wally");
assertEquals("(Silence - no one is here to say goodbye)", salutation);
}
@Test
// removePerson(String name) test
public void test_RemovePersonHochTable(){
Table t1 = new HochTable(2);
String salutation;
t1.addPerson("Beth");
salutation = t1.removePerson("Alien");
assertEquals("Weird! Alien was never here!", salutation);
t1.addPerson("Colleen");
t1.addPerson("Wally");
salutation = t1.removePerson("Colleen");
assertEquals("Bye Colleen", salutation);
salutation = t1.removePerson("Colleen");
assertEquals("Weird! Colleen was never here!", salutation);
t1.removePerson("Beth");
// removing the last person leaves no one to say goodbye
salutation = t1.removePerson("Wally");
assertEquals("(Silence - no one is here to say goodbye)", salutation);
}
@Test
// removePerson(String name) test
public void test_RemovePersonSuperFriendlyHochTable(){
Table t1 = new SuperFriendlyHochTable(2);
String salutation;
t1.addPerson("Beth");
salutation = t1.removePerson("Alien");
assertEquals("Weird! Alien was never here!", salutation);
t1.addPerson("Colleen");
t1.addPerson("Wally");
salutation = t1.removePerson("Colleen");
assertEquals("Bye Colleen", salutation);
salutation = t1.removePerson("Colleen");
assertEquals("Weird! Colleen was never here!", salutation);
t1.removePerson("Beth");
// removing the last person leaves no one to say goodbye
salutation = t1.removePerson("Wally");
assertEquals("(Silence - no one is here to say goodbye)", salutation);
}
@Test
// addPowerSupply() and addPersonAndLaptop(String name) test
public void test_DeskSpecificThings(){
Desk d1 = new Desk();
assertEquals(0, d1.laptopCount);
assertEquals(0, d1.powerOutletCount);
// each power supply is expected to add 6 outlets
d1.addPowerSupply();
assertEquals(0, d1.laptopCount);
assertEquals(6, d1.powerOutletCount);
d1.addPowerSupply();
assertEquals(0, d1.laptopCount);
assertEquals(12, d1.powerOutletCount);
String salutation;
// seating a person with a laptop consumes one outlet
salutation = d1.addPersonAndLaptop("Beth");
assertEquals("Welcome Beth", salutation);
assertEquals(1, d1.laptopCount);
assertEquals(11, d1.powerOutletCount);
}
@Test
// test classStartingSoon(): clears everyone from the table
public void test_ClassStartingSoon(){
HochTable t1 = new HochTable(5);
t1.addPerson("Beth");
t1.addPerson("Colleen");
t1.addPerson("Dodds");
t1.addPerson("Geoff");
t1.addPerson("Julie");
t1.addPerson("Ran");
t1.addPerson("Eliot");
t1.classStartingSoon();
assertEquals(0, t1.personCount);
assertEquals("[]", t1.people.toString());
}
@Test
// test classStartingSoon(): clears everyone from the table
public void test_ClassStartingSoonFriendly(){
HochTable t1 = new SuperFriendlyHochTable(5);
t1.addPerson("Beth");
t1.addPerson("Colleen");
t1.addPerson("Dodds");
t1.addPerson("Geoff");
t1.addPerson("Julie");
t1.addPerson("Ran");
t1.addPerson("Eliot");
t1.classStartingSoon();
assertEquals(0, t1.personCount);
assertEquals("[]", t1.people.toString());
}
}
| |
/*
* Copyright (c) 2009-2012 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.input.jogl;
import com.jme3.cursors.plugins.JmeCursor;
import com.jme3.input.MouseInput;
import com.jme3.input.RawInputListener;
import com.jme3.input.event.MouseButtonEvent;
import com.jme3.input.event.MouseMotionEvent;
import com.jogamp.common.nio.Buffers;
import com.jogamp.newt.Display.PointerIcon;
import com.jogamp.newt.event.MouseEvent;
import com.jogamp.newt.event.MouseListener;
import com.jogamp.newt.opengl.GLWindow;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.logging.Logger;
import com.jogamp.nativewindow.util.Dimension;
import com.jogamp.nativewindow.util.DimensionImmutable;
import com.jogamp.nativewindow.util.PixelFormat;
import com.jogamp.nativewindow.util.PixelRectangle;
import com.jogamp.nativewindow.util.Point;
import com.jogamp.newt.event.WindowAdapter;
import com.jogamp.newt.event.WindowEvent;
/**
 * jME3 {@link MouseInput} implementation backed by a JOGL/NEWT {@link GLWindow}.
 *
 * Mouse events arrive on the NEWT listener callbacks (mousePressed,
 * mouseMoved, ...), are accumulated into fields / a queue, and are flushed
 * to the registered {@link RawInputListener} on each {@link #update()} call.
 * Y coordinates handed to jME are flipped (origin bottom-left) relative to
 * NEWT's top-left origin, hence the {@code getSurfaceHeight() - y} terms.
 *
 * NOTE(review): this class is not thread-safe beyond the synchronization on
 * {@code eventQueue}; motion/wheel state is written from NEWT callbacks and
 * read in update() without locking — confirm both run on the same thread.
 */
public class NewtMouseInput implements MouseInput, MouseListener {
    // Scale factor applied to NEWT wheel rotation before reporting it to jME.
    public static int WHEEL_AMP = 40; // arbitrary... Java's mouse wheel seems to report something a lot lower than lwjgl's
    private static final Logger logger = Logger.getLogger(NewtMouseInput.class.getName());
    // Desired cursor visibility; re-applied when the window regains focus.
    private boolean visible = true;
    // Destination for the converted jME input events.
    private RawInputListener listener;
    // The NEWT window this input reads from (set via setInputSource).
    private GLWindow component;
    // Button events queued from the NEWT thread, drained in update().
    private final ArrayList<MouseButtonEvent> eventQueue = new ArrayList<MouseButtonEvent>();
    // Scratch copy so listeners are invoked outside the eventQueue lock.
    private final ArrayList<MouseButtonEvent> eventQueueCopy = new ArrayList<MouseButtonEvent>();
    // Coordinates/wheel value last reported to the listener (for delta computation).
    private int lastEventX;
    private int lastEventY;
    private int lastEventWheel;
    // Accumulated wheel position (rotation * WHEEL_AMP).
    private int wheelPos;
    // Virtual (unbounded) cursor position accumulated from motion deltas.
    private Point location;
    // NOTE(review): centerLocation is assigned but never read in this class.
    private Point centerLocation;
    // Last raw cursor position received from NEWT.
    private Point lastKnownLocation;
    // Position the pointer is warped back to while the cursor is grabbed/invisible.
    private Point lockPosition;
    // True between warpPointer() and the arrival of the synthetic move event.
    private boolean isRecentering;
    // Set by motion/wheel callbacks; tells update() to emit a MouseMotionEvent.
    private boolean cursorMoved;
    // Counts moves seen while recentering, to bail out if the warp event never arrives.
    private int eventsSinceRecenter;
    // Press coordinates; written on the NEWT thread.
    // NOTE(review): never read within this class — presumably for external diagnostics.
    private volatile int mousePressedX;
    private volatile int mousePressedY;
    /** Creates the input with all positions at the origin; call setInputSource() before use. */
    public NewtMouseInput() {
        location = new Point();
        centerLocation = new Point();
        lastKnownLocation = new Point();
        lockPosition = new Point();
    }
    /**
     * Attaches this input to a NEWT window, detaching from and resetting all
     * state accumulated for any previously attached window.
     */
    public void setInputSource(GLWindow comp) {
        if (component != null) {
            component.removeMouseListener(this);
            eventQueue.clear();
            wheelPos = 0;
            isRecentering = false;
            eventsSinceRecenter = 0;
            lastEventX = 0;
            lastEventY = 0;
            lastEventWheel = 0;
            location = new Point();
            centerLocation = new Point();
            lastKnownLocation = new Point();
            lockPosition = new Point();
        }
        component = comp;
        component.addMouseListener(this);
        component.addWindowListener(new WindowAdapter(){
            @Override
            public void windowGainedFocus(WindowEvent e) {
                // Re-apply the requested cursor visibility when focus returns.
                setCursorVisible(visible);
            }
            @Override
            public void windowLostFocus(WindowEvent e) {
                //without those lines,
                //on Linux (OpenBox) the mouse is not restored if invisible (eg via Alt-Tab)
                component.setPointerVisible(true);
                component.confinePointer(false);
            }
        });
    }
    /** No setup needed; attachment happens in setInputSource(). */
    @Override
    public void initialize() {
    }
    /** No teardown needed. */
    @Override
    public void destroy() {
    }
    /** Always reports ready; there is no asynchronous initialization. */
    @Override
    public boolean isInitialized() {
        return true;
    }
    /** Registers the consumer of converted mouse events. */
    @Override
    public void setInputListener(RawInputListener listener) {
        this.listener = listener;
    }
    /** Timestamps are taken from System.nanoTime(). */
    @Override
    public long getInputTimeNanos() {
        return System.nanoTime();
    }
    /**
     * Shows or hides (and implicitly grabs) the cursor. Hiding records the
     * current position as the lock position the pointer is warped back to.
     * NOTE(review): dereferences {@code component} — calling this before
     * setInputSource() would NPE; confirm callers always attach a window first.
     */
    @Override
    public void setCursorVisible(boolean visible) {
        this.visible = visible;
        component.setPointerVisible(visible);
        lockPosition.set(lastKnownLocation.getX(), lastKnownLocation.getY());
        hack_confinePointer();
    }
    // Keeps an invisible pointer pinned to the lock position while the window has focus.
    private void hack_confinePointer() {
        if (component.hasFocus() && !component.isPointerVisible()) {
            recenterMouse(component);
        }
    }
    /**
     * Flushes accumulated motion/wheel state as a single MouseMotionEvent,
     * then drains the queued button events to the listener. Does nothing
     * while the window lacks focus.
     */
    @Override
    public void update() {
        if (!component.hasFocus()) return;
        if (cursorMoved) {
            int newX = location.getX();
            int newY = location.getY();
            int newWheel = wheelPos;
            // invert DY
            int actualX = lastKnownLocation.getX();
            int actualY = component.getSurfaceHeight() - lastKnownLocation.getY();
            // Note: dy and the wheel delta are both inverted (last - new),
            // unlike dx which is (new - last).
            MouseMotionEvent evt = new MouseMotionEvent(actualX, actualY,
                    newX - lastEventX,
                    lastEventY - newY,
                    wheelPos, lastEventWheel - wheelPos);
            listener.onMouseMotionEvent(evt);
            lastEventX = newX;
            lastEventY = newY;
            lastEventWheel = newWheel;
            cursorMoved = false;
        }
        // Copy under the lock, dispatch outside it, so NEWT callbacks are not blocked.
        synchronized (eventQueue) {
            eventQueueCopy.clear();
            eventQueueCopy.addAll(eventQueue);
            eventQueue.clear();
        }
        int size = eventQueueCopy.size();
        for (int i = 0; i < size; i++) {
            listener.onMouseButtonEvent(eventQueueCopy.get(i));
        }
    }
    /** Only left/middle/right are mapped (see getJMEButtonIndex). */
    @Override
    public int getButtonCount() {
        return 3;
    }
    /** No-op: jME derives click semantics from the press/release events below. */
    @Override
    public void mouseClicked(MouseEvent awtEvt) {
        // MouseButtonEvent evt = new MouseButtonEvent(getJMEButtonIndex(arg0), false);
        // listener.onMouseButtonEvent(evt);
    }
    /** Queues a pressed-button event (Y flipped to jME's bottom-left origin). */
    @Override
    public void mousePressed(MouseEvent newtEvt) {
        mousePressedX = newtEvt.getX();
        mousePressedY = component.getSurfaceHeight() - newtEvt.getY();
        MouseButtonEvent evt = new MouseButtonEvent(getJMEButtonIndex(newtEvt), true, mousePressedX, mousePressedY);
        evt.setTime(newtEvt.getWhen());
        synchronized (eventQueue) {
            eventQueue.add(evt);
        }
    }
    /** Queues a released-button event (Y flipped to jME's bottom-left origin). */
    @Override
    public void mouseReleased(MouseEvent awtEvt) {
        MouseButtonEvent evt = new MouseButtonEvent(getJMEButtonIndex(awtEvt), false, awtEvt.getX(), component.getSurfaceHeight() - awtEvt.getY());
        evt.setTime(awtEvt.getWhen());
        synchronized (eventQueue) {
            eventQueue.add(evt);
        }
    }
    /** Re-confines the hidden pointer when it crosses the window border. */
    @Override
    public void mouseEntered(MouseEvent awtEvt) {
        hack_confinePointer();
    }
    /** Re-confines the hidden pointer when it crosses the window border. */
    @Override
    public void mouseExited(MouseEvent awtEvt) {
        hack_confinePointer();
    }
    /** Accumulates scaled wheel rotation; flushed on the next update(). */
    @Override
    public void mouseWheelMoved(MouseEvent awtEvt) {
        //FIXME not sure this is the right way to handle this case
        // [0] should be used when the shift key is down
        float dwheel = awtEvt.getRotation()[1];
        wheelPos += dwheel * WHEEL_AMP;
        cursorMoved = true;
    }
    /** Drags are treated identically to plain moves. */
    @Override
    public void mouseDragged(MouseEvent awtEvt) {
        mouseMoved(awtEvt);
    }
    /**
     * Accumulates motion deltas into the virtual cursor position; while a
     * recenter warp is pending, swallows events until the warp's synthetic
     * move arrives (or 5 events pass, as a safety valve).
     */
    @Override
    public void mouseMoved(MouseEvent awtEvt) {
        if (isRecentering) {
            // MHenze (cylab) Fix Issue 35:
            // As long as the MouseInput is in recentering mode, nothing is done until the mouse is entered in the component
            // by the events generated by the robot. If this happens, the last known location is resetted.
            if ((lockPosition.getX() == awtEvt.getX() && lockPosition.getY() == awtEvt.getY()) || eventsSinceRecenter++ == 5) {
                lastKnownLocation.setX(awtEvt.getX());
                lastKnownLocation.setY(awtEvt.getY());
                isRecentering = false;
            }
        } else {
            // MHenze (cylab) Fix Issue 35:
            // Compute the delta and absolute coordinates and recenter the mouse if necessary
            int dx = awtEvt.getX() - lastKnownLocation.getX();
            int dy = awtEvt.getY() - lastKnownLocation.getY();
            location.setX(location.getX() + dx);
            location.setY(location.getY() + dy);
            hack_confinePointer();
            lastKnownLocation.setX(awtEvt.getX());
            lastKnownLocation.setY(awtEvt.getY());
            cursorMoved = true;
        }
    }
    // MHenze (cylab) Fix Issue 35: A method to generate recenter the mouse to allow the InputSystem to "grab" the mouse
    private void recenterMouse(final GLWindow component) {
        eventsSinceRecenter = 0;
        isRecentering = true;
        component.warpPointer(lockPosition.getX(), lockPosition.getY());
    }
    /**
     * Maps a NEWT button id to the jME button index; unknown/extra buttons
     * fall through to BUTTON_LEFT via the leading default label, while
     * buttons 4-9 are explicitly mapped to 0 (== BUTTON_LEFT) pending a fix.
     */
    private int getJMEButtonIndex(MouseEvent awtEvt) {
        int index;
        switch (awtEvt.getButton()) {
            default:
            case MouseEvent.BUTTON1: //left
                index = MouseInput.BUTTON_LEFT;
                break;
            case MouseEvent.BUTTON2: //middle
                index = MouseInput.BUTTON_MIDDLE;
                break;
            case MouseEvent.BUTTON3: //right
                index = MouseInput.BUTTON_RIGHT;
                break;
            case MouseEvent.BUTTON4:
            case MouseEvent.BUTTON5:
            case MouseEvent.BUTTON6:
            case MouseEvent.BUTTON7:
            case MouseEvent.BUTTON8:
            case MouseEvent.BUTTON9:
                //FIXME
                index = 0;
                break;
        }
        return index;
    }
    /**
     * Installs a custom cursor image on the NEWT window. The hotspot Y is
     * flipped (height - yHotSpot) to convert from jME's bottom-left origin.
     */
    @Override
    public void setNativeCursor(JmeCursor cursor) {
        final ByteBuffer pixels = Buffers.copyIntBufferAsByteBuffer(cursor.getImagesData());
        final DimensionImmutable size = new Dimension(cursor.getWidth(), cursor.getHeight());
        final PixelFormat pixFormat = PixelFormat.RGBA8888;
        final PixelRectangle.GenericPixelRect rec = new PixelRectangle.GenericPixelRect(pixFormat, size, 0, true, pixels);
        final PointerIcon joglCursor = component.getScreen().getDisplay().createPointerIcon(rec, cursor.getXHotSpot(), cursor.getHeight() - cursor.getYHotSpot());
        component.setPointerIcon(joglCursor);
    }
}
| |
/**
* Copyright (C) 2012-2013 The named-regexp Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.code.regexp;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.regex.PatternSyntaxException;
/**
* A compiled representation of a regular expression. This is a wrapper
* for the java.util.regex.Pattern with support for named capturing
* groups. The named groups are specified with "(?<name>exp)", which
* is identical to Java 7 named groups.
*
* @since 0.1.9
*/
public class Pattern implements Serializable {
    /**
     * Determines if a de-serialized file is compatible with this class.
     *
     * Maintainers must change this value if and only if the new version
     * of this class is not compatible with old versions. See Sun docs
     * for <a href=http://java.sun.com/products/jdk/1.1/docs/guide
     * /serialization/spec/version.doc.html> details. </a>
     *
     * Not necessary to include in first version of the class, but
     * included here as a reminder of its importance.
     */
    private static final long serialVersionUID = 1L;
    /** Pattern to match group names */
    private static final String NAME_PATTERN = "[^!=].*?";
    /** Pattern to match named capture groups in a pattern string */
    private static final java.util.regex.Pattern NAMED_GROUP_PATTERN = java.util.regex.Pattern.compile("\\(\\?<(" + NAME_PATTERN + ")>", java.util.regex.Pattern.DOTALL);
    /** Pattern to match back references for named capture groups */
    private static final java.util.regex.Pattern BACKREF_NAMED_GROUP_PATTERN = java.util.regex.Pattern.compile("\\\\k<(" + NAME_PATTERN + ")>", java.util.regex.Pattern.DOTALL);
    /** Pattern to match properties for named capture groups in a replacement string */
    private static final java.util.regex.Pattern PROPERTY_PATTERN = java.util.regex.Pattern.compile("\\$\\{(" + NAME_PATTERN + ")\\}", java.util.regex.Pattern.DOTALL);
    /** index of group within patterns above where group name is captured */
    private static final int INDEX_GROUP_NAME = 1;
    /** @see {@link java.util.regex.Pattern#UNIX_LINES} */
    public static final int UNIX_LINES = java.util.regex.Pattern.UNIX_LINES;
    /** @see {@link java.util.regex.Pattern#CASE_INSENSITIVE} */
    public static final int CASE_INSENSITIVE = java.util.regex.Pattern.CASE_INSENSITIVE;
    /** @see {@link java.util.regex.Pattern#COMMENTS} */
    public static final int COMMENTS = java.util.regex.Pattern.COMMENTS;
    /** @see {@link java.util.regex.Pattern#MULTILINE} */
    public static final int MULTILINE = java.util.regex.Pattern.MULTILINE;
    /** @see {@link java.util.regex.Pattern#LITERAL} */
    public static final int LITERAL = java.util.regex.Pattern.LITERAL;
    /** @see {@link java.util.regex.Pattern#DOTALL} */
    public static final int DOTALL = java.util.regex.Pattern.DOTALL;
    /** @see {@link java.util.regex.Pattern#UNICODE_CASE} */
    public static final int UNICODE_CASE = java.util.regex.Pattern.UNICODE_CASE;
    /** @see {@link java.util.regex.Pattern#CANON_EQ} */
    public static final int CANON_EQ = java.util.regex.Pattern.CANON_EQ;
    /** The compiled standard pattern (named groups rewritten to plain groups). */
    private java.util.regex.Pattern pattern;
    /** The original pattern text, including named-group constructs. */
    private String namedPattern;
    /** Lazily computed cache of group names, derived from {@link #groupInfo}. */
    private List<String> groupNames;
    /** Group name -> info (group index and position) for each occurrence. */
    private Map<String,List<GroupInfo> > groupInfo;
    /**
     * Constructs a named pattern with the given regular expression and flags
     *
     * @param regex the expression to be compiled
     * @param flags Match flags, a bit mask that may include:
     * <ul>
     *  <li>{@link java.util.regex.Pattern#CASE_INSENSITIVE}</li>
     *  <li>{@link java.util.regex.Pattern#MULTILINE}</li>
     *  <li>{@link java.util.regex.Pattern#DOTALL}</li>
     *  <li>{@link java.util.regex.Pattern#UNICODE_CASE}</li>
     *  <li>{@link java.util.regex.Pattern#CANON_EQ}</li>
     *  <li>{@link java.util.regex.Pattern#UNIX_LINES}</li>
     *  <li>{@link java.util.regex.Pattern#LITERAL}</li>
     *  <li>{@link java.util.regex.Pattern#COMMENTS}</li>
     * </ul>
     */
    protected Pattern(String regex, int flags) {
        namedPattern = regex;
        // group info must be parsed before building the standard pattern
        // because the pattern relies on group info to determine the indexes
        // of named back-references
        groupInfo = extractGroupInfo(regex);
        pattern = buildStandardPattern(regex, flags);
    }
    /**
     * Compiles the given regular expression into a pattern
     *
     * @param regex the expression to be compiled
     * @return the pattern
     */
    public static Pattern compile(String regex) {
        return new Pattern(regex, 0);
    }
    /**
     * Compiles the given regular expression into a pattern with the given flags
     *
     * @param regex the expression to be compiled
     * @param flags Match flags, a bit mask that may include:
     * <ul>
     *  <li>{@link java.util.regex.Pattern#CASE_INSENSITIVE}</li>
     *  <li>{@link java.util.regex.Pattern#MULTILINE}</li>
     *  <li>{@link java.util.regex.Pattern#DOTALL}</li>
     *  <li>{@link java.util.regex.Pattern#UNICODE_CASE}</li>
     *  <li>{@link java.util.regex.Pattern#CANON_EQ}</li>
     *  <li>{@link java.util.regex.Pattern#UNIX_LINES}</li>
     *  <li>{@link java.util.regex.Pattern#LITERAL}</li>
     *  <li>{@link java.util.regex.Pattern#COMMENTS}</li>
     * </ul>
     * @return the pattern
     */
    public static Pattern compile(String regex, int flags) {
        return new Pattern(regex, flags);
    }
    /**
     * Gets the group index of a named capture group
     *
     * @param groupName name of capture group
     * @return group index or -1 if not found
     */
    public int indexOf(String groupName) {
        return indexOf(groupName, 0);
    }
    /**
     * Gets the group index of a named capture group at the
     * specified index. If only one instance of the named
     * group exists, use index 0.
     *
     * @param groupName name of capture group
     * @param index the instance index of the named capture group within
     * the pattern; e.g., index is 2 for the third instance
     * @return group index or -1 if not found
     * @throws IndexOutOfBoundsException if instance index is out of bounds
     */
    public int indexOf(String groupName, int index) {
        int idx = -1;
        if (groupInfo.containsKey(groupName)) {
            List<GroupInfo> list = groupInfo.get(groupName);
            idx = list.get(index).groupIndex();
        }
        return idx;
    }
    /**
     * Returns this pattern's match flags
     *
     * @return The match flags specified when this pattern was compiled
     */
    public int flags() {
        return pattern.flags();
    }
    /**
     * Creates a matcher that will match the given input against this pattern.
     *
     * @param input The character sequence to be matched
     * @return A new matcher for this pattern
     */
    public Matcher matcher(CharSequence input) {
        return new Matcher(this, input);
    }
    /**
     * Returns the wrapped {@link java.util.regex.Pattern}
     * @return the pattern
     */
    public java.util.regex.Pattern pattern() {
        return pattern;
    }
    /**
     * Returns the regular expression from which this pattern was compiled.
     *
     * @return The source of this pattern
     */
    public String standardPattern() {
        return pattern.pattern();
    }
    /**
     * Returns the original regular expression (including named groups)
     *
     * @return The regular expression
     */
    public String namedPattern() {
        return namedPattern;
    }
    /**
     * Gets the names of all capture groups
     *
     * @return the list of names
     */
    public List<String> groupNames() {
        if (groupNames == null) {
            groupNames = new ArrayList<String>(groupInfo.keySet());
        }
        return Collections.unmodifiableList(groupNames);
    }
    /**
     * Gets the names and group info (group index and string position
     * within the named pattern) of all named capture groups
     *
     * @return a map of group names and their info
     */
    public Map<String, List<GroupInfo> > groupInfo() {
        return Collections.unmodifiableMap(groupInfo);
    }
    /**
     * Replaces group-name properties (e.g., <b><code>${named}</code></b>) in
     * a replacement pattern with the equivalent reference that uses the
     * corresponding group index (e.g., <b><code>$2</code></b>). If the string
     * contains literal "$", it must be escaped with slash or else this call
     * will attempt to parse it as a group-name property.
     *
     * This is meant to be used to transform the parameter for:
     *  <ul>
     * <li>{@link Matcher#replaceAll(String)}</li>
     * <li>{@link Matcher#replaceFirst(String)}</li>
     * <li>{@link Matcher#appendReplacement(StringBuffer, String)}</li>
     * </ul>
     * @param replacementPattern the input string to be evaluated
     * @return the modified string
     * @throws PatternSyntaxException group name was not found
     */
    public String replaceProperties(String replacementPattern) {
        return replaceGroupNameWithIndex(
            new StringBuilder(replacementPattern),
            PROPERTY_PATTERN,
            "$"
        ).toString();
    }
    /**
     * Splits the given input sequence around matches of this pattern.
     *
     * <p>The array returned by this method contains each substring of the
     * input sequence that is terminated by another subsequence that matches
     * this pattern or is terminated by the end of the input sequence. The
     * substrings in the array are in the order in which they occur in the
     * input. If this pattern does not match any subsequence of the input
     * then the resulting array has just one element, namely the input
     * sequence in string form.</p>
     *
     * <p>The limit parameter controls the number of times the pattern is
     * applied and therefore affects the length of the resulting array. If
     * the limit n is greater than zero then the pattern will be applied
     * at most n - 1 times, the array's length will be no greater than n,
     * and the array's last entry will contain all input beyond the last
     * matched delimiter. If n is non-positive then the pattern will be
     * applied as many times as possible and the array can have any length.
     * If n is zero then the pattern will be applied as many times as
     * possible, the array can have any length, and trailing empty strings
     * will be discarded.</p>
     *
     * @param input The character sequence to be split
     * @param limit The result threshold, as described above
     * @return The array of strings computed by splitting the input around
     * matches of this pattern
     */
    public String[] split(CharSequence input, int limit) {
        return pattern.split(input, limit);
    }
    /**
     * Splits the given input sequence around matches of this pattern.
     *
     * @param input The character sequence to be split
     * @return The array of strings computed by splitting the input around
     * matches of this pattern
     */
    public String[] split(CharSequence input) {
        return pattern.split(input);
    }
    /**
     * Returns a string representation of this pattern
     *
     * @return the string
     */
    public String toString() {
        return namedPattern;
    }
    /**
     * Determines if the character at the specified position
     * of a string is escaped
     *
     * @param s string to evaluate
     * @param pos the position of the character to evaluate
     * @return true if the character is escaped; otherwise false
     */
    static private boolean isEscapedChar(String s, int pos) {
        return isSlashEscapedChar(s, pos) || isQuoteEscapedChar(s, pos);
    }
    /**
     * Determines if the character at the specified position
     * of a string is escaped with a backslash
     *
     * @param s string to evaluate
     * @param pos the position of the character to evaluate
     * @return true if the character is escaped; otherwise false
     */
    static private boolean isSlashEscapedChar(String s, int pos) {
        // Count the backslashes preceding this position. If it's
        // even, there is no escape and the slashes are just literals.
        // If it's odd, one of the slashes (the last one) is escaping
        // the character at the given position.
        int numSlashes = 0;
        while (pos > 0 && (s.charAt(pos - 1) == '\\')) {
            pos--;
            numSlashes++;
        }
        return numSlashes % 2 != 0;
    }
    /**
     * Determines if the character at the specified position
     * of a string is quote-escaped (between \\Q and \\E)
     *
     * @param s string to evaluate
     * @param pos the position of the character to evaluate
     * @return true if the character is quote-escaped; otherwise false
     */
    static private boolean isQuoteEscapedChar(String s, int pos) {
        boolean openQuoteFound = false;
        boolean closeQuoteFound = false;
        // find last non-escaped open-quote
        String s2 = s.substring(0, pos);
        int posOpen = pos;
        while ((posOpen = s2.lastIndexOf("\\Q", posOpen - 1)) != -1) {
            if (!isSlashEscapedChar(s2, posOpen)) {
                openQuoteFound = true;
                break;
            }
        }
        if (openQuoteFound) {
            // search remainder of string (after open-quote) for a close-quote;
            // no need to check that it's slash-escaped because it can't be
            // (the escape character itself is part of the literal when quoted)
            if (s2.indexOf("\\E", posOpen) != -1) {
                closeQuoteFound = true;
            }
        }
        return openQuoteFound && !closeQuoteFound;
    }
    /**
     * Determines if a string's character is within a regex character class
     *
     * @param s string to evaluate
     * @param pos the position of the character to evaluate
     * @return true if the character is inside a character class; otherwise false
     */
    static private boolean isInsideCharClass(String s, int pos) {
        boolean openBracketFound = false;
        boolean closeBracketFound = false;
        // find last non-escaped open-bracket
        String s2 = s.substring(0, pos);
        int posOpen = pos;
        while ((posOpen = s2.lastIndexOf('[', posOpen - 1)) != -1) {
            if (!isEscapedChar(s2, posOpen)) {
                openBracketFound = true;
                break;
            }
        }
        if (openBracketFound) {
            // search remainder of string (after open-bracket) for a close-bracket
            String s3 = s.substring(posOpen, pos);
            int posClose = -1;
            while ((posClose = s3.indexOf(']', posClose + 1)) != -1) {
                if (!isEscapedChar(s3, posClose)) {
                    closeBracketFound = true;
                    break;
                }
            }
        }
        return openBracketFound && !closeBracketFound;
    }
    /**
     * Determines if the parenthesis at the specified position
     * of a string is for a non-capturing group, which is one of
     * the flag specifiers (e.g., (?s) or (?m) or (?:pattern).
     * If the parenthesis is followed by "?", it must be a non-
     * capturing group unless it's a named group (which begins
     * with "?<"). Make sure not to confuse it with the lookbehind
     * construct ("?<=" or "?<!").
     *
     * @param s string to evaluate
     * @param pos the position of the parenthesis to evaluate
     * @return true if the parenthesis is non-capturing; otherwise false
     */
    static private boolean isNoncapturingParen(String s, int pos) {
        //int len = s.length();
        boolean isLookbehind = false;
        // code-coverage reports show that pos and the text to
        // check never exceed len in this class, so it's safe
        // to not test for it, which resolves uncovered branches
        // in Cobertura
        /*if (pos >= 0 && pos + 4 < len)*/ {
            String pre = s.substring(pos, pos+4);
            isLookbehind = pre.equals("(?<=") || pre.equals("(?<!");
        }
        return /*(pos >= 0 && pos + 2 < len) &&*/
                s.charAt(pos + 1) == '?' &&
                (isLookbehind || s.charAt(pos + 2) != '<');
    }
    /**
     * Counts the open-parentheses to the left of a string position,
     * excluding escaped parentheses
     *
     * @param s string to evaluate
     * @param pos ending position of string; characters to the left
     * of this position are evaluated
     * @return number of open parentheses
     */
    static private int countOpenParens(String s, int pos) {
        java.util.regex.Pattern p = java.util.regex.Pattern.compile("\\(");
        java.util.regex.Matcher m = p.matcher(s.subSequence(0, pos));
        int numParens = 0;
        while (m.find()) {
            // ignore parentheses inside character classes: [0-9()a-f]
            // which are just literals
            if (isInsideCharClass(s, m.start())) {
                continue;
            }
            // ignore escaped parens
            if (isEscapedChar(s, m.start())) continue;
            if (!isNoncapturingParen(s, m.start())) {
                numParens++;
            }
        }
        return numParens;
    }
    /**
     * Parses info on named capture groups from a pattern
     *
     * @param namedPattern regex the regular expression pattern to parse
     * @return list of group info for all named groups
     */
    static public Map<String,List<GroupInfo> > extractGroupInfo(String namedPattern) {
        Map<String,List<GroupInfo> > groupInfo = new LinkedHashMap<String,List<GroupInfo> >();
        java.util.regex.Matcher matcher = NAMED_GROUP_PATTERN.matcher(namedPattern);
        while(matcher.find()) {
            int pos = matcher.start();
            // ignore escaped paren
            if (isEscapedChar(namedPattern, pos)) continue;
            String name = matcher.group(INDEX_GROUP_NAME);
            int groupIndex = countOpenParens(namedPattern, pos);
            List<GroupInfo> list;
            if (groupInfo.containsKey(name)) {
                list = groupInfo.get(name);
            } else {
                list = new ArrayList<GroupInfo>();
            }
            list.add(new GroupInfo(groupIndex, pos));
            groupInfo.put(name, list);
        }
        return groupInfo;
    }
    /**
     * Replaces strings matching a pattern with another string. If the string
     * to be replaced is escaped with a slash, it is skipped.
     *
     * @param input the string to evaluate
     * @param pattern the pattern that matches the string to be replaced
     * @param replacement the string to replace the target
     * @return the modified string (original instance of {@code input})
     */
    static private StringBuilder replace(StringBuilder input, java.util.regex.Pattern pattern, String replacement) {
        java.util.regex.Matcher m = pattern.matcher(input);
        while (m.find()) {
            if (isEscapedChar(input.toString(), m.start())) {
                continue;
            }
            // since we're replacing the original string being matched,
            // we have to reset the matcher so that it searches the new
            // string
            input.replace(m.start(), m.end(), replacement);
            m.reset(input);
        }
        return input;
    }
    /**
     * Replaces referenced group names with the reference to the corresponding group
     * index (e.g., <b><code>\k<named></code></b>} to <b><code>\k2</code></b>};
     * <b><code>${named}</code></b> to <b><code>$2</code></b>}).
     * This assumes the group names have already been parsed from the pattern.
     *
     * @param input the string to evaluate
     * @param pattern the pattern that matches the string to be replaced
     * @param prefix string to prefix to the replacement (e.g., "$" or "\\")
     * @return the modified string (original instance of {@code input})
     * @throws PatternSyntaxException group name was not found
     */
    private StringBuilder replaceGroupNameWithIndex(StringBuilder input, java.util.regex.Pattern pattern, String prefix) {
        java.util.regex.Matcher m = pattern.matcher(input);
        while (m.find()) {
            if (isEscapedChar(input.toString(), m.start())) {
                continue;
            }
            int index = indexOf(m.group(INDEX_GROUP_NAME));
            if (index >= 0) {
                index++;
            } else {
                throw new PatternSyntaxException("unknown group name", input.toString(), m.start(INDEX_GROUP_NAME));
            }
            // since we're replacing the original string being matched,
            // we have to reset the matcher so that it searches the new
            // string
            input.replace(m.start(), m.end(), prefix + index);
            m.reset(input);
        }
        return input;
    }
    /**
     * Builds a {@code java.util.regex.Pattern} from a given regular expression
     * pattern (which may contain named groups) and flags
     *
     * @param namedPattern the expression to be compiled
     * @param flags Match flags, a bit mask that may include:
     * <ul>
     *  <li>{@link java.util.regex.Pattern#CASE_INSENSITIVE}</li>
     *  <li>{@link java.util.regex.Pattern#MULTILINE}</li>
     *  <li>{@link java.util.regex.Pattern#DOTALL}</li>
     *  <li>{@link java.util.regex.Pattern#UNICODE_CASE}</li>
     *  <li>{@link java.util.regex.Pattern#CANON_EQ}</li>
     *  <li>{@link java.util.regex.Pattern#UNIX_LINES}</li>
     *  <li>{@link java.util.regex.Pattern#LITERAL}</li>
     *  <li>{@link java.util.regex.Pattern#COMMENTS}</li>
     * </ul>
     * @return the standard {@code java.util.regex.Pattern}
     */
    private java.util.regex.Pattern buildStandardPattern(String namedPattern, Integer flags) {
        // replace the named-group construct with left-paren but
        // make sure we're actually looking at the construct (ignore escapes)
        StringBuilder s = new StringBuilder(namedPattern);
        s = replace(s, NAMED_GROUP_PATTERN, "(");
        s = replaceGroupNameWithIndex(s, BACKREF_NAMED_GROUP_PATTERN, "\\");
        return java.util.regex.Pattern.compile(s.toString(), flags);
    }
    /**
     * Compares the keys and values of two group-info maps
     *
     * @param a the first map to compare
     * @param b the other map to compare
     * @return {@code true} if the first map contains all of the other map's keys and values; {@code false} otherwise
     */
    private boolean groupInfoMatches(Map<String, List<GroupInfo>> a, Map<String, List<GroupInfo>> b) {
        if (a == null && b == null) {
            return true;
        }
        boolean isMatch = false;
        if (a != null && b != null) {
            if (a.isEmpty() && b.isEmpty()) {
                isMatch = true;
            } else if (a.size() == b.size()) {
                for (Entry<String, List<GroupInfo>> entry : a.entrySet()) {
                    List<GroupInfo> otherList = b.get(entry.getKey());
                    isMatch = (otherList != null);
                    if (!isMatch) {
                        break;
                    }
                    List<GroupInfo> thisList = entry.getValue();
                    isMatch = otherList.containsAll(thisList) && thisList.containsAll(otherList);
                    if (!isMatch) {
                        break;
                    }
                }
            }
        }
        return isMatch;
    }
    /*
     * (non-Javadoc)
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (!(obj instanceof Pattern)) {
            return false;
        }
        Pattern other = (Pattern)obj;
        // Compare group names via groupNames(), which derives the list from
        // groupInfo on demand. The previous implementation read the lazily
        // initialized groupNames field directly, which (a) threw an NPE from
        // Collections.disjoint when only one side had materialized its list,
        // (b) declared a "match" when the patterns shared ANY single group
        // name rather than the same names, and (c) made equality depend on
        // whether groupNames() had ever been called on each instance.
        boolean groupNamesMatch = groupNames().equals(other.groupNames());
        boolean groupInfoMatch = groupNamesMatch && groupInfoMatches(groupInfo, other.groupInfo);
        return groupNamesMatch
            && groupInfoMatch
            && namedPattern.equals(other.namedPattern)
            && pattern.flags() == other.pattern.flags()
            ;
    }
    /*
     * (non-Javadoc)
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode() {
        int hash = namedPattern.hashCode() ^ pattern.hashCode();
        // The lazily computed groupNames cache is deliberately excluded:
        // folding it in made an instance's hash change after the first call
        // to groupNames() and let two equal patterns report different
        // hashes, violating the equals/hashCode contract. groupInfo fully
        // determines the group names, so no information is lost.
        if (groupInfo != null) {
            hash ^= groupInfo.hashCode();
        }
        return hash;
    }
}
| |
package org.drip.analytics.holset;
/*
* -*- mode: java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*/
/*
* GENERATED on Fri Jan 11 19:54:07 EST 2013 ---- DO NOT DELETE
*/
/*!
* Copyright (C) 2013 Lakshmi Krishnamurthy
* Copyright (C) 2012 Lakshmi Krishnamurthy
* Copyright (C) 2011 Lakshmi Krishnamurthy
*
* This file is part of CreditAnalytics, a free-software/open-source library for
* fixed income analysts and developers - http://www.credit-trader.org
*
* CreditAnalytics is a free, full featured, fixed income credit analytics library, developed with a special focus
* towards the needs of the bonds and credit products community.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Holiday calendar for the GFR location. Generated source — the holiday list
 * below is static data covering 2004-2084.
 */
public class GFRHoliday implements org.drip.analytics.holset.LocationHoliday {

	/**
	 * Constructs an empty GFRHoliday instance.
	 */
	public GFRHoliday()
	{
	}

	/**
	 * @return The location code for this holiday set ("GFR").
	 */
	public java.lang.String getHolidayLoc()
	{
		return "GFR";
	}

	/**
	 * Builds the GFR holiday locale: the static holiday dates plus the
	 * standard (Saturday/Sunday) weekend.
	 *
	 * @return The populated holiday Locale.
	 */
	public org.drip.analytics.holiday.Locale getHolidaySet()
	{
		org.drip.analytics.holiday.Locale lh = new
			org.drip.analytics.holiday.Locale();
		lh.addStaticHoliday ("24-DEC-2004", "Christmas Eve");
		lh.addStaticHoliday ("25-MAR-2005", "Good Friday");
		lh.addStaticHoliday ("14-APR-2006", "Good Friday");
		lh.addStaticHoliday ("21-MAR-2008", "Good Friday");
		lh.addStaticHoliday ("10-APR-2009", "Good Friday");
		// Fixed label: 03-JUL-2009 is the observed Independence Day (Jul 4,
		// 2009 fell on a Saturday); Good Friday 2009 is 10-APR-2009 above.
		lh.addStaticHoliday ("03-JUL-2009", "Independence Day Observed");
		lh.addStaticHoliday ("24-DEC-2010", "Christmas Eve");
		lh.addStaticHoliday ("22-APR-2011", "Good Friday");
		lh.addStaticHoliday ("06-APR-2012", "Good Friday");
		lh.addStaticHoliday ("29-MAR-2013", "Good Friday");
		lh.addStaticHoliday ("18-APR-2014", "Good Friday");
		lh.addStaticHoliday ("03-APR-2015", "Good Friday");
		lh.addStaticHoliday ("03-JUL-2015", "Independence Day Observed");
		lh.addStaticHoliday ("25-MAR-2016", "Good Friday");
		lh.addStaticHoliday ("14-APR-2017", "Good Friday");
		lh.addStaticHoliday ("30-MAR-2018", "Good Friday");
		lh.addStaticHoliday ("19-APR-2019", "Good Friday");
		lh.addStaticHoliday ("10-APR-2020", "Good Friday");
		lh.addStaticHoliday ("03-JUL-2020", "Independence Day Observed");
		lh.addStaticHoliday ("02-APR-2021", "Good Friday");
		lh.addStaticHoliday ("24-DEC-2021", "Christmas Day Observed");
		lh.addStaticHoliday ("15-APR-2022", "Good Friday");
		lh.addStaticHoliday ("07-APR-2023", "Good Friday");
		lh.addStaticHoliday ("29-MAR-2024", "Good Friday");
		lh.addStaticHoliday ("18-APR-2025", "Good Friday");
		lh.addStaticHoliday ("03-APR-2026", "Good Friday");
		lh.addStaticHoliday ("03-JUL-2026", "Independence Day Observed");
		lh.addStaticHoliday ("26-MAR-2027", "Good Friday");
		lh.addStaticHoliday ("24-DEC-2027", "Christmas Day Observed");
		lh.addStaticHoliday ("14-APR-2028", "Good Friday");
		lh.addStaticHoliday ("30-MAR-2029", "Good Friday");
		lh.addStaticHoliday ("19-APR-2030", "Good Friday");
		lh.addStaticHoliday ("11-APR-2031", "Good Friday");
		lh.addStaticHoliday ("26-MAR-2032", "Good Friday");
		lh.addStaticHoliday ("24-DEC-2032", "Christmas Day Observed");
		lh.addStaticHoliday ("15-APR-2033", "Good Friday");
		lh.addStaticHoliday ("07-APR-2034", "Good Friday");
		lh.addStaticHoliday ("23-MAR-2035", "Good Friday");
		lh.addStaticHoliday ("11-APR-2036", "Good Friday");
		lh.addStaticHoliday ("03-APR-2037", "Good Friday");
		lh.addStaticHoliday ("03-JUL-2037", "Independence Day Observed");
		lh.addStaticHoliday ("23-APR-2038", "Good Friday");
		lh.addStaticHoliday ("24-DEC-2038", "Christmas Day Observed");
		lh.addStaticHoliday ("08-APR-2039", "Good Friday");
		lh.addStaticHoliday ("30-MAR-2040", "Good Friday");
		lh.addStaticHoliday ("19-APR-2041", "Good Friday");
		lh.addStaticHoliday ("04-APR-2042", "Good Friday");
		lh.addStaticHoliday ("27-MAR-2043", "Good Friday");
		lh.addStaticHoliday ("03-JUL-2043", "Independence Day Observed");
		lh.addStaticHoliday ("15-APR-2044", "Good Friday");
		lh.addStaticHoliday ("07-APR-2045", "Good Friday");
		lh.addStaticHoliday ("23-MAR-2046", "Good Friday");
		lh.addStaticHoliday ("12-APR-2047", "Good Friday");
		lh.addStaticHoliday ("03-APR-2048", "Good Friday");
		lh.addStaticHoliday ("03-JUL-2048", "Independence Day Observed");
		lh.addStaticHoliday ("16-APR-2049", "Good Friday");
		lh.addStaticHoliday ("24-DEC-2049", "Christmas Day Observed");
		lh.addStaticHoliday ("08-APR-2050", "Good Friday");
		lh.addStaticHoliday ("31-MAR-2051", "Good Friday");
		lh.addStaticHoliday ("19-APR-2052", "Good Friday");
		lh.addStaticHoliday ("04-APR-2053", "Good Friday");
		lh.addStaticHoliday ("27-MAR-2054", "Good Friday");
		lh.addStaticHoliday ("03-JUL-2054", "Independence Day Observed");
		lh.addStaticHoliday ("16-APR-2055", "Good Friday");
		lh.addStaticHoliday ("24-DEC-2055", "Christmas Day Observed");
		lh.addStaticHoliday ("31-MAR-2056", "Good Friday");
		lh.addStaticHoliday ("20-APR-2057", "Good Friday");
		lh.addStaticHoliday ("12-APR-2058", "Good Friday");
		lh.addStaticHoliday ("28-MAR-2059", "Good Friday");
		lh.addStaticHoliday ("16-APR-2060", "Good Friday");
		lh.addStaticHoliday ("08-APR-2061", "Good Friday");
		lh.addStaticHoliday ("24-MAR-2062", "Good Friday");
		lh.addStaticHoliday ("13-APR-2063", "Good Friday");
		lh.addStaticHoliday ("04-APR-2064", "Good Friday");
		lh.addStaticHoliday ("27-MAR-2065", "Good Friday");
		lh.addStaticHoliday ("09-APR-2066", "Good Friday");
		lh.addStaticHoliday ("01-APR-2067", "Good Friday");
		lh.addStaticHoliday ("20-APR-2068", "Good Friday");
		lh.addStaticHoliday ("12-APR-2069", "Good Friday");
		lh.addStaticHoliday ("28-MAR-2070", "Good Friday");
		lh.addStaticHoliday ("17-APR-2071", "Good Friday");
		lh.addStaticHoliday ("08-APR-2072", "Good Friday");
		lh.addStaticHoliday ("24-MAR-2073", "Good Friday");
		lh.addStaticHoliday ("13-APR-2074", "Good Friday");
		lh.addStaticHoliday ("05-APR-2075", "Good Friday");
		lh.addStaticHoliday ("17-APR-2076", "Good Friday");
		lh.addStaticHoliday ("09-APR-2077", "Good Friday");
		lh.addStaticHoliday ("01-APR-2078", "Good Friday");
		lh.addStaticHoliday ("21-APR-2079", "Good Friday");
		lh.addStaticHoliday ("05-APR-2080", "Good Friday");
		lh.addStaticHoliday ("28-MAR-2081", "Good Friday");
		lh.addStaticHoliday ("17-APR-2082", "Good Friday");
		lh.addStaticHoliday ("02-APR-2083", "Good Friday");
		lh.addStaticHoliday ("24-MAR-2084", "Good Friday");
		lh.addStandardWeekend();
		return lh;
	}
}
| |
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.tab.state;
import android.content.Context;
import android.os.StrictMode;
import android.os.SystemClock;
import androidx.annotation.IntDef;
import androidx.annotation.MainThread;
import androidx.annotation.VisibleForTesting;
import androidx.core.util.AtomicFile;
import org.chromium.base.Callback;
import org.chromium.base.ContextUtils;
import org.chromium.base.Log;
import org.chromium.base.StreamUtil;
import org.chromium.base.StrictModeContext;
import org.chromium.base.metrics.RecordHistogram;
import org.chromium.base.supplier.Supplier;
import org.chromium.base.task.AsyncTask;
import org.chromium.base.task.PostTask;
import org.chromium.base.task.SequencedTaskRunner;
import org.chromium.base.task.TaskTraits;
import org.chromium.content_public.browser.UiThreadTaskTraits;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.FileChannel.MapMode;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
/**
* {@link PersistedTabDataStorage} which uses a file for the storage
*/
public class FilePersistedTabDataStorage implements PersistedTabDataStorage {
    private static final String TAG = "FilePTDS";
    // Shared no-op completion callback used when the caller does not need to
    // know when a save/delete has finished (tests pass a real callback).
    protected static final Callback<Integer> NO_OP_CALLBACK = new Callback<Integer>() {
        @Override
        public void onResult(Integer result) {}
    };
    // Value passed to completion callbacks; tests use it to decrement a
    // semaphore that synchronizes on operation completion.
    protected static final int DECREMENT_SEMAPHORE_VAL = 1;
    private static final String sBaseDirName = "persisted_tab_data_storage";
    // Initialization-on-demand holder: the base directory (a disk access) is
    // only resolved when getOrCreateBaseStorageDirectory() is first called.
    private static class BaseStorageDirectoryHolder {
        private static File sDirectory;
        static {
            // getDir() may touch disk; temporarily permit disk writes on this
            // thread under StrictMode, restoring the previous policy after.
            StrictMode.ThreadPolicy oldPolicy = StrictMode.allowThreadDiskWrites();
            try {
                sDirectory =
                        ContextUtils.getApplicationContext().getDir(sBaseDirName, Context.MODE_PRIVATE);
            } finally {
                StrictMode.setThreadPolicy(oldPolicy);
            }
        }
    }
    // Runs file I/O tasks one at a time, in queue order, off the main thread.
    private SequencedTaskRunner mSequencedTaskRunner;
    // Tracks whether the first storage request type histogram was recorded.
    private boolean mFirstOperationRecorded;
    // Pending storage requests; manipulated on the main thread only (all
    // public entry points are @MainThread).
    @VisibleForTesting(otherwise = VisibleForTesting.PRIVATE)
    protected LinkedList<StorageRequest> mQueue = new LinkedList<>();
    protected FilePersistedTabDataStorage() {
        mSequencedTaskRunner =
                PostTask.createSequencedTaskRunner(TaskTraits.USER_BLOCKING_MAY_BLOCK);
    }
    @MainThread
    @Override
    public void save(int tabId, String dataId, Supplier<ByteBuffer> dataSupplier) {
        save(tabId, dataId, dataSupplier, NO_OP_CALLBACK);
    }
    // Callback used for test synchronization between save, restore and delete operations
    @MainThread
    @VisibleForTesting(otherwise = VisibleForTesting.PRIVATE)
    protected void save(int tabId, String dataId, Supplier<ByteBuffer> dataSupplier,
            Callback<Integer> callback) {
        // TODO(crbug.com/1059637) we should introduce a retry mechanisms
        addSaveRequest(new FileSaveRequest(tabId, dataId, dataSupplier, callback));
        processNextItemOnQueue();
    }
    @VisibleForTesting(otherwise = VisibleForTesting.PRIVATE)
    protected void addSaveRequest(FileSaveRequest fileSaveRequest) {
        // FileSaveRequest for the same tabid/data id will get overwritten
        // by new FileSaveRequest so remove if it exists in the queue.
        mQueue.remove(fileSaveRequest);
        mQueue.add(fileSaveRequest);
    }
    @MainThread
    @Override
    public void restore(int tabId, String dataId, Callback<ByteBuffer> callback) {
        addStorageRequestAndProcessNext(new FileRestoreRequest(tabId, dataId, callback));
    }
    @MainThread
    @Override
    public ByteBuffer restore(int tabId, String dataId) {
        // Synchronous variant: bypasses the queue and reads on the calling
        // thread (the null callback is never invoked on this path).
        return new FileRestoreRequest(tabId, dataId, null).executeSyncTask();
    }
    @MainThread
    @Override
    public void delete(int tabId, String dataId) {
        delete(tabId, dataId, NO_OP_CALLBACK);
    }
    // Callback used for test synchronization between save, restore and delete operations
    @MainThread
    @VisibleForTesting(otherwise = VisibleForTesting.PRIVATE)
    protected void delete(int tabId, String dataId, Callback<Integer> callback) {
        addStorageRequestAndProcessNext(new FileDeleteRequest(tabId, dataId, callback));
    }
    protected void addStorageRequestAndProcessNext(StorageRequest storageRequest) {
        mQueue.add(storageRequest);
        processNextItemOnQueue();
    }
    /**
     * @return {@link File} serialized {@link CriticalPersistedTabData} is stored in
     * @param tabId tab identifier
     * @param dataId type of data stored for the {@link Tab}
     */
    protected static File getFile(int tabId, String dataId) {
        // Filename is the concatenation of tab id and data id, e.g. "42critical".
        return new File(getOrCreateBaseStorageDirectory(),
                String.format(Locale.ENGLISH, "%d%s", tabId, dataId));
    }
    /**
     * @return base directory all persisted tab data files are stored under.
     */
    public static File getOrCreateBaseStorageDirectory() {
        return BaseStorageDirectoryHolder.sDirectory;
    }
    /**
     * Request for saving, restoring and deleting {@link PersistedTabData}
     */
    protected abstract class StorageRequest<T> {
        protected final int mTabId;
        protected final String mDataId;
        protected final File mFile;
        /**
         * @param tabId identifier for the {@link Tab}
         * @param dataId identifier for the {@link PersistedTabData}
         */
        StorageRequest(int tabId, String dataId) {
            mTabId = tabId;
            mDataId = dataId;
            mFile = FilePersistedTabDataStorage.getFile(tabId, dataId);
        }
        /**
         * @return unique identifier for the StorageRequest
         */
        String getRequestId() {
            return String.format(Locale.ENGLISH, "%d_%s", mTabId, mDataId);
        }
        /**
         * AsyncTask to execute the StorageRequest
         */
        abstract AsyncTask getAsyncTask();
        /**
         * Execute the task synchronously
         */
        abstract T executeSyncTask();
        @Override
        public boolean equals(Object other) {
            // Equality on tab id + data id (+ derived file) lets a queued save
            // be replaced by a newer save for the same tab/data pair.
            if (this == other) return true;
            if (other == null) return false;
            if (!(other instanceof StorageRequest)) return false;
            StorageRequest otherStorageRequest = (StorageRequest) other;
            return mTabId == otherStorageRequest.mTabId
                    && mDataId.equals(otherStorageRequest.mDataId)
                    && mFile.equals(otherStorageRequest.mFile);
        }
        @Override
        public int hashCode() {
            int result = 17;
            result = 31 * result + mTabId;
            result = 31 * result + mDataId.hashCode();
            result = 31 * result + mFile.hashCode();
            return result;
        }
        /**
         * @return type of storage request (save, restore or delete)
         */
        abstract @StorageRequestType int getStorageRequestType();
    }
    /**
     * Request to save {@link PersistedTabData}
     */
    protected class FileSaveRequest extends StorageRequest<Void> {
        protected Supplier<ByteBuffer> mDataSupplier;
        protected Callback<Integer> mCallback;
        /**
         * @param tabId identifier for the {@link Tab}
         * @param dataId identifier for the {@link PersistedTabData}
         * @param dataSupplier {@link Supplier} containing data to be saved
         * @param callback invoked (with DECREMENT_SEMAPHORE_VAL) on completion
         */
        FileSaveRequest(int tabId, String dataId, Supplier<ByteBuffer> dataSupplier,
                Callback<Integer> callback) {
            super(tabId, dataId);
            mDataSupplier = dataSupplier;
            mCallback = callback;
        }
        @Override
        public Void executeSyncTask() {
            ByteBuffer data = mDataSupplier.get();
            // Null data means there is nothing to persist; skip the write.
            // NOTE(review): an existing file is left untouched on this path —
            // confirm callers issue an explicit delete when data is removed.
            if (data == null) {
                mDataSupplier = null;
                return null;
            }
            FileOutputStream outputStream = null;
            boolean success = false;
            try {
                long startTime = SystemClock.elapsedRealtime();
                outputStream = new FileOutputStream(mFile);
                FileChannel fileChannel = outputStream.getChannel();
                fileChannel.write(data);
                success = true;
                RecordHistogram.recordTimesHistogram(
                        String.format(Locale.US, "Tabs.PersistedTabData.Storage.SaveTime.%s",
                                getUmaTag()),
                        SystemClock.elapsedRealtime() - startTime);
            } catch (FileNotFoundException e) {
                Log.e(TAG,
                        String.format(Locale.ENGLISH,
                                "FileNotFoundException while attempting to save file %s "
                                        + "Details: %s",
                                mFile, e.getMessage()));
            } catch (IOException e) {
                Log.e(TAG,
                        String.format(Locale.ENGLISH,
                                "IOException while attempting to save for file %s. "
                                        + " Details: %s",
                                mFile, e.getMessage()));
            } finally {
                StreamUtil.closeQuietly(outputStream);
            }
            RecordHistogram.recordBooleanHistogram(
                    "Tabs.PersistedTabData.Storage.Save." + getUmaTag(), success);
            return null;
        }
        @Override
        public AsyncTask getAsyncTask() {
            return new AsyncTask<Void>() {
                @Override
                protected Void doInBackground() {
                    return executeSyncTask();
                }
                @Override
                protected void onPostExecute(Void result) {
                    // Completion callback runs on the UI thread; then kick the
                    // next queued request.
                    PostTask.runOrPostTask(UiThreadTaskTraits.DEFAULT,
                            () -> { mCallback.onResult(DECREMENT_SEMAPHORE_VAL); });
                    processNextItemOnQueue();
                }
            };
        }
        @Override
        public boolean equals(Object other) {
            // Narrow equality to save requests only, so a queued save is not
            // deduplicated against a restore/delete for the same tab/data id.
            if (!(other instanceof FileSaveRequest)) return false;
            return super.equals(other);
        }
        @Override
        @StorageRequestType
        int getStorageRequestType() {
            return StorageRequestType.SAVE;
        }
    }
    /**
     * Request to delete a saved {@link PersistedTabData}
     */
    private class FileDeleteRequest extends StorageRequest<Void> {
        private byte[] mData;
        private Callback<Integer> mCallback;
        /**
         * @param tabId identifier for the {@link Tab}
         * @param dataId identifier for the {@link PersistedTabData}
         * @param callback invoked (with DECREMENT_SEMAPHORE_VAL) on completion
         */
        FileDeleteRequest(int tabId, String dataId, Callback<Integer> callback) {
            super(tabId, dataId);
            mCallback = callback;
        }
        @Override
        public Void executeSyncTask() {
            boolean exists = mFile.exists();
            RecordHistogram.recordBooleanHistogram(
                    "Tabs.PersistedTabData.Storage.Exists." + getUmaTag(), exists);
            // Deleting a non-existent file is a no-op, not an error.
            if (!exists) {
                return null;
            }
            boolean success = mFile.delete();
            RecordHistogram.recordBooleanHistogram(
                    "Tabs.PersistedTabData.Storage.Delete." + getUmaTag(), success);
            if (!success) {
                Log.e(TAG, String.format(Locale.ENGLISH, "Error deleting file %s", mFile));
            }
            return null;
        }
        @Override
        public AsyncTask getAsyncTask() {
            return new AsyncTask<Void>() {
                @Override
                protected Void doInBackground() {
                    return executeSyncTask();
                }
                @Override
                protected void onPostExecute(Void result) {
                    PostTask.runOrPostTask(UiThreadTaskTraits.DEFAULT,
                            () -> { mCallback.onResult(DECREMENT_SEMAPHORE_VAL); });
                    processNextItemOnQueue();
                }
            };
        }
        @Override
        public boolean equals(Object other) {
            // Narrow equality to delete requests only (see FileSaveRequest).
            if (!(other instanceof FileDeleteRequest)) return false;
            return super.equals(other);
        }
        @Override
        @StorageRequestType
        int getStorageRequestType() {
            return StorageRequestType.DELETE;
        }
    }
    /**
     * Request to restore saved serialized {@link PersistedTabData}
     */
    protected class FileRestoreRequest extends StorageRequest<ByteBuffer> {
        protected Callback<ByteBuffer> mCallback;
        /**
         * @param tabId identifier for the {@link Tab}
         * @param dataId identifier for the {@link PersistedTabData}
         * @param callback - callback to return the retrieved serialized
         * {@link PersistedTabData} in
         */
        FileRestoreRequest(int tabId, String dataId, Callback<ByteBuffer> callback) {
            super(tabId, dataId);
            mCallback = callback;
        }
        @Override
        public ByteBuffer executeSyncTask() {
            boolean success = false;
            ByteBuffer res = null;
            // Returns null on any failure; success/failure is recorded below.
            FileInputStream fileInputStream = null;
            try {
                long startTime = SystemClock.elapsedRealtime();
                AtomicFile atomicFile = new AtomicFile(mFile);
                fileInputStream = atomicFile.openRead();
                FileChannel channel = fileInputStream.getChannel();
                // Memory-map the file read-only rather than copying it into a
                // heap buffer.
                res = channel.map(MapMode.READ_ONLY, channel.position(), channel.size());
                success = true;
                RecordHistogram.recordTimesHistogram(
                        String.format(Locale.US, "Tabs.PersistedTabData.Storage.LoadTime.%s",
                                getUmaTag()),
                        SystemClock.elapsedRealtime() - startTime);
            } catch (FileNotFoundException e) {
                Log.e(TAG,
                        String.format(Locale.ENGLISH,
                                "FileNotFoundException while attempting to restore "
                                        + " %s. Details: %s",
                                mFile, e.getMessage()));
            } catch (IOException e) {
                Log.e(TAG,
                        String.format(Locale.ENGLISH,
                                "IOException while attempting to restore "
                                        + "%s. Details: %s",
                                mFile, e.getMessage()));
            }
            RecordHistogram.recordBooleanHistogram(
                    "Tabs.PersistedTabData.Storage.Restore." + getUmaTag(), success);
            return res;
        }
        @Override
        public AsyncTask getAsyncTask() {
            return new AsyncTask<ByteBuffer>() {
                @Override
                protected ByteBuffer doInBackground() {
                    return executeSyncTask();
                }
                @Override
                protected void onPostExecute(ByteBuffer res) {
                    PostTask.runOrPostTask(
                            UiThreadTaskTraits.DEFAULT, () -> { mCallback.onResult(res); });
                    processNextItemOnQueue();
                }
            };
        }
        @Override
        public boolean equals(Object other) {
            // Narrow equality to restore requests only (see FileSaveRequest).
            if (!(other instanceof FileRestoreRequest)) return false;
            return super.equals(other);
        }
        @Override
        @StorageRequestType
        int getStorageRequestType() {
            return StorageRequestType.RESTORE;
        }
    }
    // These values are persisted to logs. Entries should not be renumbered and
    // numeric values should never be reused.
    @IntDef({StorageRequestType.SAVE, StorageRequestType.RESTORE, StorageRequestType.DELETE})
    @Retention(RetentionPolicy.SOURCE)
    @interface StorageRequestType {
        int SAVE = 0;
        int RESTORE = 1;
        int DELETE = 2;
        int NUM_ENTRIES = 3;
    }
    /**
     * Dequeues the next {@link StorageRequest} (if any) and posts its
     * {@link AsyncTask} on the sequenced task runner. Also records, once, the
     * type of the very first request (see comment below).
     */
    @VisibleForTesting(otherwise = VisibleForTesting.PRIVATE)
    protected void processNextItemOnQueue() {
        if (mQueue.isEmpty()) return;
        StorageRequest storageRequest = mQueue.poll();
        // First operation should be a restore (to restore the active tab) - any other
        // operations coming in before the restore will block restoration of the active
        // tab and hurt startup latency.
        if (!mFirstOperationRecorded) {
            RecordHistogram.recordEnumeratedHistogram("Tabs.PersistedTabData.Storage.Save."
                            + getUmaTag() + ".FirstStorageRequestType",
                    storageRequest.getStorageRequestType(), StorageRequestType.NUM_ENTRIES);
            mFirstOperationRecorded = true;
        }
        storageRequest.getAsyncTask().executeOnTaskRunner(mSequencedTaskRunner);
    }
    @Override
    public String getUmaTag() {
        return "File";
    }
    @Override
    public void performMaintenance(List<Integer> tabIds, String dataId) {
        // File-backed storage has no maintenance pass; reaching here is a bug.
        assert false : "Maintenance is not available in FilePersistedTabDataStorage";
    }
    /**
     * Determines if a {@link Tab} is incognito or not based on the existence of the
     * corresponding {@link CriticalPersistedTabData} file. This involves a disk access
     * and will be slow. This method can be called from the UI thread.
     * @param tabId identifier for the {@link Tab}
     * @return true/false if the {@link Tab} is incognito based on the existence of the
     *         CriticalPersistedTabData file and null if it is not known if the
     *         {@link Tab} is incognito or not.
     */
    public static Boolean isIncognito(int tabId) {
        try (StrictModeContext ignored = StrictModeContext.allowDiskReads()) {
            String regularId =
                    PersistedTabDataConfiguration.get(CriticalPersistedTabData.class, false)
                            .getId();
            File regularFile = FilePersistedTabDataStorage.getFile(tabId, regularId);
            if (regularFile.exists()) {
                return false;
            }
            String incognitoId =
                    PersistedTabDataConfiguration.get(CriticalPersistedTabData.class, true).getId();
            File incognitoFile = FilePersistedTabDataStorage.getFile(tabId, incognitoId);
            if (incognitoFile.exists()) {
                return true;
            }
            // Neither file exists: incognito status is unknown.
            return null;
        }
    }
}
| |
package org.docksidestage.sqlite.dbflute.readonly.bsbhv;
import java.util.List;
import org.dbflute.*;
import org.dbflute.bhv.*;
import org.dbflute.bhv.readable.*;
import org.dbflute.bhv.referrer.*;
import org.dbflute.cbean.*;
import org.dbflute.cbean.chelper.HpSLSFunction;
import org.dbflute.cbean.result.*;
import org.dbflute.exception.*;
import org.dbflute.optional.OptionalEntity;
import org.dbflute.outsidesql.executor.*;
import org.docksidestage.sqlite.dbflute.readonly.exbhv.*;
import org.docksidestage.sqlite.dbflute.readonly.bsbhv.loader.*;
import org.docksidestage.sqlite.dbflute.readonly.exentity.*;
import org.docksidestage.sqlite.dbflute.readonly.bsentity.dbmeta.*;
import org.docksidestage.sqlite.dbflute.readonly.cbean.*;
/**
* The behavior of MEMBER_SERVICE as TABLE. <br>
* <pre>
* [primary key]
* MEMBER_SERVICE_ID
*
* [column]
* MEMBER_SERVICE_ID, MEMBER_ID, SERVICE_POINT_COUNT, SERVICE_RANK_CODE, REGISTER_DATETIME, REGISTER_USER, UPDATE_DATETIME, UPDATE_USER, VERSION_NO
*
* [sequence]
*
*
* [identity]
* MEMBER_SERVICE_ID
*
* [version-no]
* VERSION_NO
*
* [foreign table]
* MEMBER, SERVICE_RANK
*
* [referrer table]
*
*
* [foreign property]
* member, serviceRank
*
* [referrer property]
*
* </pre>
* @author DBFlute(AutoGenerator)
*/
public abstract class RoyBsMemberServiceBhv extends AbstractBehaviorReadable<RoyMemberService, RoyMemberServiceCB> {
// ===================================================================================
// Definition
// ==========
/*df:beginQueryPath*/
/*df:endQueryPath*/
// ===================================================================================
// DB Meta
// =======
/** {@inheritDoc} */
public RoyMemberServiceDbm asDBMeta() { return RoyMemberServiceDbm.getInstance(); }
/** {@inheritDoc} */
public String asTableDbName() { return "MEMBER_SERVICE"; }
// ===================================================================================
// New Instance
// ============
/** {@inheritDoc} */
public RoyMemberServiceCB newConditionBean() { return new RoyMemberServiceCB(); }
// ===================================================================================
// Count Select
// ============
/**
* Select the count of uniquely-selected records by the condition-bean. {IgnorePagingCondition, IgnoreSpecifyColumn}<br>
* SpecifyColumn is ignored but you can use it only to remove text type column for union's distinct.
* <pre>
* <span style="color: #70226C">int</span> count = <span style="color: #0000C0">memberServiceBhv</span>.<span style="color: #CC4747">selectCount</span>(<span style="color: #553000">cb</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">cb</span>.query().set...
* });
* </pre>
* @param cbLambda The callback for condition-bean of RoyMemberService. (NotNull)
* @return The count for the condition. (NotMinus)
*/
public int selectCount(CBCall<RoyMemberServiceCB> cbLambda) {
return facadeSelectCount(createCB(cbLambda));
}
/**
* Select the count of uniquely-selected records by the condition-bean. {IgnorePagingCondition, IgnoreSpecifyColumn}<br>
* SpecifyColumn is ignored but you can use it only to remove text type column for union's distinct.
* <pre>
* RoyMemberServiceCB cb = <span style="color: #70226C">new</span> RoyMemberServiceCB();
* cb.query().setFoo...(value);
* <span style="color: #70226C">int</span> count = <span style="color: #0000C0">memberServiceBhv</span>.<span style="color: #CC4747">selectCount</span>(cb);
* </pre>
* @param cb The condition-bean of RoyMemberService. (NotNull)
* @return The count for the condition. (NotMinus)
*/
public int selectCount(RoyMemberServiceCB cb) {
return facadeSelectCount(cb);
}
// ===================================================================================
// Entity Select
// =============
/**
* Select the entity by the condition-bean. <br>
* It returns not-null optional entity, so you should ... <br>
* <span style="color: #AD4747; font-size: 120%">If the data is always present as your business rule, alwaysPresent().</span> <br>
* <span style="color: #AD4747; font-size: 120%">If it might be no data, isPresent() and orElse(), ...</span>
* <pre>
* <span style="color: #3F7E5E">// if the data always exists as your business rule</span>
* <span style="color: #0000C0">memberServiceBhv</span>.<span style="color: #CC4747">selectEntity</span>(<span style="color: #553000">cb</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">cb</span>.query().set...
* }).<span style="color: #CC4747">alwaysPresent</span>(<span style="color: #553000">memberService</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #3F7E5E">// called if present, or exception</span>
* ... = <span style="color: #553000">memberService</span>.get...
* });
*
* <span style="color: #3F7E5E">// if it might be no data, ...</span>
* <span style="color: #0000C0">memberServiceBhv</span>.<span style="color: #CC4747">selectEntity</span>(<span style="color: #553000">cb</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">cb</span>.query().set...
* }).<span style="color: #CC4747">ifPresent</span>(<span style="color: #553000">memberService</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #3F7E5E">// called if present</span>
* ... = <span style="color: #553000">memberService</span>.get...
* }).<span style="color: #994747">orElse</span>(() <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #3F7E5E">// called if not present</span>
* });
* </pre>
* @param cbLambda The callback for condition-bean of RoyMemberService. (NotNull)
* @return The optional entity selected by the condition. (NotNull: if no data, empty entity)
* @throws EntityAlreadyDeletedException When get(), required() of return value is called and the value is null, which means entity has already been deleted (not found).
* @throws EntityDuplicatedException When the entity has been duplicated.
* @throws SelectEntityConditionNotFoundException When the condition for selecting an entity is not found.
*/
public OptionalEntity<RoyMemberService> selectEntity(CBCall<RoyMemberServiceCB> cbLambda) {
return facadeSelectEntity(createCB(cbLambda));
}
/**
* Select the entity by the condition-bean. <br>
* It returns not-null optional entity, so you should ... <br>
* <span style="color: #AD4747; font-size: 120%">If the data always exists as your business rule, alwaysPresent().</span> <br>
* <span style="color: #AD4747; font-size: 120%">If it might be no data, get() after check by isPresent() or orElse(), ...</span>
* <pre>
* RoyMemberServiceCB cb = <span style="color: #70226C">new</span> RoyMemberServiceCB();
* cb.query().set...
*
* <span style="color: #3F7E5E">// if the data always exists as your business rule</span>
* <span style="color: #0000C0">memberServiceBhv</span>.<span style="color: #DD4747">selectEntity</span>(cb)}).<span style="color: #CC4747">alwaysPresent</span>(memberService <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #3F7E5E">// called if present, or exception</span>
* ... = memberService.get...
* });
*
* <span style="color: #3F7E5E">// if it might be no data, ...</span>
* <span style="color: #0000C0">memberServiceBhv</span>.<span style="color: #CC4747">selectEntity</span>(cb).<span style="color: #CC4747">ifPresent</span>(memberService <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #3F7E5E">// called if present</span>
* ... = memberService.get...
* }).<span style="color: #994747">orElse</span>(() <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #3F7E5E">// called if not present</span>
* });
* </pre>
* @param cb The condition-bean of RoyMemberService. (NotNull)
* @return The optional entity selected by the condition. (NotNull: if no data, empty entity)
* @throws EntityAlreadyDeletedException When get(), required() of return value is called and the value is null, which means entity has already been deleted (not found).
* @throws EntityDuplicatedException When the entity has been duplicated.
* @throws SelectEntityConditionNotFoundException When the condition for selecting an entity is not found.
*/
public OptionalEntity<RoyMemberService> selectEntity(RoyMemberServiceCB cb) {
return facadeSelectEntity(cb);
}
protected OptionalEntity<RoyMemberService> facadeSelectEntity(RoyMemberServiceCB cb) {
return doSelectOptionalEntity(cb, typeOfSelectedEntity());
}
protected <ENTITY extends RoyMemberService> OptionalEntity<ENTITY> doSelectOptionalEntity(RoyMemberServiceCB cb, Class<? extends ENTITY> tp) {
return createOptionalEntity(doSelectEntity(cb, tp), cb);
}
protected Entity doReadEntity(ConditionBean cb) { return facadeSelectEntity(downcast(cb)).orElse(null); }
/**
* Select the entity by the condition-bean with deleted check. <br>
* <span style="color: #AD4747; font-size: 120%">If the data is always present as your business rule, this method is good.</span>
* <pre>
* RoyMemberService <span style="color: #553000">memberService</span> = <span style="color: #0000C0">memberServiceBhv</span>.<span style="color: #CC4747">selectEntityWithDeletedCheck</span>(cb <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> cb.acceptPK(1));
* ... = <span style="color: #553000">memberService</span>.get...(); <span style="color: #3F7E5E">// the entity always be not null</span>
* </pre>
* @param cbLambda The callback for condition-bean of RoyMemberService. (NotNull)
* @return The entity selected by the condition. (NotNull: if no data, throws exception)
* @throws EntityAlreadyDeletedException When the entity has already been deleted. (not found)
* @throws EntityDuplicatedException When the entity has been duplicated.
* @throws SelectEntityConditionNotFoundException When the condition for selecting an entity is not found.
*/
public RoyMemberService selectEntityWithDeletedCheck(CBCall<RoyMemberServiceCB> cbLambda) {
return facadeSelectEntityWithDeletedCheck(createCB(cbLambda));
}
/**
* Select the entity by the condition-bean with deleted check. <br>
* <span style="color: #AD4747; font-size: 120%">If the data is always present as your business rule, this method is good.</span>
* <pre>
* RoyMemberServiceCB cb = <span style="color: #70226C">new</span> RoyMemberServiceCB();
* cb.query().set...;
* RoyMemberService memberService = <span style="color: #0000C0">memberServiceBhv</span>.<span style="color: #CC4747">selectEntityWithDeletedCheck</span>(cb);
* ... = memberService.get...(); <span style="color: #3F7E5E">// the entity always be not null</span>
* </pre>
* @param cb The condition-bean of RoyMemberService. (NotNull)
* @return The entity selected by the condition. (NotNull: if no data, throws exception)
* @throws EntityAlreadyDeletedException When the entity has already been deleted. (not found)
* @throws EntityDuplicatedException When the entity has been duplicated.
* @throws SelectEntityConditionNotFoundException When the condition for selecting an entity is not found.
*/
public RoyMemberService selectEntityWithDeletedCheck(RoyMemberServiceCB cb) {
return facadeSelectEntityWithDeletedCheck(cb);
}
/**
* Select the entity by the primary-key value.
* @param memberServiceId : PK, ID, NotNull, INTEGER(2000000000, 10). (NotNull)
* @return The optional entity selected by the PK. (NotNull: if no data, empty entity)
* @throws EntityAlreadyDeletedException When get(), required() of return value is called and the value is null, which means entity has already been deleted (not found).
* @throws EntityDuplicatedException When the entity has been duplicated.
* @throws SelectEntityConditionNotFoundException When the condition for selecting an entity is not found.
*/
public OptionalEntity<RoyMemberService> selectByPK(Integer memberServiceId) {
return facadeSelectByPK(memberServiceId);
}
    // Facade hop that fixes the selected entity type for the optional PK select.
    protected OptionalEntity<RoyMemberService> facadeSelectByPK(Integer memberServiceId) {
        return doSelectOptionalByPK(memberServiceId, typeOfSelectedEntity());
    }
    // Core PK select: prepares a PK-keyed condition-bean and runs the entity select.
    protected <ENTITY extends RoyMemberService> ENTITY doSelectByPK(Integer memberServiceId, Class<? extends ENTITY> tp) {
        return doSelectEntity(xprepareCBAsPK(memberServiceId), tp);
    }
    // Wraps the plain PK select result into an OptionalEntity keyed by the PK value.
    protected <ENTITY extends RoyMemberService> OptionalEntity<ENTITY> doSelectOptionalByPK(Integer memberServiceId, Class<? extends ENTITY> tp) {
        return createOptionalEntity(doSelectByPK(memberServiceId, tp), memberServiceId);
    }
    // Builds a condition-bean keyed by the primary key; rejects a null PK early.
    protected RoyMemberServiceCB xprepareCBAsPK(Integer memberServiceId) {
        assertObjectNotNull("memberServiceId", memberServiceId);
        return newConditionBean().acceptPK(memberServiceId);
    }
// ===================================================================================
// List Select
// ===========
/**
* Select the list as result bean.
* <pre>
* ListResultBean<RoyMemberService> <span style="color: #553000">memberServiceList</span> = <span style="color: #0000C0">memberServiceBhv</span>.<span style="color: #CC4747">selectList</span>(<span style="color: #553000">cb</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">cb</span>.query().set...;
* <span style="color: #553000">cb</span>.query().addOrderBy...;
* });
* <span style="color: #70226C">for</span> (RoyMemberService <span style="color: #553000">memberService</span> : <span style="color: #553000">memberServiceList</span>) {
* ... = <span style="color: #553000">memberService</span>.get...;
* }
* </pre>
* @param cbLambda The callback for condition-bean of RoyMemberService. (NotNull)
* @return The result bean of selected list. (NotNull: if no data, returns empty list)
* @throws DangerousResultSizeException When the result size is over the specified safety size.
*/
    // Lambda-accepting variant: builds the condition-bean from the callback,
    // then delegates to the list-select facade.
    public ListResultBean<RoyMemberService> selectList(CBCall<RoyMemberServiceCB> cbLambda) {
        return facadeSelectList(createCB(cbLambda));
    }
/**
* Select the list as result bean.
* <pre>
* RoyMemberServiceCB cb = <span style="color: #70226C">new</span> RoyMemberServiceCB();
* cb.query().set...;
* cb.query().addOrderBy...;
* ListResultBean<RoyMemberService> <span style="color: #553000">memberServiceList</span> = <span style="color: #0000C0">memberServiceBhv</span>.<span style="color: #CC4747">selectList</span>(cb);
* <span style="color: #70226C">for</span> (RoyMemberService memberService : <span style="color: #553000">memberServiceList</span>) {
* ... = memberService.get...;
* }
* </pre>
* @param cb The condition-bean of RoyMemberService. (NotNull)
* @return The result bean of selected list. (NotNull: if no data, returns empty list)
* @throws DangerousResultSizeException When the result size is over the specified safety size.
*/
    // Condition-bean variant: delegates straight to the list-select facade.
    public ListResultBean<RoyMemberService> selectList(RoyMemberServiceCB cb) {
        return facadeSelectList(cb);
    }
    // Always true for this generated behavior: entity-derived mapping is enabled.
    @Override
    protected boolean isEntityDerivedMappable() { return true; }
// ===================================================================================
// Page Select
// ===========
/**
* Select the page as result bean. <br>
* (both count-select and paging-select are executed)
* <pre>
* PagingResultBean<RoyMemberService> <span style="color: #553000">page</span> = <span style="color: #0000C0">memberServiceBhv</span>.<span style="color: #CC4747">selectPage</span>(<span style="color: #553000">cb</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">cb</span>.query().set...
* <span style="color: #553000">cb</span>.query().addOrderBy...
* <span style="color: #553000">cb</span>.<span style="color: #CC4747">paging</span>(20, 3); <span style="color: #3F7E5E">// 20 records per a page and current page number is 3</span>
* });
* <span style="color: #70226C">int</span> allRecordCount = <span style="color: #553000">page</span>.getAllRecordCount();
* <span style="color: #70226C">int</span> allPageCount = <span style="color: #553000">page</span>.getAllPageCount();
* <span style="color: #70226C">boolean</span> isExistPrePage = <span style="color: #553000">page</span>.isExistPrePage();
* <span style="color: #70226C">boolean</span> isExistNextPage = <span style="color: #553000">page</span>.isExistNextPage();
* ...
* <span style="color: #70226C">for</span> (RoyMemberService memberService : <span style="color: #553000">page</span>) {
* ... = memberService.get...;
* }
* </pre>
* @param cbLambda The callback for condition-bean of RoyMemberService. (NotNull)
* @return The result bean of selected page. (NotNull: if no data, returns bean as empty list)
* @throws DangerousResultSizeException When the result size is over the specified safety size.
*/
    // Lambda-accepting variant: builds the condition-bean from the callback,
    // then delegates to the page-select facade (count-select + paging-select).
    public PagingResultBean<RoyMemberService> selectPage(CBCall<RoyMemberServiceCB> cbLambda) {
        return facadeSelectPage(createCB(cbLambda));
    }
/**
* Select the page as result bean. <br>
* (both count-select and paging-select are executed)
* <pre>
* RoyMemberServiceCB cb = <span style="color: #70226C">new</span> RoyMemberServiceCB();
* cb.query().setFoo...(value);
* cb.query().addOrderBy_Bar...();
* cb.<span style="color: #CC4747">paging</span>(20, 3); <span style="color: #3F7E5E">// 20 records per a page and current page number is 3</span>
* PagingResultBean<RoyMemberService> <span style="color: #553000">page</span> = <span style="color: #0000C0">memberServiceBhv</span>.<span style="color: #CC4747">selectPage</span>(cb);
* <span style="color: #70226C">int</span> allRecordCount = <span style="color: #553000">page</span>.getAllRecordCount();
* <span style="color: #70226C">int</span> allPageCount = <span style="color: #553000">page</span>.getAllPageCount();
* <span style="color: #70226C">boolean</span> isExistPrePage = <span style="color: #553000">page</span>.isExistPrePage();
* <span style="color: #70226C">boolean</span> isExistNextPage = <span style="color: #553000">page</span>.isExistNextPage();
* ...
* <span style="color: #70226C">for</span> (RoyMemberService memberService : <span style="color: #553000">page</span>) {
* ... = memberService.get...();
* }
* </pre>
* @param cb The condition-bean of RoyMemberService. (NotNull)
* @return The result bean of selected page. (NotNull: if no data, returns bean as empty list)
* @throws DangerousResultSizeException When the result size is over the specified safety size.
*/
    // Condition-bean variant: delegates straight to the page-select facade.
    public PagingResultBean<RoyMemberService> selectPage(RoyMemberServiceCB cb) {
        return facadeSelectPage(cb);
    }
// ===================================================================================
// Cursor Select
// =============
/**
* Select the cursor by the condition-bean.
* <pre>
* <span style="color: #0000C0">memberServiceBhv</span>.<span style="color: #CC4747">selectCursor</span>(<span style="color: #553000">cb</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">cb</span>.query().set...
* }, <span style="color: #553000">member</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* ... = <span style="color: #553000">member</span>.getMemberName();
* });
* </pre>
* @param cbLambda The callback for condition-bean of RoyMemberService. (NotNull)
* @param entityLambda The handler of entity row of RoyMemberService. (NotNull)
*/
    // Lambda-accepting variant: builds the condition-bean from the callback and
    // streams each row to the entity handler via the cursor facade.
    public void selectCursor(CBCall<RoyMemberServiceCB> cbLambda, EntityRowHandler<RoyMemberService> entityLambda) {
        facadeSelectCursor(createCB(cbLambda), entityLambda);
    }
/**
* Select the cursor by the condition-bean.
* <pre>
* RoyMemberServiceCB cb = <span style="color: #70226C">new</span> RoyMemberServiceCB();
* cb.query().set...
* <span style="color: #0000C0">memberServiceBhv</span>.<span style="color: #CC4747">selectCursor</span>(cb, <span style="color: #553000">member</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* ... = <span style="color: #553000">member</span>.getMemberName();
* });
* </pre>
* @param cb The condition-bean of RoyMemberService. (NotNull)
* @param entityRowHandler The handler of entity row of RoyMemberService. (NotNull)
*/
    // Condition-bean variant: delegates straight to the cursor facade.
    public void selectCursor(RoyMemberServiceCB cb, EntityRowHandler<RoyMemberService> entityRowHandler) {
        facadeSelectCursor(cb, entityRowHandler);
    }
// ===================================================================================
// Scalar Select
// =============
/**
* Select the scalar value derived by a function from uniquely-selected records. <br>
* You should call a function method after this method called like as follows:
* <pre>
* <span style="color: #0000C0">memberServiceBhv</span>.<span style="color: #CC4747">selectScalar</span>(Date.class).max(<span style="color: #553000">cb</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">cb</span>.specify().<span style="color: #CC4747">column...</span>; <span style="color: #3F7E5E">// required for the function</span>
* <span style="color: #553000">cb</span>.query().set...
* });
* </pre>
* @param <RESULT> The type of result.
* @param resultType The type of result. (NotNull)
* @return The scalar function object to specify function for scalar value. (NotNull)
*/
    // Entry point for scalar-function selects; the returned function object
    // (max/min/sum/... per the javadoc example) drives the actual query.
    public <RESULT> HpSLSFunction<RoyMemberServiceCB, RESULT> selectScalar(Class<RESULT> resultType) {
        return facadeScalarSelect(resultType);
    }
// ===================================================================================
// Sequence
// ========
    @Override
    protected Number doReadNextVal() {
        // This table is not backed by a database sequence, so next-value reads
        // are rejected with an explicit error naming the table.
        String msg = "This table is NOT related to sequence: " + asTableDbName();
        throw new UnsupportedOperationException(msg);
    }
// ===================================================================================
// Load Referrer
// =============
/**
* Load referrer for the list by the referrer loader.
* <pre>
* List<Member> <span style="color: #553000">memberList</span> = <span style="color: #0000C0">memberBhv</span>.selectList(<span style="color: #553000">cb</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">cb</span>.query().set...
* });
* memberBhv.<span style="color: #CC4747">load</span>(<span style="color: #553000">memberList</span>, <span style="color: #553000">memberLoader</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">memberLoader</span>.<span style="color: #CC4747">loadPurchase</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">purchaseCB</span>.setupSelect...
* <span style="color: #553000">purchaseCB</span>.query().set...
* <span style="color: #553000">purchaseCB</span>.query().addOrderBy...
* }); <span style="color: #3F7E5E">// you can also load nested referrer from here</span>
* <span style="color: #3F7E5E">//}).withNestedReferrer(purchaseLoader -> {</span>
* <span style="color: #3F7E5E">// purchaseLoader.loadPurchasePayment(...);</span>
* <span style="color: #3F7E5E">//});</span>
*
* <span style="color: #3F7E5E">// you can also pull out foreign table and load its referrer</span>
* <span style="color: #3F7E5E">// (setupSelect of the foreign table should be called)</span>
* <span style="color: #3F7E5E">//memberLoader.pulloutMemberStatus().loadMemberLogin(...)</span>
* });
* <span style="color: #70226C">for</span> (Member member : <span style="color: #553000">memberList</span>) {
* List<Purchase> purchaseList = member.<span style="color: #CC4747">getPurchaseList()</span>;
* <span style="color: #70226C">for</span> (Purchase purchase : purchaseList) {
* ...
* }
* }
* </pre>
* About internal policy, the value of primary key (and others too) is treated as case-insensitive. <br>
* The condition-bean, which the set-upper provides, has order by FK before callback.
* @param memberServiceList The entity list of memberService. (NotNull)
* @param loaderLambda The callback to handle the referrer loader for actually loading referrer. (NotNull)
*/
    public void load(List<RoyMemberService> memberServiceList, ReferrerLoaderHandler<RoyLoaderOfMemberService> loaderLambda) {
        xassLRArg(memberServiceList, loaderLambda); // argument null checks
        // Ready a referrer loader for the whole list and hand it to the callback.
        loaderLambda.handle(new RoyLoaderOfMemberService().ready(memberServiceList, _behaviorSelector));
    }
/**
* Load referrer for the entity by the referrer loader.
* <pre>
* Member <span style="color: #553000">member</span> = <span style="color: #0000C0">memberBhv</span>.selectEntityWithDeletedCheck(<span style="color: #553000">cb</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> <span style="color: #553000">cb</span>.acceptPK(1));
* <span style="color: #0000C0">memberBhv</span>.<span style="color: #CC4747">load</span>(<span style="color: #553000">member</span>, <span style="color: #553000">memberLoader</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">memberLoader</span>.<span style="color: #CC4747">loadPurchase</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">purchaseCB</span>.setupSelect...
* <span style="color: #553000">purchaseCB</span>.query().set...
* <span style="color: #553000">purchaseCB</span>.query().addOrderBy...
* }); <span style="color: #3F7E5E">// you can also load nested referrer from here</span>
* <span style="color: #3F7E5E">//}).withNestedReferrer(purchaseLoader -> {</span>
* <span style="color: #3F7E5E">// purchaseLoader.loadPurchasePayment(...);</span>
* <span style="color: #3F7E5E">//});</span>
*
* <span style="color: #3F7E5E">// you can also pull out foreign table and load its referrer</span>
* <span style="color: #3F7E5E">// (setupSelect of the foreign table should be called)</span>
* <span style="color: #3F7E5E">//memberLoader.pulloutMemberStatus().loadMemberLogin(...)</span>
* });
* List<Purchase> purchaseList = <span style="color: #553000">member</span>.<span style="color: #CC4747">getPurchaseList()</span>;
* <span style="color: #70226C">for</span> (Purchase purchase : purchaseList) {
* ...
* }
* </pre>
* About internal policy, the value of primary key (and others too) is treated as case-insensitive. <br>
* The condition-bean, which the set-upper provides, has order by FK before callback.
* @param memberService The entity of memberService. (NotNull)
* @param loaderLambda The callback to handle the referrer loader for actually loading referrer. (NotNull)
*/
    public void load(RoyMemberService memberService, ReferrerLoaderHandler<RoyLoaderOfMemberService> loaderLambda) {
        xassLRArg(memberService, loaderLambda); // argument null checks
        // Wrap the single entity in a list, then ready the loader as in the list variant.
        loaderLambda.handle(new RoyLoaderOfMemberService().ready(xnewLRAryLs(memberService), _behaviorSelector));
    }
// ===================================================================================
// Pull out Relation
// =================
/**
* Pull out the list of foreign table 'RoyMember'.
* @param memberServiceList The list of memberService. (NotNull, EmptyAllowed)
* @return The list of foreign table. (NotNull, EmptyAllowed, NotNullElement)
*/
    // Gathers the 'member' foreign entity of each row via the internal pullout helper.
    public List<RoyMember> pulloutMember(List<RoyMemberService> memberServiceList)
    { return helpPulloutInternally(memberServiceList, "member"); }
/**
* Pull out the list of foreign table 'RoyServiceRank'.
* @param memberServiceList The list of memberService. (NotNull, EmptyAllowed)
* @return The list of foreign table. (NotNull, EmptyAllowed, NotNullElement)
*/
    // Gathers the 'serviceRank' foreign entity of each row via the internal pullout helper.
    public List<RoyServiceRank> pulloutServiceRank(List<RoyMemberService> memberServiceList)
    { return helpPulloutInternally(memberServiceList, "serviceRank"); }
// ===================================================================================
// Extract Column
// ==============
/**
* Extract the value list of (single) primary key memberServiceId.
* @param memberServiceList The list of memberService. (NotNull, EmptyAllowed)
* @return The list of the column value. (NotNull, EmptyAllowed, NotNullElement)
*/
    // Extracts each row's memberServiceId via the internal column-extraction helper.
    public List<Integer> extractMemberServiceIdList(List<RoyMemberService> memberServiceList)
    { return helpExtractListInternally(memberServiceList, "memberServiceId"); }
// ===================================================================================
// OutsideSql
// ==========
/**
* Prepare the all facade executor of outside-SQL to execute it.
* <pre>
* <span style="color: #3F7E5E">// main style</span>
 * memberServiceBhv.outsideSql().selectEntity(pmb); <span style="color: #3F7E5E">// optional</span>
 * memberServiceBhv.outsideSql().selectList(pmb); <span style="color: #3F7E5E">// ListResultBean</span>
 * memberServiceBhv.outsideSql().selectPage(pmb); <span style="color: #3F7E5E">// PagingResultBean</span>
 * memberServiceBhv.outsideSql().selectPagedListOnly(pmb); <span style="color: #3F7E5E">// ListResultBean</span>
 * memberServiceBhv.outsideSql().selectCursor(pmb, handler); <span style="color: #3F7E5E">// (by handler)</span>
 * memberServiceBhv.outsideSql().execute(pmb); <span style="color: #3F7E5E">// int (updated count)</span>
 * memberServiceBhv.outsideSql().call(pmb); <span style="color: #3F7E5E">// void (pmb has OUT parameters)</span>
 *
 * <span style="color: #3F7E5E">// traditional style</span>
 * memberServiceBhv.outsideSql().traditionalStyle().selectEntity(path, pmb, entityType);
 * memberServiceBhv.outsideSql().traditionalStyle().selectList(path, pmb, entityType);
 * memberServiceBhv.outsideSql().traditionalStyle().selectPage(path, pmb, entityType);
 * memberServiceBhv.outsideSql().traditionalStyle().selectPagedListOnly(path, pmb, entityType);
 * memberServiceBhv.outsideSql().traditionalStyle().selectCursor(path, pmb, handler);
 * memberServiceBhv.outsideSql().traditionalStyle().execute(path, pmb);
 *
 * <span style="color: #3F7E5E">// options</span>
 * memberServiceBhv.outsideSql().removeBlockComment().selectList()
 * memberServiceBhv.outsideSql().removeLineComment().selectList()
 * memberServiceBhv.outsideSql().formatSql().selectList()
* </pre>
* <p>The invoker of behavior command should be not null when you call this method.</p>
* @return The new-created all facade executor of outside-SQL. (NotNull)
*/
    public OutsideSqlAllFacadeExecutor<RoyMemberServiceBhv> outsideSql() {
        // Delegates to the framework factory for the outside-SQL executor.
        return doOutsideSql();
    }
// ===================================================================================
// Optimistic Lock Info
// ====================
    // Optimistic-lock support: reports whether the entity carries a version number.
    @Override
    protected boolean hasVersionNoValue(Entity et) { return downcast(et).getVersionNo() != null; }
// ===================================================================================
// Type Helper
// ===========
    /** @return The entity type used for select results. */
    protected Class<? extends RoyMemberService> typeOfSelectedEntity() { return RoyMemberService.class; }
    /** @return The entity type this behavior handles. */
    protected Class<RoyMemberService> typeOfHandlingEntity() { return RoyMemberService.class; }
    /** @return The condition-bean type this behavior handles. */
    protected Class<RoyMemberServiceCB> typeOfHandlingConditionBean() { return RoyMemberServiceCB.class; }
}
| |
package ru.job4j.map;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
/**
* Created by tgenman on 4/24/18.
* @param <K> Key
* @param <V> Value
*/
/**
 * Simplified hash map with exactly one entry per bucket (no chaining): an
 * insert whose key collides with a different existing key is rejected rather
 * than chained.
 *
 * <p>Not thread-safe.</p>
 *
 * @param <K> key type
 * @param <V> value type
 */
public class SimpleHashMap<K, V> {
    /** Load-factor threshold at which the table grows. */
    private static final double COEFFICIENT_OF_CAPACITY_MAP = 0.75;
    /** Growth factor applied to the table size on resize. */
    private static final int RATE_OF_INCREASING_MAP = 2;
    /** Current number of buckets in the table. */
    private int sizeOfTable = 100;
    /** Bucket array; each slot holds a single Pair or null. */
    private Object[] containerOfBuckets = new Object[sizeOfTable];
    /** Number of entries currently stored. */
    private int countOfElements = 0;

    /**
     * Number of entries in the map.
     * @return entry count
     */
    public int size() {
        return countOfElements;
    }

    /**
     * Inserts or updates an entry. A null key is rejected (previously it
     * caused a NullPointerException). If the target bucket already holds the
     * same key, the value is replaced; if it holds a different (colliding)
     * key, the insert is rejected, as in the original design.
     *
     * @param key key; null is rejected
     * @param value value to store
     * @return true if the entry was stored or its value updated
     */
    public boolean insert(K key, V value) {
        if (key == null) {
            return false;
        }
        int index = getNumberOfBucket(key);
        Object bucket = containerOfBuckets[index];
        if (bucket == null) {
            containerOfBuckets[index] = new Pair(key, value);
            countOfElements++;
            checkNeedOfIncreaseMap();
            return true;
        }
        Pair pair = (Pair) bucket;
        if (key.equals(pair.getKey())) {
            // Same key: replace the value instead of silently dropping it.
            pair.setValue(value);
            return true;
        }
        // Different key hashed to the same bucket: this simplified map rejects it.
        return false;
    }

    /**
     * Looks up a value by key.
     * @param key key (null yields null)
     * @return the stored value, or null when absent
     */
    public V get(K key) {
        if (key == null) {
            return null;
        }
        Object bucket = containerOfBuckets[getNumberOfBucket(key)];
        if (bucket != null) {
            Pair pair = (Pair) bucket;
            // Verify the key: a colliding entry for a different key must not match.
            if (key.equals(pair.getKey())) {
                return pair.getValue();
            }
        }
        return null;
    }

    /**
     * Removes the entry for the given key, if present.
     * @param key key
     * @return true if an entry was removed
     */
    public boolean delete(K key) {
        if (key == null) {
            return false;
        }
        int index = getNumberOfBucket(key);
        Object bucket = containerOfBuckets[index];
        // Only remove when the stored key actually matches (collision safety).
        if (bucket != null && key.equals(((Pair) bucket).getKey())) {
            containerOfBuckets[index] = null;
            countOfElements--; // keep size() consistent after removal
            return true;
        }
        return false;
    }

    /**
     * Checks whether the map contains the given key.
     * @param key key
     * @return true if an entry with that key exists
     */
    public boolean contains(K key) {
        if (key == null) {
            return false;
        }
        // O(1) bucket probe with a key check, instead of a full iterator scan.
        Object bucket = containerOfBuckets[getNumberOfBucket(key)];
        return bucket != null && key.equals(((Pair) bucket).getKey());
    }

    /**
     * Maps a key to its bucket index. Math.floorMod keeps the result
     * non-negative even when hashCode() is negative (plain % would have
     * produced a negative index and an ArrayIndexOutOfBoundsException).
     *
     * @param key non-null key
     * @return bucket index in [0, sizeOfTable)
     */
    private int getNumberOfBucket(K key) {
        return Math.floorMod(key.hashCode(), sizeOfTable);
    }

    /**
     * Grows the table when the load factor (entries / buckets) reaches the
     * threshold. The original compared an integer remainder against 0.75,
     * which is not a load-factor check.
     */
    private void checkNeedOfIncreaseMap() {
        if ((double) countOfElements / sizeOfTable >= COEFFICIENT_OF_CAPACITY_MAP) {
            increaseContainerOfBuckets();
        }
    }

    /**
     * Doubles the bucket array and rehashes every entry into its new bucket.
     * Bucket indices depend on the table size, so a plain positional copy
     * (the original copied slot i to slot 2*i) would strand entries where
     * lookups can no longer find them.
     */
    private void increaseContainerOfBuckets() {
        Object[] oldBuckets = containerOfBuckets;
        sizeOfTable = sizeOfTable * RATE_OF_INCREASING_MAP;
        containerOfBuckets = new Object[sizeOfTable];
        for (Object bucket : oldBuckets) {
            if (bucket != null) {
                Pair pair = (Pair) bucket;
                containerOfBuckets[getNumberOfBucket(pair.getKey())] = pair;
            }
        }
    }

    /**
     * Iterator over the stored pairs, in bucket order.
     * @return iterator
     */
    public Iterator<Pair> iterator() {
        return new MapIterator<Pair>();
    }

    /**
     * Iterator that walks the bucket array, skipping empty slots. The
     * original kept overwriting nextIndex with every occupied slot it saw,
     * so it jumped straight to the LAST entry and returned only one element;
     * this version stops at the FIRST occupied slot each time.
     *
     * @param <E> element type
     */
    private class MapIterator<E> implements Iterator<E> {
        /** Index of the next occupied bucket, or -1 when exhausted. */
        private int nextIndex;
        /** Index of the most recently returned bucket. */
        private int lastReturned = -1;

        MapIterator() {
            this.nextIndex = findOccupied(0);
        }

        /** Returns the first occupied bucket index at or after {@code from}, or -1. */
        private int findOccupied(int from) {
            for (int i = from; i < sizeOfTable; i++) {
                if (containerOfBuckets[i] != null) {
                    return i;
                }
            }
            return -1;
        }

        @Override
        public boolean hasNext() {
            return nextIndex != -1;
        }

        @Override
        @SuppressWarnings("unchecked")
        public E next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            this.lastReturned = this.nextIndex;
            E result = (E) containerOfBuckets[lastReturned];
            this.nextIndex = findOccupied(lastReturned + 1);
            return result;
        }
    }

    /**
     * Single key/value entry.
     */
    private class Pair implements Map.Entry<K, V> {
        /** Immutable key. */
        private final K key;
        /** Mutable value. */
        private V value;

        /**
         * Creates a pair.
         * @param key key
         * @param value value
         */
        private Pair(K key, V value) {
            this.key = key;
            this.value = value;
        }

        /** @return the key */
        public K getKey() {
            return key;
        }

        /** @return the current value */
        public V getValue() {
            return value;
        }

        @Override
        public V setValue(V value) {
            final V oldValue = this.value;
            this.value = value;
            return oldValue;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            // Guard against null and foreign types before casting (the original
            // cast unconditionally and could throw ClassCastException/NPE).
            if (!(o instanceof Map.Entry)) {
                return false;
            }
            Map.Entry<?, ?> pair = (Map.Entry<?, ?>) o;
            return (key == null ? pair.getKey() == null : key.equals(pair.getKey()))
                    && (value == null ? pair.getValue() == null : value.equals(pair.getValue()));
        }

        @Override
        public int hashCode() {
            // Matches the Map.Entry hashCode contract: key hash XOR value hash.
            return (key == null ? 0 : key.hashCode())
                    ^ (value == null ? 0 : value.hashCode());
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.HalfFloatPoint;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.queries.BlendedTermQuery;
import org.apache.lucene.queries.CommonTermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.CoveringQuery;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.FilterScorer;
import org.apache.lucene.search.FilteredDocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanNotQuery;
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.Version;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
import static org.elasticsearch.common.network.InetAddresses.forString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
public class CandidateQueryTests extends ESSingleNodeTestCase {
private Directory directory;
private IndexWriter indexWriter;
private DocumentMapper documentMapper;
private DirectoryReader directoryReader;
private MapperService mapperService;
private PercolatorFieldMapper fieldMapper;
private PercolatorFieldMapper.FieldType fieldType;
private List<Query> queries;
private PercolateQuery.QueryStore queryStore;
    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        // Make the percolator plugin available to the single test node.
        return Collections.singleton(PercolatorPlugin.class);
    }
    @Before
    public void init() throws Exception {
        // Lucene index that will hold the indexed percolator query documents.
        directory = newDirectory();
        IndexWriterConfig config = new IndexWriterConfig(new WhitespaceAnalyzer());
        // No background merges, so the segment layout stays deterministic for the test.
        config.setMergePolicy(NoMergePolicy.INSTANCE);
        indexWriter = new IndexWriter(directory, config);
        // Single-node ES index whose mapper service parses the mappings below.
        String indexName = "test";
        IndexService indexService = createIndex(indexName, Settings.EMPTY);
        mapperService = indexService.mapperService();
        // Document mapping covering the numeric/ip/keyword fields exercised by the tests.
        String mapper = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties")
            .startObject("int_field").field("type", "integer").endObject()
            .startObject("long_field").field("type", "long").endObject()
            .startObject("half_float_field").field("type", "half_float").endObject()
            .startObject("float_field").field("type", "float").endObject()
            .startObject("double_field").field("type", "double").endObject()
            .startObject("ip_field").field("type", "ip").endObject()
            .startObject("field").field("type", "keyword").endObject()
            .endObject().endObject().endObject().string();
        documentMapper = mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE, true);
        // Percolator field ("query_field") merged into the same type as a second mapping.
        String queryField = "query_field";
        String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject(queryField).field("type", "percolator").endObject().endObject()
            .endObject().endObject().string();
        mapperService.merge("type", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true);
        fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper("type").mappers().getMapper(queryField);
        fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType();
        queries = new ArrayList<>();
        // Query store resolves a percolator query by its doc id from the in-memory list.
        queryStore = ctx -> docId -> this.queries.get(docId);
    }
@After
public void deinit() throws Exception {
directoryReader.close();
directory.close();
}
    public void testDuel() throws Exception {
        // Factories that produce one query per document id: term-level queries,
        // span/wildcard/prefix/custom queries, and assorted boolean combinations.
        List<Function<String, Query>> queryFunctions = new ArrayList<>();
        queryFunctions.add((id) -> new PrefixQuery(new Term("field", id)));
        queryFunctions.add((id) -> new WildcardQuery(new Term("field", id + "*")));
        queryFunctions.add((id) -> new CustomQuery(new Term("field", id)));
        queryFunctions.add((id) -> new SpanTermQuery(new Term("field", id)));
        queryFunctions.add((id) -> new TermQuery(new Term("field", id)));
        queryFunctions.add((id) -> {
            // Empty boolean query.
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            return builder.build();
        });
        queryFunctions.add((id) -> {
            // MUST term, randomly combined with MUST_NOT and a second MUST clause.
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            builder.add(new TermQuery(new Term("field", id)), BooleanClause.Occur.MUST);
            if (randomBoolean()) {
                builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT);
            }
            if (randomBoolean()) {
                builder.add(new CustomQuery(new Term("field", id)), BooleanClause.Occur.MUST);
            }
            return builder.build();
        });
        queryFunctions.add((id) -> {
            // SHOULD term, randomly combined with MUST_NOT and a second SHOULD clause.
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            builder.add(new TermQuery(new Term("field", id)), BooleanClause.Occur.SHOULD);
            if (randomBoolean()) {
                builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT);
            }
            if (randomBoolean()) {
                builder.add(new CustomQuery(new Term("field", id)), BooleanClause.Occur.SHOULD);
            }
            return builder.build();
        });
        queryFunctions.add((id) -> {
            // Pure match-all MUST clauses (matches every document).
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
            builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
            if (randomBoolean()) {
                builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT);
            }
            return builder.build();
        });
        queryFunctions.add((id) -> {
            // Pure match-all SHOULD clauses.
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
            builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
            if (randomBoolean()) {
                builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT);
            }
            return builder.build();
        });
        queryFunctions.add((id) -> {
            // SHOULD clauses with a random minimum-should-match (may exceed clause count).
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            builder.setMinimumNumberShouldMatch(randomIntBetween(0, 4));
            builder.add(new TermQuery(new Term("field", id)), BooleanClause.Occur.SHOULD);
            builder.add(new CustomQuery(new Term("field", id)), BooleanClause.Occur.SHOULD);
            return builder.build();
        });
        queryFunctions.add((id) -> new MatchAllDocsQuery());
        queryFunctions.add((id) -> new MatchNoDocsQuery("no reason at all"));
        // Index one percolator document per generated query, cycling over the factories.
        int numDocs = randomIntBetween(queryFunctions.size(), queryFunctions.size() * 3);
        List<ParseContext.Document> documents = new ArrayList<>();
        for (int i = 0; i < numDocs; i++) {
            String id = Integer.toString(i);
            Query query = queryFunctions.get(i % queryFunctions.size()).apply(id);
            addQuery(query, documents);
        }
        indexWriter.addDocuments(documents);
        indexWriter.close();
        directoryReader = DirectoryReader.open(directory);
        IndexSearcher shardSearcher = newSearcher(directoryReader);
        // Disable query cache, because ControlQuery cannot be cached...
        shardSearcher.setQueryCache(null);
        // Duel candidate matching against the control query once per id value...
        for (int i = 0; i < numDocs; i++) {
            String id = Integer.toString(i);
            Iterable<? extends IndexableField> doc = Collections.singleton(new StringField("field", id, Field.Store.NO));
            MemoryIndex memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
            duelRun(queryStore, memoryIndex, shardSearcher);
        }
        // ...then for a value no id-based query targets...
        Iterable<? extends IndexableField> doc = Collections.singleton(new StringField("field", "value", Field.Store.NO));
        MemoryIndex memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
        duelRun(queryStore, memoryIndex, shardSearcher);
        // Empty percolator doc:
        memoryIndex = new MemoryIndex();
        duelRun(queryStore, memoryIndex, shardSearcher);
    }
/**
 * Duels the percolator query against the brute-force {@code ControlQuery}
 * reference for a set of specialised query types: common-terms, dismax-blended
 * terms, span-near, span-or, span-not and a numeric (long) range query.
 */
public void testDuelSpecificQueries() throws Exception {
    List<ParseContext.Document> documents = new ArrayList<>();
    // Common-terms query: SHOULD occurrence for both low and high frequency
    // terms, high-frequency cutoff of 128.
    CommonTermsQuery commonTermsQuery = new CommonTermsQuery(BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD, 128);
    commonTermsQuery.add(new Term("field", "quick"));
    commonTermsQuery.add(new Term("field", "brown"));
    commonTermsQuery.add(new Term("field", "fox"));
    addQuery(commonTermsQuery, documents);
    // Dismax-blended query over the same three terms (tie-breaker multiplier 1.0).
    BlendedTermQuery blendedTermQuery = BlendedTermQuery.dismaxBlendedQuery(new Term[]{new Term("field", "quick"),
        new Term("field", "brown"), new Term("field", "fox")}, 1.0f);
    addQuery(blendedTermQuery, documents);
    // In-order span-near: "quick brown fox".
    SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("field", true)
        .addClause(new SpanTermQuery(new Term("field", "quick")))
        .addClause(new SpanTermQuery(new Term("field", "brown")))
        .addClause(new SpanTermQuery(new Term("field", "fox")))
        .build();
    addQuery(spanNearQuery, documents);
    // Second span-near ("the lazy doc") is only used inside the span-or below;
    // it is deliberately NOT registered as a percolator query on its own.
    SpanNearQuery spanNearQuery2 = new SpanNearQuery.Builder("field", true)
        .addClause(new SpanTermQuery(new Term("field", "the")))
        .addClause(new SpanTermQuery(new Term("field", "lazy")))
        .addClause(new SpanTermQuery(new Term("field", "doc")))
        .build();
    SpanOrQuery spanOrQuery = new SpanOrQuery(
        spanNearQuery,
        spanNearQuery2
    );
    addQuery(spanOrQuery, documents);
    // Span-not whose exclude clause equals its include clause, so it should
    // never produce a match.
    SpanNotQuery spanNotQuery = new SpanNotQuery(spanNearQuery, spanNearQuery);
    addQuery(spanNotQuery, documents);
    // Random long range [lowerLong, lowerLong + 0..32].
    long lowerLong = randomIntBetween(0, 256);
    long upperLong = lowerLong + randomIntBetween(0, 32);
    addQuery(LongPoint.newRangeQuery("long_field", lowerLong, upperLong), documents);
    indexWriter.addDocuments(documents);
    indexWriter.close();
    directoryReader = DirectoryReader.open(directory);
    IndexSearcher shardSearcher = newSearcher(directoryReader);
    // Disable query cache, because ControlQuery cannot be cached...
    shardSearcher.setQueryCache(null);
    // Single percolated document: text matches the term/span queries above and
    // the long_field value falls inside the random range.
    Document document = new Document();
    document.add(new TextField("field", "the quick brown fox jumps over the lazy dog", Field.Store.NO));
    long randomLong = randomIntBetween((int) lowerLong, (int) upperLong);
    document.add(new LongPoint("long_field", randomLong));
    MemoryIndex memoryIndex = MemoryIndex.fromDocument(document, new WhitespaceAnalyzer());
    duelRun(queryStore, memoryIndex, shardSearcher);
}
/**
 * Indexes one range query per supported numeric/ip field type and verifies
 * that a document whose single field value falls inside the corresponding
 * range matches exactly that one percolator query.
 *
 * <p>Fixes: the original mixed {@code int} and {@code long} literals in the
 * {@code totalHits} assertions; the six copy-pasted verification stanzas are
 * folded into {@link #assertSingleRangeMatch}.
 */
public void testRangeQueries() throws Exception {
    List<ParseContext.Document> docs = new ArrayList<>();
    addQuery(IntPoint.newRangeQuery("int_field", 0, 5), docs);
    addQuery(LongPoint.newRangeQuery("long_field", 5L, 10L), docs);
    addQuery(HalfFloatPoint.newRangeQuery("half_float_field", 10, 15), docs);
    addQuery(FloatPoint.newRangeQuery("float_field", 15, 20), docs);
    addQuery(DoublePoint.newRangeQuery("double_field", 20, 25), docs);
    addQuery(InetAddressPoint.newRangeQuery("ip_field", forString("192.168.0.1"), forString("192.168.0.10")), docs);
    indexWriter.addDocuments(docs);
    indexWriter.close();
    directoryReader = DirectoryReader.open(directory);
    IndexSearcher shardSearcher = newSearcher(directoryReader);
    shardSearcher.setQueryCache(null);
    // Fixed index-created version passed to percolateQuery -- presumably
    // exercises the 6.1-era extraction format; confirm against PercolatorFieldMapper.
    Version v = Version.V_6_1_0;
    // Each probe value lies inside exactly one of the ranges indexed above;
    // the expected lucene doc id equals the order the query was added.
    assertSingleRangeMatch(shardSearcher, new IntPoint("int_field", 3), 0, v);
    assertSingleRangeMatch(shardSearcher, new LongPoint("long_field", 7L), 1, v);
    assertSingleRangeMatch(shardSearcher, new HalfFloatPoint("half_float_field", 12), 2, v);
    assertSingleRangeMatch(shardSearcher, new FloatPoint("float_field", 17), 3, v);
    assertSingleRangeMatch(shardSearcher, new DoublePoint("double_field", 21), 4, v);
    assertSingleRangeMatch(shardSearcher, new InetAddressPoint("ip_field", forString("192.168.0.4")), 5, v);
}

/**
 * Percolates a document consisting of the single given {@code field} and
 * asserts that exactly one stored query matches, at lucene doc id
 * {@code expectedDocId}.
 *
 * @param shardSearcher searcher over the indexed percolator queries
 * @param field         the only field of the percolated document
 * @param expectedDocId lucene doc id of the query expected to match
 * @param v             index-created version handed to {@code percolateQuery}
 */
private void assertSingleRangeMatch(IndexSearcher shardSearcher, IndexableField field, int expectedDocId, Version v) throws IOException {
    MemoryIndex memoryIndex = MemoryIndex.fromDocument(Collections.singleton(field), new WhitespaceAnalyzer());
    IndexSearcher percolateSearcher = memoryIndex.createSearcher();
    Query query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v);
    TopDocs topDocs = shardSearcher.search(query, 1);
    assertEquals(1L, topDocs.totalHits);
    assertEquals(1, topDocs.scoreDocs.length);
    assertEquals(expectedDocId, topDocs.scoreDocs[0].doc);
}
/**
 * Duels the percolator against the {@code ControlQuery} reference for random
 * range queries on every supported numeric/ip field type. For each type the
 * test first probes a value inside the indexed range (also asserting the
 * exact doc id of the matching query), then duels with a fully random value.
 */
public void testDuelRangeQueries() throws Exception {
    List<ParseContext.Document> documents = new ArrayList<>();
    // One random range query per field type; the lucene doc id of each query
    // equals the order in which it is added (0 = int ... 5 = ip).
    int lowerInt = randomIntBetween(0, 256);
    int upperInt = lowerInt + randomIntBetween(0, 32);
    addQuery(IntPoint.newRangeQuery("int_field", lowerInt, upperInt), documents);
    long lowerLong = randomIntBetween(0, 256);
    long upperLong = lowerLong + randomIntBetween(0, 32);
    addQuery(LongPoint.newRangeQuery("long_field", lowerLong, upperLong), documents);
    float lowerHalfFloat = randomIntBetween(0, 256);
    float upperHalfFloat = lowerHalfFloat + randomIntBetween(0, 32);
    addQuery(HalfFloatPoint.newRangeQuery("half_float_field", lowerHalfFloat, upperHalfFloat), documents);
    float lowerFloat = randomIntBetween(0, 256);
    float upperFloat = lowerFloat + randomIntBetween(0, 32);
    addQuery(FloatPoint.newRangeQuery("float_field", lowerFloat, upperFloat), documents);
    double lowerDouble = randomDoubleBetween(0, 256, true);
    double upperDouble = lowerDouble + randomDoubleBetween(0, 32, true);
    addQuery(DoublePoint.newRangeQuery("double_field", lowerDouble, upperDouble), documents);
    int lowerIpPart = randomIntBetween(0, 255);
    int upperIpPart = randomIntBetween(lowerIpPart, 255);
    addQuery(InetAddressPoint.newRangeQuery("ip_field", forString("192.168.1." + lowerIpPart),
        forString("192.168.1." + upperIpPart)), documents);
    indexWriter.addDocuments(documents);
    indexWriter.close();
    directoryReader = DirectoryReader.open(directory);
    IndexSearcher shardSearcher = newSearcher(directoryReader);
    // Disable query cache, because ControlQuery cannot be cached...
    shardSearcher.setQueryCache(null);
    // int: value inside the range must match query 0, then duel a random value.
    int randomInt = randomIntBetween(lowerInt, upperInt);
    Iterable<? extends IndexableField> doc = Collections.singleton(new IntPoint("int_field", randomInt));
    MemoryIndex memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
    TopDocs result = executeQuery(queryStore, memoryIndex, shardSearcher);
    assertThat(result.scoreDocs.length, equalTo(1));
    assertThat(result.scoreDocs[0].doc, equalTo(0));
    duelRun(queryStore, memoryIndex, shardSearcher);
    doc = Collections.singleton(new IntPoint("int_field", randomInt()));
    memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
    duelRun(queryStore, memoryIndex, shardSearcher);
    // long: value inside the range must match query 1.
    long randomLong = randomIntBetween((int) lowerLong, (int) upperLong);
    doc = Collections.singleton(new LongPoint("long_field", randomLong));
    memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
    result = executeQuery(queryStore, memoryIndex, shardSearcher);
    assertThat(result.scoreDocs.length, equalTo(1));
    assertThat(result.scoreDocs[0].doc, equalTo(1));
    duelRun(queryStore, memoryIndex, shardSearcher);
    doc = Collections.singleton(new LongPoint("long_field", randomLong()));
    memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
    duelRun(queryStore, memoryIndex, shardSearcher);
    // half_float: value inside the range must match query 2.
    float randomHalfFloat = randomIntBetween((int) lowerHalfFloat, (int) upperHalfFloat);
    doc = Collections.singleton(new HalfFloatPoint("half_float_field", randomHalfFloat));
    memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
    result = executeQuery(queryStore, memoryIndex, shardSearcher);
    assertThat(result.scoreDocs.length, equalTo(1));
    assertThat(result.scoreDocs[0].doc, equalTo(2));
    duelRun(queryStore, memoryIndex, shardSearcher);
    doc = Collections.singleton(new HalfFloatPoint("half_float_field", randomFloat()));
    memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
    duelRun(queryStore, memoryIndex, shardSearcher);
    // float: value inside the range must match query 3.
    float randomFloat = randomIntBetween((int) lowerFloat, (int) upperFloat);
    doc = Collections.singleton(new FloatPoint("float_field", randomFloat));
    memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
    result = executeQuery(queryStore, memoryIndex, shardSearcher);
    assertThat(result.scoreDocs.length, equalTo(1));
    assertThat(result.scoreDocs[0].doc, equalTo(3));
    duelRun(queryStore, memoryIndex, shardSearcher);
    doc = Collections.singleton(new FloatPoint("float_field", randomFloat()));
    memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
    duelRun(queryStore, memoryIndex, shardSearcher);
    // double: value inside the range must match query 4.
    double randomDouble = randomDoubleBetween(lowerDouble, upperDouble, true);
    doc = Collections.singleton(new DoublePoint("double_field", randomDouble));
    memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
    result = executeQuery(queryStore, memoryIndex, shardSearcher);
    assertThat(result.scoreDocs.length, equalTo(1));
    assertThat(result.scoreDocs[0].doc, equalTo(4));
    duelRun(queryStore, memoryIndex, shardSearcher);
    doc = Collections.singleton(new DoublePoint("double_field", randomFloat()));
    memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
    duelRun(queryStore, memoryIndex, shardSearcher);
    // ip: address inside the range must match query 5.
    doc = Collections.singleton(new InetAddressPoint("ip_field",
        forString("192.168.1." + randomIntBetween(lowerIpPart, upperIpPart))));
    memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
    result = executeQuery(queryStore, memoryIndex, shardSearcher);
    assertThat(result.scoreDocs.length, equalTo(1));
    assertThat(result.scoreDocs[0].doc, equalTo(5));
    duelRun(queryStore, memoryIndex, shardSearcher);
    doc = Collections.singleton(new InetAddressPoint("ip_field",
        forString("192.168.1." + randomIntBetween(0, 255))));
    memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer());
    duelRun(queryStore, memoryIndex, shardSearcher);
}
/**
 * Verifies percolation of both a small multi-document batch (per-term clauses
 * in the candidate query) and a single very large document (which switches the
 * candidate query over to a {@link TermInSetQuery}).
 *
 * <p>Fix: the temporary {@code RAMDirectory} locals used to shadow the test
 * class's {@code directory} field (opened at the top of this method); they are
 * renamed to {@code percolateDirectory} to remove the shadowing.
 */
public void testPercolateSmallAndLargeDocument() throws Exception {
    List<ParseContext.Document> docs = new ArrayList<>();
    // Three conjunctive two-term queries over overlapping term pairs.
    BooleanQuery.Builder builder = new BooleanQuery.Builder();
    builder.add(new TermQuery(new Term("field", "value1")), BooleanClause.Occur.MUST);
    builder.add(new TermQuery(new Term("field", "value2")), BooleanClause.Occur.MUST);
    addQuery(builder.build(), docs);
    builder = new BooleanQuery.Builder();
    builder.add(new TermQuery(new Term("field", "value2")), BooleanClause.Occur.MUST);
    builder.add(new TermQuery(new Term("field", "value3")), BooleanClause.Occur.MUST);
    addQuery(builder.build(), docs);
    builder = new BooleanQuery.Builder();
    builder.add(new TermQuery(new Term("field", "value3")), BooleanClause.Occur.MUST);
    builder.add(new TermQuery(new Term("field", "value4")), BooleanClause.Occur.MUST);
    addQuery(builder.build(), docs);
    indexWriter.addDocuments(docs);
    indexWriter.close();
    directoryReader = DirectoryReader.open(directory);
    IndexSearcher shardSearcher = newSearcher(directoryReader);
    shardSearcher.setQueryCache(null);
    Version v = Version.CURRENT;
    // Small documents: few terms, so the candidate query is built from
    // individual term clauses wrapped in a CoveringQuery.
    try (RAMDirectory percolateDirectory = new RAMDirectory()) {
        try (IndexWriter iw = new IndexWriter(percolateDirectory, newIndexWriterConfig())) {
            Document document = new Document();
            document.add(new StringField("field", "value1", Field.Store.NO));
            document.add(new StringField("field", "value2", Field.Store.NO));
            iw.addDocument(document);
            document = new Document();
            document.add(new StringField("field", "value5", Field.Store.NO));
            document.add(new StringField("field", "value6", Field.Store.NO));
            iw.addDocument(document);
            document = new Document();
            document.add(new StringField("field", "value3", Field.Store.NO));
            document.add(new StringField("field", "value4", Field.Store.NO));
            iw.addDocument(document);
        }
        try (IndexReader ir = DirectoryReader.open(percolateDirectory)) {
            IndexSearcher percolateSearcher = new IndexSearcher(ir);
            PercolateQuery query = (PercolateQuery)
                fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v);
            BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery();
            assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(CoveringQuery.class));
            // Queries 0 (value1+value2) and 2 (value3+value4) match; query 1 needs
            // value2 AND value3 which never occur in the same percolated document.
            TopDocs topDocs = shardSearcher.search(query, 10);
            assertEquals(2L, topDocs.totalHits);
            assertEquals(2, topDocs.scoreDocs.length);
            assertEquals(0, topDocs.scoreDocs[0].doc);
            assertEquals(2, topDocs.scoreDocs[1].doc);
            topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10);
            assertEquals(2L, topDocs.totalHits);
            assertEquals(2, topDocs.scoreDocs.length);
            assertEquals(0, topDocs.scoreDocs[0].doc);
            assertEquals(2, topDocs.scoreDocs[1].doc);
        }
    }
    // This will trigger using the TermsQuery instead of individual term query clauses in the CoveringQuery:
    try (RAMDirectory percolateDirectory = new RAMDirectory()) {
        try (IndexWriter iw = new IndexWriter(percolateDirectory, newIndexWriterConfig())) {
            // One document carrying 1024 distinct terms: value2 .. value1025.
            Document document = new Document();
            for (int i = 0; i < 1024; i++) {
                int fieldNumber = 2 + i;
                document.add(new StringField("field", "value" + fieldNumber, Field.Store.NO));
            }
            iw.addDocument(document);
        }
        try (IndexReader ir = DirectoryReader.open(percolateDirectory)) {
            IndexSearcher percolateSearcher = new IndexSearcher(ir);
            PercolateQuery query = (PercolateQuery)
                fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v);
            BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery();
            assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class));
            // value1 is absent from the large document, so query 0 cannot match;
            // queries 1 and 2 do.
            TopDocs topDocs = shardSearcher.search(query, 10);
            assertEquals(2L, topDocs.totalHits);
            assertEquals(2, topDocs.scoreDocs.length);
            assertEquals(1, topDocs.scoreDocs[0].doc);
            assertEquals(2, topDocs.scoreDocs[1].doc);
            topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10);
            assertEquals(2L, topDocs.totalHits);
            assertEquals(2, topDocs.scoreDocs.length);
            assertEquals(1, topDocs.scoreDocs[0].doc);
            assertEquals(2, topDocs.scoreDocs[1].doc);
        }
    }
}
/**
 * Runs the real percolate query and the brute-force {@code ControlQuery}
 * reference against the shard searcher and asserts that both return the same
 * hits and scores; when scoring is enabled, their explanations must agree too.
 */
private void duelRun(PercolateQuery.QueryStore queryStore, MemoryIndex memoryIndex, IndexSearcher shardSearcher) throws IOException {
    final boolean requireScore = randomBoolean();
    IndexSearcher percolateSearcher = memoryIndex.createSearcher();
    Query actualQuery = fieldType.percolateQuery("_name", queryStore,
        Collections.singletonList(new BytesArray("{}")), percolateSearcher, Version.CURRENT);
    Query expectedQuery = new ControlQuery(memoryIndex, queryStore);
    // Randomly exercise the constant-score (non-scoring) code path as well.
    if (requireScore == false) {
        actualQuery = new ConstantScoreQuery(actualQuery);
        expectedQuery = new ConstantScoreQuery(expectedQuery);
    }
    TopDocs actual = shardSearcher.search(actualQuery, 10);
    TopDocs expected = shardSearcher.search(expectedQuery, 10);
    assertThat(actual.totalHits, equalTo(expected.totalHits));
    assertThat(actual.scoreDocs.length, equalTo(expected.scoreDocs.length));
    for (int i = 0; i < actual.scoreDocs.length; i++) {
        assertThat(actual.scoreDocs[i].doc, equalTo(expected.scoreDocs[i].doc));
        assertThat(actual.scoreDocs[i].score, equalTo(expected.scoreDocs[i].score));
        if (requireScore) {
            Explanation actualExplain = shardSearcher.explain(actualQuery, actual.scoreDocs[i].doc);
            Explanation expectedExplain = shardSearcher.explain(expectedQuery, expected.scoreDocs[i].doc);
            assertThat(actualExplain.isMatch(), equalTo(expectedExplain.isMatch()));
            assertThat(actualExplain.getValue(), equalTo(expectedExplain.getValue()));
        }
    }
}
/**
 * Turns {@code query} into a percolator document via the field mapper,
 * appending the resulting document to {@code docs} and remembering the raw
 * query in the class-level {@code queries} list.
 */
private void addQuery(Query query, List<ParseContext.Document> docs) throws IOException {
    ParseContext.InternalParseContext context = new ParseContext.InternalParseContext(Settings.EMPTY,
        mapperService.documentMapperParser(), documentMapper, null, null);
    fieldMapper.processQuery(query, context);
    docs.add(context.doc());
    queries.add(query);
}
/**
 * Builds a percolate query over {@code memoryIndex} and returns the top 10
 * hits from {@code shardSearcher}.
 */
private TopDocs executeQuery(PercolateQuery.QueryStore queryStore,
                             MemoryIndex memoryIndex,
                             IndexSearcher shardSearcher) throws IOException {
    Query percolateQuery = fieldType.percolateQuery("_name", queryStore,
        Collections.singletonList(new BytesArray("{}")), memoryIndex.createSearcher(), Version.CURRENT);
    return shardSearcher.search(percolateQuery, 10);
}
/**
 * Trivial test query wrapping a single {@link Term}; it rewrites to a plain
 * {@link TermQuery} on that term. All instances compare equal by class.
 */
private static final class CustomQuery extends Query {

    private final Term term;

    private CustomQuery(Term wrapped) {
        this.term = wrapped;
    }

    @Override
    public String toString(String field) {
        return "custom{" + field + "}";
    }

    @Override
    public Query rewrite(IndexReader reader) throws IOException {
        return new TermQuery(term);
    }

    @Override
    public int hashCode() {
        return classHash();
    }

    @Override
    public boolean equals(Object obj) {
        return sameClassAs(obj);
    }
}
/**
 * Brute-force reference implementation used by {@code duelRun}: a shard
 * document matches iff running its stored query against the percolated
 * in-memory document yields a non-zero score. Instances compare equal by
 * class only.
 */
private static final class ControlQuery extends Query {

    // The percolated ("incoming") document.
    private final MemoryIndex memoryIndex;
    // Provides, per leaf, the query stored in each shard document.
    private final PercolateQuery.QueryStore queryStore;

    private ControlQuery(MemoryIndex memoryIndex, PercolateQuery.QueryStore queryStore) {
        this.memoryIndex = memoryIndex;
        this.queryStore = queryStore;
    }

    @Override
    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) {
        return new Weight(this) {

            // Score of the doc most recently accepted by the iterator below;
            // only updated when needsScores is true.
            float _score;

            @Override
            public void extractTerms(Set<Term> terms) {}

            @Override
            public Explanation explain(LeafReaderContext context, int doc) throws IOException {
                // Re-derive the match by advancing a fresh scorer to the doc.
                Scorer scorer = scorer(context);
                if (scorer != null) {
                    int result = scorer.iterator().advance(doc);
                    if (result == doc) {
                        return Explanation.match(scorer.score(), "ControlQuery");
                    }
                }
                return Explanation.noMatch("ControlQuery");
            }

            @Override
            public String toString() {
                return "weight(" + ControlQuery.this + ")";
            }

            @Override
            public Scorer scorer(LeafReaderContext context) throws IOException {
                // Walk every doc in the leaf and keep only those whose stored
                // query scores non-zero against the in-memory document.
                DocIdSetIterator allDocs = DocIdSetIterator.all(context.reader().maxDoc());
                CheckedFunction<Integer, Query, IOException> leaf = queryStore.getQueries(context);
                FilteredDocIdSetIterator memoryIndexIterator = new FilteredDocIdSetIterator(allDocs) {
                    @Override
                    protected boolean match(int doc) {
                        try {
                            Query query = leaf.apply(doc);
                            float score = memoryIndex.search(query);
                            if (score != 0f) {
                                if (needsScores) {
                                    // Stash the score so the FilterScorer below can report it.
                                    _score = score;
                                }
                                return true;
                            } else {
                                return false;
                            }
                        } catch (IOException e) {
                            // match(int) cannot throw checked exceptions; surface as unchecked.
                            throw new RuntimeException(e);
                        }
                    }
                };
                // Wrap in a FilterScorer so score() returns the memory-index
                // score recorded above instead of the constant 1f.
                return new FilterScorer(new ConstantScoreScorer(this, 1f, memoryIndexIterator)) {
                    @Override
                    public float score() throws IOException {
                        return _score;
                    }
                };
            }

            @Override
            public boolean isCacheable(LeafReaderContext ctx) {
                return false; // doesn't matter
            }
        };
    }

    @Override
    public String toString(String field) {
        return "control{" + field + "}";
    }

    @Override
    public boolean equals(Object obj) {
        return sameClassAs(obj);
    }

    @Override
    public int hashCode() {
        return classHash();
    }
}
}
| |
package run;
import common.enums.Browsers;
import common.enums.markers.SeleniumTest;
import common.enums.markers.SeleniumTestGroup;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.ie.InternetExplorerDriver;
import org.openqa.selenium.opera.OperaDriver;
import org.openqa.selenium.safari.SafariDriver;
import org.reflections.Reflections;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import tests.base.AbstractTest;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static run.RunConstants.*;
/**
* Created by Ugene Reshetnyak on 12.11.2015.
*/
public class Main {

    private static final Logger logger = LoggerFactory.getLogger(Main.class);

    /** Root package scanned for test classes. */
    private static final String TEST_PACKAGE = "tests";

    /** Parsed command line; stays {@code null} when parsing failed. */
    private static CommandLine cmd;

    /**
     * Entry point: parses the options, optionally creates a {@link WebDriver}
     * for the requested browser, selects tests by group and/or name, runs them
     * and prints a summary.
     */
    public static void main(String[] args) {
        System.out.println(Arrays.asList(args));
        initOptions(args);
        if (cmd == null) {
            System.out.println("Incorrect arguments!!!");
            System.out.println();
            printHelp();
            return;
        }
        if (cmd.hasOption(HELP)) {
            printHelp();
            return;
        } else if (cmd.hasOption(HELP_GRADLE)) {
            printGradleHelp();
            return;
        } else if (cmd.hasOption(TEST_LIST)) {
            printTestList();
            return;
        }
        WebDriver driver = createDriver();
        List<String> selectedTestGroups = new ArrayList<>();
        List<String> selectedTests = new ArrayList<>();
        // Unless -all was given, collect the requested group and/or test names.
        if (!cmd.hasOption(ALL)) {
            if (cmd.hasOption(GROUPS)) {
                if (0 == cmd.getOptionValues(GROUPS).length) {
                    System.out.println("At least one test group expected!!!");
                    return;
                }
                selectedTestGroups = Arrays.asList(cmd.getOptionValues(GROUPS));
            }
            if (cmd.hasOption(TESTS)) {
                if (0 == cmd.getOptionValues(TESTS).length) {
                    System.out.println("At least one test expected!!!");
                    return;
                }
                selectedTests = Arrays.asList(cmd.getOptionValues(TESTS));
            }
        }
        boolean stopOnError = cmd.hasOption(STOP_ON_ERROR);
        final List<Class<? extends AbstractTest>> tests = getAllTests();
        int failedTestCount = 0;
        int runTestCount = 0;
        try {
            for (Class<? extends AbstractTest> test : tests) {
                if (!isSelected(test, selectedTestGroups, selectedTests)) {
                    continue;
                }
                runTestCount++;
                // NOTE(review): run(...) appears to report failure by returning
                // true (see the "Failed on test" message below) -- confirm in
                // AbstractTest.
                if (test.newInstance().run(driver)) {
                    failedTestCount++;
                    if (stopOnError) {
                        System.out.println();
                        System.out.println("Failed on test " + test.getSimpleName());
                        break;
                    }
                }
            }
        } catch (InstantiationException | IllegalAccessException e) {
            logger.error(e.getMessage(), e);
        } finally {
            System.out.println();
            System.out.println(" ====== ====== ====== Result ====== ====== ======");
            System.out.println("Executed " + runTestCount + " test(s)");
            System.out.println("Successfully: " + (runTestCount - failedTestCount));
            System.out.println("Failed: " + failedTestCount);
            System.out.println(" ====== ====== ====== End ====== ====== ======");
            // Fix: release browser resources; previously the driver was never quit.
            if (driver != null) {
                driver.quit();
            }
        }
    }

    /**
     * Creates the driver requested via the -browser option, or {@code null}
     * when no browser option was given or the value matches no {@link Browsers}
     * constant.
     * NOTE(review): the help text advertises FIREFOX as the default, but an
     * unrecognised value currently yields a null driver instead -- confirm the
     * intended behavior.
     */
    private static WebDriver createDriver() {
        if (!cmd.hasOption(BROWSER)) {
            return null;
        }
        Browsers browser = Arrays.stream(Browsers.values())
            .filter(item -> item.name().equals(cmd.getOptionValue(BROWSER)))
            .findFirst()
            .orElse(null);
        if (browser == null) {
            return null;
        }
        switch (browser) {
            case CHROME:
                return new ChromeDriver();
            case OPERA:
                return new OperaDriver();
            case SAFARI:
                return new SafariDriver();
            case IE:
                return new InternetExplorerDriver();
            case FIREFOX:
            default:
                return new FirefoxDriver();
        }
    }

    /**
     * A test is selected when no filter is given, or when its group or name
     * matches one of the requested filters. When both filters are present a
     * match on either is sufficient.
     */
    private static boolean isSelected(Class<? extends AbstractTest> test,
                                      List<String> selectedGroups, List<String> selectedNames) {
        boolean allowed = true;
        if (!selectedGroups.isEmpty()) {
            allowed = selectedGroups.stream().anyMatch(group ->
                group.equals(test.getAnnotation(SeleniumTestGroup.class).name())
            );
        }
        if (!selectedNames.isEmpty()) {
            boolean allowedByName = selectedNames.stream().anyMatch(testItem ->
                testItem.equals(test.getAnnotation(SeleniumTest.class).name())
            );
            allowed = selectedGroups.isEmpty() ? allowedByName : allowed || allowedByName;
        }
        return allowed;
    }

    /**
     * Parses the command line into {@link #cmd}; on a parse error the field is
     * left {@code null} and the error is logged.
     */
    private static void initOptions(String[] args) {
        Options options = new Options();
        options.addOption(Option.builder(HELP).longOpt(HELP_LONG).hasArg(false).build());
        options.addOption(Option.builder(HELP_GRADLE).longOpt(HELP_GRADLE_LONG).hasArg(false).build());
        options.addOption(Option.builder(TEST_LIST).longOpt(TEST_LIST_LONG).hasArg(false).build());
        options.addOption(Option.builder(BROWSER).longOpt(BROWSER_LONG).hasArg(true).build());
        options.addOption(Option.builder(ALL).hasArg(false).build());
        options.addOption(Option.builder(GROUPS).longOpt(GROUPS_LONG).hasArgs().build());
        options.addOption(Option.builder(TESTS).longOpt(TESTS_LONG).hasArgs().build());
        options.addOption(Option.builder(STOP_ON_ERROR).longOpt(STOP_ON_ERROR_LONG).hasArg(false).build());
        try {
            CommandLineParser parser = new DefaultParser();
            cmd = parser.parse(options, args);
        } catch (ParseException e) {
            logger.error(e.getMessage());
        }
    }

    /**
     * Scans {@link #TEST_PACKAGE} for subclasses of {@link AbstractTest} that
     * carry both the {@link SeleniumTestGroup} and {@link SeleniumTest}
     * annotations.
     */
    private static List<Class<? extends AbstractTest>> getAllTests() {
        Reflections reflections = new Reflections(TEST_PACKAGE);
        return reflections.getSubTypesOf(AbstractTest.class)
            .stream().filter(item -> item.isAnnotationPresent(SeleniumTestGroup.class) && item.isAnnotationPresent(SeleniumTest.class))
            .collect(Collectors.toList());
    }

    /** Prints all discovered tests, grouped by their {@link SeleniumTestGroup} name. */
    private static void printTestList() {
        final List<Class<? extends AbstractTest>> tests = getAllTests();
        Map<String, List<String>> testsByGroups = new HashMap<>();
        tests.forEach(item ->
            testsByGroups.computeIfAbsent(item.getAnnotation(SeleniumTestGroup.class).name(), key -> new ArrayList<>())
                .add(item.getAnnotation(SeleniumTest.class).name())
        );
        System.out.println();
        System.out.println("====== ====== ====== Tests ====== ====== ======");
        System.out.println("Total : " + tests.size());
        System.out.println();
        testsByGroups.forEach((key, value) -> {
            System.out.println(key + " (" + value.size() + "):");
            value.forEach(item ->
                System.out.println("\t" + item)
            );
        });
        System.out.println();
    }

    /** Prints usage help for invocation through gradle (-P_* properties). */
    private static void printGradleHelp() {
        System.out.println();
        System.out.println("====== ====== ====== Help tutorial ====== ===== ======");
        System.out.println();
        System.out.println("Usage: gradle runTests [-options]");
        System.out.println();
        System.out.println("where options include:");
        System.out.println("    -P_browser, -P_b =<value>\tset browser to test - 'FIREFOX' is default. ('FIREFOX','CHROME','OPERA','SAFARI','IE')");
        System.out.println("    -P_all\t\t\tselect all existing tests to execute.");
        System.out.println("    -P_groups, -P_g=<\"[args...]\">\tspecifies list of groups of tests to execute.");
        System.out.println("    -P_tests, -P_t=<\"[args...]\">\tspecifies list of tests to execute");
        System.out.println("    -P_test_list, -P_tl\t\t\tprint list of existing tests");
        System.out.println("    -P_stop_on_error, -P_s\t\tif specified, tests will be stopped after first error");
        System.out.println("    -P_help_g, -P_hg\t\t\tprint this help message");
        System.out.println("    -P_help, -P_h\t\t\tprint help message for jar");
        System.out.println();
        System.out.println("====== ====== ====== ====== ====== ====== ===== ======");
        System.out.println();
    }

    /** Prints usage help for direct jar invocation. */
    private static void printHelp() {
        System.out.println();
        System.out.println("====== ====== ====== Help tutorial ====== ===== ======");
        System.out.println();
        System.out.println("Usage: *.jar [-options]");
        System.out.println();
        System.out.println("where options include:");
        System.out.println("    -browser, -b <value>\tset browser to test - 'FIREFOX' is default. ('FIREFOX','CHROME','OPERA','SAFARI','IE')");
        System.out.println("    -all\t\t\tselect all existing tests to execute.");
        System.out.println("    -groups, -g <args...>\tspecifies list of groups of tests to execute.");
        System.out.println("    -tests, -t <args...>\tspecifies list of tests to execute");
        System.out.println("    -test_list, -tl\t\tprint list of existing tests");
        System.out.println("    -stop_on_error, -s\t\tif specified, tests will be stopped after first error");
        System.out.println("    -help_g, -hg\t\tprint help message for gradle");
        System.out.println("    -help, -h\t\t\tprint this help message");
        System.out.println();
        System.out.println("====== ====== ====== ====== ====== ====== ===== ======");
        System.out.println();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.checkpoint;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.JobStatus;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.runtime.state.CompletedCheckpointStorageLocation;
import org.apache.flink.runtime.state.SharedStateRegistry;
import org.apache.flink.runtime.state.StateUtil;
import org.apache.flink.runtime.state.StreamStateHandle;
import org.apache.flink.util.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import javax.annotation.concurrent.NotThreadSafe;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.flink.util.Preconditions.checkState;
/**
* A CompletedCheckpoint describes a checkpoint after all required tasks acknowledged it (with their
* state) and that is considered successful. The CompletedCheckpoint class contains all the metadata
* of the checkpoint, i.e., checkpoint ID, timestamps, and the handles to all states that are part
* of the checkpoint.
*
 * <h2>Size of the CompletedCheckpoint Instances</h2>
 *
 * <p>In most cases, the CompletedCheckpoint objects are very small, because the handles to the
 * checkpoint states are only pointers (such as file paths). However, some state backend
 * implementations may choose to store some payload data directly with the metadata (for example to
 * avoid many small files). If those thresholds are increased to large values, the memory
 * consumption of the CompletedCheckpoint objects can be significant.
*
* <h2>Metadata Persistence</h2>
*
* <p>The metadata of the CompletedCheckpoint is also persisted in an external storage system.
* Checkpoints have an external pointer, which points to the metadata. For example when storing a
* checkpoint in a file system, that pointer is the file path to the checkpoint's folder or the
* metadata file. For a state backend that stores metadata in database tables, the pointer could be
* the table name and row key. The pointer is encoded as a String.
*/
@NotThreadSafe
public class CompletedCheckpoint implements Serializable, Checkpoint {

    private static final Logger LOG = LoggerFactory.getLogger(CompletedCheckpoint.class);

    private static final long serialVersionUID = -8360248179615702014L;

    // ------------------------------------------------------------------------

    /** The ID of the job that the checkpoint belongs to. */
    private final JobID job;

    /** The ID (logical timestamp) of the checkpoint. */
    private final long checkpointID;

    /** The timestamp when the checkpoint was triggered. */
    private final long timestamp;

    /** The timestamp when the checkpoint was completed. */
    private final long completionTimestamp;

    /** States of the different operator groups belonging to this checkpoint. */
    private final Map<OperatorID, OperatorState> operatorStates;

    /** Properties for this checkpoint. */
    private final CheckpointProperties props;

    /** States that were created by a hook on the master (in the checkpoint coordinator). */
    private final Collection<MasterState> masterHookStates;

    /** The location where the checkpoint is stored. */
    private final CompletedCheckpointStorageLocation storageLocation;

    /** The state handle to the externalized meta data. */
    private final StreamStateHandle metadataHandle;

    /** External pointer to the completed checkpoint (for example file path). */
    private final String externalPointer;

    /**
     * Completed statistic for managing discard marker. Transient: it is not part of the
     * serialized checkpoint metadata and is only present on the creating instance.
     */
    @Nullable private final transient CompletedCheckpointStats completedCheckpointStats;

    // ------------------------------------------------------------------------

    /**
     * Creates a new completed checkpoint.
     *
     * @param job ID of the job the checkpoint belongs to, must not be null
     * @param checkpointID logical timestamp of the checkpoint, must be non-negative
     * @param timestamp time at which the checkpoint was triggered, must be non-negative
     * @param completionTimestamp time at which the checkpoint completed, must be non-negative
     * @param operatorStates per-operator state handles; defensively copied
     * @param masterHookStates states created by master hooks; may be null or empty, otherwise
     *     defensively copied
     * @param props properties controlling the checkpoint's retention/discard behavior
     * @param storageLocation storage location; also supplies the metadata handle and the
     *     external pointer cached in this object
     * @param completedCheckpointStats optional statistics object, also used as discard marker
     */
    public CompletedCheckpoint(
            JobID job,
            long checkpointID,
            long timestamp,
            long completionTimestamp,
            Map<OperatorID, OperatorState> operatorStates,
            @Nullable Collection<MasterState> masterHookStates,
            CheckpointProperties props,
            CompletedCheckpointStorageLocation storageLocation,
            @Nullable CompletedCheckpointStats completedCheckpointStats) {

        checkArgument(checkpointID >= 0);
        checkArgument(timestamp >= 0);
        checkArgument(completionTimestamp >= 0);

        this.job = checkNotNull(job);
        this.checkpointID = checkpointID;
        this.timestamp = timestamp;
        this.completionTimestamp = completionTimestamp;

        // we create copies here, to make sure we have no shared mutable
        // data structure with the "outside world"
        this.operatorStates = new HashMap<>(checkNotNull(operatorStates));
        this.masterHookStates =
                masterHookStates == null || masterHookStates.isEmpty()
                        ? Collections.emptyList()
                        : new ArrayList<>(masterHookStates);

        this.props = checkNotNull(props);
        this.storageLocation = checkNotNull(storageLocation);
        this.metadataHandle = storageLocation.getMetadataHandle();
        this.externalPointer = storageLocation.getExternalPointer();
        this.completedCheckpointStats = completedCheckpointStats;
    }

    // ------------------------------------------------------------------------
    //  Properties
    // ------------------------------------------------------------------------

    public JobID getJobId() {
        return job;
    }

    @Override
    public long getCheckpointID() {
        return checkpointID;
    }

    public long getTimestamp() {
        return timestamp;
    }

    public long getCompletionTimestamp() {
        return completionTimestamp;
    }

    public CheckpointProperties getProperties() {
        return props;
    }

    /**
     * Returns the operator states of this checkpoint. Note: this exposes the internal (mutable)
     * map, not a copy; it is also cleared when the checkpoint is discarded.
     */
    public Map<OperatorID, OperatorState> getOperatorStates() {
        return operatorStates;
    }

    public Collection<MasterState> getMasterHookStates() {
        return Collections.unmodifiableCollection(masterHookStates);
    }

    public StreamStateHandle getMetadataHandle() {
        return metadataHandle;
    }

    public String getExternalPointer() {
        return externalPointer;
    }

    /**
     * Returns the total size of all operator states in this checkpoint. The value is recomputed
     * on every call by summing over {@link #operatorStates}.
     */
    public long getStateSize() {
        long result = 0L;

        for (OperatorState operatorState : operatorStates.values()) {
            result += operatorState.getStateSize();
        }

        return result;
    }

    // ------------------------------------------------------------------------
    //  Shared State
    // ------------------------------------------------------------------------

    /**
     * Register all shared states in the given registry. This method is called before the
     * checkpoint is added into the store.
     *
     * @param sharedStateRegistry The registry where shared states are registered
     */
    public void registerSharedStatesAfterRestored(SharedStateRegistry sharedStateRegistry) {
        // in "no-claim" (unclaimed) mode we must not register any shared handles;
        // in all other modes the shared state is re-registered under this checkpoint's ID
        if (!props.isUnclaimed()) {
            sharedStateRegistry.registerAll(operatorStates.values(), checkpointID);
        }
    }

    // ------------------------------------------------------------------------
    //  Discard and Dispose
    // ------------------------------------------------------------------------

    /**
     * Marks this checkpoint as discarded (via the attached statistic, if any) and returns the
     * object that performs the actual cleanup work.
     */
    public DiscardObject markAsDiscarded() {
        if (completedCheckpointStats != null) {
            completedCheckpointStats.discard();
        }

        return new CompletedCheckpointDiscardObject();
    }

    public DiscardObject markAsDiscardedOnSubsume() {
        return shouldBeDiscardedOnSubsume() ? markAsDiscarded() : NOOP_DISCARD_OBJECT;
    }

    public DiscardObject markAsDiscardedOnShutdown(JobStatus jobStatus) {
        return shouldBeDiscardedOnShutdown(jobStatus) ? markAsDiscarded() : NOOP_DISCARD_OBJECT;
    }

    public boolean shouldBeDiscardedOnSubsume() {
        return props.discardOnSubsumed();
    }

    /** Whether the checkpoint properties ask for discarding at the given terminal job status. */
    public boolean shouldBeDiscardedOnShutdown(JobStatus jobStatus) {
        return jobStatus == JobStatus.FINISHED && props.discardOnJobFinished()
                || jobStatus == JobStatus.CANCELED && props.discardOnJobCancelled()
                || jobStatus == JobStatus.FAILED && props.discardOnJobFailed()
                || jobStatus == JobStatus.SUSPENDED && props.discardOnJobSuspended();
    }

    // ------------------------------------------------------------------------
    //  Miscellaneous
    // ------------------------------------------------------------------------

    /**
     * Checks whether two collections of checkpoints match pairwise on (checkpoint ID, job ID).
     * The comparison is order-sensitive: the lists built from both collections must be equal.
     */
    public static boolean checkpointsMatch(
            Collection<CompletedCheckpoint> first, Collection<CompletedCheckpoint> second) {
        if (first.size() != second.size()) {
            return false;
        }

        List<Tuple2<Long, JobID>> firstInterestingFields = new ArrayList<>(first.size());

        for (CompletedCheckpoint checkpoint : first) {
            firstInterestingFields.add(
                    new Tuple2<>(checkpoint.getCheckpointID(), checkpoint.getJobId()));
        }

        List<Tuple2<Long, JobID>> secondInterestingFields = new ArrayList<>(second.size());

        for (CompletedCheckpoint checkpoint : second) {
            secondInterestingFields.add(
                    new Tuple2<>(checkpoint.getCheckpointID(), checkpoint.getJobId()));
        }

        return firstInterestingFields.equals(secondInterestingFields);
    }

    @Nullable
    public CompletedCheckpointStats getStatistic() {
        return completedCheckpointStats;
    }

    @Override
    public String toString() {
        return String.format(
                "%s %d @ %d for %s located at %s",
                props.getCheckpointType().getName(), checkpointID, timestamp, job, externalPointer);
    }

    /** Implementation of {@link org.apache.flink.runtime.checkpoint.Checkpoint.DiscardObject}. */
    @NotThreadSafe
    public class CompletedCheckpointDiscardObject implements DiscardObject {

        @Override
        public void discard() throws Exception {
            LOG.trace("Executing discard procedure for {}.", this);
            checkState(
                    isMarkedAsDiscarded(),
                    "Checkpoint should be marked as discarded before discard.");
            try {
                // collect exceptions and continue cleanup
                Exception exception = null;

                // drop the metadata
                try {
                    metadataHandle.discardState();
                } catch (Exception e) {
                    exception = e;
                }

                // discard private state objects
                try {
                    StateUtil.bestEffortDiscardAllStateObjects(operatorStates.values());
                } catch (Exception e) {
                    exception = ExceptionUtils.firstOrSuppressed(e, exception);
                }

                // discard location as a whole
                try {
                    storageLocation.disposeStorageLocation();
                } catch (Exception e) {
                    exception = ExceptionUtils.firstOrSuppressed(e, exception);
                }

                // rethrow the first failure; later failures were attached as suppressed
                if (exception != null) {
                    throw exception;
                }
            } finally {
                // always clear the operator states — even on failure — so the (possibly
                // disposed) handles cannot be used afterwards
                operatorStates.clear();
            }
        }

        /** Discarding is legal when no statistic is attached or it was marked as discarded. */
        private boolean isMarkedAsDiscarded() {
            return completedCheckpointStats == null || completedCheckpointStats.isDiscarded();
        }
    }
}
| |
/*
* Copyright 2015-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onlab.util;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Objects;
import org.junit.Ignore;
import org.junit.Test;
import com.google.common.collect.Maps;
import static org.junit.Assert.*;
/**
* Unit tests for ExtendedSet.
*/
public class ExtendedSetTest {

    @Test
    public void testGet() {
        ExtendedSet<TestValue> set = new ExtendedSet<>(Maps.newConcurrentMap());
        TestValue e1 = new TestValue("foo", 1);
        set.add(e1);
        // TestValue equality is based on value1 only (see TestValue#equals), so a lookup
        // with a different value2 must still return the entry actually stored in the set.
        TestValue lookupValue = new TestValue("foo", 2);
        TestValue setEntry = set.get(lookupValue);
        assertEquals(e1, setEntry);
    }

    @Test
    public void testInsertOrReplace() {
        ExtendedSet<TestValue> set = new ExtendedSet<>(Maps.newConcurrentMap());
        TestValue small = new TestValue("foo", 1);
        TestValue medium = new TestValue("foo", 2);
        TestValue large = new TestValue("foo", 3);
        // input TestValue will replace existing TestValue if its value2() is greater
        // than existing entry's value2()
        assertTrue(set.insertOrReplace(small, existing -> existing.value2() < small.value2()));
        assertTrue(set.insertOrReplace(large, existing -> existing.value2() < large.value2()));
        assertFalse(set.insertOrReplace(medium, existing -> existing.value2() < medium.value2()));
        // all three values are equal to each other (same value1), so all are "contained"
        assertTrue(set.contains(small));
        assertTrue(set.contains(medium));
        assertTrue(set.contains(large));
    }

    @Test
    public void testConditionalRemove() {
        ExtendedSet<TestValue> set = new ExtendedSet<>(Maps.newConcurrentMap());
        TestValue small = new TestValue("foo", 1);
        TestValue medium = new TestValue("foo", 2);
        assertTrue(set.add(small));
        // use the value2() accessor, consistently with the rest of the tests
        set.conditionalRemove(medium, existing -> existing.value2() < medium.value2());
        assertFalse(set.contains(small));

        assertTrue(set.add(small));
        set.conditionalRemove(medium, existing -> existing.value2() > medium.value2());
        assertTrue(set.contains(small));
    }

    @Test
    public void testIsEmpty() {
        ExtendedSet<TestValue> nonemptyset = new ExtendedSet<>(Maps.newConcurrentMap());
        TestValue val = new TestValue("foo", 1);
        assertTrue(nonemptyset.add(val));
        assertTrue(nonemptyset.contains(val));
        assertFalse(nonemptyset.isEmpty());
        ExtendedSet<TestValue> emptyset = new ExtendedSet<>(Maps.newConcurrentMap());
        assertTrue(emptyset.isEmpty());
    }

    @Test
    public void testClear() {
        ExtendedSet<TestValue> set = new ExtendedSet<>(Maps.newConcurrentMap());
        TestValue val = new TestValue("foo", 1);
        assertTrue(set.add(val));
        assertTrue(set.contains(val));
        set.clear();
        assertFalse(set.contains(val));
    }

    @Test
    public void testSize() {
        ExtendedSet<TestValue> nonemptyset = new ExtendedSet<>(Maps.newConcurrentMap());
        TestValue val = new TestValue("foo", 1);
        assertTrue(nonemptyset.add(val));
        assertTrue(nonemptyset.contains(val));
        assertEquals(1, nonemptyset.size());
        TestValue secval = new TestValue("goo", 2);
        assertTrue(nonemptyset.add(secval));
        assertTrue(nonemptyset.contains(secval));
        assertEquals(2, nonemptyset.size());
        ExtendedSet<TestValue> emptyset = new ExtendedSet<>(Maps.newConcurrentMap());
        assertEquals(0, emptyset.size());
    }

    @Test
    public void testIterator() {
        ExtendedSet<TestValue> set = new ExtendedSet<>(Maps.newConcurrentMap());
        TestValue val = new TestValue("foo", 1);
        assertTrue(set.add(val));
        TestValue nextval = new TestValue("goo", 2);
        assertTrue(set.add(nextval));
        assertTrue(set.contains(nextval));
        Iterator<TestValue> iterator = set.iterator();
        // check hasNext() before each next() and exhaustion after the last element
        assertTrue(iterator.hasNext());
        assertEquals(val, iterator.next());
        assertTrue(iterator.hasNext());
        assertEquals(nextval, iterator.next());
        assertFalse(iterator.hasNext());
    }

    @Test
    public void testToArray() {
        ExtendedSet<TestValue> set = new ExtendedSet<>(Maps.newConcurrentMap());
        TestValue val = new TestValue("foo", 1);
        assertTrue(set.add(val));
        TestValue nextval = new TestValue("goo", 2);
        assertTrue(set.add(nextval));
        Object[] array = set.toArray();
        TestValue[] valarray = {val, nextval};
        assertArrayEquals(valarray, array);
        assertTrue(set.toArray(new TestValue[0])[0] instanceof TestValue);
    }

    @Test
    public void testContainsAll() {
        ExtendedSet<TestValue> set = new ExtendedSet<>(Maps.newConcurrentMap());
        TestValue val = new TestValue("foo", 1);
        assertTrue(set.add(val));
        TestValue nextval = new TestValue("goo", 2);
        assertTrue(set.add(nextval));
        ArrayList<TestValue> vals = new ArrayList<TestValue>();
        vals.add(val);
        vals.add(nextval);
        assertTrue(set.containsAll(vals));
    }

    @Test
    public void testRemove() {
        ExtendedSet<TestValue> set = new ExtendedSet<>(Maps.newConcurrentMap());
        TestValue val = new TestValue("foo", 1);
        assertTrue(set.add(val));
        TestValue nextval = new TestValue("goo", 2);
        assertTrue(set.add(nextval));
        assertTrue(set.remove(val));
        assertFalse(set.contains(val));
        assertTrue(set.remove(nextval));
        assertFalse(set.contains(nextval));
    }

    @Test
    public void testAddAll() {
        ExtendedSet<TestValue> nonemptyset = new ExtendedSet<>(Maps.newConcurrentMap());
        TestValue val = new TestValue("foo", 1);
        assertTrue(nonemptyset.add(val));
        TestValue nextval = new TestValue("goo", 2);
        TestValue finalval = new TestValue("shoo", 3);
        TestValue extremeval = new TestValue("who", 4);
        assertTrue(nonemptyset.add(extremeval));
        ArrayList<TestValue> vals = new ArrayList<TestValue>();
        vals.add(nextval);
        vals.add(finalval);
        vals.add(extremeval);
        assertTrue(nonemptyset.addAll(vals));
        assertTrue(nonemptyset.contains(nextval));
        assertTrue(nonemptyset.contains(finalval));
        ExtendedSet<TestValue> emptyset = new ExtendedSet<>(Maps.newConcurrentMap());
        vals = new ArrayList<TestValue>();
        vals.add(val);
        vals.add(nextval);
        vals.add(finalval);
        assertTrue(emptyset.addAll(vals));
        assertTrue(emptyset.contains(val));
        assertTrue(emptyset.contains(nextval));
        assertTrue(emptyset.contains(finalval));
    }

    @Test
    public void testRemoveAll() {
        ExtendedSet<TestValue> set = new ExtendedSet<>(Maps.newConcurrentMap());
        TestValue val = new TestValue("foo", 1);
        assertTrue(set.add(val));
        TestValue nextval = new TestValue("goo", 2);
        assertTrue(set.add(nextval));
        TestValue finalval = new TestValue("shoo", 3);
        assertTrue(set.add(finalval));
        ArrayList<TestValue> vals = new ArrayList<TestValue>();
        vals.add(nextval);
        vals.add(finalval);
        vals.add(new TestValue("who", 4));
        assertTrue(set.removeAll(vals));
        assertFalse(set.contains(nextval));
        assertFalse(set.contains(finalval));
    }

    @Test
    @Ignore("retainAll appears to violate the documented semantics because it does" +
            " not properly remove the items that are not in the Collection parameter.")
    public void testRetainAll() {
        ExtendedSet<TestValue> set = new ExtendedSet<>(Maps.newConcurrentMap());
        TestValue small = new TestValue("foo", 1);
        assertTrue(set.add(small));
        TestValue medium = new TestValue("goo", 2);
        assertTrue(set.add(medium));
        TestValue large = new TestValue("shoo", 3);
        assertTrue(set.add(large));
        TestValue extreme = new TestValue("who", 4);
        assertTrue(set.add(extreme));
        ArrayList<TestValue> firstvals = new ArrayList<TestValue>();
        firstvals.add(medium);
        firstvals.add(extreme);
        set.retainAll(firstvals);
        assertTrue(set.contains(medium));
        assertTrue(set.contains(extreme));
        assertFalse(set.contains(small));
        assertFalse(set.contains(large));
        ArrayList<TestValue> secondval = new ArrayList<TestValue>();
        secondval.add(medium);
        set.retainAll(secondval);
        assertFalse(set.contains(extreme));
        assertTrue(set.contains(medium));
    }

    @Test
    public void testAddFailure() {
        ExtendedSet<TestValue> set = new ExtendedSet<>(Maps.newConcurrentMap());
        TestValue val = new TestValue("foo", 1);
        assertTrue(set.add(val));
        assertFalse(set.add(val));
    }

    @Test
    public void testRemoveFailure() {
        ExtendedSet<TestValue> set = new ExtendedSet<TestValue>(Maps.newConcurrentMap());
        TestValue val = new TestValue("foo", 1);
        assertFalse(set.remove(val));
    }

    /**
     * Test fixture. Equality (and hash) is deliberately based on value1 only, so two
     * instances with the same value1 but different value2 are "equal" to the set.
     * Static: the class does not use the enclosing test instance.
     */
    private static class TestValue {
        private String value1;
        private int value2;

        public TestValue(String v1, int v2) {
            this.value1 = v1;
            this.value2 = v2;
        }

        public String value1() {
            return value1;
        }

        public int value2() {
            return value2;
        }

        @Override
        public boolean equals(Object other) {
            if (other instanceof TestValue) {
                TestValue that = (TestValue) other;
                return Objects.equals(value1, that.value1);
            }
            return false;
        }

        @Override
        public int hashCode() {
            return Objects.hash(value1);
        }
    }
}
| |
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.ant;
import com.android.sdklib.build.ApkBuilder;
import com.android.sdklib.build.ApkBuilder.FileEntry;
import com.android.sdklib.build.ApkCreationException;
import com.android.sdklib.build.DuplicateFileException;
import com.android.sdklib.build.SealedApkException;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.types.Path;
import java.io.File;
import java.io.FilenameFilter;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
public class ApkBuilderTask extends SingleDependencyTask {

    /** Matches "*.jar" file names, case-insensitively; compiled once and reused. */
    private final static Pattern PATTERN_JAR_EXT = Pattern.compile("^.+\\.jar$",
            Pattern.CASE_INSENSITIVE);

    private String mOutFolder;
    private String mApkFilepath;
    private String mResourceFile;
    private boolean mVerbose = false;
    private boolean mDebugPackaging = false;
    private boolean mDebugSigning = false;
    private boolean mHasCode = true;

    private Path mDexPath;

    private final ArrayList<Path> mZipList = new ArrayList<Path>();
    private final ArrayList<Path> mSourceList = new ArrayList<Path>();
    private final ArrayList<Path> mJarfolderList = new ArrayList<Path>();
    private final ArrayList<Path> mJarfileList = new ArrayList<Path>();
    private final ArrayList<Path> mNativeList = new ArrayList<Path>();

    /**
     * {@link InputPath} for a source folder: dependency checking ignores files and
     * folders that would not be packaged into the APK.
     */
    private static class SourceFolderInputPath extends InputPath {
        public SourceFolderInputPath(File file) {
            super(file);
        }

        @Override
        public boolean ignores(File file) {
            if (file.isDirectory()) {
                return !ApkBuilder.checkFolderForPackaging(file.getName());
            } else {
                return !ApkBuilder.checkFileForPackaging(file.getName());
            }
        }
    }

    /**
     * Sets the value of the "outfolder" attribute.
     * @param outFolder the value.
     */
    public void setOutfolder(Path outFolder) {
        mOutFolder = TaskHelper.checkSinglePath("outfolder", outFolder);
    }

    /**
     * Sets the full filepath to the apk to generate.
     * @param filepath the path of the APK to create.
     */
    public void setApkfilepath(String filepath) {
        mApkFilepath = filepath;
    }

    /**
     * Sets the resourcefile attribute.
     * @param resourceFile the name of the resource file, resolved against the out folder.
     */
    public void setResourcefile(String resourceFile) {
        mResourceFile = resourceFile;
    }

    /**
     * Sets the value of the "verbose" attribute.
     * @param verbose the value.
     */
    public void setVerbose(boolean verbose) {
        mVerbose = verbose;
    }

    /**
     * Sets the value of the "debug" attribute.
     * @param debug the debug mode value.
     * @deprecated use {@link #setDebugpackaging(boolean)} and
     * {@link #setDebugsigning(boolean)} instead.
     */
    @Deprecated
    public void setDebug(boolean debug) {
        // fixed: "WARNNG" typo and the missing space between the two concatenated sentences.
        System.out.println("WARNING: Using deprecated 'debug' attribute in ApkBuilderTask. " +
                "Use 'debugpackaging' and 'debugsigning' instead.");
        mDebugPackaging = debug;
        mDebugSigning = debug;
    }

    /**
     * Sets the value of the "debugpackaging" attribute.
     * @param debug the debug mode value.
     */
    public void setDebugpackaging(boolean debug) {
        mDebugPackaging = debug;
    }

    /**
     * Sets the value of the "debugsigning" attribute.
     * @param debug the debug mode value.
     */
    public void setDebugsigning(boolean debug) {
        mDebugSigning = debug;
    }

    /**
     * Sets the hascode attribute. Default is true.
     * If set to false, then <dex> and <sourcefolder> nodes are ignored and not processed.
     * @param hasCode the value of the attribute.
     */
    public void setHascode(boolean hasCode) {
        mHasCode = hasCode;
    }

    /**
     * Returns an object representing a nested <var>zip</var> element.
     */
    public Object createZip() {
        Path path = new Path(getProject());
        mZipList.add(path);
        return path;
    }

    /**
     * Returns an object representing a nested <var>dex</var> element.
     * This is similar to a nested <var>file</var> element, except when {@link #mHasCode}
     * is <code>false</code> in which case it's ignored.
     */
    public Object createDex() {
        if (mDexPath == null) {
            return mDexPath = new Path(getProject());
        } else {
            throw new BuildException("Only one <dex> inner element can be provided");
        }
    }

    /**
     * Returns an object representing a nested <var>sourcefolder</var> element.
     */
    public Object createSourcefolder() {
        Path path = new Path(getProject());
        mSourceList.add(path);
        return path;
    }

    /**
     * Returns an object representing a nested <var>jarfolder</var> element.
     */
    public Object createJarfolder() {
        Path path = new Path(getProject());
        mJarfolderList.add(path);
        return path;
    }

    /**
     * Returns an object representing a nested <var>jarfile</var> element.
     */
    public Object createJarfile() {
        Path path = new Path(getProject());
        mJarfileList.add(path);
        return path;
    }

    /**
     * Returns an object representing a nested <var>nativefolder</var> element.
     */
    public Object createNativefolder() {
        Path path = new Path(getProject());
        mNativeList.add(path);
        return path;
    }

    /**
     * Builds (and optionally debug-signs) the APK from the configured inputs, unless the
     * dependency file shows that nothing changed since the last build.
     *
     * @throws BuildException if a required attribute or inner element is missing, if the
     * {@code <dex>} path does not resolve to exactly one file, or if the APK cannot be
     * created or sealed.
     */
    @Override
    public void execute() throws BuildException {
        File outputFile;
        if (mApkFilepath != null) {
            outputFile = new File(mApkFilepath);
        } else {
            throw new BuildException("missing attribute 'apkFilepath'");
        }

        if (mResourceFile == null) {
            throw new BuildException("missing attribute 'resourcefile'");
        }

        if (mOutFolder == null) {
            throw new BuildException("missing attribute 'outfolder'");
        }

        // check dexPath is only one file.
        File dexFile = null;
        if (mHasCode) {
            // fail with a clear message instead of an NPE when no <dex> element was provided.
            if (mDexPath == null) {
                throw new BuildException(
                        "missing <dex> inner element (required when 'hascode' is true)");
            }
            String[] dexFiles = mDexPath.list();
            if (dexFiles.length != 1) {
                throw new BuildException(String.format(
                        "Expected one dex file but path value resolve to %d files.",
                        dexFiles.length));
            }
            dexFile = new File(dexFiles[0]);
        }

        try {
            // build list of input files/folders to compute dependencies
            // add the content of the zip files.
            List<InputPath> inputPaths = new ArrayList<InputPath>();

            // resource file
            InputPath resourceInputPath = new InputPath(new File(mOutFolder, mResourceFile));
            inputPaths.add(resourceInputPath);

            // dex file
            if (dexFile != null) {
                inputPaths.add(new InputPath(dexFile));
            }

            // zip input files
            List<File> zipFiles = new ArrayList<File>();
            for (Path pathList : mZipList) {
                for (String path : pathList.list()) {
                    File f = new File(path);
                    zipFiles.add(f);
                    inputPaths.add(new InputPath(f));
                }
            }

            // now go through the list of source folders used to add non java files.
            List<File> sourceFolderList = new ArrayList<File>();
            if (mHasCode) {
                for (Path pathList : mSourceList) {
                    for (String path : pathList.list()) {
                        File f = new File(path);
                        sourceFolderList.add(f);
                        // because this is a source folder but we only care about non
                        // java files.
                        inputPaths.add(new SourceFolderInputPath(f));
                    }
                }
            }

            // now go through the list of jar folders.
            List<File> jarFileList = new ArrayList<File>();
            for (Path pathList : mJarfolderList) {
                for (String path : pathList.list()) {
                    // it's ok if top level folders are missing
                    File folder = new File(path);
                    if (folder.isDirectory()) {
                        String[] filenames = folder.list(new FilenameFilter() {
                            @Override
                            public boolean accept(File dir, String name) {
                                return PATTERN_JAR_EXT.matcher(name).matches();
                            }
                        });

                        // File.list() returns null on I/O error; treat that as "no jars found"
                        if (filenames != null) {
                            for (String filename : filenames) {
                                File f = new File(folder, filename);
                                jarFileList.add(f);
                                inputPaths.add(new InputPath(f));
                            }
                        }
                    }
                }
            }

            // now go through the list of jar files.
            for (Path pathList : mJarfileList) {
                for (String path : pathList.list()) {
                    File f = new File(path);
                    jarFileList.add(f);
                    inputPaths.add(new InputPath(f));
                }
            }

            // now the native lib folder.
            List<FileEntry> nativeFileList = new ArrayList<FileEntry>();
            for (Path pathList : mNativeList) {
                for (String path : pathList.list()) {
                    // it's ok if top level folders are missing
                    File folder = new File(path);
                    if (folder.isDirectory()) {
                        List<FileEntry> entries = ApkBuilder.getNativeFiles(folder,
                                mDebugPackaging);
                        // add the list to the list of native files and then create an input
                        // path for each file
                        nativeFileList.addAll(entries);

                        for (FileEntry entry : entries) {
                            inputPaths.add(new InputPath(entry.mFile));
                        }
                    }
                }
            }

            // Finally figure out the path to the dependency file.
            String depFile = outputFile.getAbsolutePath() + ".d";

            // check dependencies
            if (initDependencies(depFile, inputPaths) && !dependenciesHaveChanged()) {
                System.out.println(
                        "No changes. No need to create apk.");
                return;
            }

            if (mDebugSigning) {
                System.out.println(String.format(
                        "Creating %s and signing it with a debug key...", outputFile.getName()));
            } else {
                System.out.println(String.format(
                        "Creating %s for release...", outputFile.getName()));
            }

            ApkBuilder apkBuilder = new ApkBuilder(
                    outputFile,
                    resourceInputPath.getFile(),
                    dexFile,
                    mDebugSigning ? ApkBuilder.getDebugKeystore() : null,
                    mVerbose ? System.out : null);
            apkBuilder.setDebugMode(mDebugPackaging);

            // add the content of the zip files.
            for (File f : zipFiles) {
                if (mVerbose) {
                    System.out.println("Zip Input: " + f.getAbsolutePath());
                }
                apkBuilder.addZipFile(f);
            }

            // now go through the list of file to directly add the to the list.
            for (File f : sourceFolderList) {
                if (mVerbose) {
                    System.out.println("Source Folder Input: " + f.getAbsolutePath());
                }
                apkBuilder.addSourceFolder(f);
            }

            // now go through the list of jar files.
            for (File f : jarFileList) {
                if (mVerbose) {
                    System.out.println("Jar Input: " + f.getAbsolutePath());
                }
                apkBuilder.addResourcesFromJar(f);
            }

            // and finally the native files
            apkBuilder.addNativeLibraries(nativeFileList);

            // close the archive
            apkBuilder.sealApk();

            // and generate the dependency file
            generateDependencyFile(depFile, inputPaths, outputFile.getAbsolutePath());
        } catch (DuplicateFileException e) {
            System.err.println(String.format(
                    "Found duplicate file for APK: %1$s\nOrigin 1: %2$s\nOrigin 2: %3$s",
                    e.getArchivePath(), e.getFile1(), e.getFile2()));
            throw new BuildException(e);
        } catch (ApkCreationException e) {
            throw new BuildException(e);
        } catch (SealedApkException e) {
            throw new BuildException(e);
        } catch (IllegalArgumentException e) {
            throw new BuildException(e);
        }
    }

    @Override
    protected String getExecTaskName() {
        return "apkbuilder";
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* @author max
*/
package com.intellij.openapi.roots.impl;
import com.intellij.ProjectTopics;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.TransactionGuard;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.ExtensionException;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.util.ProgressWrapper;
import com.intellij.openapi.project.*;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.EmptyRunnable;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.newvfs.events.VFileCreateEvent;
import com.intellij.openapi.vfs.newvfs.events.VFileEvent;
import com.intellij.openapi.vfs.newvfs.events.VFileMoveEvent;
import com.intellij.psi.PsiManager;
import com.intellij.psi.impl.PsiManagerEx;
import com.intellij.psi.impl.file.impl.FileManagerImpl;
import com.intellij.ui.GuiUtils;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.indexing.FileBasedIndex;
import com.intellij.util.indexing.FileBasedIndexProjectHandler;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Future;
public class PushedFilePropertiesUpdaterImpl extends PushedFilePropertiesUpdater {
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.roots.impl.PushedFilePropertiesUpdater");
private final Project myProject;
private final FilePropertyPusher[] myPushers;
private final FilePropertyPusher[] myFilePushers;
private final Queue<Runnable> myTasks = new ConcurrentLinkedQueue<>();
/**
 * Creates the updater for the given project: collects all {@link FilePropertyPusher}
 * extensions, pre-filters the subset that also applies to plain files (not only
 * directories), and registers a pre-startup activity that notifies every pusher after
 * the project roots change.
 */
public PushedFilePropertiesUpdaterImpl(final Project project) {
  myProject = project;
  myPushers = Extensions.getExtensions(FilePropertyPusher.EP_NAME);
  // only pushers that want individual files, not just directories
  myFilePushers = ContainerUtil.findAllAsArray(myPushers, pusher -> !pusher.pushDirectoriesOnly());
  // NOTE(review): connect() has no parent disposable, so this subscription lives for the
  // whole message-bus lifetime — verify that is intentional.
  StartupManager.getInstance(project).registerPreStartupActivity(
    () -> project.getMessageBus().connect().subscribe(ProjectTopics.PROJECT_ROOTS, new ModuleRootAdapter() {
      @Override
      public void rootsChanged(final ModuleRootEvent event) {
        for (FilePropertyPusher pusher : myPushers) {
          pusher.afterRootsChanged(project);
        }
      }
    }));
}
/**
 * Reacts to a batch of VFS events: pushes properties synchronously for created files and
 * for moved files/directories, and queues deferred recursive push tasks for directories
 * created during a refresh. After any synchronous push, schedules a dumb-mode reindexing
 * check on the UI thread.
 */
public void processAfterVfsChanges(@NotNull List<? extends VFileEvent> events) {
  boolean pushedSomething = false;
  List<Runnable> delayedTasks = ContainerUtil.newArrayList();
  for (VFileEvent event : events) {
    final VirtualFile file = event.getFile();
    if (file == null) continue;
    // directories go to all pushers; plain files only to the file-capable subset
    final FilePropertyPusher[] pushers = file.isDirectory() ? myPushers : myFilePushers;
    if (pushers.length == 0) continue;
    if (event instanceof VFileCreateEvent) {
      if (!event.isFromRefresh() || !file.isDirectory()) {
        // push synchronously to avoid entering dumb mode in the middle of a meaningful write action
        // avoid dumb mode for just one file
        doPushRecursively(file, pushers, ProjectRootManager.getInstance(myProject).getFileIndex());
        pushedSomething = true;
      }
      else if (!ProjectUtil.isProjectOrWorkspaceFile(file)) {
        // directory appeared during a refresh: defer the potentially large recursive push
        // (createRecursivePushTask returns null for dirs outside the project content)
        ContainerUtil.addIfNotNull(delayedTasks, createRecursivePushTask(file, pushers));
      }
    } else if (event instanceof VFileMoveEvent) {
      // clear each pusher's cached user data on the moved file before re-pushing;
      // presumably the cached value can depend on the file's location — confirm
      for (FilePropertyPusher pusher : pushers) {
        file.putUserData(pusher.getFileDataKey(), null);
      }
      // push synchronously to avoid entering dumb mode in the middle of a meaningful write action
      doPushRecursively(file, pushers, ProjectRootManager.getInstance(myProject).getFileIndex());
      pushedSomething = true;
    }
  }
  if (!delayedTasks.isEmpty()) {
    queueTasks(delayedTasks);
  }
  if (pushedSomething) {
    GuiUtils.invokeLaterIfNeeded(() -> scheduleDumbModeReindexingIfNeeded(), ModalityState.defaultModalityState());
  }
}
@Override
public void initializeProperties() {
for (final FilePropertyPusher pusher : myPushers) {
pusher.initExtra(myProject, myProject.getMessageBus(), new FilePropertyPusher.Engine() {
@Override
public void pushAll() {
PushedFilePropertiesUpdaterImpl.this.pushAll(pusher);
}
@Override
public void pushRecursively(VirtualFile file, Project project) {
queueTasks(ContainerUtil.createMaybeSingletonList(createRecursivePushTask(file, new FilePropertyPusher[]{pusher})));
}
});
}
}
@Override
public void pushAllPropertiesNow() {
performPushTasks();
doPushAll(myPushers);
}
@Nullable
private Runnable createRecursivePushTask(final VirtualFile dir, final FilePropertyPusher[] pushers) {
if (pushers.length == 0) return null;
final ProjectFileIndex fileIndex = ProjectRootManager.getInstance(myProject).getFileIndex();
if (!fileIndex.isInContent(dir)) return null;
return () -> doPushRecursively(dir, pushers, fileIndex);
}
private void doPushRecursively(VirtualFile dir, final FilePropertyPusher[] pushers, ProjectFileIndex fileIndex) {
fileIndex.iterateContentUnderDirectory(dir, new ContentIterator() {
@Override
public boolean processFile(final VirtualFile fileOrDir) {
applyPushersToFile(fileOrDir, pushers, null);
return true;
}
});
}
private void queueTasks(List<? extends Runnable> actions) {
for (Runnable action : actions) {
myTasks.offer(action);
}
final DumbModeTask task = new DumbModeTask() {
@Override
public void performInDumbMode(@NotNull ProgressIndicator indicator) {
performPushTasks();
}
};
myProject.getMessageBus().connect(task).subscribe(ProjectTopics.PROJECT_ROOTS, new ModuleRootAdapter() {
@Override
public void rootsChanged(ModuleRootEvent event) {
DumbService.getInstance(myProject).cancelTask(task);
}
});
DumbService.getInstance(myProject).queueTask(task);
}
private void performPushTasks() {
boolean hadTasks = false;
while (true) {
Runnable task = myTasks.poll();
if (task == null) {
break;
}
try {
task.run();
hadTasks = true;
}
catch (ProcessCanceledException e) {
queueTasks(Collections.singletonList(task)); // reschedule dumb mode and ensure the canceled task is enqueued again
throw e;
}
}
if (hadTasks) {
scheduleDumbModeReindexingIfNeeded();
}
}
private void scheduleDumbModeReindexingIfNeeded() {
if (myProject.isDisposed()) return;
DumbModeTask task = FileBasedIndexProjectHandler.createChangedFilesIndexingTask(myProject);
if (task != null) {
DumbService.getInstance(myProject).queueTask(task);
}
}
private static <T> T findPusherValuesUpwards(Project project, VirtualFile dir, FilePropertyPusher<T> pusher, T moduleValue) {
final T value = pusher.getImmediateValue(project, dir);
if (value != null) return value;
if (moduleValue != null) return moduleValue;
final VirtualFile parent = dir.getParent();
if (parent != null) return findPusherValuesUpwards(project, parent, pusher);
T projectValue = pusher.getImmediateValue(project, null);
return projectValue != null? projectValue : pusher.getDefaultValue();
}
private static <T> T findPusherValuesUpwards(Project project, VirtualFile dir, FilePropertyPusher<T> pusher) {
final T userValue = dir.getUserData(pusher.getFileDataKey());
if (userValue != null) return userValue;
final T value = pusher.getImmediateValue(project, dir);
if (value != null) return value;
final VirtualFile parent = dir.getParent();
if (parent != null) return findPusherValuesUpwards(project, parent, pusher);
T projectValue = pusher.getImmediateValue(project, null);
return projectValue != null ? projectValue : pusher.getDefaultValue();
}
@Override
public void pushAll(final FilePropertyPusher... pushers) {
queueTasks(Collections.singletonList(() -> doPushAll(pushers)));
}
private void doPushAll(final FilePropertyPusher[] pushers) {
Module[] modules = ApplicationManager.getApplication().runReadAction(new Computable<Module[]>() {
@Override
public Module[] compute() {
return ModuleManager.getInstance(myProject).getModules();
}
});
List<Runnable> tasks = new ArrayList<>();
for (final Module module : modules) {
Runnable iteration = ApplicationManager.getApplication().runReadAction(new Computable<Runnable>() {
@Override
public Runnable compute() {
if (module.isDisposed()) return EmptyRunnable.INSTANCE;
ProgressManager.checkCanceled();
final Object[] moduleValues = new Object[pushers.length];
for (int i = 0; i < moduleValues.length; i++) {
moduleValues[i] = pushers[i].getImmediateValue(module);
}
final ModuleFileIndex fileIndex = ModuleRootManager.getInstance(module).getFileIndex();
return () -> fileIndex.iterateContent(new ContentIterator() {
@Override
public boolean processFile(final VirtualFile fileOrDir) {
applyPushersToFile(fileOrDir, pushers, moduleValues);
return true;
}
});
}
});
tasks.add(iteration);
}
invokeConcurrentlyIfPossible(tasks);
}
public static void invokeConcurrentlyIfPossible(final List<Runnable> tasks) {
if (tasks.size() == 1 ||
ApplicationManager.getApplication().isWriteAccessAllowed() ||
!Registry.is("idea.concurrent.scanning.files.to.index")) {
for(Runnable r:tasks) r.run();
return;
}
final ProgressIndicator progress = ProgressManager.getInstance().getProgressIndicator();
final ConcurrentLinkedQueue<Runnable> tasksQueue = new ConcurrentLinkedQueue<>(tasks);
List<Future<?>> results = ContainerUtil.newArrayList();
if (tasks.size() > 1) {
int numThreads = Math.max(Math.min(CacheUpdateRunner.indexingThreadCount() - 1, tasks.size() - 1), 1);
for (int i = 0; i < numThreads; ++i) {
results.add(ApplicationManager.getApplication().executeOnPooledThread(() -> ProgressManager.getInstance().runProcess(() -> {
Runnable runnable;
while ((runnable = tasksQueue.poll()) != null) runnable.run();
}, ProgressWrapper.wrap(progress))));
}
}
Runnable runnable;
while ((runnable = tasksQueue.poll()) != null) runnable.run();
for (Future<?> result : results) {
try {
result.get();
} catch (Exception ex) {
LOG.error(ex);
}
}
}
private void applyPushersToFile(final VirtualFile fileOrDir, final FilePropertyPusher[] pushers, final Object[] moduleValues) {
ApplicationManager.getApplication().runReadAction(() -> {
ProgressManager.checkCanceled();
if (!fileOrDir.isValid()) return;
doApplyPushersToFile(fileOrDir, pushers, moduleValues);
});
}
private void doApplyPushersToFile(VirtualFile fileOrDir, FilePropertyPusher[] pushers, Object[] moduleValues) {
FilePropertyPusher<Object> pusher = null;
try {
final boolean isDir = fileOrDir.isDirectory();
for (int i = 0, pushersLength = pushers.length; i < pushersLength; i++) {
//noinspection unchecked
pusher = pushers[i];
if (!isDir && (pusher.pushDirectoriesOnly() || !pusher.acceptsFile(fileOrDir)) || isDir && !pusher.acceptsDirectory(fileOrDir, myProject)) {
continue;
}
findAndUpdateValue(fileOrDir, pusher, moduleValues != null ? moduleValues[i] : null);
}
}
catch (AbstractMethodError ame) { // acceptsDirectory is missed
if (pusher != null) throw new ExtensionException(pusher.getClass());
throw ame;
}
}
@Override
public <T> void findAndUpdateValue(final VirtualFile fileOrDir, final FilePropertyPusher<T> pusher, final T moduleValue) {
final T value = findPusherValuesUpwards(myProject, fileOrDir, pusher, moduleValue);
updateValue(myProject, fileOrDir, value, pusher);
}
public static <T> void updateValue(final Project project, final VirtualFile fileOrDir, final T value, final FilePropertyPusher<T> pusher) {
final T oldValue = fileOrDir.getUserData(pusher.getFileDataKey());
if (value != oldValue) {
fileOrDir.putUserData(pusher.getFileDataKey(), value);
try {
pusher.persistAttribute(project, fileOrDir, value);
}
catch (IOException e) {
LOG.error(e);
}
}
}
@Override
public void filePropertiesChanged(@NotNull final VirtualFile file) {
ApplicationManager.getApplication().assertReadAccessAllowed();
FileBasedIndex.getInstance().requestReindex(file);
for (final Project project : ProjectManager.getInstance().getOpenProjects()) {
reloadPsi(file, project);
}
}
private static void reloadPsi(final VirtualFile file, final Project project) {
final FileManagerImpl fileManager = (FileManagerImpl)((PsiManagerEx)PsiManager.getInstance(project)).getFileManager();
if (fileManager.findCachedViewProvider(file) != null) {
Runnable runnable = () -> WriteAction.run(() -> fileManager.forceReload(file));
if (ApplicationManager.getApplication().isDispatchThread()) {
runnable.run();
} else {
TransactionGuard.submitTransaction(project, runnable);
}
}
}
}
| |
package org.grobid.trainer.evaluation;
import org.chasen.crfpp.Tagger;
import org.grobid.core.GrobidModels;
import org.grobid.core.engines.tagging.GenericTagger;
import org.grobid.core.engines.tagging.TaggerFactory;
import org.grobid.core.exceptions.GrobidException;
import org.grobid.core.utilities.GrobidProperties;
import org.grobid.core.utilities.TextUtilities;
import org.grobid.trainer.AbstractTrainer;
import org.grobid.trainer.PatentParserTrainer;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.TreeMap;
/**
* Evaluation of the extraction and parsing of the patent and NPL citations present in the patent
* description.
*
* @author Patrice Lopez
*/
public class PatentEvaluation {
// Absolute path of the evaluation corpus (gold data), taken from the trainer configuration.
private String evaluationPath = null;
// Taggers for the three citation-extraction models under evaluation.
private GenericTagger taggerPatent = null;
private GenericTagger taggerNPL = null;
private GenericTagger taggerAll = null;
//where a test file would be put
private String outputPath;
/**
 * Initializes the evaluation and temp-output paths from the GROBID configuration singletons
 * and loads the three citation taggers (NPL-only, patent-only, combined).
 */
public PatentEvaluation() {
    evaluationPath = AbstractTrainer.getEvalCorpusBasePath().getAbsolutePath();
    outputPath = GrobidProperties.getInstance().getTempPath().getAbsolutePath();
    taggerNPL = TaggerFactory.getTagger(GrobidModels.PATENT_NPL);
    taggerPatent = TaggerFactory.getTagger(GrobidModels.PATENT_PATENT);
    taggerAll = TaggerFactory.getTagger(GrobidModels.PATENT_ALL);
}
/**
 * Evaluation of the patent and NPL parsers against an evaluation set in the normal training format
 * at token and instance level.
 *
 * @param type gives the model to be evaluated: 0 is the patent citation only model, 1 is the NPL
 *             citation only model and 2 is the combined patent+NPL citation model.
 * @return report
 * @throws GrobidException if {@code type} does not identify one of the three known models.
 */
public String evaluate(int type) {
    // Validate the requested model only. The previous version also instantiated a
    // PatentParserTrainer and selected a tagger/set name here, but those locals were never
    // used: the actual evaluation (see evaluate()) currently runs all three models
    // regardless of the type requested, so the dead code has been removed.
    if (type != 0 && type != 1 && type != 2) {
        throw new GrobidException("An exception occured while evaluating Grobid. The parameter " +
                "type is undefined.");
    }
    return evaluate();
}
/**
 * Evaluation of the patent and NPL parsers against an evaluation set in the normal training format
 * at token and instance level.
 *
 * For each of the three models (NPL-only, patent-only, combined) the method labels the feature
 * file produced from the evaluation corpus, compares the last two columns of each output line
 * (gold label vs. model label), and appends token-level precision/recall/F1 and instance-level
 * accuracy sections to the returned report.
 *
 * @return report
 */
public String evaluate() {
    // we need first to produce the evaluation files with features from the files in corpus format present
    // in the evaluation folder
    StringBuilder report = new StringBuilder();
    PatentParserTrainer ppt = new PatentParserTrainer();
    //noinspection NullableProblems
    ppt.createDataSet("test", null, evaluationPath, outputPath, 1);
    // All three models are evaluated in turn against the same generated feature file.
    List<GenericTagger> taggers = new ArrayList<GenericTagger>();
    taggers.add(taggerNPL);
    taggers.add(taggerPatent);
    taggers.add(taggerAll);
    // note: there is no field for these models
    for (GenericTagger tagger : taggers) {
        // Token-level counters (all tags, then per citation type).
        // total tag
        int totalExpected = 0;
        int totalCorrect = 0;
        int totalSuggested = 0;
        // total instance
        int totalInstanceExpected = 0;
        int totalInstanceCorrect = 0;
        int totalInstanceSuggested = 0;
        // npl tag
        int totalNPLExpected = 0;
        int totalNPLCorrect = 0;
        int totalNPLSuggested = 0;
        // npl instance
        int totalInstanceNPLExpected = 0;
        int totalInstanceNPLCorrect = 0;
        int totalInstanceNPLSuggested = 0;
        // patent tag
        int totalPatentExpected = 0;
        int totalPatentCorrect = 0;
        int totalPatentSuggested = 0;
        // patent instance
        int totalInstancePatentExpected = 0;
        int totalInstancePatentCorrect = 0;
        int totalInstancePatentSuggested = 0;
        // NOTE(review): the three *InstanceSuggested counters above are declared but never
        // incremented anywhere in this method.
        try {
            // read the evaluation file enriched with feature
            BufferedReader bufReader = new BufferedReader(
                new InputStreamReader(new FileInputStream(outputPath + "/all.test"), "UTF-8"));
            String line = null;
            ArrayList<String> patentBlocks = new ArrayList<String>();
            while ((line = bufReader.readLine()) != null) {
                patentBlocks.add(line);
            }
            bufReader.close();
            //TODO: VZ_FIX
            // String theResult = EvaluationUtilities.taggerRun(patentBlocks, tagger);
            String theResult = tagger.label(patentBlocks);
            //System.out.println(theResult);
            StringTokenizer stt = new StringTokenizer(theResult, "\n");
            // line = null;
            String previousExpectedLabel = null;
            // NOTE(review): previousSuggestedLabel is written at the bottom of the loop but
            // never read.
            String previousSuggestedLabel = null;
            boolean instanceCorrect = true;
            while (stt.hasMoreTokens()) {
                line = stt.nextToken();
                StringTokenizer st = new StringTokenizer(line, "\t");
                String expected = null; // expected tag
                String actual = null; // tag suggested by the model
                String word = null; // the token
                boolean start = true;
                boolean failure = false;
                // Scan all tab-separated columns; the shift below leaves `expected` holding the
                // second-to-last column and `actual` the last one. The first column is the token.
                while (st.hasMoreTokens()) {
                    String token = st.nextToken();
                    if (start) {
                        word = token.trim();
                        start = false;
                    }
                    expected = actual;
                    actual = token.trim();
                }
                // we can simply compare the two last line (expected, actual) for evaluation
                // in this context an instance is a connected sequence the "non-other" tags in the expected tags
                // As in (Peng & McCallum, 2006), we simply measure the accuracy at the instance level.
                // tags
                if ((expected != null) && (actual != null)) {
                    if (!expected.equals("<other>")) {
                        totalExpected++;
                        if (expected.endsWith("refPatent>"))
                            totalPatentExpected++;
                        else if (expected.endsWith("refNPL>"))
                            totalNPLExpected++;
                        else
                            // NOTE(review): this branch reports an unrecognized *expected* tag,
                            // but the message says "suggested" — looks like a copy/paste of the
                            // message below; confirm before changing the report text.
                            report.append("WARNING bizarre suggested tag: " + expected + "\n");
                    }
                    if (!actual.equals("<other>")) {
                        totalSuggested++;
                        if (actual.endsWith("refPatent>"))
                            totalPatentSuggested++;
                        else if (actual.endsWith("refNPL>"))
                            totalNPLSuggested++;
                        else
                            report.append("WARNING bizarre suggested tag: " + actual + "\n");
                    }
                    // Normalize labels: collapse B-/I- style prefixes and the <other> markers.
                    if (actual.endsWith("refPatent>"))
                        actual = "refPatent";
                    else if (actual.endsWith("refNPL>")) {
                        actual = "refNPL";
                    }
                    if (expected.endsWith("refPatent>"))
                        expected = "refPatent";
                    else if (expected.endsWith("refNPL>"))
                        expected = "refNPL";
                    if (actual.equals("<other>"))
                        actual = "other";
                    if (expected.equals("<other>"))
                        expected = "other";
                    if (expected.equals(actual)) {
                        if (!actual.equals("other") && !expected.equals("other")) {
                            totalCorrect++;
                            if (expected.startsWith("refPatent"))
                                totalPatentCorrect++;
                            else if (expected.startsWith("refNPL"))
                                totalNPLCorrect++;
                        }
                    } else {
                        failure = true;
                    }
                    // expected instance
                    if (!expected.equals("other")) {
                        if ((previousExpectedLabel == null) || (!expected.equals(previousExpectedLabel))) {
                            // we are starting a new instance
                            // are we ending an instance?
                            if (previousExpectedLabel != null) {
                                if (!previousExpectedLabel.equals("other")) {
                                    // we are ending an instance
                                    if (instanceCorrect) {
                                        // NOTE(review): unlike the matching close-out branch
                                        // below, totalInstanceCorrect is NOT incremented here —
                                        // the overall instance tally likely undercounts when one
                                        // instance directly follows another; confirm.
                                        if (previousExpectedLabel.startsWith("refPatent"))
                                            totalInstancePatentCorrect++;
                                        else if (previousExpectedLabel.startsWith("refNPL"))
                                            totalInstanceNPLCorrect++;
                                    }
                                }
                            }
                            // new instance
                            totalInstanceExpected++;
                            if (expected.startsWith("refPatent"))
                                totalInstancePatentExpected++;
                            else if (expected.startsWith("refNPL"))
                                totalInstanceNPLExpected++;
                            instanceCorrect = true;
                        }
                    } else {
                        // are we ending an instance?
                        if (previousExpectedLabel != null) {
                            if (!previousExpectedLabel.equals("other")) {
                                // we are ending an instance
                                if (instanceCorrect) {
                                    totalInstanceCorrect++;
                                    if (previousExpectedLabel.startsWith("refPatent"))
                                        totalInstancePatentCorrect++;
                                    else if (previousExpectedLabel.startsWith("refNPL"))
                                        totalInstanceNPLCorrect++;
                                }
                                instanceCorrect = true;
                            }
                        }
                    }
                    // Any token-level mismatch taints the whole current instance.
                    if (failure) {
                        instanceCorrect = false;
                    }
                    previousExpectedLabel = expected;
                    previousSuggestedLabel = actual;
                }
            }
            // NOTE(review): an instance still open when the input ends is never closed out, so
            // the final instance of the file is not counted; confirm whether intentional.
        } catch (Exception e) {
            throw new GrobidException("An exception occured while evaluating Grobid.", e);
        }
        // Report building. Note: if a denominator below is 0, the double division yields NaN,
        // which propagates into the printed metrics.
        double precision;
        double recall;
        double f;
        if (tagger == taggerNPL) {
            report.append("\n\n*********************************************\n");
            report.append("****** NPL reference extraction model *******\n");
            report.append("*********************************************\n");
        } else if (tagger == taggerPatent) {
            report.append("\n\n************************************************\n");
            report.append("****** patent reference extraction model *******\n");
            report.append("************************************************\n");
        } else if (tagger == taggerAll) {
            report.append("\n\n*************************************************************\n");
            report.append("****** combined NPL+patent reference extraction model *******\n");
            report.append("*************************************************************\n");
        }
        if (tagger == taggerAll) {
            report.append("\n======== GENERAL TAG EVALUATION ========\n");
            report.append("Total expected tags: ").append(totalExpected).append("\n");
            report.append("Total suggested tags: ").append(totalSuggested).append("\n");
            report.append("Total correct tags (Correct Positive): ").append(totalCorrect).append("\n");
            report.append("Total incorrect tags (False Positive + False Negative): ").append(Math.abs(totalSuggested - totalCorrect)).append("\n");
            precision = (double) totalCorrect / totalSuggested;
            recall = (double) totalCorrect / totalExpected;
            f = 2 * precision * recall / (precision + recall);
            report.append("Precision\t= ").append(TextUtilities.formatTwoDecimals(precision * 100)).append("\n");
            report.append("Recall\t= ").append(TextUtilities.formatTwoDecimals(recall * 100)).append("\n");
            report.append("F-score\t= ").append(TextUtilities.formatTwoDecimals(f * 100)).append("\n");
        }
        if (tagger != taggerPatent) {
            report.append("\n======== TAG NPL EVALUATION ========\n");
            report.append("Total expected tags: ").append(totalNPLExpected).append("\n");
            report.append("Total suggested tags: ").append(totalNPLSuggested).append("\n");
            report.append("Total correct tags (Correct Positive): ").append(totalNPLCorrect).append("\n");
            report.append("Total incorrect tags (False Positive + False Negative): ").append(Math.abs(totalNPLSuggested - totalNPLCorrect)).append("\n");
            precision = (double) totalNPLCorrect / totalNPLSuggested;
            recall = (double) totalNPLCorrect / totalNPLExpected;
            f = 2 * precision * recall / (precision + recall);
            report.append("Precision\t= ").append(TextUtilities.formatTwoDecimals(precision * 100)).append("\n");
            report.append("Recall\t= ").append(TextUtilities.formatTwoDecimals(recall * 100)).append("\n");
            report.append("F-score\t= ").append(TextUtilities.formatTwoDecimals(f * 100)).append("\n");
        }
        if (tagger != taggerNPL) {
            report.append("\n======== TAG PATENT EVALUATION ========\n");
            report.append("Total expected tags: ").append(totalPatentExpected).append("\n");
            report.append("Total suggested tags: ").append(totalPatentSuggested).append("\n");
            report.append("Total correct tags (Correct Positive): ").append(totalPatentCorrect).append("\n");
            report.append("Total incorrect tags (False Positive + False Negative): ").append(Math.abs(totalPatentSuggested - totalPatentCorrect)).append("\n");
            precision = (double) totalPatentCorrect / totalPatentSuggested;
            recall = (double) totalPatentCorrect / totalPatentExpected;
            f = 2 * precision * recall / (precision + recall);
            report.append("Precision\t= ").append(TextUtilities.formatTwoDecimals(precision * 100)).append("\n");
            report.append("Recall\t= ").append(TextUtilities.formatTwoDecimals(recall * 100)).append("\n");
            report.append("F-score\t= ").append(TextUtilities.formatTwoDecimals(f * 100)).append("\n");
        }
        if (tagger == taggerAll) {
            report.append("\n======== GENERAL INSTANCE EVALUATION ========\n");
            report.append("Total expected instances: ").append(totalInstanceExpected).append("\n");
            report.append("Total correct instances: ").append(totalInstanceCorrect).append("\n");
            recall = (double) totalInstanceCorrect / totalInstanceExpected;
            report.append("Instance Accuracy = ").append(TextUtilities.formatTwoDecimals(recall * 100)).append("\n");
        }
        if (tagger != taggerPatent) {
            report.append("\n======== INSTANCE NPL EVALUATION ========\n");
            report.append("Total expected instances: ").append(totalInstanceNPLExpected).append("\n");
            report.append("Total correct instances: ").append(totalInstanceNPLCorrect).append("\n");
            recall = (double) totalInstanceNPLCorrect / totalInstanceNPLExpected;
            report.append("Instance accuracy = ").append(TextUtilities.formatTwoDecimals(recall * 100)).append("\n");
        }
        if (tagger != taggerNPL) {
            report.append("\n======== INSTANCE PATENT EVALUATION ========\n");
            report.append("Total expected instances: ").append(totalInstancePatentExpected).append("\n");
            report.append("Total correct instances: ").append(totalInstancePatentCorrect).append("\n");
            recall = (double) totalInstancePatentCorrect / totalInstancePatentExpected;
            report.append("Instance accuracy = ").append(TextUtilities.formatTwoDecimals(recall * 100)).append("\n\n");
        }
    }
    return report.toString();
}
/**
* Evaluation of the extraction against the gold corpus for patent reference resolution.
* Use in particular for a comparison with Ddoc and ACE.
* @param path file path
*/
public void evaluateGold(File path) {
try {
TreeMap<String, ArrayList<String>> rfap_reference = new TreeMap<String, ArrayList<String>>();
TreeMap<String, ArrayList<String>> rf_reference = new TreeMap<String, ArrayList<String>>();
TreeMap<String, ArrayList<String>> rfap_ace = new TreeMap<String, ArrayList<String>>();
TreeMap<String, ArrayList<String>> rf_ace = new TreeMap<String, ArrayList<String>>();
TreeMap<String, ArrayList<String>> rfap_Ddoc = new TreeMap<String, ArrayList<String>>();
TreeMap<String, ArrayList<String>> rf_Ddoc = new TreeMap<String, ArrayList<String>>();
TreeMap<String, ArrayList<String>> rfap = new TreeMap<String, ArrayList<String>>();
TreeMap<String, ArrayList<String>> rf = new TreeMap<String, ArrayList<String>>();
// we parse the log file for getting reference data and ACE/Ddoc results
String dossierName = null;
BufferedReader br = new BufferedReader(
new InputStreamReader(
new FileInputStream(evaluationPath + "/gold/REF_20100426.txt"), "UTF8"));
String s;
// boolean rf_part = false;
ArrayList<String> resap_reference = null;
ArrayList<String> res_reference = null;
while ((s = br.readLine()) != null) {
if (s.length() == 0) continue;
if (s.startsWith("RFAP:")) {
resap_reference = new ArrayList<String>();
s = s.substring(5, s.length());
String[] pats = s.split(" ");
for (String pat : pats) {
if (pat != null) {
if (pat.length() > 0) {
if (!resap_reference.contains(pat))
resap_reference.add(pat);
}
}
}
} else if (s.startsWith("RF:")) {
res_reference = new ArrayList<String>();
s = s.substring(3, s.length());
String[] pats = s.split(" ");
for (String pat : pats) {
if (pat != null) {
if (pat.length() > 0) {
if (!res_reference.contains(pat))
res_reference.add(pat);
}
}
}
} else {
if (dossierName != null) {
rfap_reference.put(dossierName, resap_reference);
rf_reference.put(dossierName, res_reference);
}
dossierName = s.trim();
dossierName = dossierName.replace(".txt", "");
}
}
rfap_reference.put(dossierName, resap_reference);
rf_reference.put(dossierName, res_reference);
br.close();
// we parse the log file for getting ACE results
br = new BufferedReader(
new InputStreamReader(
new FileInputStream(evaluationPath + "/ACE_20100426.txt"), "UTF8"));
// rf_part = false;
ArrayList<String> resap_ace = null;
ArrayList<String> res_ace = null;
dossierName = null;
while ((s = br.readLine()) != null) {
if (s.length() == 0) continue;
if (s.startsWith("RFAP:")) {
resap_ace = new ArrayList<String>();
s = s.substring(5, s.length());
String[] pats = s.split(" ");
for (String pat : pats) {
if (pat != null) {
if (pat.length() > 0) {
if (!resap_ace.contains(pat))
resap_ace.add(pat);
}
}
}
} else if (s.startsWith("RF:")) {
res_ace = new ArrayList<String>();
s = s.substring(3, s.length());
String[] pats = s.split(" ");
for (String pat : pats) {
if (pat != null) {
if (pat.length() > 0) {
if (!res_ace.contains(pat))
res_ace.add(pat);
}
}
}
} else {
if (dossierName != null) {
rfap_ace.put(dossierName, resap_ace);
rf_ace.put(dossierName, res_ace);
}
dossierName = s.trim();
dossierName = dossierName.replace(".txt", "");
}
}
rfap_ace.put(dossierName, resap_ace);
rf_ace.put(dossierName, res_ace);
br.close();
// we parse the log file for Ddoc results
br = new BufferedReader(
new InputStreamReader(
new FileInputStream(evaluationPath + "/Ddoc_20100426.txt"), "UTF8"));
ArrayList<String> resap_Ddoc = null;
ArrayList<String> res_Ddoc = null;
dossierName = null;
while ((s = br.readLine()) != null) {
if (s.length() == 0) continue;
if (s.startsWith("RFAP:")) {
resap_Ddoc = new ArrayList<String>();
s = s.substring(5, s.length());
String[] pats = s.split(" ");
for (String pat : pats) {
if (pat != null) {
if (pat.length() > 0) {
if (!resap_Ddoc.contains(pat))
resap_Ddoc.add(pat);
}
}
}
} else if (s.startsWith("RF:")) {
res_Ddoc = new ArrayList<String>();
s = s.substring(3, s.length());
String[] pats = s.split(" ");
for (String pat : pats) {
if (pat != null) {
if (pat.length() > 0) {
if (!res_Ddoc.contains(pat))
res_Ddoc.add(pat);
}
}
}
} else {
if (dossierName != null) {
rfap_Ddoc.put(dossierName, resap_Ddoc);
rf_Ddoc.put(dossierName, res_Ddoc);
}
dossierName = s.trim();
dossierName = dossierName.replace(".txt", "");
}
}
rfap_Ddoc.put(dossierName, resap_Ddoc);
rf_Ddoc.put(dossierName, res_Ddoc);
br.close();
/*while((s = br.readLine()) != null) {
s = s.substring(1, s.length());
if (s.trim().length() == 0) continue;
if (s.startsWith("EP") & (dossierName == null)) {
StringTokenizer st = new StringTokenizer(s, " ");
dossierName = st.nextToken().trim();
//dossierName = "EP20"+dossierName.substring(2,4)+"0"+dossierName.substring(4,dossierName.length());
//System.out.println(dossierName);
}
else if (s.startsWith("RFAP")) {
rf_part = false;
resap_reference = new ArrayList<String>();
resap_ace = new ArrayList<String>();
resap_Ddoc = new ArrayList<String>();
}
else if (s.startsWith("RF")) {
rf_part = true;
res_reference = new ArrayList<String>();
res_ace = new ArrayList<String>();
res_Ddoc = new ArrayList<String>();
}
else if (s.startsWith("_______")) {
rfap_reference.put(dossierName, resap_reference);
rf_reference.put(dossierName, res_reference);
rfap_ace.put(dossierName, resap_ace);
rf_ace.put(dossierName, res_ace);
rfap_Ddoc.put(dossierName, resap_Ddoc);
rf_Ddoc.put(dossierName, res_Ddoc);
dossierName = null;
}
else {
StringTokenizer st = new StringTokenizer(s, "|");
if (rf_part) {
String tok1 = st.nextToken().trim();
String tok2 = st.nextToken().trim();
String tok3 = st.nextToken().trim();
if (tok1.length() > 0) {
if (!res_reference.contains(tok1))
res_reference.add(tok1);
}
if (tok2.length() > 0) {
if (!res_ace.contains(tok2))
res_ace.add(tok2);
}
if (tok3.length() > 0) {
if (!res_Ddoc.contains(tok3))
res_Ddoc.add(tok3);
}
}
else {
String tok1 = st.nextToken().trim();
if (!st.hasMoreTokens())
System.out.println("WARNING: " + s);
String tok2 = st.nextToken().trim();
if (!st.hasMoreTokens())
System.out.println("WARNING: " + s);
String tok3 = st.nextToken().trim();
if (tok1.length() > 0) {
if (!resap_reference.contains(tok1))
resap_reference.add(tok1);
}
if (tok2.length() > 0) {
if (!resap_ace.contains(tok2))
resap_ace.add(tok2);
}
if (tok3.length() > 0) {
if (!resap_Ddoc.contains(tok3))
resap_Ddoc.add(tok3);
}
}
}
}
br.close();
*/
// we parse our own results
BufferedReader br2 = new BufferedReader(
new InputStreamReader(new FileInputStream(path.getParent() + "/report.txt"), "UTF8"));
dossierName = null;
ArrayList<String> resap = null;
ArrayList<String> res = null;
while ((s = br2.readLine()) != null) {
if (s.length() == 0) continue;
if (s.startsWith("RFAP:")) {
resap = new ArrayList<String>();
s = s.substring(5, s.length());
String[] pats = s.split(" ");
for (String pat : pats) {
if (pat != null) {
if (pat.length() > 0) {
if (!resap.contains(pat))
resap.add(pat);
}
}
}
} else if (s.startsWith("RF:")) {
res = new ArrayList<String>();
s = s.substring(3, s.length());
String[] pats = s.split(" ");
for (String pat : pats) {
if (pat != null) {
if (pat.length() > 0) {
if (!res.contains(pat))
res.add(pat);
}
}
}
} else {
if (dossierName != null) {
rfap.put(dossierName, resap);
rf.put(dossierName, res);
}
dossierName = s.trim();
dossierName = dossierName.replace(".txt", "");
}
}
rfap.put(dossierName, resap);
rf.put(dossierName, res);
br2.close();
// all the set are initiated, we compute the metrics
// reference
int count_rfap_reference = 0;
for (Map.Entry<String, ArrayList<String>> entry : rfap_reference.entrySet()) {
// dossierName = entry.getKey();
ArrayList<String> liste = entry.getValue();
count_rfap_reference += liste.size();
}
int count_rf_reference = 0;
int nbDossier = 0;
for (Map.Entry<String, ArrayList<String>> entry : rf_reference.entrySet()) {
// dossierName = entry.getKey();
ArrayList<String> liste = entry.getValue();
count_rf_reference += liste.size();
nbDossier++;
}
System.out.println("Ref. data: " + count_rfap_reference + " serials and "
+ count_rf_reference + " publications, total: "
+ (count_rfap_reference + count_rf_reference) + " in " + nbDossier + " dossiers");
// ace
int count_rfap_ace = 0;
int count_rfap_ace_correct = 0;
for (Map.Entry<String, ArrayList<String>> entry : rfap_ace.entrySet()) {
dossierName = entry.getKey();
ArrayList<String> referenceListe = rfap_reference.get(dossierName);
ArrayList<String> liste = entry.getValue();
count_rfap_ace += liste.size();
for (String pat : liste) {
if (referenceListe.contains(pat)) {
count_rfap_ace_correct++;
}
}
}
int count_rf_ace = 0;
int count_rf_ace_correct = 0;
nbDossier = 0;
for (Map.Entry<String, ArrayList<String>> entry : rf_ace.entrySet()) {
dossierName = entry.getKey();
ArrayList<String> referenceListe = rf_reference.get(dossierName);
ArrayList<String> liste = entry.getValue();
count_rf_ace += liste.size();
for (String pat : liste) {
if (referenceListe.contains(pat)) {
count_rf_ace_correct++;
}
}
nbDossier++;
}
System.out.println("ACE data: " + count_rfap_ace + " (" + count_rfap_ace_correct + " correct) serials and "
+ count_rf_ace + " (" + count_rf_ace_correct + " correct) publications, total: " + (count_rfap_ace + count_rf_ace)
+ " in " + nbDossier + " dossiers");
// Ddoc
int count_rfap_Ddoc = 0;
int count_rfap_Ddoc_correct = 0;
for (Map.Entry<String, ArrayList<String>> entry : rfap_Ddoc.entrySet()) {
dossierName = entry.getKey();
ArrayList<String> referenceListe = rfap_reference.get(dossierName);
ArrayList<String> liste = entry.getValue();
count_rfap_Ddoc += liste.size();
for (String pat : liste) {
if (referenceListe.contains(pat)) {
count_rfap_Ddoc_correct++;
}
}
}
int count_rf_Ddoc = 0;
int count_rf_Ddoc_correct = 0;
nbDossier = 0;
for (Map.Entry<String, ArrayList<String>> entry : rf_Ddoc.entrySet()) {
dossierName = entry.getKey();
ArrayList<String> referenceListe = rf_reference.get(dossierName);
ArrayList<String> liste = entry.getValue();
count_rf_Ddoc += liste.size();
for (String pat : liste) {
if (referenceListe.contains(pat)) {
count_rf_Ddoc_correct++;
}
}
nbDossier++;
}
System.out.println("Ddoc data: " + count_rfap_Ddoc + " (" + count_rfap_Ddoc_correct + " correct) serials and "
+ count_rf_Ddoc + " (" + count_rf_Ddoc_correct + " correct) publications, total: " + (count_rfap_Ddoc + count_rf_Ddoc)
+ " in " + nbDossier + " dossiers");
// GROBID
int count_rfap = 0;
int count_rfap_correct = 0;
for (Map.Entry<String, ArrayList<String>> entry : rfap.entrySet()) {
//System.out.println("key is " + entry.getKey() + " and value is " + entry.getValue());
dossierName = entry.getKey();
ArrayList<String> referenceListe = rfap_reference.get(dossierName);
if (referenceListe != null) {
ArrayList<String> liste = entry.getValue();
count_rfap += liste.size();
for (String pat : liste) {
if (referenceListe.contains(pat)) {
count_rfap_correct++;
}
}
}
}
int count_rf = 0;
int count_rf_correct = 0;
nbDossier = 0;
for (Map.Entry<String, ArrayList<String>> entry : rf.entrySet()) {
dossierName = entry.getKey();
ArrayList<String> referenceListe = rf_reference.get(dossierName);
if (referenceListe != null) {
ArrayList<String> liste = entry.getValue();
count_rf += liste.size();
for (String pat : liste) {
if (referenceListe.contains(pat)) {
count_rf_correct++;
}
}
nbDossier++;
} else
System.out.println("WARNING! file " + dossierName
+ " in GROBID's results but not in reference results");
}
System.out.println("GROBID data: " + count_rfap + " (" + count_rfap_correct + " correct) serials and "
+ count_rf + " (" + count_rf_correct + " correct) publications, total: " + (count_rfap + count_rf)
+ " in " + nbDossier + " dossiers");
// creating sharing Ddoc and Grobid by intersection
int count_rfap_DdocIGROBID = 0;
int count_rfap_DdocIGROBID_correct = 0;
for (Map.Entry<String, ArrayList<String>> entry : rfap_Ddoc.entrySet()) {
dossierName = entry.getKey();
ArrayList<String> referenceListe = rfap_reference.get(dossierName);
ArrayList<String> liste = entry.getValue();
ArrayList<String> listeGrobid = rfap.get(dossierName);
if (listeGrobid == null) {
System.out.println("WARNING! file " + dossierName
+ " in Ddoc results but not in GROBID's one");
} else {
ArrayList<String> liste2 = new ArrayList<String>();
for (String toto : liste) {
if (listeGrobid.contains(toto))
liste2.add(toto);
}
count_rfap_DdocIGROBID += liste2.size();
for (String pat : liste2) {
if (referenceListe.contains(pat)) {
count_rfap_DdocIGROBID_correct++;
}
}
}
}
int count_rf_DdocIGROBID = 0;
int count_rf_DdocIGROBID_correct = 0;
nbDossier = 0;
for (Map.Entry<String, ArrayList<String>> entry : rf_Ddoc.entrySet()) {
dossierName = entry.getKey();
ArrayList<String> referenceListe = rf_reference.get(dossierName);
ArrayList<String> liste = entry.getValue();
ArrayList<String> listeGrobid = rf.get(dossierName);
if (listeGrobid == null) {
System.out.println("WARNING! file " + dossierName
+ " in Ddoc results but not in GROBID's one");
} else {
ArrayList<String> liste2 = new ArrayList<String>();
for (String toto : liste) {
if (listeGrobid.contains(toto))
liste2.add(toto);
}
count_rf_DdocIGROBID += liste2.size();
for (String pat : liste2) {
if (referenceListe.contains(pat)) {
count_rf_DdocIGROBID_correct++;
}
}
nbDossier++;
}
}
System.out.println("Ddoc+GROBID data: " + count_rfap_DdocIGROBID + " (" + count_rfap_DdocIGROBID_correct
+ " correct) serials and "
+ count_rf_DdocIGROBID + " (" + count_rf_DdocIGROBID_correct + " correct) publications, total: "
+ (count_rfap_DdocIGROBID + count_rf_DdocIGROBID)
+ " in " + nbDossier + " dossiers");
// creating sharing Ddoc and Grobid by union
int count_rfap_DdocUGROBID = 0;
int count_rfap_DdocUGROBID_correct = 0;
for (Map.Entry<String, ArrayList<String>> entry : rfap_Ddoc.entrySet()) {
dossierName = entry.getKey();
ArrayList<String> referenceListe = rfap_reference.get(dossierName);
ArrayList<String> liste = entry.getValue();
ArrayList<String> listeGrobid = rfap.get(dossierName);
if (listeGrobid == null) {
System.out.println("WARNING! file " + dossierName
+ " in Ddoc results but not in GROBID's one");
} else {
for (String toto : listeGrobid) {
if (!liste.contains(toto))
liste.add(toto);
}
count_rfap_DdocUGROBID += liste.size();
for (String pat : liste) {
if (referenceListe.contains(pat)) {
count_rfap_DdocUGROBID_correct++;
}
}
}
}
int count_rf_DdocUGROBID = 0;
int count_rf_DdocUGROBID_correct = 0;
nbDossier = 0;
for (Map.Entry<String, ArrayList<String>> entry : rf_Ddoc.entrySet()) {
dossierName = entry.getKey();
ArrayList<String> referenceListe = rf_reference.get(dossierName);
ArrayList<String> liste = entry.getValue();
ArrayList<String> listeGrobid = rf.get(dossierName);
if (listeGrobid == null) {
System.out.println("WARNING! file " + dossierName
+ " in Ddoc results but not in GROBID's one");
} else {
for (String toto : listeGrobid) {
if (!liste.contains(toto))
liste.add(toto);
}
count_rf_DdocUGROBID += liste.size();
for (String pat : liste) {
if (referenceListe.contains(pat)) {
count_rf_DdocUGROBID_correct++;
}
}
nbDossier++;
}
}
System.out.println("Ddoc|GROBID data: " + count_rfap_DdocUGROBID + " (" + count_rfap_DdocUGROBID_correct
+ " correct) serials and "
+ count_rf_DdocUGROBID + " (" + count_rf_DdocUGROBID_correct + " correct) publications, total: "
+ (count_rfap_DdocUGROBID + count_rf_DdocUGROBID)
+ " in " + nbDossier + " dossiers");
// ACE
double ace_rfap_precision = (double) count_rfap_ace_correct / count_rfap_ace;
double ace_rfap_recall = (double) count_rfap_ace_correct / count_rfap_reference;
double ace_rfap_f = (2 * ace_rfap_precision * ace_rfap_recall)
/ (ace_rfap_precision + ace_rfap_recall);
double ace_rf_precision = (double) count_rf_ace_correct / count_rf_ace;
double ace_rf_recall = (double) count_rf_ace_correct / count_rf_reference;
double ace_rf_f = (2 * ace_rf_precision * ace_rf_recall)
/ (ace_rf_precision + ace_rf_recall);
double ace_rfall_precision = (double) (count_rfap_ace_correct + count_rf_ace_correct)
/ (count_rfap_ace + count_rf_ace);
double ace_rfall_recall = (double) (count_rfap_ace_correct + count_rf_ace_correct)
/ (count_rfap_reference + count_rf_reference);
double ace_rfall_f = (2 * ace_rfall_precision * ace_rfall_recall)
/ (ace_rfall_precision + ace_rfall_recall);
// Ddoc
double Ddoc_rfap_precision = (double) count_rfap_Ddoc_correct / count_rfap_Ddoc;
double Ddoc_rfap_recall = (double) count_rfap_Ddoc_correct / count_rfap_reference;
double Ddoc_rfap_f = (2 * Ddoc_rfap_precision * Ddoc_rfap_recall)
/ (Ddoc_rfap_precision + Ddoc_rfap_recall);
double Ddoc_rf_precision = (double) count_rf_Ddoc_correct / count_rf_Ddoc;
double Ddoc_rf_recall = (double) count_rf_Ddoc_correct / count_rf_reference;
double Ddoc_rf_f = (2 * Ddoc_rf_precision * Ddoc_rf_recall)
/ (Ddoc_rf_precision + Ddoc_rf_recall);
double Ddoc_rfall_precision = (double) (count_rfap_Ddoc_correct + count_rf_Ddoc_correct)
/ (count_rfap_Ddoc + count_rf_Ddoc);
double Ddoc_rfall_recall = (double) (count_rfap_Ddoc_correct + count_rf_Ddoc_correct)
/ (count_rfap_reference + count_rf_reference);
double Ddoc_rfall_f = (2 * Ddoc_rfall_precision * Ddoc_rfall_recall)
/ (Ddoc_rfall_precision + Ddoc_rfall_recall);
// GROBID
double grobid_rfap_precision = (double) count_rfap_correct / count_rfap;
double grobid_rfap_recall = (double) count_rfap_correct / count_rfap_reference;
double grobid_rfap_f = (2 * grobid_rfap_precision * grobid_rfap_recall)
/ (grobid_rfap_precision + grobid_rfap_recall);
double grobid_rf_precision = (double) count_rf_correct / count_rf;
double grobid_rf_recall = (double) count_rf_correct / count_rf_reference;
double grobid_rf_f = (2 * grobid_rf_precision * grobid_rf_recall)
/ (grobid_rf_precision + grobid_rf_recall);
double grobid_rfall_precision = (double) (count_rfap_correct + count_rf_correct)
/ (count_rf + count_rfap);
double grobid_rfall_recall = (double) (count_rfap_correct + count_rf_correct)
/ (count_rfap_reference + count_rf_reference);
double grobid_rfall_f = (2 * grobid_rfall_precision * grobid_rfall_recall)
/ (grobid_rfall_precision + grobid_rfall_recall);
// Ddoc ? GROBID
double DdocIGROBID_rfap_precision = (double) count_rfap_DdocIGROBID_correct / count_rfap_DdocIGROBID;
double DdocIGROBID_rfap_recall = (double) count_rfap_DdocIGROBID_correct / count_rfap_reference;
double DdocIGROBID_rfap_f = (2 * DdocIGROBID_rfap_precision * DdocIGROBID_rfap_recall)
/ (DdocIGROBID_rfap_precision + DdocIGROBID_rfap_recall);
double DdocIGROBID_rf_precision = (double) count_rf_DdocIGROBID_correct / count_rf_DdocIGROBID;
double DdocIGROBID_rf_recall = (double) count_rf_DdocIGROBID_correct / count_rf_reference;
double DdocIGROBID_rf_f = (2 * DdocIGROBID_rf_precision * DdocIGROBID_rf_recall)
/ (DdocIGROBID_rf_precision + DdocIGROBID_rf_recall);
double DdocIGROBID_rfall_precision = (double) (count_rfap_DdocIGROBID_correct + count_rf_DdocIGROBID_correct)
/ (count_rfap_DdocIGROBID + count_rf_DdocIGROBID);
double DdocIGROBID_rfall_recall = (double) (count_rfap_DdocIGROBID_correct + count_rf_DdocIGROBID_correct)
/ (count_rfap_reference + count_rf_reference);
double DdocIGROBID_rfall_f = (2 * DdocIGROBID_rfall_precision * DdocIGROBID_rfall_recall)
/ (DdocIGROBID_rfall_precision + DdocIGROBID_rfall_recall);
// Ddoc U GROBID
double DdocUGROBID_rfap_precision = (double) count_rfap_DdocUGROBID_correct / count_rfap_DdocUGROBID;
double DdocUGROBID_rfap_recall = (double) count_rfap_DdocUGROBID_correct / count_rfap_reference;
double DdocUGROBID_rfap_f = (2 * DdocUGROBID_rfap_precision * DdocUGROBID_rfap_recall)
/ (DdocUGROBID_rfap_precision + DdocUGROBID_rfap_recall);
double DdocUGROBID_rf_precision = (double) count_rf_DdocUGROBID_correct / count_rf_DdocUGROBID;
double DdocUGROBID_rf_recall = (double) count_rf_DdocUGROBID_correct / count_rf_reference;
double DdocUGROBID_rf_f = (2 * DdocUGROBID_rf_precision * DdocUGROBID_rf_recall)
/ (DdocUGROBID_rf_precision + DdocUGROBID_rf_recall);
double DdocUGROBID_rfall_precision = (double) (count_rfap_DdocUGROBID_correct + count_rf_DdocUGROBID_correct)
/ (count_rfap_DdocUGROBID + count_rf_DdocUGROBID);
double DdocUGROBID_rfall_recall = (double) (count_rfap_DdocUGROBID_correct + count_rf_DdocUGROBID_correct)
/ (count_rfap_reference + count_rf_reference);
double DdocUGROBID_rfall_f = (2 * DdocUGROBID_rfall_precision * DdocUGROBID_rfall_recall)
/ (DdocUGROBID_rfall_precision + DdocUGROBID_rfall_recall);
// print the report
System.out.println("___________________________________________________________");
System.out.println("RFAP: ");
System.out.println("\t\tPrecision\tRecall\t\tF-score");
System.out.println("ACE\t\t" + TextUtilities.formatTwoDecimals(ace_rfap_precision * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(ace_rfap_recall * 100)
+ "\t\t" + TextUtilities.formatTwoDecimals(ace_rfap_f * 100));
System.out.println("Ddoc\t" + TextUtilities.formatTwoDecimals(Ddoc_rfap_precision * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(Ddoc_rfap_recall * 100)
+ "\t\t" + TextUtilities.formatTwoDecimals(Ddoc_rfap_f * 100));
System.out.println("GROBID\t" + TextUtilities.formatTwoDecimals(grobid_rfap_precision * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(grobid_rfap_recall * 100)
+ "\t\t" + TextUtilities.formatTwoDecimals(grobid_rfap_f * 100));
System.out.println("Ddoc+GROBID\t" + TextUtilities.formatTwoDecimals(DdocIGROBID_rfap_precision * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(DdocIGROBID_rfap_recall * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(DdocIGROBID_rfap_f * 100));
System.out.println("Ddoc|GROBID\t" + TextUtilities.formatTwoDecimals(DdocUGROBID_rfap_precision * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(DdocUGROBID_rfap_recall * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(DdocUGROBID_rfap_f * 100));
System.out.println("\n___________________________________________________________");
System.out.println("RF: ");
System.out.println("\t\tPrecision\tRecall\t\tF-score");
System.out.println("ACE\t\t" + TextUtilities.formatTwoDecimals(ace_rf_precision * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(ace_rf_recall * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(ace_rf_f * 100));
System.out.println("Ddoc\t" + TextUtilities.formatTwoDecimals(Ddoc_rf_precision * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(Ddoc_rf_recall * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(Ddoc_rf_f * 100));
System.out.println("GROBID\t" + TextUtilities.formatTwoDecimals(grobid_rf_precision * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(grobid_rf_recall * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(grobid_rf_f * 100));
System.out.println("Ddoc+GROBID\t" + TextUtilities.formatTwoDecimals(DdocIGROBID_rf_precision * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(DdocIGROBID_rf_recall * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(DdocIGROBID_rf_f * 100));
System.out.println("Ddoc|GROBID\t" + TextUtilities.formatTwoDecimals(DdocUGROBID_rf_precision * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(DdocUGROBID_rf_recall * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(DdocUGROBID_rf_f * 100));
System.out.println("\n___________________________________________________________");
System.out.println("All: ");
System.out.println("\t\tPrecision\tRecall\t\tF-score");
System.out.println("ACE\t\t" + TextUtilities.formatTwoDecimals(ace_rfall_precision * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(ace_rfall_recall * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(ace_rfall_f * 100));
System.out.println("Ddoc\t" + TextUtilities.formatTwoDecimals(Ddoc_rfall_precision * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(Ddoc_rfall_recall * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(Ddoc_rfall_f * 100));
System.out.println("GROBID\t" + TextUtilities.formatTwoDecimals(grobid_rfall_precision * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(grobid_rfall_recall * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(grobid_rfall_f * 100));
System.out.println("Ddoc+GROBID\t" + TextUtilities.formatTwoDecimals(DdocIGROBID_rfall_precision * 100)
+ "\t\t"
+ TextUtilities.formatTwoDecimals(DdocIGROBID_rfall_recall * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(DdocIGROBID_rfall_f * 100));
System.out.println("Ddod|GROBID\t" + TextUtilities.formatTwoDecimals(DdocUGROBID_rfall_precision * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(DdocUGROBID_rfall_recall * 100) + "\t\t"
+ TextUtilities.formatTwoDecimals(DdocUGROBID_rfall_f * 100));
// write Ddoc and reference results
File fileOut = new File(path.getParent() + "/reference.txt");
OutputStream os = new FileOutputStream(fileOut, false);
Writer referenceWriter = new OutputStreamWriter(os, "UTF-8");
//Collection.reverse(rf_reference);
//rf_reference = new TreeMap<String, ArrayList<String>>(Collections.reverseOrder());
System.out.println("Reference data in " + path.getParent() + "/reference.txt");
for (Map.Entry<String, ArrayList<String>> entry : rf_reference.entrySet()) {
dossierName = entry.getKey();
referenceWriter.write(dossierName + ".txt\n");
ArrayList<String> referenceListe1 = rfap_reference.get(dossierName);
ArrayList<String> liste = entry.getValue();
referenceWriter.write("RFAP: ");
for (String toto : referenceListe1) {
referenceWriter.write(toto + " ");
}
referenceWriter.write("\nRF: ");
for (String toto : liste) {
referenceWriter.write(toto + " ");
}
referenceWriter.write("\n");
}
referenceWriter.close();
} catch (Exception e) {
e.printStackTrace();
throw new GrobidException("An exception occurred while evaluating Grobid.", e);
}
}
}
| |
/*
Derby - Class com.pivotal.gemfirexd.internal.impl.services.monitor.FileMonitor
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/*
* Changes for GemFireXD distributed data platform (some marked by "GemStone changes")
*
* Portions Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.impl.services.monitor;
import com.pivotal.gemfirexd.internal.iapi.reference.Property;
import com.pivotal.gemfirexd.internal.iapi.services.info.ProductGenusNames;
import com.pivotal.gemfirexd.internal.iapi.services.info.ProductVersionHolder;
import com.pivotal.gemfirexd.internal.iapi.services.io.FileUtil;
import com.pivotal.gemfirexd.internal.iapi.services.monitor.Monitor;
import com.pivotal.gemfirexd.internal.iapi.services.property.PropertyUtil;
import com.pivotal.gemfirexd.internal.iapi.services.stream.HeaderPrintWriter;
import com.pivotal.gemfirexd.internal.impl.services.stream.GfxdHeaderPrintWriterImpl;
import java.io.FileInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
/**
Implementation of the monitor that uses the class loader
	that it was loaded in for all class loading.
*/
public final class FileMonitor extends BaseMonitor implements java.security.PrivilegedExceptionAction<Object>
{
    /* Fields */

    /** System home directory resolved from gemfirexd.system.home (or the
        legacy sqlfire.system.home); null means the current directory. */
    private File home;

    /** Engine version information read from the dbms product genus resource. */
    private ProductVersionHolder engineVersion;

    public FileMonitor() {
        initialize(true);
        applicationProperties = readApplicationProperties();
    }

    public FileMonitor(java.util.Properties properties, java.io.PrintStream log) {
        runWithState(properties, log);
    }

    /**
        Opens the application properties stream (gemfirexd.properties, or the
        legacy sqlfire.properties in SQLFire compatibility mode).

        Lookup order: an explicitly named file from the properties-file JVM
        property, then the system home directory, then user.home.

        @return the open stream, or null if no properties file exists
        @throws IOException if an explicitly configured file does not exist
    */
    private InputStream PBapplicationPropertiesStream(BaseMonitor m)
        throws IOException {
        //GemStone changes BEGIN
        {
            // An explicitly configured properties file takes precedence; a
            // missing explicit file is an error rather than a silent fallback.
            String fileName = PBgetJVMProperty(com.pivotal.gemfirexd.Property.PROPERTIES_FILE);
            //SQLF:BC
            if (fileName == null) {
                fileName = PBgetJVMProperty(com.pivotal.gemfirexd.Property.SQLF_PROPERTIES_FILE);
            }
            if (fileName != null) {
                File sr = FileUtil.newFile(null, fileName);
                if (!sr.exists()) {
                    final String errorStr = "[warning] "+ fileName + " gemfirexd.properties not found to read ";
                    HeaderPrintWriter hpw = Monitor.getStream();
                    if (hpw == null) {
                        // no log stream available yet: report on stderr and the temp writer
                        new GfxdHeaderPrintWriterImpl(System.err, null,
                            GfxdHeaderPrintWriterImpl.GfxdLogWriter.getInstance(), true,
                            "System.err").println(errorStr);
                        this.getTempWriter().append(errorStr);
                    }
                    throw new IOException(errorStr);
                }
                return new FileInputStream(sr);
            }
        }
        //GemStone changes END
        File sr = FileUtil.newFile(home, com.pivotal.gemfirexd.Property.PROPERTIES_FILE);
        // GemStone changes BEGIN
        //SQLF:BC
        File sqlfsr = FileUtil.newFile(home,
            com.pivotal.gemfirexd.Property.SQLF_PROPERTIES_FILE);
        if (!sr.exists()) {
            File userhome = new File(System.getProperty("user.home"));
            if (!sqlfsr.exists()) {
                if (!userhome.exists() || !userhome.isDirectory()) {
                    return null;
                }
                sr = FileUtil.newFile(userhome,
                    com.pivotal.gemfirexd.Property.PROPERTIES_FILE);
                if (!sr.exists()) {
                    sqlfsr = sr = FileUtil.newFile(userhome,
                        com.pivotal.gemfirexd.Property.SQLF_PROPERTIES_FILE);
                    if (!sqlfsr.exists()) {
                        return null;
                    }
                    else {
                        // only the legacy file exists: flag SQLFire back-compat mode
                        PBsetJVMProperty(Property.PROP_SQLF_BC_MODE_INDICATOR, Boolean.TRUE.toString());
                        PropertyUtil.setSQLFire();
                    }
                }
            }
            else {
                // legacy file exists in system home: flag SQLFire back-compat mode
                PBsetJVMProperty(Property.PROP_SQLF_BC_MODE_INDICATOR, Boolean.TRUE.toString());
                PropertyUtil.setSQLFire();
            }
        }
        // GemStone changes END
        return new FileInputStream(PropertyUtil.isSQLFire ? sqlfsr : sr);
    }

    public Object getEnvironment() {
        return home;
    }

    /**
        SECURITY WARNING.

        This method is run in a privileged block in a Java 2 environment.

        Set the system home directory. Returns false if it couldn't for
        some reason.
    **/
    private boolean PBinitialize(boolean lite)
    {
        if (!lite) {
            try {
                // Create a ThreadGroup and set the daemon property to
                // make sure the group is destroyed and garbage
                // collected when all its members have finished (i.e.,
                // when the driver is unloaded).
                daemonGroup = new ThreadGroup("gemfirexd.daemons");
                daemonGroup.setDaemon(true);
            } catch (SecurityException ignored) {
                // a restrictive security manager may forbid group creation;
                // daemon threads then run in the caller's thread group
            }
        }

        InputStream versionStream = getClass().getResourceAsStream(ProductGenusNames.DBMS_INFO);
        engineVersion = ProductVersionHolder.getProductVersionHolderFromMyEnv(versionStream);

        String systemHome;
        // create the system home directory if it doesn't exist
        try {
            // SECURITY PERMISSION - OP2
            systemHome = System.getProperty(Property.SYSTEM_HOME_PROPERTY);
            //SQLF:BC
            if (systemHome == null) {
                systemHome = System.getProperty("sqlfire.system.home");
            }
        } catch (SecurityException se) {
            // system home will be the current directory
            systemHome = null;
        }

        if (systemHome != null) {
            home = new File(systemHome);
            // SECURITY PERMISSION - OP2a
            if (home.exists()) {
                if (!home.isDirectory()) {
                    report(Property.SYSTEM_HOME_PROPERTY + "=" + systemHome
                        + " does not represent a directory");
                    return false;
                }
            } else if (!lite) {
                try {
                    // SECURITY PERMISSION - OP2b
                    // Attempt to create just the leaf folder first, which does
                    // not require read permission on the parent folder; fall
                    // back to mkdirs() when intermediate folders are missing.
                    // This retains pre-10.3 behaviour while allowing a policy
                    // file to limit permissions to gemfirexd.system.home.
                    if (!home.mkdir()) {
                        home.mkdirs();
                    }
                } catch (SecurityException se) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
        SECURITY WARNING.

        This method is run in a privileged block in a Java 2 environment.

        Return a property from the JVM's system set.
        If a SecurityException occurs, null is returned.
    */
    private String PBgetJVMProperty(String key) {
        try {
            // SECURITY PERMISSION - OP1
            return System.getProperty(key);
        } catch (SecurityException se) {
            return null;
        }
    }

    //GemStone changes BEGIN
    /** Set a JVM system property, returning its previous value
        (null if a SecurityException occurs). */
    private String PBsetJVMProperty(String key, String value) {
        try {
            // SECURITY PERMISSION - OP1
            return System.setProperty(key, value);
        } catch (SecurityException se) {
            return null;
        }
    }

    /** Clear a JVM system property, returning its previous value
        (null if a SecurityException occurs). */
    private String PBclearJVMProperty(String key) {
        try {
            // SECURITY PERMISSION - OP1
            return System.clearProperty(key);
        } catch (SecurityException se) {
            return null;
        }
    }
    //GemStone changes END

    /*
    ** Priv block code, moved out of the old Java2 version.
    */

    // Arguments for run(): 'action' selects the privileged operation, the
    // remaining fields carry its parameters. Consistency is guaranteed by
    // every public entry point being synchronized on this monitor.
    private int action;
    private String key3;
    //GemStone changes BEGIN
    private String value3;
    //GemStone changes END
    private Runnable task;
    private int intValue;

    /**
        Initialize the system in a privileged block.
    **/
    synchronized final boolean initialize(boolean lite)
    {
        action = lite ? 0 : 1;
        try {
            Object ret = java.security.AccessController.doPrivileged(this);
            return ((Boolean) ret).booleanValue();
        } catch (java.security.PrivilegedActionException pae) {
            throw (RuntimeException) pae.getException();
        }
    }

    synchronized final Properties getDefaultModuleProperties() {
        action = 2;
        try {
            return (Properties) java.security.AccessController.doPrivileged(this);
        } catch (java.security.PrivilegedActionException pae) {
            throw (RuntimeException) pae.getException();
        }
    }

    /** Read a JVM property; only engine-owned keys go through the
        privileged block. */
    public synchronized final String getJVMProperty(String key) {
        if (!key.startsWith("gemfirexd.") && !key.startsWith("derby."))
            return PBgetJVMProperty(key);

        try {
            action = 3;
            key3 = key;
            String value = (String) java.security.AccessController.doPrivileged(this);
            key3 = null;
            return value;
        } catch (java.security.PrivilegedActionException pae) {
            throw (RuntimeException) pae.getException();
        }
    }

    //GemStone changes BEGIN
    /** Set a JVM property; only engine-owned keys go through the
        privileged block. */
    public synchronized final String setJVMProperty(String key, String value) {
        // BUGFIX: this previously used '||', which is true for every key
        // (no key starts with both prefixes), so the privileged path was
        // dead code; use '&&' to mirror getJVMProperty().
        if (!key.startsWith("gemfirexd.") && !key.startsWith("derby."))
            return PBsetJVMProperty(key, value);
        try {
            action = 7;
            key3 = key;
            value3 = value;
            String rValue = (String) java.security.AccessController.doPrivileged(this);
            key3 = null;
            value3 = null;
            return rValue;
        } catch (java.security.PrivilegedActionException pae) {
            throw (RuntimeException) pae.getException();
        }
    }

    /** Clear a JVM property; only engine-owned keys go through the
        privileged block. */
    public synchronized final String clearJVMProperty(String key) {
        // BUGFIX: '||' -> '&&' for the same reason as setJVMProperty().
        if (!key.startsWith("gemfirexd.") && !key.startsWith("derby."))
            return PBclearJVMProperty(key);
        try {
            action = 8;
            key3 = key;
            String rValue = (String) java.security.AccessController.doPrivileged(this);
            key3 = null;
            return rValue;
        } catch (java.security.PrivilegedActionException pae) {
            throw (RuntimeException) pae.getException();
        }
    }
    //GemStone changes END

    /** Create a daemon thread for the given task via the privileged block. */
    public synchronized final Thread getDaemonThread(Runnable task, String name, boolean setMinPriority) {
        action = 4;
        key3 = name;
        this.task = task;
        this.intValue = setMinPriority ? 1 : 0;
        try {
            Thread t = (Thread) java.security.AccessController.doPrivileged(this);
            key3 = null;
            // BUGFIX: 'task = null' only cleared the shadowing parameter and
            // leaked the Runnable through the field until the next call;
            // clear the field itself.
            this.task = null;
            return t;
        } catch (java.security.PrivilegedActionException pae) {
            throw (RuntimeException) pae.getException();
        }
    }

    public synchronized final void setThreadPriority(int priority) {
        action = 5;
        intValue = priority;
        try {
            java.security.AccessController.doPrivileged(this);
        } catch (java.security.PrivilegedActionException pae) {
            throw (RuntimeException) pae.getException();
        }
    }

    synchronized final InputStream applicationPropertiesStream()
        throws IOException {
        action = 6;
        try {
            // SECURITY PERMISSION - OP3
            return (InputStream) java.security.AccessController.doPrivileged(this);
        }
        catch (java.security.PrivilegedActionException pae)
        {
            throw (IOException) pae.getException();
        }
    }

    /**
        Privileged-action dispatcher; 'action' selects the operation set up
        by the synchronized caller.
    */
    public synchronized final Object run() throws IOException {
        switch (action) {
        case 0:
        case 1:
            // SECURITY PERMISSION - OP2, OP2a, OP2b
            return Boolean.valueOf(PBinitialize(action == 0));
        case 2:
            // SECURITY PERMISSION - IP1
            return super.getDefaultModuleProperties();
        case 3:
            // SECURITY PERMISSION - OP1
            return PBgetJVMProperty(key3);
        case 4:
            return super.getDaemonThread(task, key3, intValue != 0);
        case 5:
            super.setThreadPriority(intValue);
            return null;
        case 6:
            // SECURITY PERMISSION - OP3
            return PBapplicationPropertiesStream(this);
        // GemStone changes BEGIN
        case 7:
            // SECURITY PERMISSION - OP1
            return PBsetJVMProperty(key3, value3);
        case 8:
            // SECURITY PERMISSION - OP1
            return PBclearJVMProperty(key3);
        // GemStone changes END
        default:
            return null;
        }
    }

    public final ProductVersionHolder getEngineVersion() {
        return engineVersion;
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.indexing;
import com.intellij.openapi.vfs.InvalidVirtualFileAccessException;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileWithId;
import com.intellij.openapi.vfs.newvfs.FileAttribute;
import com.intellij.openapi.vfs.newvfs.NewVirtualFile;
import com.intellij.util.containers.ConcurrentHashMap;
import com.intellij.util.io.DataInputOutputUtil;
import gnu.trove.TObjectLongHashMap;
import gnu.trove.TObjectLongProcedure;
import org.jetbrains.annotations.Nullable;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.concurrent.ArrayBlockingQueue;
/**
* @author Eugene Zhuravlev
* Date: Dec 25, 2007
*/
public class IndexingStamp {
private IndexingStamp() {
}
/**
* The class is meant to be accessed from synchronized block only
*/
private static class Timestamps {
private static final FileAttribute PERSISTENCE = new FileAttribute("__index_stamps__", 1, false);
private TObjectLongHashMap<ID<?, ?>> myIndexStamps;
private boolean myIsDirty = false;
private Timestamps(@Nullable DataInputStream stream) throws IOException {
if (stream != null) {
try {
long dominatingIndexStamp = DataInputOutputUtil.readTIME(stream);
while(stream.available() > 0) {
ID<?, ?> id = ID.findById(DataInputOutputUtil.readINT(stream));
if (id != null) {
long stamp = IndexInfrastructure.getIndexCreationStamp(id);
if (myIndexStamps == null) myIndexStamps = new TObjectLongHashMap<ID<?, ?>>(5, 0.98f);
if (stamp <= dominatingIndexStamp) myIndexStamps.put(id, stamp);
}
}
}
finally {
stream.close();
}
}
}
private void writeToStream(final DataOutputStream stream) throws IOException {
if (myIndexStamps != null && !myIndexStamps.isEmpty()) {
final long[] dominatingIndexStamp = new long[1];
myIndexStamps.forEachEntry(
new TObjectLongProcedure<ID<?, ?>>() {
@Override
public boolean execute(ID<?, ?> a, long b) {
dominatingIndexStamp[0] = Math.max(dominatingIndexStamp[0], b);
return true;
}
}
);
DataInputOutputUtil.writeTIME(stream, dominatingIndexStamp[0]);
myIndexStamps.forEachEntry(new TObjectLongProcedure<ID<?, ?>>() {
@Override
public boolean execute(final ID<?, ?> id, final long timestamp) {
try {
DataInputOutputUtil.writeINT(stream, id.getUniqueId());
return true;
}
catch (IOException e) {
throw new RuntimeException(e);
}
}
});
} else {
DataInputOutputUtil.writeTIME(stream, DataInputOutputUtil.timeBase);
}
}
public long get(ID<?, ?> id) {
return myIndexStamps != null? myIndexStamps.get(id) : 0L;
}
public void set(ID<?, ?> id, long tmst) {
try {
if (tmst < 0) {
if (myIndexStamps == null) return;
myIndexStamps.remove(id);
return;
}
if (myIndexStamps == null) myIndexStamps = new TObjectLongHashMap<ID<?, ?>>(5, 0.98f);
myIndexStamps.put(id, tmst);
}
finally {
myIsDirty = true;
}
}
public boolean isDirty() {
return myIsDirty;
}
}
private static final ConcurrentHashMap<VirtualFile, Timestamps> myTimestampsCache = new ConcurrentHashMap<VirtualFile, Timestamps>();
private static final int CAPACITY = 100;
private static final ArrayBlockingQueue<VirtualFile> myFinishedFiles = new ArrayBlockingQueue<VirtualFile>(CAPACITY);
public static boolean isFileIndexed(VirtualFile file, ID<?, ?> indexName, final long indexCreationStamp) {
try {
return getIndexStamp(file, indexName) == indexCreationStamp;
}
catch (RuntimeException e) {
final Throwable cause = e.getCause();
if (!(cause instanceof IOException)) {
throw e; // in case of IO exceptions consider file unindexed
}
}
return false;
}
public static long getIndexStamp(VirtualFile file, ID<?, ?> indexName) {
synchronized (getStripedLock(file)) {
Timestamps stamp = createOrGetTimeStamp(file);
if (stamp != null) return stamp.get(indexName);
return 0;
}
}
private static Timestamps createOrGetTimeStamp(VirtualFile file) {
if (file instanceof NewVirtualFile && file.isValid()) {
Timestamps timestamps = myTimestampsCache.get(file);
if (timestamps == null) {
final DataInputStream stream = Timestamps.PERSISTENCE.readAttribute(file);
try {
timestamps = new Timestamps(stream);
}
catch (IOException e) {
throw new RuntimeException(e);
}
myTimestampsCache.put(file, timestamps);
}
return timestamps;
}
return null;
}
public static void update(final VirtualFile file, final ID<?, ?> indexName, final long indexCreationStamp) {
synchronized (getStripedLock(file)) {
try {
Timestamps stamp = createOrGetTimeStamp(file);
if (stamp != null) stamp.set(indexName, indexCreationStamp);
}
catch (InvalidVirtualFileAccessException ignored /*ok to ignore it here*/) {
}
}
}
/** Forces out every queued file stamp, then drops the in-memory timestamp cache. */
public static void flushCaches() {
    flushCache(null);
    myTimestampsCache.clear();
}
/**
 * Queues the file for a batched timestamp flush. The batch is drained and the
 * dirty stamps persisted when the queue reaches CAPACITY, or unconditionally
 * when {@code finishedFile} is null (a forced full flush).
 *
 * @param finishedFile the file whose stamps are ready to persist, or null to
 *                     flush everything queued so far
 */
public static void flushCache(@Nullable VirtualFile finishedFile) {
    // Fast path: enqueue and return; fall through on forced flush or full queue.
    if (finishedFile == null || !myFinishedFiles.offer(finishedFile)) {
        VirtualFile[] files = null;
        synchronized (myFinishedFiles) {
            // Drain only when forced (and non-empty) or when the batch is full.
            int size = myFinishedFiles.size();
            if ((finishedFile == null && size > 0) || size == CAPACITY) {
                files = myFinishedFiles.toArray(new VirtualFile[size]);
                myFinishedFiles.clear();
            }
        }
        if (files != null) {
            for(VirtualFile file:files) {
                synchronized (getStripedLock(file)) {
                    Timestamps timestamp = myTimestampsCache.remove(file);
                    if (timestamp == null) continue;
                    try {
                        // Persist only stamps that changed, and only for still-valid files.
                        if (timestamp.isDirty() && file.isValid()) {
                            final DataOutputStream sink = Timestamps.PERSISTENCE.writeAttribute(file);
                            timestamp.writeToStream(sink);
                            sink.close();
                        }
                    }
                    catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                }
            }
        }
        // Retry the enqueue now that room may exist; silently dropped if still full.
        if (finishedFile != null) myFinishedFiles.offer(finishedFile);
    }
}
// Striped locks: per-file operations hash onto one of 16 monitors to reduce contention.
private static final Object[] ourLocks = new Object[16];
static {
    for(int i = 0; i < ourLocks.length; ++i) ourLocks[i] = new Object();
}
/**
 * Picks the striped monitor for the file based on its persistent id.
 * Files without a persistent id (non-NewVirtualFile) share a fixed stripe.
 */
private static Object getStripedLock(VirtualFile file) {
    // Previously this returned the autoboxed constant 0, i.e. the JVM-wide interned
    // Integer.valueOf(0) instance — synchronizing on a shared boxed primitive risks
    // contention/deadlock with unrelated code. Use one of our own locks instead.
    if (!(file instanceof NewVirtualFile)) return ourLocks[0];
    int id = ((NewVirtualFile)file).getId();
    return ourLocks[(id & 0xFF) % ourLocks.length];
}
}
| |
package API.amazon.mws.feeds.model;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyElement;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import org.w3c.dom.Element;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="Type">
* <simpleType>
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="Receiver"/>
* <enumeration value="Sender"/>
* </restriction>
* </simpleType>
* </element>
* <element name="Code" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="Message" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="Detail">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <any/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
* Generated by AWS Code Generator
* <p/>
* Wed Feb 18 13:28:59 PST 2009
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"type",
"code",
"message",
"detail"
})
@XmlRootElement(name = "Error")
public class Error {
@XmlElement(name = "Type", required = true)
protected String type;
@XmlElement(name = "Code", required = true)
protected String code;
@XmlElement(name = "Message", required = true)
protected String message;
@XmlElement(name = "Detail", required = true)
protected Error.Detail detail;
/**
* Default constructor
*
*/
public Error() {
super();
}
/**
* Value constructor
*
*/
public Error(final String type, final String code, final String message, final Error.Detail detail) {
this.type = type;
this.code = code;
this.message = message;
this.detail = detail;
}
/**
* Gets the value of the type property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getType() {
return type;
}
/**
* Sets the value of the type property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setType(String value) {
this.type = value;
}
public boolean isSetType() {
return (this.type!= null);
}
/**
* Gets the value of the code property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getCode() {
return code;
}
/**
* Sets the value of the code property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setCode(String value) {
this.code = value;
}
public boolean isSetCode() {
return (this.code!= null);
}
/**
* Gets the value of the message property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getMessage() {
return message;
}
/**
* Sets the value of the message property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setMessage(String value) {
this.message = value;
}
public boolean isSetMessage() {
return (this.message!= null);
}
/**
* Gets the value of the detail property.
*
* @return
* possible object is
* {@link Error.Detail }
*
*/
public Error.Detail getDetail() {
return detail;
}
/**
* Sets the value of the detail property.
*
* @param value
* allowed object is
* {@link Error.Detail }
*
*/
public void setDetail(Error.Detail value) {
this.detail = value;
}
public boolean isSetDetail() {
return (this.detail!= null);
}
/**
* Sets the value of the Type property.
*
* @param value
* @return
* this instance
*/
public Error withType(String value) {
setType(value);
return this;
}
/**
* Sets the value of the Code property.
*
* @param value
* @return
* this instance
*/
public Error withCode(String value) {
setCode(value);
return this;
}
/**
* Sets the value of the Message property.
*
* @param value
* @return
* this instance
*/
public Error withMessage(String value) {
setMessage(value);
return this;
}
/**
* Sets the value of the Detail property.
*
* @param value
* @return
* this instance
*/
public Error withDetail(Error.Detail value) {
setDetail(value);
return this;
}
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <any/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
* Generated by AWS Code Generator
* <p/>
* Wed Feb 18 13:28:59 PST 2009
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"any"
})
public static class Detail {
@XmlAnyElement(lax = true)
protected List<Object> any;
/**
* Default constructor
*
*/
public Detail() {
super();
}
/**
* Value constructor
*
*/
public Detail(final List<Object> any) {
this.any = any;
}
/**
* Gets the value of the any property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the any property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getAny().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link Element }
* {@link Object }
*
*
*/
public List<Object> getAny() {
if (any == null) {
any = new ArrayList<Object>();
}
return this.any;
}
public boolean isSetAny() {
return ((this.any!= null)&&(!this.any.isEmpty()));
}
public void unsetAny() {
this.any = null;
}
/**
* Sets the value of the Any property.
*
* @param values
* @return
* this instance
*/
public Error.Detail withAny(Object... values) {
for (Object value: values) {
getAny().add(value);
}
return this;
}
/**
* Sets the value of the any property.
*
* @param any
* allowed object is
* {@link Element }
* {@link Object }
*
*/
public void setAny(List<Object> any) {
this.any = any;
}
}
/**
*
* XML fragment representation of this object
*
* @return XML fragment for this object. Name for outer
* tag expected to be set by calling method. This fragment
* returns inner properties representation only
*/
protected String toXMLFragment() {
StringBuffer xml = new StringBuffer();
if (isSetType()) {
xml.append("<Type>");
xml.append(getType() + "");
xml.append("</Type>");
}
if (isSetCode()) {
xml.append("<Code>");
xml.append(escapeXML(getCode()));
xml.append("</Code>");
}
if (isSetMessage()) {
xml.append("<Message>");
xml.append(escapeXML(getMessage()));
xml.append("</Message>");
}
if (isSetDetail()) {
Error.Detail detail = getDetail();
xml.append("<Detail>");
xml.append(detail.toString());
xml.append("</Detail>");
}
return xml.toString();
}
/**
*
* Escape XML special characters
*/
private String escapeXML(String string) {
StringBuffer sb = new StringBuffer();
int length = string.length();
for (int i = 0; i < length; ++i) {
char c = string.charAt(i);
switch (c) {
case '&':
sb.append("&");
break;
case '<':
sb.append("<");
break;
case '>':
sb.append(">");
break;
case '\'':
sb.append("'");
break;
case '"':
sb.append(""");
break;
default:
sb.append(c);
}
}
return sb.toString();
}
/**
*
* JSON fragment representation of this object
*
* @return JSON fragment for this object. Name for outer
* object expected to be set by calling method. This fragment
* returns inner properties representation only
*
*/
protected String toJSONFragment() {
StringBuffer json = new StringBuffer();
boolean first = true;
if (isSetType()) {
if (!first) json.append(", ");
json.append(quoteJSON("Type"));
json.append(" : ");
json.append(quoteJSON(getType() + ""));
first = false;
}
if (isSetCode()) {
if (!first) json.append(", ");
json.append(quoteJSON("Code"));
json.append(" : ");
json.append(quoteJSON(getCode()));
first = false;
}
if (isSetMessage()) {
if (!first) json.append(", ");
json.append(quoteJSON("Message"));
json.append(" : ");
json.append(quoteJSON(getMessage()));
first = false;
}
if (isSetDetail()) {
if (!first) json.append(", ");
json.append("\"Detail\" : {");
Error.Detail detail = getDetail();
json.append(detail.toString());
json.append("}");
first = false;
}
return json.toString();
}
/**
*
* Quote JSON string
*/
private String quoteJSON(String string) {
StringBuffer sb = new StringBuffer();
sb.append("\"");
int length = string.length();
for (int i = 0; i < length; ++i) {
char c = string.charAt(i);
switch (c) {
case '"':
sb.append("\\\"");
break;
case '\\':
sb.append("\\\\");
break;
case '/':
sb.append("\\/");
break;
case '\b':
sb.append("\\b");
break;
case '\f':
sb.append("\\f");
break;
case '\n':
sb.append("\\n");
break;
case '\r':
sb.append("\\r");
break;
case '\t':
sb.append("\\t");
break;
default:
if (c < ' ') {
sb.append("\\u" + String.format("%03x", Integer.valueOf(c)));
} else {
sb.append(c);
}
}
}
sb.append("\"");
return sb.toString();
}
}
| |
/*
* Copyright 2014 Click Travel Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.clicktravel.infrastructure.persistence.inmemory.database;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Method;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import com.clicktravel.cheddar.infrastructure.persistence.database.*;
import com.clicktravel.cheddar.infrastructure.persistence.database.configuration.DatabaseSchemaHolder;
import com.clicktravel.cheddar.infrastructure.persistence.database.configuration.ItemConfiguration;
import com.clicktravel.cheddar.infrastructure.persistence.database.configuration.UniqueConstraint;
import com.clicktravel.cheddar.infrastructure.persistence.database.exception.ItemConstraintViolationException;
import com.clicktravel.cheddar.infrastructure.persistence.database.exception.NonExistentItemException;
import com.clicktravel.cheddar.infrastructure.persistence.database.exception.handler.PersistenceExceptionHandler;
import com.clicktravel.cheddar.infrastructure.persistence.database.query.*;
import com.clicktravel.infrastructure.inmemory.Resettable;
import com.clicktravel.infrastructure.persistence.inmemory.SerializedItem;
public class InMemoryDatabaseTemplate extends AbstractDatabaseTemplate implements Resettable {

    /** Source of generated sequence keys; shared by every instance in the JVM. */
    private static final AtomicLong ATOMIC_COUNTER = new AtomicLong();

    /** Stored items: table name -> (item id -> serialized item). */
    private final Map<String, Map<ItemId, SerializedItem>> items = new ConcurrentHashMap<>();

    /** Unique-constraint index: "table:property" -> (upper-cased value -> owning item id). */
    private final Map<String, Map<String, ItemId>> uniqueConstraints = new ConcurrentHashMap<>();

    /** Schema lookup: item class -> its configuration. */
    private final Map<Class<? extends Item>, ItemConfiguration> itemConfigurations = new ConcurrentHashMap<>();

    /**
     * Creates a template backed by in-memory maps, pre-creating one table map per
     * configured item class and one constraint map per declared unique constraint.
     *
     * @param databaseSchemaHolder source of the item configurations
     */
    public InMemoryDatabaseTemplate(final DatabaseSchemaHolder databaseSchemaHolder) {
        for (final ItemConfiguration itemConfiguration : databaseSchemaHolder.itemConfigurations()) {
            itemConfigurations.put(itemConfiguration.itemClass(), itemConfiguration);
            final String tableName = itemConfiguration.tableName();
            items.put(tableName, new HashMap<ItemId, SerializedItem>());
            for (final UniqueConstraint uniqueConstraint : itemConfiguration.uniqueConstraints()) {
                uniqueConstraints.put(newUniqueConstraintKey(tableName, uniqueConstraint.propertyName()),
                        new HashMap<String, ItemId>());
            }
        }
    }

    /**
     * Reads the item with the given id.
     *
     * @throws NonExistentItemException if no item is stored under the id, or the
     *         stored item is not of the requested class
     */
    @Override
    public <T extends Item> T read(final ItemId itemId, final Class<T> itemClass) throws NonExistentItemException {
        final SerializedItem serializedItem = getItemMap(getItemTableName(itemClass)).get(itemId);
        if (serializedItem == null) {
            throw new NonExistentItemException("Item with identifier [" + itemId.value() + "] did not exist");
        }
        final T item = serializedItem.getEntity(itemClass);
        // A stored item of an unexpected type is reported as missing, not mis-typed.
        if (itemClass.isAssignableFrom(item.getClass())) {
            return item;
        }
        throw new NonExistentItemException("Item with identifier [" + itemId.value() + "] did not exist");
    }

    /**
     * Creates the item, registering its unique-constraint values and setting its
     * version to 1.
     *
     * @throws ItemConstraintViolationException if the id or a unique-constraint
     *         value is already in use
     */
    @Override
    public <T extends Item> T create(final T item,
            final PersistenceExceptionHandler<?>... persistenceExceptionHandlers) {
        final ItemId itemId = getItemId(item);
        final String tableName = getItemTableName(item.getClass());
        final SerializedItem oldSerializedItem = getItemMap(tableName).get(itemId);
        if (oldSerializedItem != null) {
            throw new ItemConstraintViolationException(itemId.value(),
                    "Item already exist with identifier in [" + item.getClass() + "] repository");
        }
        createUniqueConstraints(item);
        item.setVersion(1L);
        getItemMap(tableName).put(itemId, getSerializedItem(itemId.value(), item));
        return item;
    }

    /**
     * Updates the item with optimistic-lock version checking; creates it if it
     * does not exist yet (upsert semantics).
     */
    @SuppressWarnings("unchecked")
    @Override
    public <T extends Item> T update(final T item,
            final PersistenceExceptionHandler<?>... persistenceExceptionHandlers) {
        final ItemId itemId = getItemId(item);
        final Class<? extends Item> itemType = item.getClass();
        final String tableName = getItemTableName(itemType);
        final SerializedItem oldSerializedItem = getItemMap(tableName).get(itemId);
        if (oldSerializedItem == null) {
            // Forward the handlers on the create fallback (previously dropped here).
            return create(item, persistenceExceptionHandlers);
        }
        final T oldItem = (T) oldSerializedItem.getEntity(item.getClass());
        // Optimistic locking: the caller must present the currently stored version.
        if (!item.getVersion().equals(oldItem.getVersion())) {
            throw new IllegalAccessError(
                    "Expected version [" + item.getVersion() + "] but was [" + oldItem.getVersion() + "]");
        }
        deleteUniqueConstraints(oldItem);
        try {
            createUniqueConstraints(item);
        } catch (final ItemConstraintViolationException e) {
            // Restore the old item's constraints so a failed update leaves state unchanged.
            createUniqueConstraints(oldItem);
            throw e;
        }
        item.setVersion(item.getVersion() + 1);
        getItemMap(tableName).put(itemId, getSerializedItem(itemId.value(), item));
        return item;
    }

    /**
     * Deletes the item and releases its unique-constraint values.
     * A null item is a no-op.
     */
    @Override
    public void delete(final Item item, final PersistenceExceptionHandler<?>... persistenceExceptionHandlers) {
        if (item == null) {
            // Nothing to delete. Previously unique-constraint cleanup still ran
            // and dereferenced the null item, throwing NullPointerException.
            return;
        }
        getItemMap(getItemTableName(item.getClass())).remove(getItemId(item));
        deleteUniqueConstraints(item);
    }

    /** Normalizes a property value for case-insensitive unique-constraint matching. */
    private String uniqueConstraintPropertyValue(final Object propertyValue) {
        return String.valueOf(propertyValue).toUpperCase();
    }

    /** Reads a bean property from the item, wrapping reflective failures. */
    private static Object readProperty(final Item item, final PropertyDescriptor propertyDescriptor) {
        try {
            return propertyDescriptor.getReadMethod().invoke(item);
        } catch (final Exception e) {
            throw new IllegalStateException("Could not invoke read method", e);
        }
    }

    /**
     * Registers the item's non-null unique-constraint values. All values are
     * checked before any is registered, so a violation leaves the index unchanged.
     *
     * @throws ItemConstraintViolationException if any value is already in use
     */
    private void createUniqueConstraints(final Item item) {
        final Class<? extends Item> itemClass = item.getClass();
        final String tableName = getItemTableName(itemClass);
        final Map<String, String> newConstraints = new HashMap<>();
        final ItemId itemId = getItemId(item);
        for (final PropertyDescriptor propertyDescriptor : getUniqueConstraintProperties(itemClass)) {
            final String propertyName = propertyDescriptor.getName();
            final String uniqueConstraintKey = newUniqueConstraintKey(tableName, propertyName);
            final Map<String, ItemId> uniqueValues = uniqueConstraints.get(uniqueConstraintKey);
            final Object propertyValue = readProperty(item, propertyDescriptor);
            if (propertyValue != null) {
                final String uniqueConstraintPropertyValue = uniqueConstraintPropertyValue(propertyValue);
                final ItemId existingItemId = uniqueValues.get(uniqueConstraintPropertyValue);
                if (existingItemId != null) {
                    throw new ItemConstraintViolationException(propertyName, "Already in use");
                }
                newConstraints.put(uniqueConstraintKey, uniqueConstraintPropertyValue);
            }
        }
        // Commit only after every value has been verified free.
        for (final Entry<String, String> entry : newConstraints.entrySet()) {
            uniqueConstraints.get(entry.getKey()).put(entry.getValue(), itemId);
        }
    }

    /** Releases the item's unique-constraint values, but only entries it owns. */
    private void deleteUniqueConstraints(final Item item) {
        final Class<? extends Item> itemClass = item.getClass();
        final String tableName = getItemTableName(itemClass);
        for (final PropertyDescriptor propertyDescriptor : getUniqueConstraintProperties(itemClass)) {
            final String uniqueConstraintKey = newUniqueConstraintKey(tableName, propertyDescriptor.getName());
            final Map<String, ItemId> uniqueValues = uniqueConstraints.get(uniqueConstraintKey);
            final Object propertyValue = readProperty(item, propertyDescriptor);
            if (propertyValue != null) {
                final String uniqueConstraintPropertyValue = uniqueConstraintPropertyValue(propertyValue);
                final ItemId itemId = uniqueValues.get(uniqueConstraintPropertyValue);
                // Null-safe comparison (previously a missing entry caused an NPE).
                if (getItemId(item).equals(itemId)) {
                    uniqueValues.remove(uniqueConstraintPropertyValue);
                }
            }
        }
    }

    /**
     * Checks the presence of a matching unique constraint for the given item property
     *
     * @param item The Item for which to check unique constraints
     * @param propertyName The name of the property for which the unique constraint should be checked
     * @param propertyValue The value of the property which needs to be checked for presence of unique constraint
     * @return true if this item owns a constraint entry for the given value
     */
    boolean hasMatchingUniqueConstraint(final Item item, final String propertyName, final String propertyValue) {
        final Class<? extends Item> itemClass = item.getClass();
        final String tableName = getItemTableName(itemClass);
        final Map<String, ItemId> uniqueConstraintsForProperty = uniqueConstraints
                .get(newUniqueConstraintKey(tableName, propertyName));
        final ItemId itemId = getItemId(item);
        for (final Entry<String, ItemId> entry : uniqueConstraintsForProperty.entrySet()) {
            if (entry.getValue().equals(itemId)) {
                // Normalize with the same helper used when registering constraints.
                return entry.getKey().equals(uniqueConstraintPropertyValue(propertyValue));
            }
        }
        return false;
    }

    /** Dispatches the query to the matching execution strategy. */
    @Override
    public <T extends Item> Collection<T> fetch(final Query query, final Class<T> itemClass) {
        if (query instanceof AttributeQuery) {
            return executeQuery((AttributeQuery) query, itemClass);
        } else if (query instanceof KeySetQuery) {
            return executeQuery((KeySetQuery) query, itemClass);
        } else {
            throw new UnsupportedQueryException(query.getClass());
        }
    }

    /** Reserves and returns a contiguous block of generated keys. */
    @Override
    public GeneratedKeyHolder generateKeys(final SequenceKeyGenerator sequenceKeyGenerator) {
        final Collection<Long> keys = new ArrayList<>();
        final int keyCount = sequenceKeyGenerator.keyCount();
        final long startingKey = ATOMIC_COUNTER.getAndAdd(keyCount) + 1;
        for (int i = 0; i < keyCount; i++) {
            keys.add(startingKey + i);
        }
        return new GeneratedKeyHolder(keys);
    }

    private static String newUniqueConstraintKey(final String tableName, final String propertyName) {
        return tableName + ":" + propertyName;
    }

    private ItemId getItemId(final Item item) {
        return getItemConfiguration(item.getClass()).getItemId(item);
    }

    private ItemConfiguration getItemConfiguration(final Class<? extends Item> itemClass) {
        final ItemConfiguration itemConfiguration = itemConfigurations.get(itemClass);
        if (itemConfiguration == null) {
            throw new IllegalStateException("No ItemConfiguration for " + itemClass);
        }
        return itemConfiguration;
    }

    private String getItemTableName(final Class<? extends Item> itemClass) {
        // Delegates to getItemConfiguration rather than duplicating its lookup + check.
        return getItemConfiguration(itemClass).tableName();
    }

    private <T extends Item> Collection<PropertyDescriptor> getUniqueConstraintProperties(
            final Class<? extends Item> itemClass) {
        final Collection<PropertyDescriptor> constraintPropertyDescriptors = new HashSet<>();
        for (final UniqueConstraint uniqueConstraint : getItemConfiguration(itemClass).uniqueConstraints()) {
            constraintPropertyDescriptors.add(uniqueConstraint.propertyDescriptor());
        }
        return constraintPropertyDescriptors;
    }

    private SerializedItem getSerializedItem(final String itemId, final Object repositoryItem) {
        return new SerializedItem(repositoryItem);
    }

    /** Returns the items whose ids appear in the query's key set. */
    private <T extends Item> Collection<T> executeQuery(final KeySetQuery query, final Class<T> itemClass) {
        final Map<ItemId, T> allItems = getAllItems(itemClass);
        final Collection<T> matches = new ArrayList<>();
        for (final Entry<ItemId, T> entry : allItems.entrySet()) {
            if (query.itemIds().contains(entry.getKey())) {
                matches.add(entry.getValue());
            }
        }
        return matches;
    }

    /** Returns the items whose attributes satisfy the query's condition(s). */
    private <T extends Item> Collection<T> executeQuery(final AttributeQuery query, final Class<T> itemClass) {
        final Map<ItemId, T> allItems = getAllItems(itemClass);
        final Collection<T> matches = new ArrayList<>();
        for (final T item : allItems.values()) {
            if (itemMatches(query, item)) {
                matches.add(item);
            }
        }
        return matches;
    }

    private <T extends Item> boolean itemMatches(final AttributeQuery attributeQuery, final T item) {
        final String attribute = attributeQuery.getAttributeName();
        final Condition condition = attributeQuery.getCondition();
        if (CompoundAttributeQuery.class.isAssignableFrom(attributeQuery.getClass())) {
            // Compound queries require both the primary and the supporting condition.
            final CompoundAttributeQuery compoundAttributeQuery = (CompoundAttributeQuery) attributeQuery;
            return propertyMatches(item, attribute, condition)
                    && propertyMatches(item, compoundAttributeQuery.getSupportingAttributeName(),
                            compoundAttributeQuery.getSupportingCondition());
        }
        return propertyMatches(item, attribute, condition);
    }

    /**
     * Evaluates a single condition against the item's bean property.
     * NOTE(review): ordering comparisons use String.compareTo, i.e. lexicographic
     * order even for numeric properties — presumably adequate for the test data
     * this template serves; confirm before relying on range queries.
     */
    private <T extends Item> boolean propertyMatches(final T item, final String attribute, final Condition condition) {
        try {
            final Method getter = new PropertyDescriptor(attribute, item.getClass()).getReadMethod();
            final Object itemPropertyValue = getter.invoke(item);
            final Class<?> itemPropertyType = getter.getReturnType();
            final Set<String> values = condition.getValues();
            String singleValue = null;
            if (values != null && !values.isEmpty()) {
                singleValue = values.iterator().next();
            }
            final boolean isSingleItemProperty = !Collection.class.isAssignableFrom(itemPropertyType);
            String singleItemPropertyValue = null;
            if (isSingleItemProperty) {
                singleItemPropertyValue = String.valueOf(itemPropertyValue);
            }
            boolean matches = false;
            switch (condition.getComparisonOperator()) {
                case NULL:
                    if (itemPropertyValue == null) {
                        matches = true;
                    }
                    break;
                case NOT_NULL:
                    if (itemPropertyValue != null) {
                        matches = true;
                    }
                    break;
                case LESS_THAN_OR_EQUALS:
                    if (isSingleItemProperty && singleItemPropertyValue.compareTo(singleValue) <= 0) {
                        matches = true;
                    }
                    break;
                case GREATER_THAN_OR_EQUALS:
                    if (isSingleItemProperty && singleItemPropertyValue.compareTo(singleValue) >= 0) {
                        matches = true;
                    }
                    break;
                case EQUALS:
                    // Single-valued properties compare as strings; collection
                    // properties compare against the full condition value set.
                    if (isSingleItemProperty && singleItemPropertyValue.equals(singleValue)) {
                        matches = true;
                    } else if (values.equals(itemPropertyValue)) {
                        matches = true;
                    }
                    break;
                default:
                    break;
            }
            return matches;
        } catch (final Exception e) {
            // Keep the original message but preserve the underlying cause.
            throw new IllegalStateException(
                    "No getter for property [" + attribute + "] on class: [" + item.getClass() + "]", e);
        }
    }

    /** Snapshots every stored item of the given class, keyed by id. */
    private <T extends Item> Map<ItemId, T> getAllItems(final Class<T> itemClass) {
        final Map<ItemId, T> allItems = new HashMap<>();
        final String tableName = getItemTableName(itemClass);
        for (final SerializedItem serializedItem : getItemMap(tableName).values()) {
            final T item = serializedItem.getEntity(itemClass);
            if (itemClass.isAssignableFrom(item.getClass())) {
                allItems.put(getItemId(item), item);
            }
        }
        return allItems;
    }

    private Map<ItemId, SerializedItem> getItemMap(final String tableName) {
        final Map<ItemId, SerializedItem> itemMap = items.get(tableName);
        if (itemMap == null) {
            throw new IllegalStateException("Unknown table: " + tableName);
        }
        return itemMap;
    }

    /** Empties every table and constraint map while keeping the schema intact. */
    @Override
    public void reset() {
        for (final Entry<String, Map<ItemId, SerializedItem>> entry : items.entrySet()) {
            entry.setValue(new HashMap<ItemId, SerializedItem>());
        }
        for (final Entry<String, Map<String, ItemId>> entry : uniqueConstraints.entrySet()) {
            entry.setValue(new HashMap<String, ItemId>());
        }
    }
}
| |
/*
* Copyright (C) 2018 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.product.index;
import java.io.Serializable;
import java.time.LocalDate;
import java.util.Map;
import java.util.NoSuchElementException;
import org.joda.beans.Bean;
import org.joda.beans.ImmutableBean;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaBean;
import org.joda.beans.MetaProperty;
import org.joda.beans.gen.BeanDefinition;
import org.joda.beans.gen.ImmutableDefaults;
import org.joda.beans.gen.ImmutablePreBuild;
import org.joda.beans.gen.ImmutableValidator;
import org.joda.beans.gen.PropertyDefinition;
import org.joda.beans.impl.direct.DirectFieldsBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.opengamma.strata.basics.ReferenceData;
import com.opengamma.strata.basics.Resolvable;
import com.opengamma.strata.basics.currency.Currency;
import com.opengamma.strata.basics.index.OvernightIndex;
import com.opengamma.strata.basics.value.Rounding;
import com.opengamma.strata.collect.ArgChecker;
import com.opengamma.strata.product.SecuritizedProduct;
import com.opengamma.strata.product.SecurityId;
import com.opengamma.strata.product.rate.OvernightRateComputation;
import com.opengamma.strata.product.swap.OvernightAccrualMethod;
/**
* A futures contract based on an Overnight index.
* <p>
* An Overnight rate future is a financial instrument that is based on the future value of
* an Overnight index interest rate. The profit or loss of an Overnight rate future is settled daily.
* This class represents the structure of a single futures contract.
* <p>
* For example, the widely traded "30-Day Federal Funds futures contract" has a notional
* of 5 million USD, is based on the US Federal Funds Effective Rate 'USD-FED-FUND',
* expiring the last business day of each month.
*
* <h4>Price</h4>
* The price of an Overnight rate future is based on the interest rate of the underlying index.
* It is defined as {@code (100 - percentRate)}.
* <p>
* Strata uses <i>decimal prices</i> for Overnight rate futures in the trade model, pricers and market data.
* The decimal price is based on the decimal rate equivalent to the percentage.
* For example, a price of 99.32 implies an interest rate of 0.68% which is represented in Strata by 0.9932.
*/
@BeanDefinition(constructorScope = "package")
public final class OvernightFuture
implements SecuritizedProduct, Resolvable<ResolvedOvernightFuture>, ImmutableBean, Serializable {
/**
* The security identifier.
* <p>
* This identifier uniquely identifies the security within the system.
*/
@PropertyDefinition(validate = "notNull", overrideGet = true)
private final SecurityId securityId;
/**
* The currency that the future is traded in, defaulted from the index if not set.
*/
@PropertyDefinition(validate = "notNull", overrideGet = true)
private final Currency currency;
/**
* The notional amount.
* <p>
* This is the full notional of the deposit, such as 5 million dollars.
* The notional expressed here must be positive.
* The currency of the notional is specified by {@code currency}.
*/
@PropertyDefinition(validate = "ArgChecker.notNegativeOrZero")
private final double notional;
/**
* The accrual factor, defaulted from the index if not set.
* <p>
* This is the year fraction of the contract, typically 1/12 for a 30-day future.
* As such, it is often unrelated to the day count of the index.
* The year fraction must be positive.
*/
@PropertyDefinition(validate = "ArgChecker.notNegativeOrZero")
private final double accrualFactor;
/**
* The last date of trading.
* <p>
* This must be a valid business day on the fixing calendar of {@code index}.
* For example, the last trade date is often the last business day of the month.
*/
@PropertyDefinition(validate = "notNull")
private final LocalDate lastTradeDate;
/**
* The first date of the rate calculation period.
* <p>
* This is not necessarily a valid business day on the fixing calendar of {@code index}.
* However, it will be adjusted in {@code OvernightRateComputation} if needed.
*/
@PropertyDefinition(validate = "notNull")
private final LocalDate startDate;
/**
* The last date of the rate calculation period.
* <p>
* This is not necessarily a valid business day on the fixing calendar of {@code index}.
* However, it will be adjusted in {@code OvernightRateComputation} if needed.
*/
@PropertyDefinition(validate = "notNull")
private final LocalDate endDate;
/**
* The underlying Overnight index.
* <p>
* The future is based on this index.
* It will be a well known market index such as 'USD-FED-FUND'.
*/
@PropertyDefinition(validate = "notNull")
private final OvernightIndex index;
/**
* The method of accruing Overnight interest.
* <p>
* The average rate is calculated based on this method over the period between {@code startDate} and {@code endDate}.
*/
@PropertyDefinition(validate = "notNull")
private final OvernightAccrualMethod accrualMethod;
/**
* The definition of how to round the futures price, defaulted to no rounding.
* <p>
* The price is represented in decimal form, not percentage form.
* As such, the decimal places expressed by the rounding refers to this decimal form.
* For example, the common market price of 99.7125 for a 0.2875% rate is
* represented as 0.997125 which has 6 decimal places.
*/
@PropertyDefinition(validate = "notNull")
private final Rounding rounding;
//-------------------------------------------------------------------------
@ImmutableValidator
private void validate() {
    // The accrual period must be non-empty: startDate strictly before endDate.
    ArgChecker.inOrderNotEqual(startDate, endDate, "startDate", "endDate");
}
//-------------------------------------------------------------------------
@ImmutableDefaults
private static void applyDefaults(Builder builder) {
    // Default: the futures price is not rounded (see the 'rounding' field docs).
    builder.rounding(Rounding.none());
}
@ImmutablePreBuild
private static void preBuild(Builder builder) {
    // Default the trade currency from the index when only the index was supplied.
    if (builder.index != null && builder.currency == null) {
        builder.currency = builder.index.getCurrency();
    }
}
//-------------------------------------------------------------------------
@Override
public ResolvedOvernightFuture resolve(ReferenceData refData) {
    // Build the averaged-rate computation over [startDate, endDate]; the literal 0
    // is presumably the rate cut-off days (none) — confirm against
    // OvernightRateComputation.of. Date adjustment to the index calendar happens
    // inside the computation (see startDate/endDate field docs).
    OvernightRateComputation overnightAveragedRate = OvernightRateComputation.of(
        index, startDate, endDate, 0, accrualMethod, refData);
    return ResolvedOvernightFuture.builder()
        .securityId(securityId)
        .accrualFactor(accrualFactor)
        .currency(currency)
        .notional(notional)
        .lastTradeDate(lastTradeDate)
        .overnightRate(overnightAveragedRate)
        .rounding(rounding)
        .build();
}
//------------------------- AUTOGENERATED START -------------------------
// NOTE: everything from here to AUTOGENERATED END is produced by the
// Joda-Beans code generator; edit the property definitions above and
// regenerate rather than hand-editing this section.
/**
* The meta-bean for {@code OvernightFuture}.
* @return the meta-bean, not null
*/
public static OvernightFuture.Meta meta() {
return OvernightFuture.Meta.INSTANCE;
}
static {
MetaBean.register(OvernightFuture.Meta.INSTANCE);
}
/**
* The serialization version id.
*/
private static final long serialVersionUID = 1L;
/**
* Returns a builder used to create an instance of the bean.
* @return the builder, not null
*/
public static OvernightFuture.Builder builder() {
return new OvernightFuture.Builder();
}
/**
* Creates an instance.
* @param securityId the value of the property, not null
* @param currency the value of the property, not null
* @param notional the value of the property
* @param accrualFactor the value of the property
* @param lastTradeDate the value of the property, not null
* @param startDate the value of the property, not null
* @param endDate the value of the property, not null
* @param index the value of the property, not null
* @param accrualMethod the value of the property, not null
* @param rounding the value of the property, not null
*/
OvernightFuture(
SecurityId securityId,
Currency currency,
double notional,
double accrualFactor,
LocalDate lastTradeDate,
LocalDate startDate,
LocalDate endDate,
OvernightIndex index,
OvernightAccrualMethod accrualMethod,
Rounding rounding) {
// fail fast: reject null references and non-positive numeric values
// before any assignment takes place
JodaBeanUtils.notNull(securityId, "securityId");
JodaBeanUtils.notNull(currency, "currency");
ArgChecker.notNegativeOrZero(notional, "notional");
ArgChecker.notNegativeOrZero(accrualFactor, "accrualFactor");
JodaBeanUtils.notNull(lastTradeDate, "lastTradeDate");
JodaBeanUtils.notNull(startDate, "startDate");
JodaBeanUtils.notNull(endDate, "endDate");
JodaBeanUtils.notNull(index, "index");
JodaBeanUtils.notNull(accrualMethod, "accrualMethod");
JodaBeanUtils.notNull(rounding, "rounding");
this.securityId = securityId;
this.currency = currency;
this.notional = notional;
this.accrualFactor = accrualFactor;
this.lastTradeDate = lastTradeDate;
this.startDate = startDate;
this.endDate = endDate;
this.index = index;
this.accrualMethod = accrualMethod;
this.rounding = rounding;
// cross-field validation: startDate must be strictly before endDate
validate();
}
@Override
public OvernightFuture.Meta metaBean() {
return OvernightFuture.Meta.INSTANCE;
}
//-----------------------------------------------------------------------
// Generated property getters: each simply returns the immutable field
// assigned (and validated) in the constructor.
/**
* Gets the security identifier.
* <p>
* This identifier uniquely identifies the security within the system.
* @return the value of the property, not null
*/
@Override
public SecurityId getSecurityId() {
return securityId;
}
//-----------------------------------------------------------------------
/**
* Gets the currency that the future is traded in, defaulted from the index if not set.
* @return the value of the property, not null
*/
@Override
public Currency getCurrency() {
return currency;
}
//-----------------------------------------------------------------------
/**
* Gets the notional amount.
* <p>
* This is the full notional of the deposit, such as 5 million dollars.
* The notional expressed here must be positive.
* The currency of the notional is specified by {@code currency}.
* @return the value of the property
*/
public double getNotional() {
return notional;
}
//-----------------------------------------------------------------------
/**
* Gets the accrual factor, defaulted from the index if not set.
* <p>
* This is the year fraction of the contract, typically 1/12 for a 30-day future.
* As such, it is often unrelated to the day count of the index.
* The year fraction must be positive.
* @return the value of the property
*/
public double getAccrualFactor() {
return accrualFactor;
}
//-----------------------------------------------------------------------
/**
* Gets the last date of trading.
* <p>
* This must be a valid business day on the fixing calendar of {@code index}.
* For example, the last trade date is often the last business day of the month.
* @return the value of the property, not null
*/
public LocalDate getLastTradeDate() {
return lastTradeDate;
}
//-----------------------------------------------------------------------
/**
* Gets the first date of the rate calculation period.
* <p>
* This is not necessarily a valid business day on the fixing calendar of {@code index}.
* However, it will be adjusted in {@code OvernightRateComputation} if needed.
* @return the value of the property, not null
*/
public LocalDate getStartDate() {
return startDate;
}
//-----------------------------------------------------------------------
/**
* Gets the last date of the rate calculation period.
* <p>
* This is not necessarily a valid business day on the fixing calendar of {@code index}.
* However, it will be adjusted in {@code OvernightRateComputation} if needed.
* @return the value of the property, not null
*/
public LocalDate getEndDate() {
return endDate;
}
//-----------------------------------------------------------------------
/**
* Gets the underlying Overnight index.
* <p>
* The future is based on this index.
* It will be a well known market index such as 'USD-FED-FUND'.
* @return the value of the property, not null
*/
public OvernightIndex getIndex() {
return index;
}
//-----------------------------------------------------------------------
/**
* Gets the method of accruing Overnight interest.
* <p>
* The average rate is calculated based on this method over the period between {@code startDate} and {@code endDate}.
* @return the value of the property, not null
*/
public OvernightAccrualMethod getAccrualMethod() {
return accrualMethod;
}
//-----------------------------------------------------------------------
/**
* Gets the definition of how to round the futures price, defaulted to no rounding.
* <p>
* The price is represented in decimal form, not percentage form.
* As such, the decimal places expressed by the rounding refers to this decimal form.
* For example, the common market price of 99.7125 for a 0.2875% rate is
* represented as 0.997125 which has 6 decimal places.
* @return the value of the property, not null
*/
public Rounding getRounding() {
return rounding;
}
//-----------------------------------------------------------------------
/**
* Returns a builder that allows this bean to be mutated.
* @return the mutable builder, not null
*/
public Builder toBuilder() {
return new Builder(this);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
// exact class match (not instanceof), then field-by-field equality over
// all ten properties; JodaBeanUtils.equal handles nulls and primitives
if (obj != null && obj.getClass() == this.getClass()) {
OvernightFuture other = (OvernightFuture) obj;
return JodaBeanUtils.equal(securityId, other.securityId) &&
JodaBeanUtils.equal(currency, other.currency) &&
JodaBeanUtils.equal(notional, other.notional) &&
JodaBeanUtils.equal(accrualFactor, other.accrualFactor) &&
JodaBeanUtils.equal(lastTradeDate, other.lastTradeDate) &&
JodaBeanUtils.equal(startDate, other.startDate) &&
JodaBeanUtils.equal(endDate, other.endDate) &&
JodaBeanUtils.equal(index, other.index) &&
JodaBeanUtils.equal(accrualMethod, other.accrualMethod) &&
JodaBeanUtils.equal(rounding, other.rounding);
}
return false;
}
@Override
public int hashCode() {
// 31-based accumulation over exactly the fields compared in equals(),
// keeping the equals/hashCode contract
int hash = getClass().hashCode();
hash = hash * 31 + JodaBeanUtils.hashCode(securityId);
hash = hash * 31 + JodaBeanUtils.hashCode(currency);
hash = hash * 31 + JodaBeanUtils.hashCode(notional);
hash = hash * 31 + JodaBeanUtils.hashCode(accrualFactor);
hash = hash * 31 + JodaBeanUtils.hashCode(lastTradeDate);
hash = hash * 31 + JodaBeanUtils.hashCode(startDate);
hash = hash * 31 + JodaBeanUtils.hashCode(endDate);
hash = hash * 31 + JodaBeanUtils.hashCode(index);
hash = hash * 31 + JodaBeanUtils.hashCode(accrualMethod);
hash = hash * 31 + JodaBeanUtils.hashCode(rounding);
return hash;
}
@Override
public String toString() {
// 352 is the generator's estimate of the resulting string length
StringBuilder buf = new StringBuilder(352);
buf.append("OvernightFuture{");
buf.append("securityId").append('=').append(JodaBeanUtils.toString(securityId)).append(',').append(' ');
buf.append("currency").append('=').append(JodaBeanUtils.toString(currency)).append(',').append(' ');
buf.append("notional").append('=').append(JodaBeanUtils.toString(notional)).append(',').append(' ');
buf.append("accrualFactor").append('=').append(JodaBeanUtils.toString(accrualFactor)).append(',').append(' ');
buf.append("lastTradeDate").append('=').append(JodaBeanUtils.toString(lastTradeDate)).append(',').append(' ');
buf.append("startDate").append('=').append(JodaBeanUtils.toString(startDate)).append(',').append(' ');
buf.append("endDate").append('=').append(JodaBeanUtils.toString(endDate)).append(',').append(' ');
buf.append("index").append('=').append(JodaBeanUtils.toString(index)).append(',').append(' ');
buf.append("accrualMethod").append('=').append(JodaBeanUtils.toString(accrualMethod)).append(',').append(' ');
buf.append("rounding").append('=').append(JodaBeanUtils.toString(rounding));
buf.append('}');
return buf.toString();
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code OvernightFuture}.
*/
public static final class Meta extends DirectMetaBean {
/**
* The singleton instance of the meta-bean.
*/
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code securityId} property.
*/
private final MetaProperty<SecurityId> securityId = DirectMetaProperty.ofImmutable(
this, "securityId", OvernightFuture.class, SecurityId.class);
/**
* The meta-property for the {@code currency} property.
*/
private final MetaProperty<Currency> currency = DirectMetaProperty.ofImmutable(
this, "currency", OvernightFuture.class, Currency.class);
/**
* The meta-property for the {@code notional} property.
*/
private final MetaProperty<Double> notional = DirectMetaProperty.ofImmutable(
this, "notional", OvernightFuture.class, Double.TYPE);
/**
* The meta-property for the {@code accrualFactor} property.
*/
private final MetaProperty<Double> accrualFactor = DirectMetaProperty.ofImmutable(
this, "accrualFactor", OvernightFuture.class, Double.TYPE);
/**
* The meta-property for the {@code lastTradeDate} property.
*/
private final MetaProperty<LocalDate> lastTradeDate = DirectMetaProperty.ofImmutable(
this, "lastTradeDate", OvernightFuture.class, LocalDate.class);
/**
* The meta-property for the {@code startDate} property.
*/
private final MetaProperty<LocalDate> startDate = DirectMetaProperty.ofImmutable(
this, "startDate", OvernightFuture.class, LocalDate.class);
/**
* The meta-property for the {@code endDate} property.
*/
private final MetaProperty<LocalDate> endDate = DirectMetaProperty.ofImmutable(
this, "endDate", OvernightFuture.class, LocalDate.class);
/**
* The meta-property for the {@code index} property.
*/
private final MetaProperty<OvernightIndex> index = DirectMetaProperty.ofImmutable(
this, "index", OvernightFuture.class, OvernightIndex.class);
/**
* The meta-property for the {@code accrualMethod} property.
*/
private final MetaProperty<OvernightAccrualMethod> accrualMethod = DirectMetaProperty.ofImmutable(
this, "accrualMethod", OvernightFuture.class, OvernightAccrualMethod.class);
/**
* The meta-property for the {@code rounding} property.
*/
private final MetaProperty<Rounding> rounding = DirectMetaProperty.ofImmutable(
this, "rounding", OvernightFuture.class, Rounding.class);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap(
this, null,
"securityId",
"currency",
"notional",
"accrualFactor",
"lastTradeDate",
"startDate",
"endDate",
"index",
"accrualMethod",
"rounding");
/**
* Restricted constructor.
*/
private Meta() {
}
// Generated dispatch: switches on the precomputed hashCode of the
// property name rather than string comparison.
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case 1574023291: // securityId
return securityId;
case 575402001: // currency
return currency;
case 1585636160: // notional
return notional;
case -1540322338: // accrualFactor
return accrualFactor;
case -1041950404: // lastTradeDate
return lastTradeDate;
case -2129778896: // startDate
return startDate;
case -1607727319: // endDate
return endDate;
case 100346066: // index
return index;
case -1335729296: // accrualMethod
return accrualMethod;
case -142444: // rounding
return rounding;
}
return super.metaPropertyGet(propertyName);
}
@Override
public OvernightFuture.Builder builder() {
return new OvernightFuture.Builder();
}
@Override
public Class<? extends OvernightFuture> beanType() {
return OvernightFuture.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code securityId} property.
* @return the meta-property, not null
*/
public MetaProperty<SecurityId> securityId() {
return securityId;
}
/**
* The meta-property for the {@code currency} property.
* @return the meta-property, not null
*/
public MetaProperty<Currency> currency() {
return currency;
}
/**
* The meta-property for the {@code notional} property.
* @return the meta-property, not null
*/
public MetaProperty<Double> notional() {
return notional;
}
/**
* The meta-property for the {@code accrualFactor} property.
* @return the meta-property, not null
*/
public MetaProperty<Double> accrualFactor() {
return accrualFactor;
}
/**
* The meta-property for the {@code lastTradeDate} property.
* @return the meta-property, not null
*/
public MetaProperty<LocalDate> lastTradeDate() {
return lastTradeDate;
}
/**
* The meta-property for the {@code startDate} property.
* @return the meta-property, not null
*/
public MetaProperty<LocalDate> startDate() {
return startDate;
}
/**
* The meta-property for the {@code endDate} property.
* @return the meta-property, not null
*/
public MetaProperty<LocalDate> endDate() {
return endDate;
}
/**
* The meta-property for the {@code index} property.
* @return the meta-property, not null
*/
public MetaProperty<OvernightIndex> index() {
return index;
}
/**
* The meta-property for the {@code accrualMethod} property.
* @return the meta-property, not null
*/
public MetaProperty<OvernightAccrualMethod> accrualMethod() {
return accrualMethod;
}
/**
* The meta-property for the {@code rounding} property.
* @return the meta-property, not null
*/
public MetaProperty<Rounding> rounding() {
return rounding;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case 1574023291: // securityId
return ((OvernightFuture) bean).getSecurityId();
case 575402001: // currency
return ((OvernightFuture) bean).getCurrency();
case 1585636160: // notional
return ((OvernightFuture) bean).getNotional();
case -1540322338: // accrualFactor
return ((OvernightFuture) bean).getAccrualFactor();
case -1041950404: // lastTradeDate
return ((OvernightFuture) bean).getLastTradeDate();
case -2129778896: // startDate
return ((OvernightFuture) bean).getStartDate();
case -1607727319: // endDate
return ((OvernightFuture) bean).getEndDate();
case 100346066: // index
return ((OvernightFuture) bean).getIndex();
case -1335729296: // accrualMethod
return ((OvernightFuture) bean).getAccrualMethod();
case -142444: // rounding
return ((OvernightFuture) bean).getRounding();
}
return super.propertyGet(bean, propertyName, quiet);
}
// Immutable bean: setting a property is unsupported; in quiet mode the
// property name is still validated via metaProperty().
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
metaProperty(propertyName);
if (quiet) {
return;
}
throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
}
}
//-----------------------------------------------------------------------
/**
* The bean-builder for {@code OvernightFuture}.
*/
public static final class Builder extends DirectFieldsBeanBuilder<OvernightFuture> {
private SecurityId securityId;
private Currency currency;
private double notional;
private double accrualFactor;
private LocalDate lastTradeDate;
private LocalDate startDate;
private LocalDate endDate;
private OvernightIndex index;
private OvernightAccrualMethod accrualMethod;
private Rounding rounding;
/**
* Restricted constructor.
*/
private Builder() {
// seeds default values (rounding = none); see applyDefaults
applyDefaults(this);
}
/**
* Restricted copy constructor.
* @param beanToCopy the bean to copy from, not null
*/
private Builder(OvernightFuture beanToCopy) {
this.securityId = beanToCopy.getSecurityId();
this.currency = beanToCopy.getCurrency();
this.notional = beanToCopy.getNotional();
this.accrualFactor = beanToCopy.getAccrualFactor();
this.lastTradeDate = beanToCopy.getLastTradeDate();
this.startDate = beanToCopy.getStartDate();
this.endDate = beanToCopy.getEndDate();
this.index = beanToCopy.getIndex();
this.accrualMethod = beanToCopy.getAccrualMethod();
this.rounding = beanToCopy.getRounding();
}
//-----------------------------------------------------------------------
// Generated dispatch on the precomputed hashCode of the property name.
@Override
public Object get(String propertyName) {
switch (propertyName.hashCode()) {
case 1574023291: // securityId
return securityId;
case 575402001: // currency
return currency;
case 1585636160: // notional
return notional;
case -1540322338: // accrualFactor
return accrualFactor;
case -1041950404: // lastTradeDate
return lastTradeDate;
case -2129778896: // startDate
return startDate;
case -1607727319: // endDate
return endDate;
case 100346066: // index
return index;
case -1335729296: // accrualMethod
return accrualMethod;
case -142444: // rounding
return rounding;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
}
@Override
public Builder set(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case 1574023291: // securityId
this.securityId = (SecurityId) newValue;
break;
case 575402001: // currency
this.currency = (Currency) newValue;
break;
case 1585636160: // notional
this.notional = (Double) newValue;
break;
case -1540322338: // accrualFactor
this.accrualFactor = (Double) newValue;
break;
case -1041950404: // lastTradeDate
this.lastTradeDate = (LocalDate) newValue;
break;
case -2129778896: // startDate
this.startDate = (LocalDate) newValue;
break;
case -1607727319: // endDate
this.endDate = (LocalDate) newValue;
break;
case 100346066: // index
this.index = (OvernightIndex) newValue;
break;
case -1335729296: // accrualMethod
this.accrualMethod = (OvernightAccrualMethod) newValue;
break;
case -142444: // rounding
this.rounding = (Rounding) newValue;
break;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
return this;
}
@Override
public Builder set(MetaProperty<?> property, Object value) {
super.set(property, value);
return this;
}
@Override
public OvernightFuture build() {
// cross-property defaulting (currency from index) before construction
preBuild(this);
return new OvernightFuture(
securityId,
currency,
notional,
accrualFactor,
lastTradeDate,
startDate,
endDate,
index,
accrualMethod,
rounding);
}
//-----------------------------------------------------------------------
/**
* Sets the security identifier.
* <p>
* This identifier uniquely identifies the security within the system.
* @param securityId the new value, not null
* @return this, for chaining, not null
*/
public Builder securityId(SecurityId securityId) {
JodaBeanUtils.notNull(securityId, "securityId");
this.securityId = securityId;
return this;
}
/**
* Sets the currency that the future is traded in, defaulted from the index if not set.
* @param currency the new value, not null
* @return this, for chaining, not null
*/
public Builder currency(Currency currency) {
JodaBeanUtils.notNull(currency, "currency");
this.currency = currency;
return this;
}
/**
* Sets the notional amount.
* <p>
* This is the full notional of the deposit, such as 5 million dollars.
* The notional expressed here must be positive.
* The currency of the notional is specified by {@code currency}.
* @param notional the new value
* @return this, for chaining, not null
*/
public Builder notional(double notional) {
ArgChecker.notNegativeOrZero(notional, "notional");
this.notional = notional;
return this;
}
/**
* Sets the accrual factor, defaulted from the index if not set.
* <p>
* This is the year fraction of the contract, typically 1/12 for a 30-day future.
* As such, it is often unrelated to the day count of the index.
* The year fraction must be positive.
* @param accrualFactor the new value
* @return this, for chaining, not null
*/
public Builder accrualFactor(double accrualFactor) {
ArgChecker.notNegativeOrZero(accrualFactor, "accrualFactor");
this.accrualFactor = accrualFactor;
return this;
}
/**
* Sets the last date of trading.
* <p>
* This must be a valid business day on the fixing calendar of {@code index}.
* For example, the last trade date is often the last business day of the month.
* @param lastTradeDate the new value, not null
* @return this, for chaining, not null
*/
public Builder lastTradeDate(LocalDate lastTradeDate) {
JodaBeanUtils.notNull(lastTradeDate, "lastTradeDate");
this.lastTradeDate = lastTradeDate;
return this;
}
/**
* Sets the first date of the rate calculation period.
* <p>
* This is not necessarily a valid business day on the fixing calendar of {@code index}.
* However, it will be adjusted in {@code OvernightRateComputation} if needed.
* @param startDate the new value, not null
* @return this, for chaining, not null
*/
public Builder startDate(LocalDate startDate) {
JodaBeanUtils.notNull(startDate, "startDate");
this.startDate = startDate;
return this;
}
/**
* Sets the last date of the rate calculation period.
* <p>
* This is not necessarily a valid business day on the fixing calendar of {@code index}.
* However, it will be adjusted in {@code OvernightRateComputation} if needed.
* @param endDate the new value, not null
* @return this, for chaining, not null
*/
public Builder endDate(LocalDate endDate) {
JodaBeanUtils.notNull(endDate, "endDate");
this.endDate = endDate;
return this;
}
/**
* Sets the underlying Overnight index.
* <p>
* The future is based on this index.
* It will be a well known market index such as 'USD-FED-FUND'.
* @param index the new value, not null
* @return this, for chaining, not null
*/
public Builder index(OvernightIndex index) {
JodaBeanUtils.notNull(index, "index");
this.index = index;
return this;
}
/**
* Sets the method of accruing Overnight interest.
* <p>
* The average rate is calculated based on this method over the period between {@code startDate} and {@code endDate}.
* @param accrualMethod the new value, not null
* @return this, for chaining, not null
*/
public Builder accrualMethod(OvernightAccrualMethod accrualMethod) {
JodaBeanUtils.notNull(accrualMethod, "accrualMethod");
this.accrualMethod = accrualMethod;
return this;
}
/**
* Sets the definition of how to round the futures price, defaulted to no rounding.
* <p>
* The price is represented in decimal form, not percentage form.
* As such, the decimal places expressed by the rounding refers to this decimal form.
* For example, the common market price of 99.7125 for a 0.2875% rate is
* represented as 0.997125 which has 6 decimal places.
* @param rounding the new value, not null
* @return this, for chaining, not null
*/
public Builder rounding(Rounding rounding) {
JodaBeanUtils.notNull(rounding, "rounding");
this.rounding = rounding;
return this;
}
//-----------------------------------------------------------------------
@Override
public String toString() {
StringBuilder buf = new StringBuilder(352);
buf.append("OvernightFuture.Builder{");
buf.append("securityId").append('=').append(JodaBeanUtils.toString(securityId)).append(',').append(' ');
buf.append("currency").append('=').append(JodaBeanUtils.toString(currency)).append(',').append(' ');
buf.append("notional").append('=').append(JodaBeanUtils.toString(notional)).append(',').append(' ');
buf.append("accrualFactor").append('=').append(JodaBeanUtils.toString(accrualFactor)).append(',').append(' ');
buf.append("lastTradeDate").append('=').append(JodaBeanUtils.toString(lastTradeDate)).append(',').append(' ');
buf.append("startDate").append('=').append(JodaBeanUtils.toString(startDate)).append(',').append(' ');
buf.append("endDate").append('=').append(JodaBeanUtils.toString(endDate)).append(',').append(' ');
buf.append("index").append('=').append(JodaBeanUtils.toString(index)).append(',').append(' ');
buf.append("accrualMethod").append('=').append(JodaBeanUtils.toString(accrualMethod)).append(',').append(' ');
buf.append("rounding").append('=').append(JodaBeanUtils.toString(rounding));
buf.append('}');
return buf.toString();
}
}
//-------------------------- AUTOGENERATED END --------------------------
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.LogAggregationContext;
import org.apache.hadoop.yarn.api.records.LogAggregationStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogValue;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogWriter;
import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils;
import org.apache.hadoop.yarn.server.api.ContainerLogAggregationPolicy;
import org.apache.hadoop.yarn.server.api.ContainerLogContext;
import org.apache.hadoop.yarn.server.api.ContainerType;
import org.apache.hadoop.yarn.server.api.protocolrecords.LogAggregationReport;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.DeletionService;
import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEventType;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Records;
import org.apache.hadoop.yarn.util.Times;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
public class AppLogAggregatorImpl implements AppLogAggregator {
private static final Log LOG = LogFactory
.getLog(AppLogAggregatorImpl.class);
// Sleep between iterations of the aggregation loop; presumably
// milliseconds — the use site is outside this class section.
private static final int THREAD_SLEEP_TIME = 1000;
// This is temporary solution. The configuration will be deleted once
// we find a more scalable method to only write a single log file per LRS.
private static final String NM_LOG_AGGREGATION_NUM_LOG_FILES_SIZE_PER_APP
= YarnConfiguration.NM_PREFIX + "log-aggregation.num-log-files-per-app";
private static final int
DEFAULT_NM_LOG_AGGREGATION_NUM_LOG_FILES_SIZE_PER_APP = 30;
// This configuration is for debug and test purpose. By setting
// this configuration as true. We can break the lower bound of
// NM_LOG_AGGREGATION_ROLL_MONITORING_INTERVAL_SECONDS.
private static final String NM_LOG_AGGREGATION_DEBUG_ENABLED
= YarnConfiguration.NM_PREFIX + "log-aggregation.debug-enabled";
private static final boolean
DEFAULT_NM_LOG_AGGREGATION_DEBUG_ENABLED = false;
// Lower bound (seconds) enforced on the rolling monitor interval unless
// debug mode is enabled.
private static final long
NM_LOG_AGGREGATION_MIN_ROLL_MONITORING_INTERVAL_SECONDS = 3600;
private final LocalDirsHandlerService dirsHandler;
private final Dispatcher dispatcher;
private final ApplicationId appId;
// cached string form of appId (see constructor)
private final String applicationId;
private boolean logAggregationDisabled = false;
private final Configuration conf;
private final DeletionService delService;
private final UserGroupInformation userUgi;
private final Path remoteNodeLogFileForApp;
private final Path remoteNodeTmpLogFileForApp;
// containers queued for log aggregation
private final BlockingQueue<ContainerId> pendingContainers;
// lifecycle flags; AtomicBoolean because they are toggled across threads
private final AtomicBoolean appFinishing = new AtomicBoolean();
private final AtomicBoolean appAggregationFinished = new AtomicBoolean();
private final AtomicBoolean aborted = new AtomicBoolean();
private final Map<ApplicationAccessType, String> appAcls;
private final FileContext lfs;
private final LogAggregationContext logAggregationContext;
private final Context context;
// max number of aggregated log files retained per app (see constructor)
private final int retentionSize;
// seconds between rolling aggregations; <= 0 disables rolling
private final long rollingMonitorInterval;
private final boolean logAggregationInRolling;
private final NodeId nodeId;
// This variable is only for testing
private final AtomicBoolean waiting = new AtomicBoolean(false);
private boolean renameTemporaryLogFileFailed = false;
private final Map<ContainerId, ContainerLogAggregator> containerLogAggregators =
new HashMap<ContainerId, ContainerLogAggregator>();
private final ContainerLogAggregationPolicy logAggPolicy;
/**
 * Creates the per-application log aggregator.
 * <p>
 * Reads the per-app retention size and the rolling-monitor interval from
 * configuration, clamping invalid values to defaults, and decides whether
 * rolling log aggregation is enabled for this application.
 */
public AppLogAggregatorImpl(Dispatcher dispatcher,
DeletionService deletionService, Configuration conf,
ApplicationId appId, UserGroupInformation userUgi, NodeId nodeId,
LocalDirsHandlerService dirsHandler, Path remoteNodeLogFileForApp,
Map<ApplicationAccessType, String> appAcls,
LogAggregationContext logAggregationContext, Context context,
FileContext lfs) {
this.dispatcher = dispatcher;
this.conf = conf;
this.delService = deletionService;
this.appId = appId;
this.applicationId = ConverterUtils.toString(appId);
this.userUgi = userUgi;
this.dirsHandler = dirsHandler;
this.remoteNodeLogFileForApp = remoteNodeLogFileForApp;
// NOTE(review): instance method called from the constructor — confirm it
// is non-overridable and does not rely on fields assigned below.
this.remoteNodeTmpLogFileForApp = getRemoteNodeTmpLogFileForApp();
this.pendingContainers = new LinkedBlockingQueue<ContainerId>();
this.appAcls = appAcls;
this.lfs = lfs;
this.logAggregationContext = logAggregationContext;
this.context = context;
this.nodeId = nodeId;
// Retention size: a non-positive configured value is invalid, so fall
// back to the default. (Local renamed from misspelled
// "configuredRentionSize".)
int configuredRetentionSize =
conf.getInt(NM_LOG_AGGREGATION_NUM_LOG_FILES_SIZE_PER_APP,
DEFAULT_NM_LOG_AGGREGATION_NUM_LOG_FILES_SIZE_PER_APP);
this.retentionSize = configuredRetentionSize <= 0
? DEFAULT_NM_LOG_AGGREGATION_NUM_LOG_FILES_SIZE_PER_APP
: configuredRetentionSize;
long configuredRollingMonitorInterval = conf.getLong(
YarnConfiguration
.NM_LOG_AGGREGATION_ROLL_MONITORING_INTERVAL_SECONDS,
YarnConfiguration
.DEFAULT_NM_LOG_AGGREGATION_ROLL_MONITORING_INTERVAL_SECONDS);
boolean debugMode =
conf.getBoolean(NM_LOG_AGGREGATION_DEBUG_ENABLED,
DEFAULT_NM_LOG_AGGREGATION_DEBUG_ENABLED);
if (configuredRollingMonitorInterval > 0
&& configuredRollingMonitorInterval <
NM_LOG_AGGREGATION_MIN_ROLL_MONITORING_INTERVAL_SECONDS) {
// Positive but below the lower bound: only debug mode may break the
// bound; otherwise clamp up to the minimum.
if (debugMode) {
this.rollingMonitorInterval = configuredRollingMonitorInterval;
} else {
LOG.warn(
"rollingMonitorInterval should be more than or equal to "
+ NM_LOG_AGGREGATION_MIN_ROLL_MONITORING_INTERVAL_SECONDS
+ " seconds. Using "
+ NM_LOG_AGGREGATION_MIN_ROLL_MONITORING_INTERVAL_SECONDS
+ " seconds instead.");
this.rollingMonitorInterval =
NM_LOG_AGGREGATION_MIN_ROLL_MONITORING_INTERVAL_SECONDS;
}
} else {
if (configuredRollingMonitorInterval <= 0) {
LOG.warn("rollingMonitorInterval is set as "
+ configuredRollingMonitorInterval + ". "
+ "The log rolling monitoring interval is disabled. "
+ "The logs will be aggregated after this application is finished.");
} else {
LOG.warn("rollingMonitorInterval is set as "
+ configuredRollingMonitorInterval + ". "
+ "The logs will be aggregated every "
+ configuredRollingMonitorInterval + " seconds");
}
this.rollingMonitorInterval = configuredRollingMonitorInterval;
}
// Rolling aggregation requires a positive interval AND a non-empty
// rolled-logs include pattern in the aggregation context.
this.logAggregationInRolling = this.rollingMonitorInterval > 0
&& this.logAggregationContext != null
&& this.logAggregationContext.getRolledLogsIncludePattern() != null
&& !this.logAggregationContext.getRolledLogsIncludePattern().isEmpty();
this.logAggPolicy = getLogAggPolicy(conf);
}
/**
 * Builds the container log aggregation policy for this application:
 * instantiates the policy class and, when parameters are configured,
 * hands them to the policy for parsing.
 */
private ContainerLogAggregationPolicy getLogAggPolicy(Configuration conf) {
  final ContainerLogAggregationPolicy chosenPolicy =
      getLogAggPolicyInstance(conf);
  final String policyParams = getLogAggPolicyParameters(conf);
  if (policyParams == null) {
    return chosenPolicy;
  }
  chosenPolicy.parseParameters(policyParams);
  return chosenPolicy;
}
/**
 * Resolves the policy class to use: the class named in the app's
 * LogAggregationContext wins when it is loadable and implements
 * ContainerLogAggregationPolicy; otherwise the cluster-wide default
 * class from configuration is used. An invalid per-app class never
 * fails the application -- it is logged and the default applies.
 */
private ContainerLogAggregationPolicy getLogAggPolicyInstance(
    Configuration conf) {
  Class<? extends ContainerLogAggregationPolicy> policyClass = null;
  final String className = (this.logAggregationContext == null) ? null
      : this.logAggregationContext.getLogAggregationPolicyClassName();
  if (className != null) {
    try {
      final Class<?> candidate = conf.getClassByName(className);
      if (ContainerLogAggregationPolicy.class.isAssignableFrom(candidate)) {
        policyClass =
            candidate.asSubclass(ContainerLogAggregationPolicy.class);
      } else {
        LOG.warn(this.appId + " specified invalid log aggregation policy " +
            className);
      }
    } catch (ClassNotFoundException cnfe) {
      // We don't fail the app if the policy class isn't valid.
      LOG.warn(this.appId + " specified invalid log aggregation policy " +
          className);
    }
  }
  if (policyClass != null) {
    LOG.info(this.appId + " specifies ContainerLogAggregationPolicy of "
        + policyClass);
  } else {
    policyClass = conf.getClass(YarnConfiguration.NM_LOG_AGG_POLICY_CLASS,
        AllContainerLogAggregationPolicy.class,
        ContainerLogAggregationPolicy.class);
  }
  return ReflectionUtils.newInstance(policyClass, conf);
}
/**
 * Returns the policy parameters: the per-app value from the
 * LogAggregationContext when present, otherwise the cluster-wide
 * configured parameters (which may also be null).
 */
private String getLogAggPolicyParameters(Configuration conf) {
  final String contextParams = (this.logAggregationContext == null) ? null
      : this.logAggregationContext.getLogAggregationPolicyParameters();
  if (contextParams != null) {
    return contextParams;
  }
  return conf.get(YarnConfiguration.NM_LOG_AGG_POLICY_CLASS_PARAMETERS);
}
/**
 * Runs one upload cycle: appends the eligible containers' new log files
 * to a tmp aggregated-log file on the remote FS, deletes the uploaded
 * local files, enforces the retention limit, renames the tmp file into
 * place (or deletes it when nothing was uploaded), and reports the
 * cycle's status -- plus a final SUCCEEDED/FAILED report when
 * {@code appFinished} -- to the RM via the NM context.
 */
private void uploadLogsForContainers(boolean appFinished) {
  if (this.logAggregationDisabled) {
    return;
  }
  if (UserGroupInformation.isSecurityEnabled()) {
    Credentials systemCredentials =
        context.getSystemCredentialsForApps().get(appId);
    if (systemCredentials != null) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Adding new framework-token for " + appId
            + " for log-aggregation: " + systemCredentials.getAllTokens()
            + "; userUgi=" + userUgi);
      }
      // this will replace old token
      userUgi.addCredentials(systemCredentials);
    }
  }
  // Create a set of Containers whose logs will be uploaded in this cycle.
  // It includes:
  // a) all containers in pendingContainers: those containers are finished
  //    and satisfy the ContainerLogAggregationPolicy.
  // b) some set of running containers: For all the Running containers,
  //    we use exitCode of 0 to find those which satisfy the
  //    ContainerLogAggregationPolicy.
  Set<ContainerId> pendingContainerInThisCycle = new HashSet<ContainerId>();
  this.pendingContainers.drainTo(pendingContainerInThisCycle);
  Set<ContainerId> finishedContainers =
      new HashSet<ContainerId>(pendingContainerInThisCycle);
  if (this.context.getApplications().get(this.appId) != null) {
    for (Container container : this.context.getApplications()
        .get(this.appId).getContainers().values()) {
      ContainerType containerType =
          container.getContainerTokenIdentifier().getContainerType();
      if (shouldUploadLogs(new ContainerLogContext(
          container.getContainerId(), containerType, 0))) {
        pendingContainerInThisCycle.add(container.getContainerId());
      }
    }
  }
  LogWriter writer = null;
  try {
    try {
      writer =
          new LogWriter(this.conf, this.remoteNodeTmpLogFileForApp,
              this.userUgi);
      // Write ACLs once when the writer is created.
      writer.writeApplicationACLs(appAcls);
      writer.writeApplicationOwner(this.userUgi.getShortUserName());
    } catch (IOException e1) {
      // Without a writer nothing can be uploaded; retry next cycle.
      LOG.error("Cannot create writer for app " + this.applicationId
          + ". Skip log upload this time. ", e1);
      return;
    }
    boolean uploadedLogsInThisCycle = false;
    for (ContainerId container : pendingContainerInThisCycle) {
      // Reuse the per-container aggregator so files uploaded in earlier
      // rolling cycles are skipped.
      ContainerLogAggregator aggregator = null;
      if (containerLogAggregators.containsKey(container)) {
        aggregator = containerLogAggregators.get(container);
      } else {
        aggregator = new ContainerLogAggregator(container);
        containerLogAggregators.put(container, aggregator);
      }
      Set<Path> uploadedFilePathsInThisCycle =
          aggregator.doContainerLogAggregation(writer, appFinished);
      if (uploadedFilePathsInThisCycle.size() > 0) {
        uploadedLogsInThisCycle = true;
        // Remove the local copies of the files just uploaded.
        this.delService.delete(this.userUgi.getShortUserName(), null,
            uploadedFilePathsInThisCycle
                .toArray(new Path[uploadedFilePathsInThisCycle.size()]));
      }
      // This container is finished, and all its logs have been uploaded,
      // remove it from containerLogAggregators.
      if (finishedContainers.contains(container)) {
        containerLogAggregators.remove(container);
      }
    }
    // Before upload logs, make sure the number of existing logs
    // is smaller than the configured NM log aggregation retention size.
    if (uploadedLogsInThisCycle) {
      cleanOldLogs();
    }
    // Close before renaming so the file content is fully flushed.
    if (writer != null) {
      writer.close();
      writer = null;
    }
    long currentTime = System.currentTimeMillis();
    // In rolling mode each cycle gets a timestamp-suffixed file name;
    // otherwise the single final file name is used.
    final Path renamedPath = this.rollingMonitorInterval <= 0
        ? remoteNodeLogFileForApp : new Path(
            remoteNodeLogFileForApp.getParent(),
            remoteNodeLogFileForApp.getName() + "_"
                + currentTime);
    String diagnosticMessage = "";
    boolean logAggregationSucceedInThisCycle = true;
    final boolean rename = uploadedLogsInThisCycle;
    try {
      // Rename (or discard, when empty) the tmp file as the app user.
      userUgi.doAs(new PrivilegedExceptionAction<Object>() {
        @Override
        public Object run() throws Exception {
          FileSystem remoteFS = remoteNodeLogFileForApp.getFileSystem(conf);
          if (remoteFS.exists(remoteNodeTmpLogFileForApp)) {
            if (rename) {
              remoteFS.rename(remoteNodeTmpLogFileForApp, renamedPath);
            } else {
              remoteFS.delete(remoteNodeTmpLogFileForApp, false);
            }
          }
          return null;
        }
      });
      diagnosticMessage =
          "Log uploaded successfully for Application: " + appId
              + " in NodeManager: "
              + LogAggregationUtils.getNodeString(nodeId) + " at "
              + Times.format(currentTime) + "\n";
    } catch (Exception e) {
      LOG.error(
          "Failed to move temporary log file to final location: ["
              + remoteNodeTmpLogFileForApp + "] to ["
              + renamedPath + "]", e);
      diagnosticMessage =
          "Log uploaded failed for Application: " + appId
              + " in NodeManager: "
              + LogAggregationUtils.getNodeString(nodeId) + " at "
              + Times.format(currentTime) + "\n";
      renameTemporaryLogFileFailed = true;
      logAggregationSucceedInThisCycle = false;
    }
    // Per-cycle status report to the RM.
    LogAggregationReport report =
        Records.newRecord(LogAggregationReport.class);
    report.setApplicationId(appId);
    report.setDiagnosticMessage(diagnosticMessage);
    report.setLogAggregationStatus(logAggregationSucceedInThisCycle
        ? LogAggregationStatus.RUNNING
        : LogAggregationStatus.RUNNING_WITH_FAILURE);
    this.context.getLogAggregationStatusForApps().add(report);
    if (appFinished) {
      // If the app is finished, one extra final report with log aggregation
      // status SUCCEEDED/FAILED will be sent to RM to inform the RM
      // that the log aggregation in this NM is completed.
      LogAggregationReport finalReport =
          Records.newRecord(LogAggregationReport.class);
      finalReport.setApplicationId(appId);
      finalReport.setLogAggregationStatus(renameTemporaryLogFileFailed
          ? LogAggregationStatus.FAILED : LogAggregationStatus.SUCCEEDED);
      this.context.getLogAggregationStatusForApps().add(finalReport);
    }
  } finally {
    if (writer != null) {
      writer.close();
    }
  }
}
/**
 * Enforces the per-app retention limit: among this node's finished
 * (non-tmp) aggregated log files in the remote app directory, deletes
 * the oldest ones so at most retentionSize - 1 remain before the next
 * upload. Best-effort -- failures are logged and retried implicitly on
 * the next cycle.
 */
private void cleanOldLogs() {
  try {
    final FileSystem remoteFS =
        this.remoteNodeLogFileForApp.getFileSystem(conf);
    Path appDir =
        this.remoteNodeLogFileForApp.getParent().makeQualified(
            remoteFS.getUri(), remoteFS.getWorkingDirectory());
    Set<FileStatus> status =
        new HashSet<FileStatus>(Arrays.asList(remoteFS.listStatus(appDir)));
    // Keep only this node's files, excluding in-progress tmp files.
    Iterable<FileStatus> mask =
        Iterables.filter(status, new Predicate<FileStatus>() {
          @Override
          public boolean apply(FileStatus next) {
            return next.getPath().getName()
              .contains(LogAggregationUtils.getNodeString(nodeId))
              && !next.getPath().getName().endsWith(
                  LogAggregationUtils.TMP_FILE_SUFFIX);
          }
        });
    status = Sets.newHashSet(mask);
    // Normally, we just need to delete one oldest log
    // before we upload a new log.
    // If we can not delete the older logs in this cycle,
    // we will delete them in next cycle.
    if (status.size() >= this.retentionSize) {
      // sort by the lastModificationTime ascending
      List<FileStatus> statusList = new ArrayList<FileStatus>(status);
      Collections.sort(statusList, new Comparator<FileStatus>() {
        public int compare(FileStatus s1, FileStatus s2) {
          return s1.getModificationTime() < s2.getModificationTime() ? -1
              : s1.getModificationTime() > s2.getModificationTime() ? 1 : 0;
        }
      });
      for (int i = 0 ; i <= statusList.size() - this.retentionSize; i++) {
        final FileStatus remove = statusList.get(i);
        try {
          // Deletion runs as the application submitter.
          userUgi.doAs(new PrivilegedExceptionAction<Object>() {
            @Override
            public Object run() throws Exception {
              remoteFS.delete(remove.getPath(), false);
              return null;
            }
          });
        } catch (Exception e) {
          LOG.error("Failed to delete " + remove.getPath(), e);
        }
      }
    }
  } catch (Exception e) {
    LOG.error("Failed to clean old logs", e);
  }
}
/**
 * Entry point of the aggregation thread: runs the aggregation loop,
 * falls back to local log-dir cleanup on any error, and always marks
 * aggregation as finished so shutdown logic is not left waiting.
 */
@Override
public void run() {
  try {
    doAppLogAggregation();
  } catch (Exception e) {
    // do post clean up of log directories on any exception
    LOG.error("Error occured while aggregating the log for the application "
        + appId, e);
    doAppLogAggregationPostCleanUp();
  } finally {
    if (!this.appAggregationFinished.get()) {
      LOG.warn("Aggregation did not complete for application " + appId);
    }
    this.appAggregationFinished.set(true);
  }
}
/**
 * Main loop of the aggregation thread. While the app is running, either
 * performs a rolling upload every rollingMonitorInterval seconds or just
 * parks; once the app finishes (and was not aborted) it performs the
 * final upload, cleans local dirs, and signals LOG_HANDLING_FINISHED.
 */
@SuppressWarnings("unchecked")
private void doAppLogAggregation() {
  while (!this.appFinishing.get() && !this.aborted.get()) {
    synchronized(this) {
      try {
        // Visible to tests: the thread is parked in wait() from here on.
        waiting.set(true);
        if (logAggregationInRolling) {
          // Interval is in seconds; wait() takes milliseconds. The wait
          // is cut short when finishLogAggregation(),
          // abortLogAggregation() or doLogAggregationOutOfBand() calls
          // notifyAll() on this object.
          wait(this.rollingMonitorInterval * 1000);
          if (this.appFinishing.get() || this.aborted.get()) {
            break;
          }
          uploadLogsForContainers(false);
        } else {
          wait(THREAD_SLEEP_TIME);
        }
      } catch (InterruptedException e) {
        // Treat interruption as "app finishing" so the final upload runs.
        LOG.warn("PendingContainers queue is interrupted");
        this.appFinishing.set(true);
      }
    }
  }
  if (this.aborted.get()) {
    return;
  }
  // App is finished, upload the container logs.
  uploadLogsForContainers(true);
  doAppLogAggregationPostCleanUp();
  this.dispatcher.getEventHandler().handle(
      new ApplicationEvent(this.appId,
          ApplicationEventType.APPLICATION_LOG_HANDLING_FINISHED));
  this.appAggregationFinished.set(true);
}
/**
 * Removes the application's local log directories from every configured
 * NM log root. Directories that do not exist (or cannot be stat'ed) are
 * silently skipped; deletion is delegated to the DeletionService and
 * performed as the application submitter.
 */
private void doAppLogAggregationPostCleanUp() {
  // Remove the local app-log-dirs
  List<Path> localAppLogDirs = new ArrayList<Path>();
  for (String rootLogDir : dirsHandler.getLogDirsForCleanup()) {
    Path logPath = new Path(rootLogDir, applicationId);
    try {
      // check if log dir exists
      lfs.getFileStatus(logPath);
      localAppLogDirs.add(logPath);
    } catch (UnsupportedFileSystemException ue) {
      // BUG FIX: original message lacked the space before "is".
      LOG.warn("Log dir " + rootLogDir + " is an unsupported file system",
          ue);
      continue;
    } catch (IOException fe) {
      // Best-effort: a missing/unreadable dir has nothing to clean up.
      continue;
    }
  }
  if (localAppLogDirs.size() > 0) {
    this.delService.delete(this.userUgi.getShortUserName(), null,
        localAppLogDirs.toArray(new Path[localAppLogDirs.size()]));
  }
}
/**
 * Builds the temporary upload target: the final aggregated log file's
 * directory and name with the aggregation tmp suffix appended.
 */
private Path getRemoteNodeTmpLogFileForApp() {
  final Path appLogDir = remoteNodeLogFileForApp.getParent();
  final String tmpName =
      remoteNodeLogFileForApp.getName() + LogAggregationUtils.TMP_FILE_SUFFIX;
  return new Path(appLogDir, tmpName);
}
// TODO: The condition: containerId.getId() == 1 to determine an AM container
// is not always true.
/** Asks the configured policy whether this container's logs get uploaded. */
private boolean shouldUploadLogs(ContainerLogContext logContext) {
  return logAggPolicy.shouldDoLogAggregation(logContext);
}
/**
 * Queues a container for upload in the next aggregation cycle, provided
 * the configured log aggregation policy accepts it.
 */
@Override
public void startContainerLogAggregation(ContainerLogContext logContext) {
  if (!shouldUploadLogs(logContext)) {
    return;
  }
  final ContainerId acceptedContainer = logContext.getContainerId();
  LOG.info("Considering container " + acceptedContainer
      + " for log-aggregation");
  this.pendingContainers.add(acceptedContainer);
}
/** Marks the app as finishing and wakes the aggregation thread. */
@Override
public synchronized void finishLogAggregation() {
  LOG.info("Application just finished : " + this.applicationId);
  this.appFinishing.set(true);
  this.notifyAll();
}
/** Aborts aggregation (no final upload) and wakes the thread. */
@Override
public synchronized void abortLogAggregation() {
  LOG.info("Aborting log aggregation for " + this.applicationId);
  this.aborted.set(true);
  this.notifyAll();
}
/** Disables uploads; uploadLogsForContainers becomes a no-op. */
@Override
public void disableLogAggregation() {
  this.logAggregationDisabled = true;
}
@Private
@VisibleForTesting
// This is only used for testing.
// This will wake the log aggregation thread that is waiting for
// rollingMonitorInterval.
// To use this method, make sure the log aggregation thread is running
// and waiting for rollingMonitorInterval.
public synchronized void doLogAggregationOutOfBand() {
  // Spin (releasing the monitor via wait) until the worker thread
  // reports it is parked, then wake it with notifyAll().
  while(!waiting.get()) {
    try {
      wait(200);
    } catch (InterruptedException e) {
      // Do Nothing -- test-only helper, keep polling.
    }
  }
  LOG.info("Do OutOfBand log aggregation");
  this.notifyAll();
}
/**
 * Tracks upload state for a single container so that, across rolling
 * cycles, files uploaded earlier are skipped and files since deleted on
 * disk are dropped from the bookkeeping.
 */
private class ContainerLogAggregator {
  private final ContainerId containerId;
  // Metadata of files already uploaded in earlier cycles.
  private Set<String> uploadedFileMeta =
      new HashSet<String>();
  public ContainerLogAggregator(ContainerId containerId) {
    this.containerId = containerId;
  }
  /**
   * Appends this container's not-yet-uploaded log files to the writer
   * and returns the local paths uploaded in this cycle (empty set on
   * failure, so the caller deletes nothing).
   */
  public Set<Path> doContainerLogAggregation(LogWriter writer,
      boolean appFinished) {
    LOG.info("Uploading logs for container " + containerId
        + ". Current good log dirs are "
        + StringUtils.join(",", dirsHandler.getLogDirsForRead()));
    final LogKey logKey = new LogKey(containerId);
    final LogValue logValue =
        new LogValue(dirsHandler.getLogDirsForRead(), containerId,
            userUgi.getShortUserName(), logAggregationContext,
            this.uploadedFileMeta, appFinished);
    try {
      writer.append(logKey, logValue);
    } catch (Exception e) {
      LOG.error("Couldn't upload logs for " + containerId
          + ". Skipping this container.", e);
      return new HashSet<Path>();
    }
    this.uploadedFileMeta.addAll(logValue
        .getCurrentUpLoadedFileMeta());
    // if any of the previous uploaded logs have been deleted,
    // we need to remove them from alreadyUploadedLogs
    Iterable<String> mask =
        Iterables.filter(uploadedFileMeta, new Predicate<String>() {
          @Override
          public boolean apply(String next) {
            return logValue.getAllExistingFilesMeta().contains(next);
          }
        });
    this.uploadedFileMeta = Sets.newHashSet(mask);
    return logValue.getCurrentUpLoadedFilesPath();
  }
}
// only for test
/** Test hook: returns the UGI used for remote-FS operations. */
@VisibleForTesting
public UserGroupInformation getUgi() {
  return this.userUgi;
}
}
| |
/*PLEASE DO NOT EDIT THIS CODE*/
/*This code was generated using the UMPLE 1.15.0.1751 modeling language!*/
package service;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

import shared.Credentials;
import shared.domain.Election;
import shared.domain.ElectionForPosition;
import shared.domain.Position;
/**
 * Handles all database-related tasks regarding {@link ElectionForPosition}
 * objects: looking a record up, inserting it when absent, and listing all
 * records linked to a selected {@link Election}.
 *
 * <p>Generated from an UMPLE state-machine model; the public interface
 * (singleton accessor, event-triggering setters, state queries) is kept
 * unchanged. SQL statements previously built by string concatenation are
 * now parameterized ({@link PreparedStatement}) to prevent SQL injection,
 * and statements/result sets are closed after use.
 */
public class ElectionForPositionService
{
  //------------------------
  // STATIC VARIABLES
  //------------------------

  // Lazily created singleton instance.
  private static ElectionForPositionService theInstance = null;

  //------------------------
  // MEMBER VARIABLES
  //------------------------

  // Record to look up in the database.
  private ElectionForPosition electionForPositionToSearch;
  // Record to insert; setting it fires the createElectionForPosition event.
  private ElectionForPosition newElectionForPosition;
  // Result of the latest single-record search.
  private boolean electionForPositionFound;
  // Result of the latest insert attempt.
  private boolean electionForPositionAdded;
  // Election whose position links are listed; setting it fires
  // findElectionForPositions.
  private Election selectedElection;
  // All election_for_position rows found for selectedElection.
  private List<ElectionForPosition> foundEFPs;
  // JDBC connection, (re)opened on every state transition.
  // NOTE(review): the previous connection is never closed, so repeated
  // transitions leak connections -- confirm whether connection lifecycle
  // is managed elsewhere before changing this generated behavior.
  private Connection theConnection;
  // Whether tryFindingElectionForPosition located an existing row.
  private boolean efpFound;

  // ElectionForPositionService state machine.
  enum ElectionForPositionServiceCycle { Idle, CreatingElectionForPosition, FindingElectionForPosition, FindingElctionForPositionsList }
  // Current state. (Renamed from the enum-shadowing field name
  // "ElectionForPositionServiceCycle" for readability.)
  private ElectionForPositionServiceCycle serviceCycle;

  //------------------------
  // CONSTRUCTOR
  //------------------------

  private ElectionForPositionService()
  {
    electionForPositionFound = false;
    electionForPositionAdded = false;
    efpFound = false;
    setElectionForPositionServiceCycle(ElectionForPositionServiceCycle.Idle);
  }

  /** Returns the lazily created singleton instance. */
  public static ElectionForPositionService getInstance()
  {
    if(theInstance == null)
    {
      theInstance = new ElectionForPositionService();
    }
    return theInstance;
  }

  //------------------------
  // INTERFACE
  //------------------------

  public boolean setElectionForPositionToSearch(ElectionForPosition aElectionForPositionToSearch)
  {
    electionForPositionToSearch = aElectionForPositionToSearch;
    return true;
  }

  /** Sets the record to insert and fires the create event. */
  public boolean setNewElectionForPosition(ElectionForPosition aNewElectionForPosition)
  {
    newElectionForPosition = aNewElectionForPosition;
    createElectionForPosition();
    return true;
  }

  public boolean setElectionForPositionFound(boolean aElectionForPositionFound)
  {
    electionForPositionFound = aElectionForPositionFound;
    return true;
  }

  public boolean setElectionForPositionAdded(boolean aElectionForPositionAdded)
  {
    electionForPositionAdded = aElectionForPositionAdded;
    return true;
  }

  /** Sets the election whose links are listed and fires the find event. */
  public boolean setSelectedElection(Election aSelectedElection)
  {
    selectedElection = aSelectedElection;
    findElectionForPositions();
    return true;
  }

  public boolean setFoundEFPs(List<ElectionForPosition> aFoundEFPs)
  {
    foundEFPs = aFoundEFPs;
    return true;
  }

  public boolean setEfpFound(boolean aEfpFound)
  {
    efpFound = aEfpFound;
    return true;
  }

  /**
   * Returns the ElectionForPosition object that was set to search in the database.
   * The electionForPositionToSearch attribute, once set, triggers a search in the database.
   */
  public ElectionForPosition getElectionForPositionToSearch()
  {
    return electionForPositionToSearch;
  }

  /**
   * Returns the new ElectionForPosition object to be inserted in the database.
   * The newElectionForPosition attribute, once set, triggers an insertion in the database.
   */
  public ElectionForPosition getNewElectionForPosition()
  {
    return newElectionForPosition;
  }

  /** Returns the result of the latest search for an ElectionForPosition object. */
  public boolean getElectionForPositionFound()
  {
    return electionForPositionFound;
  }

  /** Returns whether the latest insert attempt succeeded. */
  public boolean getElectionForPositionAdded()
  {
    return electionForPositionAdded;
  }

  public Election getSelectedElection()
  {
    return selectedElection;
  }

  /**
   * All election_for_position objects with the election id equal to the one
   * of the selectedElection.
   */
  public List<ElectionForPosition> getFoundEFPs()
  {
    return foundEFPs;
  }

  public boolean getEfpFound()
  {
    return efpFound;
  }

  public boolean isElectionForPositionFound()
  {
    return electionForPositionFound;
  }

  public boolean isElectionForPositionAdded()
  {
    return electionForPositionAdded;
  }

  public boolean isEfpFound()
  {
    return efpFound;
  }

  public String getElectionForPositionServiceCycleFullName()
  {
    return serviceCycle.toString();
  }

  public ElectionForPositionServiceCycle getElectionForPositionServiceCycle()
  {
    return serviceCycle;
  }

  /** Event: from Idle, start looking the new record up (insert if absent). */
  public boolean createElectionForPosition()
  {
    boolean wasEventProcessed = false;
    switch (serviceCycle)
    {
      case Idle:
        setElectionForPositionServiceCycle(ElectionForPositionServiceCycle.FindingElectionForPosition);
        wasEventProcessed = true;
        break;
      default:
        break;
    }
    return wasEventProcessed;
  }

  /** Event: from Idle, start listing rows for the selected election. */
  public boolean findElectionForPositions()
  {
    boolean wasEventProcessed = false;
    switch (serviceCycle)
    {
      case Idle:
        setElectionForPositionServiceCycle(ElectionForPositionServiceCycle.FindingElctionForPositionsList);
        wasEventProcessed = true;
        break;
      default:
        break;
    }
    return wasEventProcessed;
  }

  // Auto transition: CreatingElectionForPosition -> Idle.
  private boolean __autotransition637__()
  {
    boolean wasEventProcessed = false;
    switch (serviceCycle)
    {
      case CreatingElectionForPosition:
        setElectionForPositionServiceCycle(ElectionForPositionServiceCycle.Idle);
        wasEventProcessed = true;
        break;
      default:
        break;
    }
    return wasEventProcessed;
  }

  // Auto transition: FindingElectionForPosition -> Idle when row exists.
  private boolean __autotransition638__()
  {
    boolean wasEventProcessed = false;
    switch (serviceCycle)
    {
      case FindingElectionForPosition:
        if (efpFound)
        {
          setElectionForPositionServiceCycle(ElectionForPositionServiceCycle.Idle);
          wasEventProcessed = true;
        }
        break;
      default:
        break;
    }
    return wasEventProcessed;
  }

  // Auto transition: FindingElectionForPosition -> Creating when absent.
  private boolean __autotransition639__()
  {
    boolean wasEventProcessed = false;
    switch (serviceCycle)
    {
      case FindingElectionForPosition:
        if (!efpFound)
        {
          setElectionForPositionServiceCycle(ElectionForPositionServiceCycle.CreatingElectionForPosition);
          wasEventProcessed = true;
        }
        break;
      default:
        break;
    }
    return wasEventProcessed;
  }

  // Auto transition: FindingElctionForPositionsList -> Idle.
  private boolean __autotransition640__()
  {
    boolean wasEventProcessed = false;
    switch (serviceCycle)
    {
      case FindingElctionForPositionsList:
        setElectionForPositionServiceCycle(ElectionForPositionServiceCycle.Idle);
        wasEventProcessed = true;
        break;
      default:
        break;
    }
    return wasEventProcessed;
  }

  /**
   * Transitions the state machine: opens a fresh DB connection (preserved
   * generated behavior), records the new state, then runs the state's
   * entry action followed by its automatic transitions.
   */
  private void setElectionForPositionServiceCycle(ElectionForPositionServiceCycle aElectionForPositionServiceCycle)
  {
    try {
      Class.forName("com.mysql.jdbc.Driver").newInstance();
      theConnection = DriverManager.getConnection("jdbc:mysql://"+Credentials.db_hostname+"/elections", Credentials.db_username, Credentials.db_password);
    } catch(Exception e) {
      System.err.println("Exception: " + e.getMessage());
    }
    serviceCycle = aElectionForPositionServiceCycle;
    // entry actions and do activities
    switch(serviceCycle)
    {
      case CreatingElectionForPosition:
        addElectionForPosition();
        __autotransition637__();
        break;
      case FindingElectionForPosition:
        tryFindingElectionForPosition();
        __autotransition638__();
        __autotransition639__();
        break;
      case FindingElctionForPositionsList:
        tryFindingElectionForPositionsList();
        __autotransition640__();
        break;
      default:
        break;
    }
  }

  public void delete()
  {}

  /**
   * Inserts newElectionForPosition into election_for_position.
   * SECURITY FIX: uses a parameterized statement instead of building the
   * SQL by string concatenation; also closes the statement when done.
   */
  public void addElectionForPosition(){
    try {
      PreparedStatement stmt = theConnection.prepareStatement(
          "insert into elections.election_for_position (election_id_election, position_id_position) values (?, ?)");
      try {
        stmt.setString(1, String.valueOf(newElectionForPosition.getElection().getIdElection()));
        stmt.setString(2, String.valueOf(newElectionForPosition.getPosition().getIdPosition()));
        stmt.executeUpdate();
        electionForPositionAdded=true;
      } finally {
        stmt.close();
      }
    } catch(Exception e) {
      System.err.println("Exception: " + e.getMessage());
      electionForPositionAdded=false;
    }
  }

  //------------------------
  // DEVELOPER CODE - PROVIDED AS-IS
  //------------------------

  /**
   * Checks whether a row for newElectionForPosition already exists;
   * result is left in efpFound. SECURITY FIX: parameterized query.
   */
  private void tryFindingElectionForPosition() {
    efpFound=true;
    try {
      PreparedStatement stmt = theConnection.prepareStatement(
          "SELECT * FROM election_for_position where election_id_election=? and position_id_position=?");
      try {
        stmt.setString(1, String.valueOf(newElectionForPosition.getElection().getIdElection()));
        stmt.setString(2, String.valueOf(newElectionForPosition.getPosition().getIdPosition()));
        ResultSet rs = stmt.executeQuery();
        if (!rs.next())
          efpFound=false;
        rs.close();
      } finally {
        stmt.close();
      }
    } catch(Exception e) {
      System.err.println("Exception: " + e.getMessage());
      efpFound=false;
    }
  }

  /**
   * Loads all election_for_position rows for selectedElection into
   * foundEFPs. SECURITY FIX: parameterized query.
   */
  private void tryFindingElectionForPositionsList() {
    foundEFPs=new ArrayList<ElectionForPosition>();
    try {
      PreparedStatement stmt = theConnection.prepareStatement(
          "SELECT * FROM election_for_position where election_id_election=?");
      try {
        stmt.setString(1, String.valueOf(selectedElection.getIdElection()));
        ResultSet rs = stmt.executeQuery();
        while (rs.next()) {
          Integer idPosition=Integer.parseInt(rs.getString("position_id_position"));
          Position position=new Position(idPosition, "", "");
          Integer idEFP=Integer.parseInt(rs.getString("id_election_for_position"));
          ElectionForPosition efp=new ElectionForPosition(idEFP, selectedElection, position);
          foundEFPs.add(efp);
        }
        rs.close();
      } finally {
        stmt.close();
      }
    } catch(Exception e) {
      System.err.println("Exception: " + e.getMessage());
    }
  }
}
| |
package com.cloud.api.command.user.loadbalancer;
import com.cloud.acl.RoleType;
import com.cloud.api.APICommand;
import com.cloud.api.APICommandGroup;
import com.cloud.api.ApiCommandJobType;
import com.cloud.api.ApiConstants;
import com.cloud.api.ApiErrorCode;
import com.cloud.api.BaseAsyncCmd;
import com.cloud.api.BaseAsyncCreateCmd;
import com.cloud.api.Parameter;
import com.cloud.api.ServerApiException;
import com.cloud.api.response.DomainResponse;
import com.cloud.api.response.IPAddressResponse;
import com.cloud.api.response.LoadBalancerResponse;
import com.cloud.api.response.NetworkResponse;
import com.cloud.api.response.ZoneResponse;
import com.cloud.context.CallContext;
import com.cloud.event.EventTypes;
import com.cloud.legacymodel.dc.DataCenter;
import com.cloud.legacymodel.exceptions.InsufficientAddressCapacityException;
import com.cloud.legacymodel.exceptions.InvalidParameterValueException;
import com.cloud.legacymodel.exceptions.NetworkRuleConflictException;
import com.cloud.legacymodel.exceptions.ResourceAllocationException;
import com.cloud.legacymodel.exceptions.ResourceUnavailableException;
import com.cloud.legacymodel.network.LoadBalancer;
import com.cloud.legacymodel.network.Network;
import com.cloud.legacymodel.user.Account;
import com.cloud.model.enumeration.NetworkType;
import com.cloud.network.IpAddress;
import com.cloud.utils.net.NetUtils;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@APICommand(name = "createLoadBalancerRule", group = APICommandGroup.LoadBalancerService, description = "Creates a load balancer rule", responseObject = LoadBalancerResponse.class,
requestHasSensitiveInfo = false, responseHasSensitiveInfo = false)
public class CreateLoadBalancerRuleCmd extends BaseAsyncCreateCmd /*implements LoadBalancer */ {
public static final Logger s_logger = LoggerFactory.getLogger(CreateLoadBalancerRuleCmd.class.getName());
// Name of the API response object returned by this command.
private static final String s_name = "createloadbalancerruleresponse";

/////////////////////////////////////////////////////
//////////////// API parameters /////////////////////
/////////////////////////////////////////////////////

@Parameter(name = ApiConstants.ALGORITHM, type = CommandType.STRING, required = true, description = "load balancer algorithm (source, roundrobin, leastconn)")
private String algorithm;
@Parameter(name = ApiConstants.DESCRIPTION, type = CommandType.STRING, description = "the description of the load balancer rule", length = 4096)
private String description;
@Parameter(name = ApiConstants.NAME, type = CommandType.STRING, required = true, description = "name of the load balancer rule")
private String loadBalancerRuleName;
@Parameter(name = ApiConstants.PRIVATE_PORT,
        type = CommandType.INTEGER,
        required = true,
        description = "the private port of the private IP address/virtual machine where the network traffic will be load balanced to")
private Integer privatePort;
@Parameter(name = ApiConstants.PUBLIC_IP_ID,
        type = CommandType.UUID,
        entityType = IPAddressResponse.class,
        description = "public IP address ID from where the network traffic will be load balanced from")
private Long publicIpId;
@Parameter(name = ApiConstants.ZONE_ID,
        type = CommandType.UUID,
        entityType = ZoneResponse.class,
        required = false,
        description = "zone where the load balancer is going to be created. This parameter is required when LB service provider is ElasticLoadBalancerVm")
private Long zoneId;
@Parameter(name = ApiConstants.PUBLIC_PORT,
        type = CommandType.INTEGER,
        required = true,
        description = "the public port from where the network traffic will be load balanced from")
private Integer publicPort;
@Parameter(name = ApiConstants.OPEN_FIREWALL, type = CommandType.BOOLEAN, description = "if true, firewall rule for"
        + " source/end public port is automatically created; if false - firewall rule has to be created explicitely. If not specified 1) defaulted to false when LB"
        + " rule is being created for VPC guest network 2) in all other cases defaulted to true")
private Boolean openFirewall;
@Parameter(name = ApiConstants.ACCOUNT,
        type = CommandType.STRING,
        description = "the account associated with the load balancer. Must be used with the domainId parameter.")
private String accountName;
@Parameter(name = ApiConstants.DOMAIN_ID, type = CommandType.UUID, entityType = DomainResponse.class, description = "the domain ID associated with the load balancer")
private Long domainId;
// NOTE(review): deprecated parameter -- getSourceCidrList() rejects any
// supplied value and points callers to createFirewallRule.
@Parameter(name = ApiConstants.CIDR_LIST, type = CommandType.LIST, collectionType = CommandType.STRING, description = "the CIDR list to forward traffic from")
private List<String> cidrlist;
@Parameter(name = ApiConstants.NETWORK_ID, type = CommandType.UUID, entityType = NetworkResponse.class, description = "The guest network this "
        + "rule will be created for. Required when public Ip address is not associated with any Guest network yet (VPC case)")
private Long networkId;
@Parameter(name = ApiConstants.PROTOCOL, type = CommandType.STRING, description = "The protocol for the LB")
private String lbProtocol;
@Parameter(name = ApiConstants.FOR_DISPLAY, type = CommandType.BOOLEAN, description = "an optional field, whether to the display the rule to the end user or not", since = "4" +
        ".4", authorized = {RoleType.Admin})
private Boolean display;
@Parameter(name = ApiConstants.CLIENT_TIMEOUT,
        type = CommandType.INTEGER,
        description = "the HAProxy client_timeout setting for this load balancing rule (in ms).")
private Integer clientTimeout;
@Parameter(name = ApiConstants.SERVER_TIMEOUT,
        type = CommandType.INTEGER,
        description = "the HAProxy server_timeout setting for this load balancing rule (in ms).")
private Integer serverTimeout;
/////////////////////////////////////////////////////
/////////////////// Accessors ///////////////////////
/////////////////////////////////////////////////////
public String getAlgorithm() {
return algorithm.toLowerCase();
}
/** @return the rule description supplied by the caller (may be null). */
public String getDescription() {
return description;
}
/** @return the load balancer rule name supplied by the caller. */
public String getLoadBalancerRuleName() {
return loadBalancerRuleName;
}
/** @return the private (destination) port of the rule. */
public Integer getPrivatePort() {
return privatePort;
}
/** @return the public (source) port of the rule. */
public Integer getPublicPort() {
return publicPort;
}
/** @return HAProxy client_timeout in ms, or null to use the service default. */
public Integer getClientTimeout() {
return clientTimeout;
}
public void setClientTimeout(final Integer clientTimeout) {
this.clientTimeout = clientTimeout;
}
/** @return HAProxy server_timeout in ms, or null to use the service default. */
public Integer getServerTimeout() {
return serverTimeout;
}
public void setServerTimeout(final Integer serverTimeout) {
this.serverTimeout = serverTimeout;
}
/**
 * The cidrList parameter is deprecated for load balancer rules: any supplied
 * value is rejected, otherwise this always yields null.
 *
 * @return always {@code null}
 * @throws InvalidParameterValueException if a CIDR list was supplied
 */
public List<String> getSourceCidrList() {
    if (cidrlist == null) {
        return null;
    }
    throw new InvalidParameterValueException(
            "Parameter cidrList is deprecated; if you need to open firewall rule for the specific CIDR, please refer to createFirewallRule command");
}
/** @return the application-level LB protocol parameter (validated as TCP-based in create()). */
public String getLbProtocol() {
return lbProtocol;
}
/**
 * Second phase of rule creation: create() has already persisted the rule, this
 * applies the optional ingress firewall rule and builds the API response.
 * On failure (firewall step returned false, or the rule cannot be reloaded) the
 * rule and any related firewall rule are removed from the DB only and an
 * INTERNAL_ERROR is surfaced to the caller.
 */
@Override
public void execute() throws ResourceAllocationException, ResourceUnavailableException {
final CallContext callerContext = CallContext.current();
boolean success = true;
LoadBalancer rule = null;
try {
CallContext.current().setEventDetails("Rule Id: " + getEntityId());
if (getOpenFirewall()) {
success = success && _firewallService.applyIngressFirewallRules(getSourceIpAddressId(), callerContext.getCallingAccount());
}
// State might be different after the rule is applied, so get new object here
rule = _entityMgr.findById(LoadBalancer.class, getEntityId());
LoadBalancerResponse lbResponse = new LoadBalancerResponse();
if (rule != null) {
lbResponse = _responseGenerator.createLoadBalancerResponse(rule);
setResponseObject(lbResponse);
}
lbResponse.setResponseName(getCommandName());
// Exceptions are only logged here; whether the rollback below runs depends on
// 'success' and on whether 'rule' was loaded before the failure occurred.
} catch (final Exception ex) {
s_logger.warn("Failed to create LB rule due to exception ", ex);
}
if (!success || rule == null) {
if (getOpenFirewall()) {
_firewallService.revokeRelatedFirewallRule(getEntityId(), true);
}
// no need to apply the rule on the backend as it exists in the db only
_lbService.deleteLoadBalancerRule(getEntityId(), false);
throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, "Failed to create load balancer rule");
}
}
/**
 * Resolves the effective openFirewall flag. When the caller did not specify it,
 * the firewall is opened by default except for VPC-owned IPs; an explicit
 * {@code true} combined with a VPC IP is rejected.
 *
 * @throws InvalidParameterValueException when openFirewall=true is requested
 *         for an IP address that belongs to a VPC
 */
public Boolean getOpenFirewall() {
    final boolean isVpc = getVpcId() != null;
    if (openFirewall == null) {
        // Default behaviour: open for non-VPC, closed for VPC.
        return !isVpc;
    }
    if (isVpc && openFirewall) {
        throw new InvalidParameterValueException("Can't have openFirewall=true when IP address belongs to VPC");
    }
    return openFirewall;
}
/**
 * Resolves the source public IP id: validates a caller-supplied publicIpId, or
 * falls back to the IP recorded on the already-created rule entity. Returns
 * null when neither is available.
 *
 * @throws InvalidParameterValueException when the supplied publicIpId does not
 *         resolve to a ready-to-use IP address
 */
public Long getSourceIpAddressId() {
if (publicIpId != null) {
final IpAddress ipAddr = _networkService.getIp(publicIpId);
if (ipAddr == null || !ipAddr.readyToUse()) {
throw new InvalidParameterValueException("Unable to create load balancer rule, invalid IP address ID " + publicIpId);
}
} else if (getEntityId() != null) {
// Post-create path: reuse the IP that was stored on the persisted rule.
final LoadBalancer rule = _entityMgr.findById(LoadBalancer.class, getEntityId());
return rule.getSourceIpAddressId();
}
return publicIpId;
}
/**
 * Looks up the VPC id of the supplied public IP, or null when no public IP was
 * given (or the IP is not VPC-associated).
 *
 * @throws InvalidParameterValueException when the supplied publicIpId does not
 *         resolve to a ready-to-use IP address
 */
private Long getVpcId() {
    if (publicIpId == null) {
        return null;
    }
    final IpAddress ipAddr = _networkService.getIp(publicIpId);
    if (ipAddr == null || !ipAddr.readyToUse()) {
        throw new InvalidParameterValueException("Unable to create load balancer rule, invalid IP address ID " + publicIpId);
    }
    return ipAddr.getVpcId();
}
/** Overrides the source public IP id used by this command. */
public void setSourceIpAddressId(final Long ipId) {
this.publicIpId = ipId;
}
/** @return the API response name constant for this command. */
@Override
public String getCommandName() {
return s_name;
}
/////////////////////////////////////////////////////
/////////////// API Implementation///////////////////
/////////////////////////////////////////////////////
/** Owner of the created rule; resolved from the public IP or the account/domain parameters. */
@Override
public long getEntityOwnerId() {
return getAccountId();
}
/** Rules are shown to end users by default; only an explicit display=false hides them. */
@Override
public boolean isDisplay() {
    return display == null || display;
}
/**
 * First phase of rule creation: validates parameters (deprecated cidrList,
 * TCP-only lbProtocol, supported algorithm) and persists the rule via the LB
 * service; the backend is applied later in execute().
 *
 * @throws ServerApiException wrapping rule conflicts, capacity shortages and
 *         parameter errors from the LB service
 */
@Override
public void create() {
//cidr list parameter is deprecated
if (cidrlist != null) {
throw new InvalidParameterValueException(
"Parameter cidrList is deprecated; if you need to open firewall rule for the specific CIDR, please refer to createFirewallRule command");
}
if (lbProtocol != null && !lbProtocol.toLowerCase().startsWith("tcp")) {
throw new InvalidParameterValueException(
"Only TCP protocol is supported because HAProxy can only do TCP.");
}
// NOTE(review): this null-check assumes getAlgorithm() can return null; verify the
// getter is null-safe (it lower-cases the raw field).
if (getAlgorithm() != null && !NetUtils.isValidAlgorithm(getAlgorithm())) {
throw new InvalidParameterValueException("Only source/roundrobin/leastconn are supported loadbalance algorithms.");
}
try {
final LoadBalancer result =
_lbService.createPublicLoadBalancerRule(getXid(), getName(), getDescription(), getSourcePortStart(), getSourcePortEnd(), getDefaultPortStart(),
getDefaultPortEnd(), getSourceIpAddressId(), getProtocol(), getAlgorithm(), getNetworkId(), getEntityOwnerId(), getOpenFirewall(),
getLbProtocol(),
isDisplay(), getClientTimeout(), getServerTimeout());
this.setEntityId(result.getId());
this.setEntityUuid(result.getUuid());
} catch (final NetworkRuleConflictException e) {
s_logger.warn("Exception: ", e);
throw new ServerApiException(ApiErrorCode.NETWORK_RULE_CONFLICT_ERROR, e.getMessage());
} catch (final InsufficientAddressCapacityException e) {
s_logger.warn("Exception: ", e);
throw new ServerApiException(ApiErrorCode.INSUFFICIENT_CAPACITY_ERROR, e.getMessage());
} catch (final InvalidParameterValueException e) {
throw new ServerApiException(ApiErrorCode.PARAM_ERROR, e.getMessage());
}
}
/** Single-port rule: the public port serves as both ends of the source port range. */
public Integer getSourcePortStart() {
return publicPort.intValue();
}
public Integer getSourcePortEnd() {
return publicPort.intValue();
}
/** Transport protocol is always TCP for LB rules. */
public String getProtocol() {
return NetUtils.TCP_PROTO;
}
/** Single-port rule: the private port serves as both ends of the destination range. */
public int getDefaultPortStart() {
return privatePort.intValue();
}
public int getDefaultPortEnd() {
return privatePort.intValue();
}
public void setPublicIpId(final Long publicIpId) {
this.publicIpId = publicIpId;
}
@Override
public String getEventType() {
return EventTypes.EVENT_LOAD_BALANCER_CREATE;
}
@Override
public String getEventDescription() {
return "creating load balancer: " + getName() + " account: " + getAccountName();
}
/** The rule name doubles as the display name used in event descriptions. */
public String getName() {
return loadBalancerRuleName;
}
public String getAccountName() {
return accountName;
}
@Override
public ApiCommandJobType getInstanceType() {
return ApiCommandJobType.FirewallRule;
}
/** Async jobs for this command are serialized per network object. */
@Override
public String getSyncObjType() {
return BaseAsyncCmd.networkSyncObject;
}
@Override
public Long getSyncObjId() {
return getNetworkId();
}
/**
 * Resolves the guest network the rule belongs to, in precedence order: the
 * explicit networkId parameter; then the zone (Advanced zones use the account's
 * single isolated network, other zones the exclusive guest network); and
 * finally the network associated with the supplied public IP.
 *
 * @throws InvalidParameterValueException when no network can be determined, or
 *         the account owns zero or more than one isolated network in the zone
 */
public long getNetworkId() {
    if (networkId != null) {
        return networkId;
    }
    final Long zoneId = getZoneId();
    if (zoneId == null) {
        final Long ipId = getSourceIpAddressId();
        if (ipId == null) {
            throw new InvalidParameterValueException("Either networkId or zoneId or publicIpId has to be specified");
        }
    }
    if (zoneId != null) {
        final DataCenter zone = _entityMgr.findById(DataCenter.class, zoneId);
        if (zone.getNetworkType() == NetworkType.Advanced) {
            final List<? extends Network> networks = _networkService.getIsolatedNetworksOwnedByAccountInZone(getZoneId(), _accountService.getAccount(getEntityOwnerId()));
            if (networks.isEmpty()) {
                final String domain = _domainService.getDomain(getDomainId()).getName();
                throw new InvalidParameterValueException("Account name=" + getAccountName() + " domain=" + domain + " doesn't have virtual networks in zone=" +
                        zone.getName());
            }
            // The empty case is handled above (the former 'size() < 1' branch was
            // unreachable dead code); only the ambiguous multi-network case remains.
            if (networks.size() > 1) {
                throw new InvalidParameterValueException("Account has more than one isolated network in the zone");
            }
            return networks.get(0).getId();
        } else {
            final Network defaultGuestNetwork = _networkService.getExclusiveGuestNetwork(zoneId);
            if (defaultGuestNetwork == null) {
                throw new InvalidParameterValueException("Unable to find a default guest network for account " + getAccountName() + " in domain ID=" + getDomainId());
            }
            return defaultGuestNetwork.getId();
        }
    } else {
        final IpAddress ipAddr = _networkService.getIp(publicIpId);
        if (ipAddr.getAssociatedWithNetworkId() != null) {
            return ipAddr.getAssociatedWithNetworkId();
        }
        throw new InvalidParameterValueException("IP address ID=" + publicIpId + " is not associated with any network");
    }
}
public Long getZoneId() {
return zoneId;
}
/**
 * Resolves the owning domain: the public IP's domain wins, then the explicit
 * domainId parameter, then the calling account's own domain.
 */
public long getDomainId() {
if (publicIpId != null) {
return _networkService.getIp(getSourceIpAddressId()).getDomainId();
}
if (domainId != null) {
return domainId;
}
return CallContext.current().getCallingAccount().getDomainId();
}
/**
 * Resolves the owning account: the public IP's owner is authoritative; without
 * an IP, the explicit accountName/domainId pair is looked up.
 *
 * @throws InvalidParameterValueException when neither a public IP nor a
 *         resolvable account/domain pair was supplied
 */
public long getAccountId() {
    if (publicIpId != null) {
        return _networkService.getIp(getSourceIpAddressId()).getAccountId();
    }
    if (domainId == null || accountName == null) {
        throw new InvalidParameterValueException("Can't define IP owner. Either specify account/domainId or publicIpId");
    }
    final Account account = _responseGenerator.findAccountByNameDomain(accountName, domainId);
    if (account == null) {
        throw new InvalidParameterValueException("Unable to find account " + accountName + " in domain ID=" + domainId);
    }
    return account.getId();
}
/**
 * External transaction id for the new rule.
 * FIXME: not implemented — always returns null (presumably tolerated by the LB
 * service; verify before relying on it).
 */
public String getXid() {
/*FIXME*/
return null;
}
}
| |
/*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.auraframework.integration.test.css;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.auraframework.Aura;
import org.auraframework.def.DefDescriptor;
import org.auraframework.def.StyleDef;
import org.auraframework.def.TokenDescriptorProvider;
import org.auraframework.def.TokenMapProvider;
import org.auraframework.def.TokensDef;
import org.auraframework.impl.css.StyleTestCase;
import org.auraframework.system.Annotations.Provider;
import org.auraframework.throwable.AuraRuntimeException;
import org.auraframework.throwable.quickfix.QuickFixException;
import org.junit.Test;
import java.util.List;
import java.util.Map;
/**
 * Unit tests for resolving token function values in CSS files.
 *
 * <p>Covers the {@code token()} / {@code t()} CSS functions: quoting and
 * concatenation rules, inheritance and imports between TokensDef files,
 * app-level overrides (static, descriptor-provided and map-provided tokens),
 * media-query usage, cross references, and auto-prefixing of token values.
 */
public class TokenResolutionTest extends StyleTestCase {
// Parses the style def and compares against 'expected', with ".THIS" replaced
// by the def's generated class name.
private void assertStyle(DefDescriptor<StyleDef> style, String expected) throws QuickFixException {
expected = expected.replace(".THIS", "." + definitionService.getDefinition(style).getClassName());
assertEquals("Did not get the expected css code", expected, getParsedCssUseAppTokens(style));
}
/** where the token value is unquoted */
@Test
public void testUnquoted() throws Exception {
addNsTokens(tokens().token("color", "red"));
String src = ".THIS {color: token(color);}";
assertStyle(addStyleDef(src), ".THIS {color:red}");
}
/** where the token value is double quoted */
@Test
public void testDoubleQuoted() throws Exception {
addNsTokens(tokens().token("color", "red"));
String src = ".THIS {color: token(\"color\");}";
assertStyle(addStyleDef(src), ".THIS {color:red}");
}
/** proper stripping of outer quotes when using concatenation */
@Test
public void testQuotedConcatenation() throws Exception {
addNsTokens(tokens()
.token("margin", "10px")
.token("font", "Times")
.token("gradient", "red, yellow"));
String src = ".THIS {\n" +
" margin: token('5px ' + margin);" + // quotes should not be stripped
" font-family: token(\"font + ',Arial'\");\n" + // quotes should be stripped
" margin: token('5px ' + margin + ' 10px');\n" + // quotes should not be stripped
" background: t('linear-grad('+ gradient +')');\n" + // quotes should not be stripped
"}";
String expected = ".THIS {" +
"margin:5px 10px; " +
"font-family:Times,Arial; " +
"margin:5px 10px 10px; " +
"background:linear-grad(red, yellow)" +
"}";
assertStyle(addStyleDef(src), expected);
}
/** where the token value is inherited */
@Test
public void testInherited() throws Exception {
DefDescriptor<TokensDef> parent = addSeparateTokens(tokens().token("color", "red"));
addNsTokens(tokens().parent(parent));
String src = ".THIS {color: token(color);}";
assertStyle(addStyleDef(src), ".THIS {color:red}");
}
/** where the token is imported */
@Test
public void testImported() throws Exception {
DefDescriptor<TokensDef> imported = addSeparateTokens(tokens().token("color", "red"));
addNsTokens(tokens().imported(imported));
String src = ".THIS {color: token(color);}";
assertStyle(addStyleDef(src), ".THIS {color:red}");
}
/** where the token value is overridden */
@Test
public void testOverridden() throws Exception {
DefDescriptor<TokensDef> parent = addSeparateTokens(tokens().token("color", "red"));
addNsTokens(tokens().parent(parent).token("color", "blue"));
String src = ".THIS {color: token(color);}";
assertStyle(addStyleDef(src), ".THIS {color:blue}");
}
/** using the 't' alternative function name */
@Test
public void testShorthand() throws Exception {
addNsTokens(tokens().token("color", "red"));
String src = ".THIS {color: t(color);}";
assertStyle(addStyleDef(src), ".THIS {color:red}");
}
/** using multiple token functions in one declaration value */
@Test
public void testMultipleTokenFunctions() throws Exception {
addNsTokens(tokens().token("marginTB", "7px").token("marginLR", "5px"));
String src = ".THIS {margin:t(marginTB) t(marginLR)}";
assertStyle(addStyleDef(src), ".THIS {margin:7px 5px}");
}
/** errors when the def does not exist */
@Test
public void testNonexistentDef() throws Exception {
try {
definitionService.getDefinition(addStyleDef(".THIS{color: token(color)")).getCode();
fail("expected exception");
} catch (Exception e) {
// expected: no namespace tokens def was registered
}
}
/** errors when the token does not exist */
@Test
public void testNonexistentToken() throws Exception {
addNsTokens(tokens().token("color", "red"));
try {
definitionService.getDefinition(addStyleDef(".THIS{color: token(dolor)")).getCode();
fail("expected exception");
} catch (Exception e) {
// expected: 'dolor' is not a defined token
}
}
/** if the token value is an empty string then the declaration should be removed */
@Test
public void testDeclarationRemoval() throws Exception {
addNsTokens(tokens().token("color", ""));
String src = ".THIS {color: token(color); font: arial}";
assertStyle(addStyleDef(src), ".THIS {font:arial}");
}
/** test expressions */
@Test
public void testExpression() throws Exception {
addNsTokens(tokens()
.token("margin", "10px")
.token("spacious", "true")
.token("lineHeight", "5"));
String src = ".THIS {\n" +
" margin: t('5px ' + margin);\n" +
" margin: t(margin + ' 5px');\n" +
" margin: t(margin + ' 5px ' + margin);\n" +
// " padding: t(spacious == true ? '30px' : '5px');\n" +
// " line-height: t(lineHeight * 2);\n" +
"}";
String expected = ".THIS {" +
"margin:5px 10px; " +
"margin:10px 5px; " +
"margin:10px 5px 10px" +
// "padding:30px; " +
// "line-height:10;" +
"}";
assertStyle(addStyleDef(src), expected);
}
/** token in media query */
@Test
public void testInMediaQuery() throws Exception {
addNsTokens(tokens().token("normal", "only screen and (max-width: 999px) and (orientation: portrait)"));
String src = "@media token(normal) {\n" +
" .THIS {color:red} \n" +
"}";
String expected = "@media only screen and (max-width: 999px) and (orientation: portrait) {\n" +
" .THIS {color:red}\n" +
"}";
assertStyle(addStyleDef(src), expected);
}
/** token in media query has error */
@Test
public void testInMediaQueryHasError() throws Exception {
addNsTokens(tokens().token("normal", "screen (max-width: 999px)"));
String src = "@media token(normal) {\n" +
" .THIS {color:red} \n" +
"}";
try {
definitionService.getDefinition(addStyleDef(src)).getCode();
fail("expected exception");
} catch (Exception e) {
checkExceptionContains(e, AuraRuntimeException.class, "Expected to find keyword");
}
}
/** token in media query cannot evaluate to an empty string */
@Test
public void testInMediaQueryEvalsToEmpty() throws Exception {
addNsTokens(tokens().token("normal", ""));
String src = "@media token(normal) {\n" +
" .THIS {color:red} \n" +
"}";
try {
definitionService.getDefinition(addStyleDef(src)).getCode();
fail("expected exception");
} catch (Exception e) {
checkExceptionContains(e, AuraRuntimeException.class, "must not evaluate to an empty string");
}
}
/** token values with auto-prefixable properties */
@Test
public void testTokenAutoPrefix() throws Exception {
addNsTokens(tokens()
.token("userSelect", "none")
.token("gradient", "red, yellow"));
String src = ".THIS {\n" +
" user-select: t(userSelect);\n" +
" background: token('linear-gradient(' + gradient + ')');\n" +
"}";
String expected = ".THIS {" +
"-webkit-user-select:none; " +
"-moz-user-select:none; " +
"-ms-user-select:none; " +
"user-select:none; " +
"background:linear-gradient(red, yellow)" +
"}";
assertStyle(addStyleDef(src), expected);
}
/** test that cross referencing own token works */
@Test
public void testSelfCrossReference() throws Exception {
addNsTokens(tokens().token("color", "red").token("bg", "{!color}"));
String src = ".THIS {background: token(bg);}";
assertStyle(addStyleDef(src), ".THIS {background:red}");
}
/** test that multiple level cross references work */
@Test
public void testMultiCrossReference() throws Exception {
addNsTokens(tokens()
.token("bright", "purple")
.token("color", "{!bright}")
.token("bg", "{!color}"));
String src = ".THIS {background: token(bg);}";
assertStyle(addStyleDef(src), ".THIS {background:purple}");
}
/** test that the correct app tokens are used under various combinations of which files exist */
@Test
public void testAppTokenOverrides() throws Exception {
// component in a different ns, with ns default tokens
addNsTokensOtherNamespace(tokens().token("color", "red"));
DefDescriptor<StyleDef> toTest = addStyleDefOtherNamespace(".THIS {color: token(color)}");
// our app is in a different ns from the cmp, specifying explicit app tokens
DefDescriptor<TokensDef> override = addSeparateTokens(tokens().token("color", "blue"));
addContextApp(String.format("<aura:application tokens='%s'/>", override.getDescriptorName()));
// the explicit tokens should override the namespace-defaults
assertStyle(toTest, ".THIS {color:blue}");
}
/** test that the explicit override isn't confused with the app's ns default */
@Test
public void testAppTokenOverridesAndNsDefault() throws Exception {
// component in a different ns, with ns default tokens
addNsTokensOtherNamespace(tokens().token("color", "red"));
DefDescriptor<StyleDef> toTest = addStyleDefOtherNamespace(".THIS {color: token(color)}");
// a namespace-default exists in the app's namespace
addNsTokens(tokens().token("color", "blue"));
// our app is in a different ns from the cmp, specifying explicit tokens
DefDescriptor<TokensDef> override = addSeparateTokens(tokens().token("color", "green"));
addContextApp(String.format("<aura:application tokens='%s'/>", override.getDescriptorName()));
// the explicit tokens def outranks the cmp's namespace-default and the app's namespace-default
assertStyle(toTest, ".THIS {color:green}");
}
/** test that the overrides don't prevent usage of ns-default when the token is not present in the overrides */
@Test
public void testAppTokenOverridesNotRelevant() throws Exception {
// component in a different ns, with ns default tokens
addNsTokensOtherNamespace(tokens().token("color", "red"));
DefDescriptor<StyleDef> toTest = addStyleDefOtherNamespace(".THIS {color: token(color)}");
// our app is in a different ns from the cmp, specifying explicit tokens
DefDescriptor<TokensDef> override = addSeparateTokens(tokens().token("font", "arial"));
addContextApp(String.format("<aura:application tokens='%s'/>", override.getDescriptorName()));
// the cmp should fallback to it's namespace-default, without an error
assertStyle(toTest, ".THIS {color:red}");
}
/** test that inherited tokens are applied */
@Test
public void testAppTokenOverridesInheritedToken() throws Exception {
// component in a different ns, with ns default tokens
addNsTokensOtherNamespace(tokens().token("color", "red"));
DefDescriptor<StyleDef> toTest = addStyleDefOtherNamespace(".THIS {color: token(color)}");
// the explicit tokens will inherit from this one
DefDescriptor<TokensDef> parent = addSeparateTokens(tokens().token("color", "blue"));
// our app is in a different ns from the cmp, specifying explicit app tokens
DefDescriptor<TokensDef> override = addSeparateTokens(tokens().parent(parent).token("font", "arial"));
addContextApp(String.format("<aura:application tokens='%s'/>", override.getDescriptorName()));
// the inherited value should be used
assertStyle(toTest, ".THIS {color:blue}");
}
/** test when app tokens inherit from the other cmp's namespace-defaults */
@Test
public void testAppTokenOverridesCircleAround() throws Exception {
// component in a different ns, with ns default tokens
DefDescriptor<TokensDef> other = addNsTokensOtherNamespace(tokens().token("color", "red"));
DefDescriptor<StyleDef> toTest = addStyleDefOtherNamespace(".THIS {color: token(color)}");
// the tokens to use as an override inherits from the cmp's namespace-default tokens
DefDescriptor<TokensDef> override = addNsTokens(tokens().parent(other));
// our app is in a different ns from the cmp
addContextApp(String.format("<aura:application tokens='%s'/>", override.getDescriptorName()));
// the effect should be the same as if the override wasn't there
assertStyle(toTest, ".THIS {color:red}");
}
/** test when app tokens inherit from the other cmp's namespace-defaults, but then changes the val */
@Test
public void testAppTokenOverridesCircleAroundChanged() throws Exception {
// component in a different ns, with ns default tokens
DefDescriptor<TokensDef> other = addNsTokensOtherNamespace(tokens().token("color", "red"));
DefDescriptor<StyleDef> toTest = addStyleDefOtherNamespace(".THIS {color: token(color)}");
// the tokens to use as an override inherits from the cmp's namespace-default tokens
DefDescriptor<TokensDef> override = addNsTokens(tokens().parent(other).token("color", "blue"));
// our app is in a different ns from the cmp
addContextApp(String.format("<aura:application tokens='%s'/>", override.getDescriptorName()));
// the app's implicit tokens value should be used
assertStyle(toTest, ".THIS {color:blue}");
}
// Descriptor provider used by testAppExplicitTokensUsesProvider; resolves to a
// fixed tokens def registered in the test namespace.
@Provider
public static final class Provider1 implements TokenDescriptorProvider {
@Override
public DefDescriptor<TokensDef> provide() throws QuickFixException {
return Aura.getDefinitionService().getDefDescriptor("tokenProviderTest:tokenResolutionProvider", TokensDef.class);
}
}
/** test that a provided def works */
@Test
public void testAppExplicitTokensUsesProvider() throws Exception {
addNsTokens(tokens().token("color", "red"));
DefDescriptor<StyleDef> styleDef = addStyleDef(".THIS {color:t(color)}");
DefDescriptor<TokensDef> override = addSeparateTokens(tokens().descriptorProvider(
"java://" + Provider1.class.getName()));
addContextApp(String.format("<aura:application tokens='%s'/>", override.getDescriptorName()));
// should get the value from the provided def
assertStyle(styleDef, ".THIS {color:blue}");
}
// Map provider used by testAppExplicitTokensUsesMapProvider.
@Provider
public static final class Provider2 implements TokenMapProvider {
@Override
public Map<String, String> provide() throws QuickFixException {
return ImmutableMap.of("color", "green");
}
}
@Test
public void testAppExplicitTokensUsesMapProvider() throws Exception {
addNsTokens(tokens().token("color", "red"));
DefDescriptor<StyleDef> styleDef = addStyleDef(".THIS {color:t(color)}");
DefDescriptor<TokensDef> override = addSeparateTokens(tokens().mapProvider(
"java://" + Provider2.class.getName()));
addContextApp(String.format("<aura:application tokens='%s'/>", override.getDescriptorName()));
// should get the value from the provided def
assertStyle(styleDef, ".THIS {color:green}");
}
@Provider
public static final class TokenComboTestProvider implements TokenDescriptorProvider {
@Override
public DefDescriptor<TokensDef> provide() throws QuickFixException {
return Aura.getDefinitionService().getDefDescriptor("tokenProviderTest:tokenComboTest", TokensDef.class);
}
}
@Provider
public static final class TokenComboTestP1 implements TokenMapProvider {
@Override
public Map<String, String> provide() throws QuickFixException {
return ImmutableMap.of("font", "trebuchet", "margin", "20px");
}
}
@Provider
public static final class TokenComboTestP2 implements TokenMapProvider {
@Override
public Map<String, String> provide() throws QuickFixException {
return ImmutableMap.of("font", "georgia");
}
}
@Test
public void testVariousTokenTypesCombination() throws Exception {
// "*" next to the ones that should be used
// namespace default (color, font, padding, margin, borderRadius*)
// app override static tokens (color, font, padding*)
// app override provided tokens (color*)
// app override map tokens (font, margin*)
// app override map tokens (font*)
addNsTokens(tokens()
.token("color", "red")
.token("font", "arial")
.token("padding", "5px")
.token("margin", "7px")
.token("borderRadius", "3px"));
DefDescriptor<TokensDef> staticOverride = addSeparateTokens(tokens()
.token("color", "green")
.token("font", "times")
.token("padding", "12px"));
DefDescriptor<TokensDef> usesProvider = addSeparateTokens(tokens().descriptorProvider(
"java://" + TokenComboTestProvider.class.getName()));
DefDescriptor<TokensDef> mapTokens1 = addSeparateTokens(tokens().mapProvider(
"java://" + TokenComboTestP1.class.getName()));
DefDescriptor<TokensDef> mapTokens2 = addSeparateTokens(tokens().mapProvider(
"java://" + TokenComboTestP2.class.getName()));
String src = ".THIS {color: t(color); " +
"font-family: t(font); " +
"padding: t(padding); " +
"margin: t(margin); " +
"border-radius: t(borderRadius);}";
String expected = ".THIS {color:yellow; " +
"font-family:georgia; " +
"padding:12px; " +
"margin:20px; " +
"border-radius:3px}";
DefDescriptor<StyleDef> styleDef = addStyleDef(src);
List<DefDescriptor<TokensDef>> tokens = Lists.newArrayList();
tokens.add(staticOverride);
tokens.add(usesProvider);
tokens.add(mapTokens1);
tokens.add(mapTokens2);
// later entries in the tokens list take precedence over earlier ones
addContextApp(String.format("<aura:application tokens='%s'/>", Joiner.on(", ").join(tokens)));
assertStyle(styleDef, expected);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.entries;
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import org.apache.geode.internal.InternalStatisticsDisabledException;
import org.apache.geode.internal.cache.DiskId;
import org.apache.geode.internal.cache.DiskStoreImpl;
import org.apache.geode.internal.cache.InternalRegion;
import org.apache.geode.internal.cache.PlaceHolderDiskRegion;
import org.apache.geode.internal.cache.RegionEntry;
import org.apache.geode.internal.cache.RegionEntryContext;
import org.apache.geode.internal.cache.Token;
import org.apache.geode.internal.cache.eviction.EvictionController;
import org.apache.geode.internal.cache.eviction.EvictionNode;
import org.apache.geode.internal.cache.persistence.DiskRecoveryStore;
import org.apache.geode.internal.offheap.OffHeapRegionEntryHelper;
import org.apache.geode.internal.offheap.annotations.Released;
import org.apache.geode.internal.offheap.annotations.Retained;
import org.apache.geode.internal.offheap.annotations.Unretained;
import org.apache.geode.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;
/*
* macros whose definition changes this class:
*
* disk: DISK lru: LRU stats: STATS versioned: VERSIONED offheap: OFFHEAP
*
* One of the following key macros must be defined:
*
* key object: KEY_OBJECT key int: KEY_INT key long: KEY_LONG key uuid: KEY_UUID key string1:
* KEY_STRING1 key string2: KEY_STRING2
*/
/**
* Do not modify this class. It was generated. Instead modify LeafRegionEntry.cpp and then run
* ./dev-tools/generateRegionEntryClasses.sh (it must be run from the top level directory).
*/
public class VMStatsDiskLRURegionEntryOffHeapLongKey extends VMStatsDiskLRURegionEntryOffHeap {
// --------------------------------------- common fields ----------------------------------------
private static final AtomicLongFieldUpdater<VMStatsDiskLRURegionEntryOffHeapLongKey> LAST_MODIFIED_UPDATER =
AtomicLongFieldUpdater.newUpdater(VMStatsDiskLRURegionEntryOffHeapLongKey.class,
"lastModified");
// Cached hash of the key; exposed via getEntryHash/setEntryHash for the entry map.
protected int hash;
// Next entry in the map's bucket chain; exposed via getNextEntry/setNextEntry.
private HashEntry<Object, Object> nextEntry;
@SuppressWarnings("unused")
private volatile long lastModified;
// --------------------------------------- offheap fields ---------------------------------------
/**
 * All access done using OFF_HEAP_ADDRESS_UPDATER so it is used even though the compiler can not
 * tell it is.
 */
@SuppressWarnings("unused")
@Retained
@Released
private volatile long offHeapAddress;
/**
 * I needed to add this because I wanted clear to call setValue which normally can only be called
 * while the re is synced. But if I sync in that code it causes a lock ordering deadlock with the
 * disk regions because they also get a rw lock in clear. Some hardware platforms do not support
 * CAS on a long. If gemfire is run on one of those the AtomicLongFieldUpdater does a sync on the
 * RegionEntry and we will once again be deadlocked. I don't know if we support any of the
 * hardware platforms that do not have a 64bit CAS. If we do then we can expect deadlocks on disk
 * regions.
 */
private static final AtomicLongFieldUpdater<VMStatsDiskLRURegionEntryOffHeapLongKey> OFF_HEAP_ADDRESS_UPDATER =
AtomicLongFieldUpdater.newUpdater(VMStatsDiskLRURegionEntryOffHeapLongKey.class,
"offHeapAddress");
// ---------------------------------------- disk fields -----------------------------------------
/**
 * @since GemFire 5.1
 */
protected DiskId id;
// --------------------------------------- stats fields -----------------------------------------
// hitCount/missCount are mutated through the atomic updaters declared below.
private volatile long lastAccessed;
private volatile int hitCount;
private volatile int missCount;
private static final AtomicIntegerFieldUpdater<VMStatsDiskLRURegionEntryOffHeapLongKey> HIT_COUNT_UPDATER =
AtomicIntegerFieldUpdater.newUpdater(VMStatsDiskLRURegionEntryOffHeapLongKey.class,
"hitCount");
private static final AtomicIntegerFieldUpdater<VMStatsDiskLRURegionEntryOffHeapLongKey> MISS_COUNT_UPDATER =
AtomicIntegerFieldUpdater.newUpdater(VMStatsDiskLRURegionEntryOffHeapLongKey.class,
"missCount");
// --------------------------------------- key fields -------------------------------------------
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// Primitive long key (this leaf variant is specialized for long keys).
private final long key;
/**
 * Creates an entry for {@code key}. A RecoveredEntry value is withheld from the
 * superclass (null is passed instead) and handled by initialize/diskInitialize.
 */
public VMStatsDiskLRURegionEntryOffHeapLongKey(final RegionEntryContext context, final long key,
@Retained final Object value) {
super(context, (value instanceof RecoveredEntry ? null : value));
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
initialize(context, value);
this.key = key;
}
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Off-heap value plumbing: every read/write of this entry's value is delegated to
  // OffHeapRegionEntryHelper.
  @Override
  public Token getValueAsToken() {
    return OffHeapRegionEntryHelper.getValueAsToken(this);
  }
  @Override
  protected Object getValueField() {
    return OffHeapRegionEntryHelper._getValue(this);
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  @Unretained
  protected void setValueField(@Unretained final Object value) {
    OffHeapRegionEntryHelper.setValue(this, value);
  }
  // @Retained: per the annotation, the returned value is reference-counted — presumably the
  // caller must release it; confirm against OffHeapRegionEntryHelper._getValueRetain.
  @Override
  @Retained
  public Object getValueRetain(final RegionEntryContext context, final boolean decompress) {
    return OffHeapRegionEntryHelper._getValueRetain(this, decompress, context);
  }
  @Override
  public long getAddress() {
    return OFF_HEAP_ADDRESS_UPDATER.get(this);
  }
  // CAS on the off-heap address word; returns false when another thread changed it first.
  @Override
  public boolean setAddress(final long expectedAddress, long newAddress) {
    return OFF_HEAP_ADDRESS_UPDATER.compareAndSet(this, expectedAddress, newAddress);
  }
  @Override
  @Released
  public void release() {
    OffHeapRegionEntryHelper.releaseEntry(this);
  }
  @Override
  public void returnToPool() {
    // never implemented
  }
  // LAST_MODIFIED_UPDATER is declared elsewhere in the hierarchy (not visible in this file
  // region); it gives atomic access to the inherited last-modified field.
  @Override
  protected long getLastModifiedField() {
    return LAST_MODIFIED_UPDATER.get(this);
  }
  @Override
  protected boolean compareAndSetLastModifiedField(final long expectedValue, final long newValue) {
    return LAST_MODIFIED_UPDATER.compareAndSet(this, expectedValue, newValue);
  }
  // Accessors for the hash-table plumbing (hash code and collision-chain link); the backing
  // fields are inherited from a superclass.
  @Override
  public int getEntryHash() {
    return this.hash;
  }
  @Override
  protected void setEntryHash(final int hash) {
    this.hash = hash;
  }
  @Override
  public HashEntry<Object, Object> getNextEntry() {
    return this.nextEntry;
  }
  @Override
  public void setNextEntry(final HashEntry<Object, Object> nextEntry) {
    this.nextEntry = nextEntry;
  }
  // ----------------------------------------- disk code ------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  /**
   * Sets up disk state for this entry. A DiskId is created eagerly only for backup (persistent)
   * regions; overflow-only regions defer it (see {@link #setDelayedDiskId}).
   */
  protected void initialize(final RegionEntryContext context, final Object value) {
    boolean isBackup;
    if (context instanceof InternalRegion) {
      isBackup = ((InternalRegion) context).getDiskRegion().isBackup();
    } else if (context instanceof PlaceHolderDiskRegion) {
      // NOTE(review): placeholder regions are treated as always-backup — presumably they only
      // occur during recovery of persistent data; confirm.
      isBackup = true;
    } else {
      throw new IllegalArgumentException("expected a InternalRegion or PlaceHolderDiskRegion");
    }
    // Delay the initialization of DiskID if overflow only
    if (isBackup) {
      diskInitialize(context, value);
    }
  }
  /**
   * Recomputes this entry's eviction size and returns the delta from the previous size.
   */
  @Override
  public synchronized int updateAsyncEntrySize(final EvictionController evictionController) {
    int oldSize = getEntrySize();
    // NOTE(review): the value argument is null here, unlike updateEntrySize — presumably the
    // async path sizes only the key; confirm against EvictionController.entrySize.
    int newSize = evictionController.entrySize(getKeyForSizing(), null);
    setEntrySize(newSize);
    int delta = newSize - oldSize;
    return delta;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public DiskId getDiskId() {
    return this.id;
  }
  // Adopts the DiskId of the entry this one replaces.
  @Override
  public void setDiskId(final RegionEntry oldEntry) {
    this.id = ((DiskEntry) oldEntry).getDiskId();
  }
  // Eagerly creates the DiskId (persistent/backup case) and hands the value to the disk helper.
  private void diskInitialize(final RegionEntryContext context, final Object value) {
    DiskRecoveryStore diskRecoveryStore = (DiskRecoveryStore) context;
    DiskStoreImpl diskStore = diskRecoveryStore.getDiskStore();
    long maxOplogSize = diskStore.getMaxOplogSize();
    // get appropriate instance of DiskId implementation based on maxOplogSize
    this.id = DiskId.createDiskId(maxOplogSize, true, diskStore.needsLinkedList());
    Helper.initialize(this, diskRecoveryStore, value);
  }
  // --------------------------------------- eviction code ----------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Lazily creates the DiskId for overflow-only regions (second createDiskId arg is false here,
  // true in diskInitialize).
  @Override
  public void setDelayedDiskId(final DiskRecoveryStore diskRecoveryStore) {
    DiskStoreImpl diskStore = diskRecoveryStore.getDiskStore();
    long maxOplogSize = diskStore.getMaxOplogSize();
    this.id = DiskId.createDiskId(maxOplogSize, false, diskStore.needsLinkedList());
  }
  @Override
  public synchronized int updateEntrySize(final EvictionController evictionController) {
    // OFFHEAP: getValue ok w/o incing refcount because we are synced and only getting the size
    return updateEntrySize(evictionController, getValue());
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  /**
   * Recomputes this entry's eviction size from the given value and returns the delta from the
   * previously recorded size.
   */
  @Override
  public synchronized int updateEntrySize(final EvictionController evictionController,
      final Object value) {
    int oldSize = getEntrySize();
    int newSize = evictionController.entrySize(getKeyForSizing(), value);
    setEntrySize(newSize);
    int delta = newSize - oldSize;
    return delta;
  }
  // LRU state is kept as bits in an inherited flags word. The unset* methods pass the
  // complement of the bit to clearBits — presumably clearBits ANDs its argument in; confirm
  // in the superclass before changing.
  @Override
  public boolean isRecentlyUsed() {
    return areAnyBitsSet(RECENTLY_USED);
  }
  @Override
  public void setRecentlyUsed(RegionEntryContext context) {
    // Only count the transition, not repeated touches.
    if (!isRecentlyUsed()) {
      setBits(RECENTLY_USED);
      context.incRecentlyUsed();
    }
  }
  @Override
  public void unsetRecentlyUsed() {
    clearBits(~RECENTLY_USED);
  }
  @Override
  public boolean isEvicted() {
    return areAnyBitsSet(EVICTED);
  }
  @Override
  public void setEvicted() {
    setBits(EVICTED);
  }
  @Override
  public void unsetEvicted() {
    clearBits(~EVICTED);
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Intrusive doubly-linked-list node used by the LRU eviction list, plus the cached entry size
  // computed by updateEntrySize.
  private EvictionNode nextEvictionNode;
  private EvictionNode previousEvictionNode;
  private int size;
  @Override
  public void setNext(final EvictionNode nextEvictionNode) {
    this.nextEvictionNode = nextEvictionNode;
  }
  @Override
  public EvictionNode next() {
    return this.nextEvictionNode;
  }
  @Override
  public void setPrevious(final EvictionNode previousEvictionNode) {
    this.previousEvictionNode = previousEvictionNode;
  }
  @Override
  public EvictionNode previous() {
    return this.previousEvictionNode;
  }
  @Override
  public int getEntrySize() {
    return this.size;
  }
  protected void setEntrySize(final int size) {
    this.size = size;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  /** Returns {@code null}: inline keys contribute no separate heap object to size. */
  @Override
  public Object getKeyForSizing() {
    // inline keys always report null for sizing since the size comes from the entry size
    return null;
  }
  // ---------------------------------------- stats code ------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  /** Records a read at {@code time}, bumping the hit or miss counter as appropriate. */
  @Override
  public void updateStatsForGet(final boolean isHit, final long time) {
    setLastAccessed(time);
    if (isHit) {
      incrementHitCount();
    } else {
      incrementMissCount();
    }
  }
  @Override
  protected void setLastModifiedAndAccessedTimes(final long lastModified, final long lastAccessed) {
    _setLastModified(lastModified);
    // DISABLE_ACCESS_TIME_UPDATE_ON_PUT is a flag declared elsewhere — presumably a global
    // configuration switch that suppresses access-time updates on writes.
    if (!DISABLE_ACCESS_TIME_UPDATE_ON_PUT) {
      setLastAccessed(lastAccessed);
    }
  }
  @Override
  public long getLastAccessed() throws InternalStatisticsDisabledException {
    return this.lastAccessed;
  }
  @Override
  public void setLastAccessed(final long lastAccessed) {
    this.lastAccessed = lastAccessed;
  }
  // The counters are stored as ints but exposed as longs; masking with 0xFFFFFFFFL treats the
  // int bits as unsigned, so a wrapped counter never reads negative.
  @Override
  public long getHitCount() throws InternalStatisticsDisabledException {
    return this.hitCount & 0xFFFFFFFFL;
  }
  @Override
  public long getMissCount() throws InternalStatisticsDisabledException {
    return this.missCount & 0xFFFFFFFFL;
  }
  private void incrementHitCount() {
    HIT_COUNT_UPDATER.incrementAndGet(this);
  }
  private void incrementMissCount() {
    MISS_COUNT_UPDATER.incrementAndGet(this);
  }
  @Override
  public void resetCounts() throws InternalStatisticsDisabledException {
    HIT_COUNT_UPDATER.set(this, 0);
    MISS_COUNT_UPDATER.set(this, 0);
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  /** Transaction destroy hook: stamp both timestamps and reset the read counters. */
  @Override
  public void txDidDestroy(long timeStamp) {
    setLastModified(timeStamp);
    setLastAccessed(timeStamp);
    // NOTE(review): plain volatile writes bypass the atomic updaters, so a concurrent
    // increment could be lost — presumably acceptable for a destroy-time reset.
    this.hitCount = 0;
    this.missCount = 0;
  }
  @Override
  public boolean hasStats() {
    return true;
  }
  // ----------------------------------------- key code -------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  /** Returns the inline primitive key (auto-boxed to a {@link Long}). */
  @Override
  public Object getKey() {
    return this.key;
  }
  /** True only for a {@link Long} whose value equals the inline key. */
  @Override
  public boolean isKeyEqual(final Object key) {
    if (key instanceof Long) {
      return ((Long) key).longValue() == this.key;
    }
    return false;
  }
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}
| |
/*
* Copyright 2005 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.google.javascript.rhino.Node;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link RenameVars}. */
@RunWith(JUnit4.class)
public final class RenameVarsTest extends CompilerTestCase {
  private static final String DEFAULT_PREFIX = "";
  private String prefix = DEFAULT_PREFIX;
  // Rename map carried over from a "previous compile"; tests assign it to exercise stable
  // (incremental) renaming.
  private VariableMap previouslyUsedMap =
      new VariableMap(ImmutableMap.<String, String>of());
  // Pass instance from the most recent run; tests read its VariableMap back out.
  private RenameVars renameVars;
  // Per-test configuration flags; reset to defaults in setUp().
  private boolean withClosurePass = false;
  private boolean localRenamingOnly = false;
  private boolean preserveFunctionExpressionNames = false;
  private boolean useGoogleCodingConvention = true;
  private boolean generatePseudoNames = false;
  private boolean shouldShadow = false;
  private boolean preferStableNames = false;
  private boolean withNormalize = false;
  // NameGenerator to use, or null for a default.
  private DefaultNameGenerator nameGenerator = null;
  /** Selects the coding convention the compiler uses for this test run. */
  @Override
  protected CodingConvention getCodingConvention() {
    if (useGoogleCodingConvention) {
      return new GoogleCodingConvention();
    } else {
      return CodingConventions.getDefault();
    }
  }
@Override
protected CompilerPass getProcessor(Compiler compiler) {
CompilerPass pass;
if (withClosurePass) {
pass = new ClosurePassAndRenameVars(compiler);
} else if (nameGenerator != null) {
pass = renameVars = new RenameVars(compiler, prefix,
localRenamingOnly, preserveFunctionExpressionNames,
generatePseudoNames, shouldShadow, preferStableNames,
previouslyUsedMap, null, null, nameGenerator);
} else {
pass = renameVars = new RenameVars(compiler, prefix,
localRenamingOnly, preserveFunctionExpressionNames,
generatePseudoNames, shouldShadow, preferStableNames,
previouslyUsedMap, null, null, new DefaultNameGenerator());
}
if (withNormalize) {
// Don't use the standard CompilerTestCase normalization options
// as renaming is a post denormalization operation, but we do still
// want to run the normal normalizations on the input in some cases.
pass = new NormalizePassWrapper(compiler, pass);
}
return pass;
}
  /** Run the pass a single time per test (repeating would rename already-renamed output). */
  @Override
  protected int getNumRepetitions() {
    return 1;
  }
@Override
@Before
public void setUp() throws Exception {
super.setUp();
disableValidateAstChangeMarking();
previouslyUsedMap = new VariableMap(ImmutableMap.<String, String>of());
prefix = DEFAULT_PREFIX;
withClosurePass = false;
withNormalize = false;
localRenamingOnly = false;
preserveFunctionExpressionNames = false;
generatePseudoNames = false;
shouldShadow = false;
preferStableNames = false;
nameGenerator = null;
}
  // Baseline behavior: globals and locals are renamed in order of occurrence; identical local
  // shapes in sibling scopes reuse the same short names.
  @Test
  public void testRenameSimple() {
    test("function Foo(v1, v2) {return v1;} Foo();",
        "function a(b, c) {return b;} a();");
  }
  @Test
  public void testRenameGlobals() {
    test("var Foo; var Bar, y; function x() { Bar++; }",
        "var a; var b, c; function d() { b++; }");
  }
  @Test
  public void testRenameLocals() {
    test("(function (v1, v2) {}); (function (v3, v4) {});",
        "(function (a, b) {}); (function (a, b) {});");
    test("function f1(v1, v2) {}; function f2(v3, v4) {};",
        "function c(a, b) {}; function d(a, b) {};");
  }
  // let/const declarations are renamed just like var.
  @Test
  public void testRenameLocals_let() {
    test(
        "(function () { let var1 = 0; let another = 1; });",
        "(function () { let a = 0; let b = 1; });");
  }
  @Test
  public void testRenameLocals_const() {
    test(
        "(function () { const var1 = 0; const another = 1; });",
        "(function () { const a = 0; const b = 1; });");
  }
  // With preferStableNames, names that are already minimal are left untouched.
  @Test
  public void testRenameLocalsToSame() {
    preferStableNames = true;
    testSame("(function(a) {})");
    testSame("(function(a, b) {})");
    testSame("(function(a, b, c) {})");
    testSame("(function() { var a; })");
    testSame("(function() { var a, b; })");
    testSame("(function() { var a, b, c; })");
  }
  // Duplicate global declarations must map to the same new name; with localRenamingOnly the
  // global keeps its original name and only parameters are renamed.
  @Test
  public void testRenameRedeclaredGlobals() {
    test(
        lines(
            "function f1(v1, v2) {f1()};",
            "/** @suppress {duplicate} */",
            "function f1(v3, v4) {f1()};"),
        lines(
            "function a(b, c) {a()};",
            "/** @suppress {duplicate} */",
            "function a(b, c) {a()};"));
    localRenamingOnly = true;
    test(
        lines(
            "function f1(v1, v2) {f1()};",
            "/** @suppress {duplicate} */",
            "function f1(v3, v4) {f1()};"),
        lines(
            "function f1(a, b) {f1()};",
            "/** @suppress {duplicate} */",
            "function f1(a, b) {f1()};"));
  }
  // A named function expression and its outer var get the same renaming by default.
  @Test
  public void testRecursiveFunctions1() {
    test("var walk = function walk(node, aFunction) {" +
        " walk(node, aFunction);" +
        "};",
        "var a = function a(b, c) {" +
        " a(b, c);" +
        "};");
    localRenamingOnly = true;
    test("var walk = function walk(node, aFunction) {" +
        " walk(node, aFunction);" +
        "};",
        "var walk = function walk(a, b) {" +
        " walk(a, b);" +
        "};");
  }
  // With preserveFunctionExpressionNames the inner function name must survive.
  @Test
  public void testRecursiveFunctions2() {
    preserveFunctionExpressionNames = true;
    test("var walk = function walk(node, aFunction) {" +
        " walk(node, aFunction);" +
        "};",
        "var c = function walk(a, b) {" +
        " walk(a, b);" +
        "};");
    localRenamingOnly = true;
    test("var walk = function walk(node, aFunction) {" +
        " walk(node, aFunction);" +
        "};",
        "var walk = function walk(a, b) {" +
        " walk(a, b);" +
        "};");
  }
  // Locals must not be renamed onto a global already using that short name in scope.
  @Test
  public void testRenameLocalsClashingWithGlobals() {
    test("function a(v1, v2) {return v1;} a();",
        "function a(b, c) {return b;} a();");
  }
  @Test
  public void testRenameNested() {
    test("function f1(v1, v2) { (function(v3, v4) {}) }",
        "function a(b, c) { (function(d, e) {}) }");
    test("function f1(v1, v2) { function f2(v3, v4) {} }",
        "function a(b, c) { function d(e, f) {} }");
  }
  // "Bleeding" function-expression names leak into the enclosing scope on old IE, so sibling
  // named function expressions must receive distinct names.
  @Test
  public void testBleedingRecursiveFunctions1() {
    // On IE, bleeding functions will interfere with each other if
    // they are in the same scope. In the below example, we want to be
    // sure that a and b get separate names.
    test("var x = function a(x) { return x ? 1 : a(1); };" +
        "var y = function b(x) { return x ? 2 : b(2); };",
        "var c = function b(a) { return a ? 1 : b(1); };" +
        "var e = function d(a) { return a ? 2 : d(2); };");
  }
  @Test
  public void testBleedingRecursiveFunctions2() {
    test(
        lines(
            "function f() {",
            "  var x = function a(x) { return x ? 1 : a(1); };",
            "  var y = function b(x) { return x ? 2 : b(2); };",
            "}"),
        lines(
            "function d() {",
            "  var e = function a(b) { return b ? 1 : a(1); };",
            "  var f = function c(a) { return a ? 2 : c(2); };",
            "}"));
  }
  @Test
  public void testBleedingRecursiveFunctions3() {
    test(
        lines(
            "function f() {",
            "  var x = function a(x) { return x ? 1 : a(1); };",
            "  var y = function b(x) { return x ? 2 : b(2); };",
            "  var z = function c(x) { return x ? y : c(2); };",
            "}"),
        lines(
            "function f() {",
            "  var g = function a(c) { return c ? 1 : a(1); };",
            "  var d = function b(a) { return a ? 2 : b(2); };",
            "  var h = function e(b) { return b ? d : e(2); };",
            "}"));
  }
  @Test
  public void testBleedingFunctionInBlocks() {
    test(lines(
            "if (true) {",
            "  var x = function a(x) {return x;}",
            "}"),
        lines(
            "if (true) {",
            "  var c = function b(a) {return a;}",
            "}"));
  }
  // Names declared in externs, exported names (_foo), and `arguments` must never be renamed,
  // and generated names must avoid extern names.
  @Test
  public void testRenameWithExterns1() {
    String externs = "var foo;";
    test(
        externs(externs),
        srcs("var bar; foo(bar);"),
        expected("var a; foo(a);"));
  }
  @Test
  public void testRenameWithExterns2() {
    String externs = "var a;";
    test(
        externs(externs),
        srcs("var b = 5"),
        expected("var b = 5"));
  }
  @Test
  public void testDoNotRenameExportedName() {
    testSame("_foo()");
  }
  @Test
  public void testDoNotRenameArguments() {
    testSame("function a() { arguments; }");
  }
  @Test
  public void testRenameWithNameOverlap() {
    testSame("var a = 1; var b = 2; b + b;");
  }
  // A configured prefix is applied to global names; generated names must skip names the prefix
  // occupies (here 'a' forces the next global to 'aa').
  @Test
  public void testRenameWithPrefix1() {
    prefix = "PRE_";
    test("function Foo(v1, v2) {return v1} Foo();",
        "function PRE_(a, b) {return a} PRE_();");
    prefix = DEFAULT_PREFIX;
  }
  @Test
  public void testRenameWithPrefix2() {
    prefix = "PRE_";
    test("function Foo(v1, v2) {var v3 = v1 + v2; return v3;} Foo();",
        "function PRE_(a, b) {var c = a + b; return c;} PRE_();");
    prefix = DEFAULT_PREFIX;
  }
  @Test
  public void testRenameWithPrefix3() {
    prefix = "a";
    test("function Foo() {return 1;}" +
        "function Bar() {" +
        " var a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z," +
        " A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,aa,ab;" +
        " Foo();" +
        "} Bar();",
        "function a() {return 1;}" +
        "function aa() {" +
        " var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A," +
        " B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$,ba,ca;" +
        " a();" +
        "} aa();");
    prefix = DEFAULT_PREFIX;
  }
  // Short names are handed out strictly in order of first occurrence in the source.
  @Test
  public void testNamingBasedOnOrderOfOccurrence() {
    test("var q,p,m,n,l,k; " +
        "try { } catch(r) {try {} catch(s) {}}; var t = q + q;",
        "var a,b,c,d,e,f; " +
        "try { } catch(g) {try {} catch(h) {}}; var i = a + a;"
    );
    test("(function(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z," +
        "a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,$){});" +
        "var a4,a3,a2,a1,b4,b3,b2,b1,ab,ac,ad,fg;function foo(){};",
        "(function(a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z," +
        "A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$){});" +
        "var aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la;function ma(){};");
  }
  // Catch parameters have block lifetime, so non-overlapping catch scopes may share one name.
  @Test
  public void testTryCatchLifeTime() {
    test("var q,p,m,n,l,k; " +
        "(function (r) {}); try { } catch(s) {}; var t = q + q;",
        "var a,c,d,e,f,g; " +
        "(function(b) {}); try { } catch(b) {}; var h = a + a;"
    );
    test("try {try {} catch(p) {}} catch(s) {};",
        "try {try {} catch(a) {}} catch(a) {};"
    );
    test(
        lines(
            "try {",
            " try { ",
            " } catch(p) {",
            " try { ",
            " } catch(r) {}",
            " }",
            "} catch(s) {",
            " try { ",
            " } catch(q) {}",
            "};"),
        lines(
            "try {",
            " try { ",
            " } catch(a) {",
            " try { ",
            " } catch(b) {}",
            " }",
            "} catch(a) {",
            " try { ",
            " } catch(b) {}",
            "};"));
  }
  // "Stable" renaming: feeding the previous run's VariableMap back in keeps existing
  // assignments and only extends the map for new names ("L n" keys are locals).
  @Test
  public void testStableRenameSimple() {
    VariableMap expectedVariableMap = makeVariableMap(
        "Foo", "a", "L 0", "b", "L 1", "c");
    testRenameMap("function Foo(v1, v2) {return v1;} Foo();",
        "function a(b, c) {return b;} a();", expectedVariableMap);
    expectedVariableMap = makeVariableMap(
        "Foo", "a", "L 0", "b", "L 1", "c", "L 2", "d");
    testRenameMapUsingOldMap("function Foo(v1, v2, v3) {return v1;} Foo();",
        "function a(b, c, d) {return b;} a();", expectedVariableMap);
  }
  @Test
  public void testStableRenameGlobals() {
    VariableMap expectedVariableMap = makeVariableMap(
        "Foo", "a", "Bar", "b", "y", "c", "x", "d");
    testRenameMap("var Foo; var Bar, y; function x() { Bar++; }",
        "var a; var b, c; function d() { b++; }",
        expectedVariableMap);
    expectedVariableMap = makeVariableMap(
        "Foo", "a", "Bar", "b", "y", "c", "x", "d", "Baz", "f", "L 0" , "e");
    testRenameMapUsingOldMap(
        "var Foo, Baz; var Bar, y; function x(R) { return R + Bar++; }",
        "var a, f; var b, c; function d(e) { return e + b++; }",
        expectedVariableMap);
  }
  @Test
  public void testStableRenameWithPointlesslyAnonymousFunctions() {
    VariableMap expectedVariableMap = makeVariableMap("L 0", "a", "L 1", "b");
    testRenameMap("(function (v1, v2) {}); (function (v3, v4) {});",
        "(function (a, b) {}); (function (a, b) {});",
        expectedVariableMap);
    expectedVariableMap = makeVariableMap("L 0", "a", "L 1", "b", "L 2", "c");
    testRenameMapUsingOldMap("(function (v0, v1, v2) {});" +
        "(function (v3, v4) {});",
        "(function (a, b, c) {});" +
        "(function (a, b) {});",
        expectedVariableMap);
  }
  @Test
  public void testStableRenameLocalsClashingWithGlobals() {
    test("function a(v1, v2) {return v1;} a();",
        "function a(b, c) {return b;} a();");
    previouslyUsedMap = renameVars.getVariableMap();
    test("function bar(){return;}function a(v1, v2) {return v1;} a();",
        "function d(){return;}function a(b, c) {return b;} a();");
  }
  @Test
  public void testStableRenameNested() {
    VariableMap expectedVariableMap = makeVariableMap(
        "f1", "a", "L 0", "b", "L 1", "c", "L 2", "d", "L 3", "e");
    testRenameMap("function f1(v1, v2) { (function(v3, v4) {}) }",
        "function a(b, c) { (function(d, e) {}) }",
        expectedVariableMap);
    expectedVariableMap = makeVariableMap(
        "f1", "a", "L 0", "b", "L 1", "c", "L 2", "d", "L 3", "e", "L 4", "f");
    testRenameMapUsingOldMap(
        "function f1(v1, v2) { (function(v3, v4, v5) {}) }",
        "function a(b, c) { (function(d, e, f) {}) }",
        expectedVariableMap);
  }
  // Stable renaming must still honor externs and keep earlier assignments across runs.
  @Test
  public void testStableRenameWithExterns1() {
    String externs = "var foo;";
    test(
        externs(externs),
        srcs("var bar; foo(bar);"),
        expected("var a; foo(a);"));
    previouslyUsedMap = renameVars.getVariableMap();
    test(
        externs(externs),
        srcs("var bar, baz; foo(bar, baz);"),
        expected("var a, b; foo(a, b);"));
  }
  @Test
  public void testStableRenameWithExterns2() {
    String externs = "var a;";
    test(
        externs(externs),
        srcs("var b = 5"),
        expected("var b = 5"));
    previouslyUsedMap = renameVars.getVariableMap();
    test(
        externs(externs),
        srcs("var b = 5, catty = 9;"),
        expected("var b = 5, c=9;"));
  }
  @Test
  public void testStableRenameWithNameOverlap() {
    testSame("var a = 1; var b = 2; b + b;");
    previouslyUsedMap = renameVars.getVariableMap();
    testSame("var a = 1; var c, b = 2; b + b;");
  }
  @Test
  public void testStableRenameWithAnonymousFunctions() {
    VariableMap expectedVariableMap = makeVariableMap("L 0", "a", "foo", "b");
    testRenameMap("function foo(bar){return bar;}foo(function(h){return h;});",
        "function b(a){return a}b(function(a){return a;})",
        expectedVariableMap);
    expectedVariableMap = makeVariableMap("foo", "b", "L 0", "a", "L 1", "c");
    testRenameMapUsingOldMap(
        "function foo(bar) {return bar;}foo(function(g,h) {return g+h;});",
        "function b(a){return a}b(function(a,c){return a+c;})",
        expectedVariableMap);
  }
  // When a name becomes externed between runs, stable renaming must release its old mapping.
  @Test
  public void testStableRenameSimpleExternsChanges() {
    VariableMap expectedVariableMap = makeVariableMap(
        "Foo", "a", "L 0", "b", "L 1", "c");
    testRenameMap("function Foo(v1, v2) {return v1;} Foo();",
        "function a(b, c) {return b;} a();", expectedVariableMap);
    expectedVariableMap = makeVariableMap("L 0", "b", "L 1", "c", "L 2", "a");
    String externs = "var Foo;";
    testRenameMapUsingOldMap(externs,
        "function Foo(v1, v2, v0) {return v1;} Foo();",
        "function Foo(b, c, a) {return b;} Foo();",
        expectedVariableMap);
  }
  @Test
  public void testStableRenameSimpleLocalNameExterned() {
    test("function Foo(v1, v2) {return v1;} Foo();",
        "function a(b, c) {return b;} a();");
    previouslyUsedMap = renameVars.getVariableMap();
    String externs = "var b;";
    test(
        externs(externs),
        srcs("function Foo(v1, v2) {return v1;} Foo(b);"),
        expected("function a(d, c) {return d;} a(b);"));
  }
  @Test
  public void testStableRenameSimpleGlobalNameExterned() {
    test("function Foo(v1, v2) {return v1;} Foo();",
        "function a(b, c) {return b;} a();");
    previouslyUsedMap = renameVars.getVariableMap();
    String externs = "var Foo;";
    test(
        externs(externs),
        srcs("function Foo(v1, v2, v0) {return v1;} Foo();"),
        expected("function Foo(b, c, a) {return b;} Foo();"));
  }
  @Test
  public void testStableRenameWithPrefix1AndUnstableLocalNames() {
    prefix = "PRE_";
    test("function Foo(v1, v2) {return v1} Foo();",
        "function PRE_(a, b) {return a} PRE_();");
    previouslyUsedMap = renameVars.getVariableMap();
    prefix = "PRE_";
    test("function Foo(v0, v1, v2) {return v1} Foo();",
        "function PRE_(a, b, c) {return b} PRE_();");
  }
  // Prefixed stable renaming: previously assigned names survive, new globals pick the next
  // free prefixed name.
  @Test
  public void testStableRenameWithPrefix2() {
    prefix = "a";
    test("function Foo() {return 1;}" +
        "function Bar() {" +
        " var a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z," +
        " A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,aa,ab;" +
        " Foo();" +
        "} Bar();",
        "function a() {return 1;}" +
        "function aa() {" +
        " var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A," +
        " B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$,ba,ca;" +
        " a();" +
        "} aa();");
    previouslyUsedMap = renameVars.getVariableMap();
    prefix = "a";
    test("function Foo() {return 1;}" +
        "function Baz() {return 1;}" +
        "function Bar() {" +
        " var a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z," +
        " A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,aa,ab;" +
        " Foo();" +
        "} Bar();",
        "function a() {return 1;}" +
        "function ab() {return 1;}" +
        "function aa() {" +
        " var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A," +
        " B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$,ba,ca;" +
        " a();" +
        "} aa();");
  }
  // Stability is preferred even when the reused name is longer than a fresh minimal name.
  @Test
  public void testContrivedExampleWhereConsistentRenamingIsWorse() {
    previouslyUsedMap = makeVariableMap(
        "Foo", "LongString", "L 0", "b", "L 1", "c");
    test("function Foo(v1, v2) {return v1;} Foo();",
        "function LongString(b, c) {return b;} LongString();");
    previouslyUsedMap = renameVars.getVariableMap();
    VariableMap expectedVariableMap = makeVariableMap(
        "Foo", "LongString", "L 0", "b", "L 1", "c");
    assertVariableMapsEqual(expectedVariableMap, previouslyUsedMap);
  }
  // Two originals mapping to one new name is an invalid map and must be rejected.
  @Test
  public void testPrevUsedMapWithDuplicates() {
    try {
      makeVariableMap("Foo", "z", "Bar", "z");
      testSame("");
      // Reaching here means no exception was thrown — fail the test.
      throw new AssertionError();
    } catch (java.lang.IllegalArgumentException expected) {
    }
  }
  // goog.exportSymbol: with the Closure pass enabled, the exported name is reserved so no
  // variable may be renamed onto it.
  @Test
  public void testExportSimpleSymbolReservesName() {
    test("var goog, x; goog.exportSymbol('a', x);",
        "var a, b; a.exportSymbol('a', b);");
    withClosurePass = true;
    test("var goog, x; goog.exportSymbol('a', x);",
        "var b, c; b.exportSymbol('a', c);");
  }
  @Test
  public void testExportComplexSymbolReservesName() {
    test("var goog, x; goog.exportSymbol('a.b', x);",
        "var a, b; a.exportSymbol('a.b', b);");
    withClosurePass = true;
    test("var goog, x; goog.exportSymbol('a.b', x);",
        "var b, c; b.exportSymbol('a.b', c);");
  }
  // A non-string export argument must be tolerated, not crash the pass.
  @Test
  public void testExportToNonStringDoesntExplode() {
    withClosurePass = true;
    test("var goog, a, b; goog.exportSymbol(a, b);",
        "var a, b, c; a.exportSymbol(b, c);");
  }
  // $super is treated as exported by convention and kept; see the linked issue.
  @Test
  public void testDollarSignSuperExport1() {
    useGoogleCodingConvention = false;
    // See http://blickly.github.io/closure-compiler-issues/#32
    test("var x = function($super,duper,$fantastic){}",
        "var c = function($super, a, b){}");
    localRenamingOnly = false;
    test("var $super = 1", "var a = 1");
    useGoogleCodingConvention = true;
    test("var x = function($super,duper,$fantastic){}",
        "var c = function($super,a,b){}");
  }
  @Test
  public void testDollarSignSuperExport2() {
    withNormalize = true;
    useGoogleCodingConvention = false;
    // See http://blickly.github.io/closure-compiler-issues/#32
    test("var x = function($super,duper,$fantastic){};" +
        "var y = function($super,duper){};",
        "var c = function($super, a, b){};" +
        "var d = function($super, a){};");
    localRenamingOnly = false;
    test("var $super = 1", "var a = 1");
    useGoogleCodingConvention = true;
    test("var x = function($super,duper,$fantastic){};" +
        "var y = function($super,duper){};",
        "var c = function($super, a, b ){};" +
        "var d = function($super,a){};");
  }
  // A biased name generator should hand out its favored characters first.
  @Test
  public void testBias() {
    nameGenerator = new DefaultNameGenerator(new HashSet<String>(), "", null);
    nameGenerator.favors("AAAAAAAAHH");
    test("var x, y", "var A, H");
  }
  // generatePseudoNames wraps the original name as $name$$ for debuggable output.
  @Test
  public void testPseudoNames() {
    generatePseudoNames = false;
    // See http://blickly.github.io/closure-compiler-issues/#32
    test("var foo = function(a, b, c){}",
        "var d = function(a, b, c){}");
    generatePseudoNames = true;
    test("var foo = function(a, b, c){}",
        "var $foo$$ = function($a$$, $b$$, $c$$){}");
    test("var a = function(a, b, c){}",
        "var $a$$ = function($a$$, $b$$, $c$$){}");
  }
  @Test
  public void testArrowFunctions() {
    test("foo => {return foo + 3;}",
        "a => {return a + 3;}");
    test("(foo, bar) => {return foo + bar + 3;}",
        "(a, b) => {return a + b + 3;}");
  }
  // Class names and constructor/method parameters are renamed; property names are not.
  @Test
  public void testClasses() {
    test("class fooBar {}",
        "class a {}");
    test(
        lines(
            "class fooBar {",
            "  constructor(foo, bar) {",
            "    this.foo = foo;",
            "    this.bar = bar;",
            "  }",
            "}",
            "var x = new fooBar(2, 3);"),
        lines(
            "class a {",
            "  constructor(b, c) {",
            "    this.foo = b;",
            "    this.bar = c;",
            "  }",
            "}",
            "var d = new a(2, 3);"));
    test(
        lines(
            "class fooBar {",
            "  constructor(foo, bar) {",
            "    this.foo = foo;",
            "    this.bar = bar;",
            "  }",
            "  func(x) {",
            "    return this.foo + x;",
            "  }",
            "}",
            "var x = new fooBar(2,3);",
            "var abcd = x.func(5);"),
        lines(
            "class b {",
            "  constructor(a, c) {",
            "    this.foo = a;",
            "    this.bar = c;",
            "  }",
            "  func(a) {",
            "    return this.foo + a;",
            "  }",
            "}",
            "var d = new b(2,3);",
            "var e = d.func(5);"
        ));
  }
  // Block-scoped declarations: the shadowing block-local gets its own name.
  @Test
  public void testLetConst() {
    test("let xyz;",
        "let a;"
    );
    test("const xyz = 1;",
        "const a = 1");
    test(
        lines(
            "let zyx = 1; {",
            "  const xyz = 1;",
            "  let zyx = 2;",
            "  zyx = 3;",
            "}",
            "let xyz = 'potato';",
            "zyx = 4;"
        ),
        lines(
            "let a = 1; {",
            "  const c = 1;",
            "  let b = 2;",
            "  b = 3;",
            "}",
            "let d = 'potato';",
            "a = 4;"));
  }
  @Test
  public void testGenerators() {
    test(
        lines(
            "function* gen() {",
            "  var xyz = 3;",
            "  yield xyz + 4;",
            "}",
            "gen().next()"
        ),
        lines(
            "function* a() {",
            "  var b = 3;",
            "  yield b + 4;",
            "}",
            "a().next()"));
  }
  // ES6 constructs: for-of bindings, template-literal references, and destructuring patterns
  // are all renamed; object property names stay.
  @Test
  public void testForOf() {
    test(
        "for (var item of items) {}",
        "for (var a of items) {}");
  }
  @Test
  public void testTemplateStrings() {
    test(
        lines(
            "var name = 'Foo';",
            "`My name is ${name}`;"
        ),
        lines(
            "var a = 'Foo';",
            "`My name is ${a}`;"));
  }
  @Test
  public void testArrayDestructuring() {
    test("var [x, y, z] = [1, 2, 3];",
        "var [a, b, c] = [1, 2, 3];");
  }
  @Test
  public void testObjectDestructuring() {
    // TODO(sdh): Teach RenameVars to take advantage of shorthand properties by
    // building up a Map from var name strings to property name multisets. We
    // should be able to treat this similar to the "previous names" map, where
    // we preferentially pick names with the most lined-up properties, provided
    // the property names are short (should be easy enough to do the math).
    // Note, the same property name could get different var names in different
    // scopes, so we probably need to do the comparison per scope.
    // Also, this is only relevant if language_out >= ES6.
    test(
        lines(
            "var obj = {p: 5, h: false};",
            "var {p, h} = obj;"),
        lines(
            "var a = {p: 5, h: false};",
            "var {p: b, h: c} = a;"));
    test(
        lines(
            "var obj = {p: 5, h: false};",
            "var {p: x, h: y} = obj;"),
        lines(
            "var a = {p: 5, h: false};",
            "var {p: b, h: c} = a;"));
  }
  // Default and rest parameters are renamed like ordinary parameters.
  @Test
  public void testDefaultFunction() {
    test(
        lines(
            "function f(x, y=12) {",
            "  return x * y;",
            "}"
        ),
        lines(
            "function c(a, b=12) {",
            "  return a * b;",
            "}"));
  }
  @Test
  public void testRestFunction() {
    test(
        lines(
            "function f(x, ...y) {",
            "  return x * y[0];",
            "}"
        ),
        lines(
            "function c(a, ...b) {",
            "  return a * b[0];",
            "}"));
  }
  // __proto__/super in object literals, and ES module import bindings (including the
  // shorthand {name} form, which must expand to {name as a}).
  @Test
  public void testObjectLiterals() {
    test(
        lines(
            "var objSuper = {",
            "  f: 'potato'",
            "};",
            "var obj = {",
            "  __proto__: objSuper,",
            "  g: false,",
            "  x() {",
            "    return super.f;",
            "  }",
            "};",
            "obj.x();"
        ),
        lines(
            "var a = {",
            "  f: 'potato'",
            "};",
            "var b = {",
            "  __proto__: a,",
            "  g: false,",
            "  x() {",
            "    return super.f;",
            "  }",
            "};",
            "b.x();"));
  }
  @Test
  public void testImport1() {
    test("import name from './other.js'; use(name);", "import a from './other.js'; use(a);");
    test(
        "import * as name from './other.js'; use(name);",
        "import * as a from './other.js'; use(a);");
    test(
        "import {default as name} from './other.js'; use(name);",
        "import {default as a} from './other.js'; use(a);");
  }
  @Test
  public void testImport2() {
    withNormalize = true;
    test(
        "import {name} from './other.js'; use(name);",
        "import {name as a} from './other.js'; use(a);");
  }
private void testRenameMapUsingOldMap(String input, String expected,
VariableMap expectedMap) {
previouslyUsedMap = renameVars.getVariableMap();
testRenameMap("", input, expected, expectedMap);
}
private void testRenameMapUsingOldMap(String externs, String input,
String expected,
VariableMap expectedMap) {
previouslyUsedMap = renameVars.getVariableMap();
testRenameMap(externs, input, expected, expectedMap);
}
private void testRenameMap(String input, String expected,
VariableMap expectedRenameMap) {
testRenameMap("", input, expected, expectedRenameMap);
}
private void testRenameMap(String externs, String input, String expected,
VariableMap expectedRenameMap) {
test(
externs(externs),
srcs(input),
expected(expected));
VariableMap renameMap = renameVars.getVariableMap();
assertVariableMapsEqual(expectedRenameMap, renameMap);
}
  // Above 1000 locals in one scope, preferStableNames keeps original names (no "L n" keys).
  @Test
  public void testPreferStableNames() {
    preferStableNames = true;
    // Locals in scopes with too many local variables (>1000) should
    // not receive temporary names (eg, 'L 123'). These locals will
    // appear in the name maps with the same name as in the code (eg,
    // 'a0' in this case).
    test(createManyVarFunction(1000), null);
    assertThat(renameVars.getVariableMap().lookupNewName("a0")).isNull();
    assertThat(renameVars.getVariableMap().lookupNewName("L 0")).isEqualTo("b");
    test(createManyVarFunction(1001), null);
    assertThat(renameVars.getVariableMap().lookupNewName("a0")).isEqualTo("b");
    assertThat(renameVars.getVariableMap().lookupNewName("L 0")).isNull();
    // With {@code preferStableNames} off locals should
    // unconditionally receive temporary names.
    preferStableNames = false;
    test(createManyVarFunction(1000), null);
    assertThat(renameVars.getVariableMap().lookupNewName("a0")).isNull();
    assertThat(renameVars.getVariableMap().lookupNewName("L 0")).isEqualTo("b");
    test(createManyVarFunction(1001), null);
    assertThat(renameVars.getVariableMap().lookupNewName("a0")).isNull();
    assertThat(renameVars.getVariableMap().lookupNewName("L 0")).isEqualTo("b");
  }
private static String createManyVarFunction(int numVars) {
List<String> locals = new ArrayList<>();
for (int i = 0; i < numVars; i++) {
locals.add("a" + i);
}
return "function foo() { var " + Joiner.on(",").join(locals) + "; }";
}
private VariableMap makeVariableMap(String... keyValPairs) {
checkArgument(keyValPairs.length % 2 == 0);
ImmutableMap.Builder<String, String> renameMap = ImmutableMap.builder();
for (int i = 0; i < keyValPairs.length; i += 2) {
renameMap.put(keyValPairs[i], keyValPairs[i + 1]);
}
return new VariableMap(renameMap.build());
}
private static void assertVariableMapsEqual(VariableMap a, VariableMap b) {
Map<String, String> ma = a.getOriginalNameToNewNameMap();
Map<String, String> mb = b.getOriginalNameToNewNameMap();
assertWithMessage("VariableMaps not equal").that(mb).isEqualTo(ma);
}
  /**
   * Compiler pass that runs ProcessClosurePrimitives first and then
   * RenameVars over the same AST, passing the names the closure pass
   * reported as exported into the rename pass.
   */
  private class ClosurePassAndRenameVars implements CompilerPass {
    private final Compiler compiler;

    private ClosurePassAndRenameVars(Compiler compiler) {
      this.compiler = compiler;
    }

    @Override
    public void process(Node externs, Node root) {
      ProcessClosurePrimitives closurePass =
          new ProcessClosurePrimitives(
              compiler, null, CheckLevel.WARNING, false);
      closurePass.process(externs, root);
      // The five boolean flags disable optional rename features;
      // NOTE(review): confirm each flag's meaning against the RenameVars
      // constructor signature before changing any of them.
      renameVars = new RenameVars(compiler, prefix,
          false, false, false, false, false, previouslyUsedMap, null,
          closurePass.getExportedVariableNames(),
          new DefaultNameGenerator());
      renameVars.process(externs, root);
    }
  }
  /**
   * Compiler pass that runs the Normalize pass over the AST before
   * delegating to a wrapped pass, for tests that require normalized input.
   */
  private static class NormalizePassWrapper implements CompilerPass {
    private final Compiler compiler;
    private final CompilerPass wrappedPass;

    private NormalizePassWrapper(Compiler compiler,
        CompilerPass wrappedPass) {
      this.compiler = compiler;
      this.wrappedPass = wrappedPass;
    }

    @Override
    public void process(Node externs, Node root) {
      // 'false' here is a constructor flag for Normalize;
      // NOTE(review): confirm its meaning against the Normalize ctor.
      Normalize normalize = new Normalize(compiler, false);
      normalize.process(externs, root);
      wrappedPass.process(externs, root);
    }
  }
}
| |
package com.accela.TestCases.authorityCenter;
import java.util.List;
import com.accela.AuthorityCenter.authorityBase.AuthorityBase;
import com.accela.AuthorityCenter.authorityBase.AuthorityBaseOperatingException;
import com.accela.AuthorityCenter.authorityBase.SimplePasswordManager;
import com.accela.AuthorityCenter.ruleFilter.AuthorityFilter;
import com.accela.AuthorityCenter.ruleFilter.AuthorityFilteringException;
import com.accela.AuthorityCenter.shared.CommandWithAuthority;
import junit.framework.TestCase;
public class TestingAuthorityFilter extends TestCase
{
    /** Filter under test; replaced with a fresh instance in {@link #setUp()}. */
    private AuthorityFilter authorityFilter = new AuthorityFilter();

    /** Command heads combined with every commander to build the test commands. */
    private final String[] commandHeads = new String[]{
        "command1",
        "command2",
        "command3",
    };

    /** Three distinct commanders, identified by ids 1..3. */
    private final CommanderIDForTest[] commanders = new CommanderIDForTest[]{
        new CommanderIDForTest(1),
        new CommanderIDForTest(2),
        new CommanderIDForTest(3),
    };

    /**
     * Cross product of commanders and command heads:
     * commands[i][j] pairs commanders[i] with commandHeads[j].
     */
    private final CommandWithAuthority[][] commands = new CommandWithAuthority[][]{
        new CommandWithAuthority[]{
            new CommandWithAuthority(commanders[0], commandHeads[0]),
            new CommandWithAuthority(commanders[0], commandHeads[1]),
            new CommandWithAuthority(commanders[0], commandHeads[2]),
        },
        new CommandWithAuthority[]{
            new CommandWithAuthority(commanders[1], commandHeads[0]),
            new CommandWithAuthority(commanders[1], commandHeads[1]),
            new CommandWithAuthority(commanders[1], commandHeads[2]),
        },
        new CommandWithAuthority[]{
            new CommandWithAuthority(commanders[2], commandHeads[0]),
            new CommandWithAuthority(commanders[2], commandHeads[1]),
            new CommandWithAuthority(commanders[2], commandHeads[2]),
        },
    };

    /**
     * rules[0] holds rules that vote to filter (true); rules[1] holds rules
     * that vote not to filter (false). The last two rules of each row throw
     * NullPointerException and IllegalArgumentException respectively, so the
     * filter's exception aggregation can be exercised.
     */
    private RuleForTest[][] rules = new RuleForTest[][]{
        new RuleForTest[]{
            new RuleForTest(true),
            new RuleForTest(true),
            new RuleForTest(true, new Runnable(){
                public void run()
                {
                    throw new NullPointerException();
                }
            }),
            new RuleForTest(true, new Runnable(){
                public void run()
                {
                    throw new IllegalArgumentException();
                }
            })
        },
        new RuleForTest[]{
            new RuleForTest(false),
            new RuleForTest(false),
            new RuleForTest(false, new Runnable(){
                public void run()
                {
                    throw new NullPointerException();
                }
            }),
            new RuleForTest(false, new Runnable(){
                public void run()
                {
                    throw new IllegalArgumentException();
                }
            })
        },
    };

    protected void setUp() throws Exception
    {
        super.setUp();
        // Start every test with a pristine filter and zeroed invoke counters.
        authorityFilter = new AuthorityFilter();
        clearInvokeCount();
    }

    /** Resets the invoke counter of every rule in {@link #rules}. */
    private void clearInvokeCount()
    {
        for (RuleForTest[] row : rules)
        {
            for (RuleForTest rule : row)
            {
                rule.clearInvokeCount();
            }
        }
    }

    protected void tearDown() throws Exception
    {
        super.tearDown();
        authorityFilter = null;
    }

    /**
     * With no explicit rules registered, the filter's default behavior
     * compares commander authority against the command head's required
     * authority; once all rules are removed, nothing is filtered.
     *
     * <p>Note: the original version used the {@code assert} keyword, which
     * is a no-op unless the JVM runs with {@code -ea}; the checks were
     * converted to JUnit assertions so they always execute.
     */
    public void testDefaultRule()
    {
        AuthorityBase base = new AuthorityBase(new SimplePasswordManager());
        try
        {
            // Each addHighestLevel call introduces a new top level, so the
            // resulting order is l1 < l2 < l3.
            base.addHighestLevel("l1", "l1");
            base.addHighestLevel("l2", "l2");
            base.addHighestLevel("l3", "l3");
            base.setCommandHeadAuthority(commandHeads[1], "l1");
            base.setCommandHeadAuthority(commandHeads[2], "l2");
            base.setCommanderAuthority(commanders[1], "l1");
            base.setCommanderAuthority(commanders[2], "l2");

            // Commander 0 has no authority: only the unrestricted head passes.
            assertFalse(authorityFilter.shouldFilter(commands[0][0], base));
            assertTrue(authorityFilter.shouldFilter(commands[0][1], base));
            assertTrue(authorityFilter.shouldFilter(commands[0][2], base));
            // Commander 1 (l1) may issue heads requiring up to l1.
            assertFalse(authorityFilter.shouldFilter(commands[1][0], base));
            assertFalse(authorityFilter.shouldFilter(commands[1][1], base));
            assertTrue(authorityFilter.shouldFilter(commands[1][2], base));
            // Commander 2 (l2) may issue every head.
            assertFalse(authorityFilter.shouldFilter(commands[2][0], base));
            assertFalse(authorityFilter.shouldFilter(commands[2][1], base));
            assertFalse(authorityFilter.shouldFilter(commands[2][2], base));

            // Removing all rules disables filtering entirely.
            authorityFilter.removeAllAuthorityRules();
            for (int i = 0; i < commands.length; i++)
            {
                for (int j = 0; j < commands[i].length; j++)
                {
                    assertFalse(authorityFilter.shouldFilter(commands[i][j], base));
                }
            }
        } catch (AuthorityFilteringException ex)
        {
            fail("unexpected AuthorityFilteringException: " + ex);
        } catch (AuthorityBaseOperatingException ex)
        {
            fail("unexpected AuthorityBaseOperatingException: " + ex);
        }
    }

    /**
     * Registers explicit rules and verifies that (a) with a filtering rule
     * present the command is filtered without an exception, (b) with only
     * non-filtering rules the throwing rules surface exactly one NPE and
     * one IAE in the aggregated cause list, and (c) adding back a single
     * filtering rule filters every command again.
     */
    public void testRules()
    {
        AuthorityBase base = new AuthorityBase(new SimplePasswordManager());
        // Register every rule: filtering and non-filtering, throwing or not.
        for (RuleForTest[] row : rules)
        {
            for (RuleForTest rule : row)
            {
                authorityFilter.addAuthorityRule(rule);
            }
        }
        for (int i = 0; i < commands.length; i++)
        {
            for (int j = 0; j < commands[i].length; j++)
            {
                try
                {
                    assertTrue(authorityFilter.shouldFilter(commands[i][j], base));
                } catch (AuthorityFilteringException ex)
                {
                    fail("unexpected AuthorityFilteringException: " + ex);
                }
                // At least one invoked rule must have voted to filter.
                boolean findActivedRule = false;
                for (int inner_i = 0; inner_i < rules.length; inner_i++)
                {
                    for (int inner_j = 0; inner_j < rules[inner_i].length; inner_j++)
                    {
                        if (rules[inner_i][inner_j].getInvokeCount() == 1)
                        {
                            try
                            {
                                if (rules[inner_i][inner_j].shouldFilter(commands[0][0], base))
                                {
                                    findActivedRule = true;
                                }
                            } catch (Exception ignored)
                            {
                                // Throwing rules are expected to throw again
                                // when re-invoked here; only non-throwing
                                // filtering rules matter for this check.
                            }
                        }
                    }
                }
                assertTrue("no invoked rule voted to filter", findActivedRule);
                clearInvokeCount();
            }
        }

        authorityFilter.removeAllAuthorityRules();
        //////////////
        // Only the non-filtering row registered: the command must pass, and
        // the two throwing rules must each contribute exactly one exception
        // to the aggregated cause list.
        for (int i = 0; i < rules[1].length; i++)
        {
            authorityFilter.addAuthorityRule(rules[1][i]);
        }
        for (int i = 0; i < commands.length; i++)
        {
            for (int j = 0; j < commands[i].length; j++)
            {
                List<Throwable> exceptions = null;
                try
                {
                    assertFalse(authorityFilter.shouldFilter(commands[i][j], base));
                } catch (AuthorityFilteringException ex)
                {
                    exceptions = ex.getCauseList();
                }
                assertNotNull("expected aggregated rule exceptions", exceptions);
                int nullPointerExceptionCount = 0;
                int illegalArgumentExceptionCount = 0;
                for (Throwable t : exceptions)
                {
                    if (t instanceof NullPointerException)
                    {
                        nullPointerExceptionCount++;
                    }
                    else if (t instanceof IllegalArgumentException)
                    {
                        illegalArgumentExceptionCount++;
                    }
                    else
                    {
                        fail("unexpected exception type: " + t);
                    }
                }
                assertEquals(1, nullPointerExceptionCount);
                assertEquals(1, illegalArgumentExceptionCount);
                clearInvokeCount();
            }
        }
        /////////////////////
        // One filtering rule on top of the non-filtering (and throwing)
        // rules is enough to filter every command again, without surfacing
        // the aggregated exceptions.
        authorityFilter.addAuthorityRule(rules[0][0]);
        for (int i = 0; i < commands.length; i++)
        {
            for (int j = 0; j < commands[i].length; j++)
            {
                try
                {
                    assertTrue(authorityFilter.shouldFilter(commands[i][j], base));
                } catch (AuthorityFilteringException ex)
                {
                    fail("unexpected AuthorityFilteringException: " + ex);
                }
                clearInvokeCount();
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// ----------------------------------------------------------------------------
// This class is largely adapted from "com.google.common.base.Preconditions",
// which is part of the "Guava" library.
//
// Because of frequent issues with dependency conflicts, this class was
// added to the Flink code base to reduce dependency on Guava.
// ----------------------------------------------------------------------------
package org.apache.flink.util;
import org.apache.flink.annotation.Internal;
import javax.annotation.Nullable;
/**
 * A collection of static utility methods to validate input.
 *
 * <p>This class is modelled after Google Guava's Preconditions class, and partly takes code
 * from that class. We add this code to the Flink code base in order to reduce external
 * dependencies.
 */
@Internal
public final class Preconditions {

	// ------------------------------------------------------------------------
	//  Null checks
	// ------------------------------------------------------------------------

	/**
	 * Ensures that the given object reference is not null.
	 * Upon violation, a {@code NullPointerException} with no message is thrown.
	 *
	 * @param reference The object reference
	 * @return The object reference itself (generically typed).
	 *
	 * @throws NullPointerException Thrown, if the passed reference was null.
	 */
	public static <T> T checkNotNull(T reference) {
		if (reference == null) {
			throw new NullPointerException();
		}
		return reference;
	}

	/**
	 * Ensures that the given object reference is not null.
	 * Upon violation, a {@code NullPointerException} with the given message is thrown.
	 *
	 * @param reference The object reference
	 * @param errorMessage The message for the {@code NullPointerException} that is thrown if the check fails.
	 * @return The object reference itself (generically typed).
	 *
	 * @throws NullPointerException Thrown, if the passed reference was null.
	 */
	public static <T> T checkNotNull(T reference, @Nullable String errorMessage) {
		if (reference == null) {
			throw new NullPointerException(String.valueOf(errorMessage));
		}
		return reference;
	}

	/**
	 * Ensures that the given object reference is not null.
	 * Upon violation, a {@code NullPointerException} with the given message is thrown.
	 *
	 * <p>The error message is constructed from a template and an arguments array, after
	 * a similar fashion as {@link String#format(String, Object...)}, but supporting only
	 * {@code %s} as a placeholder.
	 *
	 * @param reference The object reference
	 * @param errorMessageTemplate The message template for the {@code NullPointerException}
	 *                             that is thrown if the check fails. The template substitutes its
	 *                             {@code %s} placeholders with the error message arguments.
	 * @param errorMessageArgs The arguments for the error message, to be inserted into the
	 *                         message template for the {@code %s} placeholders.
	 *
	 * @return The object reference itself (generically typed).
	 *
	 * @throws NullPointerException Thrown, if the passed reference was null.
	 */
	public static <T> T checkNotNull(T reference,
			@Nullable String errorMessageTemplate,
			@Nullable Object... errorMessageArgs) {

		if (reference == null) {
			throw new NullPointerException(format(errorMessageTemplate, errorMessageArgs));
		}
		return reference;
	}

	// ------------------------------------------------------------------------
	//  Boolean Condition Checking (Argument)
	// ------------------------------------------------------------------------

	/**
	 * Checks the given boolean condition, and throws an {@code IllegalArgumentException} if
	 * the condition is not met (evaluates to {@code false}).
	 *
	 * @param condition The condition to check
	 *
	 * @throws IllegalArgumentException Thrown, if the condition is violated.
	 */
	public static void checkArgument(boolean condition) {
		if (!condition) {
			throw new IllegalArgumentException();
		}
	}

	/**
	 * Checks the given boolean condition, and throws an {@code IllegalArgumentException} if
	 * the condition is not met (evaluates to {@code false}). The exception will have the
	 * given error message.
	 *
	 * @param condition The condition to check
	 * @param errorMessage The message for the {@code IllegalArgumentException} that is thrown if the check fails.
	 *
	 * @throws IllegalArgumentException Thrown, if the condition is violated.
	 */
	public static void checkArgument(boolean condition, @Nullable Object errorMessage) {
		if (!condition) {
			throw new IllegalArgumentException(String.valueOf(errorMessage));
		}
	}

	/**
	 * Checks the given boolean condition, and throws an {@code IllegalArgumentException} if
	 * the condition is not met (evaluates to {@code false}).
	 *
	 * @param condition The condition to check
	 * @param errorMessageTemplate The message template for the {@code IllegalArgumentException}
	 *                             that is thrown if the check fails. The template substitutes its
	 *                             {@code %s} placeholders with the error message arguments.
	 * @param errorMessageArgs The arguments for the error message, to be inserted into the
	 *                         message template for the {@code %s} placeholders.
	 *
	 * @throws IllegalArgumentException Thrown, if the condition is violated.
	 */
	public static void checkArgument(boolean condition,
			@Nullable String errorMessageTemplate,
			@Nullable Object... errorMessageArgs) {

		if (!condition) {
			throw new IllegalArgumentException(format(errorMessageTemplate, errorMessageArgs));
		}
	}

	// ------------------------------------------------------------------------
	//  Boolean Condition Checking (State)
	// ------------------------------------------------------------------------

	/**
	 * Checks the given boolean condition, and throws an {@code IllegalStateException} if
	 * the condition is not met (evaluates to {@code false}).
	 *
	 * @param condition The condition to check
	 *
	 * @throws IllegalStateException Thrown, if the condition is violated.
	 */
	public static void checkState(boolean condition) {
		if (!condition) {
			throw new IllegalStateException();
		}
	}

	/**
	 * Checks the given boolean condition, and throws an {@code IllegalStateException} if
	 * the condition is not met (evaluates to {@code false}). The exception will have the
	 * given error message.
	 *
	 * @param condition The condition to check
	 * @param errorMessage The message for the {@code IllegalStateException} that is thrown if the check fails.
	 *
	 * @throws IllegalStateException Thrown, if the condition is violated.
	 */
	public static void checkState(boolean condition, @Nullable Object errorMessage) {
		if (!condition) {
			throw new IllegalStateException(String.valueOf(errorMessage));
		}
	}

	/**
	 * Checks the given boolean condition, and throws an {@code IllegalStateException} if
	 * the condition is not met (evaluates to {@code false}).
	 *
	 * @param condition The condition to check
	 * @param errorMessageTemplate The message template for the {@code IllegalStateException}
	 *                             that is thrown if the check fails. The template substitutes its
	 *                             {@code %s} placeholders with the error message arguments.
	 * @param errorMessageArgs The arguments for the error message, to be inserted into the
	 *                         message template for the {@code %s} placeholders.
	 *
	 * @throws IllegalStateException Thrown, if the condition is violated.
	 */
	public static void checkState(boolean condition,
			@Nullable String errorMessageTemplate,
			@Nullable Object... errorMessageArgs) {

		if (!condition) {
			throw new IllegalStateException(format(errorMessageTemplate, errorMessageArgs));
		}
	}

	/**
	 * Ensures that the given index is valid for an array, list or string of the given size.
	 *
	 * @param index index to check
	 * @param size size of the array, list or string
	 *
	 * @throws IllegalArgumentException Thrown, if size is negative.
	 * @throws IndexOutOfBoundsException Thrown, if the index negative or greater than or equal to size
	 */
	public static void checkElementIndex(int index, int size) {
		checkArgument(size >= 0, "Size was negative.");
		if (index < 0 || index >= size) {
			throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + size);
		}
	}

	/**
	 * Ensures that the given index is valid for an array, list or string of the given size.
	 *
	 * @param index index to check
	 * @param size size of the array, list or string
	 * @param errorMessage The message for the {@code IndexOutOfBoundsException} that is thrown if the check fails.
	 *
	 * @throws IllegalArgumentException Thrown, if size is negative.
	 * @throws IndexOutOfBoundsException Thrown, if the index negative or greater than or equal to size
	 */
	public static void checkElementIndex(int index, int size, @Nullable String errorMessage) {
		checkArgument(size >= 0, "Size was negative.");
		if (index < 0 || index >= size) {
			throw new IndexOutOfBoundsException(String.valueOf(errorMessage) + " Index: " + index + ", Size: " + size);
		}
	}

	// ------------------------------------------------------------------------
	//  Utilities
	// ------------------------------------------------------------------------

	/**
	 * A simplified formatting method. Similar to {@link String#format(String, Object...)}, but
	 * with lower overhead (only String parameters, no locale, no format validation).
	 *
	 * <p>This method is taken quasi verbatim from the Guava Preconditions class.
	 */
	private static String format(@Nullable String template, @Nullable Object... args) {
		final int numArgs = args == null ? 0 : args.length;
		template = String.valueOf(template); // null -> "null"

		// start substituting the arguments into the '%s' placeholders
		StringBuilder builder = new StringBuilder(template.length() + 16 * numArgs);
		int templateStart = 0;
		int i = 0;
		while (i < numArgs) {
			int placeholderStart = template.indexOf("%s", templateStart);
			if (placeholderStart == -1) {
				break;
			}
			// append(CharSequence, int, int) copies the region directly into
			// the builder, avoiding the intermediate String that
			// substring(...) would allocate for every placeholder.
			builder.append(template, templateStart, placeholderStart);
			builder.append(args[i++]);
			templateStart = placeholderStart + 2;
		}
		builder.append(template, templateStart, template.length());

		// if we run out of placeholders, append the extra args in square braces
		if (i < numArgs) {
			builder.append(" [");
			builder.append(args[i++]);
			while (i < numArgs) {
				builder.append(", ");
				builder.append(args[i++]);
			}
			builder.append(']');
		}

		return builder.toString();
	}

	// ------------------------------------------------------------------------

	/** Private constructor to prevent instantiation. */
	private Preconditions() {}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package javax.xml.crypto.test.dsig;
import java.nio.charset.StandardCharsets;
import java.security.*;
import java.security.spec.ECGenParameterSpec;
import java.util.Collections;
import javax.xml.crypto.KeySelector;
import javax.xml.crypto.dom.*;
import javax.xml.crypto.dsig.*;
import javax.xml.crypto.dsig.dom.*;
import javax.xml.crypto.dsig.keyinfo.*;
import javax.xml.crypto.dsig.spec.*;
import org.w3c.dom.*;
import javax.xml.crypto.test.KeySelectors;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * This is a testcase to generate all the W3C xmldsig XMLDSig11 testcases.
 *
 * <p>Each test creates an enveloping signature with one combination of
 * signature method, digest method and key, then unmarshals and validates it.
 */
public class CreateInteropXMLDSig11Test {

    // Key selectors used at validation time: kvks resolves keys from
    // ds:KeyValue elements; sks supplies the shared HMAC secret.
    private KeySelector kvks, sks;
    private CanonicalizationMethod withoutComments;
    private DigestMethod sha1, sha256, sha384, sha512;
    private SignatureMethod ecdsaSha1, ecdsaSha256, ecdsaSha384, ecdsaSha512,
                            rsaSha256, rsaSha384, rsaSha512, hmacSha256,
                            hmacSha384, hmacSha512;
    private KeyInfo p256ki, p384ki, p521ki, rsaki, rsa2048ki;
    private XMLSignatureFactory fac;
    private KeyPair p256, p384, p521, rsa2048;
    // Set to false when the platform cannot generate EC key pairs; all
    // EC-based tests become no-ops in that case.
    private boolean ecSupport = true;

    static {
        // Register the Santuario XMLDSig provider at the highest priority so
        // it is preferred over the JDK built-in implementation.
        Security.insertProviderAt
            (new org.apache.jcp.xml.dsig.internal.dom.XMLDSigRI(), 1);
    }

    public CreateInteropXMLDSig11Test() throws Exception {
        // Create KeyPairs
        try {
            KeyPairGenerator kpg = KeyPairGenerator.getInstance("EC");
            // Curve OIDs for the three NIST curves: P-256, P-384, P-521.
            kpg.initialize(new ECGenParameterSpec("1.2.840.10045.3.1.7"));
            p256 = kpg.generateKeyPair();
            kpg.initialize(new ECGenParameterSpec("1.3.132.0.34"));
            p384 = kpg.generateKeyPair();
            kpg.initialize(new ECGenParameterSpec("1.3.132.0.35"));
            p521 = kpg.generateKeyPair();
        } catch (NoSuchAlgorithmException nsae) {
            // EC not supported on this platform
            ecSupport = false;
        }
        // NOTE(review): EC tests are also skipped on IBM JDKs — presumably
        // due to provider incompatibilities; confirm before removing.
        if ("IBM Corporation".equals(System.getProperty("java.vendor"))) {
            ecSupport = false;
        }
        KeyPairGenerator rsakpg = KeyPairGenerator.getInstance("RSA");
        rsakpg.initialize(2048);
        rsa2048 = rsakpg.generateKeyPair();

        // create common objects
        fac = XMLSignatureFactory.getInstance("DOM", new org.apache.jcp.xml.dsig.internal.dom.XMLDSigRI());
        KeyInfoFactory kifac = fac.getKeyInfoFactory();
        withoutComments = fac.newCanonicalizationMethod
            (CanonicalizationMethod.INCLUSIVE, (C14NMethodParameterSpec) null);

        // Digest methods shared by all tests. SHA-384 has no constant on
        // DigestMethod, so its algorithm URI is spelled out.
        sha1 = fac.newDigestMethod(DigestMethod.SHA1, null);
        sha256 = fac.newDigestMethod(DigestMethod.SHA256, null);
        sha384 = fac.newDigestMethod(
            "http://www.w3.org/2001/04/xmldsig-more#sha384", null);
        sha512 = fac.newDigestMethod(DigestMethod.SHA512, null);

        // EC-specific KeyInfos and signature methods, only when EC works.
        if (ecSupport) {
            p256ki = kifac.newKeyInfo(Collections.singletonList(
                kifac.newKeyValue(p256.getPublic())));
            p384ki = kifac.newKeyInfo(Collections.singletonList(
                kifac.newKeyValue(p384.getPublic())));
            p521ki = kifac.newKeyInfo(Collections.singletonList(
                kifac.newKeyValue(p521.getPublic())));
            ecdsaSha1 = fac.newSignatureMethod
                ("http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha1", null);
            ecdsaSha256 = fac.newSignatureMethod
                ("http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha256", null);
            ecdsaSha384 = fac.newSignatureMethod
                ("http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha384", null);
            ecdsaSha512 = fac.newSignatureMethod
                ("http://www.w3.org/2001/04/xmldsig-more#ecdsa-sha512", null);
        }

        // RSA KeyInfos: rsaki wraps the fixed test key from TestUtils,
        // rsa2048ki wraps the freshly generated 2048-bit pair.
        rsaki = kifac.newKeyInfo(Collections.singletonList
            (kifac.newKeyValue(
                TestUtils.getPublicKey("RSA"))));
        rsa2048ki = kifac.newKeyInfo(Collections.singletonList
            (kifac.newKeyValue(rsa2048.getPublic())));
        rsaSha256 = fac.newSignatureMethod
            ("http://www.w3.org/2001/04/xmldsig-more#rsa-sha256", null);
        rsaSha384 = fac.newSignatureMethod
            ("http://www.w3.org/2001/04/xmldsig-more#rsa-sha384", null);
        rsaSha512 = fac.newSignatureMethod
            ("http://www.w3.org/2001/04/xmldsig-more#rsa-sha512", null);
        hmacSha256 = fac.newSignatureMethod
            ("http://www.w3.org/2001/04/xmldsig-more#hmac-sha256", null);
        hmacSha384 = fac.newSignatureMethod
            ("http://www.w3.org/2001/04/xmldsig-more#hmac-sha384", null);
        hmacSha512 = fac.newSignatureMethod
            ("http://www.w3.org/2001/04/xmldsig-more#hmac-sha512", null);
        kvks = new KeySelectors.KeyValueKeySelector();
        sks = new KeySelectors.SecretKeySelector("testkey".getBytes(StandardCharsets.US_ASCII));
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_p256_sha1() throws Exception {
        if (ecSupport) {
            test_create_signature_enveloping(ecdsaSha1, sha1, p256ki,
                                             p256.getPrivate(), kvks);
        }
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_p256_sha256() throws Exception {
        if (ecSupport) {
            test_create_signature_enveloping(ecdsaSha256, sha256, p256ki,
                                             p256.getPrivate(), kvks);
        }
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_p256_sha384() throws Exception {
        if (ecSupport) {
            test_create_signature_enveloping(ecdsaSha384, sha384, p256ki,
                                             p256.getPrivate(), kvks);
        }
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_p256_sha512() throws Exception {
        if (ecSupport) {
            test_create_signature_enveloping(ecdsaSha512, sha512, p256ki,
                                             p256.getPrivate(), kvks);
        }
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_p384_sha1() throws Exception {
        if (ecSupport) {
            test_create_signature_enveloping(ecdsaSha1, sha1, p384ki,
                                             p384.getPrivate(), kvks);
        }
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_p384_sha256() throws Exception {
        if (ecSupport) {
            test_create_signature_enveloping(ecdsaSha256, sha256, p384ki,
                                             p384.getPrivate(), kvks);
        }
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_p384_sha384() throws Exception {
        if (ecSupport) {
            test_create_signature_enveloping(ecdsaSha384, sha384, p384ki,
                                             p384.getPrivate(), kvks);
        }
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_p384_sha512() throws Exception {
        if (ecSupport) {
            test_create_signature_enveloping(ecdsaSha512, sha512, p384ki,
                                             p384.getPrivate(), kvks);
        }
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_p521_sha1() throws Exception {
        if (ecSupport) {
            test_create_signature_enveloping(ecdsaSha1, sha1, p521ki,
                                             p521.getPrivate(), kvks);
        }
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_p521_sha256() throws Exception {
        if (ecSupport) {
            test_create_signature_enveloping(ecdsaSha256, sha256, p521ki,
                                             p521.getPrivate(), kvks);
        }
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_p521_sha384() throws Exception {
        if (ecSupport) {
            test_create_signature_enveloping(ecdsaSha384, sha384, p521ki,
                                             p521.getPrivate(), kvks);
        }
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_p521_sha512() throws Exception {
        if (ecSupport) {
            test_create_signature_enveloping(ecdsaSha512, sha512, p521ki,
                                             p521.getPrivate(), kvks);
        }
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_rsa_sha256() throws Exception {
        test_create_signature_enveloping(rsaSha256, sha1, rsaki,
                                         TestUtils.getPrivateKey("RSA"), kvks);
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_rsa_sha384() throws Exception {
        test_create_signature_enveloping(rsaSha384, sha1, rsa2048ki,
                                         rsa2048.getPrivate(), kvks);
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_rsa_sha512() throws Exception {
        test_create_signature_enveloping(rsaSha512, sha1, rsa2048ki,
                                         rsa2048.getPrivate(), kvks);
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_sha256_rsa_sha256() throws Exception {
        test_create_signature_enveloping(rsaSha256, sha256, rsaki,
                                         TestUtils.getPrivateKey("RSA"), kvks);
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_sha384_rsa_sha256() throws Exception {
        test_create_signature_enveloping(rsaSha256, sha384, rsaki,
                                         TestUtils.getPrivateKey("RSA"), kvks);
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_sha512_rsa_sha256() throws Exception {
        test_create_signature_enveloping(rsaSha256, sha512, rsaki,
                                         TestUtils.getPrivateKey("RSA"), kvks);
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_hmac_sha256() throws Exception {
        test_create_signature_enveloping(hmacSha256, sha1, rsaki,
                                         TestUtils.getSecretKey
                                         ("testkey".getBytes(StandardCharsets.US_ASCII)), sks);
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_hmac_sha384() throws Exception {
        test_create_signature_enveloping(hmacSha384, sha1, rsaki,
                                         TestUtils.getSecretKey
                                         ("testkey".getBytes(StandardCharsets.US_ASCII)), sks);
    }

    @org.junit.jupiter.api.Test
    public void test_create_enveloping_hmac_sha512() throws Exception {
        test_create_signature_enveloping(hmacSha512, sha1, rsaki,
                                         TestUtils.getSecretKey
                                         ("testkey".getBytes(StandardCharsets.US_ASCII)), sks);
    }

    /**
     * Creates an enveloping signature over a small XML object using the
     * given signature and digest methods, signs it with {@code signingKey},
     * then unmarshals the result and validates it with key selector
     * {@code ks}. Fails if unmarshalling does not round-trip or validation
     * does not succeed.
     */
    private void test_create_signature_enveloping(
        SignatureMethod sm, DigestMethod dm, KeyInfo ki, Key signingKey, KeySelector ks
    ) throws Exception {

        // create reference
        Reference ref = fac.newReference("#DSig.Object_1", dm, null,
                                         XMLObject.TYPE, null);

        // create SignedInfo
        SignedInfo si = fac.newSignedInfo(withoutComments, sm,
                                          Collections.singletonList(ref));

        Document doc = TestUtils.newDocument();
        // create Objects
        Element webElem = doc.createElementNS(null, "Web");
        Text text = doc.createTextNode("up up and away");
        webElem.appendChild(text);
        XMLObject obj = fac.newXMLObject(Collections.singletonList
            (new DOMStructure(webElem)), "DSig.Object_1", "text/xml", null);

        // create XMLSignature
        XMLSignature sig = fac.newXMLSignature
            (si, ki, Collections.singletonList(obj), null, null);

        DOMSignContext dsc = new DOMSignContext(signingKey, doc);
        dsc.setDefaultNamespacePrefix("dsig");

        sig.sign(dsc);

        // Round-trip: unmarshal what was just signed and validate it.
        DOMValidateContext dvc = new DOMValidateContext
            (ks, doc.getDocumentElement());
        XMLSignature sig2 = fac.unmarshalXMLSignature(dvc);

        assertEquals(sig, sig2);
        assertTrue(sig2.validate(dvc));
    }
}
| |
/*L
* Copyright Oracle Inc
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cadsr-cgmdr-nci-uk/LICENSE.txt for details.
*/
/*
* eXist Open Source Native XML Database
* Copyright (C) 2001-07 The eXist Project
* http://exist-db.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* $Id: XmldbURL.java 218 2007-04-13 15:06:43Z dizzzz $
*/
package org.exist.protocolhandler.xmldb;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import org.exist.xmldb.XmldbURI;
/**
* A utility class for xmldb URLs. Since, java.net.URL is final this class
* acts as a wrapper, convenience methods have been added.<BR>
* <BR>
* Example:<BR>
* <I>xmldb:exist://username:password@hostname:8080/exist/xmlrpc/db/collection/document.xml</I><BR>
* <BR>
* Note: A collection URL ends with a "/":<BR>
* <I>xmldb:exist://hostname:8080/exist/xmlrpc/db/collection/</I>
*
* @see java.net.URI
* @see java.net.URL
* @see org.exist.xmldb.XmldbURI
*
* @author Dannes Wessels
*/
public class XmldbURL {
private static final int USERNAME=1;
private static final int PASSWORD=2;
private URL myUrl;
    /**
     * Creates a new instance of XmldbURL using an XmldbURI object.
     *
     * @param xmldbURI Resource location.
     * @throws java.net.MalformedURLException if the URI cannot be converted
     *         to a valid "xmldb:" URL.
     */
    public XmldbURL(XmldbURI xmldbURI) throws MalformedURLException {
        this(xmldbURI.toURL());
    }
/**
* Creates a new instance of XmldbURL using an URL object.
* @param url Resource location.
* @throws java.net.MalformedURLException
*/
public XmldbURL(URL url) throws MalformedURLException {
// check protocol
if(url.getProtocol().equals("xmldb")){
myUrl = url;
} else {
throw new MalformedURLException("URL is not an \"xmldb:\" URL: "+url.toString() );
}
}
    /**
     * Creates a new instance of XmldbURL using an URI object.
     *
     * @param uri Resource location.
     * @throws java.net.MalformedURLException if the URI cannot be converted
     *         to a valid "xmldb:" URL.
     */
    public XmldbURL(URI uri) throws MalformedURLException {
        this(uri.toURL());
    }
    /**
     * Creates a new instance of XmldbURL using a String.
     *
     * @param txt Resource location, e.g. "xmldb:exist://host:8080/exist/xmlrpc/db/col/doc.xml".
     * @throws java.net.MalformedURLException if the string is not a valid
     *         "xmldb:" URL.
     */
    public XmldbURL(String txt) throws MalformedURLException {
        this(new URL(txt));
    }
/**
* xmldb:exist://<B>username:password</B>@hostname:8080/exist/xmlrpc/db/collection/document.xml
* @see java.net.URL#getUserInfo
*
* @return username:password
*/
public String getUserInfo() {
return myUrl.getUserInfo();
}
/**
* xmldb:exist://<B>username</B>:password@hostname:8080/exist/xmlrpc/db/collection/document.xml
* @return username
*/
public String getUsername(){
return extractCredentials(USERNAME);
}
/**
* xmldb:exist://username:<B>password</B>@hostname:8080/exist/xmlrpc/db/collection/document.xml
* @return password
*/
public String getPassword(){
return extractCredentials(PASSWORD);
}
/**
* @return URL representation of location.
*/
public URL getURL(){
return myUrl;
}
/**
* xmldb:exist://<B>username:password@hostname:8080/exist/xmlrpc/db/collection/document.xml</B>?query#fragment
* @see java.net.URL#getAuthority
* @return authority
*/
public String getAuthority() {
return myUrl.getAuthority();
}
/**
* xmldb:exist://username:password@hostname:8080<B>/exist/xmlrpc</B>/db/collection/document.xml?query#fragment
* Return context, null if not available.
*/
public String getContext() {
String path = myUrl.getPath();
int dbPosition=path.indexOf("/db");
String context=null;
if(dbPosition!=-1){
// since all paths begin with this pattern..
context=path.substring(0,dbPosition);
}
if(context!=null && context.equals("")){
context=null;
}
return context;
}
// /exist/xmlrpc/db/shakespeare/plays/macbeth.xml
// /exist/xmlrpc/db/shakespeare/plays/
// /db/shakespeare/plays/macbeth.xml
// /db/shakespeare/plays/
/**
* xmldb:exist://username:password@hostname:8080/exist/xmlrpc<B>/db/collection</B>/document.xml
* @return collection
*/
public String getCollection(){
String path=myUrl.getPath();
String collectionName=null;
int dbLocation=path.indexOf("/db");
if(dbLocation!=-1){
// found pattern "/db"
if(path.endsWith("/")){
// -1 removes the slash
collectionName=path.substring(dbLocation, (path.length()-1) );
} else {
int lastSep=path.lastIndexOf('/');
if(lastSep==0){
collectionName="/";
} else if(lastSep!=-1){
collectionName=path.substring(dbLocation, lastSep);
} else {
collectionName=path;
}
}
} else { // TODO not very well tested
// pattern not found, taking full path
if(path.endsWith("/")){
// -1 removes the slash
collectionName=path.substring(0, (path.length()-1) );
} else {
int lastSep=path.lastIndexOf('/');
if(lastSep!=-1){
collectionName=path.substring(dbLocation, lastSep);
} else {
collectionName="/";
}
}
}
return collectionName;
}
/**
* xmldb:exist://username:password@hostname:8080/exist/xmlrpc/db/collection/<B>document.xml</B>
* @return collection
*/
public String getDocumentName(){
String serverPath=myUrl.getPath();
String documentName=null;
if(!serverPath.endsWith("/")){
int lastSep=serverPath.lastIndexOf('/');
if(lastSep==-1){
documentName=serverPath;
} else {
documentName=serverPath.substring(lastSep+1);
}
}
return documentName;
}
// Get username or password
private String extractCredentials(int part) {
String userInfo = myUrl.getUserInfo();
String username = null;
String password = null;
if(userInfo!=null){
int separator = userInfo.indexOf(':');
if(separator==-1){
username=userInfo;
password=null;
} else {
username=userInfo.substring(0,separator);
password=userInfo.substring(separator+1);
}
}
// Fix credentials. If not found (empty string) fill NULL
if(username!=null && username.equals("")){
username=null;
}
// Fix credentials. If not found (empty string) fill NULL
if(password!=null && password.equals("")){
password=null;
}
if(part==USERNAME){
return username;
} else if(part==PASSWORD){
return password;
}
return null;
}
/**
* <B>xmldb</B>:exist://username:password@hostname:8080/exist/xmlrpc/db/collection/document.xml
* @see java.net.URL#getProtocol
* @return protocol
*/
public String getProtocol(){
return myUrl.getProtocol();
}
/**
* xmldb:exist://username:password@<B>hostname</B>:8080/exist/xmlrpc/db/collection/document.xml
* @see java.net.URL#getProtocol
* @return protocol
*/
public String getHost(){
String hostname=myUrl.getHost();
if(hostname.equals("")){
return null;
} else {
return hostname;
}
}
/**
* xmldb:exist://username:password@hostname:<B>8080</B>/exist/xmlrpc/db/collection/document.xml
* @see java.net.URL#getPort
* @return port
*/
public int getPort(){
return myUrl.getPort();
}
/**
* xmldb:exist://username:password@hostname:8080:<B>/exist/xmlrpc/db/collection/document.xml</B>
* @see java.net.URL#getPath
* @return port
*/
public String getPath(){
return myUrl.getPath();
}
/**
* xmldb:exist://username:password@hostname:8080/exist/xmlrpc/db/collection/document.xml?<B>query</B>#fragment
* @see java.net.URL#getQuery
* @return query
*/
public String getQuery(){
return myUrl.getQuery();
}
/**
* xmldb:exist://username:password@hostname:8080:/exist/xmlrpc<B>/db/collection/document.xml</B>
* @return collectionpath
*/
public String getCollectionPath(){
return myUrl.getPath().substring(13);
}
/**
* Get http:// URL from xmldb:exist:// URL
* xmldb:exist://username:password@hostname:8080:/exist/xmlrpc/db/collection/document.xml
* @return http://username:password@hostname:8080:/exist/xmlrpc/db/collection/document.xml
*/
public String getXmlRpcURL(){
return "http://" + myUrl.getAuthority() + getContext();
}
/**
* Does the URL have at least a username?
* @return TRUE when URL contains username
*/
public boolean hasUserInfo(){
return (getUserInfo()!=null && getUsername()!=null);
}
/**
* Get eXist instance name.
*
* @return eXist-db instance name, at this moment fixed to exist
*/
public String getInstanceName() {
return "exist"; // No other choice
}
/**
* Get textual representation of URL.
*
* @see java.net.URL#toString
* @return Text representation of URL.
*/
public String toString(){
return myUrl.toString();
}
/**
* Get information wether URL is an embedded URL.
*
* @return TRUE when URL refers to resource in embedded eXist-db.
*/
public boolean isEmbedded(){
return (getHost()==null);
}
}
| |
package org.sagebionetworks.table.cluster;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.sagebionetworks.repo.model.dbo.dao.table.TableModelTestUtils;
import org.sagebionetworks.repo.model.entity.IdAndVersion;
import org.sagebionetworks.repo.model.table.ColumnModel;
import org.sagebionetworks.repo.model.table.ColumnType;
import org.sagebionetworks.table.cluster.columntranslation.RowMetadataColumnTranslationReference;
import org.sagebionetworks.table.cluster.columntranslation.SchemaColumnTranslationReference;
import org.sagebionetworks.table.query.ParseException;
import org.sagebionetworks.table.query.TableQueryParser;
import org.sagebionetworks.table.query.model.ColumnReference;
import org.sagebionetworks.table.query.model.QuerySpecification;
import org.sagebionetworks.table.query.model.SelectList;
import org.sagebionetworks.table.query.model.TableNameCorrelation;
public class TableAndColumnMapperTest {
// Full set of columns shared by the tests below.
private List<ColumnModel> allColumns;
// Lookup of the columns above, keyed by column name.
private Map<String, ColumnModel> columnMap;

@BeforeEach
public void before() {
    allColumns = Arrays.asList(
            TableModelTestUtils.createColumn(111L, "foo", ColumnType.STRING),
            TableModelTestUtils.createColumn(222L, "has space", ColumnType.STRING),
            TableModelTestUtils.createColumn(333L, "bar", ColumnType.INTEGER),
            TableModelTestUtils.createColumn(444L, "foo_bar", ColumnType.STRING),
            TableModelTestUtils.createColumn(555L, "Foo", ColumnType.STRING),
            TableModelTestUtils.createColumn(666L, "datetype", ColumnType.DATE),
            TableModelTestUtils.createColumn(777L, "has\"quote", ColumnType.STRING),
            TableModelTestUtils.createColumn(888L, "aDouble", ColumnType.DOUBLE));
    // Index every column by its name for convenient lookup in the tests.
    columnMap = new LinkedHashMap<>();
    for (ColumnModel column : allColumns) {
        columnMap.put(column.getName(), column);
    }
}
/**
 * A single-table query exposes that table's id and its full schema.
 */
@Test
public void testConstructorWithSingleTable() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), allColumns);
    QuerySpecification query = new TableQueryParser("select * from syn123").querySpecification();
    // call under test
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    assertEquals(Arrays.asList(IdAndVersion.parse("syn123")), mapper.getTableIds());
    assertEquals(allColumns, mapper.getUnionOfAllTableSchemas());
}

/**
 * A join exposes both table ids and the union of both schemas.
 */
@Test
public void testConstructorWithMultipleTables() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"),
            Arrays.asList(columnMap.get("foo"), columnMap.get("has space"), columnMap.get("bar")));
    schemas.put(IdAndVersion.parse("syn456"),
            Arrays.asList(columnMap.get("foo_bar"), allColumns.get(4), allColumns.get(5)));
    QuerySpecification query = new TableQueryParser("select * from syn123 join syn456").querySpecification();
    // call under test
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    assertEquals(Arrays.asList(IdAndVersion.parse("syn123"), IdAndVersion.parse("syn456")), mapper.getTableIds());
    assertEquals(allColumns.subList(0, 6), mapper.getUnionOfAllTableSchemas());
}

/**
 * A null query must be rejected with a clear message.
 */
@Test
public void testConstructorWithNullModel() throws ParseException {
    QuerySpecification query = null;
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), allColumns);
    IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> {
        // call under test
        new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    });
    assertEquals("QuerySpecification is required.", exception.getMessage());
}

/**
 * A null schema provider must be rejected with a clear message.
 */
@Test
public void testConstructorWithNullProvider() throws ParseException {
    QuerySpecification query = new TableQueryParser("select * from syn123").querySpecification();
    SchemaProvider provider = null;
    IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> {
        // call under test
        new TableAndColumnMapper(query, provider);
    });
    assertEquals("SchemaProvider is required.", exception.getMessage());
}

/**
 * A referenced table with an empty schema must be rejected.
 */
@Test
public void testConstructorWithEmptySchema() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Collections.emptyList());
    QuerySpecification query = new TableQueryParser("select * from syn123").querySpecification();
    IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> {
        // call under test
        new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    });
    assertEquals("Schema for syn123 is empty.", exception.getMessage());
}
/**
 * Select-star over a single table expands to every (quoted) column name.
 */
@Test
public void testBuildSelectAllColumns() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), allColumns);
    QuerySpecification query = new TableQueryParser("select * from syn123").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    // call under test
    SelectList selectList = mapper.buildSelectAllColumns();
    assertNotNull(selectList);
    assertEquals("\"foo\", \"has space\", \"bar\", \"foo_bar\", \"Foo\", \"datetype\", \"has\"\"quote\", \"aDouble\"",
            selectList.toSql());
}

/**
 * Without aliases, each expanded column is qualified with its table name.
 */
@Test
public void testBuildSelectAllColumnsWithJoinWithoutAlias() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    // call under test
    SelectList selectList = mapper.buildSelectAllColumns();
    assertNotNull(selectList);
    assertEquals("syn123.\"foo\", syn123.\"has space\", syn456.\"bar\", syn456.\"foo_bar\"", selectList.toSql());
}

/**
 * With aliases on both tables, each expanded column is qualified with its alias.
 */
@Test
public void testBuildSelectAllColumnsWithJoinWithAlias() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 t join syn456 r").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    // call under test
    SelectList selectList = mapper.buildSelectAllColumns();
    assertNotNull(selectList);
    assertEquals("t.\"foo\", t.\"has space\", r.\"bar\", r.\"foo_bar\"", selectList.toSql());
}

/**
 * With a mix of aliased and unaliased tables, each qualifier matches its own table.
 */
@Test
public void testBuildSelectAllColumnsWithJoinWithMixedAlias() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 t join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    // call under test
    SelectList selectList = mapper.buildSelectAllColumns();
    assertNotNull(selectList);
    assertEquals("t.\"foo\", t.\"has space\", syn456.\"bar\", syn456.\"foo_bar\"", selectList.toSql());
}
/**
 * A null reference resolves to empty.
 */
@Test
public void testLookupColumnReferenceWithNullRef() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 t join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    // call under test
    assertEquals(Optional.empty(), mapper.lookupColumnReference(null));
}

/**
 * A table-qualified reference resolves against the named table's schema.
 */
@Test
public void testLookupColumnReferenceWithMultipleTables() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 t join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    ColumnReference reference = new TableQueryParser("syn456.foo_bar").columnReference();
    // call under test
    assertEquals(Optional.of(new SchemaColumnTranslationReference(columnMap.get("foo_bar"))),
            mapper.lookupColumnReference(reference));
}

/**
 * An already-translated reference (_A1._C444_) resolves back to its column.
 */
@Test
public void testLookupColumnReferenceWithMultipleTablesTranslated() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 t join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    ColumnReference reference = new TableQueryParser("_A1._C444_").columnReference();
    // call under test
    assertEquals(Optional.of(new SchemaColumnTranslationReference(columnMap.get("foo_bar"))),
            mapper.lookupColumnReference(reference));
}

/**
 * An unknown column name resolves to empty.
 */
@Test
public void testLookupColumnReferenceWithMultipleTablesNoMatch() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 t join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    ColumnReference reference = new TableQueryParser("syn456.nothere").columnReference();
    // call under test
    assertEquals(Optional.empty(), mapper.lookupColumnReference(reference));
}

/**
 * An unqualified column is ambiguous when the query joins multiple tables.
 */
@Test
public void testLookupColumnReferenceWithNullLHSAndMultipleTables() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 t join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    ColumnReference reference = new TableQueryParser("foo").columnReference();
    IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> {
        // call under test
        mapper.lookupColumnReference(reference);
    });
    assertEquals("Expected a table name or table alias for column: foo", exception.getMessage());
}

/**
 * An unqualified column is unambiguous with a single table and resolves normally.
 */
@Test
public void testLookupColumnReferenceWithNullLHSAndSingle() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    QuerySpecification query = new TableQueryParser("select * from syn123").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    ColumnReference reference = new TableQueryParser("foo").columnReference();
    // call under test
    assertEquals(Optional.of(new SchemaColumnTranslationReference(columnMap.get("foo"))),
            mapper.lookupColumnReference(reference));
}
/**
 * A null reference yields an empty match.
 */
@Test
public void testLookupColumnReferenceMatchWithNullRef() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 t join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    // call under test
    assertEquals(Optional.empty(), mapper.lookupColumnReferenceMatch(null));
}

/**
 * A table-qualified reference matches both the column and the table info.
 */
@Test
public void testLookupColumnReferenceMatchWithMultipleTables() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 t join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    ColumnReference reference = new TableQueryParser("syn456.foo_bar").columnReference();
    // call under test
    Optional<ColumnReferenceMatch> optionalMatch = mapper.lookupColumnReferenceMatch(reference);
    assertTrue(optionalMatch.isPresent());
    assertEquals(new SchemaColumnTranslationReference(columnMap.get("foo_bar")),
            optionalMatch.get().getColumnTranslationReference());
    TableInfo tableInfo = optionalMatch.get().getTableInfo();
    assertEquals("syn456", tableInfo.getOriginalTableName());
    assertEquals(1, tableInfo.getTableIndex());
}

/**
 * ROW_BENEFACTOR on a translated table name resolves to the row-metadata reference.
 */
@Test
public void testLookupColumnReferenceMatchWithBenefactor() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 t join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    ColumnReference reference = new TableQueryParser("T123.ROW_BENEFACTOR").columnReference();
    // call under test
    Optional<ColumnReferenceMatch> optionalMatch = mapper.lookupColumnReferenceMatch(reference);
    assertTrue(optionalMatch.isPresent());
    assertEquals(RowMetadataColumnTranslationReference.ROW_BENEFACTOR.getColumnTranslationReference(),
            optionalMatch.get().getColumnTranslationReference());
    TableInfo tableInfo = optionalMatch.get().getTableInfo();
    assertEquals("syn123", tableInfo.getOriginalTableName());
    assertEquals(0, tableInfo.getTableIndex());
}

/**
 * ROW_BENEFACTOR also resolves for a versioned table (T456_3 -> syn456.3).
 */
@Test
public void testLookupColumnReferenceMatchWithBenefactorAndVersion() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456.3"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 t join syn456.3").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    ColumnReference reference = new TableQueryParser("T456_3.ROW_BENEFACTOR").columnReference();
    // call under test
    Optional<ColumnReferenceMatch> optionalMatch = mapper.lookupColumnReferenceMatch(reference);
    assertTrue(optionalMatch.isPresent());
    assertEquals(RowMetadataColumnTranslationReference.ROW_BENEFACTOR.getColumnTranslationReference(),
            optionalMatch.get().getColumnTranslationReference());
    TableInfo tableInfo = optionalMatch.get().getTableInfo();
    assertEquals("syn456.3", tableInfo.getOriginalTableName());
    assertEquals(1, tableInfo.getTableIndex());
}

/**
 * A reference qualified with the first table matches index zero.
 */
@Test
public void testLookupColumnReferenceMatchWithMultipleTablesFirstTable() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 t join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    ColumnReference reference = new TableQueryParser("syn123.foo").columnReference();
    // call under test
    Optional<ColumnReferenceMatch> optionalMatch = mapper.lookupColumnReferenceMatch(reference);
    assertTrue(optionalMatch.isPresent());
    assertEquals(new SchemaColumnTranslationReference(columnMap.get("foo")),
            optionalMatch.get().getColumnTranslationReference());
    TableInfo tableInfo = optionalMatch.get().getTableInfo();
    assertEquals("syn123", tableInfo.getOriginalTableName());
    assertEquals(0, tableInfo.getTableIndex());
}

/**
 * ROW_ID resolves to the row-metadata reference of the named table.
 */
@Test
public void testLookupColumnReferenceWithROW_ID() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 t join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    ColumnReference reference = new TableQueryParser("syn123.ROW_ID").columnReference();
    // call under test
    Optional<ColumnReferenceMatch> optionalMatch = mapper.lookupColumnReferenceMatch(reference);
    assertTrue(optionalMatch.isPresent());
    assertEquals(RowMetadataColumnTranslationReference.ROW_ID.getColumnTranslationReference(),
            optionalMatch.get().getColumnTranslationReference());
    TableInfo tableInfo = optionalMatch.get().getTableInfo();
    assertEquals("syn123", tableInfo.getOriginalTableName());
    assertEquals(0, tableInfo.getTableIndex());
}
/**
 * An unknown column yields an empty match.
 */
@Test
public void testLookupColumnReferenceMatchWithMultipleTablesNoMatch() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 t join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    ColumnReference reference = new TableQueryParser("syn456.nothere").columnReference();
    // call under test
    assertEquals(Optional.empty(), mapper.lookupColumnReferenceMatch(reference));
}

/**
 * When the same table appears twice under different aliases, the alias
 * selects the correct occurrence (here the third table in the from clause).
 */
@Test
public void testLookupColumnReferenceMatchWithMultipleAliasOfSameTable() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 t1 join syn456 join syn123 t2")
            .querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    // call under test
    Optional<ColumnReferenceMatch> optionalMatch = mapper
            .lookupColumnReferenceMatch(new TableQueryParser("t2.`has space`").columnReference());
    assertTrue(optionalMatch.isPresent());
    assertEquals(new SchemaColumnTranslationReference(columnMap.get("has space")),
            optionalMatch.get().getColumnTranslationReference());
    TableInfo tableInfo = optionalMatch.get().getTableInfo();
    assertEquals("syn123", tableInfo.getOriginalTableName());
    assertEquals(2, tableInfo.getTableIndex());
}

/**
 * An unqualified column is ambiguous when the query joins multiple tables.
 */
@Test
public void testLookupColumnReferenceMatchWithNullLHSAndMultipleTables() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schemas.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 t join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    ColumnReference reference = new TableQueryParser("foo").columnReference();
    IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> {
        // call under test
        mapper.lookupColumnReferenceMatch(reference);
    });
    assertEquals("Expected a table name or table alias for column: foo", exception.getMessage());
}

/**
 * An unqualified column matches the only table when there is just one.
 */
@Test
public void testLookupColumnReferenceMatchWithNullLHSAndSingle() throws ParseException {
    Map<IdAndVersion, List<ColumnModel>> schemas = new LinkedHashMap<>();
    schemas.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    QuerySpecification query = new TableQueryParser("select * from syn123").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schemas));
    ColumnReference reference = new TableQueryParser("foo").columnReference();
    // call under test
    Optional<ColumnReferenceMatch> optionalMatch = mapper.lookupColumnReferenceMatch(reference);
    assertTrue(optionalMatch.isPresent());
    assertEquals(new SchemaColumnTranslationReference(columnMap.get("foo")),
            optionalMatch.get().getColumnTranslationReference());
    TableInfo tableInfo = optionalMatch.get().getTableInfo();
    assertEquals("syn123", tableInfo.getOriginalTableName());
    assertEquals(0, tableInfo.getTableIndex());
}
@Test
public void testLookupTableNameCorrelationWithSingleTable() throws ParseException {
QuerySpecification model = new TableQueryParser("select * from syn123").querySpecification();
Map<IdAndVersion, List<ColumnModel>> map = new LinkedHashMap<>();
map.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
TableAndColumnMapper mapper = new TableAndColumnMapper(model, new TestSchemaProvider(map));
TableNameCorrelation tableNameCorrelation = new TableQueryParser("syn123").tableNameCorrelation();
// call under test
Optional<TableInfo> optionalMatch = mapper.lookupTableNameCorrelation(tableNameCorrelation);
assertTrue(optionalMatch.isPresent());
TableInfo tableInfo = optionalMatch.get();
assertEquals("syn123", tableInfo.getOriginalTableName());
assertEquals(0, tableInfo.getTableIndex());
}
@Test
public void testLookupTableNameCorrelationWithSingleTableWithMatchingAlias() throws ParseException {
QuerySpecification model = new TableQueryParser("select * from syn123 a").querySpecification();
Map<IdAndVersion, List<ColumnModel>> map = new LinkedHashMap<>();
map.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
TableAndColumnMapper mapper = new TableAndColumnMapper(model, new TestSchemaProvider(map));
TableNameCorrelation tableNameCorrelation = new TableQueryParser("syn123 a").tableNameCorrelation();
// call under test
Optional<TableInfo> optionalMatch = mapper.lookupTableNameCorrelation(tableNameCorrelation);
assertTrue(optionalMatch.isPresent());
TableInfo tableInfo = optionalMatch.get();
assertEquals("syn123", tableInfo.getOriginalTableName());
assertEquals(0, tableInfo.getTableIndex());
}
// alias is not used for lookup
@Test
public void testLookupTableNameCorrelationWithSingleTableWithNonmatchingAlias() throws ParseException {
QuerySpecification model = new TableQueryParser("select * from syn123 r").querySpecification();
Map<IdAndVersion, List<ColumnModel>> map = new LinkedHashMap<>();
map.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
TableAndColumnMapper mapper = new TableAndColumnMapper(model, new TestSchemaProvider(map));
TableNameCorrelation tableNameCorrelation = new TableQueryParser("syn123 _A0").tableNameCorrelation();
// call under test
Optional<TableInfo> optionalMatch = mapper.lookupTableNameCorrelation(tableNameCorrelation);
assertTrue(optionalMatch.isPresent());
TableInfo tableInfo = optionalMatch.get();
assertEquals("syn123", tableInfo.getOriginalTableName());
assertEquals(0, tableInfo.getTableIndex());
}
@Test
public void testLookupTableNameCorrelationWithSingleTableWithTranslated() throws ParseException {
QuerySpecification model = new TableQueryParser("select * from syn123").querySpecification();
Map<IdAndVersion, List<ColumnModel>> map = new LinkedHashMap<>();
map.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
TableAndColumnMapper mapper = new TableAndColumnMapper(model, new TestSchemaProvider(map));
TableNameCorrelation tableNameCorrelation = new TableQueryParser("T123").tableNameCorrelation();
// call under test
Optional<TableInfo> optionalMatch = mapper.lookupTableNameCorrelation(tableNameCorrelation);
assertTrue(optionalMatch.isPresent());
TableInfo tableInfo = optionalMatch.get();
assertEquals("syn123", tableInfo.getOriginalTableName());
assertEquals(0, tableInfo.getTableIndex());
}
@Test
public void testLookupTableNameCorrelationWithSingleTableWithNoMatch() throws ParseException {
    // syn456 does not appear in the query, so the lookup yields an empty Optional.
    Map<IdAndVersion, List<ColumnModel>> schema = new LinkedHashMap<>();
    schema.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    QuerySpecification query = new TableQueryParser("select * from syn123").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schema));
    // call under test
    Optional<TableInfo> result = mapper
            .lookupTableNameCorrelation(new TableQueryParser("syn456").tableNameCorrelation());
    assertEquals(Optional.empty(), result);
}
/**
 * Lookup of the first of two joined tables must match at table index 0.
 * Renamed from the misspelled 'testLookupTableNameCorrelationWithMultipleTablesMatchFrist';
 * JUnit discovers tests via the @Test annotation, not the method name, so the
 * rename has no callers to break.
 */
@Test
public void testLookupTableNameCorrelationWithMultipleTablesMatchFirst() throws ParseException {
    QuerySpecification model = new TableQueryParser("select * from syn123 join syn456").querySpecification();
    Map<IdAndVersion, List<ColumnModel>> map = new LinkedHashMap<>();
    map.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    map.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    TableAndColumnMapper mapper = new TableAndColumnMapper(model, new TestSchemaProvider(map));
    TableNameCorrelation tableNameCorrelation = new TableQueryParser("syn123").tableNameCorrelation();
    // call under test
    Optional<TableInfo> optionalMatch = mapper.lookupTableNameCorrelation(tableNameCorrelation);
    assertTrue(optionalMatch.isPresent());
    TableInfo tableInfo = optionalMatch.get();
    assertEquals("syn123", tableInfo.getOriginalTableName());
    assertEquals(0, tableInfo.getTableIndex());
}
@Test
public void testLookupTableNameCorrelationWithMultipleTablesMatchLast() throws ParseException {
    // With two joined tables, looking up the second one must yield table index 1.
    Map<IdAndVersion, List<ColumnModel>> schema = new LinkedHashMap<>();
    schema.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schema.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schema));
    // call under test
    Optional<TableInfo> result = mapper
            .lookupTableNameCorrelation(new TableQueryParser("syn456").tableNameCorrelation());
    assertTrue(result.isPresent());
    TableInfo info = result.get();
    assertEquals("syn456", info.getOriginalTableName());
    assertEquals(1, info.getTableIndex());
}
@Test
public void testLookupTableNameCorrelationWithMultipleTablesNoMatch() throws ParseException {
    // syn789 is not part of the join, so no correlation can be resolved.
    Map<IdAndVersion, List<ColumnModel>> schema = new LinkedHashMap<>();
    schema.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schema.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schema));
    // call under test
    Optional<TableInfo> result = mapper
            .lookupTableNameCorrelation(new TableQueryParser("syn789").tableNameCorrelation());
    assertEquals(Optional.empty(), result);
}
@Test
public void testLookupTableNameCorrelationWithMultipleTablesSameName() throws ParseException {
    // syn123 appears twice in the join; the alias 'r2' disambiguates the lookup
    // to the third table reference (index 2).
    Map<IdAndVersion, List<ColumnModel>> schema = new LinkedHashMap<>();
    schema.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schema.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 r1 join syn456 join syn123 r2")
            .querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schema));
    // call under test
    Optional<TableInfo> result = mapper
            .lookupTableNameCorrelation(new TableQueryParser("syn123 r2").tableNameCorrelation());
    assertTrue(result.isPresent());
    TableInfo info = result.get();
    assertEquals("syn123", info.getOriginalTableName());
    assertEquals("r2", info.getTableAlias().get());
    assertEquals(2, info.getTableIndex());
}
@Test
public void testLookupTableNameCorrelationWithMultipleTablesMissingAlias() throws ParseException {
    // syn123 appears twice, so a lookup without an alias is ambiguous
    // and must return empty.
    Map<IdAndVersion, List<ColumnModel>> schema = new LinkedHashMap<>();
    schema.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schema.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 r1 join syn456 join syn123 r2")
            .querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schema));
    // call under test
    Optional<TableInfo> result = mapper
            .lookupTableNameCorrelation(new TableQueryParser("syn123").tableNameCorrelation());
    assertEquals(Optional.empty(), result);
}
@Test
public void testgetSingleTableIdWithSingleTable() throws ParseException {
    // A query over exactly one table exposes that table's id.
    Map<IdAndVersion, List<ColumnModel>> schema = new LinkedHashMap<>();
    schema.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    QuerySpecification query = new TableQueryParser("select * from syn123").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schema));
    // call under test
    assertEquals(Optional.of(IdAndVersion.parse("syn123")), mapper.getSingleTableId());
}
@Test
public void testgetSingleTableIdWithMultipleTables() throws ParseException {
    // A join involves more than one table, so there is no single table id.
    Map<IdAndVersion, List<ColumnModel>> schema = new LinkedHashMap<>();
    schema.put(IdAndVersion.parse("syn123"), Arrays.asList(columnMap.get("foo"), columnMap.get("has space")));
    schema.put(IdAndVersion.parse("syn456"), Arrays.asList(columnMap.get("bar"), columnMap.get("foo_bar")));
    QuerySpecification query = new TableQueryParser("select * from syn123 join syn456").querySpecification();
    TableAndColumnMapper mapper = new TableAndColumnMapper(query, new TestSchemaProvider(schema));
    // call under test
    assertEquals(Optional.empty(), mapper.getSingleTableId());
}
}
| |
// Copyright (c) 2014, Facebook, Inc. All rights reserved.
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree. An additional grant
// of patent rights can be found in the PATENTS file in the same directory.
package org.rocksdb.test;
import java.util.HashMap;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import org.rocksdb.*;
/**
 * Standalone smoke test for the RocksDB JNI column-family API: listing,
 * creating and dropping column families, per-family reads/writes/removes,
 * WriteBatch, iterators, properties and multiGet.
 *
 * Run with assertions enabled (java -ea); failures surface as AssertionError.
 *
 * Fixes over the previous revision:
 *  - the iterator-verification loops computed refMap.get(...).equals(...) and
 *    discarded the boolean, so they verified nothing; the comparisons are now
 *    asserted,
 *  - dbOptions wraps native C++ memory and was never disposed; it is now
 *    disposed alongside options.
 */
public class ColumnFamilyTest {
    static final String db_path = "/tmp/rocksdbjni_columnfamily_test";
    static {
        // The native library must be loaded before any RocksDB API call.
        RocksDB.loadLibrary();
    }

    public static void main(String[] args) {
        RocksDB db = null;
        Options options = new Options();
        options.setCreateIfMissing(true);
        DBOptions dbOptions = new DBOptions();
        dbOptions.setCreateIfMissing(true);
        try {
            db = RocksDB.open(options, db_path);
        } catch (RocksDBException e) {
            assert (false);
        }
        // Test listColumnFamilies: a fresh database has only "default".
        List<byte[]> columnFamilyNames;
        try {
            columnFamilyNames = RocksDB.listColumnFamilies(options, db_path);
            if (columnFamilyNames != null && columnFamilyNames.size() > 0) {
                assert (columnFamilyNames.size() == 1);
                assert (new String(columnFamilyNames.get(0)).equals("default"));
            } else {
                assert (false);
            }
        } catch (RocksDBException e) {
            assert (false);
        }
        // Test createColumnFamily
        try {
            db.createColumnFamily(new ColumnFamilyDescriptor("new_cf",
                    new ColumnFamilyOptions()));
        } catch (RocksDBException e) {
            assert (false);
        }
        // Close so the family list can be re-read from disk below.
        if (db != null) {
            db.close();
        }
        // Test listColumnFamilies after create "new_cf"
        try {
            columnFamilyNames = RocksDB.listColumnFamilies(options, db_path);
            if (columnFamilyNames != null && columnFamilyNames.size() > 0) {
                assert (columnFamilyNames.size() == 2);
                assert (new String(columnFamilyNames.get(0)).equals("default"));
                assert (new String(columnFamilyNames.get(1)).equals("new_cf"));
            } else {
                assert (false);
            }
        } catch (RocksDBException e) {
            assert (false);
        }
        // Test open database with column family names; handles are filled
        // into columnFamilyHandleList in the same order as cfNames.
        List<ColumnFamilyDescriptor> cfNames =
                new ArrayList<>();
        List<ColumnFamilyHandle> columnFamilyHandleList =
                new ArrayList<>();
        cfNames.add(new ColumnFamilyDescriptor("default"));
        cfNames.add(new ColumnFamilyDescriptor("new_cf"));
        try {
            db = RocksDB.open(dbOptions, db_path, cfNames, columnFamilyHandleList);
            assert (columnFamilyHandleList.size() == 2);
            // Writes without a handle go to the default family (same as handle 0).
            db.put("dfkey1".getBytes(), "dfvalue".getBytes());
            db.put(columnFamilyHandleList.get(0), "dfkey2".getBytes(),
                    "dfvalue".getBytes());
            db.put(columnFamilyHandleList.get(1), "newcfkey1".getBytes(),
                    "newcfvalue".getBytes());
            String retVal = new String(db.get(columnFamilyHandleList.get(1),
                    "newcfkey1".getBytes()));
            assert (retVal.equals("newcfvalue"));
            // Keys are scoped per family: default keys are invisible in new_cf.
            assert ((db.get(columnFamilyHandleList.get(1),
                    "dfkey1".getBytes())) == null);
            db.remove(columnFamilyHandleList.get(1), "newcfkey1".getBytes());
            assert ((db.get(columnFamilyHandleList.get(1),
                    "newcfkey1".getBytes())) == null);
            db.remove("dfkey2".getBytes());
            assert ((db.get(columnFamilyHandleList.get(0),
                    "dfkey2".getBytes())) == null);
        } catch (RocksDBException e) {
            assert (false);
        }
        // Test create write to and drop ColumnFamily
        ColumnFamilyHandle tmpColumnFamilyHandle = null;
        try {
            tmpColumnFamilyHandle = db.createColumnFamily(
                    new ColumnFamilyDescriptor("tmpCF", new ColumnFamilyOptions()));
            db.put(tmpColumnFamilyHandle, "key".getBytes(), "value".getBytes());
            db.dropColumnFamily(tmpColumnFamilyHandle);
            tmpColumnFamilyHandle.dispose();
        } catch (Exception e) {
            assert (false);
        }
        // Put to disposed column family tmpColumnFamilyHandle must fail
        try {
            db.put(tmpColumnFamilyHandle, "key".getBytes(), "value".getBytes());
            assert (false);
        } catch (RocksDBException e) {
            assert (true);
        }
        // Remove to disposed column family tmpColumnFamilyHandle must fail
        try {
            db.remove(tmpColumnFamilyHandle, "key".getBytes());
            assert (false);
        } catch (RocksDBException e) {
            assert (true);
        }
        // Get on a disposed column family tmpColumnFamilyHandle must fail
        try {
            db.get(tmpColumnFamilyHandle, "key".getBytes());
            assert (false);
        } catch (RocksDBException e) {
            assert (true);
        }
        // Test WriteBatch: mixed default/new_cf puts and removes applied atomically.
        try {
            WriteBatch writeBatch = new WriteBatch();
            WriteOptions writeOpt = new WriteOptions();
            writeBatch.put("key".getBytes(), "value".getBytes());
            writeBatch.put(columnFamilyHandleList.get(1), "newcfkey".getBytes(),
                    "value".getBytes());
            writeBatch.put(columnFamilyHandleList.get(1), "newcfkey2".getBytes(),
                    "value2".getBytes());
            writeBatch.remove("xyz".getBytes());
            writeBatch.remove(columnFamilyHandleList.get(1), "xyz".getBytes());
            db.write(writeOpt, writeBatch);
            writeBatch.dispose();
            assert (db.get(columnFamilyHandleList.get(1),
                    "xyz".getBytes()) == null);
            assert (new String(db.get(columnFamilyHandleList.get(1),
                    "newcfkey".getBytes())).equals("value"));
            assert (new String(db.get(columnFamilyHandleList.get(1),
                    "newcfkey2".getBytes())).equals("value2"));
            assert (new String(db.get("key".getBytes())).equals("value"));
        } catch (Exception e) {
            e.printStackTrace();
            assert (false);
        }
        // Test iterator on column family
        try {
            RocksIterator rocksIterator = db.newIterator(
                    columnFamilyHandleList.get(1));
            rocksIterator.seekToFirst();
            Map<String, String> refMap = new HashMap<String, String>();
            refMap.put("newcfkey", "value");
            refMap.put("newcfkey2", "value2");
            int i = 0;
            while (rocksIterator.isValid()) {
                i++;
                // Fix: the equals() result was previously discarded, so the loop
                // verified nothing. An unexpected key makes refMap.get(...) return
                // null, which fails the assertion via equals(null) == false.
                assert (new String(rocksIterator.value()).equals(
                        refMap.get(new String(rocksIterator.key()))));
                rocksIterator.next();
            }
            assert (i == 2);
            rocksIterator.dispose();
        } catch (Exception e) {
            assert (false);
        }
        // Test property handling on column families
        try {
            assert (db.getProperty("rocksdb.estimate-num-keys") != null);
            assert (db.getProperty("rocksdb.stats") != null);
            assert (db.getProperty(columnFamilyHandleList.get(0),
                    "rocksdb.sstables") != null);
            assert (db.getProperty(columnFamilyHandleList.get(1),
                    "rocksdb.estimate-num-keys") != null);
            assert (db.getProperty(columnFamilyHandleList.get(1),
                    "rocksdb.stats") != null);
            assert (db.getProperty(columnFamilyHandleList.get(1),
                    "rocksdb.sstables") != null);
        } catch (Exception e) {
            assert (false);
        }
        // MultiGet test: one key per family, then two lookups against the same
        // (default) family where only the first key exists.
        List<ColumnFamilyHandle> cfCustomList = new ArrayList<ColumnFamilyHandle>();
        try {
            List<byte[]> keys = new ArrayList<byte[]>();
            keys.add("key".getBytes());
            keys.add("newcfkey".getBytes());
            Map<byte[], byte[]> retValues = db.multiGet(columnFamilyHandleList, keys);
            assert (retValues.size() == 2);
            assert (new String(retValues.get(keys.get(0)))
                    .equals("value"));
            assert (new String(retValues.get(keys.get(1)))
                    .equals("value"));
            cfCustomList.add(columnFamilyHandleList.get(0));
            cfCustomList.add(columnFamilyHandleList.get(0));
            retValues = db.multiGet(cfCustomList, keys);
            assert (retValues.size() == 1);
            assert (new String(retValues.get(keys.get(0)))
                    .equals("value"));
        } catch (RocksDBException e) {
            assert (false);
        }
        // Test multiget without correct number of column
        // families: must throw IllegalArgumentException, not RocksDBException.
        try {
            List<byte[]> keys = new ArrayList<byte[]>();
            keys.add("key".getBytes());
            keys.add("newcfkey".getBytes());
            cfCustomList.remove(1);
            db.multiGet(cfCustomList, keys);
            assert (false);
        } catch (RocksDBException e) {
            assert (false);
        } catch (IllegalArgumentException e) {
            assert (true);
        }
        try {
            // iterate over default key/value pairs
            List<RocksIterator> iterators =
                    db.newIterators(columnFamilyHandleList);
            assert (iterators.size() == 2);
            RocksIterator iter = iterators.get(0);
            iter.seekToFirst();
            Map<String, String> defRefMap = new HashMap<String, String>();
            defRefMap.put("dfkey1", "dfvalue");
            defRefMap.put("key", "value");
            while (iter.isValid()) {
                // Fix: assert the comparison instead of discarding its result.
                assert (new String(iter.value()).equals(
                        defRefMap.get(new String(iter.key()))));
                iter.next();
            }
            // iterate over new_cf key/value pairs
            Map<String, String> cfRefMap = new HashMap<String, String>();
            cfRefMap.put("newcfkey", "value");
            cfRefMap.put("newcfkey2", "value2");
            iter = iterators.get(1);
            iter.seekToFirst();
            while (iter.isValid()) {
                // Fix: assert the comparison instead of discarding its result.
                assert (new String(iter.value()).equals(
                        cfRefMap.get(new String(iter.key()))));
                iter.next();
            }
            // free iterators
            for (RocksIterator iterator : iterators) {
                iterator.dispose();
            }
            assert (true);
        } catch (RocksDBException e) {
            assert (false);
        }
        System.out.println("Passed ColumnFamilyTest");
        // free cf handles before database close
        for (ColumnFamilyHandle columnFamilyHandle : columnFamilyHandleList) {
            columnFamilyHandle.dispose();
        }
        // close database
        db.close();
        // be sure to dispose c++ pointers
        options.dispose();
        // Fix: DBOptions also wraps native memory and was never disposed.
        dbOptions.dispose();
    }
}
| |
/*
* Copyright 2014 http://Bither.net
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.bither.image.glcrop;
import android.app.Activity;
import android.app.Dialog;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.RectF;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.view.KeyEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.Animation;
import android.view.animation.Animation.AnimationListener;
import android.widget.FrameLayout;
import android.widget.FrameLayout.LayoutParams;
import android.widget.ImageButton;
import net.bither.BitherApplication;
import net.bither.R;
import net.bither.animation.FlipAndZoomAnimation;
import net.bither.bitherj.utils.Utils;
import net.bither.runnable.BaseRunnable;
import net.bither.runnable.HandlerMessage;
import net.bither.util.FileUtil;
import net.bither.util.ImageFileUtil;
import net.bither.util.ImageManageUtil;
import net.bither.util.NativeUtil;
import java.io.File;
import java.util.concurrent.CountDownLatch;
/**
 * Base activity that crops a square region of interest from an image and
 * saves it as an avatar in several sizes. The source image comes either from
 * an ACTION_SEND share or from the intent's data URI. Subclasses supply the
 * progress dialog ({@link #getProgressDialog}) and handle the save-success
 * callback ({@link #handleSaveSuccess}).
 */
public abstract class CropImageGlActivityBase extends Activity {
    // Whether the crop highlight is drawn as a circle (rectangle otherwise).
    private boolean mCircleCrop = false;
    // Handler created on the UI thread; used to post view work from workers.
    private final Handler mHandler = new Handler();
    public boolean mWaitingToPick; // Whether we are wait the user to pick a
    // face.
    public boolean mSaving; // Whether the "save" button is already clicked.
    private CropImageView mImageView;
    private FrameLayout flCameraIrisFrame;
    private FrameLayout flFilterImage;
    private FrameLayout flImageContainer;
    private FrameLayout flCamContainer;
    private FrameLayout flFrameToggle;
    private ImageButton ibtnTiltShift;
    private Bitmap mBitmap;
    // Source bitmap, reloaded/rotated via rotateImage()/getOrBitmap().
    private Bitmap orBitmap;
    private Bitmap filterBitmap;
    private ImageButton btnCrop;
    // Accumulated rotation in degrees applied to the source image.
    int orientation = 0;
    // "Rotate clockwise 90 degrees" button.
    private ImageButton IV90R;
    // Z-axis depth used by the flip animation between crop and filter sides.
    private static final float zTransition = 400;
    // The current crop highlight rectangle (set by mRunDrawImageView).
    public HighlightView mCrop;
    private static final int CropSide = 0;
    private static final int FilterSide = 1;
    // Which side of the flip is showing: CropSide or FilterSide.
    private int side = CropSide;
    // Absolute path of the image file being cropped.
    private String fromFileName;
    // Timestamp taken when saving starts; used to name the output files.
    private long timeMillis;
    private Dialog pdSaving;
    private boolean isPaused = false;

    /**
     * Sets up the fullscreen UI, resolves the source image file from the
     * intent (ACTION_SEND stream first, then the data URI), wires the
     * buttons, and kicks off the initial draw. Finishes immediately if no
     * usable source image can be resolved.
     */
    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
        getWindow().clearFlags(
                WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
        setContentView(R.layout.activity_gl_crop_image);
        Intent intent = getIntent();
        // Case 1: image shared into the app via ACTION_SEND.
        if (Utils.compareString(intent.getAction(), Intent.ACTION_SEND)) {
            if (intent.getExtras().containsKey("android.intent.extra.STREAM")) {
                if (BitherApplication.initialActivity != null) {
                    BitherApplication.initialActivity.finish();
                }
                Uri formUri = (Uri) intent.getExtras().get(
                        "android.intent.extra.STREAM");
                File fromFile = FileUtil.convertUriToFile(
                        CropImageGlActivityBase.this, formUri);
                if (fromFile != null) {
                    fromFileName = fromFile.getAbsolutePath();
                }
            }
        }
        // Case 2: fall back to the intent's data URI.
        if (Utils.isEmpty(fromFileName)) {
            Uri formUri = intent.getData();
            if (formUri != null) {
                File fromFile = FileUtil.convertUriToFile(
                        CropImageGlActivityBase.this, formUri);
                if (fromFile != null) {
                    fromFileName = fromFile.getAbsolutePath();
                }
            }
        }
        // No source image: nothing to crop.
        if (Utils.isEmpty(fromFileName)) {
            finish();
            return;
        }
        orBitmap = getOrBitmap();
        if (orBitmap == null) {
            finish();
            return;
        }
        mImageView = (CropImageView) findViewById(R.id.image);
        flCameraIrisFrame = (FrameLayout) findViewById(R.id.fl_camera_iris_frame);
        // Make the iris frame square: height = screen width minus margins.
        LayoutParams lp = (LayoutParams) flCameraIrisFrame
                .getLayoutParams();
        lp.height = ImageManageUtil.getScreenWidth() - lp.rightMargin
                - lp.leftMargin;
        flFilterImage = (FrameLayout) findViewById(R.id.fl_filter_image);
        flImageContainer = (FrameLayout) findViewById(R.id.fl_image_container);
        flCamContainer = (FrameLayout) findViewById(R.id.fl_cam_container);
        flFrameToggle = (FrameLayout) findViewById(R.id.fl_frame_toggle);
        ibtnTiltShift = (ImageButton) findViewById(R.id.ibtn_tilt_shift);
        // Both discard buttons cancel and slide out.
        findViewById(R.id.discard).setOnClickListener(
                new OnClickListener() {
                    public void onClick(View v) {
                        setResult(RESULT_CANCELED);
                        finish();
                        overridePendingTransition(0, R.anim.slide_out_bottom);
                    }
                }
        );
        findViewById(R.id.ibtn_discard).setOnClickListener(
                new OnClickListener() {
                    public void onClick(View v) {
                        setResult(RESULT_CANCELED);
                        finish();
                        overridePendingTransition(0, R.anim.slide_out_bottom);
                    }
                }
        );
        findViewById(R.id.ibtn_save).setOnClickListener(
                new OnClickListener() {
                    public void onClick(View v) {
                        onSaveClicked();
                    }
                }
        );
        IV90R = (ImageButton) findViewById(R.id.image_cw_90r);
        btnCrop = (ImageButton) findViewById(R.id.btn_crop);
        // Rotation is only allowed while on the crop side.
        IV90R.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                if (side == CropSide) {
                    orientation += 90;
                    rotateImage(90);
                    startRedrawImageView();
                }
            }
        });
        btnCrop.setOnClickListener(cropOnClick);
        ibtnTiltShift.setOnClickListener(tiltShiftClick);
        // Honor the EXIF-style orientation stored for the file.
        orientation = FileUtil.getOrientationOfFile(fromFileName);
        rotateImage(orientation);
        startRedrawImageView();
        pdSaving = getProgressDialog(getString(R.string.saving));
        pdSaving.setCancelable(true);
    }

    // No-op in this base class; subclasses may override the listener wiring.
    private OnClickListener tiltShiftClick = new OnClickListener() {
        public void onClick(View v) {
        }
    };

    /**
     * Returns a square bitmap copy of the area selected by the crop
     * highlight, capped at ImageManageUtil.IMAGE_SIZE per side.
     */
    private Bitmap getCropedBitmap() {
        Rect r = mCrop.getCropRect();
        // OpenGl texture must be pow of 2
        // NOTE(review): the code below only forces an even size, not a power
        // of two — confirm which constraint the GL code actually needs.
        int size = Math.min(r.width(), r.height());
        if (size % 2 != 0) {
            size--;
        }
        r.bottom = r.top + size;
        r.right = r.left + size;
        int width = r.width();
        int height = r.height();
        width = Math.min(width,
                ImageManageUtil.IMAGE_SIZE);
        height = Math.min(height,
                ImageManageUtil.IMAGE_SIZE);
        Bitmap croppedImage = Bitmap.createBitmap(width, height,
                Bitmap.Config.ARGB_8888);
        {
            Canvas canvas = new Canvas(croppedImage);
            Rect dstRect = new Rect(0, 0, width, height);
            // The source bitmap may have been released; reload and re-rotate.
            if (orBitmap == null) {
                rotateImage(orientation);
            }
            canvas.drawBitmap(orBitmap, r, dstRect, null);
        }
        return croppedImage;
    }

    // Crop button: drop the current bitmap, flip back to the crop side, and
    // reload/re-rotate the source image into the view.
    private OnClickListener cropOnClick = new OnClickListener() {
        public void onClick(View v) {
            try {
                orBitmap = null;
                turnToCrop();
                rotateImage(orientation);
                mImageView.setImageBitmapResetBase(orBitmap, false);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    };

    /**
     * Reloads the source bitmap (if needed) and replaces it with a copy
     * rotated by {@code rotation} degrees.
     */
    private void rotateImage(int rotation) {
        this.orBitmap = getOrBitmap();
        if (this.orBitmap != null) {
            int width = this.orBitmap.getWidth();
            int hegith = this.orBitmap.getHeight();
            Matrix matrix = new Matrix();
            matrix.postRotate(rotation);
            this.orBitmap = Bitmap.createBitmap(this.orBitmap, 0, 0, width,
                    hegith, matrix, false);
        }
    }

    /**
     * Shows the current bitmap, then on a background thread waits for the
     * UI to center it before running mRunDrawImageView, which installs the
     * default crop highlight.
     */
    private void startRedrawImageView() {
        if (isFinishing()) {
            return;
        }
        mImageView.setImageBitmapResetBase(orBitmap, true);
        BaseRunnable runnable = new BaseRunnable() {
            @Override
            public void run() {
                // Block this worker until the UI thread has centered the image.
                final CountDownLatch latch = new CountDownLatch(1);
                mHandler.post(new Runnable() {
                    public void run() {
                        if (mImageView.getScale() == 1.0f) {
                            mImageView.center(true, true);
                        }
                        latch.countDown();
                    }
                });
                try {
                    latch.await();
                } catch (InterruptedException e) {
                    throw new RuntimeException(e);
                }
                mRunDrawImageView.run();
            }
        };
        runnable.setHandler(mHandler);
        new Thread(runnable).start();
    }

    @Override
    protected void onResume() {
        isPaused = false;
        super.onResume();
    }

    @Override
    protected void onPause() {
        isPaused = true;
        super.onPause();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        completeRecycle();
    }

    /**
     * Releases bitmap references held by this activity.
     * NOTE(review): despite the name, this only nulls the references (and
     * clears the image view); it never calls Bitmap.recycle() — confirm
     * whether explicit recycling was intended here.
     */
    private void completeRecycle() {
        if (mImageView != null) {
            mImageView.clear();
        }
        if (mBitmap != null && !mBitmap.isRecycled()
                && (side == CropSide)) {
            mBitmap = null;
        }
        if (orBitmap != null && !orBitmap.isRecycled()) {
            orBitmap = null;
        }
        if (filterBitmap != null && !filterBitmap.isRecycled()
                && (side == CropSide)) {
            filterBitmap = null;
        }
    }

    /**
     * Save button handler: crops the selected square, then compresses and
     * writes the avatar files on a background thread while showing the
     * progress dialog. Guarded against re-entry by mSaving.
     */
    private void onSaveClicked() {
        try {
            if (mSaving) {
                return;
            }
            // On the crop side a highlight is required before saving.
            if (side == CropSide && mCrop == null) {
                return;
            }
            mSaving = true;
            timeMillis = System.currentTimeMillis();
            final Bitmap croppedImage;
            final Rect imagePlace = new Rect();
            final String photoName = ImageFileUtil.getAvatarFileName(timeMillis);
            croppedImage = getCropedBitmap();
            // Compute the on-screen rectangle of the cropped region from the
            // image view's matrix.
            // NOTE(review): imagePlace is fully computed below but never read
            // afterwards — candidate for removal.
            Rect crop = mCrop.getCropRect();
            int[] location = new int[2];
            flImageContainer.getLocationOnScreen(location);
            Matrix imageMatrix = mImageView.getImageMatrix();
            Rect bitmapRect = mImageView.getDrawable().copyBounds();
            float[] values = new float[9];
            imageMatrix.getValues(values);
            float visualWidth = bitmapRect.width() * values[0];
            float visualHeight = bitmapRect.height() * values[0];
            Rect drawRect = new Rect((int) values[2], (int) values[5],
                    (int) (values[2] + visualWidth),
                    (int) (values[5] + visualHeight));
            imagePlace.left = (int) (location[0] + drawRect.left + crop.left
                    * values[0]);
            imagePlace.top = (int) (location[1] + drawRect.top + crop.top
                    * values[0]);
            imagePlace.right = imagePlace.left
                    + (int) (crop.width() * values[0]);
            imagePlace.bottom = imagePlace.top
                    + (int) (crop.height() * values[0]);
            SaveRunnable save = new SaveRunnable(croppedImage, photoName
            );
            save.setHandler(new Handler() {
                @Override
                public void dispatchMessage(Message msg) {
                    switch (msg.what) {
                        case HandlerMessage.MSG_PREPARE:
                            pdSaving.show();
                            break;
                        case HandlerMessage.MSG_SUCCESS:
                            mSaving = false;
                            pdSaving.dismiss();
                            handleSaveSuccess(photoName);
                            break;
                        case HandlerMessage.MSG_FAILURE:
                            mSaving = false;
                            pdSaving.dismiss();
                            break;
                    }
                }
            });
            new Thread(save).start();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Background task that writes the cropped bitmap to the upload, avatar
     * and small-avatar files, reporting progress/success/failure through
     * HandlerMessage codes.
     */
    private static class SaveRunnable extends BaseRunnable {
        private Bitmap b;
        private String photoName;

        public SaveRunnable(Bitmap bmp, String photoName) {
            this.b = bmp;
            this.photoName = photoName;
        }

        @Override
        public void run() {
            try {
                obtainMessage(HandlerMessage.MSG_PREPARE);
                File file = ImageFileUtil.getUploadAvatarFile(photoName);
                NativeUtil.compressBitmap(b, file.getAbsolutePath(), true);
                file = ImageFileUtil.getAvatarFile(photoName);
                NativeUtil.compressBitmap(b, file.getAbsolutePath(), true);
                file = ImageFileUtil.getSmallAvatarFile(photoName);
                Bitmap smallBit = ImageManageUtil.getMatrixBitmap(b,
                        ImageManageUtil.IMAGE_SMALL_SIZE, ImageManageUtil.IMAGE_SMALL_SIZE, false);
                NativeUtil.compressBitmap(smallBit, file.getAbsolutePath(), true);
                obtainMessage(HandlerMessage.MSG_SUCCESS);
            } catch (Exception e) {
                e.printStackTrace();
                obtainMessage(HandlerMessage.MSG_FAILURE);
            }
        }
    }

    // Starts the first half of the flip animation back to the crop side.
    private void turnToCrop() {
        applyRotation(flImageContainer, 0, -90, 0, zTransition, toCropPreRotate);
        btnCrop.setClickable(false);
        ibtnTiltShift.setVisibility(View.GONE);
    }

    /**
     * Lazily loads the source bitmap from fromFileName, scaled to
     * ImageManageUtil.IMAGE_SIZE. Finishes the activity and returns null
     * when the file cannot be decoded.
     */
    private Bitmap getOrBitmap() {
        try {
            if (orBitmap == null || orBitmap.isRecycled()) {
                if (!Utils.isEmpty(fromFileName)) {
                    orBitmap = ImageManageUtil
                            .getBitmapNearestSize(new File(fromFileName),
                                    ImageManageUtil.IMAGE_SIZE);
                    if (orBitmap == null) {
                        finish();
                        return null;
                    }
                    orBitmap = ImageManageUtil
                            .getMatrixBitmap(
                                    orBitmap,
                                    ImageManageUtil.IMAGE_SIZE,
                                    ImageManageUtil.IMAGE_SIZE,
                                    true
                            );
                }
            }
            return orBitmap;
        } catch (Exception e) {
            e.printStackTrace();
            finish();
            return null;
        }
    }

    /**
     * Runs a 300ms flip-and-zoom animation on {@code view} around its
     * center, from {@code startDegree}/{@code fromZ} to
     * {@code endDegree}/{@code toZ}.
     */
    private void applyRotation(View view, float startDegree, float endDegree,
                               float fromZ, float toZ, AnimationListener listener) {
        final float centerX = view.getWidth() / 2.0f;
        final float centerY = view.getHeight() / 2.0f;
        final FlipAndZoomAnimation rotation = new FlipAndZoomAnimation(
                startDegree, endDegree, fromZ, toZ, centerX, centerY);
        rotation.setDuration(300);
        rotation.setFillAfter(true);
        rotation.setInterpolator(new AccelerateInterpolator());
        if (listener != null) {
            rotation.setAnimationListener(listener);
        }
        view.startAnimation(rotation);
    }

    // First half of the flip to the crop side; on completion it chains the
    // second half (90 -> 0 degrees) ending in PostRotate(CropSide).
    private AnimationListener toCropPreRotate = new AnimationListener() {
        public void onAnimationStart(Animation animation) {
            mImageView.setTouchable(false);
        }

        public void onAnimationRepeat(Animation animation) {
        }

        public void onAnimationEnd(Animation animation) {
            mImageView.bringToFront();
            applyRotation(flImageContainer, 90, 0, zTransition, 0,
                    new PostRotate(CropSide));
        }
    };

    /**
     * Finalizes a flip animation: records which side is now showing and
     * toggles the buttons/touchability accordingly.
     */
    private class PostRotate implements AnimationListener {
        private int side;

        public PostRotate(int side) {
            this.side = side;
        }

        public void onAnimationStart(Animation animation) {
        }

        public void onAnimationRepeat(Animation animation) {
        }

        public void onAnimationEnd(Animation animation) {
            CropImageGlActivityBase.this.side = side;
            if (side == CropSide) {
                mImageView.setTouchable(true);
                IV90R.setClickable(true);
                IV90R.setVisibility(View.VISIBLE);
                btnCrop.setVisibility(View.GONE);
            } else if (side == FilterSide) {
                btnCrop.setClickable(true);
                mImageView.setTouchable(false);
                IV90R.setVisibility(View.GONE);
                btnCrop.setVisibility(View.VISIBLE);
                ibtnTiltShift.setVisibility(View.VISIBLE);
                mImageView.setImageBitmapResetBase(null, true);
            }
        }
    }

    // Back key finishes with the slide-out transition (then falls through to
    // the default handling).
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        if (keyCode == KeyEvent.KEYCODE_BACK) {
            finish();
            overridePendingTransition(0, R.anim.slide_out_bottom);
        }
        return super.onKeyDown(keyCode, event);
    }

    // NOTE(review): stray semicolon below (harmless empty declaration).
    ;

    // Computes the image matrix and posts creation of the default crop
    // highlight back to the UI thread.
    Runnable mRunDrawImageView = new Runnable() {
        float mScale = 1F;
        Matrix mImageMatrix;

        // Create a default HightlightView if we found no face in the picture.
        private void makeDefault() {
            HighlightView hv = new HighlightView(mImageView);
            int width = orBitmap.getWidth();
            int height = orBitmap.getHeight();
            Rect imageRect = new Rect(0, 0, width, height);
            // CR: sentences!
            // make the default size about 4/5 of the width or height
            // NOTE(review): the crop square below is actually the full
            // min(width, height), not 4/5 of it — comment and code disagree.
            int cropWidth = Math.min(width, height);
            int cropHeight = cropWidth;
            int x = (width - cropWidth) / 2;
            int y = (height - cropHeight) / 2;
            RectF cropRect = new RectF(x, y, x + cropWidth, y + cropHeight);
            hv.setup(mImageMatrix, imageRect, cropRect, mCircleCrop, false);
            mImageView.add(hv);
        }

        public void run() {
            mImageMatrix = mImageView.getImageMatrix();
            // NOTE(review): mScale is inverted on every run but never read
            // elsewhere — purpose unclear, confirm before removing.
            mScale = 1.0F / mScale;
            mHandler.post(new Runnable() {
                public void run() {
                    makeDefault();
                    mImageView.invalidate();
                    if (mImageView.mHighlightViews.size() == 1) {
                        mCrop = mImageView.mHighlightViews.get(0);
                        mCrop.setFocus(true);
                    }
                }
            });
        }
    };

    // Called on the UI thread after the avatar files were written successfully.
    protected abstract void handleSaveSuccess(String photoName);

    // Supplies the modal progress dialog shown while saving.
    protected abstract Dialog getProgressDialog(String msg);
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive.metastore.thrift;
import com.facebook.presto.common.predicate.Domain;
import com.facebook.presto.common.type.Type;
import com.facebook.presto.hive.SchemaAlreadyExistsException;
import com.facebook.presto.hive.TableAlreadyExistsException;
import com.facebook.presto.hive.metastore.Column;
import com.facebook.presto.hive.metastore.HivePrivilegeInfo;
import com.facebook.presto.hive.metastore.MetastoreUtil;
import com.facebook.presto.hive.metastore.PartitionStatistics;
import com.facebook.presto.hive.metastore.PartitionWithStatistics;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.SchemaNotFoundException;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.TableNotFoundException;
import com.facebook.presto.spi.security.PrestoPrincipal;
import com.facebook.presto.spi.security.RoleGrant;
import com.facebook.presto.spi.statistics.ColumnStatisticType;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.Table;
import javax.annotation.concurrent.GuardedBy;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.URI;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import static com.facebook.presto.hive.HiveBasicStatistics.createEmptyStatistics;
import static com.facebook.presto.hive.metastore.MetastoreUtil.convertPredicateToParts;
import static com.facebook.presto.hive.metastore.MetastoreUtil.toPartitionValues;
import static com.facebook.presto.hive.metastore.thrift.ThriftMetastoreUtil.toMetastoreApiPartition;
import static com.facebook.presto.spi.StandardErrorCode.SCHEMA_NOT_EMPTY;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.io.MoreFiles.deleteRecursively;
import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
import static java.util.Locale.US;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;
import static org.apache.hadoop.hive.metastore.TableType.EXTERNAL_TABLE;
import static org.apache.hadoop.hive.metastore.TableType.MANAGED_TABLE;
import static org.apache.hadoop.hive.metastore.TableType.VIRTUAL_VIEW;
/**
 * In-memory {@code HiveMetastore} backed by plain {@link HashMap}s plus a scratch
 * directory on the local filesystem for database/table data. Intended for tests.
 * All map-touching methods are {@code synchronized} on this instance; role and
 * privilege operations are unsupported and throw.
 */
public class InMemoryHiveMetastore
        implements HiveMetastore
{
    // schema name -> database definition
    @GuardedBy("this")
    private final Map<String, Database> databases = new HashMap<>();
    // every relation (tables AND views), keyed by schema-qualified name
    @GuardedBy("this")
    private final Map<SchemaTableName, Table> relations = new HashMap<>();
    // subset of relations that are VIRTUAL_VIEWs (same Table instance as in relations)
    @GuardedBy("this")
    private final Map<SchemaTableName, Table> views = new HashMap<>();
    @GuardedBy("this")
    private final Map<PartitionName, Partition> partitions = new HashMap<>();
    // table-level statistics, keyed by schema-qualified table name
    @GuardedBy("this")
    private final Map<SchemaTableName, PartitionStatistics> columnStatistics = new HashMap<>();
    // partition-level statistics
    @GuardedBy("this")
    private final Map<PartitionName, PartitionStatistics> partitionColumnStatistics = new HashMap<>();
    @GuardedBy("this")
    private final Map<PrincipalTableKey, Set<HivePrivilegeInfo>> tablePrivileges = new HashMap<>();
    // root under which all managed database/table directories must live
    private final File baseDirectory;

    /**
     * Creates the metastore rooted at {@code baseDirectory}. The directory must not
     * yet exist; it is created here and ownership is assumed by this instance.
     */
    public InMemoryHiveMetastore(File baseDirectory)
    {
        this.baseDirectory = requireNonNull(baseDirectory, "baseDirectory is null");
        checkArgument(!baseDirectory.exists(), "Base directory already exists");
        checkArgument(baseDirectory.mkdirs(), "Could not create base directory");
    }

    /**
     * Registers a database and creates its directory on disk. If the database has no
     * location, a directory named {@code <name>.db} under the base directory is used
     * (Hive's default convention) and the stored copy gets that location.
     *
     * @throws SchemaAlreadyExistsException if a database of that name already exists
     */
    @Override
    public synchronized void createDatabase(Database database)
    {
        requireNonNull(database, "database is null");

        File directory;
        if (database.getLocationUri() != null) {
            directory = new File(URI.create(database.getLocationUri()));
        }
        else {
            // use Hive default naming convention
            directory = new File(baseDirectory, database.getName() + ".db");
            // copy before mutating so the caller's instance is untouched
            database = database.deepCopy();
            database.setLocationUri(directory.toURI().toString());
        }

        checkArgument(!directory.exists(), "Database directory already exists");
        checkArgument(isParentDir(directory, baseDirectory), "Database directory must be inside of the metastore base directory");
        checkArgument(directory.mkdirs(), "Could not create database directory");

        if (databases.putIfAbsent(database.getName(), database) != null) {
            throw new SchemaAlreadyExistsException(database.getName());
        }
    }

    /**
     * Removes an empty database.
     * NOTE(review): the database directory on disk is NOT deleted here - confirm
     * whether callers rely on that.
     *
     * @throws SchemaNotFoundException if the database does not exist
     * @throws PrestoException with {@code SCHEMA_NOT_EMPTY} if it still has tables
     */
    @Override
    public synchronized void dropDatabase(String databaseName)
    {
        if (!databases.containsKey(databaseName)) {
            throw new SchemaNotFoundException(databaseName);
        }
        if (!getAllTables(databaseName).orElse(ImmutableList.of()).isEmpty()) {
            throw new PrestoException(SCHEMA_NOT_EMPTY, "Schema not empty: " + databaseName);
        }
        databases.remove(databaseName);
    }

    /**
     * Replaces a database definition; when the name changes, all dependent keys
     * (relations, views, partitions, table privileges) are rewritten to the new
     * schema name.
     */
    @Override
    public synchronized void alterDatabase(String databaseName, Database newDatabase)
    {
        String newDatabaseName = newDatabase.getName();

        // same name: simple in-place replacement
        if (databaseName.equals(newDatabaseName)) {
            if (databases.replace(databaseName, newDatabase) == null) {
                throw new SchemaNotFoundException(databaseName);
            }
            return;
        }

        Database database = databases.get(databaseName);
        if (database == null) {
            throw new SchemaNotFoundException(databaseName);
        }
        if (databases.putIfAbsent(newDatabaseName, database) != null) {
            throw new SchemaAlreadyExistsException(newDatabaseName);
        }
        databases.remove(databaseName);

        // move every dependent entry under the new schema name
        rewriteKeys(relations, name -> new SchemaTableName(newDatabaseName, name.getTableName()));
        rewriteKeys(views, name -> new SchemaTableName(newDatabaseName, name.getTableName()));
        rewriteKeys(partitions, name -> name.withSchemaName(newDatabaseName));
        rewriteKeys(tablePrivileges, name -> name.withDatabase(newDatabaseName));
    }

    @Override
    public synchronized List<String> getAllDatabases()
    {
        return ImmutableList.copyOf(databases.keySet());
    }

    /**
     * Registers a table. Managed/external tables must already have an existing
     * storage directory (managed ones inside the base directory); views must have a
     * null storage location. A deep copy of the table is stored.
     *
     * @throws TableAlreadyExistsException if the name is taken
     * @throws UnsupportedOperationException if the table carries privileges
     */
    @Override
    public synchronized void createTable(Table table)
    {
        TableType tableType = TableType.valueOf(table.getTableType());
        checkArgument(EnumSet.of(MANAGED_TABLE, EXTERNAL_TABLE, VIRTUAL_VIEW).contains(tableType), "Invalid table type: %s", tableType);

        if (tableType == VIRTUAL_VIEW) {
            checkArgument(table.getSd().getLocation() == null, "Storage location for view must be null");
        }
        else {
            File directory = new File(new Path(table.getSd().getLocation()).toUri());
            checkArgument(directory.exists(), "Table directory does not exist");
            if (tableType == MANAGED_TABLE) {
                checkArgument(isParentDir(directory, baseDirectory), "Table directory must be inside of the metastore base directory");
            }
        }

        SchemaTableName schemaTableName = new SchemaTableName(table.getDbName(), table.getTableName());
        Table tableCopy = table.deepCopy();

        if (relations.putIfAbsent(schemaTableName, tableCopy) != null) {
            throw new TableAlreadyExistsException(schemaTableName);
        }

        if (tableType == VIRTUAL_VIEW) {
            views.put(schemaTableName, tableCopy);
        }

        PrincipalPrivilegeSet privileges = table.getPrivileges();
        if (privileges != null) {
            // privilege management is not implemented in this test metastore
            throw new UnsupportedOperationException();
        }
    }

    /**
     * Drops a table (and its view entry and partitions). When {@code deleteData} is
     * set and the table is managed, the table and partition directories are deleted
     * recursively from disk.
     *
     * @throws TableNotFoundException if the table does not exist
     */
    @Override
    public synchronized void dropTable(String databaseName, String tableName, boolean deleteData)
    {
        // collect data locations BEFORE removing the metadata they are derived from
        List<String> locations = listAllDataPaths(this, databaseName, tableName);

        SchemaTableName schemaTableName = new SchemaTableName(databaseName, tableName);
        Table table = relations.remove(schemaTableName);
        if (table == null) {
            throw new TableNotFoundException(schemaTableName);
        }
        views.remove(schemaTableName);
        partitions.keySet().removeIf(partitionName -> partitionName.matches(databaseName, tableName));

        // remove data
        if (deleteData && table.getTableType().equals(MANAGED_TABLE.name())) {
            for (String location : locations) {
                if (location != null) {
                    File directory = new File(new Path(location).toUri());
                    checkArgument(isParentDir(directory, baseDirectory), "Table directory must be inside of the metastore base directory");
                    deleteDirectory(directory);
                }
            }
        }
    }

    /**
     * Returns the table location plus all partition locations that are not nested
     * under the table location.
     */
    private static List<String> listAllDataPaths(HiveMetastore metastore, String schemaName, String tableName)
    {
        ImmutableList.Builder<String> locations = ImmutableList.builder();
        Table table = metastore.getTable(schemaName, tableName).get();
        if (table.getSd().getLocation() != null) {
            // For unpartitioned table, there should be nothing directly under this directory.
            // But including this location in the set makes the directory content assert more
            // extensive, which is desirable.
            locations.add(table.getSd().getLocation());
        }

        Optional<List<String>> partitionNames = metastore.getPartitionNames(schemaName, tableName);
        if (partitionNames.isPresent()) {
            metastore.getPartitionsByNames(schemaName, tableName, partitionNames.get()).stream()
                    .map(partition -> partition.getSd().getLocation())
                    .filter(location -> !location.startsWith(table.getSd().getLocation()))
                    .forEach(locations::add);
        }

        return locations.build();
    }

    /**
     * Replaces a table definition, supporting renames.
     * NOTE(review): unlike {@code alterDatabase}, a rename here does NOT rewrite the
     * {@code views}, {@code partitions} or statistics keys to the new name - confirm
     * whether that is intentional for the tests using this class.
     */
    @Override
    public synchronized void alterTable(String databaseName, String tableName, Table newTable)
    {
        SchemaTableName oldName = new SchemaTableName(databaseName, tableName);
        SchemaTableName newName = new SchemaTableName(newTable.getDbName(), newTable.getTableName());

        // if the name did not change, this is a simple schema change
        if (oldName.equals(newName)) {
            if (relations.replace(oldName, newTable) == null) {
                throw new TableNotFoundException(oldName);
            }
            return;
        }

        // remove old table definition and add the new one
        Table table = relations.get(oldName);
        if (table == null) {
            throw new TableNotFoundException(oldName);
        }

        if (relations.putIfAbsent(newName, newTable) != null) {
            throw new TableAlreadyExistsException(newName);
        }
        relations.remove(oldName);
    }

    /**
     * Lists all table names (including views) in a schema. Always present; an
     * unknown schema yields an empty list.
     */
    @Override
    public synchronized Optional<List<String>> getAllTables(String databaseName)
    {
        ImmutableList.Builder<String> tables = ImmutableList.builder();
        for (SchemaTableName schemaTableName : this.relations.keySet()) {
            if (schemaTableName.getSchemaName().equals(databaseName)) {
                tables.add(schemaTableName.getTableName());
            }
        }
        return Optional.of(tables.build());
    }

    /**
     * Lists all view names in a schema. Always present; an unknown schema yields an
     * empty list.
     */
    @Override
    public synchronized Optional<List<String>> getAllViews(String databaseName)
    {
        ImmutableList.Builder<String> tables = ImmutableList.builder();
        for (SchemaTableName schemaTableName : this.views.keySet()) {
            if (schemaTableName.getSchemaName().equals(databaseName)) {
                tables.add(schemaTableName.getTableName());
            }
        }
        return Optional.of(tables.build());
    }

    @Override
    public synchronized Optional<Database> getDatabase(String databaseName)
    {
        return Optional.ofNullable(databases.get(databaseName));
    }

    /**
     * Adds partitions together with their statistics; existing entries for the same
     * partition name are overwritten.
     */
    @Override
    public synchronized void addPartitions(String databaseName, String tableName, List<PartitionWithStatistics> partitionsWithStatistics)
    {
        for (PartitionWithStatistics partitionWithStatistics : partitionsWithStatistics) {
            Partition partition = toMetastoreApiPartition(partitionWithStatistics.getPartition());
            if (partition.getParameters() == null) {
                // normalize: downstream code expects a non-null parameter map
                partition.setParameters(ImmutableMap.of());
            }
            PartitionName partitionKey = PartitionName.partition(databaseName, tableName, partitionWithStatistics.getPartitionName());
            partitions.put(partitionKey, partition);
            partitionColumnStatistics.put(partitionKey, partitionWithStatistics.getStatistics());
        }
    }

    /**
     * Drops the partition whose value list equals {@code parts}. {@code deleteData}
     * is ignored: no partition data is removed from disk here.
     */
    @Override
    public synchronized void dropPartition(String databaseName, String tableName, List<String> parts, boolean deleteData)
    {
        partitions.entrySet().removeIf(entry ->
                entry.getKey().matches(databaseName, tableName) && entry.getValue().getValues().equals(parts));
    }

    /**
     * Replaces a partition and its statistics (same key semantics as
     * {@link #addPartitions}).
     */
    @Override
    public synchronized void alterPartition(String databaseName, String tableName, PartitionWithStatistics partitionWithStatistics)
    {
        Partition partition = toMetastoreApiPartition(partitionWithStatistics.getPartition());
        if (partition.getParameters() == null) {
            partition.setParameters(ImmutableMap.of());
        }
        PartitionName partitionKey = PartitionName.partition(databaseName, tableName, partitionWithStatistics.getPartitionName());
        partitions.put(partitionKey, partition);
        partitionColumnStatistics.put(partitionKey, partitionWithStatistics.getStatistics());
    }

    @Override
    public synchronized Optional<List<String>> getPartitionNames(String databaseName, String tableName)
    {
        return Optional.of(ImmutableList.copyOf(partitions.entrySet().stream()
                .filter(entry -> entry.getKey().matches(databaseName, tableName))
                .map(entry -> entry.getKey().getPartitionName())
                .collect(toList())));
    }

    /**
     * Looks up a partition by its value list; the result is a deep copy, so callers
     * may mutate it freely.
     */
    @Override
    public synchronized Optional<Partition> getPartition(String databaseName, String tableName, List<String> partitionValues)
    {
        PartitionName name = PartitionName.partition(databaseName, tableName, partitionValues);
        Partition partition = partitions.get(name);
        if (partition == null) {
            return Optional.empty();
        }
        return Optional.of(partition.deepCopy());
    }

    /**
     * Returns partition names whose values match {@code parts}; an empty string in
     * {@code parts} acts as a wildcard for that position (see
     * {@link #partitionMatches}).
     */
    @Override
    public synchronized Optional<List<String>> getPartitionNamesByParts(String databaseName, String tableName, List<String> parts)
    {
        return Optional.of(partitions.entrySet().stream()
                .filter(entry -> partitionMatches(entry.getValue(), databaseName, tableName, parts))
                .map(entry -> entry.getKey().getPartitionName())
                .collect(toList()));
    }

    /**
     * Predicate-based lookup: converts the predicates to a parts list and delegates
     * to {@link #getPartitionNamesByParts}.
     */
    @Override
    public List<String> getPartitionNamesByFilter(String databaseName, String tableName, Map<Column, Domain> partitionPredicates)
    {
        List<String> parts = convertPredicateToParts(partitionPredicates);
        return getPartitionNamesByParts(databaseName, tableName, parts).orElse(ImmutableList.of());
    }

    /**
     * Tests whether a partition belongs to the table and its values match
     * {@code parts} position by position; an empty part matches any value.
     */
    private static boolean partitionMatches(Partition partition, String databaseName, String tableName, List<String> parts)
    {
        if (!partition.getDbName().equals(databaseName) ||
                !partition.getTableName().equals(tableName)) {
            return false;
        }
        List<String> values = partition.getValues();
        if (values.size() != parts.size()) {
            return false;
        }
        for (int i = 0; i < values.size(); i++) {
            String part = parts.get(i);
            if (!part.isEmpty() && !values.get(i).equals(part)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Resolves each name to a partition (deep copies). If ANY name is missing the
     * whole result is an empty list, not a partial one.
     */
    @Override
    public synchronized List<Partition> getPartitionsByNames(String databaseName, String tableName, List<String> partitionNames)
    {
        ImmutableList.Builder<Partition> builder = ImmutableList.builder();
        for (String name : partitionNames) {
            PartitionName partitionName = PartitionName.partition(databaseName, tableName, name);
            Partition partition = partitions.get(partitionName);
            if (partition == null) {
                return ImmutableList.of();
            }
            builder.add(partition.deepCopy());
        }
        return builder.build();
    }

    /**
     * Looks up a table.
     * NOTE: returns the stored instance, not a copy (unlike {@code getPartition}).
     */
    @Override
    public synchronized Optional<Table> getTable(String databaseName, String tableName)
    {
        SchemaTableName schemaTableName = new SchemaTableName(databaseName, tableName);
        return Optional.ofNullable(relations.get(schemaTableName));
    }

    @Override
    public Set<ColumnStatisticType> getSupportedColumnStatistics(Type type)
    {
        return MetastoreUtil.getSupportedColumnStatistics(type);
    }

    /**
     * Returns the stored table statistics, or empty statistics if none were ever
     * recorded (never null).
     */
    @Override
    public synchronized PartitionStatistics getTableStatistics(String databaseName, String tableName)
    {
        SchemaTableName schemaTableName = new SchemaTableName(databaseName, tableName);
        PartitionStatistics statistics = columnStatistics.get(schemaTableName);
        if (statistics == null) {
            statistics = new PartitionStatistics(createEmptyStatistics(), ImmutableMap.of());
        }
        return statistics;
    }

    /**
     * Returns statistics for each requested partition, substituting empty statistics
     * for partitions with none recorded.
     */
    @Override
    public synchronized Map<String, PartitionStatistics> getPartitionStatistics(String databaseName, String tableName, Set<String> partitionNames)
    {
        ImmutableMap.Builder<String, PartitionStatistics> result = ImmutableMap.builder();
        for (String partitionName : partitionNames) {
            PartitionName partitionKey = PartitionName.partition(databaseName, tableName, partitionName);
            PartitionStatistics statistics = partitionColumnStatistics.get(partitionKey);
            if (statistics == null) {
                statistics = new PartitionStatistics(createEmptyStatistics(), ImmutableMap.of());
            }
            result.put(partitionName, statistics);
        }
        return result.build();
    }

    /**
     * Applies {@code update} to the current table statistics (empty if absent) and
     * stores the result.
     */
    @Override
    public synchronized void updateTableStatistics(String databaseName, String tableName, Function<PartitionStatistics, PartitionStatistics> update)
    {
        columnStatistics.put(new SchemaTableName(databaseName, tableName), update.apply(getTableStatistics(databaseName, tableName)));
    }

    /**
     * Applies {@code update} to the current statistics of one partition (empty if
     * absent) and stores the result.
     */
    @Override
    public synchronized void updatePartitionStatistics(String databaseName, String tableName, String partitionName, Function<PartitionStatistics, PartitionStatistics> update)
    {
        PartitionName partitionKey = PartitionName.partition(databaseName, tableName, partitionName);
        partitionColumnStatistics.put(partitionKey, update.apply(getPartitionStatistics(databaseName, tableName, ImmutableSet.of(partitionName)).get(partitionName)));
    }

    // --- role and privilege management is intentionally unsupported below ---

    @Override
    public void createRole(String role, String grantor)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void dropRole(String role)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Set<String> listRoles()
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void grantRoles(Set<String> roles, Set<PrestoPrincipal> grantees, boolean withAdminOption, PrestoPrincipal grantor)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void revokeRoles(Set<String> roles, Set<PrestoPrincipal> grantees, boolean adminOptionFor, PrestoPrincipal grantor)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Set<RoleGrant> listRoleGrants(PrestoPrincipal principal)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Set<HivePrivilegeInfo> listTablePrivileges(String databaseName, String tableName, PrestoPrincipal principal)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void grantTablePrivileges(String databaseName, String tableName, PrestoPrincipal grantee, Set<HivePrivilegeInfo> privileges)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void revokeTablePrivileges(String databaseName, String tableName, PrestoPrincipal grantee, Set<HivePrivilegeInfo> privileges)
    {
        throw new UnsupportedOperationException();
    }

    /**
     * Returns true if {@code baseDirectory} is an ancestor (strictly above) of
     * {@code directory}.
     */
    private static boolean isParentDir(File directory, File baseDirectory)
    {
        for (File parent = directory.getParentFile(); parent != null; parent = parent.getParentFile()) {
            if (parent.equals(baseDirectory)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Recursively deletes a directory, wrapping any {@link IOException} as
     * unchecked.
     */
    private static void deleteDirectory(File dir)
    {
        try {
            deleteRecursively(dir.toPath(), ALLOW_INSECURE);
        }
        catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }

    /**
     * Key for the partition maps: schema and table are lower-cased; equality is on
     * schema + table + partition values only.
     */
    private static class PartitionName
    {
        private final String schemaName;
        private final String tableName;
        private final List<String> partitionValues;
        // nullable when constructed from values only; does not participate in equals and hashValue
        private final String partitionName;

        private PartitionName(String schemaName, String tableName, List<String> partitionValues, String partitionName)
        {
            this.schemaName = requireNonNull(schemaName, "schemaName is null").toLowerCase(US);
            this.tableName = requireNonNull(tableName, "tableName is null").toLowerCase(US);
            this.partitionValues = requireNonNull(partitionValues, "partitionValues is null");
            this.partitionName = partitionName;
        }

        /** Builds a key from a Hive partition-name string, deriving the value list from it. */
        public static PartitionName partition(String schemaName, String tableName, String partitionName)
        {
            return new PartitionName(schemaName.toLowerCase(US), tableName.toLowerCase(US), toPartitionValues(partitionName), partitionName);
        }

        /** Builds a lookup-only key from partition values; the name string is left null. */
        public static PartitionName partition(String schemaName, String tableName, List<String> partitionValues)
        {
            return new PartitionName(schemaName.toLowerCase(US), tableName.toLowerCase(US), partitionValues, null);
        }

        /** @throws NullPointerException if this key was built from values only */
        public String getPartitionName()
        {
            return requireNonNull(partitionName, "partitionName is null");
        }

        public boolean matches(String schemaName, String tableName)
        {
            return this.schemaName.equals(schemaName) &&
                    this.tableName.equals(tableName);
        }

        // NOTE(review): NPEs when this key was built from values only (null partitionName) - confirm callers
        public boolean matches(String schemaName, String tableName, String partitionName)
        {
            return this.schemaName.equals(schemaName) &&
                    this.tableName.equals(tableName) &&
                    this.partitionName.equals(partitionName);
        }

        /** Copy of this key under a different schema (used by database rename). */
        public PartitionName withSchemaName(String schemaName)
        {
            return new PartitionName(schemaName, tableName, partitionValues, partitionName);
        }

        @Override
        public int hashCode()
        {
            // partitionName intentionally excluded (see field comment)
            return Objects.hash(schemaName, tableName, partitionValues);
        }

        @Override
        public boolean equals(Object obj)
        {
            if (this == obj) {
                return true;
            }
            if (obj == null || getClass() != obj.getClass()) {
                return false;
            }
            PartitionName other = (PartitionName) obj;
            return Objects.equals(this.schemaName, other.schemaName)
                    && Objects.equals(this.tableName, other.tableName)
                    && Objects.equals(this.partitionValues, other.partitionValues);
        }

        @Override
        public String toString()
        {
            return schemaName + "/" + tableName + "/" + partitionName;
        }
    }

    /**
     * Key for the table-privileges map: (principal, type, table, database).
     * NOTE: constructor parameter order is (table, database) - opposite of the usual
     * (database, table) convention; keep call sites consistent.
     */
    private static class PrincipalTableKey
    {
        private final String principalName;
        private final PrincipalType principalType;
        private final String database;
        private final String table;

        public PrincipalTableKey(String principalName, PrincipalType principalType, String table, String database)
        {
            this.principalName = requireNonNull(principalName, "principalName is null");
            this.principalType = requireNonNull(principalType, "principalType is null");
            this.table = requireNonNull(table, "table is null");
            this.database = requireNonNull(database, "database is null");
        }

        /** Copy of this key under a different database (used by database rename). */
        public PrincipalTableKey withDatabase(String database)
        {
            return new PrincipalTableKey(principalName, principalType, table, database);
        }

        @Override
        public boolean equals(Object o)
        {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            PrincipalTableKey that = (PrincipalTableKey) o;
            return Objects.equals(principalName, that.principalName) &&
                    Objects.equals(principalType, that.principalType) &&
                    Objects.equals(table, that.table) &&
                    Objects.equals(database, that.database);
        }

        @Override
        public int hashCode()
        {
            return Objects.hash(principalName, principalType, table, database);
        }

        @Override
        public String toString()
        {
            return toStringHelper(this)
                    .add("principalName", principalName)
                    .add("principalType", principalType)
                    .add("table", table)
                    .add("database", database)
                    .toString();
        }
    }

    /**
     * Re-keys every map entry through {@code keyRewriter} in place. Iterates over a
     * snapshot of the key set so entries can be moved while iterating.
     */
    private static <K, V> void rewriteKeys(Map<K, V> map, Function<K, K> keyRewriter)
    {
        for (K key : ImmutableSet.copyOf(map.keySet())) {
            K newKey = keyRewriter.apply(key);
            if (!newKey.equals(key)) {
                map.put(newKey, map.remove(key));
            }
        }
    }
}
| |
/*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.guvnor.server.files;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URLEncoder;
import java.util.Iterator;
import java.util.List;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.event.Event;
import javax.enterprise.inject.Any;
import javax.jcr.RepositoryException;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.FileItemFactory;
import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.drools.compiler.DroolsParserException;
import org.drools.guvnor.client.common.HTMLFileManagerFields;
import org.drools.guvnor.server.builder.BRMSPackageBuilder;
import org.drools.guvnor.server.builder.DSLLoader;
import org.drools.guvnor.server.builder.ModuleAssembler;
import org.drools.guvnor.server.builder.ModuleAssemblerManager;
import org.drools.guvnor.server.contenthandler.ContentHandler;
import org.drools.guvnor.server.contenthandler.ContentManager;
import org.drools.guvnor.server.contenthandler.ICanHasAttachment;
import org.drools.guvnor.server.contenthandler.IRuleAsset;
import org.drools.guvnor.server.repository.FileUploadedEvent;
import org.drools.guvnor.server.repository.Preferred;
import org.drools.guvnor.server.security.AdminType;
import org.drools.guvnor.server.security.RoleType;
import org.drools.guvnor.server.util.ClassicDRLImporter;
import org.drools.guvnor.server.util.ClassicDRLImporter.Asset;
import org.drools.guvnor.server.util.DroolsHeader;
import org.drools.guvnor.server.util.FormData;
import org.drools.repository.AssetItem;
import org.drools.repository.ModuleItem;
import org.drools.repository.RulesRepository;
import org.drools.repository.RulesRepositoryException;
import javax.inject.Inject;
import javax.inject.Named;
import org.jboss.seam.security.annotations.LoggedIn;
import org.jboss.seam.security.Identity;
/**
* This assists the file manager servlets.
*/
@Named("fileManager")
@ApplicationScoped
public class FileManagerService {
@Inject @Preferred
private RulesRepository repository;
@Inject
private Identity identity;
@Inject
@Any
private Event<FileUploadedEvent> fileUploadedEventEvent;
/**
* This attach a file to an asset.
*/
@LoggedIn
public void attachFile(FormData uploadItem) throws IOException {
String uuid = uploadItem.getUuid();
InputStream fileData = uploadItem.getFile().getInputStream();
String fileName = uploadItem.getFile().getName();
attachFileToAsset(uuid,
fileData,
fileName);
uploadItem.getFile().getInputStream().close();
}
/**
* This utility method attaches a file to an asset.
* @throws IOException
*/
@LoggedIn
public void attachFileToAsset(String uuid,
InputStream fileData,
String fileName) throws IOException {
//here we should mark the binary data as invalid on the package (which means moving something into repo modle)
AssetItem item = repository.loadAssetByUUID( uuid );
item.updateBinaryContentAttachment( fileData );
item.updateBinaryContentAttachmentFileName( fileName );
item.getModule().updateBinaryUpToDate( false );
item.checkin( "Attached file: " + fileName );
// Special treatment for model and ruleflow attachments.
ContentHandler handler = ContentManager.getHandler( item.getFormat() );
if ( handler instanceof ICanHasAttachment ) {
((ICanHasAttachment) handler).onAttachmentAdded( item );
}
}
public void setRepository(RulesRepository repository) {
this.repository = repository;
}
/**
* The get returns files based on UUID of an asset.
*/
@LoggedIn
public String loadFileAttachmentByUUID(String uuid,
OutputStream out) throws IOException {
AssetItem item = repository.loadAssetByUUID( uuid );
byte[] data = item.getBinaryContentAsBytes();
if ( data == null ) {
data = new byte[0];
}
out.write( data );
out.flush();
String fileName = null;
String binaryContentAttachmentFileName = item.getBinaryContentAttachmentFileName();
//Note the file extension name may not be same as asset format name in some cases.
if(binaryContentAttachmentFileName !=null && !"".equals(binaryContentAttachmentFileName)) {
fileName = binaryContentAttachmentFileName;
} else {
fileName = item.getName() + "." + item.getFormat();
}
return fileName;
}
/**
* Get the form data from the inbound request.
*/
@SuppressWarnings("rawtypes")
public static FormData getFormData(HttpServletRequest request) {
FileItemFactory factory = new DiskFileItemFactory();
ServletFileUpload upload = new ServletFileUpload( factory );
upload.setHeaderEncoding( "UTF-8" );
FormData data = new FormData();
try {
List items = upload.parseRequest( request );
Iterator it = items.iterator();
while ( it.hasNext() ) {
FileItem item = (FileItem) it.next();
if ( item.isFormField() && item.getFieldName().equals( HTMLFileManagerFields.FORM_FIELD_UUID ) ) {
data.setUuid( item.getString() );
} else if ( !item.isFormField() ) {
data.setFile( item );
}
}
return data;
} catch ( FileUploadException e ) {
throw new RulesRepositoryException( e );
}
}
/**
* Load up the appropriate package version.
* @param packageName The name of the package.
* @param packageVersion The version (if it is a snapshot).
* @param isLatest true if the latest package binary will be used (ie NOT a snapshot).
* @return The filename if its all good.
* @deprecated Use JAX-RS based REST API instead
*/
public String loadBinaryPackage(String packageName,
String packageVersion,
boolean isLatest,
OutputStream out) throws IOException {
ModuleItem item = null;
if ( isLatest ) {
item = repository.loadModule( packageName );
byte[] data = item.getCompiledBinaryBytes();
out.write( data );
out.flush();
return packageName + ".pkg";
} else {
item = repository.loadModuleSnapshot( packageName,
packageVersion );
byte[] data = item.getCompiledBinaryBytes();
out.write( data );
out.flush();
return packageName + "_" + URLEncoder.encode( packageVersion,
"UTF-8" ) + ".pkg";
}
}
/**
* Load up the approproate package version.
* @param packageName The name of the package.
* @param packageVersion The version (if it is a snapshot).
* @param isLatest true if the latest package binary will be used (ie NOT a snapshot).
* @return The filename if its all good.
*/
public String loadSourcePackage(String packageName,
String packageVersion,
boolean isLatest,
OutputStream out) throws IOException {
ModuleItem item = null;
if ( isLatest ) {
item = repository.loadModule( packageName );
ModuleAssembler moduleAssembler = ModuleAssemblerManager.getModuleAssembler(item.getFormat(), item, null);
String drl = moduleAssembler.getCompiledSource();
out.write( drl.getBytes() );
out.flush();
return packageName + ".drl";
} else {
item = repository.loadModuleSnapshot( packageName,
packageVersion );
ModuleAssembler moduleAssembler = ModuleAssemblerManager.getModuleAssembler(item.getFormat(), item, null);
String drl = moduleAssembler.getCompiledSource();
out.write( drl.getBytes() );
out.flush();
return packageName + "_" + URLEncoder.encode( packageVersion,
"UTF-8" ) + ".drl";
}
}
public byte[] exportPackageFromRepository(String packageName) {
try {
return this.repository.exportModuleFromRepository( packageName );
} catch ( RepositoryException e ) {
throw new RulesRepositoryException( e );
} catch ( IOException e ) {
throw new RulesRepositoryException( e );
}
}
public boolean isPackageExist(String packageName) {
return this.repository.containsModule(packageName);
}
public void exportRulesRepository(OutputStream out) {
this.repository.exportRepositoryToStream( out );
}
@LoggedIn
public void importRulesRepository(InputStream in) {
identity.checkPermission( new AdminType(),
RoleType.ADMIN.getName() );
repository.importRulesRepositoryFromStream( in );
fileUploadedEventEvent.fire(new FileUploadedEvent());
}
@LoggedIn
public void importPackageToRepository(byte[] data,
boolean importAsNew) {
repository.importPackageToRepository(data,
importAsNew);
fileUploadedEventEvent.fire(new FileUploadedEvent());
}
/**
* This will import DRL from a drl file into a more normalised structure.
* If the package does not exist, it will be created.
* If it does, it will be "merged" in the sense that any new rules in the drl
* will be created as new assets in the repo, everything else will stay as it was
* in the repo.
*
* @param drlInputStream will be closed after it's read
* @param packageName Name for this package. Overrides the one in the DRL.
*/
@LoggedIn
public String importClassicDRL(InputStream drlInputStream,
String packageName) {
ClassicDRLImporter imp;
try {
imp = new ClassicDRLImporter(drlInputStream);
} catch (DroolsParserException e) {
throw new IllegalArgumentException(
"Could not parse the drlInputStream for package (" + packageName + "): " + e.getMessage(), e);
}
ModuleItem pkg = null;
if ( packageName == null ) {
packageName = imp.getPackageName();
}
if ( packageName == null || "".equals( packageName ) ) {
throw new IllegalArgumentException( "Missing package name." );
}
boolean existing = repository.containsModule( packageName );
// Check if the package is archived
if ( existing && repository.isModuleArchived( packageName ) ) {
// Remove the package so it can be created again.
ModuleItem item = repository.loadModule( packageName );
item.remove();
existing = false;
}
if ( existing ) {
pkg = repository.loadModule( packageName );
DroolsHeader.updateDroolsHeader( ClassicDRLImporter.mergeLines( DroolsHeader.getDroolsHeader( pkg ),
imp.getPackageHeader() ),
pkg );
existing = true;
} else {
pkg = repository.createModule( packageName,
"<imported>" );
DroolsHeader.updateDroolsHeader( imp.getPackageHeader(),
pkg );
}
boolean newVer = Boolean.parseBoolean( System.getProperty( "drools.createNewVersionOnImport",
"true" ) );
for ( Asset as : imp.getAssets() ) {
if ( existing && pkg.containsAsset( as.name ) ) {
AssetItem asset = pkg.loadAsset( as.name );
if ( asset.getFormat().equals( as.format ) ) {
asset.updateContent( as.content );
if ( newVer ) asset.checkin( "Imported change from external DRL" );
} //skip it if not the right format
} else {
AssetItem asset = pkg.addAsset( as.name,
"<imported>" );
asset.updateFormat( as.format );
asset.updateContent( as.content );
asset.updateExternalSource( "Imported from external DRL" );
if ( newVer ) asset.checkin( "Imported change from external DRL" );
}
}
pkg.updateBinaryUpToDate(false);
repository.save();
/* Return the name of the new package to the caller */
return packageName;
}
/**
* This will return the last time the package was built.
*/
public long getLastModified(String name,
String version) {
ModuleItem item = null;
if ( version.equals( "LATEST" ) ) {
item = repository.loadModule( name );
} else {
item = repository.loadModuleSnapshot( name,
version );
}
return item.getLastModified().getTimeInMillis();
}
/**
 * Writes the source of a single asset to the supplied output stream and
 * returns a suggested file name for it.
 *
 * Rule assets are assembled into DRL (expanding any DSL sentences defined
 * in the module); all other assets are written out as their raw content.
 */
public String loadSourceAsset(String packageName,
String packageVersion,
boolean isLatest,
String assetName,
ByteArrayOutputStream out) throws IOException {
ModuleItem pkg = null;
// "Latest" reads the module head; otherwise load the named snapshot.
if ( isLatest ) {
pkg = repository.loadModule( packageName );
} else {
pkg = repository.loadModuleSnapshot( packageName,
packageVersion );
}
AssetItem item = pkg.loadAsset( assetName );
ContentHandler handler = ContentManager.getHandler( item.getFormat() );
StringBuilder stringBuilder = new StringBuilder();
if ( handler.isRuleAsset() ) {
BRMSPackageBuilder builder = new BRMSPackageBuilder();
// Load the module's DSL mapping files so DSL sentences in the rule
// are expanded when the DRL is assembled.
builder.setDSLFiles( DSLLoader.loadDSLMappingFiles( item.getModule() ) );
((IRuleAsset) handler).assembleDRL( builder,
item,
stringBuilder );
// NOTE(review): getBytes() uses the platform default charset here and
// below — confirm whether UTF-8 should be forced for asset content.
out.write( stringBuilder.toString().getBytes() );
return item.getName() + ".drl";
} else {
// NOTE(review): non-rule assets are written raw but the returned file
// name still uses a ".drl" extension — confirm this is intentional
// rather than using the asset's own format as the extension.
out.write( item.getContent().getBytes() );
return item.getName() + ".drl";
}
}
}
| |
/*
* Copyright (C) 2007-2008 Esmertec AG.
* Copyright (C) 2007-2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.mms.dom.smil;
import org.w3c.dom.DOMException;
import org.w3c.dom.events.DocumentEvent;
import org.w3c.dom.events.Event;
import org.w3c.dom.smil.ElementTime;
import org.w3c.dom.smil.SMILMediaElement;
import org.w3c.dom.smil.TimeList;
import android.util.Log;
import com.android.mms.dom.events.EventImpl;
public class SmilMediaElementImpl extends SmilElementImpl implements
SMILMediaElement {
public final static String SMIL_MEDIA_START_EVENT = "SmilMediaStart";
public final static String SMIL_MEDIA_END_EVENT = "SmilMediaEnd";
public final static String SMIL_MEDIA_PAUSE_EVENT = "SmilMediaPause";
public final static String SMIL_MEDIA_SEEK_EVENT = "SmilMediaSeek";
private final static String TAG = "Mms:smil";
private static final boolean DEBUG = false;
private static final boolean LOCAL_LOGV = false;
ElementTime mElementTime = new ElementTimeImpl(this) {
private Event createEvent(String eventType) {
DocumentEvent doc =
(DocumentEvent)SmilMediaElementImpl.this.getOwnerDocument();
Event event = doc.createEvent("Event");
event.initEvent(eventType, false, false);
if (LOCAL_LOGV) {
Log.v(TAG, "Dispatching 'begin' event to "
+ SmilMediaElementImpl.this.getTagName() + " "
+ SmilMediaElementImpl.this.getSrc() + " at "
+ System.currentTimeMillis());
}
return event;
}
private Event createEvent(String eventType, int seekTo) {
DocumentEvent doc =
(DocumentEvent)SmilMediaElementImpl.this.getOwnerDocument();
EventImpl event = (EventImpl) doc.createEvent("Event");
event.initEvent(eventType, false, false, seekTo);
if (LOCAL_LOGV) {
Log.v(TAG, "Dispatching 'begin' event to "
+ SmilMediaElementImpl.this.getTagName() + " "
+ SmilMediaElementImpl.this.getSrc() + " at "
+ System.currentTimeMillis());
}
return event;
}
public boolean beginElement() {
Event startEvent = createEvent(SMIL_MEDIA_START_EVENT);
dispatchEvent(startEvent);
return true;
}
public boolean endElement() {
Event endEvent = createEvent(SMIL_MEDIA_END_EVENT);
dispatchEvent(endEvent);
return true;
}
public void resumeElement() {
Event resumeEvent = createEvent(SMIL_MEDIA_START_EVENT);
dispatchEvent(resumeEvent);
}
public void pauseElement() {
Event pauseEvent = createEvent(SMIL_MEDIA_PAUSE_EVENT);
dispatchEvent(pauseEvent);
}
public void seekElement(float seekTo) {
Event seekEvent = createEvent(SMIL_MEDIA_SEEK_EVENT, (int) seekTo);
dispatchEvent(seekEvent);
}
@Override
public float getDur() {
float dur = super.getDur();
if (dur == 0) {
// Duration is not specified, So get the implicit duration.
String tag = getTagName();
if (tag.equals("video") || tag.equals("audio")) {
// Continuous media
// FIXME Should get the duration of the media. "indefinite" instead here.
dur = -1.0F;
} else if (tag.equals("text") || tag.equals("img")) {
// Discrete media
dur = 0;
} else {
Log.w(TAG, "Unknown media type");
}
}
return dur;
}
@Override
ElementTime getParentElementTime() {
return ((SmilParElementImpl) mSmilElement.getParentNode()).mParTimeContainer;
}
};
/*
* Internal Interface
*/
SmilMediaElementImpl(SmilDocumentImpl owner, String tagName) {
super(owner, tagName);
}
/*
* SMILMediaElement Interface
*/
public String getAbstractAttr() {
return this.getAttribute("abstract");
}
public String getAlt() {
return this.getAttribute("alt");
}
public String getAuthor() {
return this.getAttribute("author");
}
public String getClipBegin() {
return this.getAttribute("clipBegin");
}
public String getClipEnd() {
return this.getAttribute("clipEnd");
}
public String getCopyright() {
return this.getAttribute("copyright");
}
public String getLongdesc() {
return this.getAttribute("longdesc");
}
public String getPort() {
return this.getAttribute("port");
}
public String getReadIndex() {
return this.getAttribute("readIndex");
}
public String getRtpformat() {
return this.getAttribute("rtpformat");
}
public String getSrc() {
return this.getAttribute("src");
}
public String getStripRepeat() {
return this.getAttribute("stripRepeat");
}
public String getTitle() {
return this.getAttribute("title");
}
public String getTransport() {
return this.getAttribute("transport");
}
public String getType() {
return this.getAttribute("type");
}
public void setAbstractAttr(String abstractAttr) throws DOMException {
this.setAttribute("abstract", abstractAttr);
}
public void setAlt(String alt) throws DOMException {
this.setAttribute("alt", alt);
}
public void setAuthor(String author) throws DOMException {
this.setAttribute("author", author);
}
public void setClipBegin(String clipBegin) throws DOMException {
this.setAttribute("clipBegin", clipBegin);
}
public void setClipEnd(String clipEnd) throws DOMException {
this.setAttribute("clipEnd", clipEnd);
}
public void setCopyright(String copyright) throws DOMException {
this.setAttribute("copyright", copyright);
}
public void setLongdesc(String longdesc) throws DOMException {
this.setAttribute("longdesc", longdesc);
}
public void setPort(String port) throws DOMException {
this.setAttribute("port", port);
}
public void setReadIndex(String readIndex) throws DOMException {
this.setAttribute("readIndex", readIndex);
}
public void setRtpformat(String rtpformat) throws DOMException {
this.setAttribute("rtpformat", rtpformat);
}
public void setSrc(String src) throws DOMException {
this.setAttribute("src", src);
}
public void setStripRepeat(String stripRepeat) throws DOMException {
this.setAttribute("stripRepeat", stripRepeat);
}
public void setTitle(String title) throws DOMException {
this.setAttribute("title", title);
}
public void setTransport(String transport) throws DOMException {
this.setAttribute("transport", transport);
}
public void setType(String type) throws DOMException {
this.setAttribute("type", type);
}
/*
* TimeElement Interface
*/
public boolean beginElement() {
return mElementTime.beginElement();
}
public boolean endElement() {
return mElementTime.endElement();
}
public TimeList getBegin() {
return mElementTime.getBegin();
}
public float getDur() {
return mElementTime.getDur();
}
public TimeList getEnd() {
return mElementTime.getEnd();
}
public short getFill() {
return mElementTime.getFill();
}
public short getFillDefault() {
return mElementTime.getFillDefault();
}
public float getRepeatCount() {
return mElementTime.getRepeatCount();
}
public float getRepeatDur() {
return mElementTime.getRepeatDur();
}
public short getRestart() {
return mElementTime.getRestart();
}
public void pauseElement() {
mElementTime.pauseElement();
}
public void resumeElement() {
mElementTime.resumeElement();
}
public void seekElement(float seekTo) {
mElementTime.seekElement(seekTo);
}
public void setBegin(TimeList begin) throws DOMException {
mElementTime.setBegin(begin);
}
public void setDur(float dur) throws DOMException {
mElementTime.setDur(dur);
}
public void setEnd(TimeList end) throws DOMException {
mElementTime.setEnd(end);
}
public void setFill(short fill) throws DOMException {
mElementTime.setFill(fill);
}
public void setFillDefault(short fillDefault) throws DOMException {
mElementTime.setFillDefault(fillDefault);
}
public void setRepeatCount(float repeatCount) throws DOMException {
mElementTime.setRepeatCount(repeatCount);
}
public void setRepeatDur(float repeatDur) throws DOMException {
mElementTime.setRepeatDur(repeatDur);
}
public void setRestart(short restart) throws DOMException {
mElementTime.setRestart(restart);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.SocketTimeoutException;
import java.net.URI;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FsServerDefaults;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
import org.apache.hadoop.fs.Options.ChecksumOpt;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.net.Peer;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.RollingUpgradeAction;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
import org.apache.hadoop.hdfs.web.WebHdfsConstants;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.DataChecksum;
import org.apache.hadoop.util.Time;
import org.apache.log4j.Level;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.InOrder;
public class TestDistributedFileSystem {
// Shared RNG for tests needing random data; testFileChecksum re-seeds it
// with a printed seed so failures are reproducible.
private static final Random RAN = new Random();
// Make DFSClient logging maximally verbose for every test in this class.
static {
GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
}
// When true, getTestConfiguration() configures a separate service RPC port.
private boolean dualPortTesting = false;
// When true, getTestConfiguration() skips the *-default.xml resources.
private boolean noXmlDefaults = false;
/**
 * Builds the base HdfsConfiguration used by the tests in this class,
 * honouring the {@code noXmlDefaults} and {@code dualPortTesting} flags.
 */
private HdfsConfiguration getTestConfiguration() {
    final HdfsConfiguration configuration;
    if (!noXmlDefaults) {
        configuration = new HdfsConfiguration();
    } else {
        // Skip the *-default.xml resources; the storage directories must
        // then be set explicitly since no defaults are inherited.
        configuration = new HdfsConfiguration(false);
        final String nameDir = new File(MiniDFSCluster.getBaseDirectory(),
            "name").getAbsolutePath();
        configuration.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY, nameDir);
        configuration.set(DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY, nameDir);
    }
    if (dualPortTesting) {
        // Expose a separate service RPC endpoint on an ephemeral port.
        configuration.set(DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
            "localhost:0");
    }
    configuration.setLong(DFSConfigKeys.DFS_NAMENODE_MIN_BLOCK_SIZE_KEY, 0);
    return configuration;
}
/** Requesting a delegation token with an empty renewer must not fail. */
@Test
public void testEmptyDelegationToken() throws IOException {
    final Configuration conf = getTestConfiguration();
    MiniDFSCluster cluster = null;
    try {
        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
        final FileSystem fs = cluster.getFileSystem();
        fs.getDelegationToken("");
    } finally {
        if (cluster != null) {
            cluster.shutdown();
        }
    }
}
/**
 * FileSystem.closeAll() must succeed both before and after cached
 * FileSystem instances have been handed out for the cluster's URI.
 */
@Test
public void testFileSystemCloseAll() throws Exception {
    Configuration conf = getTestConfiguration();
    final MiniDFSCluster cluster =
        new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
    final URI defaultUri = FileSystem.getDefaultUri(conf);
    try {
        FileSystem.closeAll();
        // Build a fresh configuration pointing at the same namenode, then
        // populate the FileSystem cache twice before closing everything.
        conf = getTestConfiguration();
        FileSystem.setDefaultUri(conf, defaultUri);
        FileSystem.get(conf);
        FileSystem.get(conf);
        FileSystem.closeAll();
    } finally {
        cluster.shutdown();
    }
}
/**
 * Tests DFSClient.close throws no ConcurrentModificationException if
 * multiple files are open.
 * Also tests that any cached sockets are closed. (HDFS-3359)
 */
@Test
public void testDFSClose() throws Exception {
Configuration conf = getTestConfiguration();
MiniDFSCluster cluster = null;
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
DistributedFileSystem fileSys = cluster.getFileSystem();
// create two files, leaving them open
fileSys.create(new Path("/test/dfsclose/file-0"));
fileSys.create(new Path("/test/dfsclose/file-1"));
// create another file, close it, and read it, so
// the client gets a socket in its SocketCache
Path p = new Path("/non-empty-file");
DFSTestUtil.createFile(fileSys, p, 1L, (short)1, 0L);
DFSTestUtil.readFile(fileSys, p);
// Closing with two streams still open must not CME while the client
// iterates its open-streams collection.
fileSys.close();
// Every client RPC issued after close() must fail fast with
// "Filesystem closed".
DFSClient dfsClient = fileSys.getClient();
verifyOpsUsingClosedClient(dfsClient);
} finally {
if (cluster != null) {cluster.shutdown();}
}
}
/**
 * Exercises a representative set of DFSClient RPC entry points against a
 * client whose filesystem has already been closed, asserting that each one
 * fails fast with an IOException containing "Filesystem closed" instead of
 * attempting a remote call. Every clause below follows the same pattern:
 * invoke the operation, fail() if it returns, and check the exception text.
 */
private void verifyOpsUsingClosedClient(DFSClient dfsClient) {
Path p = new Path("/non-empty-file");
try {
dfsClient.getBlockSize(p.getName());
fail("getBlockSize using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.getServerDefaults();
fail("getServerDefaults using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.reportBadBlocks(new LocatedBlock[0]);
fail("reportBadBlocks using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.getBlockLocations(p.getName(), 0, 1);
fail("getBlockLocations using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.createSymlink("target", "link", true);
fail("createSymlink using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.getLinkTarget(p.getName());
fail("getLinkTarget using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.setReplication(p.getName(), (short) 3);
fail("setReplication using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.setStoragePolicy(p.getName(),
HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
fail("setStoragePolicy using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.getStoragePolicies();
fail("getStoragePolicies using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.setSafeMode(SafeModeAction.SAFEMODE_LEAVE);
fail("setSafeMode using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.refreshNodes();
fail("refreshNodes using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.metaSave(p.getName());
fail("metaSave using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.setBalancerBandwidth(1000L);
fail("setBalancerBandwidth using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.finalizeUpgrade();
fail("finalizeUpgrade using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.rollingUpgrade(RollingUpgradeAction.QUERY);
fail("rollingUpgrade using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.getInotifyEventStream();
fail("getInotifyEventStream using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.getInotifyEventStream(100L);
fail("getInotifyEventStream using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.saveNamespace(1000L, 200L);
fail("saveNamespace using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.rollEdits();
fail("rollEdits using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.restoreFailedStorage("");
fail("restoreFailedStorage using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.getContentSummary(p.getName());
fail("getContentSummary using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.setQuota(p.getName(), 1000L, 500L);
fail("setQuota using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
try {
dfsClient.setQuotaByStorageType(p.getName(), StorageType.DISK, 500L);
fail("setQuotaByStorageType using a closed filesystem!");
} catch (IOException ioe) {
GenericTestUtils.assertExceptionContains("Filesystem closed", ioe);
}
}
/**
 * Verifies with a mocked DFSClient that DistributedFileSystem.close()
 * performs its shutdown steps in order: close output streams first, then
 * process deleteOnExit paths, then close the client.
 */
@Test
public void testDFSCloseOrdering() throws Exception {
DistributedFileSystem fs = new MyDistributedFileSystem();
Path path = new Path("/a");
fs.deleteOnExit(path);
fs.close();
// InOrder makes the verification sensitive to call ordering, not just
// call presence.
InOrder inOrder = inOrder(fs.dfs);
inOrder.verify(fs.dfs).closeOutputStreams(eq(false));
inOrder.verify(fs.dfs).delete(eq(path.toString()), eq(true));
inOrder.verify(fs.dfs).close();
}
/**
 * Test double for testDFSCloseOrdering: a DistributedFileSystem whose
 * internal DFSClient is a Mockito mock so close() call ordering can be
 * verified without a running cluster.
 */
private static class MyDistributedFileSystem extends DistributedFileSystem {
MyDistributedFileSystem() {
statistics = new FileSystem.Statistics("myhdfs"); // can't mock finals
dfs = mock(DFSClient.class);
}
@Override
public boolean exists(Path p) {
return true; // trick out deleteOnExit
}
// Symlink resolution doesn't work with a mock, since it doesn't
// have a valid Configuration to resolve paths to the right FileSystem.
// Just call the DFSClient directly to register the delete
@Override
public boolean delete(Path f, final boolean recursive) throws IOException {
return dfs.delete(f.toUri().getPath(), recursive);
}
}
/**
 * Seeking past the end of a file, and seeking on a closed stream, must
 * both raise IOException.
 */
@Test
public void testDFSSeekExceptions() throws IOException {
Configuration conf = getTestConfiguration();
MiniDFSCluster cluster = null;
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
FileSystem fileSys = cluster.getFileSystem();
String file = "/test/fileclosethenseek/file-0";
Path path = new Path(file);
// create file
FSDataOutputStream output = fileSys.create(path);
output.writeBytes("Some test data to write longer than 10 bytes");
output.close();
FSDataInputStream input = fileSys.open(path);
input.seek(10);
boolean threw = false;
try {
// Seek beyond EOF (file is ~45 bytes) must throw.
input.seek(100);
} catch (IOException e) {
// success
threw = true;
}
assertTrue("Failed to throw IOE when seeking past end", threw);
// Close the stream deliberately, then seek again: must also throw.
input.close();
threw = false;
try {
input.seek(1);
} catch (IOException e) {
//success
threw = true;
}
assertTrue("Failed to throw IOE when seeking after close", threw);
fileSys.close();
}
finally {
if (cluster != null) {cluster.shutdown();}
}
}
/**
 * End-to-end DFSClient behaviour checks:
 * - the lease renewer thread starts when a file is opened for write and
 *   stops only after all writers are closed plus a grace period (probed
 *   via reflection since the renewer API is package-private);
 * - opening a non-existent file raises FileNotFoundException;
 * - reading back a previously written file does not start the renewer;
 * - the filesystem is reachable via a raw IP-address URI.
 */
@Test
public void testDFSClient() throws Exception {
Configuration conf = getTestConfiguration();
// Shortened renewer grace period so the test can observe shutdown quickly.
final long grace = 1000L;
MiniDFSCluster cluster = null;
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
final String filepathstring = "/test/LeaseChecker/foo";
final Path[] filepaths = new Path[4];
for(int i = 0; i < filepaths.length; i++) {
filepaths[i] = new Path(filepathstring + i);
}
final long millis = Time.now();
{
final DistributedFileSystem dfs = cluster.getFileSystem();
// The renewer's setGraceSleepPeriod/isRunning are not public; reach
// them via reflection.
Method setMethod = dfs.dfs.getLeaseRenewer().getClass()
.getDeclaredMethod("setGraceSleepPeriod", long.class);
setMethod.setAccessible(true);
setMethod.invoke(dfs.dfs.getLeaseRenewer(), grace);
Method checkMethod = dfs.dfs.getLeaseRenewer().getClass()
.getDeclaredMethod("isRunning");
checkMethod.setAccessible(true);
assertFalse((boolean) checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
{
//create a file
final FSDataOutputStream out = dfs.create(filepaths[0]);
assertTrue((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
//write something
out.writeLong(millis);
assertTrue((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
//close
out.close();
Thread.sleep(grace/4*3);
//within grace period
assertTrue((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
// Poll up to three half-grace intervals for the renewer to stop.
for(int i = 0; i < 3; i++) {
if ((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer())) {
Thread.sleep(grace/2);
}
}
//passed grace period
assertFalse((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
}
{
//create file1
final FSDataOutputStream out1 = dfs.create(filepaths[1]);
assertTrue((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
//create file2
final FSDataOutputStream out2 = dfs.create(filepaths[2]);
assertTrue((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
//write something to file1
out1.writeLong(millis);
assertTrue((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
//close file1
out1.close();
// Renewer must keep running while file2 is still open.
assertTrue((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
//write something to file2
out2.writeLong(millis);
assertTrue((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
//close file2
out2.close();
Thread.sleep(grace/4*3);
//within grace period
assertTrue((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
}
{
//create file3
final FSDataOutputStream out3 = dfs.create(filepaths[3]);
assertTrue((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
Thread.sleep(grace/4*3);
//passed previous grace period, should still running
assertTrue((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
//write something to file3
out3.writeLong(millis);
assertTrue((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
//close file3
out3.close();
assertTrue((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
Thread.sleep(grace/4*3);
//within grace period
assertTrue((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
for(int i = 0; i < 3; i++) {
if ((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer())) {
Thread.sleep(grace/2);
}
}
//passed grace period
assertFalse((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
}
dfs.close();
}
{
// Check to see if opening a non-existent file triggers a FNF
FileSystem fs = cluster.getFileSystem();
Path dir = new Path("/wrwelkj");
assertFalse("File should not exist for test.", fs.exists(dir));
try {
FSDataInputStream in = fs.open(dir);
try {
in.close();
fs.close();
} finally {
// Reaching here means open() did not throw: fail the test.
assertTrue("Did not get a FileNotFoundException for non-existing" +
" file.", false);
}
} catch (FileNotFoundException fnf) {
// This is the proper exception to catch; move on.
}
}
{
final DistributedFileSystem dfs = cluster.getFileSystem();
Method checkMethod = dfs.dfs.getLeaseRenewer().getClass()
.getDeclaredMethod("isRunning");
checkMethod.setAccessible(true);
// Read-only access must never start the lease renewer.
assertFalse((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
//open and check the file
FSDataInputStream in = dfs.open(filepaths[0]);
assertFalse((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
assertEquals(millis, in.readLong());
assertFalse((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
in.close();
assertFalse((boolean)checkMethod.invoke(dfs.dfs.getLeaseRenewer()));
dfs.close();
}
{ // test accessing DFS with ip address. should work with any hostname
// alias or ip address that points to the interface that NameNode
// is listening on. In this case, it is localhost.
String uri = "hdfs://127.0.0.1:" + cluster.getNameNodePort() +
"/test/ipAddress/file";
Path path = new Path(uri);
FileSystem fs = FileSystem.get(path.toUri(), conf);
FSDataOutputStream out = fs.create(path);
byte[] buf = new byte[1024];
out.write(buf);
out.close();
FSDataInputStream in = fs.open(path);
in.readFully(buf);
in.close();
fs.close();
}
}
finally {
if (cluster != null) {cluster.shutdown();}
}
}
/**
 * Verifies that each FileSystem operation bumps the expected statistics
 * counter (readOps / writeOps / largeReadOps). The expected counters are
 * tracked incrementally alongside each operation; a small DFS_LIST_LIMIT
 * forces listStatus to paginate so largeReadOps can be exercised.
 */
@Test
public void testStatistics() throws Exception {
int lsLimit = 2;
final Configuration conf = getTestConfiguration();
conf.setInt(DFSConfigKeys.DFS_LIST_LIMIT, lsLimit);
final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
try {
final FileSystem fs = cluster.getFileSystem();
Path dir = new Path("/test");
Path file = new Path(dir, "file");
// Baseline counters before any operation under test.
int readOps = DFSTestUtil.getStatistics(fs).getReadOps();
int writeOps = DFSTestUtil.getStatistics(fs).getWriteOps();
int largeReadOps = DFSTestUtil.getStatistics(fs).getLargeReadOps();
fs.mkdirs(dir);
checkStatistics(fs, readOps, ++writeOps, largeReadOps);
FSDataOutputStream out = fs.create(file, (short)1);
out.close();
checkStatistics(fs, readOps, ++writeOps, largeReadOps);
FileStatus status = fs.getFileStatus(file);
checkStatistics(fs, ++readOps, writeOps, largeReadOps);
fs.getFileBlockLocations(file, 0, 0);
checkStatistics(fs, ++readOps, writeOps, largeReadOps);
fs.getFileBlockLocations(status, 0, 0);
checkStatistics(fs, ++readOps, writeOps, largeReadOps);
FSDataInputStream in = fs.open(file);
in.close();
checkStatistics(fs, ++readOps, writeOps, largeReadOps);
fs.setReplication(file, (short)2);
checkStatistics(fs, readOps, ++writeOps, largeReadOps);
Path file1 = new Path(dir, "file1");
fs.rename(file, file1);
checkStatistics(fs, readOps, ++writeOps, largeReadOps);
fs.getContentSummary(file1);
checkStatistics(fs, ++readOps, writeOps, largeReadOps);
// Iterative ls test
for (int i = 0; i < 10; i++) {
Path p = new Path(dir, Integer.toString(i));
fs.mkdirs(p);
FileStatus[] list = fs.listStatus(dir);
if (list.length > lsLimit) {
// if large directory, then count readOps and largeReadOps by
// number times listStatus iterates
int iterations = (int)Math.ceil((double)list.length/lsLimit);
largeReadOps += iterations;
readOps += iterations;
} else {
// Single iteration in listStatus - no large read operation done
readOps++;
}
// writeOps incremented by 1 for mkdirs
// readOps and largeReadOps incremented by 1 or more
checkStatistics(fs, readOps, ++writeOps, largeReadOps);
}
fs.getStatus(file1);
checkStatistics(fs, ++readOps, writeOps, largeReadOps);
fs.getFileChecksum(file1);
checkStatistics(fs, ++readOps, writeOps, largeReadOps);
fs.setPermission(file1, new FsPermission((short)0777));
checkStatistics(fs, readOps, ++writeOps, largeReadOps);
fs.setTimes(file1, 0L, 0L);
checkStatistics(fs, readOps, ++writeOps, largeReadOps);
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
fs.setOwner(file1, ugi.getUserName(), ugi.getGroupNames()[0]);
checkStatistics(fs, readOps, ++writeOps, largeReadOps);
fs.delete(dir, true);
checkStatistics(fs, readOps, ++writeOps, largeReadOps);
} finally {
if (cluster != null) cluster.shutdown();
}
}
/**
 * Checks the aggregate operation statistics for {@code fs}.
 * A value of -1 for any expected count skips that particular check.
 * (This skip behaviour was documented on the original method but never
 * implemented; it is now honoured, and all existing callers pass
 * non-negative values so their behaviour is unchanged.)
 */
private void checkStatistics(FileSystem fs, int readOps, int writeOps,
    int largeReadOps) {
  if (readOps != -1) {
    assertEquals(readOps, DFSTestUtil.getStatistics(fs).getReadOps());
  }
  if (writeOps != -1) {
    assertEquals(writeOps, DFSTestUtil.getStatistics(fs).getWriteOps());
  }
  if (largeReadOps != -1) {
    assertEquals(largeReadOps,
        DFSTestUtil.getStatistics(fs).getLargeReadOps());
  }
}
/**
 * Asserts that exactly {@code expectedReadBytes} bytes were read from
 * datanodes at the given network distance, according to the filesystem's
 * aggregated statistics.
 */
private void checkReadStatistics(FileSystem fs, int distance, long expectedReadBytes) {
    final long actualBytesRead =
        DFSTestUtil.getStatistics(fs).getBytesReadByDistance(distance);
    assertEquals(expectedReadBytes, actualBytesRead);
}
/** Reads from a datanode on the same host (network distance 0). */
@Test
public void testLocalHostReadStatistics() throws Exception {
testReadFileSystemStatistics(0);
}
/** Reads from a datanode on the same rack (network distance 2). */
@Test
public void testLocalRackReadStatistics() throws Exception {
testReadFileSystemStatistics(2);
}
/** Reads from a datanode on a remote rack of first degree (distance 4). */
@Test
public void testRemoteRackOfFirstDegreeReadStatistics() throws Exception {
testReadFileSystemStatistics(4);
}
/**
 * Creates a single-DN cluster whose datanode sits at the given network
 * distance from the client, writes a small file, reads it back, and
 * verifies the per-distance read-byte statistics.
 *
 * expectedDistance is the expected distance between client and dn.
 * 0 means local host.
 * 2 means same rack.
 * 4 means remote rack of first degree.
 */
private void testReadFileSystemStatistics(int expectedDistance)
    throws IOException {
  MiniDFSCluster cluster = null;
  final Configuration conf = getTestConfiguration();
  // create a cluster with a dn with the expected distance.
  if (expectedDistance == 0) {
    cluster = new MiniDFSCluster.Builder(conf).
        hosts(new String[] {NetUtils.getLocalHostname()}).build();
  } else if (expectedDistance == 2) {
    cluster = new MiniDFSCluster.Builder(conf).
        hosts(new String[] {"hostFoo"}).build();
  } else if (expectedDistance == 4) {
    cluster = new MiniDFSCluster.Builder(conf).
        racks(new String[] {"/rackFoo"}).build();
  } else {
    // Previously an unsupported distance fell through and dereferenced a
    // null cluster below; fail fast with a clear message instead.
    throw new IllegalArgumentException(
        "Unsupported expectedDistance: " + expectedDistance);
  }
  // create a file, read the file and verify the metrics
  try {
    final FileSystem fs = cluster.getFileSystem();
    DFSTestUtil.getStatistics(fs).reset();
    Path dir = new Path("/test");
    Path file = new Path(dir, "file");
    String input = "hello world";
    DFSTestUtil.writeFile(fs, file, input);
    // try-with-resources: the original leaked this input stream.
    try (FSDataInputStream stm = fs.open(file)) {
      byte[] actual = new byte[4096];
      stm.read(actual);
    }
    checkReadStatistics(fs, expectedDistance, input.length());
  } finally {
    if (cluster != null) cluster.shutdown();
  }
}
@Test
public void testFileChecksum() throws Exception {
  final long seed = RAN.nextLong();
  System.out.println("seed=" + seed);
  RAN.setSeed(seed);

  final Configuration conf = getTestConfiguration();
  final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
      .numDataNodes(2).build();
  // Shut the cluster down in a finally block so a failing assertion no
  // longer leaks the MiniDFSCluster (the original called shutdown() only
  // on the success path).
  try {
    final FileSystem hdfs = cluster.getFileSystem();
    final String nnAddr = conf.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);

    final UserGroupInformation current = UserGroupInformation.getCurrentUser();
    final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
        current.getShortUserName() + "x", new String[]{"user"});

    // getFileChecksum on a missing file must fail.
    try {
      hdfs.getFileChecksum(new Path(
          "/test/TestNonExistingFile"));
      fail("Expecting FileNotFoundException");
    } catch (FileNotFoundException e) {
      assertTrue("Not throwing the intended exception message", e.getMessage()
          .contains("File does not exist: /test/TestNonExistingFile"));
    }

    // getFileChecksum on a directory must fail.
    try {
      Path path = new Path("/test/TestExistingDir/");
      hdfs.mkdirs(path);
      hdfs.getFileChecksum(path);
      fail("Expecting FileNotFoundException");
    } catch (FileNotFoundException e) {
      assertTrue("Not throwing the intended exception message", e.getMessage()
          .contains("Path is not a file: /test/TestExistingDir"));
    }

    //webhdfs
    final String webhdfsuri = WebHdfsConstants.WEBHDFS_SCHEME + "://" + nnAddr;
    System.out.println("webhdfsuri=" + webhdfsuri);
    final FileSystem webhdfs = ugi.doAs(
        new PrivilegedExceptionAction<FileSystem>() {
          @Override
          public FileSystem run() throws Exception {
            return new Path(webhdfsuri).getFileSystem(conf);
          }
        });

    final Path dir = new Path("/filechecksum");
    final int block_size = 1024;
    final int buffer_size = conf.getInt(
        CommonConfigurationKeys.IO_FILE_BUFFER_SIZE_KEY, 4096);
    conf.setInt(HdfsClientConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, 512);

    //try different number of blocks
    for(int n = 0; n < 5; n++) {
      //generate random data
      final byte[] data = new byte[RAN.nextInt(block_size/2-1)+n*block_size+1];
      RAN.nextBytes(data);
      System.out.println("data.length=" + data.length);

      // Write data to a file; try-with-resources closes the stream even if
      // write() throws (the original leaked the stream on failure).
      final Path foo = new Path(dir, "foo" + n);
      try (FSDataOutputStream out = hdfs.create(foo, false, buffer_size,
          (short)2, block_size)) {
        out.write(data);
      }

      //compute checksum
      final FileChecksum hdfsfoocs = hdfs.getFileChecksum(foo);
      System.out.println("hdfsfoocs=" + hdfsfoocs);

      //webhdfs
      final FileChecksum webhdfsfoocs = webhdfs.getFileChecksum(foo);
      System.out.println("webhdfsfoocs=" + webhdfsfoocs);

      final Path webhdfsqualified = new Path(webhdfsuri + dir, "foo" + n);
      final FileChecksum webhdfs_qfoocs =
          webhdfs.getFileChecksum(webhdfsqualified);
      System.out.println("webhdfs_qfoocs=" + webhdfs_qfoocs);

      //create a zero byte file
      final Path zeroByteFile = new Path(dir, "zeroByteFile" + n);
      hdfs.create(zeroByteFile, false, buffer_size, (short)2, block_size)
          .close();

      // Verify the magic value for zero byte files. Note: JUnit's
      // assertEquals takes (expected, actual) — the original had them
      // swapped, which produces a misleading failure message.
      final FileChecksum zeroChecksum = hdfs.getFileChecksum(zeroByteFile);
      assertEquals("MD5-of-0MD5-of-0CRC32:70bc8f4b72a86921468bf8e8441dce51",
          zeroChecksum.toString());

      //write another file with the same content; checksums must match
      final Path bar = new Path(dir, "bar" + n);
      try (FSDataOutputStream out = hdfs.create(bar, false, buffer_size,
          (short)2, block_size)) {
        out.write(data);
      }

      { //verify checksum
        final FileChecksum barcs = hdfs.getFileChecksum(bar);
        final int barhashcode = barcs.hashCode();
        assertEquals(hdfsfoocs.hashCode(), barhashcode);
        assertEquals(hdfsfoocs, barcs);

        //webhdfs
        assertEquals(webhdfsfoocs.hashCode(), barhashcode);
        assertEquals(webhdfsfoocs, barcs);

        assertEquals(webhdfs_qfoocs.hashCode(), barhashcode);
        assertEquals(webhdfs_qfoocs, barcs);
      }

      hdfs.setPermission(dir, new FsPermission((short)0));

      { //test permission error on webhdfs
        try {
          webhdfs.getFileChecksum(webhdfsqualified);
          fail();
        } catch(IOException ioe) {
          FileSystem.LOG.info("GOOD: getting an exception", ioe);
        }
      }
      hdfs.setPermission(dir, new FsPermission((short)0777));
    }
  } finally {
    cluster.shutdown();
  }
}
@Test
public void testAllWithDualPort() throws Exception {
// Re-run a subset of the client tests with the dualPortTesting flag set.
// The flag is cleared in a finally block so a failure in one of the
// delegated tests cannot leak the setting into later tests.
dualPortTesting = true;
try {
testFileSystemCloseAll();
testDFSClose();
testDFSClient();
testFileChecksum();
} finally {
dualPortTesting = false;
}
}
@Test
public void testAllWithNoXmlDefaults() throws Exception {
// Do all the tests with a configuration that ignores the defaults in
// the XML files. The flag is cleared in a finally block so a failing
// delegated test cannot leak the setting into later tests.
noXmlDefaults = true;
try {
testFileSystemCloseAll();
testDFSClose();
testDFSClient();
testFileChecksum();
} finally {
noXmlDefaults = false;
}
}
@Test(timeout=120000)
public void testLocatedFileStatusStorageIdsTypes() throws Exception {
  final Configuration conf = getTestConfiguration();
  final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
      .numDataNodes(3).build();
  try {
    final DistributedFileSystem dfs = cluster.getFileSystem();
    final Path path = new Path("/testListLocatedStatus");
    final int blockSize = 4096;
    final int numBlocks = 10;
    final int repl = 2;

    // Write a multi-block test file and wait until it is fully replicated.
    DFSTestUtil.createFile(dfs, path, blockSize, numBlocks * blockSize,
        blockSize, (short) repl, 0xADDED);
    DFSTestUtil.waitForReplication(dfs, path, (short) repl, 30000);

    // List the file and grab its block locations.
    RemoteIterator<LocatedFileStatus> listing = dfs.listLocatedStatus(path);
    assertTrue("Expected file to be present", listing.hasNext());
    BlockLocation[] blockLocations = listing.next().getBlockLocations();
    assertEquals("Unexpected number of locations", numBlocks,
        blockLocations.length);

    // Collect every storage ID known to the cluster's datanodes.
    Set<String> knownStorageIds = new HashSet<>();
    for (DataNode dn : cluster.getDataNodes()) {
      try (FsDatasetSpi.FsVolumeReferences refs = dn.getFSDataset()
          .getFsVolumeReferences()) {
        for (FsVolumeSpi volume : refs) {
          knownStorageIds.add(volume.getStorageID());
        }
      }
    }

    for (BlockLocation location : blockLocations) {
      // Deduplicate via a set, since there should be no dupes.
      Set<String> blockStorageIds = new HashSet<>();
      for (String id : location.getStorageIds()) {
        blockStorageIds.add(id);
      }
      assertEquals("Unexpected num storage ids", repl,
          blockStorageIds.size());
      // Every reported ID must belong to some datanode volume.
      assertTrue("Unknown storage IDs found!",
          knownStorageIds.containsAll(blockStorageIds));

      // Storage types should be the default, since none were configured.
      StorageType[] storageTypes = location.getStorageTypes();
      assertEquals("Unexpected num storage types", repl, storageTypes.length);
      for (StorageType storageType : storageTypes) {
        assertEquals("Unexpected storage type", StorageType.DEFAULT,
            storageType);
      }
    }
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
@Test
public void testCreateWithCustomChecksum() throws Exception {
  Configuration conf = getTestConfiguration();
  MiniDFSCluster cluster = null;
  Path testBasePath = new Path("/test/csum");
  // create args
  Path path1 = new Path(testBasePath, "file_wtih_crc1");
  Path path2 = new Path(testBasePath, "file_with_crc2");
  ChecksumOpt opt1 = new ChecksumOpt(DataChecksum.Type.CRC32C, 512);
  ChecksumOpt opt2 = new ChecksumOpt(DataChecksum.Type.CRC32, 512);

  // common args
  FsPermission perm = FsPermission.getDefault().applyUMask(
      FsPermission.getUMask(conf));
  EnumSet<CreateFlag> flags = EnumSet.of(CreateFlag.OVERWRITE,
      CreateFlag.CREATE);
  short repl = 1;

  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    FileSystem dfs = cluster.getFileSystem();

    dfs.mkdirs(testBasePath);

    // Create two files with different checksum types. try-with-resources
    // guarantees both streams are closed even when a write fails (the
    // original leaked both streams on an exception in the loop).
    try (FSDataOutputStream out1 = dfs.create(path1, perm, flags, 4096, repl,
            131072L, null, opt1);
        FSDataOutputStream out2 = dfs.create(path2, perm, flags, 4096, repl,
            131072L, null, opt2)) {
      for (int i = 0; i < 1024; i++) {
        out1.write(i);
        out2.write(i);
      }
    }

    // the two checksums must be different.
    MD5MD5CRC32FileChecksum sum1 =
        (MD5MD5CRC32FileChecksum)dfs.getFileChecksum(path1);
    MD5MD5CRC32FileChecksum sum2 =
        (MD5MD5CRC32FileChecksum)dfs.getFileChecksum(path2);
    assertFalse(sum1.equals(sum2));

    // check the individual params
    assertEquals(DataChecksum.Type.CRC32C, sum1.getCrcType());
    assertEquals(DataChecksum.Type.CRC32, sum2.getCrcType());
  } finally {
    if (cluster != null) {
      cluster.getFileSystem().delete(testBasePath, true);
      cluster.shutdown();
    }
  }
}
@Test(timeout=60000)
public void testFileCloseStatus() throws IOException {
  final Configuration conf = new HdfsConfiguration();
  final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
  final DistributedFileSystem dfs = cluster.getFileSystem();
  try {
    // Create a file and write some data without closing it yet.
    final Path path = new Path("/simpleFlush.dat");
    final FSDataOutputStream out = dfs.create(path);
    out.writeBytes("Some test data");
    out.flush();
    // While the writer holds the stream open the file must report open.
    assertFalse("File status should be open", dfs.isFileClosed(path));
    out.close();
    // Once the stream is closed the file must report closed.
    assertTrue("File status should be closed", dfs.isFileClosed(path));
  } finally {
    cluster.shutdown();
  }
}
@Test(timeout=60000)
public void testListFiles() throws IOException {
  final Configuration conf = new HdfsConfiguration();
  final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
  try {
    final DistributedFileSystem dfs = cluster.getFileSystem();

    // Create one file under a relative (non-absolute) path.
    final Path relative = new Path("relative");
    dfs.create(new Path(relative, "foo")).close();

    // Recursively list the relative path and drain the iterator.
    final List<LocatedFileStatus> statuses = new ArrayList<>();
    final RemoteIterator<LocatedFileStatus> files =
        dfs.listFiles(relative, true);
    while (files.hasNext()) {
      statuses.add(files.next());
    }
    System.out.println("retVal = " + statuses);
  } finally {
    cluster.shutdown();
  }
}
@Test(timeout=10000)
public void testDFSClientPeerReadTimeout() throws IOException {
  final int timeout = 1000;
  final Configuration conf = new HdfsConfiguration();
  conf.setInt(HdfsClientConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY, timeout);

  // only need cluster to create a dfs client to get a peer
  final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
  try {
    cluster.waitActive();
    DistributedFileSystem dfs = cluster.getFileSystem();
    // Use a dummy server socket that never answers so the read must time
    // out. try-with-resources closes both the socket and the peer — the
    // original leaked them for the remainder of the JVM.
    try (ServerSocket socket = new ServerSocket(0);
        Peer peer = dfs.getClient().newConnectedPeer(
            (InetSocketAddress) socket.getLocalSocketAddress(), null, null)) {
      long start = Time.now();
      try {
        peer.getInputStream().read();
        Assert.fail("read should timeout");
      } catch (SocketTimeoutException ste) {
        // Accept a ~10% scheduling tolerance around the configured timeout.
        long delta = Time.now() - start;
        if (delta < timeout*0.9) {
          throw new IOException("read timedout too soon in " + delta + " ms.",
              ste);
        }
        if (delta > timeout*1.1) {
          throw new IOException("read timedout too late in " + delta + " ms.",
              ste);
        }
      }
    }
  } finally {
    cluster.shutdown();
  }
}
@Test(timeout=60000)
public void testGetServerDefaults() throws IOException {
  final Configuration conf = new HdfsConfiguration();
  final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
  try {
    cluster.waitActive();
    // A live namenode must hand back a non-null set of server defaults.
    final DistributedFileSystem dfs = cluster.getFileSystem();
    final FsServerDefaults defaults = dfs.getServerDefaults();
    Assert.assertNotNull(defaults);
  } finally {
    cluster.shutdown();
  }
}
@Test(timeout=10000)
public void testDFSClientPeerWriteTimeout() throws IOException {
  final int timeout = 1000;
  final Configuration conf = new HdfsConfiguration();
  conf.setInt(HdfsClientConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY, timeout);

  // only need cluster to create a dfs client to get a peer
  final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
  try {
    cluster.waitActive();
    DistributedFileSystem dfs = cluster.getFileSystem();
    // Write 10 MB to a dummy socket whose server side never reads, so the
    // send buffer fills and the write must time out. try-with-resources
    // closes both the socket and the peer — the original leaked them.
    try (ServerSocket socket = new ServerSocket(0);
        Peer peer = dfs.getClient().newConnectedPeer(
            (InetSocketAddress) socket.getLocalSocketAddress(), null, null)) {
      long start = Time.now();
      try {
        byte[] buf = new byte[10 * 1024 * 1024];
        peer.getOutputStream().write(buf);
        long delta = Time.now() - start;
        // Fixed message: the original concatenation produced
        // "...msbut should timedout".
        Assert.fail("write finished in " + delta
            + " ms but should have timed out");
      } catch (SocketTimeoutException ste) {
        // Accept a scheduling tolerance around the configured timeout.
        long delta = Time.now() - start;
        if (delta < timeout * 0.9) {
          throw new IOException("write timedout too soon in " + delta + " ms.",
              ste);
        }
        if (delta > timeout * 1.2) {
          throw new IOException("write timedout too late in " + delta + " ms.",
              ste);
        }
      }
    }
  } finally {
    cluster.shutdown();
  }
}
@Test(timeout = 30000)
public void testTotalDfsUsed() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  MiniDFSCluster cluster = null;
  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    final FileSystem fs = cluster.getFileSystem();

    // Two-byte file directly under the root.
    final FSDataOutputStream rootFile = fs.create(new Path("/File1"));
    rootFile.write("hi".getBytes());
    rootFile.close();

    // Two-byte file inside a sub-directory.
    final FSDataOutputStream nestedFile = fs.create(new Path("/Folder1/File2"));
    nestedFile.write("hi".getBytes());
    nestedFile.close();

    // getUsed(Path) reports the total length of the files under that path.
    assertEquals(2, fs.getUsed(new Path("/Folder1")));
    // getUsed() reports the total length of every file in the filesystem.
    assertEquals(4, fs.getUsed());
  } finally {
    if (cluster != null) {
      cluster.shutdown();
      cluster = null;
    }
  }
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * <p>
 * Contains the count of workflow executions returned from <a>CountOpenWorkflowExecutions</a> or
 * <a>CountClosedWorkflowExecutions</a>
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/swf-2012-01-25/WorkflowExecutionCount" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class WorkflowExecutionCount extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The number of workflow executions. */
    private Integer count;

    /**
     * Whether the actual count exceeded the maximum supported by this API, in
     * which case {@code count} holds the truncated value.
     */
    private Boolean truncated;

    /**
     * Sets the number of workflow executions.
     *
     * @param count
     *        The number of workflow executions.
     */
    public void setCount(Integer count) {
        this.count = count;
    }

    /**
     * Returns the number of workflow executions.
     *
     * @return The number of workflow executions.
     */
    public Integer getCount() {
        return this.count;
    }

    /**
     * Fluent setter for the number of workflow executions.
     *
     * @param count
     *        The number of workflow executions.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkflowExecutionCount withCount(Integer count) {
        setCount(count);
        return this;
    }

    /**
     * Sets whether the actual count was more than the maximum supported by this API and the count returned is the
     * truncated value.
     *
     * @param truncated
     *        If set to true, indicates that the actual count was more than the maximum supported by this API and the
     *        count returned is the truncated value.
     */
    public void setTruncated(Boolean truncated) {
        this.truncated = truncated;
    }

    /**
     * Returns whether the count returned is the truncated value.
     *
     * @return If set to true, indicates that the actual count was more than the maximum supported by this API and the
     *         count returned is the truncated value.
     */
    public Boolean getTruncated() {
        return this.truncated;
    }

    /**
     * Fluent setter for the truncated flag.
     *
     * @param truncated
     *        If set to true, indicates that the actual count was more than the maximum supported by this API and the
     *        count returned is the truncated value.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkflowExecutionCount withTruncated(Boolean truncated) {
        setTruncated(truncated);
        return this;
    }

    /**
     * Convenience alias of {@link #getTruncated()}.
     *
     * @return If set to true, indicates that the actual count was more than the maximum supported by this API and the
     *         count returned is the truncated value.
     */
    public Boolean isTruncated() {
        return this.truncated;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Output format must stay identical to the generated version:
        // "{Count: <c>,Truncated: <t>}" with each part present only when
        // its field is non-null.
        final StringBuilder buf = new StringBuilder("{");
        if (this.count != null) {
            buf.append("Count: ").append(this.count).append(",");
        }
        if (this.truncated != null) {
            buf.append("Truncated: ").append(this.truncated);
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, covering the null check as well.
        if (!(obj instanceof WorkflowExecutionCount)) {
            return false;
        }
        final WorkflowExecutionCount that = (WorkflowExecutionCount) obj;
        return java.util.Objects.equals(this.count, that.count)
                && java.util.Objects.equals(this.truncated, that.truncated);
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0)
        // as the generated prime-loop version, so values are unchanged.
        return java.util.Objects.hash(this.count, this.truncated);
    }

    @Override
    public WorkflowExecutionCount clone() {
        try {
            return (WorkflowExecutionCount) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.api.authorization.history;
import static org.camunda.bpm.engine.authorization.Authorization.ANY;
import static org.camunda.bpm.engine.authorization.Permissions.READ_HISTORY;
import static org.camunda.bpm.engine.authorization.Resources.PROCESS_DEFINITION;
import java.util.Date;
import java.util.List;
import org.camunda.bpm.engine.AuthorizationException;
import org.camunda.bpm.engine.ProcessEngineConfiguration;
import org.camunda.bpm.engine.history.HistoricJobLog;
import org.camunda.bpm.engine.history.HistoricJobLogQuery;
import org.camunda.bpm.engine.history.HistoricProcessInstance;
import org.camunda.bpm.engine.impl.AbstractQuery;
import org.camunda.bpm.engine.impl.interceptor.Command;
import org.camunda.bpm.engine.impl.interceptor.CommandContext;
import org.camunda.bpm.engine.impl.interceptor.CommandExecutor;
import org.camunda.bpm.engine.impl.jobexecutor.TimerSuspendProcessDefinitionHandler;
import org.camunda.bpm.engine.task.Task;
import org.camunda.bpm.engine.test.RequiredHistoryLevel;
import org.camunda.bpm.engine.test.api.authorization.AuthorizationTest;
/**
* @author Roman Smirnov
*
*/
@RequiredHistoryLevel(ProcessEngineConfiguration.HISTORY_FULL)
public class HistoricJobLogAuthorizationTest extends AuthorizationTest {

  protected static final String TIMER_START_PROCESS_KEY = "timerStartProcess";
  protected static final String TIMER_BOUNDARY_PROCESS_KEY = "timerBoundaryProcess";
  protected static final String ONE_INCIDENT_PROCESS_KEY = "process";

  // Deployment created in setUp() and removed in tearDown().
  protected String deploymentId;

  @Override
  public void setUp() throws Exception {
    deploymentId = createDeployment(null,
        "org/camunda/bpm/engine/test/api/authorization/timerStartEventProcess.bpmn20.xml",
        "org/camunda/bpm/engine/test/api/authorization/timerBoundaryEventProcess.bpmn20.xml",
        "org/camunda/bpm/engine/test/api/authorization/oneIncidentProcess.bpmn20.xml").getId();
    super.setUp();
  }

  @Override
  public void tearDown() {
    super.tearDown();
    CommandExecutor commandExecutor = processEngineConfiguration.getCommandExecutorTxRequired();
    commandExecutor.execute(new Command<Object>() {
      public Object execute(CommandContext commandContext) {
        // Remove job logs written by the suspend-handler timer jobs so they
        // do not leak into other tests.
        commandContext.getHistoricJobLogManager().deleteHistoricJobLogsByHandlerType(TimerSuspendProcessDefinitionHandler.TYPE);
        return null;
      }
    });
    deleteDeployment(deploymentId);
  }

  // historic job log query (start timer job) ////////////////////////////////

  public void testStartTimerJobLogQueryWithoutAuthorization() {
    // given

    // when
    HistoricJobLogQuery query = historyService.createHistoricJobLogQuery();

    // then
    verifyQueryResults(query, 0);
  }

  public void testStartTimerJobLogQueryWithReadHistoryPermissionOnProcessDefinition() {
    // given
    createGrantAuthorization(PROCESS_DEFINITION, TIMER_START_PROCESS_KEY, userId, READ_HISTORY);

    // when
    HistoricJobLogQuery query = historyService.createHistoricJobLogQuery();

    // then
    verifyQueryResults(query, 1);
  }

  public void testStartTimerJobLogQueryWithReadHistoryPermissionOnAnyProcessDefinition() {
    // given
    createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);

    // when
    HistoricJobLogQuery query = historyService.createHistoricJobLogQuery();

    // then
    verifyQueryResults(query, 1);
  }

  // historic job log query ////////////////////////////////////////////////

  public void testSimpleQueryWithoutAuthorization() {
    // given
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);

    // when
    HistoricJobLogQuery query = historyService.createHistoricJobLogQuery();

    // then
    verifyQueryResults(query, 0);
  }

  public void testSimpleQueryWithHistoryReadPermissionOnProcessDefinition() {
    // given
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);
    createGrantAuthorization(PROCESS_DEFINITION, ONE_INCIDENT_PROCESS_KEY, userId, READ_HISTORY);

    // when
    HistoricJobLogQuery query = historyService.createHistoricJobLogQuery();

    // then
    verifyQueryResults(query, 4);
  }

  public void testSimpleQueryWithHistoryReadPermissionOnAnyProcessDefinition() {
    // given
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);
    createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);

    // when
    HistoricJobLogQuery query = historyService.createHistoricJobLogQuery();

    // then
    verifyQueryResults(query, 5);
  }

  public void testSimpleQueryWithMultiple() {
    // given
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);
    createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);
    createGrantAuthorization(PROCESS_DEFINITION, ONE_INCIDENT_PROCESS_KEY, userId, READ_HISTORY);

    // when
    HistoricJobLogQuery query = historyService.createHistoricJobLogQuery();

    // then
    verifyQueryResults(query, 5);
  }

  // historic job log query (multiple process instance) ////////////////////////////////////////////////

  public void testQueryWithoutAuthorization() {
    // given
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);

    executeTimerStartJobs();

    // when
    HistoricJobLogQuery query = historyService.createHistoricJobLogQuery();

    // then
    verifyQueryResults(query, 0);
  }

  public void testQueryWithHistoryReadPermissionOnProcessDefinition() {
    // given
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);

    executeTimerStartJobs();

    createGrantAuthorization(PROCESS_DEFINITION, ONE_INCIDENT_PROCESS_KEY, userId, READ_HISTORY);

    // when
    HistoricJobLogQuery query = historyService.createHistoricJobLogQuery();

    // then
    verifyQueryResults(query, 12);
  }

  public void testQueryWithHistoryReadPermissionOnAnyProcessDefinition() {
    // given
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);

    executeTimerStartJobs();

    createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);

    // when
    HistoricJobLogQuery query = historyService.createHistoricJobLogQuery();

    // then
    verifyQueryResults(query, 17);
  }

  // historic job log query (standalone job) ///////////////////////

  public void testQueryAfterStandaloneJob() {
    // given
    disableAuthorization();
    repositoryService.suspendProcessDefinitionByKey(TIMER_BOUNDARY_PROCESS_KEY, true, new Date());
    enableAuthorization();

    // when
    HistoricJobLogQuery query = historyService.createHistoricJobLogQuery();

    // then: standalone job logs are visible without any grant and carry no
    // process definition key.
    verifyQueryResults(query, 1);

    HistoricJobLog jobLog = query.singleResult();
    assertNull(jobLog.getProcessDefinitionKey());

    deleteDeployment(deploymentId);

    disableAuthorization();
    String jobId = managementService.createJobQuery().singleResult().getId();
    managementService.deleteJob(jobId);
    enableAuthorization();
  }

  // delete deployment (cascade = false)

  public void testQueryAfterDeletingDeployment() {
    // given
    startProcessInstanceByKey(TIMER_BOUNDARY_PROCESS_KEY);
    startProcessInstanceByKey(TIMER_BOUNDARY_PROCESS_KEY);
    startProcessInstanceByKey(TIMER_BOUNDARY_PROCESS_KEY);
    createGrantAuthorization(PROCESS_DEFINITION, TIMER_BOUNDARY_PROCESS_KEY, userId, READ_HISTORY);

    // Complete all tasks and delete the deployment in a single unauthorized
    // section (the original toggled authorization off and on redundantly in
    // between).
    disableAuthorization();
    List<Task> tasks = taskService.createTaskQuery().list();
    for (Task task : tasks) {
      taskService.complete(task.getId());
    }
    repositoryService.deleteDeployment(deploymentId);
    enableAuthorization();

    // when
    HistoricJobLogQuery query = historyService.createHistoricJobLogQuery();

    // then: history survives the (non-cascading) deployment deletion.
    verifyQueryResults(query, 6);

    disableAuthorization();
    List<HistoricProcessInstance> instances = historyService.createHistoricProcessInstanceQuery().list();
    for (HistoricProcessInstance instance : instances) {
      historyService.deleteHistoricProcessInstance(instance.getId());
    }
    enableAuthorization();
  }

  // get historic job log exception stacktrace (standalone) /////////////////////

  public void testGetHistoricStandaloneJobLogExceptionStacktrace() {
    // given
    disableAuthorization();
    repositoryService.suspendProcessDefinitionByKey(TIMER_BOUNDARY_PROCESS_KEY, true, new Date());
    enableAuthorization();
    String jobLogId = historyService.createHistoricJobLogQuery().singleResult().getId();

    // when
    String stacktrace = historyService.getHistoricJobLogExceptionStacktrace(jobLogId);

    // then: the standalone job never failed, so there is no stacktrace.
    assertNull(stacktrace);

    deleteDeployment(deploymentId);

    disableAuthorization();
    String jobId = managementService.createJobQuery().singleResult().getId();
    managementService.deleteJob(jobId);
    enableAuthorization();
  }

  // get historic job log exception stacktrace /////////////////////

  public void testGetHistoricJobLogExceptionStacktraceWithoutAuthorization() {
    // given
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);

    disableAuthorization();
    String jobLogId = historyService.createHistoricJobLogQuery().failureLog().listPage(0, 1).get(0).getId();
    enableAuthorization();

    try {
      // when
      historyService.getHistoricJobLogExceptionStacktrace(jobLogId);
      fail("Exception expected: It should not be possible to get the historic job log exception stacktrace");
    } catch (AuthorizationException e) {
      // then
      String message = e.getMessage();
      assertTextPresent(userId, message);
      assertTextPresent(READ_HISTORY.getName(), message);
      assertTextPresent(ONE_INCIDENT_PROCESS_KEY, message);
      assertTextPresent(PROCESS_DEFINITION.resourceName(), message);
    }
  }

  public void testGetHistoricJobLogExceptionStacktraceWithReadHistoryPermissionOnProcessDefinition() {
    // given
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);

    disableAuthorization();
    String jobLogId = historyService.createHistoricJobLogQuery().failureLog().listPage(0, 1).get(0).getId();
    enableAuthorization();

    createGrantAuthorization(PROCESS_DEFINITION, ONE_INCIDENT_PROCESS_KEY, userId, READ_HISTORY);

    // when
    String stacktrace = historyService.getHistoricJobLogExceptionStacktrace(jobLogId);

    // then
    assertNotNull(stacktrace);
  }

  public void testGetHistoricJobLogExceptionStacktraceWithReadHistoryPermissionOnAnyProcessDefinition() {
    // given
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);

    disableAuthorization();
    String jobLogId = historyService.createHistoricJobLogQuery().failureLog().listPage(0, 1).get(0).getId();
    enableAuthorization();

    createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);

    // when
    String stacktrace = historyService.getHistoricJobLogExceptionStacktrace(jobLogId);

    // then
    assertNotNull(stacktrace);
  }

  // helper ////////////////////////////////////////////////////////

  /**
   * Executes both pending timer-start jobs (one per deployed timer start
   * process definition) with authorization disabled. Extracted from three
   * tests that duplicated this block verbatim.
   */
  private void executeTimerStartJobs() {
    disableAuthorization();
    String jobId = managementService.createJobQuery().processDefinitionKey(TIMER_START_PROCESS_KEY).singleResult().getId();
    managementService.executeJob(jobId);
    jobId = managementService.createJobQuery().processDefinitionKey(TIMER_START_PROCESS_KEY).singleResult().getId();
    managementService.executeJob(jobId);
    enableAuthorization();
  }

  protected void verifyQueryResults(HistoricJobLogQuery query, int countExpected) {
    verifyQueryResults((AbstractQuery<?, ?>) query, countExpected);
  }

}
| |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.base;
import android.text.TextUtils;
import android.util.Log;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import org.chromium.base.annotations.MainDex;
import org.chromium.base.annotations.NativeMethods;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
/**
* Java mirror of base/command_line.h.
* Android applications don't have command line arguments. Instead, they're "simulated" by reading a
* file at a specific location early during startup. Applications each define their own files, e.g.,
* ContentShellApplication.COMMAND_LINE_FILE.
**/
@MainDex
public abstract class CommandLine {
// Public abstract interface, implemented in derived classes.
// All these methods reflect their native-side counterparts.
/**
* Returns true if this command line contains the given switch.
* (Switch names ARE case-sensitive).
* @param switchString The switch key to lookup. It should NOT start with '--' !
*/
public abstract boolean hasSwitch(String switchString);
/**
* Return the value associated with the given switch, or null.
* @param switchString The switch key to lookup. It should NOT start with '--' !
* @return switch value, or null if the switch is not set or set to empty.
*/
public abstract String getSwitchValue(String switchString);
/**
* Return the value associated with the given switch, or {@code defaultValue} if the switch
* was not specified or its value is empty.
* @param switchString The switch key to lookup. It should NOT start with '--' !
* @param defaultValue The default value to return if the switch isn't set.
* @return Switch value, or {@code defaultValue} if the switch is not set or set to empty.
*/
public String getSwitchValue(String switchString, String defaultValue) {
    final String value = getSwitchValue(switchString);
    if (TextUtils.isEmpty(value)) {
        return defaultValue;
    }
    return value;
}
/**
* Return a copy of all switches, along with their values.
*/
// NOTE(review): raw Map return type — presumably Map<String, String>;
// confirm against implementations before tightening the signature.
public abstract Map getSwitches();
/**
* Append a switch to the command line. There is no guarantee
* this action happens before the switch is needed.
* @param switchString the switch to add. It should NOT start with '--' !
*/
public abstract void appendSwitch(String switchString);
/**
* Append a switch and value to the command line. There is no
* guarantee this action happens before the switch is needed.
* @param switchString the switch to add. It should NOT start with '--' !
* @param value the value for this switch.
* For example, --foo=bar becomes 'foo', 'bar'.
*/
public abstract void appendSwitchWithValue(String switchString, String value);
/**
* Append switch/value items in "command line" format (excluding argv[0] program name).
* E.g. { '--gofast', '--username=fred' }
* @param array an array of switch or switch/value items in command line format.
* Unlike the other append routines, these switches SHOULD start with '--' .
* Unlike init(), this does not include the program name in array[0].
*/
public abstract void appendSwitchesAndArguments(String[] array);
/**
* Remove the switch from the command line. If no such switch is present, this has no effect.
* @param switchString The switch key to lookup. It should NOT start with '--' !
*/
public abstract void removeSwitch(String switchString);
/**
* Determine if the command line is bound to the native (JNI) implementation.
* @return true if the underlying implementation is delegating to the native command line.
*/
public boolean isNativeImplementation() {
return false;
}
/**
* Returns the switches and arguments passed into the program, with switches and their
* values coming before all of the arguments.
*/
protected abstract String[] getCommandLineArguments();
/**
* Destroy the command line. Called when a different instance is set.
* @see #setInstance
*/
protected void destroy() {}
private static final AtomicReference<CommandLine> sCommandLine =
new AtomicReference<CommandLine>();
/**
* @return true if the command line has already been initialized.
*/
public static boolean isInitialized() {
return sCommandLine.get() != null;
}
// Equivalent to CommandLine::ForCurrentProcess in C++.
public static CommandLine getInstance() {
CommandLine commandLine = sCommandLine.get();
assert commandLine != null;
return commandLine;
}
/**
* Initialize the singleton instance, must be called exactly once (either directly or
* via one of the convenience wrappers below) before using the static singleton instance.
* @param args command line flags in 'argv' format: args[0] is the program name.
*/
public static void init(@Nullable String[] args) {
setInstance(new JavaCommandLine(args));
}
/**
* Initialize the command line from the command-line file.
*
* @param file The fully qualified command line file.
*/
public static void initFromFile(String file) {
char[] buffer = readFileAsUtf8(file);
init(buffer == null ? null : tokenizeQuotedArguments(buffer));
}
/**
* Resets both the java proxy and the native command lines. This allows the entire
* command line initialization to be re-run including the call to onJniLoaded.
*/
@VisibleForTesting
public static void reset() {
setInstance(null);
}
/**
* Parse command line flags from a flat buffer, supporting double-quote enclosed strings
* containing whitespace. argv elements are derived by splitting the buffer on whitepace;
* double quote characters may enclose tokens containing whitespace; a double-quote literal
* may be escaped with back-slash. (Otherwise backslash is taken as a literal).
* @param buffer A command line in command line file format as described above.
* @return the tokenized arguments, suitable for passing to init().
*/
@VisibleForTesting
static String[] tokenizeQuotedArguments(char[] buffer) {
// Just field trials can take up to 10K of command line.
if (buffer.length > 64 * 1024) {
// Check that our test runners are setting a reasonable number of flags.
throw new RuntimeException("Flags file too big: " + buffer.length);
}
ArrayList<String> args = new ArrayList<String>();
StringBuilder arg = null;
final char noQuote = '\0';
final char singleQuote = '\'';
final char doubleQuote = '"';
char currentQuote = noQuote;
for (char c : buffer) {
// Detect start or end of quote block.
if ((currentQuote == noQuote && (c == singleQuote || c == doubleQuote))
|| c == currentQuote) {
if (arg != null && arg.length() > 0 && arg.charAt(arg.length() - 1) == '\\') {
// Last char was a backslash; pop it, and treat c as a literal.
arg.setCharAt(arg.length() - 1, c);
} else {
currentQuote = currentQuote == noQuote ? c : noQuote;
}
} else if (currentQuote == noQuote && Character.isWhitespace(c)) {
if (arg != null) {
args.add(arg.toString());
arg = null;
}
} else {
if (arg == null) arg = new StringBuilder();
arg.append(c);
}
}
if (arg != null) {
if (currentQuote != noQuote) {
Log.w(TAG, "Unterminated quoted string: " + arg);
}
args.add(arg.toString());
}
return args.toArray(new String[args.size()]);
}
private static final String TAG = "CommandLine";
private static final String SWITCH_PREFIX = "--";
private static final String SWITCH_TERMINATOR = SWITCH_PREFIX;
private static final String SWITCH_VALUE_SEPARATOR = "=";
public static void enableNativeProxy() {
// Make a best-effort to ensure we make a clean (atomic) switch over from the old to
// the new command line implementation. If another thread is modifying the command line
// when this happens, all bets are off. (As per the native CommandLine).
sCommandLine.set(new NativeCommandLine(getJavaSwitchesOrNull()));
}
@Nullable
public static String[] getJavaSwitchesOrNull() {
CommandLine commandLine = sCommandLine.get();
if (commandLine != null) {
return commandLine.getCommandLineArguments();
}
return null;
}
private static void setInstance(CommandLine commandLine) {
CommandLine oldCommandLine = sCommandLine.getAndSet(commandLine);
if (oldCommandLine != null) {
oldCommandLine.destroy();
}
}
/**
* Set {@link CommandLine} for testing.
* @param commandLine The {@link CommandLine} to use.
*/
@VisibleForTesting
public static void setInstanceForTesting(CommandLine commandLine) {
setInstance(commandLine);
}
/**
* @param fileName the file to read in.
* @return Array of chars read from the file, or null if the file cannot be read.
*/
private static char[] readFileAsUtf8(String fileName) {
File f = new File(fileName);
try (FileReader reader = new FileReader(f)) {
char[] buffer = new char[(int) f.length()];
int charsRead = reader.read(buffer);
// charsRead < f.length() in the case of multibyte characters.
return Arrays.copyOfRange(buffer, 0, charsRead);
} catch (IOException e) {
return null; // Most likely file not found.
}
}
private CommandLine() {}
private static class JavaCommandLine extends CommandLine {
private HashMap<String, String> mSwitches = new HashMap<String, String>();
private ArrayList<String> mArgs = new ArrayList<String>();
// The arguments begin at index 1, since index 0 contains the executable name.
private int mArgsBegin = 1;
JavaCommandLine(@Nullable String[] args) {
if (args == null || args.length == 0 || args[0] == null) {
mArgs.add("");
} else {
mArgs.add(args[0]);
appendSwitchesInternal(args, 1);
}
// Invariant: we always have the argv[0] program name element.
assert mArgs.size() > 0;
}
@Override
protected String[] getCommandLineArguments() {
return mArgs.toArray(new String[mArgs.size()]);
}
@Override
public boolean hasSwitch(String switchString) {
return mSwitches.containsKey(switchString);
}
@Override
public String getSwitchValue(String switchString) {
// This is slightly round about, but needed for consistency with the NativeCommandLine
// version which does not distinguish empty values from key not present.
String value = mSwitches.get(switchString);
return value == null || value.isEmpty() ? null : value;
}
@Override
public Map<String, String> getSwitches() {
return new HashMap<>(mSwitches);
}
@Override
public void appendSwitch(String switchString) {
appendSwitchWithValue(switchString, null);
}
/**
* Appends a switch to the current list.
* @param switchString the switch to add. It should NOT start with '--' !
* @param value the value for this switch.
*/
@Override
public void appendSwitchWithValue(String switchString, String value) {
mSwitches.put(switchString, value == null ? "" : value);
// Append the switch and update the switches/arguments divider mArgsBegin.
String combinedSwitchString = SWITCH_PREFIX + switchString;
if (value != null && !value.isEmpty()) {
combinedSwitchString += SWITCH_VALUE_SEPARATOR + value;
}
mArgs.add(mArgsBegin++, combinedSwitchString);
}
@Override
public void appendSwitchesAndArguments(String[] array) {
appendSwitchesInternal(array, 0);
}
// Add the specified arguments, but skipping the first |skipCount| elements.
private void appendSwitchesInternal(String[] array, int skipCount) {
boolean parseSwitches = true;
for (String arg : array) {
if (skipCount > 0) {
--skipCount;
continue;
}
if (arg.equals(SWITCH_TERMINATOR)) {
parseSwitches = false;
}
if (parseSwitches && arg.startsWith(SWITCH_PREFIX)) {
String[] parts = arg.split(SWITCH_VALUE_SEPARATOR, 2);
String value = parts.length > 1 ? parts[1] : null;
appendSwitchWithValue(parts[0].substring(SWITCH_PREFIX.length()), value);
} else {
mArgs.add(arg);
}
}
}
@Override
public void removeSwitch(String switchString) {
mSwitches.remove(switchString);
String combinedSwitchString = SWITCH_PREFIX + switchString;
// Since we permit a switch to be added multiple times, we need to remove all instances
// from mArgs.
for (int i = mArgsBegin - 1; i > 0; i--) {
if (mArgs.get(i).equals(combinedSwitchString)
|| mArgs.get(i).startsWith(combinedSwitchString + SWITCH_VALUE_SEPARATOR)) {
--mArgsBegin;
mArgs.remove(i);
}
}
}
}
private static class NativeCommandLine extends CommandLine {
public NativeCommandLine(@Nullable String[] args) {
CommandLineJni.get().init(args);
}
@Override
public boolean hasSwitch(String switchString) {
return CommandLineJni.get().hasSwitch(switchString);
}
@Override
public String getSwitchValue(String switchString) {
return CommandLineJni.get().getSwitchValue(switchString);
}
@Override
public Map<String, String> getSwitches() {
HashMap<String, String> switches = new HashMap<String, String>();
// Iterate 2 array members at a time. JNI doesn't support returning Maps, but because
// key & value are both Strings, we can join them into a flattened String array:
// [ key1, value1, key2, value2, ... ]
String[] keysAndValues = CommandLineJni.get().getSwitchesFlattened();
assert keysAndValues.length % 2 == 0 : "must have same number of keys and values";
for (int i = 0; i < keysAndValues.length; i += 2) {
String key = keysAndValues[i];
String value = keysAndValues[i + 1];
switches.put(key, value);
}
return switches;
}
@Override
public void appendSwitch(String switchString) {
CommandLineJni.get().appendSwitch(switchString);
}
@Override
public void appendSwitchWithValue(String switchString, String value) {
CommandLineJni.get().appendSwitchWithValue(switchString, value == null ? "" : value);
}
@Override
public void appendSwitchesAndArguments(String[] array) {
CommandLineJni.get().appendSwitchesAndArguments(array);
}
@Override
public void removeSwitch(String switchString) {
CommandLineJni.get().removeSwitch(switchString);
}
@Override
public boolean isNativeImplementation() {
return true;
}
@Override
protected String[] getCommandLineArguments() {
assert false;
return null;
}
@Override
protected void destroy() {
// TODO(https://crbug.com/771205): Downgrade this to an assert once we have eliminated
// tests that do this.
throw new IllegalStateException("Can't destroy native command line after startup");
}
}
@NativeMethods
interface Natives {
void init(String[] args);
boolean hasSwitch(String switchString);
String getSwitchValue(String switchString);
String[] getSwitchesFlattened();
void appendSwitch(String switchString);
void appendSwitchWithValue(String switchString, String value);
void appendSwitchesAndArguments(String[] array);
void removeSwitch(String switchString);
}
}
| |
/*
* Copyright 2005 Joe Walker
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.dwr.ticketcenter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import jsx3.GI;
import jsx3.app.Server;
import jsx3.gui.Form;
import jsx3.gui.LayoutGrid;
import jsx3.gui.Matrix;
import jsx3.gui.Select;
import jsx3.gui.TextBox;
import jsx3.xml.CdfDocument;
import jsx3.xml.Record;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.directwebremoting.Browser;
import org.directwebremoting.ScriptSession;
import org.directwebremoting.ScriptSessions;
import org.directwebremoting.ServerContextFactory;
import org.directwebremoting.WebContextFactory;
import org.directwebremoting.impl.DaemonThreadFactory;
import org.directwebremoting.ui.browser.Window;
import com.example.dwr.people.RandomData;
/**
* @author Joe Walker [joe at getahead dot ltd dot uk]
*/
public class CallCenter implements Runnable
{
    /**
     * Create a new publish thread and start it.
     * Seeds the queue with one known caller and three anonymous callers, then
     * schedules this object to run every 2 seconds on a single daemon thread.
     */
    public CallCenter()
    {
        // Start with some calls waiting
        addRandomKnownCall();
        addRandomUnknownCall();
        addRandomUnknownCall();
        addRandomUnknownCall();
        ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(1, new DaemonThreadFactory());
        // NOTE(review): 'this' is handed to the executor before construction completes
        // (flagged below); safe only because the scheduled run is delayed by 2 seconds.
        //noinspection ThisEscapedInObjectConstruction
        executor.scheduleAtFixedRate(this, 2, 2, TimeUnit.SECONDS);
    }

    /**
     * Called once every couple of seconds to take some random action:
     * 2-in-5 chance of a new anonymous caller, 1-in-5 of a new known caller,
     * 1-in-5 of a random hang-up, 1-in-5 of nothing; then all screens are
     * refreshed. Exceptions are logged rather than rethrown so a single
     * failure does not cancel the scheduled task.
     */
    public void run()
    {
        try
        {
            synchronized (calls)
            {
                switch (random.nextInt(5))
                {
                case 0:
                case 1:
                    addRandomUnknownCall();
                    break;
                case 2:
                    addRandomKnownCall();
                    break;
                case 3:
                    removeRandomCall();
                    break;
                default:
                    break;
                }
                updateAll();
            }
        }
        catch (Exception ex)
        {
            log.warn("Random event failure", ex);
        }
    }

    /**
     * Called when the page first loads to ensure we have an up-to-date screen:
     * clears/disables the form and paints the current call queue.
     */
    public void load()
    {
        deselect();
        update();
    }

    /**
     * Hands the agent's current call over to a supervisor: copies the details
     * edited in the browser back onto the server-side {@link Call}, releases
     * the call from this agent and raises its supervisor-alert flag.
     * @param fromWeb the call details as edited on the agent's screen
     */
    public void alertSupervisor(Call fromWeb)
    {
        // This is the ScriptSession of the agent that wishes to alert a supervisor
        ScriptSession session = WebContextFactory.get().getScriptSession();
        // We store the ID of the call we are working on in the ScriptSession
        Object handlingId = session.getAttribute("handlingId");
        if (handlingId == null)
        {
            Window.alert("No call found");
            return;
        }
        synchronized (calls)
        {
            // Check to see that the caller has not hung up since the last update
            Call call = findCaller(handlingId);
            if (call == null)
            {
                Window.alert("That caller hung up, please select another");
                return;
            }
            // The user isn't handling this call any more
            session.removeAttribute("handlingId");
            // Update the server details from those passed in
            call.setName(fromWeb.getName());
            call.setAddress(fromWeb.getAddress());
            call.setNotes(fromWeb.getNotes());
            call.setHandlerId(null);
            call.setSupervisorAlert(true);
            // Update the screen of the current user
            deselect();
            // Update everyone else's screen
            updateAll();
        }
    }

    /**
     * Finishes handling of the agent's current call: removes it from the queue
     * (the point at which a real system would book the ticket) and resets the
     * agent's form.
     * @param fromWeb the call details as edited on the agent's screen
     */
    public void completeHandling(Call fromWeb)
    {
        ScriptSession session = WebContextFactory.get().getScriptSession();
        Object handlingId = session.getAttribute("handlingId");
        if (handlingId == null)
        {
            Window.alert("No call found");
            return;
        }
        synchronized (calls)
        {
            Call call = findCaller(handlingId);
            if (call == null)
            {
                Window.alert("That caller hung up, please select another");
                return;
            }
            session.removeAttribute("handlingId");
            calls.remove(call);
            log.debug("Properly we should book a ticket for " + fromWeb.getPhoneNumber());
            deselect();
            updateAll();
        }
    }

    /**
     * Abandons handling of the agent's current call and returns it to the
     * queue (handler cleared) so another agent can pick it up.
     */
    public void cancelHandling()
    {
        ScriptSession session = WebContextFactory.get().getScriptSession();
        Object handlingId = session.getAttribute("handlingId");
        if (handlingId == null)
        {
            Window.alert("That caller hung up, please select another");
            return;
        }
        synchronized (calls)
        {
            Call call = findCaller(handlingId);
            if (call == null)
            {
                log.debug("Cancel handling of call that hung up");
                return;
            }
            session.removeAttribute("handlingId");
            call.setHandlerId(null);
            deselect();
            updateAll();
        }
    }

    /**
     * Claims the call with the given id for this agent. Refuses if the agent
     * is already on a call, or if the call has gone or is being handled by
     * someone else; otherwise records this agent's script session as the
     * handler and fills the agent's form with the call details.
     * @param id the id of the call to start handling
     */
    public void beginHandling(String id)
    {
        ScriptSession session = WebContextFactory.get().getScriptSession();
        Object handlingId = session.getAttribute("handlingId");
        if (handlingId != null)
        {
            Window.alert("Please finish handling the current call before selecting another");
            return;
        }
        synchronized (calls)
        {
            Call call = findCaller(id);
            if (call == null)
            {
                log.debug("Caller not found: " + id);
                Window.alert("That caller hung up, please select another");
            }
            else
            {
                if (call.getHandlerId() != null)
                {
                    Window.alert("That call is being handled, please select another");
                    return;
                }
                session.setAttribute("handlingId", id);
                call.setHandlerId(session.getId());
                select(call);
                updateAll();
            }
        }
    }

    /**
     * Looks up a waiting call by id.
     * @param attribute the call id as a String-convertible object (usually the
     *        "handlingId" session attribute)
     * @return the matching call, or null if it is no longer queued or the id
     *         does not parse as a number
     */
    private Call findCaller(Object attribute)
    {
        try
        {
            int id = Integer.parseInt(attribute.toString());
            // We could optimize this, but since there are less than 20 people
            // in the queue ...
            for (Call call : calls)
            {
                if (call.getId() == id)
                {
                    return call;
                }
            }
            return null;
        }
        catch (NumberFormatException ex)
        {
            log.warn("Illegal number format: " + attribute.toString(), ex);
            return null;
        }
    }

    /**
     * Simulates a random caller hanging up. If an agent was handling the call,
     * that agent's session is told to clear its form and is shown an alert.
     * Only invoked from run(), which already holds the 'calls' lock.
     */
    protected void removeRandomCall()
    {
        if (!calls.isEmpty())
        {
            synchronized (calls)
            {
                int toDelete = random.nextInt(calls.size());
                Call removed = calls.remove(toDelete);
                String sessionId = removed.getHandlerId();
                if (sessionId != null)
                {
                    Browser.withSession(sessionId, new Runnable()
                    {
                        public void run()
                        {
                            ScriptSessions.removeAttribute("handlingId");
                            Window.alert("It appears that this caller has hung up. Please select another.");
                            deselect();
                        }
                    });
                }
                updateAll();
            }
            // log.info("Random Event: Caller hangs up: " + removed.getPhoneNumber());
        }
    }

    /**
     * Simulates an incoming call from a caller whose name and address are
     * already known. Does nothing once 10 calls are waiting.
     */
    protected void addRandomKnownCall()
    {
        if (calls.size() < 10)
        {
            Call call = new Call();
            call.setId(getNextId());
            call.setName(RandomData.getFullName());
            String[] addressAndNumber = RandomData.getAddressAndNumber();
            call.setAddress(addressAndNumber[0]);
            call.setPhoneNumber(addressAndNumber[1]);
            calls.add(call);
            // log.info("Random Event: New caller: " + call.getName());
        }
    }

    /**
     * Simulates an incoming call where only the phone number is known (and,
     * about 1 time in 3, even that is withheld). Does nothing once 10 calls
     * are waiting.
     */
    protected void addRandomUnknownCall()
    {
        if (calls.size() < 10)
        {
            String phoneNumber = RandomData.getPhoneNumber(random.nextInt(3) != 0);
            Call call = new Call();
            call.setPhoneNumber(phoneNumber);
            call.setId(getNextId());
            calls.add(call);
            // log.info("Random Event: New caller: " + call.getPhoneNumber());
        }
    }

    /**
     * Repaints the callers table in every browser currently viewing the
     * ticketcenter page. Does nothing if the servlet context is not yet
     * available (e.g. very early in startup).
     */
    protected void updateAll()
    {
        String contextPath = ServerContextFactory.get().getContextPath();
        if (contextPath == null)
        {
            return;
        }
        String page = contextPath + "/gi/ticketcenter.html";
        Browser.withPage(page, new Runnable()
        {
            public void run()
            {
                update();
            }
        });
    }

    /**
     * Pushes the current call queue to the client: serializes the calls into a
     * CDF document, stores it in the GI client-side cache under "callers" and
     * repaints the callers table.
     */
    protected void update()
    {
        // Populate a CDF document with data about our calls
        CdfDocument cdfdoc = new CdfDocument("jsxroot");
        for (Call call : calls)
        {
            cdfdoc.appendRecord(new Record(call));
        }
        // Put the CDF doc into the client side cache, and repaint the table
        Server tc = GI.getServer("ticketcenter");
        tc.getCache().setDocument("callers", cdfdoc);
        tc.getJSXByName("listCallers", Matrix.class).repaint(null);
    }

    /**
     * Fills the agent's form with the given call's details and enables it.
     * @param call the call to display
     */
    private void select(Call call)
    {
        Server ticketcenter = GI.getServer("ticketcenter");
        ticketcenter.getJSXByName("textPhone", TextBox.class).setValue(call.getPhoneNumber());
        ticketcenter.getJSXByName("textName", TextBox.class).setValue(call.getName());
        ticketcenter.getJSXByName("textNotes", TextBox.class).setValue(call.getNotes());
        ticketcenter.getJSXByName("selectEvent", Select.class).setValue(null);
        setFormEnabled(true);
    }

    /**
     * Set the form to show no caller: blanks every field and disables the form.
     */
    private void deselect()
    {
        Server ticketcenter = GI.getServer("ticketcenter");
        ticketcenter.getJSXByName("textPhone", TextBox.class).setValue("");
        ticketcenter.getJSXByName("textName", TextBox.class).setValue("");
        ticketcenter.getJSXByName("textNotes", TextBox.class).setValue("");
        ticketcenter.getJSXByName("selectEvent", Select.class).setValue(null);
        setFormEnabled(false);
    }

    /**
     * Enable or disable all the elements in the form, and tint the form
     * background to match.
     * @param enabled True to enable the elements, false to disable them
     */
    private void setFormEnabled(boolean enabled)
    {
        int state = enabled ? Form.STATEENABLED : Form.STATEDISABLED;
        Server ticketcenter = GI.getServer("ticketcenter");
        // NOTE(review): every element, including buttons and selects, is fetched as a
        // TextBox — apparently sufficient for setEnabled(); confirm against the GI API.
        for (String element : ELEMENTS)
        {
            ticketcenter.getJSXByName(element, TextBox.class).setEnabled(state, true);
        }
        LayoutGrid layoutForm = ticketcenter.getJSXByName("layoutForm", LayoutGrid.class);
        layoutForm.setBackgroundColor(enabled ? "#FFF" : "#EEE", true);
    }

    /**
     * The form fields that we enable and disable
     */
    private static final String[] ELEMENTS = new String[]
    {
        "textPhone", "textName", "textAddress", "textPayment", "textNotes",
        "selectEvent", "selectPaymentType", "buttonBook", "buttonSupervisor",
        "buttonCancel"
    };

    /**
     * Get the next unique ID in a thread safe way
     * @return a unique id
     */
    public static synchronized int getNextId()
    {
        return nextId++;
    }

    /**
     * The next ID, to get around serialization issues
     */
    private static int nextId = 1;

    /**
     * The queue of waiting calls (synchronized; iteration still requires
     * holding the list's own lock, as done throughout this class)
     */
    private final List<Call> calls = Collections.synchronizedList(new ArrayList<Call>());

    /**
     * Used to generate random data
     */
    private final Random random = new Random();

    /**
     * The log stream
     */
    private static final Log log = LogFactory.getLog(CallCenter.class);
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.process.traversal.step.branch;
import org.apache.tinkerpop.gremlin.LoadGraphWith;
import org.apache.tinkerpop.gremlin.process.AbstractGremlinProcessTest;
import org.apache.tinkerpop.gremlin.process.GremlinProcessRunner;
import org.apache.tinkerpop.gremlin.process.traversal.P;
import org.apache.tinkerpop.gremlin.process.traversal.Pick;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
import org.apache.tinkerpop.gremlin.process.traversal.step.util.MapHelper;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.apache.tinkerpop.gremlin.LoadGraphWith.GraphData.MODERN;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.hasLabel;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.identity;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.in;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.label;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.out;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.outE;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.valueMap;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.values;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* @author Marko A. Rodriguez (http://markorodriguez.com)
* @author Joshua Shinavier (http://fortytwo.net)
*/
@RunWith(GremlinProcessRunner.class)
public abstract class ChooseTest extends AbstractGremlinProcessTest {

    // Traversal factories implemented by the provider-specific Traversals class below.
    public abstract Traversal<Vertex, Object> get_g_V_chooseXout_countX_optionX2L_nameX_optionX3L_ageX();

    public abstract Traversal<Vertex, String> get_g_V_chooseXlabel_eqXpersonX__outXknowsX__inXcreatedXX_name();

    public abstract Traversal<Vertex, String> get_g_V_chooseXhasLabelXpersonX_and_outXcreatedX__outXknowsX__identityX_name();

    public abstract Traversal<Vertex, String> get_g_V_chooseXlabelX_optionXblah__outXknowsXX_optionXbleep__outXcreatedXX_optionXnone__identityX_name();

    public abstract Traversal<Vertex, String> get_g_V_chooseXoutXknowsX_count_isXgtX0XX__outXknowsXX_name();

    public abstract Traversal<Vertex, Map<String, String>> get_g_V_hasLabelXpersonX_asXp1X_chooseXoutEXknowsX__outXknowsXX_asXp2X_selectXp1_p2X_byXnameX();

    public abstract Traversal<Vertex, Map<String, Long>> get_g_V_hasLabelXpersonX_chooseXageX__optionX27L__constantXyoungXX_optionXnone__constantXoldXX_groupCount();

    public abstract Traversal<Integer, List<Integer>> get_g_injectX1X_chooseXisX1X__constantX10Xfold__foldX();

    public abstract Traversal<Integer, List<Integer>> get_g_injectX2X_chooseXisX1X__constantX10Xfold__foldX();

    @Test
    @LoadGraphWith(MODERN)
    public void g_V_chooseXout_countX_optionX2L_nameX_optionX3L_ageX() {
        final Traversal<Vertex, Object> traversal = get_g_V_chooseXout_countX_optionX2L_nameX_optionX3L_ageX();
        printTraversalForm(traversal);
        // Tally the stringified results; only the 2-out and 3-out vertices match an option.
        final Map<String, Long> counts = new HashMap<>();
        int counter = 0;
        while (traversal.hasNext()) {
            MapHelper.incr(counts, traversal.next().toString(), 1L);
            counter++;
        }
        assertFalse(traversal.hasNext());
        assertEquals(2, counter);
        assertEquals(2, counts.size());
        assertEquals(Long.valueOf(1), counts.get("29"));
        assertEquals(Long.valueOf(1), counts.get("josh"));
    }

    @Test
    @LoadGraphWith(MODERN)
    public void g_V_chooseXlabel_eqXpersonX__outXknowsX__inXcreatedXX_name() {
        final Traversal<Vertex, String> traversal = get_g_V_chooseXlabel_eqXpersonX__outXknowsX__inXcreatedXX_name();
        printTraversalForm(traversal);
        checkResults(Arrays.asList("josh", "vadas", "josh", "josh", "marko", "peter"), traversal);
    }

    @Test
    @LoadGraphWith(MODERN)
    public void g_V_chooseXhasLabelXpersonX_and_outXcreatedX__outXknowsX__identityX_name() {
        final Traversal<Vertex, String> traversal = get_g_V_chooseXhasLabelXpersonX_and_outXcreatedX__outXknowsX__identityX_name();
        printTraversalForm(traversal);
        checkResults(Arrays.asList("lop", "ripple", "josh", "vadas", "vadas"), traversal);
    }

    @Test
    @LoadGraphWith(MODERN)
    public void g_V_chooseXlabelX_optionXblah__outXknowsXX_optionXbleep__outXcreatedXX_optionXnone__identityX_name() {
        final Traversal<Vertex, String> traversal = get_g_V_chooseXlabelX_optionXblah__outXknowsXX_optionXbleep__outXcreatedXX_optionXnone__identityX_name();
        printTraversalForm(traversal);
        checkResults(Arrays.asList("marko", "vadas", "peter", "josh", "lop", "ripple"), traversal);
    }

    @Test
    @LoadGraphWith(MODERN)
    public void g_V_chooseXoutXknowsX_count_isXgtX0XX__outXknowsXX_name() {
        final Traversal<Vertex, String> traversal = get_g_V_chooseXoutXknowsX_count_isXgtX0XX__outXknowsXX_name();
        printTraversalForm(traversal);
        checkResults(Arrays.asList("vadas", "josh", "vadas", "josh", "peter", "lop", "ripple"), traversal);
    }

    @Test
    @LoadGraphWith(MODERN)
    public void g_V_hasLabelXpersonX_asXp1X_chooseXoutEXknowsX__outXknowsXX_asXp2X_selectXp1_p2X_byXnameX() {
        final Traversal<Vertex, Map<String, String>> traversal = get_g_V_hasLabelXpersonX_asXp1X_chooseXoutEXknowsX__outXknowsXX_asXp2X_selectXp1_p2X_byXnameX();
        printTraversalForm(traversal);
        checkResults(makeMapList(2,
                "p1", "marko", "p2", "vadas",
                "p1", "marko", "p2", "josh",
                "p1", "vadas", "p2", "vadas",
                "p1", "josh", "p2", "josh",
                "p1", "peter", "p2", "peter"
        ), traversal);
    }

    @Test
    @LoadGraphWith(MODERN)
    public void g_V_hasLabelXpersonX_chooseXageX__optionX27L__constantXyoungXX_optionXnone__constantXoldXX_groupCount() {
        final Traversal<Vertex, Map<String, Long>> traversal = get_g_V_hasLabelXpersonX_chooseXageX__optionX27L__constantXyoungXX_optionXnone__constantXoldXX_groupCount();
        printTraversalForm(traversal);
        final Map<String, Long> expected = new HashMap<>(2);
        expected.put("young", 1L);
        expected.put("old", 3L);
        assertTrue(traversal.hasNext());
        checkMap(expected, traversal.next());
        assertFalse(traversal.hasNext());
    }

    @Test
    public void g_injectX1X_chooseXisX1X__constantX10Xfold__foldX() {
        final Traversal<Integer, List<Integer>> traversal = get_g_injectX1X_chooseXisX1X__constantX10Xfold__foldX();
        printTraversalForm(traversal);
        // Arrays.asList replaces the raw-typed, double-brace-initialized ArrayList
        // (an anonymous inner class that pins the enclosing test instance);
        // assertEquals compares Lists by content, so behavior is unchanged.
        final List<Integer> expected = Arrays.asList(10);
        assertEquals(expected, traversal.next());
        assertThat(traversal.hasNext(), is(false));
    }

    @Test
    public void g_injectX2X_chooseXisX1X__constantX10Xfold__foldX() {
        final Traversal<Integer, List<Integer>> traversal = get_g_injectX2X_chooseXisX1X__constantX10Xfold__foldX();
        printTraversalForm(traversal);
        // See note above: content-equal replacement for the raw double-brace list.
        final List<Integer> expected = Arrays.asList(2);
        assertEquals(expected, traversal.next());
        assertThat(traversal.hasNext(), is(false));
    }

    /** Default provider implementations of the traversal factories. */
    public static class Traversals extends ChooseTest {
        @Override
        public Traversal<Vertex, Object> get_g_V_chooseXout_countX_optionX2L_nameX_optionX3L_ageX() {
            return g.V().choose(out().count())
                    .option(2L, values("name"))
                    .option(3L, values("age"));
        }

        @Override
        public Traversal<Vertex, String> get_g_V_chooseXlabel_eqXpersonX__outXknowsX__inXcreatedXX_name() {
            return g.V().choose(v -> v.label().equals("person"), out("knows"), in("created")).values("name");
        }

        @Override
        public Traversal<Vertex, String> get_g_V_chooseXhasLabelXpersonX_and_outXcreatedX__outXknowsX__identityX_name() {
            return g.V().choose(hasLabel("person").and().out("created"), out("knows"), identity()).values("name");
        }

        @Override
        public Traversal<Vertex, String> get_g_V_chooseXlabelX_optionXblah__outXknowsXX_optionXbleep__outXcreatedXX_optionXnone__identityX_name() {
            return g.V().choose(label())
                    .option("blah", out("knows"))
                    .option("bleep", out("created"))
                    .option(Pick.none, identity()).values("name");
        }

        @Override
        public Traversal<Vertex, String> get_g_V_chooseXoutXknowsX_count_isXgtX0XX__outXknowsXX_name() {
            return g.V().choose(out("knows").count().is(P.gt(0)), out("knows")).values("name");
        }

        @Override
        public Traversal<Vertex, Map<String, String>> get_g_V_hasLabelXpersonX_asXp1X_chooseXoutEXknowsX__outXknowsXX_asXp2X_selectXp1_p2X_byXnameX() {
            return g.V().hasLabel("person").as("p1").choose(outE("knows"), out("knows")).as("p2").<String>select("p1", "p2").by("name");
        }

        @Override
        public Traversal<Vertex, Map<String, Long>> get_g_V_hasLabelXpersonX_chooseXageX__optionX27L__constantXyoungXX_optionXnone__constantXoldXX_groupCount() {
            return g.V().hasLabel("person").choose(values("age"))
                    .option(27L, __.constant("young"))
                    .option(Pick.none, __.constant("old"))
                    .groupCount();
        }

        @Override
        public Traversal<Integer, List<Integer>> get_g_injectX1X_chooseXisX1X__constantX10Xfold__foldX() {
            return g.inject(1).choose(__.is(1), __.constant(10).fold(), __.fold());
        }

        @Override
        public Traversal<Integer, List<Integer>> get_g_injectX2X_chooseXisX1X__constantX10Xfold__foldX() {
            return g.inject(2).choose(__.is(1), __.constant(10).fold(), __.fold());
        }
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.env.python.debug;
import com.google.common.collect.Sets;
import com.intellij.execution.*;
import com.intellij.execution.configurations.ConfigurationFactory;
import com.intellij.execution.configurations.RunProfile;
import com.intellij.execution.executors.DefaultDebugExecutor;
import com.intellij.execution.process.KillableColoredProcessHandler;
import com.intellij.execution.process.ProcessAdapter;
import com.intellij.execution.process.ProcessEvent;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.xdebugger.*;
import com.jetbrains.env.python.PythonDebuggerTest;
import com.jetbrains.python.debugger.PyDebugProcess;
import com.jetbrains.python.debugger.PyDebugRunner;
import com.jetbrains.python.run.PythonCommandLineState;
import com.jetbrains.python.run.PythonConfigurationType;
import com.jetbrains.python.run.PythonRunConfiguration;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.Assert;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.ServerSocket;
import java.util.Set;
import java.util.concurrent.Semaphore;
/**
* @author traff
*/
public class PyDebuggerTask extends PyBaseDebuggerTask {
// When true, the debugger follows spawned subprocesses as well (passed to PyDebugProcess).
private boolean myMultiprocessDebug = false;
protected PythonRunConfiguration myRunConfiguration;
// When false, disposeDebugProcess() does not wait for the debuggee to exit on its own
// (useful for test scripts with infinite loops).
private boolean myWaitForTermination = true;
/**
 * Creates a debugger test task that runs the given script with the given parameters.
 * Subclasses can override init() to customize setup before the run configuration is built.
 */
public PyDebuggerTask(@Nullable final String relativeTestDataPath, String scriptName, String scriptParameters) {
super(relativeTestDataPath);
setScriptName(scriptName);
setScriptParameters(scriptParameters);
init();
}
public PyDebuggerTask(@Nullable final String relativeTestDataPath, String scriptName) {
this(relativeTestDataPath, scriptName, null);
}
// Extension hook invoked from the constructor; intentionally empty here.
protected void init() {
}
@Nullable
@Override
public Set<String> getTagsToCover() {
return Sets.newHashSet("python2.6", "python2.7", "python3.5", "python3.6", "jython", "IronPython", "pypy");
}
/**
 * Builds a Python run configuration for the test script, starts it under the debug
 * executor, attaches a PyDebugProcess over a freshly allocated local server socket,
 * wires pause/termination semaphores, and finally delegates to doTest().
 */
public void runTestOn(String sdkHome) throws Exception {
final Project project = getProject();
final ConfigurationFactory factory = PythonConfigurationType.getInstance().getConfigurationFactories()[0];
final RunnerAndConfigurationSettings settings =
      RunManager.getInstance(project).createRunConfiguration("test", factory);
myRunConfiguration = (PythonRunConfiguration)settings.getConfiguration();
myRunConfiguration.setSdkHome(sdkHome);
myRunConfiguration.setScriptName(getScriptName());
myRunConfiguration.setWorkingDirectory(myFixture.getTempDirPath());
myRunConfiguration.setScriptParameters(getScriptParameters());
// Registering/selecting the configuration mutates project state, so it must run in a write action.
new WriteAction() {
@Override
protected void run(@NotNull Result result) throws Throwable {
RunManager runManager = RunManager.getInstance(project);
runManager.addConfiguration(settings, false);
runManager.setSelectedConfiguration(settings);
Assert.assertSame(settings, runManager.getSelectedConfiguration());
}
}.execute();
final PyDebugRunner runner = (PyDebugRunner)ProgramRunnerUtil.getRunner(getExecutorId(), settings);
Assert.assertTrue(runner.canRun(getExecutorId(), myRunConfiguration));
final Executor executor = DefaultDebugExecutor.getDebugExecutorInstance();
final ExecutionEnvironment env = new ExecutionEnvironment(executor, runner, settings, project);
final PythonCommandLineState pyState = (PythonCommandLineState)myRunConfiguration.getState(executor, env);
assert pyState != null;
pyState.setMultiprocessDebug(isMultiprocessDebug());
// Port 0 asks the OS for any free port; the debuggee connects back to it.
// NOTE(review): the socket is handed to PyDebugProcess and not closed here — presumably
// the debug process owns and closes it; confirm against PyDebugProcess.
final ServerSocket serverSocket;
try {
//noinspection SocketOpenedButNotSafelyClosed
serverSocket = new ServerSocket(0);
}
catch (IOException e) {
throw new ExecutionException("Failed to find free socket port", e);
}
final int serverLocalPort = serverSocket.getLocalPort();
final RunProfile profile = env.getRunProfile();
PythonDebuggerTest.createExceptionBreak(myFixture, false, false, false); //turn off exception breakpoints by default
before();
setProcessCanTerminate(false);
// Released by the process-terminated listener below; waited on by the base class.
myTerminateSemaphore = new Semaphore(0);
new WriteAction<ExecutionResult>() {
@Override
protected void run(@NotNull Result<ExecutionResult> result) throws Throwable {
myExecutionResult =
          pyState.execute(executor, runner.createCommandLinePatchers(myFixture.getProject(), pyState, profile, serverLocalPort));
mySession = XDebuggerManager.getInstance(getProject()).
        startSession(env, new XDebugProcessStarter() {
@NotNull
public XDebugProcess start(@NotNull final XDebugSession session) {
myDebugProcess =
            new PyDebugProcess(session, serverSocket, myExecutionResult.getExecutionConsole(), myExecutionResult.getProcessHandler(), isMultiprocessDebug());
// Captured output is included in the failure message on unexpected termination.
// NOTE(review): StringBuilder is appended from the process-listener callback and read
// in processTerminated — assumes both run on the same thread; confirm.
StringBuilder output = new StringBuilder();
myDebugProcess.getProcessHandler().addProcessListener(new ProcessAdapter() {
@Override
public void onTextAvailable(ProcessEvent event, Key outputType) {
output.append(event.getText());
}
@Override
public void processTerminated(ProcessEvent event) {
myTerminateSemaphore.release();
if (event.getExitCode() != 0 && !myProcessCanTerminate) {
Assert.fail("Process terminated unexpectedly\n" + output.toString());
}
}
});
myDebugProcess.getProcessHandler().startNotify();
return myDebugProcess;
}
});
result.setResult(myExecutionResult);
}
}.execute().getResultObject();
OutputPrinter myOutputPrinter = null;
if (shouldPrintOutput) {
myOutputPrinter = new OutputPrinter();
myOutputPrinter.start();
}
// Released every time the session pauses (breakpoint hit, step finished, etc.).
myPausedSemaphore = new Semaphore(0);
mySession.addSessionListener(new XDebugSessionListener() {
@Override
public void sessionPaused() {
if (myPausedSemaphore != null) {
myPausedSemaphore.release();
}
}
});
doTest(myOutputPrinter);
}
protected String getExecutorId() {
return DefaultDebugExecutor.EXECUTOR_ID;
}
public PythonRunConfiguration getRunConfiguration() {
return myRunConfiguration;
}
private boolean isMultiprocessDebug() {
return myMultiprocessDebug;
}
public void setMultiprocessDebug(boolean multiprocessDebug) {
myMultiprocessDebug = multiprocessDebug;
}
public void setWaitForTermination(boolean waitForTermination) {
myWaitForTermination = waitForTermination;
}
/** Waits for the session to pause and then for every debuggee thread to stop, failing with the captured output on timeout. */
protected void waitForAllThreadsPause() throws InterruptedException, InvocationTargetException {
waitForPause();
Assert.assertTrue(String.format("All threads didn't stop within timeout\n" +
                                    "Output: %s", output()), waitForAllThreads());
XDebuggerTestUtil.waitForSwing();
}
/** Polls once a second until no thread is running or NORMAL_TIMEOUT elapses; true when all threads stopped. */
protected boolean waitForAllThreads() throws InterruptedException {
long until = System.currentTimeMillis() + NORMAL_TIMEOUT;
while (System.currentTimeMillis() < until && getRunningThread() != null) {
Thread.sleep(1000);
}
return getRunningThread() == null;
}
/**
 * Stops the debug process, optionally waits for graceful termination,
 * and escalates to a kill if the process is still alive afterwards.
 */
@Override
protected void disposeDebugProcess() throws InterruptedException {
if (myDebugProcess != null) {
ProcessHandler processHandler = myDebugProcess.getProcessHandler();
myDebugProcess.stop();
if (myWaitForTermination) {
// for some tests (with infinite loops, for example, it has no sense)
waitFor(processHandler);
}
if (!processHandler.isProcessTerminated()) {
killDebugProcess();
if (!waitFor(processHandler)) {
new Throwable("Cannot stop debugger process").printStackTrace();
}
}
}
}
// Prefers killProcess() (forceful, for killable handlers) over destroyProcess().
protected void killDebugProcess() {
if (myDebugProcess.getProcessHandler() instanceof KillableColoredProcessHandler) {
KillableColoredProcessHandler h = (KillableColoredProcessHandler)myDebugProcess.getProcessHandler();
h.killProcess();
}
else {
myDebugProcess.getProcessHandler().destroyProcess();
}
}
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ext.postgresql.model;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.ext.postgresql.PostgreConstants;
import org.jkiss.dbeaver.ext.postgresql.PostgreUtils;
import org.jkiss.dbeaver.model.*;
import org.jkiss.dbeaver.model.exec.DBCException;
import org.jkiss.dbeaver.model.exec.DBCLogicalOperator;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCPreparedStatement;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCResultSet;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCSession;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCStatement;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCUtils;
import org.jkiss.dbeaver.model.impl.jdbc.cache.JDBCObjectCache;
import org.jkiss.dbeaver.model.impl.jdbc.struct.JDBCDataType;
import org.jkiss.dbeaver.model.meta.Property;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.sql.SQLUtils;
import org.jkiss.dbeaver.model.struct.*;
import org.jkiss.utils.ArrayUtils;
import org.jkiss.utils.CommonUtils;
import java.sql.SQLException;
import java.sql.Types;
import java.util.*;
/**
* PostgreTypeType
*/
public class PostgreDataType extends JDBCDataType<PostgreSchema> implements PostgreClass, PostgreScriptObject, DBPQualifiedObject, DBPImageProvider
{
private static final Log log = Log.getLog(PostgreDataType.class);
//private static final String CAT_MAIN = "Main";
// Property category labels used by the @Property(category = ...) annotations below.
private static final String CAT_MISC = "Miscellaneous";
private static final String CAT_MODIFIERS = "Modifiers";
private static final String CAT_FUNCTIONS = "Functions";
private static final String CAT_ARRAY = "Array";
// Names of PostgreSQL OID-alias ("reg*") types; readDataType() maps these to VARCHAR.
// Declared final: this is a constant lookup table and must never be reassigned.
private static final String[] OID_TYPES = new String[] {
    "regproc",
    "regprocedure",
    "regoper",
    "regoperator",
    "regnamespace",
    "regclass",
    "regtype",
    "regconfig",
    "regdictionary",
};
// True for the synthetic alias wrapper created by the copy constructor below.
private final boolean alias;
// OID of this type (pg_type.oid).
private long typeId;
// pg_type.typtype: b=base, c=composite, d=domain, e=enum, p=pseudo, r=range.
private PostgreTypeType typeType;
// pg_type.typcategory (server >= 8.4); X when unknown/unsupported.
private PostgreTypeCategory typeCategory;
// Resolved generic data kind; may be refined from the type name in the constructor.
private DBPDataKind dataKind;
// pg_type.typowner — role OID of the owner.
private final long ownerId;
private boolean isByValue;      // pg_type.typbyval
private boolean isPreferred;    // pg_type.typispreferred (server >= 8.4)
private String arrayDelimiter;  // pg_type.typdelim
private long classId;           // pg_type.typrelid — backing relation for composite types
private long elementTypeId;     // pg_type.typelem — element type OID for arrays
private long arrayItemTypeId;   // pg_type.typarray — OID of the array type over this type
// Support function names read from pg_type (typinput/typoutput/typreceive/...).
private String inputFunc;
private String outputFunc;
private String receiveFunc;
private String sendFunc;
private String modInFunc;
private String modOutFunc;
private String analyzeFunc;
private PostgreTypeAlign align = PostgreTypeAlign.c;      // pg_type.typalign
private PostgreTypeStorage storage = PostgreTypeStorage.p; // pg_type.typstorage
private boolean isNotNull;      // pg_type.typnotnull (domains)
private long baseTypeId;        // pg_type.typbasetype (domains)
private int typeMod;            // pg_type.typtypmod
private int arrayDim;           // pg_type.typndims
private long collationId;       // pg_type.typcollation (when server supports collations)
private String defaultValue;    // pg_type.typdefault
// Canonical name looked up for catalog-schema types only.
private String canonicalName;
// Lazily loaded domain constraint DDL; see getConstraint().
private String constraintText;
// Non-null only for composite types with a backing relation; see hasAttributes().
private final AttributeCache attributeCache;
// Enum labels read from pg_enum for enum types; null otherwise.
private Object[] enumValues;
/**
 * Constructs a data type from a pg_type result-set row.
 * Parses the kind/category columns defensively (bad values are logged and
 * defaulted), refines the generic data kind from well-known type names, and
 * eagerly reads enum labels for enum-category types.
 */
public PostgreDataType(@NotNull JDBCSession session, @NotNull PostgreSchema owner, long typeId, int valueType, String name, int length, JDBCResultSet dbResult) throws DBException {
super(owner, valueType, name, null, false, true, length, -1, -1);
this.alias = false;
// Canonical names are only defined for types in the catalog schema.
if (owner.isCatalogSchema()) {
this.canonicalName = PostgreConstants.DATA_TYPE_CANONICAL_NAMES.get(name);
}
this.typeId = typeId;
// Default to base type; overridden below when typtype parses successfully.
this.typeType = PostgreTypeType.b;
String typTypeStr = JDBCUtils.safeGetString(dbResult, "typtype");
try {
if (typTypeStr != null && !typTypeStr.isEmpty()) {
this.typeType = PostgreTypeType.valueOf(typTypeStr.toLowerCase(Locale.ENGLISH));
}
} catch (Throwable e) {
log.debug("Invalid type type [" + typTypeStr + "] - " + e.getMessage());
}
// Default to "unknown" category; typcategory only exists on server >= 8.4.
this.typeCategory = PostgreTypeCategory.X;
boolean supportsCategory = session.getDataSource().isServerVersionAtLeast(8, 4);
if (supportsCategory) {
String typCategoryStr = JDBCUtils.safeGetString(dbResult, "typcategory");
try {
if (typCategoryStr != null && !typCategoryStr.isEmpty()) {
this.typeCategory = PostgreTypeCategory.valueOf(typCategoryStr.toUpperCase(Locale.ENGLISH));
}
} catch (Throwable e) {
log.debug("Invalid type category [" + typCategoryStr + "] - " + e.getMessage());
}
}
if (typeType == PostgreTypeType.e) {
// Enums are strings
this.dataKind = DBPDataKind.STRING;
} else {
this.dataKind = JDBCDataSource.getDataKind(getName(), valueType);
if (this.dataKind == DBPDataKind.OBJECT) {
// Refine OBJECT kind for well-known names: json/jsonb behave as content,
// interval behaves as a datetime value.
if (PostgreConstants.TYPE_JSONB.equals(name) || PostgreConstants.TYPE_JSON.equals(name)) {
this.dataKind = DBPDataKind.CONTENT;
} else if (PostgreConstants.TYPE_INTERVAL.equals(name)) {
this.dataKind = DBPDataKind.DATETIME;
}
}
}
this.ownerId = JDBCUtils.safeGetLong(dbResult, "typowner");
this.isByValue = JDBCUtils.safeGetBoolean(dbResult, "typbyval");
if (getDataSource().isServerVersionAtLeast(8, 4)) {
this.isPreferred = JDBCUtils.safeGetBoolean(dbResult, "typispreferred");
}
this.arrayDelimiter = JDBCUtils.safeGetString(dbResult, "typdelim");
this.classId = JDBCUtils.safeGetLong(dbResult, "typrelid");
this.elementTypeId = JDBCUtils.safeGetLong(dbResult, "typelem");
this.arrayItemTypeId = JDBCUtils.safeGetLong(dbResult, "typarray");
this.inputFunc = JDBCUtils.safeGetString(dbResult, "typinput");
this.outputFunc = JDBCUtils.safeGetString(dbResult, "typoutput");
this.receiveFunc = JDBCUtils.safeGetString(dbResult, "typreceive");
this.sendFunc = JDBCUtils.safeGetString(dbResult, "typsend");
this.modInFunc = JDBCUtils.safeGetString(dbResult, "typmodin");
this.modOutFunc = JDBCUtils.safeGetString(dbResult, "typmodout");
this.analyzeFunc = JDBCUtils.safeGetString(dbResult, "typanalyze");
String typAlignStr = JDBCUtils.safeGetString(dbResult, "typalign");
if (!CommonUtils.isEmpty(typAlignStr)) {
try {
this.align = PostgreTypeAlign.valueOf(typAlignStr);
} catch (Exception e) {
log.debug("Invalid type align [" + typAlignStr + "] - " + e.getMessage());
}
}
String typStorageStr = JDBCUtils.safeGetString(dbResult, "typstorage");
if (!CommonUtils.isEmpty(typStorageStr)) {
try {
this.storage = PostgreTypeStorage.valueOf(typStorageStr);
} catch (Exception e) {
log.debug("Invalid type storage [" + typStorageStr + "] - " + e.getMessage());
}
}
this.isNotNull = JDBCUtils.safeGetBoolean(dbResult, "typnotnull");
this.baseTypeId = JDBCUtils.safeGetLong(dbResult, "typbasetype");
this.typeMod = JDBCUtils.safeGetInt(dbResult, "typtypmod");
this.arrayDim = JDBCUtils.safeGetInt(dbResult, "typndims");
if (getDataSource().getServerType().supportsCollations()) {
this.collationId = JDBCUtils.safeGetLong(dbResult, "typcollation");
}
this.defaultValue = JDBCUtils.safeGetString(dbResult, "typdefault");
// Attribute cache only makes sense for composite types with a backing relation.
this.attributeCache = hasAttributes() ? new AttributeCache() : null;
if (typeCategory == PostgreTypeCategory.E) {
readEnumValues(session);
}
}
/**
 * Creates an alias for an existing type: copies all metadata from the real
 * type but uses a different name. Alias instances do not cache attributes
 * or enum values of their own.
 */
PostgreDataType(PostgreDataType realType, String aliasName) {
super(realType.getParentObject(), realType);
setName(aliasName);
this.alias = true;
this.typeId = realType.typeId;
this.typeType = realType.typeType;
this.typeCategory = realType.typeCategory;
this.dataKind = realType.dataKind;
this.ownerId = realType.ownerId;
this.isByValue = realType.isByValue;
this.isPreferred = realType.isPreferred;
this.arrayDelimiter = realType.arrayDelimiter;
this.classId = realType.classId;
this.elementTypeId = realType.elementTypeId;
this.arrayItemTypeId = realType.arrayItemTypeId;
this.inputFunc = realType.inputFunc;
this.outputFunc = realType.outputFunc;
this.receiveFunc = realType.receiveFunc;
this.sendFunc = realType.sendFunc;
this.modInFunc = realType.modInFunc;
this.modOutFunc = realType.modOutFunc;
this.analyzeFunc = realType.analyzeFunc;
this.align = realType.align;
this.storage = realType.storage;
this.isNotNull = realType.isNotNull;
this.baseTypeId = realType.baseTypeId;
this.typeMod = realType.typeMod;
this.arrayDim = realType.arrayDim;
this.collationId = realType.collationId;
this.defaultValue = realType.defaultValue;
this.attributeCache = null;
this.enumValues = null;
}
/**
 * For domains: propagates the JDBC value-type ID from the base type so the
 * domain is handled like its underlying type. No-op when there is no base
 * type; logs (but does not fail) when the base type cannot be resolved.
 */
void resolveValueTypeFromBaseType(DBRProgressMonitor monitor) {
if (baseTypeId > 0) {
PostgreDataType baseType = getBaseType(monitor);
if (baseType == null) {
log.debug("Can't find type '" + getFullTypeName() + "' base type " + baseTypeId);
} else {
if (getTypeID() != baseType.getTypeID()) {
//log.debug(getFullTypeName() + " type ID resolved to " + baseType.getTypeID());
setTypeID(baseType.getTypeID());
}
}
}
}
/** True when this instance is a name alias created by the copy constructor. */
public boolean isAlias() {
return alias;
}
/**
 * Loads enum labels from pg_catalog.pg_enum for this type, ordered by
 * enumsortorder, into {@link #enumValues}.
 */
private void readEnumValues(JDBCSession session) throws DBException {
try (JDBCPreparedStatement dbStat = session.prepareStatement(
"SELECT e.enumlabel \n" +
"FROM pg_catalog.pg_enum e\n" +
"WHERE e.enumtypid=?\n" +
"ORDER BY e.enumsortorder")) {
dbStat.setLong(1, getObjectId());
try (JDBCResultSet rs = dbStat.executeQuery()) {
List<String> values = new ArrayList<>();
while (rs.nextRow()) {
values.add(JDBCUtils.safeGetString(rs, 1));
}
enumValues = values.toArray();
}
} catch (SQLException e) {
throw new DBException("Error reading enum values", e, getDataSource());
}
}
@Override
@Property(viewable = true, order = 1)
public String getName() {
return super.getName();
}
@Override
public String getFullTypeName() {
return super.getFullTypeName();
}
/** Canonical name for catalog-schema types (may be null); see the constructor. */
public String getCanonicalName() {
return canonicalName;
}
@NotNull
@Override
public PostgreDataSource getDataSource() {
return (PostgreDataSource) super.getDataSource();
}
@NotNull
@Override
public PostgreDatabase getDatabase() {
return getParentObject().getDatabase();
}
/** Returns the refined data kind when known, otherwise defers to the JDBC base class. */
@Override
public DBPDataKind getDataKind()
{
if (dataKind != null) {
return dataKind;
}
return super.getDataKind();
}
/** For arrays: the element type; null for non-array types. */
@Nullable
@Override
public DBSDataType getComponentType(@NotNull DBRProgressMonitor monitor) throws DBException {
return getElementType(monitor);
}
// NOTE(review): method name looks like a typo for "getTypeExtension", but it is an
// @Override of an external interface, so it cannot be renamed here.
@Nullable
@Override
public Object geTypeExtension() {
return typeCategory;
}
// --- Simple property getters exposing the pg_type columns read in the constructor. ---
@Override
@Property(viewable = false, order = 9)
public long getObjectId() {
return typeId;
}
@Property(viewable = true, order = 10)
public PostgreTypeType getTypeType() {
return typeType;
}
@Property(viewable = true, order = 11)
public PostgreTypeCategory getTypeCategory() {
return typeCategory;
}
/** Base type for domains; resolved through the database type registry. */
@Property(viewable = true, optional = true, order = 12)
public PostgreDataType getBaseType(DBRProgressMonitor monitor) {
return getDatabase().getDataType(monitor, baseTypeId);
}
/** Element type for arrays; null when typelem is 0 (non-array). */
@Property(viewable = true, optional = true, order = 13)
public PostgreDataType getElementType(DBRProgressMonitor monitor) {
return elementTypeId == 0 ? null : getDatabase().getDataType(monitor, elementTypeId);
}
@Property(order = 15)
public PostgreRole getOwner(DBRProgressMonitor monitor) throws DBException {
return getDatabase().getRoleById(monitor, ownerId);
}
@Property(category = CAT_MISC)
public boolean isByValue() {
return isByValue;
}
@Property(category = CAT_MISC)
public boolean isPreferred() {
return isPreferred;
}
@Property(category = CAT_MISC)
public String getDefaultValue() {
return defaultValue;
}
// --- Support function names (pg_type typinput/typoutput/...). ---
@Property(category = CAT_FUNCTIONS)
public String getInputFunc() {
return inputFunc;
}
@Property(category = CAT_FUNCTIONS)
public String getOutputFunc() {
return outputFunc;
}
@Property(category = CAT_FUNCTIONS)
public String getReceiveFunc() {
return receiveFunc;
}
@Property(category = CAT_FUNCTIONS)
public String getSendFunc() {
return sendFunc;
}
@Property(category = CAT_FUNCTIONS)
public String getModInFunc() {
return modInFunc;
}
@Property(category = CAT_FUNCTIONS)
public String getModOutFunc() {
return modOutFunc;
}
@Property(category = CAT_FUNCTIONS)
public String getAnalyzeFunc() {
return analyzeFunc;
}
@Property(category = CAT_MODIFIERS)
public PostgreTypeAlign getAlign() {
return align;
}
@Property(category = CAT_MODIFIERS)
public PostgreTypeStorage getStorage() {
return storage;
}
@Property(category = CAT_MODIFIERS)
public boolean isNotNull() {
return isNotNull;
}
@Property(category = CAT_MODIFIERS)
public int getTypeMod() {
return typeMod;
}
/** Resolves the collation object for typcollation; null when no collation is set. */
@Property(category = CAT_MODIFIERS)
public PostgreCollation getCollationId(DBRProgressMonitor monitor) throws DBException {
if (collationId != 0) {
return getDatabase().getCollation(monitor, collationId);
}
return null;
}
/**
 * Returns the CHECK constraint definition for domain types, reading it lazily
 * from pg_get_constraintdef and caching the result. Null for non-domain types.
 * The embedded object ID is a numeric OID, not user input.
 */
@Property(category = CAT_MODIFIERS)
public String getConstraint(DBRProgressMonitor monitor) throws DBException {
if (typeType != PostgreTypeType.d) {
return null;
}
if (constraintText != null) {
return constraintText;
}
try (JDBCSession session = DBUtils.openMetaSession(monitor, this, "Read domain constraint value")) {
this.constraintText = JDBCUtils.queryString(
session,
"SELECT pg_catalog.pg_get_constraintdef((SELECT oid FROM pg_catalog.pg_constraint WHERE contypid = " + getObjectId() + "), true)");
} catch (SQLException e) {
throw new DBCException("Error reading domain constraint value", e, getDataSource());
}
return this.constraintText;
}
@Property(category = CAT_ARRAY)
public String getArrayDelimiter() {
return arrayDelimiter;
}
/** The array type built over this type (pg_type.typarray); null when none exists. */
@Property(category = CAT_ARRAY)
public PostgreDataType getArrayItemType(DBRProgressMonitor monitor) {
return arrayItemTypeId == 0 ? null : getDatabase().getDataType(monitor, arrayItemTypeId);
}
// Plain type
// NOTE(review): returns true when an array type over this type exists (typarray != 0).
// Whether that is really what "plain" is meant to convey is unclear — confirm with callers.
public boolean isPlainType() {
return arrayItemTypeId != 0;
}
@Property(category = CAT_ARRAY)
public int getArrayDim() {
return arrayDim;
}
/** Composite types with a backing relation have attributes worth caching. */
public boolean hasAttributes() {
return typeType == PostgreTypeType.c && classId >= 0;
}
@NotNull
@Override
public DBSEntityType getEntityType() {
return DBSEntityType.TYPE;
}
/** Attributes of a composite type; null for non-composite types (no cache). */
@Override
public Collection<PostgreDataTypeAttribute> getAttributes(@NotNull DBRProgressMonitor monitor) throws DBException {
return attributeCache == null ? null : attributeCache.getAllObjects(monitor, this);
}
@Override
public PostgreDataTypeAttribute getAttribute(@NotNull DBRProgressMonitor monitor, @NotNull String attributeName) throws DBException {
return attributeCache == null ? null : attributeCache.getObject(monitor, this, attributeName);
}
// Data types expose no entity constraints/associations/references.
@Override
public Collection<? extends DBSEntityConstraint> getConstraints(@NotNull DBRProgressMonitor monitor) throws DBException {
return null;
}
@Override
public Collection<? extends DBSEntityAssociation> getAssociations(@NotNull DBRProgressMonitor monitor) throws DBException {
return null;
}
@Override
public Collection<? extends DBSEntityAssociation> getReferences(@NotNull DBRProgressMonitor monitor) throws DBException {
return null;
}
/**
 * Returns the filter operators applicable to values of this type.
 * String-kind types of category S (strings), E (enums) and X (unknown) get the
 * full comparison set; other string-kind categories only get NULL checks;
 * everything else defers to the generic JDBC implementation.
 */
@NotNull
@Override
public DBCLogicalOperator[] getSupportedOperators(DBSTypedObject attribute) {
    if (dataKind != DBPDataKind.STRING) {
        return super.getSupportedOperators(attribute);
    }
    boolean comparableText =
        typeCategory == PostgreTypeCategory.S ||
        typeCategory == PostgreTypeCategory.E ||
        typeCategory == PostgreTypeCategory.X;
    if (!comparableText) {
        return new DBCLogicalOperator[] {
            DBCLogicalOperator.IS_NULL,
            DBCLogicalOperator.IS_NOT_NULL
        };
    }
    return new DBCLogicalOperator[] {
        DBCLogicalOperator.IS_NULL,
        DBCLogicalOperator.IS_NOT_NULL,
        DBCLogicalOperator.EQUALS,
        DBCLogicalOperator.NOT_EQUALS,
        DBCLogicalOperator.GREATER,
        DBCLogicalOperator.LESS,
        DBCLogicalOperator.LIKE,
        DBCLogicalOperator.IN,
    };
}
/**
 * Refreshes cached metadata: clears the attribute cache and, for enum types,
 * re-reads the enum labels in a fresh meta session.
 */
@Override
public DBSObject refreshObject(@NotNull DBRProgressMonitor monitor) throws DBException {
if (attributeCache != null) {
attributeCache.clearCache();
}
if (typeCategory == PostgreTypeCategory.E) {
try (JDBCSession session = DBUtils.openMetaSession(monitor, this, "Refresh enum values")) {
readEnumValues(session);
}
}
return this;
}
/** Enum labels loaded by readEnumValues(); null for non-enum types. */
@Property(viewable = true, optional = true, order = 16)
public Object[] getEnumValues() {
return enumValues;
}
/**
 * Qualified name for DDL/UI: bare name for types in "public" or the catalog
 * schema, otherwise schema-qualified with proper identifier quoting.
 */
@NotNull
@Override
public String getFullyQualifiedName(DBPEvaluationContext context) {
final PostgreSchema owner = getParentObject();
if (owner == null || owner.getName().equals(PostgreConstants.PUBLIC_SCHEMA_NAME) || owner.getName().equals(PostgreConstants.CATALOG_SCHEMA_NAME)) {
return getName();
} else {
return DBUtils.getQuotedIdentifier(owner) + "." + DBUtils.getQuotedIdentifier(this);
}
}
/** JSON/JSONB get a dedicated icon; all other types use the default (null). */
@Nullable
@Override
public DBPImage getObjectImage() {
if (PostgreConstants.TYPE_JSONB.equals(getName()) || PostgreConstants.TYPE_JSON.equals(getName())) {
return DBIcon.TYPE_JSON;
}
return null;
}
/**
 * Generates DDL text for this type: a commented-out DROP statement followed by
 * a CREATE statement whose shape depends on the type kind (pseudo, domain,
 * enum, range, base or composite), plus a COMMENT ON TYPE when a description
 * is set. Unsupported kinds produce an explanatory SQL comment instead.
 *
 * Fix: in the range branch the collation returned by getCollationId() is now
 * null-checked before use — that method returns null when typcollation is 0,
 * which previously caused an NPE for ranges without an explicit collation
 * (the domain branch already guarded against this).
 */
@Override
public String getObjectDefinitionText(DBRProgressMonitor monitor, Map<String, Object> options) throws DBException {
    StringBuilder sql = new StringBuilder();
    if (typeType == PostgreTypeType.d) {
        sql.append("-- DROP DOMAIN ").append(getFullyQualifiedName(DBPEvaluationContext.DDL)).append(";\n\n");
    } else {
        sql.append("-- DROP TYPE ").append(getFullyQualifiedName(DBPEvaluationContext.DDL)).append(";\n\n");
    }
    switch (typeType) {
        case p: {
            // Pseudo/shell type: CREATE TYPE with no body.
            sql.append("CREATE TYPE ").append(getFullyQualifiedName(DBPEvaluationContext.DDL)).append(";");
            break;
        }
        case d: {
            // Domain over a base type with optional collation/default/constraint.
            // NOTE(review): getBaseType(monitor) may return null if the base type
            // cannot be resolved (see resolveValueTypeFromBaseType) — confirm.
            sql.append("CREATE DOMAIN ").append(getFullyQualifiedName(DBPEvaluationContext.DDL)).append(" AS ").append(getBaseType(monitor).getFullyQualifiedName(DBPEvaluationContext.DDL));
            PostgreCollation collation = getCollationId(monitor);
            if (collation != null) {
                sql.append("\n\tCOLLATE ").append(collation.getName());
            }
            if (!CommonUtils.isEmpty(defaultValue)) {
                sql.append("\n\tDEFAULT ").append(defaultValue);
            }
            String constraint = getConstraint(monitor);
            if (!CommonUtils.isEmpty(constraint)) {
                sql.append("\n\tCONSTRAINT ").append(constraint);
            }
            sql.append(";");
            break;
        }
        case e: {
            // Enum: one quoted label per line.
            sql.append("CREATE TYPE ").append(getFullyQualifiedName(DBPEvaluationContext.DDL)).append(" AS ENUM (\n");
            if (enumValues != null) {
                for (int i = 0; i < enumValues.length; i++) {
                    Object item = enumValues[i];
                    sql.append("\t").append(SQLUtils.quoteString(this, CommonUtils.toString(item)));
                    if (i < enumValues.length - 1) sql.append(",\n");
                }
            }
            sql.append(");\n");
            break;
        }
        case r: {
            sql.append("CREATE TYPE ").append(getFullyQualifiedName(DBPEvaluationContext.DDL)).append(" AS RANGE (\n");
            // FIX: guard against null — getCollationId() returns null when no collation is set.
            PostgreCollation collation = getCollationId(monitor);
            if (collation != null) {
                appendCreateTypeParameter(sql, "COLLATION ", collation.getName());
            }
            appendCreateTypeParameter(sql, "CANONICAL", canonicalName);
            // TODO: read data from pg_range
            // if (!CommonUtils.isEmpty(su)) {
            //     sql.append("\n\tCOLLATION ").append(canonicalName);
            // }
            sql.append(");\n");
            break;
        }
        case b: {
            // Base type: emit only the support functions / parameters that are actually set.
            sql.append("CREATE TYPE ").append(getFullyQualifiedName(DBPEvaluationContext.DDL)).append(" (");
            if (isValidFuncRef(inputFunc)) appendCreateTypeParameter(sql, "INPUT", inputFunc);
            if (isValidFuncRef(outputFunc)) appendCreateTypeParameter(sql, "OUTPUT", outputFunc);
            if (isValidFuncRef(receiveFunc)) appendCreateTypeParameter(sql, "RECEIVE", receiveFunc);
            if (isValidFuncRef(sendFunc)) appendCreateTypeParameter(sql, "SEND", sendFunc);
            if (isValidFuncRef(modInFunc)) appendCreateTypeParameter(sql, "TYPMOD_IN", modInFunc);
            if (isValidFuncRef(modOutFunc)) appendCreateTypeParameter(sql, "TYPMOD_OUT", modOutFunc);
            if (isValidFuncRef(analyzeFunc)) appendCreateTypeParameter(sql, "ANALYZE", analyzeFunc);
            if (getMaxLength() > 0) appendCreateTypeParameter(sql, "INTERNALLENGTH", getMaxLength());
            if (isByValue) appendCreateTypeParameter(sql, "PASSEDBYVALUE");
            if (align != null && align.getBytes() > 1) appendCreateTypeParameter(sql, "ALIGNMENT", align.getBytes());
            if (storage != null) appendCreateTypeParameter(sql, "STORAGE", storage.getName());
            if (typeCategory != null) appendCreateTypeParameter(sql, "CATEGORY", typeCategory.name());
            if (isPreferred) appendCreateTypeParameter(sql, "PREFERRED", isPreferred);
            appendCreateTypeParameter(sql, "DEFAULT", defaultValue);
            PostgreDataType elementType = getElementType(monitor);
            if (elementType != null) {
                appendCreateTypeParameter(sql, "ELEMENT", elementType.getFullyQualifiedName(DBPEvaluationContext.DDL));
            }
            if (!CommonUtils.isEmpty(arrayDelimiter)) appendCreateTypeParameter(sql, "DELIMITER", SQLUtils.quoteString(getDataSource(), arrayDelimiter));
            if (collationId != 0) appendCreateTypeParameter(sql, "COLLATABLE", true);
            sql.append(");\n");
            break;
        }
        case c: {
            // Composite: one "name type[modifiers]" entry per attribute.
            sql.append("CREATE TYPE ").append(getFullyQualifiedName(DBPEvaluationContext.DDL)).append(" AS (");
            Collection<PostgreDataTypeAttribute> attributes = getAttributes(monitor);
            if (!CommonUtils.isEmpty(attributes)) {
                boolean first = true;
                for (PostgreDataTypeAttribute attr : attributes) {
                    if (!first) sql.append(",");
                    first = false;
                    sql.append("\n\t")
                        .append(DBUtils.getQuotedIdentifier(attr)).append(" ").append(attr.getTypeName());
                    String modifiers = SQLUtils.getColumnTypeModifiers(getDataSource(), attr, attr.getTypeName(), attr.getDataKind());
                    if (modifiers != null) sql.append(modifiers);
                }
            }
            sql.append(");\n");
            break;
        }
        default: {
            sql.append("-- Data type ").append(getFullyQualifiedName(DBPEvaluationContext.UI)).append(" (").append(typeType.getName()).append(") DDL is not supported\n");
            break;
        }
    }
    String description = getDescription();
    if (!CommonUtils.isEmpty(description)) {
        sql.append("\nCOMMENT ON TYPE ").append(getFullyQualifiedName(DBPEvaluationContext.DDL)).append(" IS ").append(SQLUtils.quoteString(this, description));
    }
    return sql.toString();
}
/** True when the catalog column names a real support function: non-empty and not the "-" placeholder. */
private boolean isValidFuncRef(String func) {
    if (CommonUtils.isEmpty(func)) {
        return false;
    }
    return !"-".equals(func);
}
/**
 * Appends a "name = value" parameter line to a CREATE TYPE body, inserting a
 * comma separator unless this is the first parameter (builder still ends with '(').
 * Null values are skipped entirely.
 */
private void appendCreateTypeParameter(@NotNull StringBuilder sql, @NotNull String name, @Nullable Object value) {
    if (value == null) {
        return;
    }
    char lastChar = sql.charAt(sql.length() - 1);
    if (lastChar != '(') {
        sql.append(",");
    }
    sql.append("\n\t").append(name).append(" = ").append(value);
}
/**
 * Appends a flag-style (valueless) parameter line to a CREATE TYPE body,
 * inserting a comma separator when the builder currently ends in a word
 * character (i.e. a previous parameter).
 */
private void appendCreateTypeParameter(@NotNull StringBuilder sql, @NotNull String name) {
    char lastChar = sql.charAt(sql.length() - 1);
    if (Character.isLetterOrDigit(lastChar)) {
        sql.append(",");
    }
    sql.append("\n\t").append(name);
}
/** Editing type DDL from source text is not supported; always throws. */
@Override
public void setObjectDefinitionText(String sourceText) throws DBException {
throw new DBException("Not supported");
}
/**
 * Cache of composite-type attributes, loaded from pg_attribute joined with
 * the backing relation (pg_class), default expressions (pg_attrdef) and
 * column comments (pg_description). Only live, user-visible columns are
 * fetched (attnum > 0, not dropped).
 */
class AttributeCache extends JDBCObjectCache<PostgreDataType, PostgreDataTypeAttribute> {
@NotNull
@Override
protected JDBCStatement prepareObjectsStatement(@NotNull JDBCSession session, @NotNull PostgreDataType postgreDataType) throws SQLException {
JDBCPreparedStatement dbStat = session.prepareStatement(
"SELECT c.relname,a.*,ad.oid as attr_id,pg_catalog.pg_get_expr(ad.adbin, ad.adrelid, true) as def_value,dsc.description" +
"\nFROM pg_catalog.pg_attribute a" +
"\nINNER JOIN pg_catalog.pg_class c ON (a.attrelid=c.oid)" +
"\nLEFT OUTER JOIN pg_catalog.pg_attrdef ad ON (a.attrelid=ad.adrelid AND a.attnum = ad.adnum)" +
"\nLEFT OUTER JOIN pg_catalog.pg_description dsc ON (c.oid=dsc.objoid AND a.attnum = dsc.objsubid)" +
"\nWHERE a.attnum > 0 AND NOT a.attisdropped AND c.oid=?" +
"\nORDER BY a.attnum");
// Bind the composite type's backing relation OID (pg_type.typrelid).
dbStat.setLong(1, postgreDataType.classId);
return dbStat;
}
@Override
protected PostgreDataTypeAttribute fetchObject(@NotNull JDBCSession session, @NotNull PostgreDataType postgreDataType, @NotNull JDBCResultSet resultSet) throws SQLException, DBException {
return new PostgreDataTypeAttribute(session.getProgressMonitor(), postgreDataType, resultSet);
}
}
/**
 * Reads a single data type definition from a pg_type result-set row and maps it
 * to a JDBC value type.
 *
 * @param session    active JDBC session (used for server capability checks and monitor)
 * @param schema     schema owning the type
 * @param dbResult   positioned result set over pg_type (joined with pg_class for relkind)
 * @param skipTables when true, table-backed row types (relkind other than 'c') are skipped
 * @return the data type, or {@code null} if the row has no name or was filtered out
 * @throws SQLException on JDBC access errors
 * @throws DBException  on model construction errors
 */
public static PostgreDataType readDataType(@NotNull JDBCSession session, @NotNull PostgreSchema schema, @NotNull JDBCResultSet dbResult, boolean skipTables) throws SQLException, DBException
{
    //long schemaId = JDBCUtils.safeGetLong(dbResult, "typnamespace");
    long typeId = JDBCUtils.safeGetLong(dbResult, "oid");
    String name = JDBCUtils.safeGetString(dbResult, "typname");
    if (CommonUtils.isEmpty(name)) {
        // Unnamed type row: nothing useful to model.
        log.debug("Empty name for data type " + typeId);
        return null;
    }
    if (skipTables) {
        // Every table/view has an implicit row type in pg_type; only keep
        // standalone composite types (relkind = 'c').
        String relKind = JDBCUtils.safeGetString(dbResult, "relkind");
        if (relKind != null) {
            try {
                final RelKind tableType = RelKind.valueOf(relKind);
                if (tableType != RelKind.c) {
                    // Not a composite data type - skip it
                    return null;
                }
            } catch (Exception e) {
                // Unknown relkind value (newer server?) - keep the type and log.
                log.debug(e.getMessage());
            }
        }
    }
    int typeLength = JDBCUtils.safeGetInt(dbResult, "typlen");
    PostgreTypeCategory typeCategory;
    // typcategory only exists on servers that support it; null triggers the legacy OID-based mapping below.
    final String catString =
        PostgreUtils.supportsTypeCategory(session.getDataSource()) ? JDBCUtils.safeGetString(dbResult, "typcategory") : null;
    if (catString == null) {
        typeCategory = null;
    } else {
        try {
            typeCategory = PostgreTypeCategory.valueOf(catString.toUpperCase());
        } catch (IllegalArgumentException e) {
            // Unrecognized category letter - fall back to OID-based mapping.
            log.debug(e);
            typeCategory = null;
        }
    }
    int valueType;
    if (ArrayUtils.contains(OID_TYPES, name) || name.equals(PostgreConstants.TYPE_HSTORE)) {
        // OID aliases and hstore are handled textually.
        valueType = Types.VARCHAR;
    } else {
        if (typeCategory == null) {
            final long typElem = JDBCUtils.safeGetLong(dbResult, "typelem");
            // In old PostgreSQL versions
            switch ((int) typeId) {
                case PostgreOid.BIT:
                    valueType = Types.BIT;
                    break;
                case PostgreOid.BOOL:
                    valueType = Types.BOOLEAN;
                    break;
                case PostgreOid.INT2:
                    valueType = Types.SMALLINT;
                    break;
                case PostgreOid.INT4:
                    valueType = Types.INTEGER;
                    break;
                case PostgreOid.INT8:
                    valueType = Types.BIGINT;
                    break;
                case PostgreOid.FLOAT4:
                    valueType = Types.FLOAT;
                    break;
                case PostgreOid.FLOAT8:
                    valueType = Types.DOUBLE;
                    break;
                case PostgreOid.NUMERIC:
                    valueType = Types.NUMERIC;
                    break;
                case PostgreOid.CHAR:
                    valueType = Types.CHAR;
                    break;
                case PostgreOid.VARCHAR:
                    valueType = Types.VARCHAR;
                    break;
                case PostgreOid.DATE:
                    valueType = Types.DATE;
                    break;
                case PostgreOid.TIME:
                case PostgreOid.TIMETZ:
                    valueType = Types.TIME;
                    break;
                case PostgreOid.TIMESTAMP:
                case PostgreOid.TIMESTAMPTZ:
                    valueType = Types.TIMESTAMP;
                    break;
                case PostgreOid.BYTEA:
                    valueType = Types.BINARY;
                    break;
                case PostgreOid.CHAR_ARRAY:
                    valueType = Types.CHAR;
                    break;
                case PostgreOid.BPCHAR:
                    valueType = Types.CHAR;
                    break;
                case PostgreOid.XML:
                    valueType = Types.SQLXML;
                    break;
                default:
                    // typelem > 0 means an element type exists, i.e. this is an array type.
                    if (typElem > 0) {
                        valueType = Types.ARRAY;
                    } else {
                        valueType = Types.OTHER;
                    }
                    break;
            }
        } else {
            // Modern servers: map by typcategory, refining by typlen/name where needed.
            switch (typeCategory) {
                case A:
                    valueType = Types.ARRAY;
                    break;
                case P:
                    valueType = Types.OTHER;
                    break;
                case B:
                    valueType = Types.BOOLEAN;
                    break;
                case C:
                    valueType = Types.STRUCT;
                    break;
                case D:
                    // Date/time category: 4-byte types are plain dates; otherwise
                    // distinguish time/timestamp by OID, defaulting to TIMESTAMP.
                    if (typeLength == 4) {
                        valueType = Types.DATE;
                    } else {
                        switch ((int) typeId) {
                            case PostgreOid.DATE:
                                valueType = Types.DATE;
                                break;
                            case PostgreOid.TIME:
                            case PostgreOid.TIMETZ:
                                valueType = Types.TIME;
                                break;
                            case PostgreOid.TIMESTAMP:
                            case PostgreOid.TIMESTAMPTZ:
                                valueType = Types.TIMESTAMP;
                                break;
                            default:
                                valueType = Types.TIMESTAMP;
                                break;
                        }
                    }
                    break;
                case N:
                    // Numeric category: width of typlen selects the concrete JDBC type.
                    valueType = Types.NUMERIC;
                    if (name.equals("numeric")) {
                        valueType = Types.NUMERIC;
                    } else if (name.startsWith("float")) {
                        switch (typeLength) {
                            case 4:
                                valueType = Types.FLOAT;
                                break;
                            case 8:
                                valueType = Types.DOUBLE;
                                break;
                        }
                    } else {
                        switch (typeLength) {
                            case 2:
                                valueType = Types.SMALLINT;
                                break;
                            case 4:
                                valueType = Types.INTEGER;
                                break;
                            case 8:
                                valueType = Types.BIGINT;
                                break;
                        }
                    }
                    break;
                case S:
//                        if (name.equals("text")) {
//                            valueType = Types.CLOB;
//                        } else {
                    valueType = Types.VARCHAR;
//                        }
                    break;
                case U:
                    switch (name) {
                        case "bytea":
                            valueType = Types.BINARY;
                            break;
                        case "xml":
                            valueType = Types.SQLXML;
                            break;
                        default:
                            valueType = Types.OTHER;
                            break;
                    }
                    break;
                case V:
                    // NOTE(review): bit-string category mapped to NUMERIC here - confirm intended.
                    valueType = Types.NUMERIC;
                    break;
                default:
                    valueType = Types.OTHER;
                    break;
            }
        }
    }
    return new PostgreDataType(
        session,
        schema,
        typeId,
        valueType,
        name,
        typeLength,
        dbResult);
}
}
| |
/*===========================================================================
* Licensed Materials - Property of IBM
* "Restricted Materials of IBM"
*
* IBM SDK, Java(tm) Technology Edition, v8
* (C) Copyright IBM Corp. 2003, 2016. All Rights Reserved
*
* US Government Users Restricted Rights - Use, duplication or disclosure
* restricted by GSA ADP Schedule Contract with IBM Corp.
*===========================================================================
*/
/*
* Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package java.util;
import java.util.Map.Entry;
import sun.misc.SharedSecrets;
/**
* A specialized {@link Map} implementation for use with enum type keys. All
* of the keys in an enum map must come from a single enum type that is
* specified, explicitly or implicitly, when the map is created. Enum maps
* are represented internally as arrays. This representation is extremely
* compact and efficient.
*
* <p>Enum maps are maintained in the <i>natural order</i> of their keys
* (the order in which the enum constants are declared). This is reflected
* in the iterators returned by the collections views ({@link #keySet()},
* {@link #entrySet()}, and {@link #values()}).
*
* <p>Iterators returned by the collection views are <i>weakly consistent</i>:
* they will never throw {@link ConcurrentModificationException} and they may
* or may not show the effects of any modifications to the map that occur while
* the iteration is in progress.
*
* <p>Null keys are not permitted. Attempts to insert a null key will
* throw {@link NullPointerException}. Attempts to test for the
* presence of a null key or to remove one will, however, function properly.
* Null values are permitted.
* <P>Like most collection implementations <tt>EnumMap</tt> is not
* synchronized. If multiple threads access an enum map concurrently, and at
* least one of the threads modifies the map, it should be synchronized
* externally. This is typically accomplished by synchronizing on some
* object that naturally encapsulates the enum map. If no such object exists,
* the map should be "wrapped" using the {@link Collections#synchronizedMap}
* method. This is best done at creation time, to prevent accidental
* unsynchronized access:
*
* <pre>
* Map<EnumKey, V> m
* = Collections.synchronizedMap(new EnumMap<EnumKey, V>(...));
* </pre>
*
* <p>Implementation note: All basic operations execute in constant time.
* They are likely (though not guaranteed) to be faster than their
* {@link HashMap} counterparts.
*
* <p>This class is a member of the
* <a href="{@docRoot}/../technotes/guides/collections/index.html">
* Java Collections Framework</a>.
*
* @author Josh Bloch
* @see EnumSet
* @since 1.5
*/
public class EnumMap<K extends Enum<K>, V> extends AbstractMap<K, V>
    implements java.io.Serializable, Cloneable
{
    /**
     * The <tt>Class</tt> object for the enum type of all the keys of this map.
     *
     * @serial
     */
    private final Class<K> keyType;

    /**
     * All of the values comprising K.  (Cached for performance.)
     */
    private transient K[] keyUniverse;

    /**
     * Array representation of this map.  The ith element is the value
     * to which universe[i] is currently mapped, or null if it isn't
     * mapped to anything, or NULL if it's mapped to null.
     */
    private transient Object[] vals;

    /**
     * The number of mappings in this map.
     */
    private transient int size = 0;

    /**
     * Distinguished non-null value for representing null values.
     * Storing NULL instead of null in vals lets "vals[i] != null" mean
     * "key i is present" even when the user mapped the key to null.
     */
    private static final Object NULL = new Object() {
        public int hashCode() {
            return 0;
        }
        public String toString() {
            return "java.util.EnumMap.NULL";
        }
    };

    // Convert a user-supplied value to its internal representation (null -> NULL sentinel).
    private Object maskNull(Object value) {
        return (value == null ? NULL : value);
    }

    // Convert an internal value back to the user view (NULL sentinel -> null).
    @SuppressWarnings("unchecked")
    private V unmaskNull(Object value) {
        return (V)(value == NULL ? null : value);
    }

    private static final Enum<?>[] ZERO_LENGTH_ENUM_ARRAY = new Enum<?>[0];

    /**
     * Creates an empty enum map with the specified key type.
     *
     * @param keyType the class object of the key type for this enum map
     * @throws NullPointerException if <tt>keyType</tt> is null
     */
    public EnumMap(Class<K> keyType) {
        this.keyType = keyType;
        keyUniverse = getKeyUniverse(keyType);
        vals = new Object[keyUniverse.length];
    }

    /**
     * Creates an enum map with the same key type as the specified enum
     * map, initially containing the same mappings (if any).
     *
     * @param m the enum map from which to initialize this enum map
     * @throws NullPointerException if <tt>m</tt> is null
     */
    public EnumMap(EnumMap<K, ? extends V> m) {
        keyType = m.keyType;
        keyUniverse = m.keyUniverse;
        vals = m.vals.clone();
        size = m.size;
    }

    /**
     * Creates an enum map initialized from the specified map.  If the
     * specified map is an <tt>EnumMap</tt> instance, this constructor behaves
     * identically to {@link #EnumMap(EnumMap)}.  Otherwise, the specified map
     * must contain at least one mapping (in order to determine the new
     * enum map's key type).
     *
     * @param m the map from which to initialize this enum map
     * @throws IllegalArgumentException if <tt>m</tt> is not an
     *     <tt>EnumMap</tt> instance and contains no mappings
     * @throws NullPointerException if <tt>m</tt> is null
     */
    public EnumMap(Map<K, ? extends V> m) {
        if (m instanceof EnumMap) {
            EnumMap<K, ? extends V> em = (EnumMap<K, ? extends V>) m;
            keyType = em.keyType;
            keyUniverse = em.keyUniverse;
            vals = em.vals.clone();
            size = em.size;
        } else {
            if (m.isEmpty())
                throw new IllegalArgumentException("Specified map is empty");
            // Infer the key type from the first key of the source map.
            keyType = m.keySet().iterator().next().getDeclaringClass();
            keyUniverse = getKeyUniverse(keyType);
            vals = new Object[keyUniverse.length];
            putAll(m);
        }
    }

    // Query Operations

    /**
     * Returns the number of key-value mappings in this map.
     *
     * @return the number of key-value mappings in this map
     */
    public int size() {
        return size;
    }

    /**
     * Returns <tt>true</tt> if this map maps one or more keys to the
     * specified value.
     *
     * @param value the value whose presence in this map is to be tested
     * @return <tt>true</tt> if this map maps one or more keys to this value
     */
    public boolean containsValue(Object value) {
        value = maskNull(value);
        for (Object val : vals)
            if (value.equals(val))
                return true;
        return false;
    }

    /**
     * Returns <tt>true</tt> if this map contains a mapping for the specified
     * key.
     *
     * @param key the key whose presence in this map is to be tested
     * @return <tt>true</tt> if this map contains a mapping for the specified
     *            key
     */
    public boolean containsKey(Object key) {
        return isValidKey(key) && vals[((Enum<?>)key).ordinal()] != null;
    }

    // True if key is present AND mapped (after null masking) to the given value.
    private boolean containsMapping(Object key, Object value) {
        return isValidKey(key) &&
            maskNull(value).equals(vals[((Enum<?>)key).ordinal()]);
    }

    /**
     * Returns the value to which the specified key is mapped,
     * or {@code null} if this map contains no mapping for the key.
     *
     * <p>More formally, if this map contains a mapping from a key
     * {@code k} to a value {@code v} such that {@code (key == k)},
     * then this method returns {@code v}; otherwise it returns
     * {@code null}.  (There can be at most one such mapping.)
     *
     * <p>A return value of {@code null} does not <i>necessarily</i>
     * indicate that the map contains no mapping for the key; it's also
     * possible that the map explicitly maps the key to {@code null}.
     * The {@link #containsKey containsKey} operation may be used to
     * distinguish these two cases.
     */
    public V get(Object key) {
        return (isValidKey(key) ?
                unmaskNull(vals[((Enum<?>)key).ordinal()]) : null);
    }

    // Modification Operations

    /**
     * Associates the specified value with the specified key in this map.
     * If the map previously contained a mapping for this key, the old
     * value is replaced.
     *
     * @param key the key with which the specified value is to be associated
     * @param value the value to be associated with the specified key
     *
     * @return the previous value associated with specified key, or
     *     <tt>null</tt> if there was no mapping for key.  (A <tt>null</tt>
     *     return can also indicate that the map previously associated
     *     <tt>null</tt> with the specified key.)
     * @throws NullPointerException if the specified key is null
     */
    public V put(K key, V value) {
        typeCheck(key);

        int index = key.ordinal();
        Object oldValue = vals[index];
        vals[index] = maskNull(value);
        if (oldValue == null)
            size++;
        return unmaskNull(oldValue);
    }

    /**
     * Removes the mapping for this key from this map if present.
     *
     * @param key the key whose mapping is to be removed from the map
     * @return the previous value associated with specified key, or
     *     <tt>null</tt> if there was no entry for key.  (A <tt>null</tt>
     *     return can also indicate that the map previously associated
     *     <tt>null</tt> with the specified key.)
     */
    public V remove(Object key) {
        if (!isValidKey(key))
            return null;
        int index = ((Enum<?>)key).ordinal();
        Object oldValue = vals[index];
        vals[index] = null;
        if (oldValue != null)
            size--;
        return unmaskNull(oldValue);
    }

    // Remove only if key is currently mapped (after masking) to the given value.
    private boolean removeMapping(Object key, Object value) {
        if (!isValidKey(key))
            return false;
        int index = ((Enum<?>)key).ordinal();
        if (maskNull(value).equals(vals[index])) {
            vals[index] = null;
            size--;
            return true;
        }
        return false;
    }

    /**
     * Returns true if key is of the proper type to be a key in this
     * enum map.
     */
    private boolean isValidKey(Object key) {
        if (key == null)
            return false;

        // Cheaper than instanceof Enum followed by getDeclaringClass
        // (the superclass check covers constant-specific enum subclasses).
        Class<?> keyClass = key.getClass();
        return keyClass == keyType || keyClass.getSuperclass() == keyType;
    }

    // Bulk Operations

    /**
     * Copies all of the mappings from the specified map to this map.
     * These mappings will replace any mappings that this map had for
     * any of the keys currently in the specified map.
     *
     * @param m the mappings to be stored in this map
     * @throws NullPointerException the specified map is null, or if
     *     one or more keys in the specified map are null
     */
    public void putAll(Map<? extends K, ? extends V> m) {
        if (m instanceof EnumMap) {
            EnumMap<?, ?> em = (EnumMap<?, ?>)m;
            if (em.keyType != keyType) {
                if (em.isEmpty())
                    return;
                throw new ClassCastException(em.keyType + " != " + keyType);
            }

            // Same key type: bulk-copy the backing array, counting new mappings.
            for (int i = 0; i < keyUniverse.length; i++) {
                Object emValue = em.vals[i];
                if (emValue != null) {
                    if (vals[i] == null)
                        size++;
                    vals[i] = emValue;
                }
            }
        } else {
            super.putAll(m);
        }
    }

    /**
     * Removes all mappings from this map.
     */
    public void clear() {
        Arrays.fill(vals, null);
        size = 0;
    }

    // Views

    /**
     * This field is initialized to contain an instance of the entry set
     * view the first time this view is requested.  The view is stateless,
     * so there's no reason to create more than one.
     */
    private transient Set<Map.Entry<K,V>> entrySet;

    /**
     * Returns a {@link Set} view of the keys contained in this map.
     * The returned set obeys the general contract outlined in
     * {@link Map#keySet()}.  The set's iterator will return the keys
     * in their natural order (the order in which the enum constants
     * are declared).
     *
     * @return a set view of the keys contained in this enum map
     */
    public Set<K> keySet() {
        // keySet is the lazily-initialized cache field inherited from AbstractMap.
        Set<K> ks = keySet;
        if (ks == null) {
            ks = new KeySet();
            keySet = ks;
        }
        return ks;
    }

    private class KeySet extends AbstractSet<K> {
        public Iterator<K> iterator() {
            return new KeyIterator();
        }
        public int size() {
            return size;
        }
        public boolean contains(Object o) {
            return containsKey(o);
        }
        public boolean remove(Object o) {
            // Detect removal via the size delta (EnumMap.remove returns the old
            // value, which cannot distinguish "absent" from "mapped to null").
            int oldSize = size;
            EnumMap.this.remove(o);
            return size != oldSize;
        }
        public void clear() {
            EnumMap.this.clear();
        }
    }

    /**
     * Returns a {@link Collection} view of the values contained in this map.
     * The returned collection obeys the general contract outlined in
     * {@link Map#values()}.  The collection's iterator will return the
     * values in the order their corresponding keys appear in map,
     * which is their natural order (the order in which the enum constants
     * are declared).
     *
     * @return a collection view of the values contained in this map
     */
    public Collection<V> values() {
        // values is the lazily-initialized cache field inherited from AbstractMap.
        Collection<V> vs = values;
        if (vs == null) {
            vs = new Values();
            values = vs;
        }
        return vs;
    }

    private class Values extends AbstractCollection<V> {
        public Iterator<V> iterator() {
            return new ValueIterator();
        }
        public int size() {
            return size;
        }
        public boolean contains(Object o) {
            return containsValue(o);
        }
        public boolean remove(Object o) {
            // Removes the first mapping (in key order) whose value equals o.
            o = maskNull(o);

            for (int i = 0; i < vals.length; i++) {
                if (o.equals(vals[i])) {
                    vals[i] = null;
                    size--;
                    return true;
                }
            }
            return false;
        }
        public void clear() {
            EnumMap.this.clear();
        }
    }

    /**
     * Returns a {@link Set} view of the mappings contained in this map.
     * The returned set obeys the general contract outlined in
     * {@link Map#keySet()}.  The set's iterator will return the
     * mappings in the order their keys appear in map, which is their
     * natural order (the order in which the enum constants are declared).
     *
     * @return a set view of the mappings contained in this enum map
     */
    public Set<Map.Entry<K,V>> entrySet() {
        Set<Map.Entry<K,V>> es = entrySet;
        if (es != null)
            return es;
        else
            return entrySet = new EntrySet();
    }

    private class EntrySet extends AbstractSet<Map.Entry<K,V>> {
        public Iterator<Map.Entry<K,V>> iterator() {
            return new EntryIterator();
        }

        public boolean contains(Object o) {
            if (!(o instanceof Map.Entry))
                return false;
            Map.Entry<?,?> entry = (Map.Entry<?,?>)o;
            return containsMapping(entry.getKey(), entry.getValue());
        }
        public boolean remove(Object o) {
            if (!(o instanceof Map.Entry))
                return false;
            Map.Entry<?,?> entry = (Map.Entry<?,?>)o;
            return removeMapping(entry.getKey(), entry.getValue());
        }
        public int size() {
            return size;
        }
        public void clear() {
            EnumMap.this.clear();
        }
        public Object[] toArray() {
            return fillEntryArray(new Object[size]);
        }
        @SuppressWarnings("unchecked")
        public <T> T[] toArray(T[] a) {
            int size = size();
            if (a.length < size)
                a = (T[])java.lang.reflect.Array
                    .newInstance(a.getClass().getComponentType(), size);
            if (a.length > size)
                a[size] = null; // null-terminate per the Collection.toArray contract
            return (T[]) fillEntryArray(a);
        }
        // Fills a with detached SimpleEntry snapshots of all present mappings.
        private Object[] fillEntryArray(Object[] a) {
            int j = 0;
            for (int i = 0; i < vals.length; i++)
                if (vals[i] != null)
                    a[j++] = new AbstractMap.SimpleEntry<>(
                        keyUniverse[i], unmaskNull(vals[i]));
            return a;
        }
    }

    private abstract class EnumMapIterator<T> implements Iterator<T> {
        // Lower bound on index of next element to return
        int index = 0;

        // Index of last returned element, or -1 if none
        int lastReturnedIndex = -1;

        public boolean hasNext() {
            // Skip over absent slots; iteration is weakly consistent by design.
            while (index < vals.length && vals[index] == null)
                index++;
            return index != vals.length;
        }

        public void remove() {
            checkLastReturnedIndex();

            if (vals[lastReturnedIndex] != null) {
                vals[lastReturnedIndex] = null;
                size--;
            }
            lastReturnedIndex = -1;
        }

        private void checkLastReturnedIndex() {
            if (lastReturnedIndex < 0)
                throw new IllegalStateException();
        }
    }

    private class KeyIterator extends EnumMapIterator<K> {
        public K next() {
            if (!hasNext())
                throw new NoSuchElementException();
            lastReturnedIndex = index++;
            return keyUniverse[lastReturnedIndex];
        }
    }

    private class ValueIterator extends EnumMapIterator<V> {
        public V next() {
            if (!hasNext())
                throw new NoSuchElementException();
            lastReturnedIndex = index++;
            return unmaskNull(vals[lastReturnedIndex]);
        }
    }

    private class EntryIterator extends EnumMapIterator<Map.Entry<K,V>> {
        private Entry lastReturnedEntry;

        public Map.Entry<K,V> next() {
            if (!hasNext())
                throw new NoSuchElementException();
            lastReturnedEntry = new Entry(index++);
            return lastReturnedEntry;
        }

        public void remove() {
            lastReturnedIndex =
                ((null == lastReturnedEntry) ? -1 : lastReturnedEntry.index);
            super.remove();
            // super.remove() reset lastReturnedIndex to -1; propagating it
            // invalidates the handed-out Entry (see checkIndexForEntryUse).
            lastReturnedEntry.index = lastReturnedIndex;
            lastReturnedEntry = null;
        }

        private class Entry implements Map.Entry<K,V> {
            // Slot in keyUniverse/vals; set to -1 once the entry is removed.
            private int index;

            private Entry(int index) {
                this.index = index;
            }

            public K getKey() {
                checkIndexForEntryUse();
                return keyUniverse[index];
            }

            public V getValue() {
                checkIndexForEntryUse();
                return unmaskNull(vals[index]);
            }

            public V setValue(V value) {
                checkIndexForEntryUse();
                V oldValue = unmaskNull(vals[index]);
                vals[index] = maskNull(value);
                return oldValue;
            }

            public boolean equals(Object o) {
                if (index < 0)
                    return o == this;

                if (!(o instanceof Map.Entry))
                    return false;

                Map.Entry<?,?> e = (Map.Entry<?,?>)o;
                V ourValue = unmaskNull(vals[index]);
                Object hisValue = e.getValue();
                return (e.getKey() == keyUniverse[index] &&
                        (ourValue == hisValue ||
                         (ourValue != null && ourValue.equals(hisValue))));
            }

            public int hashCode() {
                if (index < 0)
                    return super.hashCode();

                return entryHashCode(index);
            }

            public String toString() {
                if (index < 0)
                    return super.toString();

                return keyUniverse[index] + "="
                    + unmaskNull(vals[index]);
            }

            private void checkIndexForEntryUse() {
                if (index < 0)
                    throw new IllegalStateException("Entry was removed");
            }
        }
    }

    // Comparison and hashing

    /**
     * Compares the specified object with this map for equality.  Returns
     * <tt>true</tt> if the given object is also a map and the two maps
     * represent the same mappings, as specified in the {@link
     * Map#equals(Object)} contract.
     *
     * @param o the object to be compared for equality with this map
     * @return <tt>true</tt> if the specified object is equal to this map
     */
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o instanceof EnumMap)
            return equals((EnumMap<?,?>)o);
        if (!(o instanceof Map))
            return false;

        Map<?,?> m = (Map<?,?>)o;
        if (size != m.size())
            return false;

        for (int i = 0; i < keyUniverse.length; i++) {
            if (null != vals[i]) {
                K key = keyUniverse[i];
                V value = unmaskNull(vals[i]);
                if (null == value) {
                    // Distinguish "mapped to null" from "absent" in the other map.
                    if (!((null == m.get(key)) && m.containsKey(key)))
                        return false;
                } else {
                    if (!value.equals(m.get(key)))
                        return false;
                }
            }
        }

        return true;
    }

    // Fast path: two EnumMaps with matching key types compare their backing arrays directly.
    private boolean equals(EnumMap<?,?> em) {
        if (em.keyType != keyType)
            return size == 0 && em.size == 0;

        // Key types match, compare each value
        for (int i = 0; i < keyUniverse.length; i++) {
            Object ourValue = vals[i];
            Object hisValue = em.vals[i];
            if (hisValue != ourValue &&
                (hisValue == null || !hisValue.equals(ourValue)))
                return false;
        }
        return true;
    }

    /**
     * Returns the hash code value for this map.  The hash code of a map is
     * defined to be the sum of the hash codes of each entry in the map.
     */
    public int hashCode() {
        int h = 0;

        for (int i = 0; i < keyUniverse.length; i++) {
            if (null != vals[i]) {
                h += entryHashCode(i);
            }
        }

        return h;
    }

    // Per-entry hash per the Map.Entry contract: key hash XOR value hash
    // (NULL sentinel hashes to 0, matching a null value).
    private int entryHashCode(int index) {
        return (keyUniverse[index].hashCode() ^ vals[index].hashCode());
    }

    /**
     * Returns a shallow copy of this enum map.  (The values themselves
     * are not cloned.
     *
     * @return a shallow copy of this enum map
     */
    @SuppressWarnings("unchecked")
    public EnumMap<K, V> clone() {
        EnumMap<K, V> result = null;
        try {
            result = (EnumMap<K, V>) super.clone();
        } catch(CloneNotSupportedException e) {
            throw new AssertionError();
        }
        // Deep-copy the value array; reset the cached entry-set view so the
        // clone lazily creates its own.
        result.vals = result.vals.clone();
        result.entrySet = null;
        return result;
    }

    /**
     * Throws an exception if e is not of the correct type for this enum set.
     */
    private void typeCheck(K key) {
        Class<?> keyClass = key.getClass();
        if (keyClass != keyType && keyClass.getSuperclass() != keyType)
            throw new ClassCastException(keyClass + " != " + keyType);
    }

    /**
     * Returns all of the values comprising K.
     * The result is uncloned, cached, and shared by all callers.
     */
    private static <K extends Enum<K>> K[] getKeyUniverse(Class<K> keyType) {
        return SharedSecrets.getJavaLangAccess()
                                        .getEnumConstantsShared(keyType);
    }

    private static final long serialVersionUID = 458661240069192865L;

    /**
     * Save the state of the <tt>EnumMap</tt> instance to a stream (i.e.,
     * serialize it).
     *
     * @serialData The <i>size</i> of the enum map (the number of key-value
     *             mappings) is emitted (int), followed by the key (Object)
     *             and value (Object) for each key-value mapping represented
     *             by the enum map.
     */
    private void writeObject(java.io.ObjectOutputStream s)
        throws java.io.IOException
    {
        // Write out the key type and any hidden stuff
        s.defaultWriteObject();

        // Write out size (number of Mappings)
        s.writeInt(size);

        // Write out keys and values (alternating)
        int entriesToBeWritten = size;
        for (int i = 0; entriesToBeWritten > 0; i++) {
            if (null != vals[i]) {
                s.writeObject(keyUniverse[i]);
                s.writeObject(unmaskNull(vals[i]));
                entriesToBeWritten--;
            }
        }
    }

    /**
     * Reconstitute the <tt>EnumMap</tt> instance from a stream (i.e.,
     * deserialize it).
     */
    @SuppressWarnings("unchecked")
    private void readObject(java.io.ObjectInputStream s)
        throws java.io.IOException, ClassNotFoundException
    {
        // Read in the key type and any hidden stuff
        s.defaultReadObject();

        // keyUniverse and vals are transient; rebuild them from the key type.
        keyUniverse = getKeyUniverse(keyType);
        vals = new Object[keyUniverse.length];

        // Read in size (number of Mappings)
        int size = s.readInt();

        // Read the keys and values, and put the mappings in the HashMap
        for (int i = 0; i < size; i++) {
            K key = (K) s.readObject();
            V value = (V) s.readObject();
            put(key, value);
        }
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.ccr.action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.cluster.routing.allocation.DataTier;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexingSlowLog;
import org.elasticsearch.index.MergePolicyConfig;
import org.elasticsearch.index.MergeSchedulerConfig;
import org.elasticsearch.index.SearchSlowLog;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.engine.EngineConfig;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesRequestCache;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.license.LicenseUtils;
import org.elasticsearch.persistent.PersistentTasksService;
import org.elasticsearch.snapshots.SearchableSnapshotsSettings;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ccr.Ccr;
import org.elasticsearch.xpack.ccr.CcrLicenseChecker;
import org.elasticsearch.xpack.ccr.CcrSettings;
import org.elasticsearch.xpack.core.ClientHelper;
import org.elasticsearch.xpack.core.ccr.action.FollowParameters;
import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction;
import org.elasticsearch.xpack.core.ccr.action.ShardFollowTask;
import java.io.IOException;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
public class TransportResumeFollowAction extends AcknowledgedTransportMasterNodeAction<ResumeFollowAction.Request> {
static final ByteSizeValue DEFAULT_MAX_READ_REQUEST_SIZE = new ByteSizeValue(32, ByteSizeUnit.MB);
static final ByteSizeValue DEFAULT_MAX_WRITE_REQUEST_SIZE = new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES);
private static final TimeValue DEFAULT_MAX_RETRY_DELAY = new TimeValue(500);
private static final int DEFAULT_MAX_OUTSTANDING_WRITE_REQUESTS = 9;
private static final int DEFAULT_MAX_WRITE_BUFFER_COUNT = Integer.MAX_VALUE;
private static final ByteSizeValue DEFAULT_MAX_WRITE_BUFFER_SIZE = new ByteSizeValue(512, ByteSizeUnit.MB);
private static final int DEFAULT_MAX_READ_REQUEST_OPERATION_COUNT = 5120;
private static final int DEFAULT_MAX_WRITE_REQUEST_OPERATION_COUNT = 5120;
private static final int DEFAULT_MAX_OUTSTANDING_READ_REQUESTS = 12;
static final TimeValue DEFAULT_READ_POLL_TIMEOUT = TimeValue.timeValueMinutes(1);
private final Client client;
private final ThreadPool threadPool;
private final PersistentTasksService persistentTasksService;
private final IndicesService indicesService;
private final CcrLicenseChecker ccrLicenseChecker;
@Inject
public TransportResumeFollowAction(
    final ThreadPool threadPool,
    final TransportService transportService,
    final ActionFilters actionFilters,
    final Client client,
    final ClusterService clusterService,
    final IndexNameExpressionResolver indexNameExpressionResolver,
    final PersistentTasksService persistentTasksService,
    final IndicesService indicesService,
    final CcrLicenseChecker ccrLicenseChecker
) {
    // Runs on the SAME thread pool: the master operation itself only validates
    // and dispatches persistent tasks, so no dedicated executor is needed.
    super(
        ResumeFollowAction.NAME,
        true,
        transportService,
        clusterService,
        threadPool,
        actionFilters,
        ResumeFollowAction.Request::new,
        indexNameExpressionResolver,
        ThreadPool.Names.SAME
    );
    this.client = client;
    this.threadPool = threadPool;
    this.persistentTasksService = persistentTasksService;
    this.indicesService = indicesService;
    // License checker is mandatory - fail fast at construction time.
    this.ccrLicenseChecker = Objects.requireNonNull(ccrLicenseChecker);
}
@Override
protected ClusterBlockException checkBlock(ResumeFollowAction.Request request, ClusterState state) {
    // Resuming a follower mutates index metadata, so a global metadata-write
    // block must abort the request before the master operation runs.
    final ClusterBlockException blockException =
        state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
    return blockException;
}
@Override
protected void masterOperation(
    Task task,
    final ResumeFollowAction.Request request,
    ClusterState state,
    final ActionListener<AcknowledgedResponse> listener
) throws Exception {
    // Gate the whole operation on the CCR license before touching any state.
    if (ccrLicenseChecker.isCcrAllowed() == false) {
        listener.onFailure(LicenseUtils.newComplianceException("ccr"));
        return;
    }
    final IndexMetadata followerIndexMetadata = state.getMetadata().index(request.getFollowerIndex());
    if (followerIndexMetadata == null) {
        listener.onFailure(new IndexNotFoundException(request.getFollowerIndex()));
        return;
    }
    // The follower index must carry CCR custom metadata recorded when the follow was created.
    final Map<String, String> ccrMetadata = followerIndexMetadata.getCustomData(Ccr.CCR_CUSTOM_METADATA_KEY);
    if (ccrMetadata == null) {
        throw new IllegalArgumentException("follow index [" + request.getFollowerIndex() + "] does not have ccr metadata");
    }
    final String leaderCluster = ccrMetadata.get(Ccr.CCR_CUSTOM_METADATA_REMOTE_CLUSTER_NAME_KEY);
    // Validates whether the leader cluster has been configured properly:
    // getRemoteClusterClient throws if the remote cluster alias is unknown.
    client.getRemoteClusterClient(leaderCluster);
    final String leaderIndex = ccrMetadata.get(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_NAME_KEY);
    // Asynchronously check the remote license and fetch leader metadata/history
    // UUIDs, then hand off to start(); failures flow straight to the listener.
    ccrLicenseChecker.checkRemoteClusterLicenseAndFetchLeaderIndexMetadataAndHistoryUUIDs(
        client,
        leaderCluster,
        leaderIndex,
        listener::onFailure,
        (leaderHistoryUUID, leaderIndexMetadata) -> {
            try {
                start(request, leaderCluster, leaderIndexMetadata.v1(), followerIndexMetadata, leaderHistoryUUID, listener);
            } catch (final IOException e) {
                listener.onFailure(e);
            }
        }
    );
}
/**
 * Performs validation on the provided leader and follow {@link IndexMetadata} instances and then
 * creates a persistent task for each leader primary shard. These persistent tasks track changes in the leader
 * shard and replicate these changes to a follower shard.
 *
 * Currently the following validation is performed:
 * <ul>
 * <li>The leader index and follow index need to have the same number of primary shards</li>
 * </ul>
 *
 * @param request                 the resume-follow request carrying the follow parameters
 * @param clusterNameAlias        remote cluster alias of the leader cluster
 * @param leaderIndexMetadata     metadata of the leader index fetched from the remote cluster
 * @param followIndexMetadata     metadata of the local follower index
 * @param leaderIndexHistoryUUIDs per-shard history UUIDs of the leader index
 * @param listener                completed once all shard-follow tasks have been requested
 * @throws IOException if creating the follower mapper service fails
 */
void start(
    ResumeFollowAction.Request request,
    String clusterNameAlias,
    IndexMetadata leaderIndexMetadata,
    IndexMetadata followIndexMetadata,
    String[] leaderIndexHistoryUUIDs,
    ActionListener<AcknowledgedResponse> listener
) throws IOException {
    MapperService mapperService = followIndexMetadata != null ? indicesService.createIndexMapperService(followIndexMetadata) : null;
    validate(request, leaderIndexMetadata, followIndexMetadata, leaderIndexHistoryUUIDs, mapperService);
    final int numShards = followIndexMetadata.getNumberOfShards();
    // The handler aggregates per-shard acknowledgements and completes the listener once.
    final ResponseHandler handler = new ResponseHandler(numShards, listener);
    // Only security-relevant headers are propagated into the persistent tasks.
    Map<String, String> filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders());
    for (int shardId = 0; shardId < numShards; shardId++) {
        // Task id is stable per follower shard so a resume replaces/extends prior tasks.
        String taskId = followIndexMetadata.getIndexUUID() + "-" + shardId;
        final ShardFollowTask shardFollowTask = createShardFollowTask(
            shardId,
            clusterNameAlias,
            request.getParameters(),
            leaderIndexMetadata,
            followIndexMetadata,
            filteredHeaders
        );
        persistentTasksService.sendStartRequest(taskId, ShardFollowTask.NAME, shardFollowTask, handler.getActionListener(shardId));
    }
}
    /**
     * Validates that the given follower index is in a state where it can (resume) following the
     * given leader index. Every failed check throws an {@link IllegalArgumentException}; checks
     * are performed in a fixed order, so callers may depend on which message they receive.
     *
     * @param request the resume-follow request; used for error reporting
     * @param leaderIndex metadata of the leader index
     * @param followIndex metadata of the follower index
     * @param leaderIndexHistoryUUID current history UUIDs of the leader shards, in shard order
     * @param followerMapperService mapper service of the follower index, used to verify that the
     *        leader mapping can be merged into the follower mapping
     * @throws IllegalArgumentException if any validation fails
     */
    static void validate(
        final ResumeFollowAction.Request request,
        final IndexMetadata leaderIndex,
        final IndexMetadata followIndex,
        final String[] leaderIndexHistoryUUID,
        final MapperService followerMapperService
    ) {
        // The follower must have been set up by CCR, which records its leader in custom metadata.
        Map<String, String> ccrIndexMetadata = followIndex.getCustomData(Ccr.CCR_CUSTOM_METADATA_KEY);
        if (ccrIndexMetadata == null) {
            throw new IllegalArgumentException("follow index [" + followIndex.getIndex().getName() + "] does not have ccr metadata");
        }
        // The leader recorded in the follower's metadata must be the index we are asked to follow.
        String leaderIndexUUID = leaderIndex.getIndex().getUUID();
        String recordedLeaderIndexUUID = ccrIndexMetadata.get(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_UUID_KEY);
        if (leaderIndexUUID.equals(recordedLeaderIndexUUID) == false) {
            throw new IllegalArgumentException(
                "follow index ["
                    + request.getFollowerIndex()
                    + "] should reference ["
                    + leaderIndexUUID
                    + "] as leader index but instead reference ["
                    + recordedLeaderIndexUUID
                    + "] as leader index"
            );
        }
        // Every leader shard's current history UUID must match the one recorded when following
        // started; a mismatch means the shard history was recreated in the meantime.
        String[] recordedHistoryUUIDs = extractLeaderShardHistoryUUIDs(ccrIndexMetadata);
        assert recordedHistoryUUIDs.length == leaderIndexHistoryUUID.length;
        for (int i = 0; i < leaderIndexHistoryUUID.length; i++) {
            String recordedLeaderIndexHistoryUUID = recordedHistoryUUIDs[i];
            String actualLeaderIndexHistoryUUID = leaderIndexHistoryUUID[i];
            if (recordedLeaderIndexHistoryUUID.equals(actualLeaderIndexHistoryUUID) == false) {
                throw new IllegalArgumentException(
                    "leader shard ["
                        + request.getFollowerIndex()
                        + "]["
                        + i
                        + "] should reference ["
                        + recordedLeaderIndexHistoryUUID
                        + "] as history uuid but instead reference ["
                        + actualLeaderIndexHistoryUUID
                        + "] as history uuid"
                );
            }
        }
        // CCR replication requires soft deletes on both sides and cannot work on searchable snapshots.
        if (IndexSettings.INDEX_SOFT_DELETES_SETTING.get(leaderIndex.getSettings()) == false) {
            throw new IllegalArgumentException(
                "leader index [" + leaderIndex.getIndex().getName() + "] does not have soft deletes enabled"
            );
        }
        if (SearchableSnapshotsSettings.isSearchableSnapshotStore(leaderIndex.getSettings())) {
            throw new IllegalArgumentException(
                "leader index ["
                    + leaderIndex.getIndex().getName()
                    + "] is a searchable snapshot index and cannot be used for cross-cluster replication purpose"
            );
        }
        if (IndexSettings.INDEX_SOFT_DELETES_SETTING.get(followIndex.getSettings()) == false) {
            throw new IllegalArgumentException("follower index [" + request.getFollowerIndex() + "] does not have soft deletes enabled");
        }
        if (SearchableSnapshotsSettings.isSearchableSnapshotStore(followIndex.getSettings())) {
            throw new IllegalArgumentException(
                "follower index ["
                    + request.getFollowerIndex()
                    + "] is a searchable snapshot index and cannot be used for cross-cluster replication purpose"
            );
        }
        // Shard counts (and routing shard counts) must line up one-to-one for shard-level following.
        if (leaderIndex.getNumberOfShards() != followIndex.getNumberOfShards()) {
            throw new IllegalArgumentException(
                "leader index primary shards ["
                    + leaderIndex.getNumberOfShards()
                    + "] does not match with the number of shards of the follow index ["
                    + followIndex.getNumberOfShards()
                    + "]"
            );
        }
        if (leaderIndex.getRoutingNumShards() != followIndex.getRoutingNumShards()) {
            throw new IllegalArgumentException(
                "leader index number_of_routing_shards ["
                    + leaderIndex.getRoutingNumShards()
                    + "] does not match with the number_of_routing_shards of the follow index ["
                    + followIndex.getRoutingNumShards()
                    + "]"
            );
        }
        if (leaderIndex.getState() != IndexMetadata.State.OPEN || followIndex.getState() != IndexMetadata.State.OPEN) {
            throw new IllegalArgumentException("leader and follow index must be open");
        }
        // The follower must have been explicitly marked as a following index.
        if (CcrSettings.CCR_FOLLOWING_INDEX_SETTING.get(followIndex.getSettings()) == false) {
            throw new IllegalArgumentException(
                "the following index ["
                    + request.getFollowerIndex()
                    + "] is not ready "
                    + "to follow; the setting ["
                    + CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey()
                    + "] must be enabled."
            );
        }
        validateSettings(leaderIndex.getSettings(), followIndex.getSettings());
        // Validates if the current follower mapping is mergeable with the leader mapping.
        // This also validates for example whether specific mapper plugins have been installed
        followerMapperService.merge(leaderIndex, MapperService.MergeReason.MAPPING_RECOVERY);
    }
/**
* Validate that the settings that are required to be identical between the leader and follower index are in fact equal.
*
* @param leaderIndexSettings the leader index settings
* @param followerIndexSettings the follower index settings
* @throws IllegalArgumentException if there are settings that are required to be equal that are not equal
*/
private static void validateSettings(final Settings leaderIndexSettings, final Settings followerIndexSettings) {
// make a copy, remove settings that are allowed to be different, and then compare if the settings are equal
final Settings leaderSettings = filter(leaderIndexSettings);
final Settings followerSettings = filter(followerIndexSettings);
if (leaderSettings.equals(followerSettings) == false) {
final String message = String.format(
Locale.ROOT,
"the leader index settings [%s] and follower index settings [%s] must be identical",
leaderSettings,
followerSettings
);
throw new IllegalArgumentException(message);
}
}
private static ShardFollowTask createShardFollowTask(
int shardId,
String clusterAliasName,
FollowParameters parameters,
IndexMetadata leaderIndexMetadata,
IndexMetadata followIndexMetadata,
Map<String, String> filteredHeaders
) {
int maxReadRequestOperationCount;
if (parameters.getMaxReadRequestOperationCount() != null) {
maxReadRequestOperationCount = parameters.getMaxReadRequestOperationCount();
} else {
maxReadRequestOperationCount = DEFAULT_MAX_READ_REQUEST_OPERATION_COUNT;
}
ByteSizeValue maxReadRequestSize;
if (parameters.getMaxReadRequestSize() != null) {
maxReadRequestSize = parameters.getMaxReadRequestSize();
} else {
maxReadRequestSize = DEFAULT_MAX_READ_REQUEST_SIZE;
}
int maxOutstandingReadRequests;
if (parameters.getMaxOutstandingReadRequests() != null) {
maxOutstandingReadRequests = parameters.getMaxOutstandingReadRequests();
} else {
maxOutstandingReadRequests = DEFAULT_MAX_OUTSTANDING_READ_REQUESTS;
}
final int maxWriteRequestOperationCount;
if (parameters.getMaxWriteRequestOperationCount() != null) {
maxWriteRequestOperationCount = parameters.getMaxWriteRequestOperationCount();
} else {
maxWriteRequestOperationCount = DEFAULT_MAX_WRITE_REQUEST_OPERATION_COUNT;
}
final ByteSizeValue maxWriteRequestSize;
if (parameters.getMaxWriteRequestSize() != null) {
maxWriteRequestSize = parameters.getMaxWriteRequestSize();
} else {
maxWriteRequestSize = DEFAULT_MAX_WRITE_REQUEST_SIZE;
}
int maxOutstandingWriteRequests;
if (parameters.getMaxOutstandingWriteRequests() != null) {
maxOutstandingWriteRequests = parameters.getMaxOutstandingWriteRequests();
} else {
maxOutstandingWriteRequests = DEFAULT_MAX_OUTSTANDING_WRITE_REQUESTS;
}
int maxWriteBufferCount;
if (parameters.getMaxWriteBufferCount() != null) {
maxWriteBufferCount = parameters.getMaxWriteBufferCount();
} else {
maxWriteBufferCount = DEFAULT_MAX_WRITE_BUFFER_COUNT;
}
ByteSizeValue maxWriteBufferSize;
if (parameters.getMaxWriteBufferSize() != null) {
maxWriteBufferSize = parameters.getMaxWriteBufferSize();
} else {
maxWriteBufferSize = DEFAULT_MAX_WRITE_BUFFER_SIZE;
}
TimeValue maxRetryDelay = parameters.getMaxRetryDelay() == null ? DEFAULT_MAX_RETRY_DELAY : parameters.getMaxRetryDelay();
TimeValue readPollTimeout = parameters.getReadPollTimeout() == null ? DEFAULT_READ_POLL_TIMEOUT : parameters.getReadPollTimeout();
return new ShardFollowTask(
clusterAliasName,
new ShardId(followIndexMetadata.getIndex(), shardId),
new ShardId(leaderIndexMetadata.getIndex(), shardId),
maxReadRequestOperationCount,
maxWriteRequestOperationCount,
maxOutstandingReadRequests,
maxOutstandingWriteRequests,
maxReadRequestSize,
maxWriteRequestSize,
maxWriteBufferCount,
maxWriteBufferSize,
maxRetryDelay,
readPollTimeout,
filteredHeaders
);
}
static String[] extractLeaderShardHistoryUUIDs(Map<String, String> ccrIndexMetadata) {
String historyUUIDs = ccrIndexMetadata.get(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_SHARD_HISTORY_UUIDS);
if (historyUUIDs == null) {
throw new IllegalArgumentException("leader index shard UUIDs are missing");
}
return historyUUIDs.split(",");
}
    /**
     * Settings that are not replicated to the follower index and are therefore excluded from the
     * leader/follower settings-equality validation in {@link #validateSettings}.
     *
     * These dynamic settings don't affect how documents are indexed (index time text analysis)
     * and / or would be inconvenient to replicate (e.g. changing the number of replicas).
     */
    static final Set<Setting<?>> NON_REPLICATED_SETTINGS = Set.of(
        IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING,
        IndexMetadata.INDEX_AUTO_EXPAND_REPLICAS_SETTING,
        IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_SETTING,
        IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_SETTING,
        IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING,
        IndexMetadata.INDEX_READ_ONLY_SETTING,
        IndexMetadata.INDEX_BLOCKS_READ_SETTING,
        IndexMetadata.INDEX_BLOCKS_WRITE_SETTING,
        IndexMetadata.INDEX_BLOCKS_METADATA_SETTING,
        IndexMetadata.INDEX_BLOCKS_READ_ONLY_ALLOW_DELETE_SETTING,
        IndexMetadata.INDEX_PRIORITY_SETTING,
        IndexMetadata.SETTING_WAIT_FOR_ACTIVE_SHARDS,
        IndexMetadata.INDEX_HIDDEN_SETTING,
        EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING,
        EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING,
        ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING,
        MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY,
        UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING,
        IndexSettings.MAX_RESULT_WINDOW_SETTING,
        IndexSettings.INDEX_WARMER_ENABLED_SETTING,
        IndexSettings.INDEX_REFRESH_INTERVAL_SETTING,
        IndexSettings.MAX_RESCORE_WINDOW_SETTING,
        IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING,
        IndexSettings.DEFAULT_FIELD_SETTING,
        IndexSettings.QUERY_STRING_LENIENT_SETTING,
        IndexSettings.QUERY_STRING_ANALYZE_WILDCARD,
        IndexSettings.QUERY_STRING_ALLOW_LEADING_WILDCARD,
        IndexSettings.ALLOW_UNMAPPED,
        IndexSettings.INDEX_SEARCH_IDLE_AFTER,
        IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING,
        IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING,
        IndexSettings.MAX_SCRIPT_FIELDS_SETTING,
        IndexSettings.MAX_REGEX_LENGTH_SETTING,
        IndexSettings.MAX_TERMS_COUNT_SETTING,
        IndexSettings.MAX_ANALYZED_OFFSET_SETTING,
        IndexSettings.MAX_DOCVALUE_FIELDS_SEARCH_SETTING,
        IndexSettings.MAX_TOKEN_COUNT_SETTING,
        IndexSettings.MAX_SLICES_PER_SCROLL,
        IndexSettings.DEFAULT_PIPELINE,
        IndexSettings.FINAL_PIPELINE,
        IndexSettings.INDEX_SEARCH_THROTTLED,
        IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING,
        IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING,
        IndexSettings.INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING,
        IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING,
        IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING,
        IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING,
        IndexSettings.INDEX_FLUSH_AFTER_MERGE_THRESHOLD_SIZE_SETTING,
        IndexSettings.INDEX_GC_DELETES_SETTING,
        IndexSettings.MAX_REFRESH_LISTENERS_PER_SHARD,
        IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING,
        BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING,
        SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING,
        SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING,
        SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING,
        SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING,
        SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING,
        SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING,
        SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING,
        SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING,
        IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING,
        IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING,
        IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING,
        IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING,
        IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING,
        IndexingSlowLog.INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG_SETTING,
        MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING,
        MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING,
        MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING,
        MergePolicyConfig.INDEX_MERGE_POLICY_DELETES_PCT_ALLOWED_SETTING,
        MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING,
        MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING,
        MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING,
        MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING,
        MergeSchedulerConfig.AUTO_THROTTLE_SETTING,
        MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING,
        MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING,
        EngineConfig.INDEX_CODEC_SETTING,
        DataTier.TIER_PREFERENCE_SETTING
    );
public static Settings filter(Settings originalSettings) {
Settings.Builder settings = Settings.builder().put(originalSettings);
// Remove settings that are always going to be different between leader and follow index:
settings.remove(CcrSettings.CCR_FOLLOWING_INDEX_SETTING.getKey());
// soft deletes setting is checked manually
settings.remove(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey());
settings.remove(IndexMetadata.SETTING_VERSION_CREATED);
settings.remove(IndexMetadata.SETTING_INDEX_UUID);
settings.remove(IndexMetadata.SETTING_HISTORY_UUID);
settings.remove(IndexMetadata.SETTING_INDEX_PROVIDED_NAME);
settings.remove(IndexMetadata.SETTING_CREATION_DATE);
// Follower index may be upgraded, while the leader index hasn't been upgraded, so it is expected
// that these settings are different:
settings.remove(IndexMetadata.SETTING_VERSION_UPGRADED);
settings.remove(IndexMetadata.SETTING_VERSION_UPGRADED_STRING);
Iterator<String> iterator = settings.keys().iterator();
while (iterator.hasNext()) {
String key = iterator.next();
for (Setting<?> whitelistedSetting : NON_REPLICATED_SETTINGS) {
if (whitelistedSetting.match(key)) {
iterator.remove();
break;
}
}
}
return settings.build();
}
}
| |
/*
*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.genie.common.dto;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Sets;
import com.netflix.genie.common.exceptions.GeniePreconditionException;
import com.netflix.genie.common.external.util.GenieObjectMapper;
import com.netflix.genie.test.suppliers.RandomSuppliers;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.time.Instant;
import java.util.Set;
import java.util.UUID;
/**
 * Unit tests for the {@link Application} DTO and its builder.
 *
 * @author tgianos
 * @since 3.0.0
 */
class ApplicationTest {

    private static final String NAME = UUID.randomUUID().toString();
    private static final String USER = UUID.randomUUID().toString();
    private static final String VERSION = UUID.randomUUID().toString();

    /**
     * Make sure the builder produces sensible defaults when only required fields are supplied.
     */
    @Test
    void canBuildApplication() {
        final Application app = new Application.Builder(NAME, USER, VERSION, ApplicationStatus.ACTIVE).build();
        Assertions.assertThat(app.getName()).isEqualTo(NAME);
        Assertions.assertThat(app.getUser()).isEqualTo(USER);
        Assertions.assertThat(app.getVersion()).isEqualTo(VERSION);
        Assertions.assertThat(app.getStatus()).isEqualTo(ApplicationStatus.ACTIVE);
        Assertions.assertThat(app.getDependencies()).isEmpty();
        Assertions.assertThat(app.getType()).isNotPresent();
        Assertions.assertThat(app.getSetupFile()).isNotPresent();
        Assertions.assertThat(app.getConfigs()).isEmpty();
        Assertions.assertThat(app.getCreated()).isNotPresent();
        Assertions.assertThat(app.getDescription()).isNotPresent();
        Assertions.assertThat(app.getId()).isNotPresent();
        Assertions.assertThat(app.getTags()).isEmpty();
        Assertions.assertThat(app.getUpdated()).isNotPresent();
        Assertions.assertThat(app.getMetadata()).isNotPresent();
    }

    /**
     * Make sure every optional field supplied to the builder is reflected on the built DTO.
     */
    @Test
    void canBuildApplicationWithOptionals() {
        // Generate all the optional values up front.
        final Set<String> dependencies = Sets.newHashSet(UUID.randomUUID().toString(), UUID.randomUUID().toString());
        final String type = UUID.randomUUID().toString();
        final String setupFile = UUID.randomUUID().toString();
        final Set<String> configs = Sets.newHashSet(UUID.randomUUID().toString(), UUID.randomUUID().toString());
        final Instant created = Instant.now();
        final String description = UUID.randomUUID().toString();
        final String id = UUID.randomUUID().toString();
        final Set<String> tags = Sets.newHashSet(UUID.randomUUID().toString(), UUID.randomUUID().toString());
        final Instant updated = Instant.now();

        final Application.Builder builder = new Application.Builder(NAME, USER, VERSION, ApplicationStatus.ACTIVE);
        builder.withDependencies(dependencies);
        builder.withType(type);
        builder.withSetupFile(setupFile);
        builder.withConfigs(configs);
        builder.withCreated(created);
        builder.withDescription(description);
        builder.withId(id);
        builder.withTags(tags);
        builder.withUpdated(updated);

        final Application app = builder.build();
        Assertions.assertThat(app.getName()).isEqualTo(NAME);
        Assertions.assertThat(app.getUser()).isEqualTo(USER);
        Assertions.assertThat(app.getVersion()).isEqualTo(VERSION);
        Assertions.assertThat(app.getStatus()).isEqualTo(ApplicationStatus.ACTIVE);
        Assertions.assertThat(app.getDependencies()).isEqualTo(dependencies);
        Assertions.assertThat(app.getType().orElseGet(RandomSuppliers.STRING)).isEqualTo(type);
        Assertions.assertThat(app.getSetupFile().orElseGet(RandomSuppliers.STRING)).isEqualTo(setupFile);
        Assertions.assertThat(app.getConfigs()).isEqualTo(configs);
        Assertions.assertThat(app.getCreated().orElseGet(RandomSuppliers.INSTANT)).isEqualTo(created);
        Assertions
            .assertThat(app.getDescription().orElseThrow(IllegalArgumentException::new))
            .isEqualTo(description);
        Assertions
            .assertThat(app.getId().orElseThrow(IllegalArgumentException::new))
            .isEqualTo(id);
        Assertions
            .assertThat(app.getTags())
            .isEqualTo(tags);
        Assertions
            .assertThat(app.getUpdated().orElseThrow(IllegalArgumentException::new))
            .isEqualTo(updated);
    }

    /**
     * Make sure both metadata builder overloads work and that invalid JSON is rejected.
     *
     * @throws IOException on JSON error
     * @throws GeniePreconditionException on invalid JSON
     */
    @Test
    void canBuildWithMetadata() throws IOException, GeniePreconditionException {
        final ObjectMapper mapper = GenieObjectMapper.getMapper();
        final String metadataJson = "{\"key1\":\"value1\",\"key2\":3}";
        final JsonNode metadataNode = mapper.readTree(metadataJson);
        final Application.Builder builder = new Application.Builder(NAME, USER, VERSION, ApplicationStatus.ACTIVE);

        // The String overload should round-trip back to the same JSON.
        builder.withMetadata(metadataJson);
        Assertions
            .assertThat(
                mapper.writeValueAsString(
                    builder.build().getMetadata().orElseThrow(IllegalArgumentException::new)
                )
            )
            .isEqualTo(metadataJson);

        // The JsonNode overload should store the node as-is.
        builder.withMetadata(metadataNode);
        Assertions
            .assertThat(builder.build().getMetadata().orElseThrow(IllegalArgumentException::new))
            .isEqualTo(metadataNode);

        // A null node clears the metadata.
        builder.withMetadata((JsonNode) null);
        Assertions.assertThat(builder.build().getMetadata()).isNotPresent();

        // Malformed JSON is rejected up front.
        Assertions
            .assertThatExceptionOfType(GeniePreconditionException.class)
            .isThrownBy(() -> builder.withMetadata("{I'm Not valid Json}"));
    }

    /**
     * Make sure null optional parameters are tolerated and fall back to the defaults.
     */
    @Test
    void canBuildApplicationNullOptionals() {
        final Application.Builder builder = new Application.Builder(NAME, USER, VERSION, ApplicationStatus.ACTIVE);
        builder.withDependencies(null);
        builder.withType(null);
        builder.withSetupFile(null);
        builder.withConfigs(null);
        builder.withCreated(null);
        builder.withDescription(null);
        builder.withId(null);
        builder.withTags(null);
        builder.withUpdated(null);

        final Application app = builder.build();
        Assertions.assertThat(app.getName()).isEqualTo(NAME);
        Assertions.assertThat(app.getUser()).isEqualTo(USER);
        Assertions.assertThat(app.getVersion()).isEqualTo(VERSION);
        Assertions.assertThat(app.getStatus()).isEqualTo(ApplicationStatus.ACTIVE);
        Assertions.assertThat(app.getDependencies()).isEmpty();
        Assertions.assertThat(app.getType()).isNotPresent();
        Assertions.assertThat(app.getSetupFile()).isNotPresent();
        Assertions.assertThat(app.getConfigs()).isEmpty();
        Assertions.assertThat(app.getCreated()).isNotPresent();
        Assertions.assertThat(app.getDescription()).isNotPresent();
        Assertions.assertThat(app.getId()).isNotPresent();
        Assertions.assertThat(app.getTags()).isEmpty();
        Assertions.assertThat(app.getUpdated()).isNotPresent();
    }

    /**
     * Make sure equality and hash codes are driven by the DTO contents (notably the id).
     */
    @Test
    void testEqualityAndHashCode() {
        final Application.Builder builder = new Application.Builder(NAME, USER, VERSION, ApplicationStatus.ACTIVE);
        builder.withDependencies(null);
        builder.withType(null);
        builder.withSetupFile(null);
        builder.withConfigs(null);
        builder.withCreated(null);
        builder.withDescription(null);
        builder.withId(UUID.randomUUID().toString());
        builder.withTags(null);
        builder.withUpdated(null);

        final Application first = builder.build();
        final Application second = builder.build();
        builder.withId(UUID.randomUUID().toString());
        final Application third = builder.build();

        Assertions.assertThat(first).isEqualTo(second);
        Assertions.assertThat(first).isNotEqualTo(third);
        Assertions.assertThat(first.hashCode()).isEqualTo(second.hashCode());
        Assertions.assertThat(first.hashCode()).isNotEqualTo(third.hashCode());
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Red Hat, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.node_monitors;
import hudson.Util;
import hudson.Extension;
import hudson.slaves.OfflineCause;
import hudson.model.Computer;
import hudson.remoting.Callable;
import jenkins.security.MasterToSlaveCallable;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.StaplerRequest;
import java.io.IOException;
import java.io.Serializable;
import java.util.Map;
import java.util.Map.Entry;
import java.util.logging.Logger;
import org.kohsuke.stapler.export.Exported;
import org.kohsuke.stapler.export.ExportedBean;
/**
* Monitors the round-trip response time to this slave.
*
* @author Kohsuke Kawaguchi
*/
public class ResponseTimeMonitor extends NodeMonitor {
    /**
     * Descriptor that drives the periodic round-trip measurement for every computer and
     * disconnects nodes once requests keep timing out.
     */
    @Extension
    public static final AbstractNodeMonitorDescriptor<Data> DESCRIPTOR = new AbstractAsyncNodeMonitorDescriptor<Data>() {
        @Override
        protected Callable<Data,IOException> createCallable(Computer c) {
            // Step1 carries the computer's previous Data; serialization for the remote call
            // replaces it with a Step2 (see Step1.writeReplace) that timestamps the send.
            return new Step1(get(c));
        }

        @Override
        protected Map<Computer, Data> monitor() throws InterruptedException {
            Map<Computer, Data> base = super.monitor();
            for (Entry<Computer, Data> e : base.entrySet()) {
                Computer c = e.getKey();
                Data d = e.getValue();
                if (d ==null) {
                    // if we failed to monitor, put in the special value (-1) that indicates a failure
                    e.setValue(d=new Data(get(c),-1L));
                }
                if(d.hasTooManyTimeouts() && !isIgnored()) {
                    // unlike other monitors whose failure still allows us to communicate with the slave,
                    // a failure in this monitor indicates that we are just unable to make any requests
                    // to this slave. So we should sever the connection, as opposed to marking it
                    // temporarily offline, which still keeps the underlying channel open.
                    c.disconnect(d);
                    LOGGER.warning(Messages.ResponseTimeMonitor_MarkedOffline(c.getName()));
                }
            }
            return base;
        }

        public String getDisplayName() {
            return Messages.ResponseTimeMonitor_DisplayName();
        }

        @Override
        public NodeMonitor newInstance(StaplerRequest req, JSONObject formData) throws FormException {
            return new ResponseTimeMonitor();
        }
    };

    /**
     * First hop of the ping. It is constructed on the master with the current data and is never
     * executed as-is: {@link #writeReplace()} substitutes a {@link Step2} the moment this object
     * is serialized for the remote call, which timestamps the start of the round trip.
     */
    private static final class Step1 extends MasterToSlaveCallable<Data,IOException> {
        private Data cur;

        private Step1(Data cur) {
            this.cur = cur;
        }

        public Data call() {
            // this method must be being invoked locally, which means the roundtrip time is zero and zero forever
            return new Data(cur,0);
        }

        private Object writeReplace() {
            return new Step2(cur);
        }

        private static final long serialVersionUID = 1L;
    }

    /**
     * Second hop: {@code start} is captured on the master at serialization time (object
     * construction inside {@link Step1#writeReplace()}); {@link #call()} then runs on the agent
     * and simply echoes the start timestamp back inside a {@link Step3}.
     */
    private static final class Step2 extends MasterToSlaveCallable<Step3,IOException> {
        private final Data cur;
        // Wall-clock time on the master when the request was serialized for sending.
        private final long start = System.currentTimeMillis();

        public Step2(Data cur) {
            this.cur = cur;
        }

        public Step3 call() {
            // Runs on the agent; the round trip is completed when Step3 deserializes on the master.
            return new Step3(cur,start);
        }

        private static final long serialVersionUID = 1L;
    }

    /**
     * Final hop: when the response deserializes back on the master, {@link #readResolve()}
     * computes end - start (the full round-trip time) and folds it into a new {@link Data}.
     */
    private static final class Step3 implements Serializable {
        private final Data cur;
        private final long start;

        private Step3(Data cur, long start) {
            this.cur = cur;
            this.start = start;
        }

        private Object readResolve() {
            long end = System.currentTimeMillis();
            return new Data(cur,(end-start));
        }

        private static final long serialVersionUID = 1L;
    }

    /**
     * Immutable representation of the monitoring data.
     */
    @ExportedBean
    public static final class Data extends MonitorOfflineCause implements Serializable {
        /**
         * Record of the past 5 times. -1 if time out. Otherwise in milliseconds.
         * Old ones first.
         */
        private final long[] past5;

        /**
         * Appends a new sample to the history, keeping at most the 5 most recent samples and
         * dropping the oldest when the window is full.
         */
        private Data(Data old, long newDataPoint) {
            if(old==null)
                past5 = new long[] {newDataPoint};
            else {
                past5 = new long[Math.min(5,old.past5.length+1)];
                int copyLen = past5.length - 1;
                // Shift: keep the newest copyLen samples from the old history.
                System.arraycopy(old.past5, old.past5.length-copyLen, this.past5, 0, copyLen);
                past5[past5.length-1] = newDataPoint;
            }
        }

        /**
         * Computes the recurrence of the time out: the number of consecutive timeouts
         * (-1 entries) at the most recent end of the history.
         */
        private int failureCount() {
            int cnt=0;
            for(int i=past5.length-1; i>=0 && past5[i]<0; i--, cnt++)
                ;
            return cnt;
        }

        /**
         * Computes the average response time, counting each timeout as {@link #TIMEOUT} ms.
         */
        @Exported
        public long getAverage() {
            long total=0;
            for (long l : past5) {
                if(l<0) total += TIMEOUT;
                else total += l;
            }
            return total/past5.length;
        }

        /**
         * True when the whole 5-sample window consists of timeouts.
         */
        public boolean hasTooManyTimeouts() {
            return failureCount()>=5;
        }

        /**
         * HTML rendering of the data: an error span when timing out, otherwise the average.
         */
        @Override
        public String toString() {
            int fc = failureCount();
            if(fc>0)
                return Util.wrapToErrorSpan(Messages.ResponseTimeMonitor_TimeOut(fc));
            return getAverage()+"ms";
        }

        @Override
        public Class<? extends NodeMonitor> getTrigger() {
            return ResponseTimeMonitor.class;
        }

        private static final long serialVersionUID = 1L;
    }

    /**
     * Time out interval in milliseconds.
     */
    private static final long TIMEOUT = 5000;

    private static final Logger LOGGER = Logger.getLogger(ResponseTimeMonitor.class.getName());
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.util;
import static org.apache.hadoop.yarn.util.StringHelper._split;
import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.util.StringHelper.sjoin;
import java.io.File;
import java.util.HashMap;
import java.util.Iterator;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.StringInterner;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
/**
* YARN internal application-related utilities
*/
@Private
public class Apps {
  // Prefix token of an application id string, e.g. "application_<timestamp>_<id>".
  public static final String APP = "application";
  public static final String ID = "ID";
  // Matches a single environment-variable reference; the exact syntax comes from
  // Shell.getEnvironmentVariableRegex() and is platform-dependent (TODO confirm).
  private static final Pattern VAR_SUBBER =
      Pattern.compile(Shell.getEnvironmentVariableRegex());
  // Splits a comma-separated list of NAME=value pairs: group 1 is the variable
  // name, group 2 the (possibly empty) value, which must not contain a comma.
  private static final Pattern VARVAL_SPLITTER = Pattern.compile(
      "(?<=^|,)" // preceded by ',' or line begin
      + '(' + Shell.ENV_NAME_REGEX + ')' // var group
      + '='
      + "([^,]*)" // val group
      );
public static ApplicationId toAppID(String aid) {
Iterator<String> it = _split(aid).iterator();
return toAppID(APP, aid, it);
}
public static ApplicationId toAppID(String prefix, String s, Iterator<String> it) {
if (!it.hasNext() || !it.next().equals(prefix)) {
throwParseException(sjoin(prefix, ID), s);
}
shouldHaveNext(prefix, s, it);
ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()),
Integer.parseInt(it.next()));
return appId;
}
public static void shouldHaveNext(String prefix, String s, Iterator<String> it) {
if (!it.hasNext()) {
throwParseException(sjoin(prefix, ID), s);
}
}
public static void throwParseException(String name, String s) {
throw new YarnRuntimeException(join("Error parsing ", name, ": ", s));
}
private static void setEnvFromString(Map<String, String> env,
String envVar, String varString, String classPathSeparator) {
Matcher m = VAR_SUBBER.matcher(varString);
StringBuffer sb = new StringBuffer();
while (m.find()) {
String var = m.group(1);
// do variable substitution of $var from passed in environment or from
// system environment and default to empty string if undefined in both.
String replace = env.get(var);
if (replace == null) {
replace = System.getenv(var);
}
if (replace == null) {
replace = "";
}
m.appendReplacement(sb, Matcher.quoteReplacement(replace));
}
m.appendTail(sb);
addToEnvironment(env, envVar, sb.toString(), classPathSeparator);
}
public static void setEnvFromInputString(Map<String, String> env,
String envString, String classPathSeparator) {
if (envString != null && envString.length() > 0) {
Matcher varValMatcher = VARVAL_SPLITTER.matcher(envString);
while (varValMatcher.find()) {
String envVar = varValMatcher.group(1);
String varString = varValMatcher.group(2);
setEnvFromString(env, envVar, varString, classPathSeparator);
}
}
}
/**
* Set environment from string without doing any variable substitution.
* Used internally to avoid double expansion.
* @param env environment to set
* @param envString comma-separated k=v pairs.
* @param classPathSeparator Separator to use when appending to an existing
* environment variable.
*/
private static void setEnvFromInputStringNoExpand(Map<String, String> env,
String envString, String classPathSeparator) {
if (envString != null && envString.length() > 0) {
Matcher varValMatcher = VARVAL_SPLITTER.matcher(envString);
while (varValMatcher.find()) {
String envVar = varValMatcher.group(1);
String varString = varValMatcher.group(2);
addToEnvironment(env, envVar, varString, classPathSeparator);
}
}
}
/**
* Set environment variables from map of input properties.
* @param env environment to update
* @param inputMap environment variable property keys and values
* @param classPathSeparator separator to use when appending to an existing
* environment variable
*/
private static void setEnvFromInputStringMap(Map<String, String> env,
Map<String, String> inputMap, String classPathSeparator) {
for(Map.Entry<String, String> inputVar: inputMap.entrySet()) {
String envVar = inputVar.getKey();
String varString = inputVar.getValue();
setEnvFromString(env, envVar, varString, classPathSeparator);
}
}
/**
* Set environment variables from the given environment input property.
* For example, given the property mapreduce.map.env, this method
* will extract environment variables from:
* the comma-separated string value of mapreduce.map.env, and
* the values of any properties of the form mapreduce.map.env.VAR_NAME
* Variables specified via the latter syntax take precedence over those
* specified using the former syntax.
* @param env the environment to update
* @param propName the name of the property
* @param defaultPropValue the default value for propName
* @param conf configuration containing properties
* @param classPathSeparator Separator used when appending to an existing var
*/
public static void setEnvFromInputProperty(Map<String, String> env,
String propName, String defaultPropValue, Configuration conf,
String classPathSeparator) {
String envString = conf.get(propName, defaultPropValue);
// Get k,v pairs from string into a tmp env. Note that we don't want
// to expand the env var values, because we will do that below -
// don't want to do it twice.
Map<String, String> tmpEnv = new HashMap<String, String>();
Apps.setEnvFromInputStringNoExpand(tmpEnv, envString, classPathSeparator);
// Get map of props with prefix propName.
// (e.g., map.reduce.env.ENV_VAR_NAME=value)
Map<String, String> inputMap = conf.getPropsWithPrefix(propName + ".");
// Entries from map should override entries from input string.
tmpEnv.putAll(inputMap);
// Add them to the environment
setEnvFromInputStringMap(env, tmpEnv, classPathSeparator);
}
/**
*
* @param envString String containing env variable definitions
* @return Set of environment variable names
*/
private static Set<String> getEnvVarsFromInputString(String envString) {
Set<String> envSet = new HashSet<>();
if (envString != null && envString.length() > 0) {
Matcher varValMatcher = VARVAL_SPLITTER.matcher(envString);
while (varValMatcher.find()) {
String envVar = varValMatcher.group(1);
envSet.add(envVar);
}
}
return envSet;
}
/**
* Return the list of environment variable names specified in the
* given property or default string and those specified individually
* with the propname.VARNAME syntax (e.g., mapreduce.map.env.VARNAME=value).
* @param propName the name of the property
* @param defaultPropValue the default value for propName
* @param conf configuration containing properties
* @return Set of environment variable names
*/
public static Set<String> getEnvVarsFromInputProperty(
String propName, String defaultPropValue, Configuration conf) {
String envString = conf.get(propName, defaultPropValue);
Set<String> varSet = getEnvVarsFromInputString(envString);
Map<String, String> propMap = conf.getPropsWithPrefix(propName + ".");
varSet.addAll(propMap.keySet());
return varSet;
}
/**
* This older version of this method is kept around for compatibility
* because downstream frameworks like Spark and Tez have been using it.
* Downstream frameworks are expected to move off of it.
*/
@Deprecated
public static void setEnvFromInputString(Map<String, String> env,
String envString) {
setEnvFromInputString(env, envString, File.pathSeparator);
}
@Public
@Unstable
public static void addToEnvironment(
Map<String, String> environment,
String variable, String value, String classPathSeparator) {
String val = environment.get(variable);
if (val == null) {
val = value;
} else {
val = val + classPathSeparator + value;
}
environment.put(StringInterner.weakIntern(variable),
StringInterner.weakIntern(val));
}
/**
* This older version of this method is kept around for compatibility
* because downstream frameworks like Spark and Tez have been using it.
* Downstream frameworks are expected to move off of it.
*/
@Deprecated
public static void addToEnvironment(
Map<String, String> environment,
String variable, String value) {
addToEnvironment(environment, variable, value, File.pathSeparator);
}
public static String crossPlatformify(String var) {
return ApplicationConstants.PARAMETER_EXPANSION_LEFT + var
+ ApplicationConstants.PARAMETER_EXPANSION_RIGHT;
}
// Check if should black list the node based on container exit status
@Private
@Unstable
public static boolean shouldCountTowardsNodeBlacklisting(int exitStatus) {
switch (exitStatus) {
case ContainerExitStatus.PREEMPTED:
case ContainerExitStatus.KILLED_BY_RESOURCEMANAGER:
case ContainerExitStatus.KILLED_BY_APPMASTER:
case ContainerExitStatus.KILLED_AFTER_APP_COMPLETION:
case ContainerExitStatus.ABORTED:
// Neither the app's fault nor the system's fault. This happens by design,
// so no need for skipping nodes
return false;
case ContainerExitStatus.DISKS_FAILED:
// This container is marked with this exit-status means that the node is
// already marked as unhealthy given that most of the disks failed. So, no
// need for any explicit skipping of nodes.
return false;
case ContainerExitStatus.KILLED_EXCEEDED_VMEM:
case ContainerExitStatus.KILLED_EXCEEDED_PMEM:
// No point in skipping the node as it's not the system's fault
return false;
case ContainerExitStatus.SUCCESS:
return false;
case ContainerExitStatus.INVALID:
// Ideally, this shouldn't be considered for skipping a node. But in
// reality, it seems like there are cases where we are not setting
// exit-code correctly and so it's better to be conservative. See
// YARN-4284.
return true;
default:
return true;
}
}
/**
* Returns whether a given application state is final: FINISHED,
* FAILED or KILLED.
*
* @param appState application state
* @return whether the appState is final.
*/
public static boolean isApplicationFinalState(YarnApplicationState appState) {
return appState == YarnApplicationState.FINISHED
|| appState == YarnApplicationState.FAILED
|| appState == YarnApplicationState.KILLED;
}
}
| |
package io.github.samurainate.chestdrop;
import java.util.Arrays;
import java.util.HashMap;
import org.bukkit.Chunk;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.Server;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.block.Chest;
import org.bukkit.entity.Player;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
public class Utils {

    /** Broadcasts a message to every online player currently in the named world. */
    public static void worldBroadcast(Server server, String worldname, String format) {
        World world = server.getWorld(worldname);
        for (Player player : server.getOnlinePlayers()) {
            if (player.getWorld().equals(world))
                player.sendMessage(format);
        }
    }

    /** Counts the online players currently in the named world. */
    public static int getPlayerCount(Server server, String worldname) {
        int count = 0;
        World world = server.getWorld(worldname);
        for (Player player : server.getOnlinePlayers()) {
            if (player.getWorld().equals(world))
                count++;
        }
        return count;
    }

    /**
     * @param config
     *            the PluginConfig
     * @param worldname
     *            the world to get drop location in
     * @return a random drop location NOTE: If WorldBorder integration is
     *         enabled AND THERE IS A BORDER DEFINED, then drops are centered on
     *         the center of the world's border. Otherwise, the drops are
     *         centered on the world spawn location.
     */
    public static double[] getDrop(PluginConfig config, String worldname) {
        double[] drop = null;
        WorldConfig worldConfig = config.getWorldConfig(worldname);
        int maxRange = worldConfig.getMaxRangeForDrops();
        // try world border method first
        if (config.isWorldBorderEnabled() && config.getWb().worldHasBorder(worldname)) {
            if (config.isTownyEnabled()) {
                /*
                 * Try to get a wild spawn location but take whatever after 100
                 * iterations
                 */
                for (int i = 0; i < 100; i++) {
                    drop = config.getWb().randomCoordWithinBordersOf(worldname, maxRange);
                    if (config.getTowny()
                            .isWild(new Location(config.getServer().getWorld(worldname), drop[0], 0, drop[1])))
                        break;
                }
            } else {
                drop = config.getWb().randomCoordWithinBordersOf(worldname, maxRange);
            }
            return drop;
        }
        // either no world border integration or no border defined
        /*
         * Try to get a wild spawn location but take whatever after 100
         * iterations
         */
        if (config.isTownyEnabled()) {
            for (int i = 0; i < 100; i++) {
                drop = randomXZ(config, worldname, maxRange);
                if (config.getTowny().isWild(new Location(config.getServer().getWorld(worldname), drop[0], 0, drop[1])))
                    break;
            }
        } else {
            drop = randomXZ(config, worldname, maxRange);
        }
        return drop;
    }

    /**
     * Picks a random (x, z) block coordinate within maxRange of the world
     * spawn point.
     * FIX: the random offset now spans the full square centered on spawn.
     * Previously nextInt(maxRange) + (spawn - maxRange) only ever produced
     * coordinates in [spawn - maxRange, spawn - 1], i.e. the quadrant
     * below/left of spawn, never at or beyond spawn itself.
     */
    private static double[] randomXZ(PluginConfig config, String worldname, int maxRange) {
        double[] drop = new double[2];
        Location l = config.getServer().getWorld(worldname).getSpawnLocation();
        int minx = (int) (l.getX() - maxRange);
        int minz = (int) (l.getZ() - maxRange);
        int span = 2 * maxRange + 1; // inclusive range [-maxRange, +maxRange]
        drop[0] = config.getRandom().nextInt(span) + minx;
        drop[1] = config.getRandom().nextInt(span) + minz;
        return drop;
    }

    /**
     * Schedules a repeating chest-drop task for every configured world.
     * FIX: the isEnabled() check previously had an empty body and the task was
     * scheduled unconditionally; disabled worlds now get no task.
     */
    public static void scheduleTasks(PluginConfig config) {
        for (String worldname : config.getConfiguredWorlds()) {
            WorldConfig worldConfig = config.getWorldConfig(worldname);
            if (worldConfig.isEnabled()) {
                config.getServer().getScheduler().scheduleSyncRepeatingTask(config.getPlugin(),
                        new ChestDropTask(config, worldname), worldConfig.getDropInterval(),
                        worldConfig.getDropInterval());
            }
        }
    }

    /**
     * Drops a marked treasure chest at a random surface location in the named
     * world and announces it to the world's players.
     *
     * @return true if a chest was placed, false if the world is unknown or the
     *         drop column was empty all the way to the void
     */
    public static boolean dropChest(PluginConfig config, String worldname) {
        /* Get random drop coords */
        double[] coords = Utils.getDrop(config, worldname);
        World world = config.getServer().getWorld(worldname);
        if (world == null) {
            return false;
        }
        /* Load drop chunk */
        loadChunk(world, coords);
        /* Drop to ground loop: scan down from max height to the first solid block */
        Block block = null;
        int y;
        for (y = world.getMaxHeight(); y > 0; y--) {
            block = world.getBlockAt((int) coords[0], y - 1, (int) coords[1]);
            switch (block.getType()) {
            /* fall through air, water, leaves, other soft things */
            case AIR:
            case WATER:
            case STATIONARY_WATER:
            case ICE:
            case SNOW:
            case LEAVES:
            case YELLOW_FLOWER:
            case RED_ROSE:
            case DEAD_BUSH:
            case LONG_GRASS:
            case DOUBLE_PLANT:
            case SUGAR_CANE_BLOCK:
            case MELON:
            case PUMPKIN:
            case WATER_LILY:
            case JACK_O_LANTERN:
            case LEAVES_2:
            case SAPLING:
            case VINE:
            case WEB:
            case BROWN_MUSHROOM:
            case RED_MUSHROOM:
            case CACTUS:
            case COCOA:
                continue;
            /* shift so we don't land on or in trees (and other things) */
            case LOG:
            case LOG_2:
            case CROPS:
            case MELON_STEM:
            case PUMPKIN_STEM:
            /* Other denizens of Roofed Forest */
            case HUGE_MUSHROOM_1:
            case HUGE_MUSHROOM_2:
            /* Village Roof */
            case WOOD:
            case WOOD_STAIRS:
            /* Desert Temple */
            case SANDSTONE:
            case SANDSTONE_STAIRS:
            /* Hot Stuff */
            case FIRE:
            case LAVA:
            case STATIONARY_LAVA:
            /* Stuff I don't want to see a chest on top of */
            case CARPET:
            case SIGN:
            case SIGN_POST:
            case ARMOR_STAND:
            case RAILS:
            case ACTIVATOR_RAIL:
            case DETECTOR_RAIL:
            case POWERED_RAIL:
            case REDSTONE_WIRE:
            case DRAGON_EGG:
                /* could be more here but I'll wait for complaints */
                /* recurse: re-roll the whole drop at a new random location */
                return dropChest(config, worldname);
            default:
                break;
            }
            break;
        }
        if (y == 0) {
            config.getServer().getLogger().info("[ChestDrop] chest fell into the void...");
            return false; // Abort if we make it to the void
        }
        /* Place Chest with Marker: glowstone beacon on two fence posts above the chest */
        block = world.getBlockAt((int) coords[0], y + 3, (int) coords[1]);
        block.setType(Material.GLOWSTONE);
        block = world.getBlockAt((int) coords[0], y + 2, (int) coords[1]);
        block.setType(Material.FENCE);
        block = world.getBlockAt((int) coords[0], y + 1, (int) coords[1]);
        block.setType(Material.FENCE);
        block = world.getBlockAt((int) coords[0], y, (int) coords[1]);
        block.setType(Material.CHEST);
        /* Open Chest */
        Chest chest = (Chest) block.getState();
        Inventory in = chest.getBlockInventory();
        /* Put Treasure in Chest: 1-5 hidden gems */
        in.setItem(in.firstEmpty(), config.gemModel().hiddenGem(1 + config.getRandom().nextInt(5)));
        /* Notify players */
        Utils.worldBroadcast(config.getServer(), worldname,
                String.format("Chest dropped at %1.0fX, %1.0fZ", coords[0], coords[1]));
        return true;
    }

    /**
     * Ensures the chunk containing the given BLOCK coordinates is loaded.
     * FIX: World.getChunkAt(int, int) takes CHUNK coordinates, so the block
     * coordinates must be shifted right by 4 (divided by 16). Previously the
     * raw block coordinates were passed directly, loading the wrong chunk.
     */
    public static Chunk loadChunk(World world, double[] coords) {
        Chunk chunk = world.getChunkAt(((int) coords[0]) >> 4, ((int) coords[1]) >> 4);
        if (!chunk.isLoaded()) {
            chunk.load(true);
        }
        return chunk;
    }

    /** Opens a read-only inventory listing every configured trade for the player. */
    public static void displayTrades(PluginConfig pluginConfig, Player player) {
        Inventory inv = pluginConfig.getServer().createInventory(null, 27, "Trade " + pluginConfig.gemModel().getName());
        for (Trade trade : pluginConfig.getTrades()) {
            ItemStack item = trade.getItems().clone();
            ItemMeta meta = item.getItemMeta();
            meta.setLore(Arrays.asList("Trade for " + trade.getCost() + " " + pluginConfig.gemModel().getName(), trade.getName()));
            item.setItemMeta(meta);
            inv.setItem(inv.firstEmpty(), item);
        }
        player.openInventory(inv);
    }

    /** Opens an admin inventory where clicking a trade deletes it. */
    public static void manageTrades(PluginConfig pluginConfig, Player player) {
        Inventory inv = pluginConfig.getServer().createInventory(null, 27, "Delete " + pluginConfig.gemModel().getName() + " Trades");
        for (Trade trade : pluginConfig.getTrades()) {
            ItemStack item = trade.getItems().clone();
            ItemMeta meta = item.getItemMeta();
            meta.setLore(Arrays.asList("Click to delete", trade.getName()));
            item.setItemMeta(meta);
            inv.setItem(inv.firstEmpty(), item);
        }
        player.openInventory(inv);
    }

    /** Totals the hidden-gem emeralds in the player's inventory. */
    public static int gemCount(Player p, GemModel gemModel) {
        int gems = 0;
        Inventory inv = p.getInventory();
        HashMap<Integer, ? extends ItemStack> emeralds = inv.all(Material.EMERALD);
        for (Integer key : emeralds.keySet()) {
            /* check balance */
            ItemStack emerald = emeralds.get(key);
            if (gemModel.isHiddenGem(emerald))
                gems += emerald.getAmount();
        }
        return gems;
    }

    /**
     * Deducts the trade's gem cost from the player's inventory and gives the
     * traded item.
     *
     * @return true if the trade completed, false if the player cannot afford it
     */
    public static boolean executeTrade(Player p, Trade trade, GemModel gemModel) {
        int costToGo = trade.getCost();
        // FIX: verify the player can afford the trade before deducting.
        // Previously an insufficient balance still removed every matching gem
        // stack before returning false, destroying the gems without a payout.
        if (gemCount(p, gemModel) < costToGo) {
            return false;
        }
        Inventory inv = p.getInventory();
        HashMap<Integer, ? extends ItemStack> emeralds = inv.all(Material.EMERALD);
        for (Integer key : emeralds.keySet()) {
            /* deduct from this stack */
            ItemStack emerald = emeralds.get(key);
            if (gemModel.isHiddenGem(emerald)) {
                if (emerald.getAmount() > costToGo) {
                    emerald.setAmount(emerald.getAmount() - costToGo);
                    costToGo = 0;
                } else if (emerald.getAmount() == costToGo) {
                    inv.setItem(key, null);
                    costToGo = 0;
                } else {
                    costToGo -= emerald.getAmount();
                    inv.setItem(key, null);
                }
            }
            if (costToGo == 0) {
                ItemStack item = trade.getItems().clone();
                giveItem(p, item);
                return true;
            }
        }
        return false;
    }

    /** Adds an item to the player's inventory, dropping any overflow at their feet. */
    public static void giveItem(Player p, ItemStack item) {
        HashMap<Integer, ItemStack> drops = p.getInventory().addItem(item);
        for (ItemStack stack : drops.values()) {
            p.getWorld().dropItem(p.getLocation(), stack);
        }
        p.updateInventory();
    }

    /** Opens a confirm/cancel dialog for deleting a trade (buttons not yet implemented). */
    public static void displayConfirmDelete(PluginConfig pluginConfig, Player p, ItemStack item) {
        Inventory inv = pluginConfig.getServer().createInventory(null, 27, "Confirm Delete Trade?");
        ItemStack ok = null;
        ItemStack cancel = null;
        // TODO: finish here
        inv.setItem(2 * 9 + 2, ok);
        inv.setItem(2 * 9 + 7, cancel);
        p.openInventory(inv);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.phrase;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.ShingleTokenFilterFactory;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.Template;
import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.SuggestUtils;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator;
import java.io.IOException;
public final class PhraseSuggestParser implements SuggestContextParser {

    private PhraseSuggester suggester;

    public PhraseSuggestParser(PhraseSuggester suggester) {
        this.suggester = suggester;
    }

    /**
     * Parses the body of a {@code phrase} suggester definition into a
     * {@link PhraseSuggestionContext}, validating option ranges, building any
     * direct candidate generators and smoothing model, and applying defaults
     * (analyzer, gram size, default generator) afterwards.
     *
     * @throws IllegalArgumentException on unknown fields or out-of-range values
     */
    @Override
    public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService,
            HasContextAndHeaders headersContext) throws IOException {
        PhraseSuggestionContext suggestion = new PhraseSuggestionContext(suggester);
        ParseFieldMatcher parseFieldMatcher = mapperService.getIndexSettings().getParseFieldMatcher();
        XContentParser.Token token;
        String fieldName = null;
        boolean gramSizeSet = false;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                fieldName = parser.currentName();
            } else if (token.isValue()) {
                // Scalar options; common ones handled by parseSuggestContext first.
                if (!SuggestUtils.parseSuggestContext(parser, mapperService, fieldName, suggestion, parseFieldMatcher)) {
                    if ("real_word_error_likelihood".equals(fieldName) || "realWorldErrorLikelihood".equals(fieldName)) {
                        suggestion.setRealWordErrorLikelihood(parser.floatValue());
                        if (suggestion.realworldErrorLikelyhood() <= 0.0) {
                            throw new IllegalArgumentException("real_word_error_likelihood must be > 0.0");
                        }
                    } else if ("confidence".equals(fieldName)) {
                        suggestion.setConfidence(parser.floatValue());
                        if (suggestion.confidence() < 0.0) {
                            throw new IllegalArgumentException("confidence must be >= 0.0");
                        }
                    } else if ("separator".equals(fieldName)) {
                        suggestion.setSeparator(new BytesRef(parser.text()));
                    } else if ("max_errors".equals(fieldName) || "maxErrors".equals(fieldName)) {
                        suggestion.setMaxErrors(parser.floatValue());
                        if (suggestion.maxErrors() <= 0.0) {
                            throw new IllegalArgumentException("max_error must be > 0.0");
                        }
                    } else if ("gram_size".equals(fieldName) || "gramSize".equals(fieldName)) {
                        suggestion.setGramSize(parser.intValue());
                        if (suggestion.gramSize() < 1) {
                            throw new IllegalArgumentException("gram_size must be >= 1");
                        }
                        gramSizeSet = true;
                    } else if ("force_unigrams".equals(fieldName) || "forceUnigrams".equals(fieldName)) {
                        suggestion.setRequireUnigram(parser.booleanValue());
                    } else if ("token_limit".equals(fieldName) || "tokenLimit".equals(fieldName)) {
                        int tokenLimit = parser.intValue();
                        if (tokenLimit <= 0) {
                            throw new IllegalArgumentException("token_limit must be >= 1");
                        }
                        suggestion.setTokenLimit(tokenLimit);
                    } else {
                        throw new IllegalArgumentException("suggester[phrase] doesn't support field [" + fieldName + "]");
                    }
                }
            } else if (token == Token.START_ARRAY) {
                if ("direct_generator".equals(fieldName) || "directGenerator".equals(fieldName)) {
                    // for now we only have a single type of generators
                    while ((token = parser.nextToken()) == Token.START_OBJECT) {
                        PhraseSuggestionContext.DirectCandidateGenerator generator = new PhraseSuggestionContext.DirectCandidateGenerator();
                        while ((token = parser.nextToken()) != Token.END_OBJECT) {
                            if (token == XContentParser.Token.FIELD_NAME) {
                                fieldName = parser.currentName();
                            }
                            if (token.isValue()) {
                                parseCandidateGenerator(parser, mapperService, fieldName, generator, parseFieldMatcher);
                            }
                        }
                        verifyGenerator(generator);
                        suggestion.addGenerator(generator);
                    }
                } else {
                    throw new IllegalArgumentException("suggester[phrase] doesn't support array field [" + fieldName + "]");
                }
            } else if (token == Token.START_OBJECT) {
                if ("smoothing".equals(fieldName)) {
                    parseSmoothingModel(parser, suggestion, fieldName);
                } else if ("highlight".equals(fieldName)) {
                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            fieldName = parser.currentName();
                        } else if (token.isValue()) {
                            if ("pre_tag".equals(fieldName) || "preTag".equals(fieldName)) {
                                suggestion.setPreTag(parser.utf8Bytes());
                            } else if ("post_tag".equals(fieldName) || "postTag".equals(fieldName)) {
                                suggestion.setPostTag(parser.utf8Bytes());
                            } else {
                                throw new IllegalArgumentException(
                                        "suggester[phrase][highlight] doesn't support field [" + fieldName + "]");
                            }
                        }
                    }
                } else if ("collate".equals(fieldName)) {
                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            fieldName = parser.currentName();
                        } else if ("query".equals(fieldName)) {
                            if (suggestion.getCollateQueryScript() != null) {
                                throw new IllegalArgumentException("suggester[phrase][collate] query already set, doesn't support additional [" + fieldName + "]");
                            }
                            Template template = Template.parse(parser, parseFieldMatcher);
                            CompiledScript compiledScript = suggester.scriptService().compile(template, ScriptContext.Standard.SEARCH,
                                    headersContext);
                            suggestion.setCollateQueryScript(compiledScript);
                        } else if ("params".equals(fieldName)) {
                            suggestion.setCollateScriptParams(parser.map());
                        } else if ("prune".equals(fieldName)) {
                            if (parser.isBooleanValue()) {
                                suggestion.setCollatePrune(parser.booleanValue());
                            } else {
                                throw new IllegalArgumentException("suggester[phrase][collate] prune must be either 'true' or 'false'");
                            }
                        } else {
                            throw new IllegalArgumentException(
                                    "suggester[phrase][collate] doesn't support field [" + fieldName + "]");
                        }
                    }
                } else {
                    // FIX: this branch handles an unexpected OBJECT field; the
                    // message previously said "array field" (copy-paste from the
                    // START_ARRAY branch above), which was misleading.
                    throw new IllegalArgumentException("suggester[phrase] doesn't support object field [" + fieldName + "]");
                }
            } else {
                throw new IllegalArgumentException("suggester[phrase] doesn't support field [" + fieldName + "]");
            }
        }
        // ---- Validation and defaults ----
        if (suggestion.getField() == null) {
            throw new IllegalArgumentException("The required field option is missing");
        }
        MappedFieldType fieldType = mapperService.smartNameFieldType(suggestion.getField());
        if (fieldType == null) {
            throw new IllegalArgumentException("No mapping found for field [" + suggestion.getField() + "]");
        } else if (suggestion.getAnalyzer() == null) {
            // no analyzer name passed in, so try the field's analyzer, or the default analyzer
            if (fieldType.searchAnalyzer() == null) {
                suggestion.setAnalyzer(mapperService.searchAnalyzer());
            } else {
                suggestion.setAnalyzer(fieldType.searchAnalyzer());
            }
        }
        if (suggestion.model() == null) {
            suggestion.setModel(StupidBackoffScorer.FACTORY);
        }
        if (!gramSizeSet || suggestion.generators().isEmpty()) {
            final ShingleTokenFilterFactory.Factory shingleFilterFactory = SuggestUtils.getShingleFilterFactory(suggestion.getAnalyzer());
            if (!gramSizeSet) {
                // try to detect the shingle size
                if (shingleFilterFactory != null) {
                    suggestion.setGramSize(shingleFilterFactory.getMaxShingleSize());
                    // NOTE(review): getAnalyzer() was assigned above whenever it
                    // was null, so this null check may never trigger — confirm intent.
                    if (suggestion.getAnalyzer() == null && shingleFilterFactory.getMinShingleSize() > 1 && !shingleFilterFactory.getOutputUnigrams()) {
                        throw new IllegalArgumentException("The default analyzer for field: [" + suggestion.getField() + "] doesn't emit unigrams. If this is intentional try to set the analyzer explicitly");
                    }
                }
            }
            if (suggestion.generators().isEmpty()) {
                if (shingleFilterFactory != null && shingleFilterFactory.getMinShingleSize() > 1 && !shingleFilterFactory.getOutputUnigrams() && suggestion.getRequireUnigram()) {
                    throw new IllegalArgumentException("The default candidate generator for phrase suggest can't operate on field: [" + suggestion.getField() + "] since it doesn't emit unigrams. If this is intentional try to set the candidate generator field explicitly");
                }
                // use a default generator on the same field
                DirectCandidateGenerator generator = new DirectCandidateGenerator();
                generator.setField(suggestion.getField());
                suggestion.addGenerator(generator);
            }
        }
        return suggestion;
    }

    /**
     * Parses the "smoothing" object: exactly one of "linear", "laplace" or
     * "stupid_backoff", installing the corresponding {@link WordScorer} factory.
     */
    public void parseSmoothingModel(XContentParser parser, PhraseSuggestionContext suggestion, String fieldName) throws IOException {
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                fieldName = parser.currentName();
                if ("linear".equals(fieldName)) {
                    ensureNoSmoothing(suggestion);
                    // lambdas[0..2] = trigram, bigram, unigram weights
                    final double[] lambdas = new double[3];
                    while ((token = parser.nextToken()) != Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            fieldName = parser.currentName();
                        }
                        if (token.isValue()) {
                            if ("trigram_lambda".equals(fieldName) || "trigramLambda".equals(fieldName)) {
                                lambdas[0] = parser.doubleValue();
                                if (lambdas[0] < 0) {
                                    throw new IllegalArgumentException("trigram_lambda must be positive");
                                }
                            } else if ("bigram_lambda".equals(fieldName) || "bigramLambda".equals(fieldName)) {
                                lambdas[1] = parser.doubleValue();
                                if (lambdas[1] < 0) {
                                    throw new IllegalArgumentException("bigram_lambda must be positive");
                                }
                            } else if ("unigram_lambda".equals(fieldName) || "unigramLambda".equals(fieldName)) {
                                lambdas[2] = parser.doubleValue();
                                if (lambdas[2] < 0) {
                                    throw new IllegalArgumentException("unigram_lambda must be positive");
                                }
                            } else {
                                throw new IllegalArgumentException(
                                        "suggester[phrase][smoothing][linear] doesn't support field [" + fieldName + "]");
                            }
                        }
                    }
                    double sum = 0.0d;
                    for (int i = 0; i < lambdas.length; i++) {
                        sum += lambdas[i];
                    }
                    // tolerate small floating-point error in the user-supplied weights
                    if (Math.abs(sum - 1.0) > 0.001) {
                        throw new IllegalArgumentException("linear smoothing lambdas must sum to 1");
                    }
                    suggestion.setModel(new WordScorer.WordScorerFactory() {
                        @Override
                        public WordScorer newScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
                                throws IOException {
                            return new LinearInterpoatingScorer(reader, terms, field, realWordLikelyhood, separator, lambdas[0], lambdas[1],
                                    lambdas[2]);
                        }
                    });
                } else if ("laplace".equals(fieldName)) {
                    ensureNoSmoothing(suggestion);
                    double theAlpha = 0.5; // default additive-smoothing constant
                    while ((token = parser.nextToken()) != Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            fieldName = parser.currentName();
                        }
                        if (token.isValue() && "alpha".equals(fieldName)) {
                            theAlpha = parser.doubleValue();
                        }
                    }
                    final double alpha = theAlpha;
                    suggestion.setModel(new WordScorer.WordScorerFactory() {
                        @Override
                        public WordScorer newScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
                                throws IOException {
                            return new LaplaceScorer(reader, terms, field, realWordLikelyhood, separator, alpha);
                        }
                    });
                } else if ("stupid_backoff".equals(fieldName) || "stupidBackoff".equals(fieldName)) {
                    ensureNoSmoothing(suggestion);
                    double theDiscount = 0.4; // default backoff discount
                    while ((token = parser.nextToken()) != Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            fieldName = parser.currentName();
                        }
                        if (token.isValue() && "discount".equals(fieldName)) {
                            theDiscount = parser.doubleValue();
                        }
                    }
                    final double discount = theDiscount;
                    suggestion.setModel(new WordScorer.WordScorerFactory() {
                        @Override
                        public WordScorer newScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
                                throws IOException {
                            return new StupidBackoffScorer(reader, terms, field, realWordLikelyhood, separator, discount);
                        }
                    });
                } else {
                    throw new IllegalArgumentException("suggester[phrase] doesn't support object field [" + fieldName + "]");
                }
            }
        }
    }

    /** Rejects a second smoothing model definition. */
    private void ensureNoSmoothing(PhraseSuggestionContext suggestion) {
        if (suggestion.model() != null) {
            throw new IllegalArgumentException("only one smoothing model supported");
        }
    }

    /** Verifies required generator options; only "field" is mandatory. */
    private void verifyGenerator(PhraseSuggestionContext.DirectCandidateGenerator suggestion) {
        // Verify options and set defaults
        if (suggestion.field() == null) {
            throw new IllegalArgumentException("The required field option is missing");
        }
    }

    /**
     * Parses one scalar option of a direct candidate generator. Common
     * spell-checker settings are handled by parseDirectSpellcheckerSettings;
     * the rest (field, size, pre/post filter analyzers) are handled here.
     */
    private void parseCandidateGenerator(XContentParser parser, MapperService mapperService, String fieldName,
            PhraseSuggestionContext.DirectCandidateGenerator generator, ParseFieldMatcher parseFieldMatcher) throws IOException {
        if (!SuggestUtils.parseDirectSpellcheckerSettings(parser, fieldName, generator, parseFieldMatcher)) {
            if ("field".equals(fieldName)) {
                generator.setField(parser.text());
                if (mapperService.smartNameFieldType(generator.field()) == null) {
                    throw new IllegalArgumentException("No mapping found for field [" + generator.field() + "]");
                }
            } else if ("size".equals(fieldName)) {
                generator.size(parser.intValue());
            } else if ("pre_filter".equals(fieldName) || "preFilter".equals(fieldName)) {
                String analyzerName = parser.text();
                Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName);
                if (analyzer == null) {
                    throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists");
                }
                generator.preFilter(analyzer);
            } else if ("post_filter".equals(fieldName) || "postFilter".equals(fieldName)) {
                String analyzerName = parser.text();
                Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName);
                if (analyzer == null) {
                    throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists");
                }
                generator.postFilter(analyzer);
            } else {
                throw new IllegalArgumentException("CandidateGenerator doesn't support [" + fieldName + "]");
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.olingo4;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.apache.camel.Category;
import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.component.olingo4.internal.Olingo4ApiCollection;
import org.apache.camel.component.olingo4.internal.Olingo4ApiName;
import org.apache.camel.component.olingo4.internal.Olingo4Constants;
import org.apache.camel.component.olingo4.internal.Olingo4PropertiesHelper;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.support.component.AbstractApiEndpoint;
import org.apache.camel.support.component.ApiMethod;
import org.apache.camel.support.component.ApiMethodPropertiesHelper;
/**
 * Communicate with OData 4.0 services using Apache Olingo OData API.
 *
 * Camel endpoint for the olingo4 component. Besides the regular endpoint
 * options, any URI parameter that is not a recognized endpoint property is
 * collected into a single "queryParams" map and passed to the OData call
 * (see {@link #parseQueryParams(Map)}).
 */
@UriEndpoint(firstVersion = "2.19.0", scheme = "olingo4", title = "Olingo4", syntax = "olingo4:apiName/methodName", category = {Category.CLOUD})
public class Olingo4Endpoint extends AbstractApiEndpoint<Olingo4ApiName, Olingo4Configuration> {

    // Well-known property names exchanged between this endpoint and its
    // producer/consumer via the exchange-properties map.
    protected static final String RESOURCE_PATH_PROPERTY = "resourcePath";
    protected static final String RESPONSE_HANDLER_PROPERTY = "responseHandler";
    protected static final String SERVICE_URI_PROPERTY = "serviceUri";
    protected static final String FILTER_ALREADY_SEEN = "filterAlreadySeen";
    private static final String KEY_PREDICATE_PROPERTY = "keyPredicate";
    private static final String QUERY_PARAMS_PROPERTY = "queryParams";
    private static final String ENDPOINT_HTTP_HEADERS_PROPERTY = "endpointHttpHeaders";
    private static final String READ_METHOD = "read";
    private static final String EDM_PROPERTY = "edm";
    private static final String DATA_PROPERTY = "data";
    private static final String DELETE_METHOD = "delete";
    // unparsed variants
    private static final String UREAD_METHOD = "uread";

    // Names recognized as endpoint options; every other URI parameter is
    // treated as an OData query parameter in parseQueryParams().
    private final Set<String> endpointPropertyNames;

    @UriParam
    private Olingo4Configuration configuration;

    // Shared Olingo4 application proxy, created lazily by the component and
    // handed back to it for closing in doStop().
    private Olingo4AppWrapper apiProxy;

    public Olingo4Endpoint(String uri, Olingo4Component component, Olingo4ApiName apiName, String methodName, Olingo4Configuration endpointConfiguration) {
        super(uri, component, apiName, methodName, Olingo4ApiCollection.getCollection().getHelper(apiName), endpointConfiguration);
        this.configuration = endpointConfiguration;

        // get all endpoint property names
        endpointPropertyNames = new HashSet<>(getPropertiesHelper().getValidEndpointProperties(configuration));
        // avoid adding edm as queryParam
        endpointPropertyNames.add(EDM_PROPERTY);
        endpointPropertyNames.add(ENDPOINT_HTTP_HEADERS_PROPERTY);
        endpointPropertyNames.add(SERVICE_URI_PROPERTY);
        endpointPropertyNames.add(FILTER_ALREADY_SEEN);
    }

    @Override
    public Producer createProducer() throws Exception {
        return new Olingo4Producer(this);
    }

    /**
     * Creates a polling consumer. Only the (parsed or unparsed) read method
     * is allowed, and the inBody option must not be set.
     */
    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        // make sure inBody is not set for consumers
        if (inBody != null) {
            throw new IllegalArgumentException("Option inBody is not supported for consumer endpoint");
        }
        // only read method is supported
        if (!READ_METHOD.equals(methodName) && !UREAD_METHOD.equals(methodName)) {
            throw new IllegalArgumentException("Only read method is supported for consumer endpoints");
        }
        final Olingo4Consumer consumer = new Olingo4Consumer(this, processor);
        consumer.setSplitResult(configuration.isSplitResult());
        configureConsumer(consumer);
        return consumer;
    }

    @Override
    protected ApiMethodPropertiesHelper<Olingo4Configuration> getPropertiesHelper() {
        return Olingo4PropertiesHelper.getHelper();
    }

    @Override
    protected String getThreadProfileName() {
        return Olingo4Constants.THREAD_PROFILE_NAME;
    }

    @Override
    public void configureProperties(Map<String, Object> options) {
        super.configureProperties(options);
        // handle individual query params: anything left in options that is
        // not an endpoint property becomes an OData query parameter
        parseQueryParams(options);
    }

    @Override
    protected void afterConfigureProperties() {
        // set default inBody for mutating methods (read/uread/delete take no body)
        if (!(READ_METHOD.equals(methodName) || DELETE_METHOD.equals(methodName) || UREAD_METHOD.equals(methodName)) && inBody == null) {
            inBody = DATA_PROPERTY;
        }
        createProxy();
    }

    @Override
    public synchronized Object getApiProxy(ApiMethod method, Map<String, Object> args) {
        return apiProxy.getOlingo4App();
    }

    @Override
    public Olingo4Component getComponent() {
        return (Olingo4Component)super.getComponent();
    }

    @Override
    protected void doStart() throws Exception {
        // apiProxy may already have been created in afterConfigureProperties()
        if (apiProxy == null) {
            createProxy();
        }
    }

    @Override
    protected void doStop() throws Exception {
        if (apiProxy != null) {
            // close the apiProxy; the component reference-counts shared proxies
            getComponent().closeApiProxy(apiProxy);
            apiProxy = null;
        }
    }

    @Override
    public void interceptPropertyNames(Set<String> propertyNames) {
        // add edm, and responseHandler property names
        // edm is computed on first call to getApiProxy(), and responseHandler
        // is provided by consumer and producer
        if (!DELETE_METHOD.equals(methodName)) {
            propertyNames.add(EDM_PROPERTY);
        }
        propertyNames.add(RESPONSE_HANDLER_PROPERTY);
    }

    /**
     * Populates derived exchange properties before the API method is invoked:
     * resolves the Edm metadata, propagates filterAlreadySeen, appends any
     * keyPredicate to the resource path, and extracts query parameters.
     */
    @Override
    public void interceptProperties(Map<String, Object> properties) {
        // NOTE(review): unchecked cast — assumes callers always store a
        // Map<String, String> under this key; confirm against producer/consumer.
        Map<String, String> endpointHttpHeaders = (Map<String, String>)properties.get(ENDPOINT_HTTP_HEADERS_PROPERTY);

        // read Edm if not set yet
        properties.put(EDM_PROPERTY, apiProxy.getEdm(endpointHttpHeaders));

        // handle filterAlreadySeen property
        properties.put(FILTER_ALREADY_SEEN, configuration.isFilterAlreadySeen());

        // handle keyPredicate
        final String keyPredicate = (String)properties.get(KEY_PREDICATE_PROPERTY);
        if (keyPredicate != null) {

            // make sure a resource path is provided
            final String resourcePath = (String)properties.get(RESOURCE_PATH_PROPERTY);
            if (resourcePath == null) {
                throw new IllegalArgumentException("Resource path must be provided in endpoint URI, or URI parameter '" + RESOURCE_PATH_PROPERTY + "', or exchange header '"
                                                   + Olingo4Constants.PROPERTY_PREFIX + RESOURCE_PATH_PROPERTY + "'");
            }

            // append keyPredicate to dynamically create resource path
            properties.put(RESOURCE_PATH_PROPERTY, resourcePath + '(' + keyPredicate + ')');
        }

        // handle individual queryParams
        parseQueryParams(properties);
    }

    private void createProxy() {
        apiProxy = getComponent().createApiProxy(getConfiguration());
    }

    /**
     * Moves every entry of {@code options} that is not a known endpoint
     * property into the single "queryParams" map, removing it from the
     * supplied map. Null values are rejected; consumer-scheduler properties
     * ("consumer.*") are left untouched.
     */
    private void parseQueryParams(Map<String, Object> options) {
        // extract non-endpoint properties as query params
        final Map<String, String> queryParams = new HashMap<>();
        for (Iterator<Map.Entry<String, Object>> it = options.entrySet().iterator(); it.hasNext();) {

            final Map.Entry<String, Object> entry = it.next();
            final String paramName = entry.getKey();

            // Avoid swallowing consumer scheduler properties, which get processed in configureProperties()
            if (paramName.startsWith("consumer.")) {
                continue;
            }

            if (!endpointPropertyNames.contains(paramName)) {

                // add to query params
                final Object value = entry.getValue();
                if (value == null) {
                    throw new IllegalArgumentException("Null value for query parameter " + paramName);
                }
                queryParams.put(paramName, value.toString());

                // remove entry from supplied options
                it.remove();
            }
        }
        if (!queryParams.isEmpty()) {

            @SuppressWarnings("unchecked")
            final Map<String, String> oldParams = (Map<String, String>)options.get(QUERY_PARAMS_PROPERTY);
            if (oldParams == null) {
                // set queryParams property
                options.put(QUERY_PARAMS_PROPERTY, queryParams);
            } else {
                // overwrite old params in supplied map
                oldParams.putAll(queryParams);
            }
        }
    }
}
| |
package com.tikal.jenkins.plugins.multijob.views;
import hudson.model.BallColor;
import hudson.model.HealthReport;
import hudson.model.Item;
import hudson.model.ItemGroup;
import hudson.model.Result;
import hudson.model.TopLevelItemDescriptor;
import hudson.model.AbstractProject;
import hudson.model.Hudson;
import hudson.model.Job;
import hudson.model.Run;
import hudson.search.SearchIndex;
import hudson.search.Search;
import hudson.security.Permission;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import org.acegisecurity.AccessDeniedException;
import com.tikal.jenkins.plugins.multijob.MultiJobProject;
@SuppressWarnings("rawtypes")
public class ProjectWrapper extends AbstractWrapper {

    /** Owning multijob project; {@code null} when this row wraps a plain job. */
    final MultiJobProject multijob;
    /** Recorded build state (name, alias, last success/failure numbers). */
    final BuildState buildState;
    /** Concrete run displayed on this row; may be {@code null}. */
    final Run build;

    public ProjectWrapper(MultiJobProject multijob, Job project,
            BuildState buildState, int nestLevel, Run build) {
        super(project, nestLevel);
        this.multijob = multijob;
        this.buildState = buildState;
        this.build = build;
    }

    @SuppressWarnings("unchecked")
    public Collection<? extends Job> getAllJobs() {
        return project.getAllJobs();
    }

    public String getName() {
        return project.getName();
    }

    public String getFullName() {
        return project.getFullName();
    }

    /** Job name, with the alias appended in parentheses when one is set. */
    public String getDisplayName() {
        final String name = buildState.getJobName();
        final String alias = buildState.getJobAlias();
        if (alias == null || alias.isEmpty()) {
            return name;
        }
        return name + " (" + alias + ")";
    }

    public String getFullDisplayName() {
        return project.getFullDisplayName();
    }

    public String getUrl() {
        return project.getUrl();
    }

    public int getBuildNumber() {
        return build.getNumber();
    }

    public String getBuildDuration() {
        return build.getDurationString();
    }

    public String getShortUrl() {
        return project.getShortUrl();
    }

    @SuppressWarnings("unchecked")
    public void onLoad(ItemGroup<? extends Item> parent, String name)
            throws IOException {
        project.onLoad(parent, name);
    }

    public void onCopiedFrom(Item src) {
        project.onCopiedFrom(src);
    }

    public void onCreatedFromScratch() {
        project.onCreatedFromScratch();
    }

    public void save() throws IOException {
        project.save();
    }

    public void delete() throws IOException, InterruptedException {
        project.delete();
    }

    public File getRootDir() {
        return project.getRootDir();
    }

    public Search getSearch() {
        return project.getSearch();
    }

    public String getSearchName() {
        return project.getSearchName();
    }

    public String getSearchUrl() {
        return project.getSearchUrl();
    }

    public SearchIndex getSearchIndex() {
        return project.getSearchIndex();
    }

    public void checkPermission(Permission permission)
            throws AccessDeniedException {
        project.checkPermission(permission);
    }

    public boolean hasPermission(Permission permission) {
        return project.hasPermission(permission);
    }

    public Hudson getParent() {
        return Hudson.getInstance();
    }

    public TopLevelItemDescriptor getDescriptor() {
        // Look the descriptor up by the wrapped project's own class name.
        return (TopLevelItemDescriptor) project.getDescriptorByName(project
                .getClass().getName());
    }

    /**
     * Resolves the run to show for the given result: the recorded last
     * success/failure for SUCCESS/FAILURE, otherwise the current build.
     * Returns {@code null} when no state or build is available.
     */
    Run findLastBuildForResult(Result result) {
        if (buildState == null || build == null) {
            return null;
        }
        if (Result.SUCCESS.equals(result)) {
            return project.getBuildByNumber(buildState.getLastSuccessBuildNumber());
        }
        if (Result.FAILURE.equals(result)) {
            return project.getBuildByNumber(buildState.getLastFailureBuildNumber());
        }
        return build;
    }

    public Run getLastFailedBuild() {
        return findLastBuildForResult(Result.FAILURE);
    }

    public Run getLastSuccessfulBuild() {
        return findLastBuildForResult(Result.SUCCESS);
    }

    public Run getLastBuild() {
        return findLastBuildForResult(null);
    }

    public Job getProject() {
        return project;
    }

    /** Ball color of the wrapped run, or grey when there is no run. */
    public BallColor getIconColor() {
        return build == null ? BallColor.GREY : build.getIconColor();
    }

    /** Inline CSS for the row: bold for multijobs, indented by nesting level. */
    public String getCss() {
        final String weight = project instanceof MultiJobProject ? "font-weight:bold;" : "";
        return weight + "padding-left:" + ((nestLevel + 1) * 20) + "px";
    }

    public HealthReport getBuildHealth() {
        return project.getBuildHealth();
    }

    @SuppressWarnings("unchecked")
    public List<HealthReport> getBuildHealthReports() {
        return project.getBuildHealthReports();
    }

    public boolean isBuildable() {
        if (multijob != null) {
            return false;
        }
        return project.isBuildable();
    }

    // Relative naming is not used by this view.
    public String getRelativeNameFrom(ItemGroup g) {
        return null;
    }

    public String getRelativeNameFrom(Item item) {
        return null;
    }
}
| |
/*
* Copyright 2009-2011 Collaborative Research Centre SFB 632
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.hu_berlin.german.korpling.annis.kickstarter;
import java.awt.Dialog;
/**
 * Modal Swing dialog that presents an exception to the user: a caption, the
 * exception class name, its localized message, and a collapsible stack-trace
 * details pane toggled by the "Details" button.
 *
 * @author thomas
 */
public class ExceptionDialog extends javax.swing.JDialog
{

  /** Creates new form ExceptionDialog */
  public ExceptionDialog(Exception exception)
  {
    super((Dialog) null, true);
    init(exception, null);
  }

  /** Creates new form ExceptionDialog */
  public ExceptionDialog(Exception exception, String caption)
  {
    super((Dialog) null, true);
    init(exception, caption);
  }

  /** Creates new form ExceptionDialog */
  public ExceptionDialog(java.awt.Dialog parent, Exception exception)
  {
    super(parent, true);
    init(exception, null);
  }

  /** Creates new form ExceptionDialog */
  public ExceptionDialog(java.awt.Frame parent, Exception exception)
  {
    super(parent, true);
    init(exception, null);
  }

  /**
   * Shared constructor logic: builds the form and fills the caption, type,
   * message and details widgets from the given exception. Both arguments may
   * be null, in which case the generated placeholder texts remain visible.
   */
  private void init(Exception exception, String caption)
  {
    initComponents();
    if(caption != null)
    {
      lblCaption.setText(caption + ":");
    }
    if(exception != null)
    {
      lblType.setText(exception.getClass().getName());
      txtMessage.setText(exception.getLocalizedMessage());
      // scroll the (possibly long) message back to its start
      txtMessage.setCaretPosition(0);

      // details pane: message followed by the full stack trace, one frame per line
      StringBuilder details = new StringBuilder();
      details.append(exception.getLocalizedMessage());
      details.append("\nat\n");

      StackTraceElement[] st = exception.getStackTrace();
      for(int i=0; i < st.length; i++)
      {
        details.append(st[i].toString());
        details.append("\n");
      }

      txtDetails.setText(details.toString());
    }
  }

  /** This method is called from within the constructor to
   * initialize the form.
   * WARNING: Do NOT modify this code. The content of this method is
   * always regenerated by the Form Editor.
   */
  @SuppressWarnings("unchecked")
  // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
  private void initComponents() {

    lblCaption = new javax.swing.JLabel();
    btClose = new javax.swing.JButton();
    spDetails = new javax.swing.JScrollPane();
    txtDetails = new javax.swing.JTextArea();
    btDetails = new javax.swing.JToggleButton();
    spMessage = new javax.swing.JScrollPane();
    txtMessage = new javax.swing.JTextArea();
    lblTypeCaption = new javax.swing.JLabel();
    lblType = new javax.swing.JLabel();

    setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
    setTitle("Exception thrown");
    setLocationByPlatform(true);
    addWindowListener(new java.awt.event.WindowAdapter() {
      public void windowOpened(java.awt.event.WindowEvent evt) {
        formWindowOpened(evt);
      }
    });

    lblCaption.setFont(new java.awt.Font("DejaVu Sans", 1, 13)); // NOI18N
    lblCaption.setText("Exception thrown:");

    btClose.setMnemonic('C');
    btClose.setText("Close");
    btClose.addActionListener(new java.awt.event.ActionListener() {
      public void actionPerformed(java.awt.event.ActionEvent evt) {
        btCloseActionPerformed(evt);
      }
    });

    txtDetails.setColumns(20);
    txtDetails.setRows(5);
    txtDetails.setText("<no details>");
    spDetails.setViewportView(txtDetails);

    btDetails.setMnemonic('D');
    btDetails.setText("Details");
    btDetails.addActionListener(new java.awt.event.ActionListener() {
      public void actionPerformed(java.awt.event.ActionEvent evt) {
        btDetailsActionPerformed(evt);
      }
    });

    txtMessage.setColumns(20);
    txtMessage.setEditable(false);
    txtMessage.setLineWrap(true);
    txtMessage.setRows(5);
    txtMessage.setText("<no message>");
    txtMessage.setWrapStyleWord(true);
    spMessage.setViewportView(txtMessage);

    lblTypeCaption.setText("Type:");

    lblType.setText("<unknown>");

    javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
    getContentPane().setLayout(layout);
    layout.setHorizontalGroup(
      layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
      .addGroup(layout.createSequentialGroup()
        .addGap(24, 24, 24)
        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
          .addComponent(spDetails, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, 538, Short.MAX_VALUE)
          .addComponent(lblCaption, javax.swing.GroupLayout.DEFAULT_SIZE, 484, Short.MAX_VALUE)
          .addComponent(spMessage, javax.swing.GroupLayout.DEFAULT_SIZE, 538, Short.MAX_VALUE)
          .addGroup(layout.createSequentialGroup()
            .addComponent(btDetails)
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
            .addComponent(btClose))
          .addGroup(layout.createSequentialGroup()
            .addComponent(lblTypeCaption)
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addComponent(lblType, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)))
        .addContainerGap())
    );
    layout.setVerticalGroup(
      layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
      .addGroup(layout.createSequentialGroup()
        .addContainerGap()
        .addComponent(lblCaption)
        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
        .addComponent(spMessage)
        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
          .addComponent(lblTypeCaption)
          .addComponent(lblType))
        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
        .addComponent(spDetails, javax.swing.GroupLayout.PREFERRED_SIZE, 146, javax.swing.GroupLayout.PREFERRED_SIZE)
        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
          .addComponent(btDetails)
          .addComponent(btClose))
        .addContainerGap())
    );

    pack();
  }// </editor-fold>//GEN-END:initComponents

  // Toggle the details pane and resize the dialog to fit.
  private void btDetailsActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_btDetailsActionPerformed
  {//GEN-HEADEREND:event_btDetailsActionPerformed
    spDetails.setVisible(btDetails.isSelected());
    pack();
    validate();
  }//GEN-LAST:event_btDetailsActionPerformed

  // Hide and dispose the dialog when "Close" is pressed.
  private void btCloseActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_btCloseActionPerformed
  {//GEN-HEADEREND:event_btCloseActionPerformed
    this.setVisible(false);
    this.dispose();
  }//GEN-LAST:event_btCloseActionPerformed

  // Details start collapsed; shrink the dialog accordingly on first open.
  private void formWindowOpened(java.awt.event.WindowEvent evt)//GEN-FIRST:event_formWindowOpened
  {//GEN-HEADEREND:event_formWindowOpened
    spDetails.setVisible(false);
    pack();
    validate();
  }//GEN-LAST:event_formWindowOpened

  /**
   * Standalone demo entry point: shows the dialog with a null exception and
   * exits the JVM when the window is closed.
   *
   * @param args the command line arguments
   */
  public static void main(String args[])
  {
    java.awt.EventQueue.invokeLater(new Runnable()
    {

      @Override
      public void run()
      {
        ExceptionDialog dialog = new ExceptionDialog(new javax.swing.JFrame(), null);
        dialog.addWindowListener(new java.awt.event.WindowAdapter()
        {

          @Override
          public void windowClosing(java.awt.event.WindowEvent e)
          {
            System.exit(0);
          }
        });
        dialog.setVisible(true);
      }
    });
  }
  // Variables declaration - do not modify//GEN-BEGIN:variables
  private javax.swing.JButton btClose;
  private javax.swing.JToggleButton btDetails;
  private javax.swing.JLabel lblCaption;
  private javax.swing.JLabel lblType;
  private javax.swing.JLabel lblTypeCaption;
  private javax.swing.JScrollPane spDetails;
  private javax.swing.JScrollPane spMessage;
  private javax.swing.JTextArea txtDetails;
  private javax.swing.JTextArea txtMessage;
  // End of variables declaration//GEN-END:variables
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.util;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;
import org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync;
import org.apache.flink.runtime.memory.MemoryManager;
import org.apache.flink.runtime.memory.MemoryManagerBuilder;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.binary.BinaryRowData;
import org.apache.flink.table.data.writer.BinaryRowWriter;
import org.apache.flink.table.runtime.typeutils.BinaryRowDataSerializer;
import org.apache.commons.lang3.RandomStringUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import static org.apache.flink.runtime.memory.MemoryManager.DEFAULT_PAGE_SIZE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* Test for {@link ResettableExternalBuffer}.
*/
public class ResettableExternalBufferTest {
private static final int MEMORY_SIZE = 1024 * DEFAULT_PAGE_SIZE;
private MemoryManager memManager;
private IOManager ioManager;
private Random random;
private BinaryRowDataSerializer serializer;
private BinaryRowDataSerializer multiColumnFixedLengthSerializer;
private BinaryRowDataSerializer multiColumnVariableLengthSerializer;
@Rule
public ExpectedException thrown = ExpectedException.none();
@Before
public void before() {
	// Fresh services and serializers for every test case.
	this.random = new Random();
	this.memManager = MemoryManagerBuilder.newBuilder().setMemorySize(MEMORY_SIZE).build();
	this.ioManager = new IOManagerAsync();
	// 1-column fixed-length, 3-column fixed-length, 5-column variable-length rows.
	this.serializer = new BinaryRowDataSerializer(1);
	this.multiColumnFixedLengthSerializer = new BinaryRowDataSerializer(3);
	this.multiColumnVariableLengthSerializer = new BinaryRowDataSerializer(5);
}
/** Convenience overload: single-column fixed-length rows. */
private ResettableExternalBuffer newBuffer(long memorySize) {
	return newBuffer(memorySize, serializer, true);
}
/** Builds a buffer backed by a lazy segment pool of the requested byte size. */
private ResettableExternalBuffer newBuffer(long memorySize,
		BinaryRowDataSerializer serializer, boolean isRowAllInFixedPart) {
	// Translate the byte budget into a page count for the pool.
	final int numPages = (int) (memorySize / memManager.getPageSize());
	return new ResettableExternalBuffer(
			ioManager,
			new LazyMemorySegmentPool(this, memManager, numPages),
			serializer,
			isRowAllInFixedPart);
}
@Test
public void testLess() throws Exception {
	// Small data set: everything stays in memory, no spill channels.
	ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2);

	int number = 100;
	List<Long> expected = insertMulti(buffer, number);
	// Fix: assertEquals takes (expected, actual) — args were swapped.
	assertEquals(number, buffer.size());
	assertBuffer(expected, buffer);
	assertEquals(0, buffer.getSpillChannels().size());

	// repeat read: contents must be stable across iterations
	assertBuffer(expected, buffer);
	buffer.newIterator();
	assertBuffer(expected, buffer);

	buffer.close();
}
@Test
public void testSpill() throws Exception {
	// 5000 rows exceed the 2-page budget, forcing a spill to disk.
	ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2);

	int number = 5000; // 16 * 5000
	List<Long> expected = insertMulti(buffer, number);
	// Fix: assertEquals takes (expected, actual) — args were swapped.
	assertEquals(number, buffer.size());
	assertBuffer(expected, buffer);
	assertTrue(buffer.getSpillChannels().size() > 0);

	// repeat read after spilling
	assertBuffer(expected, buffer);
	buffer.newIterator();
	assertBuffer(expected, buffer);

	buffer.close();
}
@Test
public void testBufferReset() throws Exception {
	ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2);

	// less
	insertMulti(buffer, 10);
	buffer.reset();
	// Fix: assertEquals takes (expected, actual) — args were swapped.
	assertEquals(0, buffer.size());

	// not spill
	List<Long> expected = insertMulti(buffer, 100);
	assertEquals(100, buffer.size());
	assertBuffer(expected, buffer);
	buffer.reset();

	// spill
	expected = insertMulti(buffer, 2500);
	assertEquals(2500, buffer.size());
	assertBuffer(expected, buffer);

	buffer.close();
}
@Test
public void testBufferResetWithSpill() throws Exception {
	int inMemoryThreshold = 20;
	ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2);

	// spill
	List<Long> expected = insertMulti(buffer, 2500);
	// Fix: assertEquals takes (expected, actual) — args were swapped.
	assertEquals(2500, buffer.size());
	assertBuffer(expected, buffer);
	buffer.reset();

	// spill, but not read the values
	insertMulti(buffer, 2500);
	buffer.newIterator();
	assertEquals(2500, buffer.size());
	buffer.reset();

	// not spill
	expected = insertMulti(buffer, inMemoryThreshold / 2);
	assertBuffer(expected, buffer);
	buffer.reset();
	assertEquals(0, buffer.size());

	// less
	expected = insertMulti(buffer, 100);
	assertEquals(100, buffer.size());
	assertBuffer(expected, buffer);
	buffer.reset();

	buffer.close();
}
@Test
public void testHugeRecord() throws Exception {
	// The second write is expected to be too large for the 3-page pool,
	// so the buffer should fail with an IOException.
	thrown.expect(IOException.class);
	try (ResettableExternalBuffer buffer = new ResettableExternalBuffer(
			ioManager,
			new LazyMemorySegmentPool(this, memManager, 3 * DEFAULT_PAGE_SIZE / memManager.getPageSize()),
			new BinaryRowDataSerializer(1),
			false)) {
		// NOTE(review): writeHuge presumably writes a record of roughly the
		// given size — confirm against the helper's definition.
		writeHuge(buffer, 10);
		writeHuge(buffer, 50000);
	}
}
@Test
public void testRandomAccessLess() throws Exception {
	// In-memory case: random access over all start positions.
	ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2);

	int number = 100;
	List<Long> expected = insertMulti(buffer, number);
	// Fix: assertEquals takes (expected, actual) — args were swapped.
	assertEquals(number, buffer.size());
	assertBuffer(expected, buffer);
	assertEquals(0, buffer.getSpillChannels().size());

	// repeat random access
	List<Integer> beginPos = new ArrayList<>();
	for (int i = 0; i < buffer.size(); i++) {
		beginPos.add(i);
	}
	Collections.shuffle(beginPos);
	for (int i = 0; i < buffer.size(); i++) {
		assertRandomAccess(expected, buffer, beginPos.get(i));
	}

	buffer.close();
}
@Test
public void testRandomAccessSpill() throws Exception {
	// Spilled case: random access over all start positions.
	ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2);

	int number = 5000;
	List<Long> expected = insertMulti(buffer, number);
	// Fix: assertEquals takes (expected, actual) — args were swapped.
	assertEquals(number, buffer.size());
	assertBuffer(expected, buffer);
	assertTrue(buffer.getSpillChannels().size() > 0);

	// repeat random access
	List<Integer> beginPos = new ArrayList<>();
	for (int i = 0; i < buffer.size(); i++) {
		beginPos.add(i);
	}
	Collections.shuffle(beginPos);
	for (int i = 0; i < buffer.size(); i++) {
		assertRandomAccess(expected, buffer, beginPos.get(i));
	}

	buffer.close();
}
@Test
public void testBufferResetWithSpillAndRandomAccess() throws Exception {
	final int tries = 100;
	ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2);

	// spill, random access and reset twice
	List<Long> expected;
	for (int i = 0; i < 2; i++) {
		expected = insertMulti(buffer, 2500);
		// Fix: assertEquals takes (expected, actual) — args were swapped.
		assertEquals(2500, buffer.size());
		for (int j = 0; j < tries; j++) {
			assertRandomAccess(expected, buffer);
		}
		buffer.reset();
	}

	// spill, but not read the values
	insertMulti(buffer, 2500);
	buffer.newIterator();
	assertEquals(2500, buffer.size());
	buffer.reset();

	// not spill
	expected = insertMulti(buffer, 10);
	for (int i = 0; i < tries; i++) {
		assertRandomAccess(expected, buffer);
	}
	buffer.reset();
	assertEquals(0, buffer.size());

	// less
	expected = insertMulti(buffer, 100);
	assertEquals(100, buffer.size());
	for (int i = 0; i < tries; i++) {
		assertRandomAccess(expected, buffer);
	}
	buffer.reset();

	buffer.close();
}
@Test
public void testMultiColumnFixedLengthRandomAccessLess() throws Exception {
	// In-memory random access, 3-column fixed-length rows.
	testMultiColumnRandomAccessLess(multiColumnFixedLengthSerializer, FixedLengthRowData.class, true);
}
@Test
public void testMultiColumnFixedLengthRandomAccessSpill() throws Exception {
	// Spilled random access, 3-column fixed-length rows.
	testMultiColumnRandomAccessSpill(multiColumnFixedLengthSerializer, FixedLengthRowData.class, true);
}
@Test
public void testBufferResetWithSpillAndMultiColumnFixedLengthRandomAccess() throws Exception {
	// Reset-after-spill plus random access, 3-column fixed-length rows.
	testBufferResetWithSpillAndMultiColumnRandomAccess(multiColumnFixedLengthSerializer, FixedLengthRowData.class, true);
}
@Test
public void testMultiColumnVariableLengthRandomAccessLess() throws Exception {
	// In-memory random access, 5-column variable-length rows.
	testMultiColumnRandomAccessLess(multiColumnVariableLengthSerializer, VariableLengthRowData.class, false);
}
@Test
public void testMultiColumnVariableLengthRandomAccessSpill() throws Exception {
	// Spilled random access, 5-column variable-length rows.
	testMultiColumnRandomAccessSpill(multiColumnVariableLengthSerializer, VariableLengthRowData.class, false);
}
@Test
public void testBufferResetWithSpillAndMultiColumnVariableLengthRandomAccess() throws Exception {
	// Reset-after-spill plus random access, 5-column variable-length rows.
	testBufferResetWithSpillAndMultiColumnRandomAccess(multiColumnVariableLengthSerializer, VariableLengthRowData.class, false);
}
@Test
public void testIteratorOnFixedLengthEmptyBuffer() throws Exception {
	// Iterating an empty buffer of fixed-length rows must be safe.
	testIteratorOnMultiColumnEmptyBuffer(multiColumnFixedLengthSerializer, true);
}
@Test
public void testFixedLengthRandomAccessOutOfRange() throws Exception {
	// Out-of-range start position handling, fixed-length rows.
	testRandomAccessOutOfRange(multiColumnFixedLengthSerializer, FixedLengthRowData.class, true);
}
@Test
public void testIteratorOnVariableLengthEmptyBuffer() throws Exception {
	// Iterating an empty buffer of variable-length rows must be safe.
	testIteratorOnMultiColumnEmptyBuffer(multiColumnVariableLengthSerializer, false);
}
@Test
public void testVariableLengthRandomAccessOutOfRange() throws Exception {
	// Out-of-range start position handling, variable-length rows.
	testRandomAccessOutOfRange(multiColumnVariableLengthSerializer, VariableLengthRowData.class, false);
}
@Test
public void testIteratorReset() throws Exception {
	ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2);

	int number = 100;
	List<Long> expected = insertMulti(buffer, number);
	// Fix: assertEquals takes (expected, actual) — args were swapped.
	assertEquals(number, buffer.size());
	assertBuffer(expected, buffer);
	assertEquals(0, buffer.getSpillChannels().size());

	// reset and read: iterator must replay from the beginning
	ResettableExternalBuffer.BufferIterator iterator = buffer.newIterator();
	assertBuffer(expected, iterator);
	iterator.reset();
	assertBuffer(expected, iterator);

	iterator.close();
	buffer.close();
}
@Test
public void testIteratorResetWithSpill() throws Exception {
	ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2);

	int number = 5000; // 16 * 5000
	List<Long> expected = insertMulti(buffer, number);
	// Fix: assertEquals takes (expected, actual) — args were swapped.
	assertEquals(number, buffer.size());
	assertBuffer(expected, buffer);
	assertTrue(buffer.getSpillChannels().size() > 0);

	// reset and read after spilling
	ResettableExternalBuffer.BufferIterator iterator = buffer.newIterator();
	assertBuffer(expected, iterator);
	iterator.reset();
	assertBuffer(expected, iterator);

	iterator.close();
	buffer.close();
}
@Test
public void testIteratorResetWithRandomAccess() throws Exception {
	ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2);

	int number = 100;
	List<Long> expected = insertMulti(buffer, number);
	// Fix: assertEquals takes (expected, actual) — args were swapped.
	assertEquals(number, buffer.size());
	assertBuffer(expected, buffer);
	assertEquals(0, buffer.getSpillChannels().size());

	// repeat random access: each iterator must replay after reset()
	List<Integer> beginPos = new ArrayList<>();
	for (int i = 0; i < buffer.size(); i++) {
		beginPos.add(i);
	}
	Collections.shuffle(beginPos);
	for (int i = 0; i < buffer.size(); i++) {
		int begin = beginPos.get(i);
		ResettableExternalBuffer.BufferIterator iterator = buffer.newIterator(begin);
		assertRandomAccess(expected, iterator, begin);
		iterator.reset();
		assertRandomAccess(expected, iterator, begin);
		iterator.close();
	}

	buffer.close();
}
@Test
public void testIteratorResetWithRandomAccessSpill() throws Exception {
	ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2);

	int number = 5000;
	List<Long> expected = insertMulti(buffer, number);
	// Fix: assertEquals takes (expected, actual) — args were swapped.
	assertEquals(number, buffer.size());
	assertBuffer(expected, buffer);
	assertTrue(buffer.getSpillChannels().size() > 0);

	// repeat random access after spilling
	List<Integer> beginPos = new ArrayList<>();
	for (int i = 0; i < buffer.size(); i++) {
		beginPos.add(i);
	}
	Collections.shuffle(beginPos);
	for (int i = 0; i < buffer.size(); i++) {
		int begin = beginPos.get(i);
		ResettableExternalBuffer.BufferIterator iterator = buffer.newIterator(begin);
		assertRandomAccess(expected, iterator, begin);
		iterator.reset();
		assertRandomAccess(expected, iterator, begin);
		iterator.close();
	}

	buffer.close();
}
@Test
public void testMultipleIteratorsLess() throws Exception {
	ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2);

	int number = 100;
	List<Long> expected = insertMulti(buffer, number);
	// Fix: assertEquals takes (expected, actual) — args were swapped.
	assertEquals(number, buffer.size());
	assertBuffer(expected, buffer);
	assertEquals(0, buffer.getSpillChannels().size());

	// many concurrent iterators; only every third one is closed eagerly
	List<Integer> beginPos = new ArrayList<>();
	for (int i = 0; i < buffer.size(); i++) {
		beginPos.add(i);
	}
	Collections.shuffle(beginPos);
	for (int i = 0; i < buffer.size(); i++) {
		int beginIdx = beginPos.get(i);
		ResettableExternalBuffer.BufferIterator iterator = buffer.newIterator(beginIdx);
		assertRandomAccess(expected, iterator, beginIdx);
		if (i % 3 == 0) {
			iterator.close();
		}
	}

	buffer.close();
}
/**
 * Same as {@link #testMultipleIteratorsLess} but with enough rows to force
 * the buffer to spill to disk.
 */
@Test
public void testMultipleIteratorsSpill() throws Exception {
    ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2);
    int number = 5000;
    List<Long> expected = insertMulti(buffer, number);
    // JUnit convention: expected value first, actual second.
    assertEquals(number, buffer.size());
    assertBuffer(expected, buffer);
    // 5000 rows exceed the in-memory capacity, so spill files must exist.
    assertTrue(buffer.getSpillChannels().size() > 0);
    // Visit every start position exactly once, in random order.
    List<Integer> beginPos = new ArrayList<>();
    for (int i = 0; i < buffer.size(); i++) {
        beginPos.add(i);
    }
    Collections.shuffle(beginPos);
    for (int i = 0; i < buffer.size(); i++) {
        int beginIdx = beginPos.get(i);
        ResettableExternalBuffer.BufferIterator iterator = buffer.newIterator(beginIdx);
        assertRandomAccess(expected, iterator, beginIdx);
        // Intentionally close only every third iterator to exercise the
        // many-open-iterators path; buffer.close() reclaims the rest.
        if (i % 3 == 0) {
            iterator.close();
        }
    }
    buffer.close();
}
/**
 * Interleaves reset/read cycles on two concurrently open iterators over a
 * spilled buffer, including resetting one iterator after the other has been
 * closed.
 */
@Test
public void testMultipleIteratorsWithIteratorReset() throws Exception {
    ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2);
    int number = 5000; // 16 * 5000
    List<Long> expected = insertMulti(buffer, number);
    // JUnit convention: expected value first, actual second.
    assertEquals(number, buffer.size());
    assertBuffer(expected, buffer);
    assertTrue(buffer.getSpillChannels().size() > 0);
    // Each iterator must see the full contents after every reset, regardless
    // of what the other iterator has done in between.
    ResettableExternalBuffer.BufferIterator iterator1 = buffer.newIterator();
    assertBuffer(expected, iterator1);
    iterator1.reset();
    assertBuffer(expected, iterator1);
    ResettableExternalBuffer.BufferIterator iterator2 = buffer.newIterator();
    assertBuffer(expected, iterator2);
    iterator2.reset();
    assertBuffer(expected, iterator2);
    iterator1.reset();
    assertBuffer(expected, iterator1);
    iterator2.reset();
    assertBuffer(expected, iterator2);
    // Closing one iterator must not affect the other.
    iterator1.close();
    iterator2.reset();
    assertBuffer(expected, iterator2);
    iterator2.close();
    buffer.close();
}
// Updating an in-memory buffer of fixed-length rows while iterators are open
// is expected to fail — presumably the buffer forbids iteration before
// complete(); TODO confirm against ResettableExternalBuffer.
@Test(expected = IllegalStateException.class)
public void testUpdateIteratorFixedLengthLess() throws Exception {
    testUpdateIteratorLess(multiColumnFixedLengthSerializer, FixedLengthRowData.class, true);
}
// Spill-variant of the fixed-length update-while-iterating test; also
// expected to throw IllegalStateException.
@Test(expected = IllegalStateException.class)
public void testUpdateIteratorFixedLengthSpill() throws Exception {
    testUpdateIteratorSpill(multiColumnFixedLengthSerializer, FixedLengthRowData.class, true);
}
// Variable-length-row variant of the in-memory update-while-iterating test;
// also expected to throw IllegalStateException.
@Test(expected = IllegalStateException.class)
public void testUpdateIteratorVariableLengthLess() throws Exception {
    testUpdateIteratorLess(multiColumnVariableLengthSerializer, VariableLengthRowData.class, false);
}
// Variable-length-row variant of the spilled update-while-iterating test;
// also expected to throw IllegalStateException.
@Test(expected = IllegalStateException.class)
public void testUpdateIteratorVariableLengthSpill() throws Exception {
    testUpdateIteratorSpill(multiColumnVariableLengthSerializer, VariableLengthRowData.class, false);
}
/**
 * Random-accesses every start position of a small multi-column buffer that
 * stays entirely in memory.
 *
 * @param serializer          row serializer under test
 * @param clazz               concrete row-data class to instantiate
 * @param isRowAllInFixedPart whether all columns live in the fixed-length part
 */
private <T extends RowData> void testMultiColumnRandomAccessLess(
        BinaryRowDataSerializer serializer, Class<T> clazz, boolean isRowAllInFixedPart) throws Exception {
    ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2, serializer, isRowAllInFixedPart);
    int number = 30;
    List<RowData> expected = insertMultiColumn(buffer, number, clazz);
    // JUnit convention: expected value first, actual second.
    assertEquals(number, buffer.size());
    assertEquals(0, buffer.getSpillChannels().size());
    // Visit every start position exactly once, in random order.
    List<Integer> beginPos = new ArrayList<>();
    for (int i = 0; i < buffer.size(); i++) {
        beginPos.add(i);
    }
    Collections.shuffle(beginPos);
    for (int i = 0; i < buffer.size(); i++) {
        assertMultiColumnRandomAccess(expected, buffer, beginPos.get(i));
    }
    buffer.close();
}
/**
 * Random-accesses every start position of a multi-column buffer large enough
 * to spill to disk.
 *
 * @param serializer          row serializer under test
 * @param clazz               concrete row-data class to instantiate
 * @param isRowAllInFixedPart whether all columns live in the fixed-length part
 */
private <T extends RowData> void testMultiColumnRandomAccessSpill(
        BinaryRowDataSerializer serializer, Class<T> clazz, boolean isRowAllInFixedPart) throws Exception {
    ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2, serializer, isRowAllInFixedPart);
    int number = 4000;
    List<RowData> expected = insertMultiColumn(buffer, number, clazz);
    // JUnit convention: expected value first, actual second.
    assertEquals(number, buffer.size());
    assertTrue(buffer.getSpillChannels().size() > 0);
    // Visit every start position exactly once, in random order.
    List<Integer> beginPos = new ArrayList<>();
    for (int i = 0; i < buffer.size(); i++) {
        beginPos.add(i);
    }
    Collections.shuffle(beginPos);
    for (int i = 0; i < buffer.size(); i++) {
        assertMultiColumnRandomAccess(expected, buffer, beginPos.get(i));
    }
    buffer.close();
}
/**
 * Exercises {@code reset()} in several states: after spilling and reading,
 * after spilling without reading, after a small in-memory fill, and again
 * after a final fill — verifying random access works in each state.
 */
private <T extends RowData> void testBufferResetWithSpillAndMultiColumnRandomAccess(
        BinaryRowDataSerializer serializer, Class<T> clazz, boolean isRowAllInFixedPart) throws Exception {
    final int tries = 100;
    ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2, serializer, isRowAllInFixedPart);
    // spill, random access and reset twice
    List<RowData> expected;
    for (int i = 0; i < 2; i++) {
        expected = insertMultiColumn(buffer, 1500, clazz);
        // JUnit convention: expected value first, actual second.
        assertEquals(1500, buffer.size());
        for (int j = 0; j < tries; j++) {
            assertMultiColumnRandomAccess(expected, buffer);
        }
        buffer.reset();
    }
    // spill, but do not read the values before resetting
    insertMultiColumn(buffer, 1500, clazz);
    buffer.newIterator();
    assertEquals(1500, buffer.size());
    buffer.reset();
    // small fill that does not spill
    expected = insertMultiColumn(buffer, 10, clazz);
    for (int i = 0; i < tries; i++) {
        assertMultiColumnRandomAccess(expected, buffer);
    }
    buffer.reset();
    // reset must leave the buffer empty
    assertEquals(0, buffer.size());
    // less-than-capacity fill after reset
    expected = insertMultiColumn(buffer, 30, clazz);
    assertEquals(30, buffer.size());
    for (int i = 0; i < tries; i++) {
        assertMultiColumnRandomAccess(expected, buffer);
    }
    buffer.reset();
    buffer.close();
}
/**
 * An empty, completed buffer must yield no rows from any start index,
 * including indices far beyond the (zero) size.
 */
private void testIteratorOnMultiColumnEmptyBuffer(
        BinaryRowDataSerializer serializer, boolean isRowAllInFixedPart) throws Exception {
    ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2, serializer, isRowAllInFixedPart);
    buffer.complete();
    // Start at the very beginning...
    ResettableExternalBuffer.BufferIterator emptyIter = buffer.newIterator(0);
    assertFalse(emptyIter.advanceNext());
    // ...and at an arbitrary out-of-range position.
    emptyIter = buffer.newIterator(random.nextInt(Integer.MAX_VALUE));
    assertFalse(emptyIter.advanceNext());
    buffer.close();
}
/**
 * Iterators opened at or past {@code size()} must be empty, while any
 * in-range start index must yield at least one row.
 */
private <T extends RowData> void testRandomAccessOutOfRange(
        BinaryRowDataSerializer serializer, Class<T> clazz, boolean isRowAllInFixedPart) throws Exception {
    ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2, serializer, isRowAllInFixedPart);
    int number = 100;
    List<RowData> expected = insertMultiColumn(buffer, number, clazz);
    // JUnit convention: expected value first, actual second.
    assertEquals(number, buffer.size());
    assertMultiColumnRandomAccess(expected, buffer, 0);
    ResettableExternalBuffer.BufferIterator iterator;
    // Exactly at the end: empty.
    iterator = buffer.newIterator(number);
    assertFalse(iterator.advanceNext());
    // Far past the end: still empty.
    iterator = buffer.newIterator(number + random.nextInt(Integer.MAX_VALUE));
    assertFalse(iterator.advanceNext());
    // Any in-range position: non-empty.
    iterator = buffer.newIterator(random.nextInt(number));
    assertTrue(iterator.advanceNext());
    buffer.close();
}
/**
 * Opens several iterators up front, then inserts rows one at a time, checking
 * after each insert that every iterator advances to exactly the new row (and
 * no further). Finally resets all iterators and replays the whole contents.
 * Callers wrap this in {@code @Test(expected = IllegalStateException.class)},
 * so some step here is expected to throw with the current buffer
 * implementation — presumably creating iterators before {@code complete()};
 * TODO confirm against ResettableExternalBuffer.
 */
private <T extends RowData> void testUpdateIteratorLess(
        BinaryRowDataSerializer serializer, Class<T> clazz, boolean isRowAllInFixedPart) throws Exception {
    ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2, serializer, isRowAllInFixedPart);
    int number = 20;
    int iters = 3;
    List<RowData> expected = new ArrayList<>();
    List<ResettableExternalBuffer.BufferIterator> iterators = new ArrayList<>();
    // Iterators are created BEFORE any data is inserted.
    for (int i = 0; i < iters; i++) {
        iterators.add(buffer.newIterator());
    }
    for (int i = 0; i < number; i++) {
        RowData data = clazz.newInstance();
        data.insertIntoBuffer(buffer);
        expected.add(data);
        // Every open iterator must see exactly the one newly inserted row.
        for (ResettableExternalBuffer.BufferIterator iterator : iterators) {
            assertTrue(iterator.advanceNext());
            BinaryRowData row = iterator.getRow();
            data.checkSame(row);
            assertFalse(iterator.advanceNext());
        }
    }
    // Rewind all iterators and replay the full contents in insertion order.
    for (ResettableExternalBuffer.BufferIterator iterator : iterators) {
        iterator.reset();
    }
    for (int i = 0; i < number; i++) {
        for (ResettableExternalBuffer.BufferIterator iterator : iterators) {
            assertTrue(iterator.advanceNext());
            BinaryRowData row = iterator.getRow();
            expected.get(i).checkSame(row);
        }
    }
    for (ResettableExternalBuffer.BufferIterator iterator : iterators) {
        iterator.close();
    }
    assertMultiColumnRandomAccess(expected, buffer);
    buffer.close();
}
/**
 * Spill-sized variant of {@link #testUpdateIteratorLess}: inserts rows in
 * batches of {@code step}, checking after each batch that every pre-opened
 * iterator sees exactly the batch, then resets and replays everything.
 * Callers wrap this in {@code @Test(expected = IllegalStateException.class)},
 * so some step here is expected to throw with the current buffer
 * implementation — presumably creating iterators before {@code complete()};
 * TODO confirm against ResettableExternalBuffer.
 */
private <T extends RowData> void testUpdateIteratorSpill(
        BinaryRowDataSerializer serializer, Class<T> clazz, boolean isRowAllInFixedPart) throws Exception {
    ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2, serializer, isRowAllInFixedPart);
    int number = 100;
    int step = 20;
    int iters = 3;
    List<RowData> expected = new ArrayList<>();
    List<RowData> smallExpected = new ArrayList<>();
    List<ResettableExternalBuffer.BufferIterator> iterators = new ArrayList<>();
    // Iterators are created BEFORE any data is inserted.
    for (int i = 0; i < iters; i++) {
        iterators.add(buffer.newIterator());
    }
    for (int i = 0; i < number; i++) {
        // Insert one batch of `step` rows.
        smallExpected.clear();
        for (int j = 0; j < step; j++) {
            RowData data = clazz.newInstance();
            data.insertIntoBuffer(buffer);
            expected.add(data);
            smallExpected.add(data);
        }
        // Every open iterator must see exactly this batch...
        for (int j = 0; j < step; j++) {
            for (ResettableExternalBuffer.BufferIterator iterator : iterators) {
                assertTrue(iterator.advanceNext());
                BinaryRowData row = iterator.getRow();
                smallExpected.get(j).checkSame(row);
            }
        }
        // ...and nothing beyond it.
        for (ResettableExternalBuffer.BufferIterator iterator : iterators) {
            assertFalse(iterator.advanceNext());
        }
    }
    // Rewind all iterators and replay the full contents in insertion order.
    for (ResettableExternalBuffer.BufferIterator iterator : iterators) {
        iterator.reset();
    }
    for (int i = 0; i < number * step; i++) {
        for (ResettableExternalBuffer.BufferIterator iterator : iterators) {
            assertTrue(iterator.advanceNext());
            BinaryRowData row = iterator.getRow();
            expected.get(i).checkSame(row);
        }
    }
    for (ResettableExternalBuffer.BufferIterator iterator : iterators) {
        iterator.close();
    }
    assertMultiColumnRandomAccess(expected, buffer);
    buffer.close();
}
/**
 * Appends a single one-column row containing a random string of the given
 * length — used to force large/spilling records.
 */
private void writeHuge(ResettableExternalBuffer buffer, int size) throws IOException {
    BinaryRowData hugeRow = new BinaryRowData(1);
    BinaryRowWriter hugeWriter = new BinaryRowWriter(hugeRow);
    hugeWriter.reset();
    hugeWriter.writeString(0, StringData.fromString(RandomStringUtils.random(size)));
    hugeWriter.complete();
    buffer.add(hugeRow);
}
/** Asserts the buffer's full contents equal {@code expected}, via a fresh iterator. */
private void assertBuffer(List<Long> expected, ResettableExternalBuffer buffer) {
    ResettableExternalBuffer.BufferIterator fullScan = buffer.newIterator();
    assertBuffer(expected, fullScan);
    fullScan.close();
}
/** Drains the iterator and compares the collected long values to {@code expected}. */
private void assertBuffer(
    List<Long> expected, ResettableExternalBuffer.BufferIterator iterator
) {
    List<Long> actual = new ArrayList<>();
    while (iterator.advanceNext()) {
        actual.add(iterator.getRow().getLong(0));
    }
    assertEquals(expected, actual);
}
/**
 * Inserts {@code cnt} random longs, completes the buffer, and returns the
 * inserted values in order.
 */
private List<Long> insertMulti(ResettableExternalBuffer buffer, int cnt) throws IOException {
    List<Long> inserted = new ArrayList<>(cnt);
    insertMulti(buffer, cnt, inserted);
    buffer.complete();
    return inserted;
}
/** Inserts {@code cnt} random longs, appending each value to {@code expected}. */
private void insertMulti(ResettableExternalBuffer buffer, int cnt,
        List<Long> expected) throws IOException {
    for (int remaining = cnt; remaining > 0; remaining--) {
        expected.add(randomInsert(buffer));
    }
}
/** Writes one random long into the buffer as a single-column row and returns it. */
private long randomInsert(ResettableExternalBuffer buffer) throws IOException {
    long value = random.nextLong();
    BinaryRowData singleColRow = new BinaryRowData(1);
    BinaryRowWriter rowWriter = new BinaryRowWriter(singleColRow);
    rowWriter.reset();
    rowWriter.writeLong(0, value);
    rowWriter.complete();
    buffer.add(singleColRow);
    return value;
}
/** Random-access check starting from an arbitrary in-range position. */
private void assertRandomAccess(List<Long> expected, ResettableExternalBuffer buffer) {
    assertRandomAccess(expected, buffer, random.nextInt(buffer.size()));
}
/** Opens an iterator at {@code begin}, verifies the suffix, and closes it. */
private void assertRandomAccess(List<Long> expected, ResettableExternalBuffer buffer,
        int begin) {
    ResettableExternalBuffer.BufferIterator suffixIter = buffer.newIterator(begin);
    assertRandomAccess(expected, suffixIter, begin);
    suffixIter.close();
}
/**
 * Drains the iterator and asserts it yields exactly the tail of
 * {@code expected} starting at index {@code begin}.
 */
private void assertRandomAccess(List<Long> expected,
        ResettableExternalBuffer.BufferIterator iterator, int begin) {
    List<Long> actual = new ArrayList<>();
    while (iterator.advanceNext()) {
        actual.add(iterator.getRow().getLong(0));
    }
    assertEquals(expected.subList(begin, expected.size()), actual);
}
/**
 * Inserts {@code cnt} freshly constructed rows of type {@code clazz} and
 * returns them in insertion order.
 * NOTE(review): the 4-arg overload below already calls
 * {@code buffer.complete()}, so complete() runs twice here — presumably
 * idempotent, but verify against ResettableExternalBuffer.
 */
private <T extends RowData> List<RowData> insertMultiColumn(
    ResettableExternalBuffer buffer, int cnt, Class<T> clazz)
    throws IOException, IllegalAccessException, InstantiationException {
    ArrayList<RowData> expected = new ArrayList<>(cnt);
    insertMultiColumn(buffer, cnt, expected, clazz);
    buffer.complete();
    return expected;
}
/**
 * Inserts {@code cnt} freshly constructed rows of type {@code clazz},
 * appending each to {@code expected}, then completes the buffer.
 * NOTE(review): unlike the long-valued insertMulti(buffer, cnt, expected),
 * this overload calls complete() itself, which its 3-arg wrapper then
 * repeats.
 */
private <T extends RowData> void insertMultiColumn(
    ResettableExternalBuffer buffer, int cnt, List<RowData> expected, Class<T> clazz)
    throws IOException, IllegalAccessException, InstantiationException {
    for (int i = 0; i < cnt; i++) {
        RowData data = clazz.newInstance();
        data.insertIntoBuffer(buffer);
        expected.add(data);
    }
    buffer.complete();
}
/** Multi-column random-access check from an arbitrary in-range position. */
private void assertMultiColumnRandomAccess(List<RowData> expected,
        ResettableExternalBuffer buffer) {
    assertMultiColumnRandomAccess(expected, buffer, random.nextInt(buffer.size()));
}
/**
 * Opens an iterator at {@code begin} and verifies every remaining row against
 * {@code expected}. The iterator is now closed afterwards — the original
 * leaked it, unlike the long-valued {@code assertRandomAccess} counterpart.
 */
private void assertMultiColumnRandomAccess(List<RowData> expected,
        ResettableExternalBuffer buffer, int begin) {
    ResettableExternalBuffer.BufferIterator iterator = buffer.newIterator(begin);
    for (int i = begin; i < buffer.size(); i++) {
        assertTrue(iterator.advanceNext());
        expected.get(i).checkSame(iterator.getRow());
    }
    iterator.close();
}
/** A randomly generated row that can serialize itself and verify a read-back copy. */
private interface RowData {
    // Appends this row's values to the buffer as one BinaryRowData.
    void insertIntoBuffer(ResettableExternalBuffer buffer) throws IOException;
    // Asserts that the given row holds exactly this row's column values.
    void checkSame(BinaryRowData row);
}
/**
 * A three-column row (boolean, long, int) with values randomized at
 * construction; all columns live in the fixed-length part.
 */
private static class FixedLengthRowData implements RowData {
    private final boolean col0;
    private final long col1;
    private final int col2;

    FixedLengthRowData() {
        // The Random instance is only needed during construction, so it is a
        // local now instead of a per-instance field (the original retained it).
        Random random = new Random();
        col0 = random.nextBoolean();
        col1 = random.nextLong();
        col2 = random.nextInt();
    }

    @Override
    public void insertIntoBuffer(ResettableExternalBuffer buffer) throws IOException {
        BinaryRowData row = new BinaryRowData(3);
        BinaryRowWriter writer = new BinaryRowWriter(row);
        writer.reset();
        writer.writeBoolean(0, col0);
        writer.writeLong(1, col1);
        writer.writeInt(2, col2);
        writer.complete();
        buffer.add(row);
    }

    @Override
    public void checkSame(BinaryRowData row) {
        assertEquals(col0, row.getBoolean(0));
        assertEquals(col1, row.getLong(1));
        assertEquals(col2, row.getInt(2));
    }
}
/**
 * A five-column row mixing fixed-length columns (boolean, long, int) with
 * variable-length strings; values randomized at construction.
 */
private static class VariableLengthRowData implements RowData {
    private final boolean col0;
    private final long col1;
    private final StringData col2;
    private final int col3;
    private final StringData col4;

    public VariableLengthRowData() {
        // The Random instance is only needed during construction, so it is a
        // local now instead of a per-instance field (the original retained it).
        Random random = new Random();
        col0 = random.nextBoolean();
        col1 = random.nextLong();
        // Strings of 1..50 random characters.
        col2 = StringData.fromString(RandomStringUtils.random(random.nextInt(50) + 1));
        col3 = random.nextInt();
        col4 = StringData.fromString(RandomStringUtils.random(random.nextInt(50) + 1));
    }

    @Override
    public void insertIntoBuffer(ResettableExternalBuffer buffer) throws IOException {
        BinaryRowData row = new BinaryRowData(5);
        BinaryRowWriter writer = new BinaryRowWriter(row);
        writer.reset();
        writer.writeBoolean(0, col0);
        writer.writeLong(1, col1);
        writer.writeString(2, col2);
        writer.writeInt(3, col3);
        writer.writeString(4, col4);
        writer.complete();
        buffer.add(row);
    }

    @Override
    public void checkSame(BinaryRowData row) {
        assertEquals(col0, row.getBoolean(0));
        assertEquals(col1, row.getLong(1));
        assertEquals(col2, row.getString(2));
        assertEquals(col3, row.getInt(3));
        assertEquals(col4, row.getString(4));
    }
}
}
| |
package inprotk.carchase2;
import inpro.apps.SimpleMonitor;
import inpro.audio.DispatchStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.Vector;
import processing.data.StringDict;
import inprotk.carchase2.Articulator;
import inprotk.carchase2.CarChase;
import inprotk.carchase2.Articulator.Articulatable;
import inprotk.carchase2.Configuration.CarState;
import inprotk.carchase2.IncrementalArticulator;
import inprotk.carchase2.World.Street;
import inprotk.carchase2.World.WorldPoint;
public class CarChaseTTS {
private ArrayList<Pattern> patterns;
private HashMap<String, StreetReplacement> streetNames;
private Articulator articulator;
private DispatchStream dispatcher;
private DispatcherThread dispatchThread;
private String flexForm1, flexForm2, left, right;
/**
 * Sets up the TTS pipeline: the audio dispatcher, an articulator (baseline
 * or incremental depending on the "baseline" super-config flag), the parsed
 * pattern definitions, and the asynchronous dispatch thread.
 *
 * @param patternsFilename file with message patterns, conditions, street and
 *                         flexion definitions (see parsePatterns).
 */
public CarChaseTTS(String patternsFilename) {
    try {
        dispatcher = SimpleMonitor.setupDispatcher();
        // "baseline" == true selects the non-incremental reference articulator.
        if (CarChase.getSuperConfig("baseline").equals("true"))
            articulator = new StandardArticulator(dispatcher);
        else
            articulator = new IncrementalArticulator(dispatcher);
        parsePatterns(patternsFilename);
        dispatchThread = new DispatcherThread();
        dispatchThread.start();
    } catch (Exception e) {
        // NOTE(review): any setup failure is only printed, leaving this object
        // half-initialized; consider propagating or logging via CarChase.log.
        e.printStackTrace();
    }
}
/** Exposes the articulator's hypothesis viewer (e.g. for GUI display). */
public MyCurrentHypothesisViewer getHypothesisViewer() {
    return articulator.getHypothesisViewer();
}
/**
 * Parses the pattern definition file, line by line. Recognized line prefixes:
 *   "--msg"     — adds a message template to the pattern currently being built
 *   "--cond"    — adds a condition to the current pattern
 *   "++"        — closes the current pattern and stores it
 *   "pt"        — starts a new pattern (optional flag in second argument)
 *   "street"    — registers a street name with its two flexion forms
 *   "flex1/2"   — sets the default flexion prefixes for unknown streets
 *   "leftright" — sets the words used for left/right
 * The first line of the file is skipped (header). Lines starting with "#"
 * are comments; empty lines are ignored; anything else is an error.
 * NOTE(review): a "--msg"/"--cond" line before any "pt" line would NPE on
 * currentPattern — the file format presumably guarantees ordering.
 */
private void parsePatterns(String filename) throws Exception {
    patterns = new ArrayList<Pattern>();
    streetNames = new HashMap<String, StreetReplacement>();
    Pattern currentPattern = null;
    String[] lines = CarChase.readLines(filename);
    int index = 0;
    for (String line : lines) {
        // Skip the header line.
        if (index++ == 0) continue;
        line = line.trim();
        if (line.startsWith("#")) continue;
        if (line.startsWith("--msg")) {
            // Format: --msg LEVEL#START#END#KEY=TEXT
            String[] args = line.substring(6).split("=");
            String[] meta = args[0].split("#");
            MessageInformationLevel type = MessageInformationLevel.valueOf(meta[0]);
            MessageType typeStart = MessageType.valueOf(meta[1]);
            MessageType typeEnd = MessageType.valueOf(meta[2]);
            String key = meta[3];
            currentPattern.addTemplate(key, args[1], typeStart, typeEnd, type);
        }
        else if (line.startsWith("--cond")) {
            currentPattern.addCondition(line.substring(7));
        }
        else if (line.startsWith("++")) {
            // End of the current pattern definition.
            patterns.add(currentPattern);
            currentPattern = null;
        }
        else if (line.startsWith("pt")) {
            String[] args = line.substring(3).split(",");
            // Strip leading spaces from each argument.
            for (int i = 0; i < args.length; i++)
                while (args[i].startsWith(" ")) args[i] = args[i].substring(1);
            boolean optional = args.length > 1 && args[1].equals("y");
            currentPattern = new Pattern(optional);
        }
        else if (line.startsWith("street")) {
            // Format: street key, name, flex1, flex2 ("%" expands to the key).
            String[] args = line.substring(7).split(",");
            for (int i = 0; i < args.length; i++)
                while (args[i].startsWith(" ")) args[i] = args[i].substring(1);
            String key = args[0];
            String name = args[1];
            if (name.equals("%")) name = key;
            String flex1 = args[2];
            String flex2 = args[3];
            flex1 = flex1.replace("%", key);
            flex2 = flex2.replace("%", key);
            streetNames.put(key, new StreetReplacement(name, flex1, flex2));
        }
        else if (line.startsWith("flex")) {
            // Default flexion prefixes used for streets without an entry.
            if (line.startsWith("flex1")) flexForm1 = line.substring(6);
            if (line.startsWith("flex2")) flexForm2 = line.substring(6);
        }
        else if (line.startsWith("leftright")) {
            String[] values = line.split(" ");
            left = values[1];
            right = values[2];
            CarChase.log("LR", left, right);
        }
        else if (line.equals("")) continue;
        else throw new RuntimeException("Illegal line: " + line + " in file " + filename);
    }
}
/**
 * Queues a car state for asynchronous pattern matching and speech output;
 * returns immediately (work happens on the dispatcher thread).
 */
public void matchAndTrigger(CarState state) {
    dispatchThread.addDispatchTask(state);
}
/**
 * Background thread that turns queued {@link CarState}s into speech. States
 * are queued via {@link #addDispatchTask} and processed in arrival order.
 */
private class DispatcherThread extends Thread {
    /** Pending actions; all access is guarded by {@code synchronized (this)}. */
    private Vector<DispatchAction> actions;

    public DispatcherThread() {
        super("CarChaseTTS Dispatcher");
        actions = new Vector<DispatchAction>();
    }

    public void run() {
        while (true) {
            Vector<DispatchAction> toDispatch = new Vector<DispatchAction>();
            synchronized (this) {
                // Guarding wait() with an emptiness check fixes a lost-wakeup
                // race in the original code: a task added (and notified)
                // between draining the queue and re-entering wait() would
                // leave this thread sleeping on a non-empty queue.
                while (actions.isEmpty()) {
                    try {
                        wait();
                    } catch (InterruptedException e) {
                        // wait() clears the interrupt flag; just re-check the
                        // queue (matches the original "ignore" behavior).
                    }
                }
                toDispatch.addAll(actions);
                actions.clear();
            }
            // Dispatch outside the lock so producers are never blocked on TTS.
            for (DispatchAction action : toDispatch) {
                dispatch(action);
            }
        }
    }

    /**
     * Matches the state against all patterns and speaks the best message:
     * prefers a continuation of the utterance currently being spoken, falls
     * back to starting a fresh sentence.
     */
    private void dispatch(DispatchAction a) {
        CarChaseArticulatable startAction = null;
        CarChaseArticulatable continuationAction = null;
        CarChaseArticulatable lastIU = (CarChaseArticulatable) articulator.getLast();
        boolean continuationPossible = articulator.isSpeaking();
        ArrayList<Pattern> matchings = new ArrayList<CarChaseTTS.Pattern>();
        for (Pattern p : patterns) {
            if (p.doesMatch(a.state))
                matchings.add(p);
        }
        if (matchings.isEmpty()) return;
        // Important to do this here, otherwise
        // we can't be sure with the types.
        articulator.reduceOffset();
        // Take the first available start and continuation candidates.
        for (Pattern p : matchings) {
            if (startAction == null)
                startAction = p.findMatch(a.state, null);
            if (continuationAction == null && continuationPossible)
                continuationAction = p.findMatch(a.state, lastIU);
            if (startAction != null && (!continuationPossible || continuationAction != null)) break;
        }
        if (startAction == null)
            return;
        CarChaseArticulatable finalAction = startAction;
        if (continuationPossible && continuationAction != null)
            finalAction = continuationAction;
        articulator.say(finalAction);
    }

    /** Enqueues a state and wakes the dispatcher thread. */
    private void addDispatchTask(CarState state) {
        synchronized (this) {
            actions.add(new DispatchAction(state));
            notify();
        }
    }

    /** Immutable wrapper for a queued car state. */
    private class DispatchAction {
        private CarState state;

        public DispatchAction(CarState state) {
            this.state = state;
        }
    }
}
/**
 * One instantiated message template: its text, how it connects to the
 * previous/next utterance (start/end types), its information level, and
 * whether it may be skipped.
 */
public static class Message {
    // How this message joins onto the preceding / following utterance.
    public MessageType typeStart, typeEnd;
    // Verbosity level (T1 terse .. T3 detailed).
    public MessageInformationLevel ilevel;
    // The (already variable-substituted) text to speak.
    public String text;
    // Whether this message may be dropped entirely.
    public boolean optional;

    private Message(MessageType typeStart, MessageType typeEnd, MessageInformationLevel type, String text, boolean optional) {
        this.typeStart = typeStart;
        this.typeEnd = typeEnd;
        this.ilevel = type;
        this.text = text;
        this.optional = optional;
    }

    public String toString() {
        return "[TTSAction text=" + text + ",level=" + ilevel.toString() + "]";
    }
}
/** Verbosity levels for generated messages, coupled to the car's speed. */
public static enum MessageInformationLevel {
    T1, // Information Level 1, high speed
    T2, // Information Level 2, normal speed
    T3; // Information Level 3, low speed

    /** Maps an integer to a level, clamping into the valid range [1, 3]. */
    public static MessageInformationLevel fromInteger(int level) {
        int clamped = Math.max(1, Math.min(3, level));
        return MessageInformationLevel.valueOf("T" + clamped);
    }
}
// Example: Das Auto faehrt in den Kreisel. (Begin: S2, End: S1)
// Example: und faehrt in den Kreisel. (Begin: R1, End: S1)
// Example: Das Auto faehrt auf die Kreuzung zu und (Begin: S1, End: R2)
/**
 * Connection type at the start/end of a message. F-types ("free") can begin
 * or end an utterance on their own; R-types ("requires") must attach to a
 * sentence of matching manner that is currently being spoken. The digit
 * suffix is the "manner" used for matching start/end pairs.
 */
public static enum MessageType {
    F1(false),
    F2(false),
    F3(false),
    R1(true),
    R2(true),
    R3(true),
    R4(true),
    R5(true),
    R6(true);
    // Possible combinations: [ F1 F1 ] [ R1 F2 ] [ R2 F1 ] [ F1 F1 ]
    // R requires a preceding sentence that is currently being spoken;
    // F always works when the previous sentence needs no follow-up.
    private boolean requiresSentence;
    // Numeric manner parsed from the constant name's digit (e.g. R2 -> 2).
    private int manner;

    private MessageType(boolean requiresSentence) {
        this.requiresSentence = requiresSentence;
        this.manner = Integer.parseInt(toString().substring(1));
    }

    /**
     * Is a sentence of the same type required at the beginning / ending? If no, it is a valid beginning / ending of a sentence.
     * @return true if this type must attach to an ongoing sentence.
     */
    public boolean requiresSentence() {
        return requiresSentence;
    }

    /** The manner digit used to pair a message's start with the previous end. */
    public int getManner() {
        return manner;
    }
}
public class Pattern {
protected boolean optional;
public HashMap<String, Message> templates;
public ArrayList<Condition> conditions;
/**
 * Creates an empty pattern.
 *
 * @param optional whether messages produced by this pattern may be skipped.
 */
public Pattern(boolean optional) {
    templates = new HashMap<String, Message>();
    conditions = new ArrayList<Condition>();
    this.optional = optional;
}
// Requires: XY#OP#XY
/** Parses a "left#operator#right" condition line and stores the condition. */
public void addCondition(String conditionLine) {
    String[] condParts = conditionLine.split("#");
    // Parts: [0] = left side, [1] = operator, [2] = right side.
    conditions.add(new Condition(condParts[0], condParts[2], condParts[1]));
}
/** Registers a message template under {@code key}; warns on duplicate keys. */
public void addTemplate(String key, String value, MessageType sortStart, MessageType sortEnd, MessageInformationLevel type) {
    if (templates.containsKey(key)) CarChase.log("WARNING: Duplicate key", key);
    templates.put(key, new Message(sortStart, sortEnd, type, value, optional));
}
/**
 * Builds the substitution dictionary ("*VARIABLE" -> value) for the given
 * car state: street names (raw and flexed), directions, points, speeds,
 * topology flags, and junction info for the current and previous point.
 */
private StringDict instantiateVariables(CarState s) {
    World w = CarChase.get().world();
    Street currentStreet = w.streets.get(s.streetName);
    Street prevStreet = w.streets.get(s.prevStreetName);
    WorldPoint nextPoint = w.points.get(s.nextPointName);
    WorldPoint prevPoint = w.points.get(s.prevPointName);
    StringDict replace = new StringDict();
    // Internal (key) street names.
    replace.set("*INTSTREET", s.streetName);
    replace.set("*INTPREVSTREET", s.prevStreetName);
    // Fall back to the raw key when a street has no registered replacement.
    StreetReplacement streetRpl = streetNames.get(s.streetName);
    if (streetRpl == null) streetRpl = new StreetReplacement(s.streetName);
    StreetReplacement prevStreetRpl = streetNames.get(s.prevStreetName);
    if (prevStreetRpl == null) prevStreetRpl = new StreetReplacement(s.prevStreetName);
    // Human-readable and flexed street names.
    replace.set("*STREET", streetRpl.name);
    replace.set("*PREVSTREET", prevStreetRpl.name);
    replace.set("*FLEX1STREET", streetRpl.flex1);
    replace.set("*FLEX1PREVSTREET", prevStreetRpl.flex1);
    replace.set("*FLEX2STREET", streetRpl.flex2);
    replace.set("*FLEX2PREVSTREET", prevStreetRpl.flex2);
    // Movement state.
    replace.set("*DIRECTION", s.direction + "");
    replace.set("*PREVDIRECTION", s.prevDirection + "");
    replace.set("*POINTNAME", s.nextPointName);
    replace.set("*PREVPOINTNAME", s.prevPointName);
    replace.set("*SPEED", "" + s.speed);
    replace.set("*PREVSPEED", "" + s.prevSpeed);
    // Topology flags (booleans encoded as 0/1 strings).
    replace.set("*BIDIRECTIONAL", "" + (currentStreet.bidirectional ? 1 : 0));
    replace.set("*NUMSTREETS", "" + nextPoint.streets.size());
    replace.set("*LEFTRIGHT", "" + (s.lr == 1 ? left : right));
    // Junctions
    applyJunction(nextPoint, currentStreet, replace, s.direction, nextPoint.streets, false);
    applyJunction(prevPoint, prevStreet, replace, s.prevDirection, prevPoint.streets, true);
    return replace;
}
/**
 * Returns true if every condition of this pattern holds for the given state.
 * Distance conditions ("*DISTANCE") compare the condition's threshold against
 * the interval (prevDistance, currentDistance]; the branch for
 * previousDistance &gt; currentDistance presumably handles a wrap-around or
 * direction change — TODO confirm against the distance model.
 */
public boolean doesMatch(CarState s) {
    StringDict replace = instantiateVariables(s);
    for (Condition cond : conditions) {
        String instancedLeftSide = instanciate(cond.leftSide, replace);
        String instancedRightSide = instanciate(cond.rightSide, replace);
        if (cond.isDistance) {
            int distance = Integer.parseInt(instancedRightSide);
            if (cond.operator == '=') {
                // '=' means: the threshold was crossed during the last step.
                if (s.previousDistance > s.currentDistance) {
                    if (distance >= s.previousDistance || distance < s.currentDistance) return false;
                }
                else if (distance < s.previousDistance || distance >= s.currentDistance) return false;
            } else if (cond.operator == '>') {
                if (s.currentDistance <= distance) return false;
            }
        } else {
            // Non-distance conditions: string (in)equality or numeric compare.
            if (cond.operator == '=' && !instancedLeftSide.equals(instancedRightSide)) return false;
            else if (cond.operator == '!' && instancedLeftSide.equals(instancedRightSide)) return false;
            else if (cond.operator == '<' || cond.operator == '>') {
                int left = Integer.parseInt(instancedLeftSide);
                int right = Integer.parseInt(instancedRightSide);
                if (cond.operator == '<' && left >= right) return false;
                if (cond.operator == '>' && left <= right) return false;
            }
        }
    }
    return true;
}
/**
 * Picks a message from this pattern for the given state. If
 * {@code lastArticulatable} is non-null, only messages whose start type is
 * compatible with the last utterance's end type are considered; otherwise
 * only self-standing (non-R) starts. Among compatible messages, the one
 * closest to the speed-derived information level is preferred, and it is
 * paired with a shorter fallback message of matching start/end types when
 * one exists.
 *
 * @return a new articulatable, or null if nothing compatible was found.
 */
public CarChaseArticulatable findMatch(CarState s, CarChaseArticulatable lastArticulatable) {
    Message last = lastArticulatable != null ? lastArticulatable.preferred : null;
    StringDict replace = instantiateVariables(s);
    // Substitute all variables into every template of this pattern.
    ArrayList<Message> instancedMessages = new ArrayList<Message>();
    for (Map.Entry<String, Message> entry : templates.entrySet())
        instancedMessages.add(new Message(entry.getValue().typeStart, entry.getValue().typeEnd, entry.getValue().ilevel, instanciate(entry.getValue().text, replace), entry.getValue().optional));
    Message[] matches = instancedMessages.toArray(new Message[0]);
    // NOTE(review): toArray never returns null, so this check is dead code.
    if (matches == null) return null;
    // Bucket compatible messages by information level.
    HashMap<MessageInformationLevel,ArrayList<Message>> messages = new HashMap<MessageInformationLevel,ArrayList<Message>>();
    for (MessageInformationLevel level : MessageInformationLevel.values())
        messages.put(level, new ArrayList<Message>());
    // Compatibility rules (note the deliberate dangling-else structure):
    // - no previous utterance: only free (non-R) starts qualify;
    // - previous end requires continuation: start must be R with same manner;
    // - previous end is free: an R start with matching manner still qualifies.
    for (Message message : matches)
        if (last == null)
            if (!message.typeStart.requiresSentence())
                messages.get(message.ilevel).add(message);
            else;
        else if (last.typeEnd.requiresSentence())
            if (message.typeStart.getManner() == last.typeEnd.getManner() && message.typeStart.requiresSentence())
                messages.get(message.ilevel).add(message);
            else;
        else if (message.typeStart.requiresSentence())
            if (message.typeStart.getManner() == last.typeEnd.getManner())
                messages.get(message.ilevel).add(message);
    Random random = new Random();
    // Compute ideal information level: slower car -> more detail.
    MessageInformationLevel informationLevel = MessageInformationLevel.fromInteger(4 - s.speed);
    ArrayList<Message> posPreferred = null, posShorter = null;
    if (messages.get(informationLevel).size() > 0) {
        posPreferred = messages.get(informationLevel);
    } else {
        // Widen the search symmetrically around the ideal level.
        for (int distance = 1; distance < MessageInformationLevel.values().length; distance++) {
            MessageInformationLevel lowerLevel = MessageInformationLevel.fromInteger(4 - s.speed - distance);
            MessageInformationLevel higherLevel = MessageInformationLevel.fromInteger(4 - s.speed + distance);
            if (messages.get(lowerLevel).size() > 0) {
                posPreferred = messages.get(lowerLevel);
                break;
            }
            else if (messages.get(higherLevel).size() > 0) {
                posPreferred = messages.get(higherLevel);
                break;
            }
        }
    }
    // The shortest non-empty level serves as the fallback pool.
    for (int i = 1; i < MessageInformationLevel.values().length; i++) {
        if (messages.get(MessageInformationLevel.fromInteger(i)).size() > 0) {
            posShorter = messages.get(MessageInformationLevel.fromInteger(i));
            break;
        }
    }
    if (posPreferred == null) return null;
    if (posShorter == null) posShorter = new ArrayList<Message>();
    // now we find a pair of preferred and shorter message with the same start and end types.
    HashMap<Message, Message> mapping = new HashMap<Message, Message>();
    for (Message pref : posPreferred) {
        boolean put = false;
        for (Message m : posShorter) {
            if (pref.typeStart != m.typeStart || pref.typeEnd != m.typeEnd)
                continue;
            mapping.put(pref, m);
            put = true;
            break;
        }
        if (!put) mapping.put(pref, null);
    }
    if (mapping.size() == 0) return null;
    // Pick one (preferred, shorter) pair at random.
    int chosen = random.nextInt(mapping.size());
    @SuppressWarnings("unchecked")
    Map.Entry<Message, Message> result = (Map.Entry<Message, Message>) mapping.entrySet().toArray()[chosen];
    return new CarChaseArticulatable(result.getKey(), result.getValue(), optional);
}
/**
 * Detects whether {@code point} is a usable junction of {@code street} with
 * exactly one crossing street, and if so adds the crossing street's name
 * variants to {@code replace}. Sets "*ISJUNCTION" / "*WASJUNCTION" to
 * 0 (none), 1 (junction mid-street) or 2 (junction at end of street).
 *
 * @param was true when describing the previous point (prefixes WAS/PREV).
 */
private void applyJunction(WorldPoint point, Street street, StringDict replace, int direction, ArrayList<String> streetNamesCrossNextPoint, boolean was) {
    String prefix = was ? "WAS" : "IS";
    String prevPrefix = was ? "PREV" : "";
    // No next point in this direction means the street ends here.
    boolean isEndOfStreet = street.fetchNextPoint(point, direction) == null;
    int isJunction = 0;
    // Only the two-street case (current street + one crossing) is handled.
    if (streetNamesCrossNextPoint.size() == 2 && streetNamesCrossNextPoint.indexOf(street.name) >= 0){
        isJunction = isEndOfStreet ? 2 : 1;
        // With exactly two streets, the other one has index 1 - ours.
        int indexOfOther = 1 - streetNamesCrossNextPoint.indexOf(street.name);
        assert streetNamesCrossNextPoint.indexOf(street.name) >= 0 : "Something somewhere went terribly wrong: " + streetNamesCrossNextPoint.toString() + street.name;
        String streetName = streetNamesCrossNextPoint.get(indexOfOther);
        Street crossStreet = CarChase.get().world().streets.get(streetName);
        int indexInCross = crossStreet.streetPoints.indexOf(point.name);
        // If the point is an endpoint of the crossing street, it is not a
        // "real" junction (the cross street merely starts/ends here).
        if (indexInCross <= 0 || indexInCross >= crossStreet.streetPoints.size() - 1)
            isJunction = 0;
        else {
            StreetReplacement streetRpl = streetNames.get(streetName);
            if (streetRpl == null) streetRpl = new StreetReplacement(streetName);
            replace.set("*INT" + prevPrefix + "JUNCTIONSTREET", streetName);
            replace.set("*" + prevPrefix + "JUNCTIONSTREET", streetRpl.name);
            replace.set("*FLEX1" + prevPrefix + "JUNCTIONSTREET", streetRpl.flex1);
            replace.set("*FLEX2" + prevPrefix + "JUNCTIONSTREET", streetRpl.flex2);
        }
    }
    replace.set("*" + prefix + "JUNCTION", "" + isJunction);
}
/**
 * One parsed pattern condition: left side, single-character operator
 * ('=', '!', '<', '>'), and right side. Conditions whose left side is the
 * special variable "*DISTANCE" are flagged for interval-based evaluation.
 */
private class Condition {
    public String leftSide;
    public String rightSide;
    public char operator;
    public boolean isDistance;

    public Condition(String lhs, String rhs, String opSymbol) {
        leftSide = lhs;
        rightSide = rhs;
        // Only the first character of the operator token is significant.
        operator = opSymbol.charAt(0);
        isDistance = lhs.equals("*DISTANCE");
    }
}
/** Applies every key -> value substitution of the dictionary to the string. */
private String instanciate(String original, StringDict replace) {
    String result = original;
    for (String variable : replace.keyArray()) {
        result = result.replace((CharSequence) variable, replace.get(variable));
    }
    return result;
}
}
/**
 * An utterance candidate handed to the articulator: a preferred message plus
 * an optional shorter fallback with the same start/end connection types.
 */
public static class CarChaseArticulatable extends Articulatable {
    private Message preferred, shorter;
    private boolean optional;

    public CarChaseArticulatable(Message preferred, Message shorter, boolean optional) {
        this.preferred = preferred;
        this.shorter = shorter;
        this.optional = optional;
    }

    public String getPreferredText() {
        return preferred.text;
    }

    /** The fallback text, or null when no shorter variant exists. */
    public String getShorterText() {
        if (shorter == null) return null;
        return shorter.text;
    }

    public boolean isOptional() {
        return optional;
    }

    /** Replaceable when the other candidate starts with the same type. */
    public boolean canReplace(Articulatable other) {
        // `instanceof` already yields false for null, so the original's
        // separate null check was redundant.
        if (!(other instanceof CarChaseArticulatable)) return false;
        CarChaseArticulatable cca = (CarChaseArticulatable) other;
        return cca.preferred.typeStart == preferred.typeStart;
    }

    public void setUseOfShorterText(boolean value) {}

    public String toString() {
        return "----\n--pr-" + preferred.text + "\n--sh-" + (shorter == null ? "null" : shorter.text);
    }
}
/**
 * Spoken forms of a street: its display name and two flexion variants.
 * The single-argument constructor builds default flexions from the outer
 * class's flexForm1/flexForm2 prefixes (set by "flex1"/"flex2" lines in the
 * patterns file).
 */
private class StreetReplacement {
    public String name, flex1, flex2;

    public StreetReplacement(String name, String flex1, String flex2) {
        this.name = name;
        this.flex1 = flex1;
        this.flex2 = flex2;
    }

    public StreetReplacement(String name) {
        this(name, flexForm1 + " " + name, flexForm2 + " " + name);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.igfs;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import org.apache.ignite.igfs.IgfsMetrics;
import org.apache.ignite.internal.util.typedef.internal.S;
/**
 * IGFS metrics adapter.
 *
 * <p>A simple mutable holder for the values exposed through {@link IgfsMetrics}.
 * Instances are populated either via the full constructor or via
 * {@link Externalizable} deserialization; the accessors echo the stored values as-is.
 */
public class IgfsMetricsAdapter implements IgfsMetrics, Externalizable {
/** Serial version UID. */
private static final long serialVersionUID = 0L;
/** Used space on local node. */
private long locSpaceSize;
/** Maximum space. */
private long maxSpaceSize;
/** Secondary file system used space. */
private long secondarySpaceSize;
/** Number of directories. */
private int dirsCnt;
/** Number of files. */
private int filesCnt;
/** Number of files opened for read. */
private int filesOpenedForRead;
/** Number of files opened for write. */
private int filesOpenedForWrite;
/** Total blocks read. */
private long blocksReadTotal;
/** Total blocks remote read. */
private long blocksReadRmt;
/** Total blocks write. */
private long blocksWrittenTotal;
/** Total blocks write remote. */
private long blocksWrittenRmt;
/** Total bytes read. */
private long bytesRead;
/** Total bytes read time. */
private long bytesReadTime;
/** Total bytes write. */
private long bytesWritten;
/** Total bytes write time. */
private long bytesWriteTime;
/**
 * No-arg constructor required for {@link Externalizable} support; fields are
 * filled in by {@link #readExternal(ObjectInput)}.
 */
public IgfsMetricsAdapter() {
// No-op.
}
/**
 * @param locSpaceSize Used space on local node.
 * @param maxSpaceSize Maximum space size.
 * @param secondarySpaceSize Secondary space size.
 * @param dirsCnt Number of directories.
 * @param filesCnt Number of files.
 * @param filesOpenedForRead Number of files opened for read.
 * @param filesOpenedForWrite Number of files opened for write.
 * @param blocksReadTotal Total blocks read.
 * @param blocksReadRmt Total blocks read remotely.
 * @param blocksWrittenTotal Total blocks written.
 * @param blocksWrittenRmt Total blocks written remotely.
 * @param bytesRead Total bytes read.
 * @param bytesReadTime Total bytes read time.
 * @param bytesWritten Total bytes written.
 * @param bytesWriteTime Total bytes write time.
 */
public IgfsMetricsAdapter(long locSpaceSize, long maxSpaceSize, long secondarySpaceSize, int dirsCnt,
int filesCnt, int filesOpenedForRead, int filesOpenedForWrite, long blocksReadTotal, long blocksReadRmt,
long blocksWrittenTotal, long blocksWrittenRmt, long bytesRead, long bytesReadTime, long bytesWritten,
long bytesWriteTime) {
this.locSpaceSize = locSpaceSize;
this.maxSpaceSize = maxSpaceSize;
this.secondarySpaceSize = secondarySpaceSize;
this.dirsCnt = dirsCnt;
this.filesCnt = filesCnt;
this.filesOpenedForRead = filesOpenedForRead;
this.filesOpenedForWrite = filesOpenedForWrite;
this.blocksReadTotal = blocksReadTotal;
this.blocksReadRmt = blocksReadRmt;
this.blocksWrittenTotal = blocksWrittenTotal;
this.blocksWrittenRmt = blocksWrittenRmt;
this.bytesRead = bytesRead;
this.bytesReadTime = bytesReadTime;
this.bytesWritten = bytesWritten;
this.bytesWriteTime = bytesWriteTime;
}
/** {@inheritDoc} */
@Override public long localSpaceSize() {
return locSpaceSize;
}
/** {@inheritDoc} */
@Override public long maxSpaceSize() {
return maxSpaceSize;
}
/** {@inheritDoc} */
@Override public long secondarySpaceSize() {
return secondarySpaceSize;
}
/** {@inheritDoc} */
@Override public int directoriesCount() {
return dirsCnt;
}
/** {@inheritDoc} */
@Override public int filesCount() {
return filesCnt;
}
/** {@inheritDoc} */
@Override public int filesOpenedForRead() {
return filesOpenedForRead;
}
/** {@inheritDoc} */
@Override public int filesOpenedForWrite() {
return filesOpenedForWrite;
}
/** {@inheritDoc} */
@Override public long blocksReadTotal() {
return blocksReadTotal;
}
/** {@inheritDoc} */
@Override public long blocksReadRemote() {
return blocksReadRmt;
}
/** {@inheritDoc} */
@Override public long blocksWrittenTotal() {
return blocksWrittenTotal;
}
/** {@inheritDoc} */
@Override public long blocksWrittenRemote() {
return blocksWrittenRmt;
}
/** {@inheritDoc} */
@Override public long bytesRead() {
return bytesRead;
}
/** {@inheritDoc} */
@Override public long bytesReadTime() {
return bytesReadTime;
}
/** {@inheritDoc} */
@Override public long bytesWritten() {
return bytesWritten;
}
/** {@inheritDoc} */
@Override public long bytesWriteTime() {
return bytesWriteTime;
}
/**
 * {@inheritDoc}
 *
 * <p>The field order written here defines the wire format and must stay
 * exactly in sync with {@link #readExternal(ObjectInput)}.
 */
@Override public void writeExternal(ObjectOutput out) throws IOException {
out.writeLong(locSpaceSize);
out.writeLong(maxSpaceSize);
out.writeLong(secondarySpaceSize);
out.writeInt(dirsCnt);
out.writeInt(filesCnt);
out.writeInt(filesOpenedForRead);
out.writeInt(filesOpenedForWrite);
out.writeLong(blocksReadTotal);
out.writeLong(blocksReadRmt);
out.writeLong(blocksWrittenTotal);
out.writeLong(blocksWrittenRmt);
out.writeLong(bytesRead);
out.writeLong(bytesReadTime);
out.writeLong(bytesWritten);
out.writeLong(bytesWriteTime);
}
/**
 * {@inheritDoc}
 *
 * <p>Reads fields in the exact order written by {@link #writeExternal(ObjectOutput)}.
 */
@Override public void readExternal(ObjectInput in) throws IOException {
locSpaceSize = in.readLong();
maxSpaceSize = in.readLong();
secondarySpaceSize = in.readLong();
dirsCnt = in.readInt();
filesCnt = in.readInt();
filesOpenedForRead = in.readInt();
filesOpenedForWrite = in.readInt();
blocksReadTotal = in.readLong();
blocksReadRmt = in.readLong();
blocksWrittenTotal = in.readLong();
blocksWrittenRmt = in.readLong();
bytesRead = in.readLong();
bytesReadTime = in.readLong();
bytesWritten = in.readLong();
bytesWriteTime = in.readLong();
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(IgfsMetricsAdapter.class, this);
}
}
| |
/*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android.resources;
import com.google.common.base.Preconditions;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.nio.ByteBuffer;
import java.text.DecimalFormat;
import java.util.stream.Stream;
import javax.annotation.Nullable;
/**
 * ResTableType is a ResChunk holding the resource values for a given type and configuration. It
 * consists of: ResChunk_header u32 chunk_type u32 header_size u32 chunk_size u8 id u32 entry_count
 * u32 entry_start Config u32 config_size u8[config_size - 4] data
 *
 * <p>This is followed by entry_count u32s containing offsets from entry_start to the data for each
 * entry. If the offset for a resource is -1, that resource has no value in this configuration.
 *
 * <p>After the offsets comes the entry data. Each entry is of the form: u16 size u16 flags
 * StringRef key
 *
 * <p>If `(flags & FLAG_COMPLEX) == 0` this data is then followed by: ResValue value
 *
 * <p>Else it's followed by a map header: ResRef parent u32 count
 *
 * <p>and `count` map entries of the form: ResRef name ResValue value
 */
public class ResTableType extends ResChunk {
  /** Offset of the config struct within the chunk header. */
  private static final int CONFIG_OFFSET = 20;

  /** Entry flag: the entry is a map (bag) rather than a single ResValue. */
  private static final int FLAG_COMPLEX = 0x1;

  // Field offsets within a single map entry (ResRef name + ResValue), relative to entry start.
  public static final int ATTRIBUTE_NAME_REF_OFFSET = 0;
  public static final int ATTRIBUTE_SIZE_OFFSET = 4;
  public static final int ATTRIBUTE_TYPE_OFFSET = 7;
  public static final int ATTRIBUTE_DATA_OFFSET = 8;

  /** Zero-based type id (the on-disk u8 stores this value + 1). */
  private final int id;

  private final int entryCount;
  private final ByteBuffer config;
  private final ByteBuffer entryOffsets;
  private final ByteBuffer entryData;

  /**
   * Builds a copy of {@code type} restricted to its first {@code count} entries, with the entry
   * data compacted and the offsets rewritten accordingly.
   *
   * @return the sliced chunk, or null if none of the retained entries has any data
   */
  @Nullable
  public static ResTableType slice(ResTableType type, int count) {
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    int currentOffset = 0;
    ByteBuffer entryOffsets = wrap(new byte[count * 4]);
    for (int i = 0; i < count; i++) {
      int offset = type.getEntryValueOffset(i);
      if (offset == -1) {
        // Entry has no value in this configuration; keep the "absent" marker.
        entryOffsets.putInt(i * 4, -1);
      } else {
        entryOffsets.putInt(i * 4, currentOffset);
        int dataSize = type.getEntrySizeAtOffset(offset);
        currentOffset += dataSize;
        output.write(type.entryData.array(), type.entryData.arrayOffset() + offset, dataSize);
      }
    }
    byte[] entryData = output.toByteArray();
    if (entryData.length == 0) {
      return null;
    }
    int headerSize = type.getHeaderSize();
    int chunkSize = headerSize + count * 4 + entryData.length;
    return new ResTableType(
        headerSize, chunkSize, type.id, count, copy(type.config), entryOffsets, wrap(entryData));
  }

  /**
   * Returns the total byte size of the entry starting at {@code offset}: header + single ResValue
   * for simple entries, or header + all map entries for complex (bag) entries.
   *
   * <p>NOTE(review): sizes are read with getShort() and are therefore signed; entries larger than
   * 0x7FFF bytes would be mis-sized — presumably they never occur in practice. Confirm before
   * relying on this for very large bags.
   */
  private int getEntrySizeAtOffset(int offset) {
    int size = entryData.getShort(offset);
    int flags = entryData.getShort(offset + 2);
    if ((flags & FLAG_COMPLEX) == 0) {
      // Simple entry: header size + the ResValue's own size field.
      return size + entryData.getShort(offset + size);
    } else {
      int count = entryData.getInt(offset + 12);
      for (int i = 0; i < count; i++) {
        // Each map entry: 4-byte ResRef name, then a ResValue whose first u16 is its size.
        size += 4 + entryData.getShort(offset + size + 4);
      }
      return size;
    }
  }

  /** Serializes this chunk into {@code output}; see the class comment for the layout. */
  @Override
  public void put(ByteBuffer output) {
    Preconditions.checkState(output.remaining() >= getChunkSize());
    int start = output.position();
    putChunkHeader(output);
    output.put((byte) (id + 1)); // On-disk id is 1-based.
    output.put((byte) 0); // reserved
    output.putShort((short) 0); // reserved (was misleadingly cast to byte; putShort takes a short)
    output.putInt(entryCount);
    output.putInt(getHeaderSize() + 4 * entryCount); // entry_start
    output.put(slice(config, 0));
    output.put(slice(entryOffsets, 0));
    output.put(slice(entryData, 0));
    Preconditions.checkState(output.position() == start + getChunkSize());
  }

  /** Parses a ResTableType chunk starting at the buffer's current position. */
  public static ResTableType get(ByteBuffer buf) {
    int type = buf.getShort();
    int headerSize = buf.getShort();
    int chunkSize = buf.getInt();
    int id = (buf.get() & 0xFF) - 1; // On-disk id is 1-based; stored 0-based here.
    buf.get(); // ignored
    buf.getShort(); // ignored
    int entryCount = buf.getInt();
    int entriesOffset = buf.getInt();
    int configSize = buf.getInt(buf.position());
    int entryDataSize = chunkSize - headerSize - 4 * entryCount;
    Preconditions.checkState(type == CHUNK_RES_TABLE_TYPE);
    Preconditions.checkState(headerSize == configSize + CONFIG_OFFSET);
    return new ResTableType(
        headerSize,
        chunkSize,
        id,
        entryCount,
        slice(buf, CONFIG_OFFSET, configSize),
        slice(buf, headerSize, entryCount * 4),
        slice(buf, entriesOffset, entryDataSize));
  }

  private ResTableType(
      int headerSize,
      int chunkSize,
      int id,
      int entryCount,
      ByteBuffer config,
      ByteBuffer entryOffsets,
      ByteBuffer entryData) {
    super(CHUNK_RES_TABLE_TYPE, headerSize, chunkSize);
    this.id = id;
    this.entryCount = entryCount;
    this.config = config;
    this.entryOffsets = entryOffsets;
    this.entryData = entryData;
  }

  /** Dumps every present entry of this chunk in an aapt-like textual form. */
  public void dump(StringPool strings, ResTablePackage resPackage, PrintStream out) {
    out.format(" config (unknown):\n");
    for (int entryIdx = 0; entryIdx < entryCount; entryIdx++) {
      int offset = getEntryValueOffset(entryIdx);
      if (offset != -1) {
        int size = entryData.getShort(offset);
        int flags = entryData.getShort(offset + 2);
        int refId = entryData.getInt(offset + 4);
        Preconditions.checkState(size > 0);
        // Some of the formatting of this line is shared between maps/non-maps.
        out.format(
            " resource 0x7f%02x%04x %s:%s/%s:",
            getResourceType(),
            entryIdx,
            resPackage.getPackageName(),
            getTypeName(resPackage),
            resPackage.getKeys().getString(refId));
        if ((flags & FLAG_COMPLEX) != 0) {
          out.format(" <bag>\n");
          int parent = entryData.getInt(offset + 8);
          int count = entryData.getInt(offset + 12);
          out.format(
              " Parent=0x%08x(Resolved=0x%08x), Count=%d\n",
              parent, parent == 0 ? 0x7F000000 : parent, count);
          int entryOffset = offset;
          for (int attrIdx = 0; attrIdx < count; attrIdx++) {
            int name = entryData.getInt(entryOffset + 16);
            int vsize = entryData.getShort(entryOffset + 20);
            int type = entryData.get(entryOffset + 23);
            int data = entryData.getInt(entryOffset + 24);
            String dataString = formatTypeForDump(strings, type, data);
            out.format(" #%d (Key=0x%08x): %s\n", attrIdx, name, dataString);
            entryOffset += 4 + vsize;
          }
        } else {
          int vsize = entryData.getShort(offset + 8);
          // empty(offset + 10)
          int type = entryData.get(offset + 11);
          int data = entryData.getInt(offset + 12);
          out.format(" t=0x%02x d=0x%08x (s=0x%04x r=0x00)\n", type, data, vsize);
          String dataString = formatTypeForDump(strings, type, data);
          out.format(" %s\n", dataString);
        }
      }
    }
  }

  /** Resolves this chunk's type name via the package's type string pool. */
  private String getTypeName(ResTablePackage resPackage) {
    return resPackage.getTypes().getString(id);
  }

  /** Formats a single typed value roughly the way aapt's dump does. */
  private String formatTypeForDump(StringPool strings, int type, int data) {
    String typeString;
    String dataString;
    switch (type) {
      case RES_REFERENCE:
        typeString = "reference";
        dataString = String.format("0x%08x", data);
        break;
      case RES_ATTRIBUTE:
        typeString = "attribute";
        dataString = String.format("0x%08x", data);
        break;
      case RES_STRING:
        typeString = "string" + (strings.isUtf8() ? "8" : "16");
        dataString = String.format("\"%s\"", strings.getOutputNormalizedString(data));
        break;
      case RES_FLOAT:
        typeString = "float";
        dataString = new DecimalFormat("0.######").format(Float.intBitsToFloat(data));
        break;
      case RES_DIMENSION:
        typeString = "dimension";
        dataString = formatComplex(data, false);
        break;
      case RES_FRACTION:
        typeString = "fraction";
        dataString = formatComplex(data, true);
        break;
      case RES_DECIMAL:
      case RES_HEX:
      case RES_BOOL:
      case RES_COLOR_ARGB4:
      case RES_COLOR_RGB4:
      case RES_COLOR_ARGB8:
      case RES_COLOR_RGB8:
        // NOTE(review): decimal/hex/bool intentionally share the "color" label here; this mirrors
        // the existing dump output — confirm before changing.
        typeString = "color";
        dataString = String.format("#%08x", data);
        break;
      default:
        typeString = String.format("unknown %02x", type);
        dataString = String.format("xxx 0x%08x", data);
    }
    return String.format("(%s) %s", typeString, dataString);
  }

  // Complex-value (dimension/fraction) bit layout constants.
  private static final int RADIX_SHIFT = 4;
  private static final int RADIX_MASK = 0x3;
  private static final int MANTISSA_SHIFT = 8;
  private static final int MANTISSA_MASK = 0xFFFFFF;
  private static final int[] RADIX_MULTS = {0, 7, 15, 23};
  private static final int UNIT_MASK = 0xF;
  private static final int UNIT_PX = 0;
  private static final int UNIT_DIP = 1;
  private static final int UNIT_SP = 2;
  private static final int UNIT_PT = 3;
  private static final int UNIT_IN = 4;
  private static final int UNIT_MM = 5;
  private static final int UNIT_FRACTION = 0;
  private static final int UNIT_FRACTION_PARENT = 1;

  // See print_complex() at
  // https://android.googlesource.com/platform/frameworks/base/+/kitkat-release/libs/androidfw/ResourceTypes.cpp
  // The implementation there is really silly and results in different values if implemented in Java
  // directly.
  private String formatComplex(int data, boolean isFraction) {
    int mantissa = (data >> MANTISSA_SHIFT) & MANTISSA_MASK;
    int exp = RADIX_MULTS[(data >> RADIX_SHIFT) & RADIX_MASK];
    float value = mantissa;
    if (exp != 0) {
      value = value / (1 << exp);
    }
    String unit;
    if (isFraction) {
      switch (data & UNIT_MASK) {
        case UNIT_FRACTION:
          unit = "%";
          break;
        case UNIT_FRACTION_PARENT:
          unit = "%p";
          break;
        default:
          unit = " (unknown unit)";
          break;
      }
    } else {
      switch (data & UNIT_MASK) {
        case UNIT_PX:
          unit = "px";
          break;
        case UNIT_DIP:
          unit = "dp";
          break;
        case UNIT_SP:
          unit = "sp";
          break;
        case UNIT_PT:
          unit = "pt";
          break;
        case UNIT_IN:
          unit = "in";
          break;
        case UNIT_MM:
          unit = "mm";
          break;
        default:
          unit = " (unknown unit)";
          break;
      }
    }
    return String.format("%f%s", value, unit);
  }

  /** @return the 1-based type id as it appears in resource references (0x7fTTEEEE). */
  int getResourceType() {
    return id + 1;
  }

  /**
   * Returns the key StringRef (the u32 at entry offset +4) of entry {@code id}, or -1 when the
   * entry has no value in this configuration.
   */
  public int getResourceRef(int id) {
    int offset = getEntryValueOffset(id);
    if (offset == -1) {
      return -1;
    }
    return entryData.getInt(offset + 4);
  }

  /** @return the entry-data offset for entry {@code i}, or -1 when the entry is absent. */
  public int getEntryValueOffset(int i) {
    return entryOffsets.getInt(i * 4);
  }

  public int getEntryCount() {
    return entryCount;
  }

  /** Applies {@code visitor} to the key StringRef of every present entry, rewriting in place. */
  public void transformKeyReferences(RefTransformer visitor) {
    for (int i = 0; i < entryCount; i++) {
      int offset = getEntryValueOffset(i);
      if (offset != -1) {
        transformEntryDataOffset(entryData, offset + 4, visitor);
      }
    }
  }

  /** Read-only variant of {@link #transformKeyReferences(RefTransformer)}. */
  public void visitKeyReferences(RefVisitor visitor) {
    transformKeyReferences(
        i -> {
          visitor.visit(i);
          return i;
        });
  }

  /** Applies {@code visitor} to every string-typed value within the entry at {@code offset}. */
  private void transformStringReferencesAt(RefTransformer visitor, int offset) {
    int flags = entryData.getShort(offset + 2);
    if ((flags & FLAG_COMPLEX) != 0) {
      int count = entryData.getInt(offset + 12);
      int entryOffset = offset;
      for (int j = 0; j < count; j++) {
        int name = entryData.getInt(entryOffset + 16);
        if ((name >> 24) == ResTablePackage.APP_PACKAGE_ID) {
          // App-package attribute names must carry a non-zero type byte.
          Preconditions.checkState(((name >> 16) & 0xFF) != 0);
        }
        int vsize = entryData.getShort(entryOffset + 20);
        int type = entryData.get(entryOffset + 23);
        if (type == RES_STRING) {
          transformEntryDataOffset(entryData, entryOffset + 24, visitor);
        }
        entryOffset += 4 + vsize;
      }
    } else {
      int type = entryData.get(offset + 11);
      if (type == RES_STRING) {
        transformEntryDataOffset(entryData, offset + 12, visitor);
      }
    }
  }

  /** Applies {@code visitor} to every string-typed value of every present entry. */
  public void transformStringReferences(RefTransformer visitor) {
    for (int i = 0; i < entryCount; i++) {
      int offset = getEntryValueOffset(i);
      if (offset != -1) {
        transformStringReferencesAt(visitor, offset);
      }
    }
  }

  /** Like {@link #transformStringReferences(RefTransformer)} but only for {@code idsToVisit}. */
  public void transformStringReferences(int[] idsToVisit, RefTransformer visitor) {
    for (int i : idsToVisit) {
      int offset = getEntryValueOffset(i);
      if (offset != -1) {
        transformStringReferencesAt(visitor, offset);
      }
    }
  }

  /** Read-only variant of {@link #transformStringReferences(RefTransformer)}. */
  public void visitStringReferences(RefVisitor visitor) {
    transformStringReferences(
        i -> {
          visitor.visit(i);
          return i;
        });
  }

  /** Read-only variant of {@link #transformStringReferences(int[], RefTransformer)}. */
  public void visitStringReferences(int[] idsToVisit, RefVisitor visitor) {
    transformStringReferences(
        idsToVisit,
        i -> {
          visitor.visit(i);
          return i;
        });
  }

  /**
   * Applies {@code visitor} to every resource reference within the entry at {@code offset}: the
   * bag parent, each attribute name, and any reference/attribute-typed values. Bag attributes are
   * re-sorted afterwards since remapping can change their relative order.
   */
  private void transformReferencesAt(RefTransformer visitor, int offset) {
    int flags = entryData.getShort(offset + 2);
    if ((flags & FLAG_COMPLEX) != 0) {
      int parent = entryData.getInt(offset + 8);
      if (parent != 0) {
        // An attribute map can derive from another. If it does, visit that parent.
        transformEntryDataOffset(entryData, offset + 8, visitor);
      }
      int count = entryData.getInt(offset + 12);
      int entryStart = offset + 16;
      int entryOffset = entryStart;
      for (int j = 0; j < count; j++) {
        // Visit the name attribute reference.
        transformEntryDataOffset(entryData, entryOffset + ATTRIBUTE_NAME_REF_OFFSET, visitor);
        int type = entryData.get(entryOffset + ATTRIBUTE_TYPE_OFFSET);
        if (type == RES_REFERENCE || type == RES_ATTRIBUTE) {
          // Visit the value if it's a reference.
          transformEntryDataOffset(entryData, entryOffset + ATTRIBUTE_DATA_OFFSET, visitor);
        } else if (type == RES_DYNAMIC_REFERENCE || type == RES_DYNAMIC_ATTRIBUTE) {
          throw new UnsupportedOperationException();
        }
        int size = entryData.getShort(entryOffset + ATTRIBUTE_SIZE_OFFSET);
        entryOffset += 4 + size;
      }
      sortAttributesAt(entryData, count, entryStart);
    } else {
      int type = entryData.get(offset + 11);
      if (type == RES_REFERENCE || type == RES_ATTRIBUTE) {
        // Visit the value if it's a reference.
        transformEntryDataOffset(entryData, offset + 12, visitor);
      } else if (type == RES_DYNAMIC_REFERENCE || type == RES_DYNAMIC_ATTRIBUTE) {
        throw new UnsupportedOperationException();
      }
    }
  }

  /**
   * Re-sorts the {@code attrCount} map entries starting at {@code attrStart} by their (possibly
   * remapped) attribute resource id, rewriting the bag in place.
   */
  private void sortAttributesAt(ByteBuffer entryData, int attrCount, int attrStart) {
    class AttrRef implements Comparable<AttrRef> {
      final int offset;
      final int size;
      final int resId;

      AttrRef(int offset) {
        this.offset = offset;
        this.resId = entryData.getInt(offset + ATTRIBUTE_NAME_REF_OFFSET);
        this.size = 4 + entryData.getShort(offset + ATTRIBUTE_SIZE_OFFSET);
      }

      @Override
      public int compareTo(AttrRef other) {
        // Integer.compare avoids the overflow that `resId - other.resId` could hit when
        // comparing ids with the sign bit set (package ids >= 0x80).
        return Integer.compare(resId, other.resId);
      }
    }
    Stream.Builder<AttrRef> builder = Stream.builder();
    int entryOffset = attrStart;
    for (int j = 0; j < attrCount; j++) {
      AttrRef ref = new AttrRef(entryOffset);
      builder.add(ref);
      entryOffset += ref.size;
    }
    byte[] newData = new byte[entryOffset - attrStart];
    ByteBuffer newBuf = wrap(newData);
    builder
        .build()
        .sorted()
        .forEachOrdered(ref -> newBuf.put(slice(entryData, ref.offset, ref.size)));
    slice(entryData, attrStart).put(newData);
  }

  /** Applies {@code visitor} to every resource reference of every present entry. */
  public void transformReferences(RefTransformer visitor) {
    for (int i = 0; i < entryCount; i++) {
      int offset = getEntryValueOffset(i);
      if (offset != -1) {
        transformReferencesAt(visitor, offset);
      }
    }
  }

  /** Like {@link #transformReferences(RefTransformer)} but only for the given entry {@code ids}. */
  public void transformReferences(int[] ids, RefTransformer visitor) {
    for (int i : ids) {
      int offset = getEntryValueOffset(i);
      if (offset != -1) {
        transformReferencesAt(visitor, offset);
      }
    }
  }

  /** Read-only variant of {@link #transformReferences(int[], RefTransformer)}. */
  public void visitReferences(int[] ids, RefVisitor visitor) {
    transformReferences(
        ids,
        i -> {
          visitor.visit(i);
          return i;
        });
  }

  /** Remaps all references via {@code refMapping} and rewrites this type's entry offsets. */
  public void reassignIds(ReferenceMapper refMapping) {
    transformReferences(refMapping::map);
    refMapping.rewrite(getResourceType(), entryOffsets.asIntBuffer());
  }
}
| |
/*
* Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hyracks.algebricks.rewriter.util;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.BroadcastExpressionAnnotation;
import org.apache.hyracks.algebricks.core.algebra.expressions.BroadcastExpressionAnnotation.BroadcastSide;
import org.apache.hyracks.algebricks.core.algebra.expressions.IExpressionAnnotation;
import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions.ComparisonKind;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.LogicalPropertiesVisitor;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.AbstractJoinPOperator.JoinPartitioningType;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.HybridHashJoinPOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.InMemoryHashJoinPOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.NLJoinPOperator;
import org.apache.hyracks.algebricks.core.algebra.properties.ILogicalPropertiesVector;
import org.apache.hyracks.algebricks.core.config.AlgebricksConfig;
/**
 * Static helpers that pick the physical algorithm for a logical binary join: a hash-based join
 * when the condition is a conjunction of variable equalities, a nested-loop join otherwise; and
 * broadcast vs. pairwise partitioning depending on an optional broadcast hint on the condition.
 */
public class JoinUtils {
/**
 * Inspects {@code op}'s condition and installs the appropriate physical join operator,
 * possibly swapping the join inputs to honor a LEFT broadcast hint.
 *
 * @param op the logical binary join whose physical operator is to be set
 * @param context optimization context (supplies the physical configuration)
 * @throws AlgebricksException propagated from physical-operator setup
 */
public static void setJoinAlgorithmAndExchangeAlgo(AbstractBinaryJoinOperator op, IOptimizationContext context)
throws AlgebricksException {
List<LogicalVariable> sideLeft = new LinkedList<LogicalVariable>();
List<LogicalVariable> sideRight = new LinkedList<LogicalVariable>();
List<LogicalVariable> varsLeft = op.getInputs().get(0).getValue().getSchema();
List<LogicalVariable> varsRight = op.getInputs().get(1).getValue().getSchema();
if (isHashJoinCondition(op.getCondition().getValue(), varsLeft, varsRight, sideLeft, sideRight)) {
BroadcastSide side = getBroadcastJoinSide(op.getCondition().getValue(), varsLeft, varsRight);
if (side == null) {
// No broadcast hint: repartition both inputs pairwise on the join keys.
setHashJoinOp(op, JoinPartitioningType.PAIRWISE, sideLeft, sideRight, context);
} else {
switch (side) {
case RIGHT:
setHashJoinOp(op, JoinPartitioningType.BROADCAST, sideLeft, sideRight, context);
break;
case LEFT:
// To broadcast the left input, swap the two inputs (and the key lists) so the
// hinted side becomes the second input.
Mutable<ILogicalOperator> opRef0 = op.getInputs().get(0);
Mutable<ILogicalOperator> opRef1 = op.getInputs().get(1);
ILogicalOperator tmp = opRef0.getValue();
opRef0.setValue(opRef1.getValue());
opRef1.setValue(tmp);
setHashJoinOp(op, JoinPartitioningType.BROADCAST, sideRight, sideLeft, context);
break;
default:
setHashJoinOp(op, JoinPartitioningType.PAIRWISE, sideLeft, sideRight, context);
}
}
} else {
// Not an equi-join condition: fall back to nested-loop join.
setNLJoinOp(op, context);
}
}
/** Installs a nested-loop join with broadcast partitioning. */
private static void setNLJoinOp(AbstractBinaryJoinOperator op, IOptimizationContext context) {
op.setPhysicalOperator(new NLJoinPOperator(op.getJoinKind(), JoinPartitioningType.BROADCAST, context
.getPhysicalOptimizationConfig().getMaxRecordsPerFrame()));
}
/**
 * Installs a hybrid hash join with the given partitioning and per-side key lists. When the
 * partitioning is BROADCAST, the operator may subsequently be downgraded to an in-memory
 * hash join if the build side provably fits in memory.
 */
private static void setHashJoinOp(AbstractBinaryJoinOperator op, JoinPartitioningType partitioningType,
List<LogicalVariable> sideLeft, List<LogicalVariable> sideRight, IOptimizationContext context)
throws AlgebricksException {
op.setPhysicalOperator(new HybridHashJoinPOperator(op.getJoinKind(), partitioningType, sideLeft, sideRight,
context.getPhysicalOptimizationConfig().getMaxFramesHybridHash(), context
.getPhysicalOptimizationConfig().getMaxFramesLeftInputHybridHash(), context
.getPhysicalOptimizationConfig().getMaxRecordsPerFrame(), context
.getPhysicalOptimizationConfig().getFudgeFactor()));
if (partitioningType == JoinPartitioningType.BROADCAST) {
hybridToInMemHashJoin(op, context);
}
// op.setPhysicalOperator(new
// InMemoryHashJoinPOperator(op.getJoinKind(), partitioningType,
// sideLeft, sideRight,
// 1024 * 512));
}
/**
 * Replaces the hybrid hash join on {@code op} with an in-memory hash join when the logical
 * properties of the build (second) input show it fits within the operator's memory budget
 * (maxOutputFrames * fudgeFactor <= memSizeInFrames).
 */
private static void hybridToInMemHashJoin(AbstractBinaryJoinOperator op, IOptimizationContext context)
throws AlgebricksException {
ILogicalOperator opBuild = op.getInputs().get(1).getValue();
LogicalPropertiesVisitor.computeLogicalPropertiesDFS(opBuild, context);
ILogicalPropertiesVector v = context.getLogicalPropertiesVector(opBuild);
AlgebricksConfig.ALGEBRICKS_LOGGER.fine("// HybridHashJoin inner branch -- Logical properties for " + opBuild
+ ": " + v + "\n");
if (v != null) {
int size2 = v.getMaxOutputFrames();
HybridHashJoinPOperator hhj = (HybridHashJoinPOperator) op.getPhysicalOperator();
if (size2 > 0 && size2 * hhj.getFudgeFactor() <= hhj.getMemSizeInFrames()) {
AlgebricksConfig.ALGEBRICKS_LOGGER.fine("// HybridHashJoin inner branch " + opBuild
+ " fits in memory\n");
// maintains the local properties on the probe side
op.setPhysicalOperator(new InMemoryHashJoinPOperator(hhj.getKind(), hhj.getPartitioningType(), hhj
.getKeysLeftBranch(), hhj.getKeysRightBranch(), v.getNumberOfTuples() * 2));
}
}
}
/**
 * Tests whether {@code e} is usable as a hash-join condition: a single EQ comparison between
 * two variables, or an AND of such comparisons. On success, each variable is accumulated into
 * the output key list of the side whose schema contains it; a variable belonging to neither
 * side, or appearing twice, makes the condition unusable.
 */
private static boolean isHashJoinCondition(ILogicalExpression e, Collection<LogicalVariable> inLeftAll,
Collection<LogicalVariable> inRightAll, Collection<LogicalVariable> outLeftFields,
Collection<LogicalVariable> outRightFields) {
switch (e.getExpressionTag()) {
case FUNCTION_CALL: {
AbstractFunctionCallExpression fexp = (AbstractFunctionCallExpression) e;
FunctionIdentifier fi = fexp.getFunctionIdentifier();
if (fi.equals(AlgebricksBuiltinFunctions.AND)) {
// Every conjunct must itself be a hash-join condition.
for (Mutable<ILogicalExpression> a : fexp.getArguments()) {
if (!isHashJoinCondition(a.getValue(), inLeftAll, inRightAll, outLeftFields,
outRightFields)) {
return false;
}
}
return true;
} else {
ComparisonKind ck = AlgebricksBuiltinFunctions.getComparisonType(fi);
if (ck != ComparisonKind.EQ) {
return false;
}
ILogicalExpression opLeft = fexp.getArguments().get(0).getValue();
ILogicalExpression opRight = fexp.getArguments().get(1).getValue();
if (opLeft.getExpressionTag() != LogicalExpressionTag.VARIABLE
|| opRight.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
return false;
}
// Classify each operand to the side whose schema contains it.
LogicalVariable var1 = ((VariableReferenceExpression) opLeft).getVariableReference();
if (inLeftAll.contains(var1) && !outLeftFields.contains(var1)) {
outLeftFields.add(var1);
} else if (inRightAll.contains(var1) && !outRightFields.contains(var1)) {
outRightFields.add(var1);
} else {
return false;
}
LogicalVariable var2 = ((VariableReferenceExpression) opRight).getVariableReference();
if (inLeftAll.contains(var2) && !outLeftFields.contains(var2)) {
outLeftFields.add(var2);
} else if (inRightAll.contains(var2) && !outRightFields.contains(var2)) {
outRightFields.add(var2);
} else {
return false;
}
return true;
}
}
default: {
return false;
}
}
}
/**
 * Reads the broadcast-hint annotation off the join condition, if any, and maps it to the input
 * whose schema covers all variables used by the hinted argument.
 *
 * @return the side to broadcast, or null when there is no usable hint
 */
private static BroadcastSide getBroadcastJoinSide(ILogicalExpression e, List<LogicalVariable> varsLeft,
List<LogicalVariable> varsRight) {
if (e.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
return null;
}
AbstractFunctionCallExpression fexp = (AbstractFunctionCallExpression) e;
IExpressionAnnotation ann = fexp.getAnnotations().get(BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY);
if (ann == null) {
return null;
}
BroadcastSide side = (BroadcastSide) ann.getObject();
if (side == null) {
return null;
}
int i;
switch (side) {
case LEFT:
i = 0;
break;
case RIGHT:
i = 1;
break;
default:
return null;
}
ArrayList<LogicalVariable> vars = new ArrayList<LogicalVariable>();
fexp.getArguments().get(i).getValue().getUsedVariables(vars);
if (varsLeft.containsAll(vars)) {
return BroadcastSide.LEFT;
} else if (varsRight.containsAll(vars)) {
return BroadcastSide.RIGHT;
} else {
return null;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cocoon.environment.portlet;
import org.apache.cocoon.environment.Cookie;
import org.apache.cocoon.environment.Request;
import org.apache.cocoon.environment.Session;
import org.apache.cocoon.environment.impl.AbstractRequest;
import org.apache.cocoon.portlet.multipart.MultipartActionRequest;
import org.apache.commons.collections.IteratorUtils;
import javax.portlet.PortalContext;
import javax.portlet.PortletMode;
import javax.portlet.PortletPreferences;
import javax.portlet.WindowState;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Vector;
/**
* Implements the {@link Request} interface for the JSR-168 (Portlet) environment.
*
* @version $Id$
*/
public abstract class PortletRequest extends AbstractRequest {

    /** Portlet requests do not have a servletPath, so it is passed in via the constructor. */
    private String servletPath;

    /** Portlet requests do not have a pathInfo, so it is passed in via the constructor. */
    private String pathInfo;

    /** The real PortletRequest object being wrapped. */
    private final javax.portlet.PortletRequest request;

    /** The HttpEnvironment object that owns this request. */
    private final PortletEnvironment environment;

    /** The character encoding of parameters (set via {@link #setCharacterEncoding(String)}). */
    private String form_encoding;

    /** The default form encoding of the servlet container (defaults to ISO-8859-1 in {@link #decode(String)}). */
    private String container_encoding;

    /** The current session wrapper; refreshed lazily by {@link #getSession(boolean)}. */
    private PortletSession session;

    /** Lazily-built cookie view of the portlet preferences (see {@link #wrapCookies()}). */
    private Cookie[] wrappedCookies;

    /** Lazily-built unmodifiable name->cookie map matching {@link #wrappedCookies}. */
    private Map wrappedCookieMap;

    /** Cached request URI assembled from context path + servlet path + path info. */
    protected String portletRequestURI;

    /** Local storage for REQUEST_SCOPE attributes (kept out of the underlying portlet request). */
    private final Map attributes = new HashMap();

    /**
     * Creates a PortletRequest based on a real PortletRequest object
     */
    protected PortletRequest(String servletPath,
                             String pathInfo,
                             javax.portlet.PortletRequest request,
                             PortletEnvironment environment) {
        super();
        this.servletPath = servletPath;
        this.pathInfo = pathInfo;
        this.request = request;
        this.environment = environment;
    }

    /* (non-Javadoc)
     * @see org.apache.cocoon.environment.Request#get(java.lang.String)
     *
     * Returns null when absent, the single String when there is one value,
     * and a Vector of Strings when there are several.
     */
    public Object get(String name) {
        // if the request has been wrapped then access its method
        if (request instanceof MultipartActionRequest) {
            return ((MultipartActionRequest) request).get(name);
        }
        String[] values = request.getParameterValues(name);
        if (values == null) {
            return null;
        }
        if (values.length == 1) {
            return values[0];
        }
        if (values.length > 1) {
            // Multiple values: preserve historical contract of returning a Vector.
            Vector vect = new Vector(values.length);
            for (int i = 0; i < values.length; i++) {
                vect.add(values[i]);
            }
            return vect;
        }
        return null;
    }

    /* The Request interface methods */

    public String getAuthType() {
        return this.request.getAuthType();
    }

    /**
     * Builds the cookie view: each portlet preference is exposed as a
     * {@link PortletCookie}. Both {@link #wrappedCookies} and
     * {@link #wrappedCookieMap} are populated in one pass.
     */
    private synchronized void wrapCookies() {
        this.wrappedCookieMap = new HashMap();
        PortletPreferences cookies = this.request.getPreferences();
        if (cookies != null) {
            // NOTE(review): array is sized from getMap() but filled from getNames();
            // assumes both report the same number of entries — confirm.
            this.wrappedCookies = new Cookie[cookies.getMap().size()];
            int i = 0;
            for (Enumeration e = cookies.getNames(); e.hasMoreElements(); i++) {
                String name = (String) e.nextElement();
                PortletCookie cookie = new PortletCookie(name, cookies.getValue(name, null));
                this.wrappedCookies[i] = cookie;
                this.wrappedCookieMap.put(cookie.getName(), cookie);
            }
        }
        // Non-null wrappedCookieMap also serves as the "already wrapped" flag.
        this.wrappedCookieMap = Collections.unmodifiableMap(this.wrappedCookieMap);
    }

    public Cookie[] getCookies() {
        if (this.wrappedCookieMap == null) {
            wrapCookies();
        }
        return this.wrappedCookies;
    }

    public Map getCookieMap() {
        if (this.wrappedCookieMap == null) {
            wrapCookies();
        }
        return this.wrappedCookieMap;
    }

    /**
     * NOTE(review): throws NumberFormatException when the property is absent
     * or not a parseable long — confirm callers only ask for numeric headers.
     */
    public long getDateHeader(String name) {
        return Long.parseLong(this.request.getProperty(name));
    }

    /**
     * Resolves the two synthetic portlet headers (mode and window state)
     * locally; everything else is delegated to the portlet request properties.
     */
    public String getHeader(String name) {
        if (PortletEnvironment.HEADER_PORTLET_MODE.equals(name)) {
            return this.request.getPortletMode().toString();
        } else if (PortletEnvironment.HEADER_WINDOW_STATE.equals(name)) {
            return this.request.getWindowState().toString();
        } else {
            return this.request.getProperty(name);
        }
    }

    public Enumeration getHeaders(String name) {
        return this.request.getProperties(name);
    }

    /**
     * Returns the portlet property names plus the two synthetic headers
     * (portlet mode and window state), appended after the real names
     * are exhausted.
     */
    public Enumeration getHeaderNames() {
        final Enumeration names = this.request.getPropertyNames();
        // return this.request.getPropertyNames();
        return new Enumeration() {
            // 0 = mode not yet returned, 1 = window state not yet returned, 2 = done
            int position;
            public boolean hasMoreElements() {
                return names.hasMoreElements() || position < 2;
            }
            public Object nextElement() throws NoSuchElementException {
                if (names.hasMoreElements()) {
                    return names.nextElement();
                }
                if (position == 0) {
                    position++;
                    return PortletEnvironment.HEADER_PORTLET_MODE;
                } else if (position == 1) {
                    position++;
                    return PortletEnvironment.HEADER_WINDOW_STATE;
                } else {
                    throw new NoSuchElementException();
                }
            }
        };
    }

    /**
     * Concrete request object will implement this
     */
    public abstract String getMethod();

    public String getPathInfo() {
        return this.pathInfo;
    }

    public String getPathTranslated() {
        // TODO: getPathTranslated
        return null;
    }

    public String getContextPath() {
        return this.request.getContextPath();
    }

    public String getQueryString() {
        // TODO: getQueryString
        return "";
    }

    public String getRemoteUser() {
        return this.request.getRemoteUser();
    }

    public boolean isUserInRole(String role) {
        return this.request.isUserInRole(role);
    }

    public java.security.Principal getUserPrincipal() {
        return this.request.getUserPrincipal();
    }

    public String getRequestedSessionId() {
        return this.request.getRequestedSessionId();
    }

    /**
     * Lazily assembles and caches contextPath + '/' + servletPath + '/' + pathInfo,
     * avoiding duplicate slashes between the segments.
     */
    public String getRequestURI() {
        if (this.portletRequestURI == null) {
            final StringBuffer buffer = new StringBuffer();
            buffer.append(this.request.getContextPath());
            // NOTE(review): charAt(length()-1) assumes the context path is
            // non-empty; an empty context path would throw — confirm.
            if (getServletPath() != null) {
                if (buffer.charAt(buffer.length()-1) != '/') {
                    buffer.append('/');
                }
                buffer.append(getServletPath());
            }
            if (getPathInfo() != null) {
                if (buffer.charAt(buffer.length()-1) != '/') {
                    buffer.append('/');
                }
                buffer.append(getPathInfo());
            }
            this.portletRequestURI = buffer.toString();
        }
        return this.portletRequestURI;
    }

    /* (non-Javadoc)
     * @see org.apache.cocoon.environment.Request#getSitemapURI()
     */
    public String getSitemapURI() {
        return this.environment.getURI();
    }

    public String getSitemapURIPrefix() {
        return this.environment.getURIPrefix();
    }

    /* (non-Javadoc)
     * @see org.apache.cocoon.environment.Request#getSitemapPath()
     */
    public String getSitemapPath() {
        return this.environment.getURIPrefix();
    }

    public String getServletPath() {
        return this.servletPath;
    }

    /**
     * Returns the session wrapper, keeping it in sync with the underlying
     * portlet session: re-points an existing wrapper when the container
     * handed out a new session object, creates a wrapper on first access,
     * and drops the wrapper when the container reports no session.
     */
    public Session getSession(boolean create) {
        javax.portlet.PortletSession serverSession = this.request.getPortletSession(create);
        if (null != serverSession) {
            if (null != this.session) {
                if (this.session.session != serverSession) {
                    // update wrapper
                    this.session.session = serverSession;
                }
            } else {
                // new wrapper
                this.session = new PortletSession(serverSession,
                                                  this.environment.getDefaultSessionScope());
            }
        } else {
            // invalidate
            this.session = null;
        }
        return this.session;
    }

    public Session getSession() {
        return this.getSession(true);
    }

    public boolean isRequestedSessionIdValid() {
        return this.request.isRequestedSessionIdValid();
    }

    /**
     * Portlet does not know how portal manages session.
     * This method returns false always.
     */
    public boolean isRequestedSessionIdFromCookie() {
        return false;
    }

    /**
     * Portlet does not know how portal manages session.
     * This method returns true always.
     */
    public boolean isRequestedSessionIdFromURL() {
        return true;
    }

    public String getCharacterEncoding() {
        return this.form_encoding;
    }

    /**
     * Stores the form encoding used by {@link #decode(String)}.
     * NOTE(review): never actually throws despite the declared
     * UnsupportedEncodingException; the encoding is only validated
     * later when decode() runs — confirm that is intended.
     */
    public void setCharacterEncoding(String form_encoding) throws java.io.UnsupportedEncodingException {
        this.form_encoding = form_encoding;
    }

    /**
     * Sets the default encoding of the servlet container.
     */
    public void setContainerEncoding(String container_encoding) {
        this.container_encoding = container_encoding;
    }

    public int getContentLength() {
        // TODO getContentLength
        // return this.request.getContentLength();
        return -1;
    }

    public String getContentType() {
        // TODO getContentType
        // return this.request.getContentType();
        return null;
    }

    /**
     * Returns the raw parameter unless a form encoding has been configured,
     * in which case the value is re-decoded via {@link #decode(String)}.
     */
    public String getParameter(String name) {
        String value = this.request.getParameter(name);
        if (this.form_encoding == null || value == null) {
            return value;
        }
        return decode(value);
    }

    /**
     * Re-decodes a parameter value: takes the bytes as the container decoded
     * them (defaulting the container encoding to ISO-8859-1) and re-interprets
     * them in the configured form encoding.
     */
    private String decode(String str) {
        if (str == null) {
            return null;
        }
        try {
            if (this.container_encoding == null) {
                this.container_encoding = "ISO-8859-1";
            }
            byte[] bytes = str.getBytes(this.container_encoding);
            return new String(bytes, form_encoding);
        } catch (java.io.UnsupportedEncodingException uee) {
            throw new PortletException("Unsupported Encoding Exception", uee);
        }
    }

    public Enumeration getParameterNames() {
        return this.request.getParameterNames();
    }

    /**
     * Like {@link #getParameter(String)} but for multi-valued parameters:
     * every value is re-decoded when a form encoding is configured.
     */
    public String[] getParameterValues(String name) {
        String[] values = this.request.getParameterValues(name);
        if (values == null) {
            return null;
        } else if (this.form_encoding == null) {
            return values;
        }
        String[] decoded_values = new String[values.length];
        for (int i = 0; i < values.length; ++i) {
            decoded_values[i] = decode(values[i]);
        }
        return decoded_values;
    }

    /** Fixed protocol identifier for the JSR-168 environment. */
    public String getProtocol() {
        return "JSR168";
    }

    public String getScheme() {
        return this.request.getScheme();
    }

    public String getServerName() {
        return this.request.getServerName();
    }

    public int getServerPort() {
        return this.request.getServerPort();
    }

    public String getRemoteAddr() {
        // TODO getRemoteAddr
        // return this.request.getRemoteAddr();
        return null;
    }

    public String getRemoteHost() {
        // TODO getRemoteHost
        // return this.request.getRemoteHost();
        return null;
    }

    public Locale getLocale() {
        return this.request.getLocale();
    }

    public Enumeration getLocales() {
        return this.request.getLocales();
    }

    public boolean isSecure() {
        return this.request.isSecure();
    }

    /* The PortletRequest interface methods — straight delegation to the
     * wrapped javax.portlet.PortletRequest. */

    /**
     * Returns underlying portlet API request object
     * @return portlet request object
     */
    public javax.portlet.PortletRequest getPortletRequest() {
        return request;
    }

    public Map getParameterMap() {
        return this.request.getParameterMap();
    }

    public PortalContext getPortalContext() {
        return this.request.getPortalContext();
    }

    public PortletMode getPortletMode() {
        return this.request.getPortletMode();
    }

    public javax.portlet.PortletSession getPortletSession() {
        return this.request.getPortletSession();
    }

    public javax.portlet.PortletSession getPortletSession(boolean create) {
        return this.request.getPortletSession(create);
    }

    public PortletPreferences getPreferences() {
        return this.request.getPreferences();
    }

    public Enumeration getProperties(String name) {
        return this.request.getProperties(name);
    }

    public String getProperty(String name) {
        return this.request.getProperty(name);
    }

    public Enumeration getPropertyNames() {
        return this.request.getPropertyNames();
    }

    public String getResponseContentType() {
        return this.request.getResponseContentType();
    }

    public Enumeration getResponseContentTypes() {
        return this.request.getResponseContentTypes();
    }

    public WindowState getWindowState() {
        return this.request.getWindowState();
    }

    public boolean isPortletModeAllowed(PortletMode mode) {
        return this.request.isPortletModeAllowed(mode);
    }

    public boolean isWindowStateAllowed(WindowState state) {
        return this.request.isWindowStateAllowed(state);
    }

    /* Attribute handling: the no-scope variants delegate to the scoped
     * variants with GLOBAL_SCOPE; REQUEST_SCOPE attributes live in the
     * local 'attributes' map, GLOBAL_SCOPE ones in the portlet request. */

    /* (non-Javadoc)
     * @see org.apache.cocoon.environment.Request#getAttribute(java.lang.String)
     */
    public Object getAttribute(String name) {
        return this.getAttribute(name, Request.GLOBAL_SCOPE);
    }

    /* (non-Javadoc)
     * @see org.apache.cocoon.environment.Request#getAttributeNames()
     */
    public Enumeration getAttributeNames() {
        return this.getAttributeNames(Request.GLOBAL_SCOPE);
    }

    /* (non-Javadoc)
     * @see org.apache.cocoon.environment.Request#setAttribute(java.lang.String, java.lang.Object)
     */
    public void setAttribute(String name, Object value) {
        this.setAttribute(name, value, Request.GLOBAL_SCOPE);
    }

    /* (non-Javadoc)
     * @see org.apache.cocoon.environment.Request#removeAttribute(java.lang.String)
     */
    public void removeAttribute(String name) {
        this.removeAttribute(name, Request.GLOBAL_SCOPE);
    }

    /* (non-Javadoc)
     * @see org.apache.cocoon.environment.Request#getAttribute(java.lang.String, int)
     */
    public Object getAttribute(String name, int scope) {
        if ( scope == Request.REQUEST_SCOPE ) {
            return this.attributes.get(name);
        }
        return this.request.getAttribute(name);
    }

    /* (non-Javadoc)
     * @see org.apache.cocoon.environment.Request#getAttributeNames(int)
     */
    public Enumeration getAttributeNames(int scope) {
        if ( scope == Request.REQUEST_SCOPE ) {
            return IteratorUtils.asEnumeration(this.attributes.keySet().iterator());
        }
        return this.request.getAttributeNames();
    }

    /* (non-Javadoc)
     * @see org.apache.cocoon.environment.Request#setAttribute(java.lang.String, java.lang.Object, int)
     */
    public void setAttribute(String name, Object value, int scope) {
        if ( scope == Request.REQUEST_SCOPE ) {
            this.attributes.put(name, value);
        } else {
            this.request.setAttribute(name, value);
        }
    }

    /* (non-Javadoc)
     * @see org.apache.cocoon.environment.Request#removeAttribute(java.lang.String, int)
     */
    public void removeAttribute(String name, int scope) {
        if ( scope == Request.REQUEST_SCOPE ) {
            this.attributes.remove(name);
        } else {
            this.request.removeAttribute(name);
        }
    }

    /**
     * Looks up an attribute first in REQUEST_SCOPE, falling back to
     * GLOBAL_SCOPE when not found.
     *
     * @see org.apache.cocoon.environment.Request#searchAttribute(java.lang.String)
     */
    public Object searchAttribute(String name) {
        Object result = this.getAttribute(name, REQUEST_SCOPE);
        if ( result == null ) {
            result = this.getAttribute(name, GLOBAL_SCOPE);
        }
        return result;
    }
}
| |
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package org.jetbrains.java.decompiler.struct.gen.generics;
import org.jetbrains.java.decompiler.code.CodeConstants;
import org.jetbrains.java.decompiler.main.DecompilerContext;
import org.jetbrains.java.decompiler.main.extern.IFernflowerLogger;
import org.jetbrains.java.decompiler.modules.decompiler.ExprProcessor;
import org.jetbrains.java.decompiler.modules.decompiler.typeann.TypeAnnotationWriteHelper;
import org.jetbrains.java.decompiler.struct.StructTypePathEntry;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Parses JVM generic signatures (JVMS §4.7.9.1) for classes, fields and
 * methods into {@code Generic*Descriptor} objects, and renders
 * {@link GenericType}s back to Java source text, interleaving type
 * annotations at the correct positions.
 */
public final class GenericMain {

  /**
   * Primitive type names indexed by their CodeConstants.TYPE_* value.
   * NOTE(review): order must stay in sync with the TYPE_* constants up to
   * TYPE_BOOLEAN (see getTypeName) — confirm before reordering.
   */
  private static final String[] typeNames = {
    "byte",
    "char",
    "double",
    "float",
    "int",
    "long",
    "short",
    "boolean",
  };

  /**
   * Parses a class signature: optional formal type parameters, then the
   * superclass type, then any number of superinterface types.
   *
   * @return the descriptor, or {@code null} (with a WARN log) when the
   *         signature is malformed.
   */
  public static GenericClassDescriptor parseClassSignature(String signature) {
    String original = signature;
    try {
      GenericClassDescriptor descriptor = new GenericClassDescriptor();

      // Strip "<...>" formal parameters (if present) into the descriptor.
      signature = parseFormalParameters(signature, descriptor.fparameters, descriptor.fbounds);

      // First remaining type is the superclass.
      String superCl = GenericType.getNextType(signature);
      descriptor.superclass = new GenericType(superCl);

      // Everything after it is a sequence of superinterface types.
      signature = signature.substring(superCl.length());
      while (signature.length() > 0) {
        String superIf = GenericType.getNextType(signature);
        descriptor.superinterfaces.add(new GenericType(superIf));
        signature = signature.substring(superIf.length());
      }

      return descriptor;
    }
    catch (RuntimeException e) {
      DecompilerContext.getLogger().writeMessage("Invalid signature: " + original, IFernflowerLogger.Severity.WARN);
      return null;
    }
  }

  /**
   * Parses a field signature (a single type).
   *
   * @return the descriptor, or {@code null} (with a WARN log) on failure.
   */
  public static GenericFieldDescriptor parseFieldSignature(String signature) {
    try {
      return new GenericFieldDescriptor(new GenericType(signature));
    }
    catch (RuntimeException e) {
      DecompilerContext.getLogger().writeMessage("Invalid signature: " + signature, IFernflowerLogger.Severity.WARN);
      return null;
    }
  }

  /**
   * Parses a method signature: optional formal type parameters,
   * "(" parameter types ")", return type, then optional "^"-separated
   * thrown types.
   *
   * @return the descriptor, or {@code null} (with a WARN log) on failure.
   */
  public static GenericMethodDescriptor parseMethodSignature(String signature) {
    String original = signature;
    try {
      List<String> typeParameters = new ArrayList<>();
      List<List<GenericType>> typeParameterBounds = new ArrayList<>();
      signature = parseFormalParameters(signature, typeParameters, typeParameterBounds);

      // Parameter list sits between '(' and the first ')'.
      int to = signature.indexOf(")");
      String parameters = signature.substring(1, to);
      signature = signature.substring(to + 1);

      List<GenericType> parameterTypes = new ArrayList<>();
      while (parameters.length() > 0) {
        String par = GenericType.getNextType(parameters);
        parameterTypes.add(new GenericType(par));
        parameters = parameters.substring(par.length());
      }

      // Next type after ')' is the return type.
      String ret = GenericType.getNextType(signature);
      GenericType returnType = new GenericType(ret);
      signature = signature.substring(ret.length());

      // Remaining text (if any) is "^Type^Type..." — split skips the empty
      // element before the first '^' by starting at index 1.
      List<GenericType> exceptionTypes = new ArrayList<>();
      if (signature.length() > 0) {
        String[] exceptions = signature.split("\\^");
        for (int i = 1; i < exceptions.length; i++) {
          exceptionTypes.add(new GenericType(exceptions[i]));
        }
      }

      return new GenericMethodDescriptor(typeParameters, typeParameterBounds, parameterTypes, returnType, exceptionTypes);
    }
    catch (RuntimeException e) {
      DecompilerContext.getLogger().writeMessage("Invalid signature: " + original, IFernflowerLogger.Severity.WARN);
      return null;
    }
  }

  /**
   * Consumes a leading "&lt;...&gt;" formal-type-parameter section, filling
   * {@code parameters} with the type-variable names and {@code bounds} with
   * the bound list for each variable.
   *
   * @return the remainder of the signature after the closing '&gt;'
   *         (or the input unchanged when there is no leading '&lt;').
   */
  private static String parseFormalParameters(String signature, List<? super String> parameters, List<? super List<GenericType>> bounds) {
    if (signature.charAt(0) != '<') {
      return signature;
    }

    // Find the matching '>' for the opening '<', tracking nesting depth.
    int counter = 1;
    int index = 1;

    loop:
    while (index < signature.length()) {
      switch (signature.charAt(index)) {
        case '<':
          counter++;
          break;
        case '>':
          counter--;
          if (counter == 0) {
            break loop;
          }
      }

      index++;
    }

    String value = signature.substring(1, index);
    signature = signature.substring(index + 1);

    // Each formal parameter is "Name:ClassBound(:InterfaceBound)*".
    while (value.length() > 0) {
      int to = value.indexOf(":");

      String param = value.substring(0, to);
      value = value.substring(to + 1);

      List<GenericType> lstBounds = new ArrayList<>();

      while (true) {
        if (value.charAt(0) == ':') {
          // empty superclass, skip
          value = value.substring(1);
        }

        String bound = GenericType.getNextType(value);
        lstBounds.add(new GenericType(bound));
        value = value.substring(bound.length());


        if (value.length() == 0 || value.charAt(0) != ':') {
          break;
        }
        else {
          value = value.substring(1);
        }
      }

      parameters.add(param);
      bounds.add(lstBounds);
    }

    return signature;
  }

  /**
   * Renders a generic type including its array brackets, splitting the
   * pending type-annotation paths into those that apply to array dimensions
   * and those that apply to the element type.
   */
  public static String getGenericCastTypeName(GenericType type, List<TypeAnnotationWriteHelper> typePathWriteStack) {
    List<TypeAnnotationWriteHelper> arrayPaths = new ArrayList<>();
    List<TypeAnnotationWriteHelper> notArrayPath = typePathWriteStack.stream().filter(stack -> {
      boolean isArrayPath = stack.getPaths().size() < type.arrayDim;
      if (stack.getPaths().size() > type.arrayDim) {
        for (int i = 0; i < type.arrayDim; i++) {
          stack.getPaths().poll(); // remove all trailing
        }
      }
      if (isArrayPath) {
        arrayPaths.add(stack);
      }
      return !isArrayPath;
    }).collect(Collectors.toList());
    StringBuilder sb = new StringBuilder(getTypeName(type, notArrayPath));
    ExprProcessor.writeArray(sb, type.arrayDim, arrayPaths);
    return sb.toString();
  }

  /**
   * Renders the element type (no array brackets): a primitive keyword,
   * "void", a type variable name, or an annotated class name.
   *
   * @throws RuntimeException when the type tag is not recognized.
   */
  private static String getTypeName(GenericType type, List<TypeAnnotationWriteHelper> typePathWriteStack) {
    int tp = type.type;
    if (tp <= CodeConstants.TYPE_BOOLEAN) {
      return typeNames[tp];
    }
    else if (tp == CodeConstants.TYPE_VOID) {
      return "void";
    }
    else if (tp == CodeConstants.TYPE_GENVAR) {
      StringBuilder sb = new StringBuilder();
      appendTypeAnnotationBeforeType(type, sb, typePathWriteStack);
      sb.append(type.value);
      return sb.toString();
    }
    else if (tp == CodeConstants.TYPE_OBJECT) {
      StringBuilder sb = new StringBuilder();
      appendClassName(type, sb, typePathWriteStack);
      return sb.toString();
    }

    throw new RuntimeException("Invalid type: " + type);
  }

  /**
   * Appends a (possibly nested) class name with its type arguments,
   * writing enclosing-class qualifiers and nested-type annotations as
   * required.
   */
  private static void appendClassName(GenericType type, StringBuilder sb, List<TypeAnnotationWriteHelper> typePathWriteStack) {
    List<GenericType> enclosingClasses = type.getEnclosingClasses();

    appendTypeAnnotationBeforeType(type, sb, typePathWriteStack);

    if (enclosingClasses.isEmpty()) {
      String name = type.value.replace('/', '.');
      sb.append(DecompilerContext.getImportCollector().getShortName(name));
    }
    else {
      // Qualify with each enclosing class (and its own type arguments)
      // before appending the innermost simple name.
      for (GenericType tp : enclosingClasses) {
        String[] nestedClasses = DecompilerContext.getImportCollector().getShortName(tp.value.replace('/', '.')).split("\\.");
        for (int i = 0; i < nestedClasses.length; i++) {
          String nestedType = nestedClasses[i];
          if (i != 0) { // first annotation is written already
            ExprProcessor.checkNestedTypeAnnotation(sb, typePathWriteStack);
          }
          sb.append(nestedType);
          if (i != nestedClasses.length - 1) sb.append(".");
        }
        appendTypeArguments(tp, sb, typePathWriteStack);
        sb.append('.');
        ExprProcessor.checkNestedTypeAnnotation(sb, typePathWriteStack);
      }

      sb.append(type.value);
    }

    appendTypeArguments(type, sb, typePathWriteStack);
  }

  /**
   * Writes (and removes from the stack) every annotation whose path is now
   * fully consumed — i.e. annotations that apply directly before this type.
   */
  private static void appendTypeAnnotationBeforeType(GenericType type, StringBuilder sb, List<TypeAnnotationWriteHelper> typePathWriteStack) {
    typePathWriteStack.removeIf(writeHelper -> {
      StructTypePathEntry path = writeHelper.getPaths().peek();
      if (path == null) {
        writeHelper.writeTo(sb);
        return true;
      }
      if (path.getTypePathEntryKind() == StructTypePathEntry.Kind.ARRAY.getOpcode() && type.arrayDim == writeHelper.getPaths().size()) {
        writeHelper.writeTo(sb);
        return true;
      }
      return false;
    });
  }

  /**
   * Appends the "&lt;...&gt;" type-argument list for {@code type}, handling
   * wildcards and routing each pending annotation path to the argument it
   * belongs to.
   */
  private static void appendTypeArguments(GenericType type, StringBuilder sb, List<TypeAnnotationWriteHelper> typePathWriteStack) {
    if (!type.getArguments().isEmpty()) {
      sb.append('<');

      for (int i = 0; i < type.getArguments().size(); i++) {
        if (i > 0) {
          sb.append(", ");
        }

        GenericType genPar = type.getArguments().get(i);
        int wildcard = type.getWildcards().get(i);
        final int it = i;

        // only take type paths that are in the generic
        List<TypeAnnotationWriteHelper> locTypePathWriteStack = typePathWriteStack.stream().filter(writeHelper -> {
          StructTypePathEntry path = writeHelper.getPaths().peek();
          boolean inGeneric = path != null && path.getTypeArgumentIndex() == it && path.getTypePathEntryKind() == StructTypePathEntry.Kind.TYPE.getOpcode();
          if (inGeneric) {
            writeHelper.getPaths().pop();
          }
          return inGeneric;
        }).collect(Collectors.toList());

        // Annotations that land directly on this argument (before any
        // wildcard keyword) are written and dropped here.
        locTypePathWriteStack.removeIf(writeHelper -> {
          StructTypePathEntry path = writeHelper.getPaths().peek();
          if (path == null && wildcard != GenericType.WILDCARD_NO) {
            writeHelper.writeTo(sb);
            return true;
          }
          if (path != null && path.getTypePathEntryKind() == StructTypePathEntry.Kind.TYPE.getOpcode() && path.getTypeArgumentIndex() == it &&
              genPar.arrayDim != 0 && genPar.arrayDim == writeHelper.getPaths().size()
          ) {
            writeHelper.writeTo(sb);
            return true;
          }
          return false;
        });

        switch (wildcard) {
          case GenericType.WILDCARD_UNBOUND:
            sb.append('?');
            break;
          case GenericType.WILDCARD_EXTENDS:
            sb.append("? extends ");
            break;
          case GenericType.WILDCARD_SUPER:
            sb.append("? super ");
            break;
        }

        typePathWriteStack.forEach(writeHelper -> { // remove all wild card entries
          StructTypePathEntry path = writeHelper.getPaths().peek();
          boolean isWildCard = path != null && path.getTypePathEntryKind() == StructTypePathEntry.Kind.TYPE_WILDCARD.getOpcode();
          if (isWildCard && path.getTypeArgumentIndex() == it) writeHelper.getPaths().pop();
        });

        // Wildcard-bound annotations whose remaining path matches the
        // argument's array depth are written and dropped.
        locTypePathWriteStack.removeIf(writeHelper -> {
          StructTypePathEntry path = writeHelper.getPaths().peek();
          if (path != null && path.getTypeArgumentIndex() == it &&
              path.getTypePathEntryKind() == StructTypePathEntry.Kind.TYPE_WILDCARD.getOpcode() &&
              writeHelper.getPaths().size() - 1 == genPar.arrayDim
          ) {
            writeHelper.writeTo(sb);
            return true;
          }
          return false;
        });

        if (genPar != null) {
          sb.append(getGenericCastTypeName(genPar, locTypePathWriteStack));
        }
      }

      sb.append(">");
    }
  }
}
| |
package npanday.executable;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import npanday.vendor.VendorInfo;
import javax.annotation.Nullable;
import java.io.File;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Holds the configured executable capability.
*
* @author <a href="mailto:lcorneliussen@apache.org">Lars Corneliussen</a>
*/
// TODO: Refactor to be based on the configured plugins
public class MutableExecutableCapability
    implements ExecutableCapability
{
    protected VendorInfo vendorInfo;

    protected String operatingSystem;

    private String architecture;

    protected String pluginClassName;

    protected Properties pluginConfiguration;

    private String executable;

    private String executableVersion;

    protected String identifier;

    private CommandCapability commandCapability;

    private List<String> frameworkVersions;

    private String profile;

    private List<String> probingPaths;

    /**
     * @return the configured profile name, or null if none was set
     */
    public String getProfile()
    {
        return profile;
    }

    public void setProfile( String profile )
    {
        this.profile = profile;
    }

    /**
     * @return the framework versions this executable supports, or null if none were set
     */
    public List<String> getFrameworkVersions()
    {
        return frameworkVersions;
    }

    public void setFrameworkVersions( List<String> frameworkVersions )
    {
        this.frameworkVersions = frameworkVersions;
    }

    /**
     * @return the capability identifier, or null if none was set
     */
    public String getIdentifier()
    {
        return identifier;
    }

    public void setIdentifier( String identifier )
    {
        this.identifier = identifier;
    }

    /**
     * @return the executable name, or null if none was set
     */
    public String getExecutableName()
    {
        return executable;
    }

    public void setExecutableName( String executableName )
    {
        this.executable = executableName;
    }

    /**
     * @return the executable version, or null if none was set
     */
    public String getExecutableVersion()
    {
        return executableVersion;
    }

    /**
     * Sets the executable version.
     *
     * @param executableVersion the version string of the executable
     */
    public void setExecutableVersion( String executableVersion )
    {
        this.executableVersion = executableVersion;
    }

    /**
     * Misspelled variant kept for backward compatibility with existing callers.
     *
     * @deprecated use {@link #setExecutableVersion(String)} instead
     */
    @Deprecated
    public void setExectuableVersion( String executableVersion )
    {
        setExecutableVersion( executableVersion );
    }

    /**
     * @return the vendor information, or null if none was set
     */
    public VendorInfo getVendorInfo()
    {
        return vendorInfo;
    }

    public void setVendorInfo( VendorInfo vendorInfo )
    {
        this.vendorInfo = vendorInfo;
    }

    /**
     * @return the operating system this capability applies to, or null if none was set
     */
    public String getOperatingSystem()
    {
        return operatingSystem;
    }

    public void setOperatingSystem( String operatingSystem )
    {
        this.operatingSystem = operatingSystem;
    }

    /**
     * @return the architecture this capability applies to, or null if none was set
     */
    public String getArchitecture()
    {
        return architecture;
    }

    public void setArchitecture( String architecture )
    {
        this.architecture = architecture;
    }

    /**
     * @return the plugin class name backing this capability, or null if none was set
     */
    public String getPluginClassName()
    {
        return pluginClassName;
    }

    public void setPluginClassName( String pluginClassName )
    {
        this.pluginClassName = pluginClassName;
    }

    /**
     * @return the plugin configuration properties, or null if none were set
     */
    public Properties getPluginConfiguration()
    {
        return pluginConfiguration;
    }

    public void setPluginConfiguration( Properties pluginConfiguration )
    {
        this.pluginConfiguration = pluginConfiguration;
    }

    /**
     * @return the command capability, or null if none was set
     */
    public CommandCapability getCommandCapability()
    {
        return commandCapability;
    }

    public void setCommandCapability( CommandCapability commandCapability )
    {
        this.commandCapability = commandCapability;
    }

    /**
     * Returns the paths to probe when searching for the executable.
     *
     * @return the explicitly configured probing paths if any; otherwise an
     *         unmodifiable view of the vendor's executable paths
     * @throws NullPointerException if no probing paths are configured and
     *         no vendor info is available
     */
    public List<String> getProbingPaths()
    {
        // if probing paths are defined for the capability, these are to be
        // used when searching executables.
        if ( probingPaths != null && !probingPaths.isEmpty() )
        {
            return probingPaths;
        }

        // if not, we expect the executable is provided by the vendor
        final List<File> vendorPaths = checkNotNull(
            getVendorInfo(), "Vendor info is unavailable"
        ).getExecutablePaths();

        List<String> vendorPathsAsString = Lists.transform(
            vendorPaths, new Function<File, String>()
            {
                public String apply( @Nullable File file )
                {
                    return checkNotNull( file, "file was null" ).toString();
                }
            }
        );

        return Collections.unmodifiableList( vendorPathsAsString );
    }

    public void setProbingPaths( List<String> probingPaths )
    {
        this.probingPaths = probingPaths;
    }

    @Override
    public String toString()
    {
        return "ExecutableCapability [" + "vendorInfo=" + vendorInfo + ", operatingSystem='" + operatingSystem
            + '\'' + ", profile='" + profile + '\'' + ']';
    }
}
| |
/**
* This project is licensed under the Apache License, Version 2.0
* if the following condition is met:
* (otherwise it cannot be used by anyone but the author, Kevin, only)
*
* The original JSON Statham project is owned by Lee, Seong Hyun (Kevin).
*
* -What does it mean to you?
* Nothing, unless you want to take the ownership of
* "the original project" (not yours or forked & modified one).
* You are free to use it for both non-commercial and commercial projects
* and free to modify it as the Apache License allows.
*
* -So why is this condition necessary?
* It is only to protect the original project (See the case of Java).
*
*
* Copyright 2009 Lee, Seong Hyun (Kevin)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.elixirian.jsonstatham.json;
import static org.elixirian.kommonlee.util.Objects.*;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import org.elixirian.jsonstatham.annotation.Json;
import org.elixirian.jsonstatham.annotation.JsonField;
import org.elixirian.jsonstatham.annotation.ValueAccessor;
/**
* <pre>
* ___ _____ _____
* / \/ /_________ ___ ____ __ ______ / / ______ ______
* / / / ___ \ \/ //___// // / / / / ___ \/ ___ \
* / \ / _____/\ // // __ / / /___/ _____/ _____/
* /____/\____\\_____/ \__//___//___/ /__/ /________/\_____/ \_____/
* </pre>
*
* @author Lee, SeongHyun (Kevin)
* @version 0.0.1 (2009-12-20)
*/
@Json
public final class ComplexJsonObjectWithValueAccessor
{
  @JsonField(name = "id")
  private Long primaryKey;

  @JsonField(name = "name")
  private String name;

  @JsonField(name = "address")
  private Address address;

  @JsonField(name = "date")
  private Date date;

  @JsonField(name = "dateWithValueAccessor")
  @ValueAccessor(name = "getDateString")
  private Date dateWithValueAccessor;

  @JsonField(name = "calendar")
  private Calendar calendar;

  @JsonField(name = "calendarWithValueAccessor")
  @ValueAccessor(name = "getCalendarString")
  private Calendar calendarWithValueAccessor;

  // SimpleDateFormat is not thread-safe; it is shared by the two value
  // accessors, so every format() call must synchronize on it.
  private final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd");

  /**
   * @return the primaryKey
   */
  public Long getPrimaryKey()
  {
    return primaryKey;
  }

  /**
   * @param primaryKey
   *          the primaryKey to set
   */
  public void setPrimaryKey(final Long primaryKey)
  {
    this.primaryKey = primaryKey;
  }

  /**
   * @return the name
   */
  public String getName()
  {
    return name;
  }

  /**
   * @param name
   *          the name to set
   */
  public void setName(final String name)
  {
    this.name = name;
  }

  /**
   * @return the address
   */
  public Address getAddress()
  {
    return address;
  }

  /**
   * @param address
   *          the address to set
   */
  public void setAddress(final Address address)
  {
    this.address = address;
  }

  /**
   * @return the date
   */
  public Date getDate()
  {
    return date;
  }

  /**
   * @param date
   *          the date to set
   */
  public void setDate(final Date date)
  {
    this.date = date;
  }

  /**
   * @return the dateWithValueAccessor
   */
  public Date getDateWithValueAccessor()
  {
    return dateWithValueAccessor;
  }

  /**
   * @param dateWithValueAccessor
   *          the dateWithValueAccessor to set
   */
  public void setDateWithValueAccessor(final Date dateWithValueAccessor)
  {
    this.dateWithValueAccessor = dateWithValueAccessor;
  }

  /**
   * Value accessor for {@link #dateWithValueAccessor}: formats it as yyyy-MM-dd.
   *
   * @return the formatted date string
   */
  public String getDateString()
  {
    synchronized (simpleDateFormat)
    {
      return simpleDateFormat.format(dateWithValueAccessor);
    }
  }

  /**
   * @return the calendar
   */
  public Calendar getCalendar()
  {
    return calendar;
  }

  /**
   * @param calendar
   *          the calendar to set
   */
  public void setCalendar(final Calendar calendar)
  {
    this.calendar = calendar;
  }

  /**
   * @return the calendarWithValueAccessor
   */
  public Calendar getCalendarWithValueAccessor()
  {
    return calendarWithValueAccessor;
  }

  /**
   * @param calendarWithValueAccessor
   *          the calendarWithValueAccessor to set
   */
  public void setCalendarWithValueAccessor(final Calendar calendarWithValueAccessor)
  {
    this.calendarWithValueAccessor = calendarWithValueAccessor;
  }

  /**
   * Value accessor for {@link #calendarWithValueAccessor}: formats its time as yyyy-MM-dd.
   *
   * @return the formatted calendar string
   */
  public String getCalendarString()
  {
    // Fixed: this accessor previously used the shared SimpleDateFormat
    // without synchronizing, unlike getDateString().
    synchronized (simpleDateFormat)
    {
      return simpleDateFormat.format(calendarWithValueAccessor.getTime());
    }
  }

  @Override
  public int hashCode()
  {
    return hash(primaryKey, name, address, date, dateWithValueAccessor, calendar, calendarWithValueAccessor);
  }

  @Override
  public boolean equals(final Object complexJsonObjectWithValueAccessor)
  {
    if (identical(this, complexJsonObjectWithValueAccessor))
    {
      return true;
    }
    final ComplexJsonObjectWithValueAccessor that =
      castIfInstanceOf(ComplexJsonObjectWithValueAccessor.class, complexJsonObjectWithValueAccessor);
    /* @formatter:off */
    return isNotNull(that) &&
            (equal(this.primaryKey, that.getPrimaryKey()) &&
             equal(this.name, that.getName()) &&
             equal(this.address, that.getAddress()) &&
             equal(this.date, that.getDate()) &&
             equal(this.dateWithValueAccessor, that.getDateWithValueAccessor()) &&
             equal(this.calendar, that.getCalendar()) &&
             equal(this.calendarWithValueAccessor, that.getCalendarWithValueAccessor()));
    /* @formatter:on */
  }

  @Override
  public String toString()
  {
    return toStringBuilder(this).add("id", primaryKey)
        .add("name", name)
        .add("address", address)
        .add("date", date)
        .add("dateWithValueAccessor", dateWithValueAccessor)
        .add("calendar", calendar)
        .add("calendarWithValueAccessor", calendarWithValueAccessor)
        .toString();
  }
}
| |
/**
* Copyright 2013 Cloudera Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kitesdk.data;
import static org.kitesdk.data.filesystem.DatasetTestUtilities.*;
import com.google.common.collect.ImmutableMultiset;
import com.google.common.collect.Sets;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.Collection;
import org.apache.avro.Schema;
import org.apache.hadoop.conf.Configuration;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public abstract class TestMetadataProviders extends MiniDFSTest {
protected static final String NAME = "provider_test1";
@Parameterized.Parameters
public static Collection<Object[]> data() {
Object[][] data = new Object[][] {
{ false }, // default to local FS
{ true } }; // default to distributed FS
return Arrays.asList(data);
}
// whether this should use the DFS provided by MiniDFSTest
protected boolean distributed;
protected Configuration conf;
protected DatasetDescriptor testDescriptor;
protected DatasetDescriptor anotherDescriptor;
protected MetadataProvider provider;
abstract public MetadataProvider newProvider(Configuration conf);
public TestMetadataProviders(boolean distributed) {
this.distributed = distributed;
}
@Before
public void setUp() throws IOException, URISyntaxException {
this.conf = (distributed ?
MiniDFSTest.getConfiguration() :
new Configuration());
this.testDescriptor = new DatasetDescriptor.Builder()
.format(Formats.AVRO)
.schemaUri(USER_SCHEMA_URL)
.partitionStrategy(new PartitionStrategy.Builder()
.year("timestamp")
.month("timestamp")
.day("timestamp")
.build())
.build();
// something completely different
this.anotherDescriptor = new DatasetDescriptor.Builder()
.format(Formats.PARQUET)
.schema(Schema.createArray(Schema.create(Schema.Type.FLOAT)))
.partitionStrategy(new PartitionStrategy.Builder()
.hash("some_field", 20000)
.build())
.build();
this.provider = newProvider(conf);
}
@Test
public void testCreate() {
Assert.assertFalse("Sanity check", provider.exists(NAME));
DatasetDescriptor created = provider.create(NAME, testDescriptor);
Assert.assertNotNull("Descriptor should be returned", created);
Assert.assertTrue("Descriptor should exist", provider.exists(NAME));
Assert.assertEquals("Schema should match",
testDescriptor.getSchema(), created.getSchema());
Assert.assertEquals("PartitionStrategy should match",
testDescriptor.getPartitionStrategy(), created.getPartitionStrategy());
Assert.assertEquals("Format should match",
testDescriptor.getFormat(), created.getFormat());
Assert.assertNotNull("Location URI should be set",
created.getLocation());
Assert.assertNotNull("Location URI should have a scheme",
created.getLocation().getScheme());
}
public void testCreateWithLocation() throws URISyntaxException {
Assert.assertFalse("Sanity check", provider.exists(NAME));
URI requestedLocation = new URI("hdfs:/tmp/data/my_data_set");
DatasetDescriptor requested = new DatasetDescriptor.Builder(testDescriptor)
.location(requestedLocation)
.build();
final DatasetDescriptor created;
try {
created = provider.create(NAME, requested);
} catch (MetadataProviderException ex) {
// this is expected if the provider doesn't support requested locations
return;
}
// if supported, the location should be unchanged.
Assert.assertNotNull("Descriptor should be returned", created);
Assert.assertTrue("Descriptor should exist", provider.exists(NAME));
Assert.assertEquals("Requested locations should match",
requestedLocation, created.getLocation());
}
public void ensureCreated() {
// use testCreate to create NAME
testCreate();
Assert.assertTrue("Sanity check", provider.exists(NAME));
}
@Test(expected=DatasetExistsException.class)
public void testCreateAlreadyExists() {
ensureCreated();
provider.create(NAME, anotherDescriptor);
}
@Test(expected=IllegalArgumentException.class)
public void testCreateFailsNullName() {
provider.create(null, testDescriptor);
}
@Test(expected=IllegalArgumentException.class)
public void testCreateFailsNullDescriptor() {
provider.create(NAME, null);
}
@Test
public void testLoad() {
ensureCreated();
DatasetDescriptor loaded = provider.load(NAME);
Assert.assertNotNull("DatasetDescriptor should be returned", loaded);
Assert.assertEquals("Schema should match",
testDescriptor.getSchema(), loaded.getSchema());
Assert.assertEquals("PartitionStrategy should match",
testDescriptor.getPartitionStrategy(), loaded.getPartitionStrategy());
Assert.assertEquals("Format should match",
testDescriptor.getFormat(), loaded.getFormat());
}
@Test(expected=DatasetNotFoundException.class)
public void testLoadNoDataset() {
Assert.assertFalse("Sanity check", provider.exists(NAME));
provider.load(NAME);
}
@Test(expected=IllegalArgumentException.class)
public void testLoadFailsNullName() {
provider.load(null);
}
public void testUpdate() {
ensureCreated();
/*
* To be clear: we are testing that even crazy, incompatible changes are
* happily saved by the MetadataProvider. Rule enforcement is done upstream
* by libraries that are in a better position to make decisions about what
* changes are incompatible.
*/
final DatasetDescriptor saved = provider.update(NAME, anotherDescriptor);
Assert.assertNotNull("Updated Descriptor should be returned", saved);
Assert.assertEquals("Schema should match update",
anotherDescriptor.getSchema(), saved.getSchema());
Assert.assertEquals("PartitionStrategy should match update",
anotherDescriptor.getPartitionStrategy(), saved.getPartitionStrategy());
Assert.assertEquals("Format should match update",
anotherDescriptor.getFormat(), saved.getFormat());
}
@Test(expected=DatasetNotFoundException.class)
public void testUpdateFailsNoDataset() {
provider.update(NAME, testDescriptor);
}
@Test(expected=IllegalArgumentException.class)
public void testUpdateFailsNullName() {
provider.update(null, testDescriptor);
}
@Test(expected=IllegalArgumentException.class)
public void testUpdateFailsNullDescriptor() {
provider.update(NAME, null);
}
public void testDelete() {
ensureCreated();
boolean result = provider.delete(NAME);
Assert.assertTrue("Delete descriptor should return true", result);
result = provider.delete(NAME);
Assert.assertFalse("Delete non-existent descriptor should return false", result);
}
@Test(expected=IllegalArgumentException.class)
public void testDeleteFailsNullName() {
provider.delete(null);
}
@Test
public void testExists() {
Assert.assertFalse(provider.exists(NAME));
provider.create(NAME, testDescriptor);
Assert.assertTrue(provider.exists(NAME));
provider.delete(NAME);
Assert.assertFalse(provider.exists(NAME));
}
@Test(expected=IllegalArgumentException.class)
public void testExistsNullName() {
provider.exists(null);
}
@Test
public void testList() {
Assert.assertEquals(ImmutableMultiset.of(),
ImmutableMultiset.copyOf(provider.list()));
provider.create("test1", testDescriptor);
Assert.assertEquals(ImmutableMultiset.of("test1"),
ImmutableMultiset.copyOf(provider.list()));
provider.create("test2", testDescriptor);
Assert.assertEquals(ImmutableMultiset.of("test1", "test2"),
ImmutableMultiset.copyOf(provider.list()));
provider.create("test3", testDescriptor);
Assert.assertEquals(ImmutableMultiset.of("test1", "test2", "test3"),
ImmutableMultiset.copyOf(provider.list()));
provider.delete("test2");
Assert.assertEquals(ImmutableMultiset.of("test1", "test3"),
ImmutableMultiset.copyOf(provider.list()));
provider.delete("test3");
Assert.assertEquals(ImmutableMultiset.of("test1"),
ImmutableMultiset.copyOf(provider.list()));
provider.delete("test1");
Assert.assertEquals(ImmutableMultiset.of(),
ImmutableMultiset.copyOf(provider.list()));
}
@Test
public void testCustomProperties() {
final String propName = "my.custom.property";
final String propValue = "string";
DatasetDescriptor descriptorWithProp =
new DatasetDescriptor.Builder(testDescriptor)
.property(propName, propValue)
.build();
DatasetDescriptor created = provider.create(NAME, descriptorWithProp);
junit.framework.Assert.assertTrue("Should have custom property",
created.hasProperty(propName));
junit.framework.Assert.assertEquals(
"Should have correct custom property value",
propValue, created.getProperty(propName));
junit.framework.Assert.assertEquals("Should correctly list property names",
Sets.newHashSet(propName), created.listProperties());
DatasetDescriptor loaded = provider.load(NAME);
junit.framework.Assert.assertTrue("Should have custom property",
loaded.hasProperty(propName));
junit.framework.Assert.assertEquals(
"Should have correct custom property value",
propValue, loaded.getProperty(propName));
junit.framework.Assert.assertEquals("Should correctly list property names",
Sets.newHashSet(propName), loaded.listProperties());
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.codegen.bazel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Pattern;
/**
 * Computes the template-substitution values ("tokens") used to render BUILD.bazel
 * files for one versioned API directory, plus any attribute overrides carried over
 * from an existing BUILD file.
 */
class BazelBuildFileView {
  private static final String COMMON_RESOURCES_PROTO = "//google/cloud:common_resources_proto";
  // Matches the ":rule_name" suffix of a Bazel label.
  private static final Pattern LABEL_NAME = Pattern.compile(":\\w+$");
  private final Map<String, String> tokens = new HashMap<>();
  private final Map<String, Map<String, String>> overriddenStringAttributes = new HashMap<>();
  private final Map<String, Map<String, List<String>>> overriddenListAttributes = new HashMap<>();
  private final Map<String, String> assemblyPkgRulesNames = new HashMap<>();

  BazelBuildFileView(ApiVersionedDir bp) {
    // No proto package means there is nothing to render.
    if (bp.getProtoPackage() == null) {
      return;
    }
    tokens.put("name", bp.getName());
    tokens.put("assembly_name", bp.getAssemblyName());
    tokens.put("proto_srcs", joinSetWithIndentation(bp.getProtos()));
    tokens.put("version", bp.getVersion());
    tokens.put("package", bp.getProtoPackage());

    Set<String> extraProtosNodeJS = new TreeSet<>();
    Set<String> extraImports = new TreeSet<>();
    extraImports.add(COMMON_RESOURCES_PROTO);
    // Add location_proto dependency for mix-in if individual language rules need it.
    if (bp.hasLocations() && !bp.getProtoPackage().equals("google.cloud.location")) {
      extraImports.add("//google/cloud/location:location_proto");
    }
    // Add iam_policy_proto dependency for mix-in if individual language rules need it.
    if (bp.hasIAMPolicy() && !bp.getProtoPackage().equals("google.iam.v1")) {
      extraImports.add("//google/iam/v1:iam_policy_proto");
    }
    tokens.put("extra_imports", joinSetWithIndentation(extraImports));
    // Remove common_resources.proto because it is only needed for the proto_library_with_info target.
    extraImports.remove(COMMON_RESOURCES_PROTO);

    String packPrefix = bp.getProtoPackage().replace(".", "/") + '/';
    Set<String> actualImports = new TreeSet<>();
    for (String imp : bp.getImports()) {
      if (imp.startsWith(packPrefix) && imp.indexOf('/', packPrefix.length()) == -1) {
        // Ignore imports from same package, as all protos in same package are put in same
        // proto_library target.
        continue;
      }

      String actualImport = imp.replace(".proto", "_proto");
      if (actualImport.startsWith("google/protobuf/")) {
        actualImport = actualImport.replace("google/protobuf/", "@com_google_protobuf//:");
      } else if (actualImport.equals("google/cloud/common/operation_metadata_proto")) {
        actualImport = "//google/cloud/common:common_proto";
        extraProtosNodeJS.add(actualImport);
      } else {
        actualImport = convertPathToLabel("", actualImport);
      }
      actualImports.add(actualImport);
    }
    tokens.put("proto_deps", joinSetWithIndentation(actualImports));
    tokens.put("extra_protos_nodejs", joinSetWithIndentationNl(extraProtosNodeJS));
    tokens.put("go_proto_importpath", bp.getLangProtoPackages().get("go").split(";")[0]);
    tokens.put("go_proto_deps", joinSetWithIndentation(mapGoProtoDeps(actualImports)));

    // A GAPIC library needs either a service yaml or a gRPC service config;
    // proto-only packages stop here.
    boolean isGapicLibrary =
        bp.getServiceYamlPath() != null || bp.getServiceConfigJsonPath() != null;
    if (!isGapicLibrary) {
      return;
    }

    // Default grpc_service_config to None, unless there is one present.
    tokens.put("grpc_service_config", "None");
    if (bp.getServiceConfigJsonPath() != null) {
      // Wrap the label in quotes, because the template doesn't supply them
      // in case that None is supplied, which is a built-in value.
      tokens.put(
          "grpc_service_config",
          "\"" + convertPathToLabel(bp.getProtoPackage(), bp.getServiceConfigJsonPath()) + "\"");
    }

    String serviceYaml = "None";
    if (bp.getServiceYamlPath() != null) {
      // Wrap the label in quotations, because None doesn't need them, so they can't be in the template.
      serviceYaml = "\""+convertPathToLabel(bp.getProtoPackage(), bp.getServiceYamlPath())+"\"";
    }
    tokens.put("service_yaml", serviceYaml);

    // We need to continue to supply the gapic_yaml to Java targets when the
    // gapic_yaml is available, because that means it was added for some override.
    String gapicYaml = "None";
    String gapicYamlPath = bp.getGapicYamlPath();
    if (gapicYamlPath != null && !gapicYamlPath.isEmpty()) {
      gapicYaml = "\""+convertPathToLabel(bp.getProtoPackage(), gapicYamlPath)+"\"";
    }
    tokens.put("gapic_yaml", gapicYaml);

    Set<String> javaTests = new TreeSet<>();
    for (String service : bp.getServices()) {
      // Prioritize the language override in gapic.yaml if it is present.
      // New APIs (circa 2020) should rely on the protobuf options instead.
      String javaPackage =
          bp.getLangGapicPackages().containsKey("java")
              ? bp.getLangGapicPackages().get("java")
              : bp.getLangProtoPackages().get("java");
      if (javaPackage == null) {
        continue;
      }

      String actualService =
          bp.getLangGapicNameOverrides().containsKey("java")
              // The service name is overridden in gapic.yaml.
              ? bp.getLangGapicNameOverrides()
                  .get("java")
                  .getOrDefault(bp.getProtoPackage() + "." + service, service)
              // Default service name as it appears in the proto.
              : service;
      javaTests.add(javaPackage + "." + actualService + "ClientTest");
    }

    // Mix-in imports also participate in language-specific dependency mapping.
    actualImports.addAll(extraImports);
    tokens.put("java_tests", joinSetWithIndentation(javaTests));
    tokens.put("java_gapic_deps", joinSetWithIndentationNl(mapJavaGapicDeps(actualImports)));
    tokens.put(
        "java_gapic_test_deps", joinSetWithIndentationNl(mapJavaGapicTestDeps(actualImports)));
    // FIX: removed a second, redundant actualImports.addAll(extraImports) here;
    // the same elements were already added above and the set is unchanged in between.

    // Construct GAPIC import path & package name based on go_package proto option
    String protoPkg = bp.getProtoPackage();
    boolean isCloud = bp.getCloudScope() || protoPkg.contains("cloud");
    String goImport = assembleGoImportPath(isCloud, protoPkg, bp.getLangProtoPackages().get("go"));
    tokens.put("go_gapic_importpath", goImport);
    tokens.put("go_gapic_test_importpath", goImport.split(";")[0]);
    tokens.put("go_gapic_deps", joinSetWithIndentationNl(mapGoGapicDeps(actualImports)));

    overriddenStringAttributes.putAll(bp.getOverriddenStringAttributes());
    overriddenListAttributes.putAll(bp.getOverriddenListAttributes());
    assemblyPkgRulesNames.putAll(bp.getAssemblyPkgRulesNames());
  }

  /**
   * Builds the Go import path from the proto package and go_package option.
   * Cloud packages map to cloud.google.com/go; others to google.golang.org.
   */
  private String assembleGoImportPath(boolean isCloud, String protoPkg, String goPkg) {
    goPkg = goPkg.replaceFirst("google\\.golang\\.org\\/genproto\\/googleapis\\/", "");
    goPkg = goPkg.replaceFirst("cloud\\/", "");

    String goImport = "";
    if (isCloud) {
      goImport = "cloud.google.com/go/";
      goPkg = goPkg.replaceFirst("\\/v([a-z0-9]+);", "\\/apiv$1;");
    } else {
      goImport = "google.golang.org/";
      String pkgName = goPkg.split(";")[1];

      // use the proto package path for a non-Cloud Go import path
      // example: google.golang.org/google/ads/googleads/v3/services;services
      goPkg = protoPkg.replaceAll("\\.", "\\/");
      goPkg += ";" + pkgName;
    }

    return goImport + goPkg;
  }

  /**
   * Converts a file path to a Bazel label relative to the given proto package,
   * collapsing the shared suffix/prefix between the two. Paths without a '/'
   * (and null) are returned unchanged.
   */
  private String convertPathToLabel(String pkg, String path) {
    if (path == null) {
      return path;
    }
    if (!path.contains("/")) {
      return path;
    }

    String[] pkgTokens = pkg.isEmpty() ? new String[0] : pkg.split("\\.");
    String[] pathTokens = path.split("/");

    // Find pkgTokens suffix & pathTokens prefix intersection
    int index = 0;
    for (; index < pkgTokens.length && index < pathTokens.length; index++) {
      if (!pathTokens[index].equals(pkgTokens[pkgTokens.length - index - 1])) {
        break;
      }
    }

    List<String> tokens = new ArrayList<>();
    for (int i = 0; i < pkgTokens.length - index; i++) {
      tokens.add(pkgTokens[i]);
    }
    for (int i = index; i < pathTokens.length; i++) {
      tokens.add(pathTokens[i]);
    }

    StringBuilder sb = new StringBuilder("/");
    for (String token : tokens) {
      sb.append('/').append(token);
    }
    // Turn the final path separator into the label's ':' separator.
    int lastSlashIndex = sb.lastIndexOf("/");
    sb.replace(lastSlashIndex, lastSlashIndex + 1, ":");
    return sb.toString();
  }

  /** Renders a set as a quoted, comma-separated, indented Bazel list body. */
  private String joinSetWithIndentation(Set<String> set) {
    return set.isEmpty() ? "" : '"' + String.join("\",\n        \"", set) + "\",";
  }

  /** Same as {@link #joinSetWithIndentation}, prefixed with a newline when non-empty. */
  private String joinSetWithIndentationNl(Set<String> set) {
    String rv = joinSetWithIndentation(set);
    return rv.isEmpty() ? rv : "\n        " + rv;
  }

  /** Replaces the ":name" portion of a label, keeping its package path. */
  private String replaceLabelName(String labelPathAndName, String newLabelName) {
    return LABEL_NAME.matcher(labelPathAndName).replaceAll(newLabelName);
  }

  /** Maps proto_library labels to the Java proto libraries the GAPIC target needs. */
  private Set<String> mapJavaGapicDeps(Set<String> protoImports) {
    Set<String> javaImports = new TreeSet<>();
    for (String protoImport : protoImports) {
      if (protoImport.endsWith(":iam_policy_proto")
          || protoImport.endsWith(":policy_proto")
          || protoImport.endsWith(":options_proto")) {
        javaImports.add(replaceLabelName(protoImport, ":iam_java_proto"));
      } else if (protoImport.startsWith("//google/api:")) {
        javaImports.add(replaceLabelName(protoImport, ":api_java_proto"));
      } else if (protoImport.endsWith(":location_proto")) {
        javaImports.add("//google/cloud/location:location_java_proto");
      } else if (protoImport.endsWith(":common_proto")) {
        javaImports.add(replaceLabelName(protoImport, ":common_java_proto"));
      }
    }
    return javaImports;
  }

  /** Maps proto_library labels to the Java gRPC libraries the GAPIC tests need. */
  private Set<String> mapJavaGapicTestDeps(Set<String> protoImports) {
    Set<String> javaImports = new TreeSet<>();
    for (String protoImport : protoImports) {
      if (protoImport.endsWith(":iam_policy_proto")
          || protoImport.endsWith(":policy_proto")
          || protoImport.endsWith(":options_proto")) {
        javaImports.add(replaceLabelName(protoImport, ":iam_java_grpc"));
      } else if (protoImport.endsWith(":location_proto")) {
        javaImports.add("//google/cloud/location:location_java_grpc");
      }
    }
    return javaImports;
  }

  /**
   * Maps proto_library labels to Go proto library labels; several distinct proto
   * targets collapse into shared Go packages (annotations, serviceconfig, ...).
   */
  private Set<String> mapGoProtoDeps(Set<String> protoImports) {
    Set<String> goImports = new TreeSet<>();
    for (String protoImport : protoImports) {
      // Well-known types ship with the Go proto runtime; no explicit dep needed.
      if (protoImport.startsWith("@com_google_protobuf//")) {
        continue;
      }

      if (protoImport.endsWith(":resource_proto")
          || protoImport.endsWith(":client_proto")
          || protoImport.endsWith(":field_behavior_proto")
          || protoImport.endsWith(":http_proto")) {
        goImports.add(replaceLabelName(protoImport, ":annotations_go_proto"));
      } else if (protoImport.endsWith(":operations_proto")) {
        goImports.add(replaceLabelName(protoImport, ":longrunning_go_proto"));
      } else if (protoImport.endsWith(":iam_policy_proto")
          || protoImport.endsWith(":policy_proto")
          || protoImport.endsWith(":options_proto")) {
        goImports.add(replaceLabelName(protoImport, ":iam_go_proto"));
      } else if (protoImport.endsWith(":config_change_proto")) {
        goImports.add(replaceLabelName(protoImport, ":configchange_go_proto"));
      } else if (protoImport.endsWith(":service_proto") || protoImport.endsWith(":quota_proto")) {
        goImports.add(replaceLabelName(protoImport, ":serviceconfig_go_proto"));
      } else if (protoImport.endsWith(":postal_address_proto")) {
        goImports.add(replaceLabelName(protoImport, ":postaladdress_go_proto"));
      } else if (protoImport.endsWith(":monitored_resource_proto")) {
        goImports.add(replaceLabelName(protoImport, ":monitoredres_go_proto"));
      } else if (protoImport.endsWith(":launch_stage_proto")) {
        goImports.add(replaceLabelName(protoImport, ":api_go_proto"));
      } else if (protoImport.endsWith(":error_details_proto")) {
        goImports.add(replaceLabelName(protoImport, ":errdetails_go_proto"));
      } else {
        goImports.add(protoImport.replaceAll("_proto$", "_go_proto"));
      }
    }
    return goImports;
  }

  /** Maps proto_library labels to the Go libraries the GAPIC target needs. */
  private Set<String> mapGoGapicDeps(Set<String> protoImports) {
    Set<String> goImports = new TreeSet<>();
    for (String protoImport : protoImports) {
      if (protoImport.startsWith("@com_google_protobuf//")) {
        if (protoImport.endsWith(":duration_proto")) {
          goImports.add("@io_bazel_rules_go//proto/wkt:duration_go_proto");
        }
        continue;
      }

      if (protoImport.endsWith(":operations_proto")) {
        goImports.add(replaceLabelName(protoImport, ":longrunning_go_proto"));
        goImports.add("@com_google_cloud_go//longrunning:go_default_library");
        goImports.add("@com_google_cloud_go//longrunning/autogen:go_default_library");
        // LRO responses can embed well-known types; pull those in when present.
        for (String pi : protoImports) {
          if (pi.startsWith("@com_google_protobuf//")) {
            if (pi.endsWith(":struct_proto")) {
              goImports.add("@io_bazel_rules_go//proto/wkt:struct_go_proto");
            } else if (pi.endsWith(":any_proto")) {
              goImports.add("@io_bazel_rules_go//proto/wkt:any_go_proto");
            }
          }
        }
      } else if (protoImport.endsWith(":iam_policy_proto")
          || protoImport.endsWith(":policy_proto")
          || protoImport.endsWith(":options_proto")) {
        goImports.add(replaceLabelName(protoImport, ":iam_go_proto"));
      } else if (protoImport.endsWith(":service_proto")) {
        goImports.add(replaceLabelName(protoImport, ":serviceconfig_go_proto"));
      } else if (protoImport.endsWith(":httpbody_proto")) {
        goImports.add(replaceLabelName(protoImport, ":httpbody_go_proto"));
      } else if (protoImport.endsWith(":monitored_resource_proto")) {
        goImports.add(replaceLabelName(protoImport, ":monitoredres_go_proto"));
      } else if (protoImport.endsWith(":metric_proto")) {
        goImports.add(replaceLabelName(protoImport, ":metric_go_proto"));
      } else if (protoImport.endsWith(":location_proto")) {
        goImports.add(replaceLabelName(protoImport, ":location_go_proto"));
      } else if (protoImport.endsWith(":common_proto")) {
        goImports.add(replaceLabelName(protoImport, ":common_go_proto"));
      }
    }
    return goImports;
  }

  /** @return an unmodifiable view of the computed template tokens. */
  Map<String, String> getTokens() {
    return Collections.unmodifiableMap(this.tokens);
  }

  Map<String, Map<String, String>> getOverriddenStringAttributes() {
    return overriddenStringAttributes;
  }

  Map<String, Map<String, List<String>>> getOverriddenListAttributes() {
    return overriddenListAttributes;
  }

  Map<String, String> getAssemblyPkgRulesNames() {
    return assemblyPkgRulesNames;
  }
}
| |
/*
* $Revision$
* $Date$
*
* Copyright (C) 1999-$year$ Jive Software. All rights reserved.
*
* This software is the proprietary information of Jive Software. Use is subject to license terms.
*/
package com.jivesoftware.os.filer.queue.store;
import com.jivesoftware.os.filer.io.FilerIO;
import com.jivesoftware.os.mlogger.core.MetricLogger;
import com.jivesoftware.os.mlogger.core.MetricLoggerFactory;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.RandomAccessFile;
/**
* basically a file of length,byte[], tuples
*
* @author jonathan
*/
public class FileQueue {
    // Entry mode values stored in the first 8 bytes of each record header.
    // NOTE(review): "ENQEUED" misspelling kept — the constant is public API.
    public static final long ENQEUED = 0;
    public static final long CONSUMED = 1;
    public static final long FAILED = 2;
    public static final long REMOVED = Long.MAX_VALUE;
    private static final MetricLogger logger = MetricLoggerFactory.getLogger();
    // Diagnostic counter included in error messages; updated without synchronization.
    public static long openFilesCount = 0;
    // Sentinel written after every entry so readers can detect torn/partial writes.
    private static final int END_OF_DATA_MARKER = 0xFFFF;
    public final static int endOfDataMarkerSize = 4;
    // Record header layout: mode(8 bytes) + payload length(8) + timestamp(8).
    public static final int headerSize = 8 + 8 + 8;
    public final static long ABSOLUTE_MAX_QUEUE_ENTRY_SIZE_IN_BYTES = 1024 * 1024 * 1024; // 1gb

    /** Lifecycle states of the backing queue file. */
    enum State {
        ERROR_READING,
        READING,
        OPENING_READING,
        OFF,
        OPENING_WRITING,
        WRITING,
        ERROR_WRITING,
        DELETED,
        FAILED_TO_DELETE
    }

    final private File file;
    // Guards all access to the read/write RandomAccessFiles, state, and scratch buffers.
    final private Object ioLock = new Object();
    private State state = State.OFF;
    private RandomAccessFile write;
    private RandomAccessFile read;
    final private long creationTimestamp;
    private long appendedTimestamp;
    // When true, fsync after appends and mode rewrites.
    private boolean hardFlush;
    // Scratch buffers reused across reads; safe only because all I/O holds ioLock.
    private final byte[] modeLengthAndTimestamp = new byte[headerSize];
    private final byte[] endOfDataMarker = new byte[endOfDataMarkerSize];
    /**
     * Creates a queue view over the given backing file.
     *
     * @param file the backing file (not created here; see ensureFileExists())
     * @param hardFlush if true, fsync after every append and mode rewrite
     */
    public FileQueue(File file, boolean hardFlush) {
        this.file = file;
        this.hardFlush = hardFlush;
        this.creationTimestamp = System.currentTimeMillis();
    }
    /**
     * Not thread safe. use ioLock to be thread safe
     *
     * @return the current lifecycle state of this queue file
     */
    State getState() {
        return state;
    }
    /** @return the monitor object guarding all I/O on the backing file. */
    Object getIoLock() {
        return ioLock;
    }
    /** Identifies this queue by its backing file name. */
    @Override
    public String toString() {
        return "FileQueue:" + file.getName();
    }
    /** @return the backing file for this queue. */
    public File getFile() {
        return file;
    }

    /** @return the current length of the backing file in bytes. */
    public long length() {
        return file.length();
    }

    /** @return the wall-clock time (millis) when this FileQueue object was constructed. */
    public long creationTimestamp() {
        return creationTimestamp;
    }

    /** @return the wall-clock time (millis) of the last successful append, or 0 if none. */
    public long lastAppendTimestamp() {
        return appendedTimestamp;
    }
    /**
     * Creates the backing file (and its parent directories) if it does not exist yet.
     *
     * @throws IOException if the file cannot be created
     */
    public void ensureFileExists() throws IOException {
        ensureDirectory(file); // todo is there a better way
        if (!file.exists()) {
            logger.debug("Creating new Queue File for " + file);
            // A false return (another thread/process won the race) is intentionally ignored.
            file.createNewFile();
        }
    }
public Exception ensureDirectory(File _file) {
if (_file == null) {
return null;
}
try {
if (_file.exists()) {
return null;
}
File parent = _file.getParentFile();
if (parent != null) {
parent.mkdirs();
}
return null;
} catch (Exception x) {
return x;
}
}
    /**
     * Appends one record — mode(ENQEUED), length, timestamp, payload, end-of-data
     * marker — to the queue file, rolling the file pointer (and file length) back
     * on I/O failure.
     * <p>
     * Will throw exception under the following conditions: if someone is copying
     * this queue or if someone is already reading from this queue.
     *
     * @param timestamp timestamp stored in the record header
     * @param append payload; must be non-null, non-empty, and at most
     *        ABSOLUTE_MAX_QUEUE_ENTRY_SIZE_IN_BYTES bytes
     */
    public void append(long timestamp, byte[] append) {
        synchronized (ioLock) {
            RandomAccessFile writeTo = write(); // this outside of the try so it doesn't effect the state if we are making a copy
            long fp = -1;
            try {
                if (append == null) {
                    throw new RuntimeException("null entries aren't supported by this queue!");
                }
                int l = append.length;
                if (l == 0) {
                    throw new RuntimeException("zero lengthed entries aren't supported by this queue!");
                }
                if (l > ABSOLUTE_MAX_QUEUE_ENTRY_SIZE_IN_BYTES) {
                    throw new RuntimeException("entry length exceeds ABSOLUTE_MAX_QUEUE_ENTRY_SIZE_IN_BYTES=" + ABSOLUTE_MAX_QUEUE_ENTRY_SIZE_IN_BYTES);
                }
                fp = writeTo.getFilePointer(); // get current fp so that if anything goes wrong we can put it back in the right place.
                writeTo.write(FilerIO.longBytes(ENQEUED)); // mode
                writeTo.write(FilerIO.longBytes(l)); // payload length
                writeTo.write(FilerIO.longBytes(timestamp));
                writeTo.write(append);
                writeTo.write(FilerIO.intBytes(END_OF_DATA_MARKER)); // lets readers detect torn writes
                if (hardFlush) {
                    writeTo.getFD().sync();
                }
                appendedTimestamp = System.currentTimeMillis();
            } catch (IOException e) {
                try {
                    writeTo.seek(fp); // something went wrong try to put the fp back in the right place.
                    writeTo.setLength(fp); // try to truncate the file
                } catch (IOException ioe) {
                    // swallow exception. oh well we tried.
                    state = State.ERROR_WRITING;
                }
                logger.error("failed to write to file=" + file + " openFilesCount=" + openFilesCount, e);
                throw new RuntimeException(e);
            }
        }
    }
    /**
     * Streams every entry currently in mode {@code ifInThisMode} to the given
     * stream, rewriting each streamed entry's mode to {@code afterReadSetToThisMode}.
     * A null entry is pushed to the stream to signal end-of-stream, and the file
     * is closed afterwards. Any failure flips state to ERROR_READING and rethrows
     * as RuntimeException.
     * <p>
     * Will throw exception under the following conditions: if someone is copying
     * this queue or if someone is already writing to this queue.
     *
     * @param ifInThisMode only entries currently in this mode are streamed
     * @param afterReadSetToThisMode mode written back for each streamed entry
     * @param stream callback; returning null from stream() stops iteration early
     */
    public void read(long ifInThisMode, long afterReadSetToThisMode, QueueEntryStream<FileQueueEntry> stream) {
        synchronized (ioLock) {
            RandomAccessFile readFrom = read(); // this outside of the try so it doesn't effect the state if we are making a copy
            if (readFrom == null) {
                state = State.ERROR_READING;
                logger.error("failed to read from file=" + file + " openFilesCount=" + openFilesCount);
                try {
                    stream.stream(null); // denotes end of stream
                } catch (Exception ex) {
                    logger.error("failed marking end of stream=", ex);
                }
                return;
            }
            try {
                while (true) {
                    FileQueueEntry fileQueueEntry = read(readFrom, ifInThisMode, afterReadSetToThisMode);
                    if (fileQueueEntry == null) {
                        break;
                    } else {
                        FileQueueEntry returned = stream.stream(fileQueueEntry);
                        if (returned == null) {
                            // consumer asked to stop early
                            break;
                        }
                    }
                }
                stream.stream(null); // denotes end of stream
                close();
            } catch (IOException ioex) {
                state = State.ERROR_READING;
                logger.error("failed to read from file=" + file + " openFilesCount=" + openFilesCount, ioex);
                throw new RuntimeException("Failed to close file=" + file + " openFilesCount=" + openFilesCount, ioex);
            } catch (Exception ex) {
                state = State.ERROR_READING;
                logger.error("failed to read from file=" + file + " openFilesCount=" + openFilesCount, ex);
                throw new RuntimeException("Failed to close file=" + file + " openFilesCount=" + openFilesCount, ex);
            }
        }
    }
    /**
     * Reads the next entry that is currently in mode {@code ifInThisMode},
     * rewriting its mode to {@code afterReadSetToThisMode} in place.
     *
     * @return the next matching entry, or null if the file cannot be opened or
     *         no further matching entry exists
     */
    public FileQueueEntry readNext(long ifInThisMode, long afterReadSetToThisMode) {
        synchronized (ioLock) {
            RandomAccessFile readFrom = read(); // this outside of the try so it doesn't effect the state if we are making a copy
            if (readFrom == null) {
                return null;
            }
            try {
                logger.trace("read from " + getFile());
                return read(readFrom, ifInThisMode, afterReadSetToThisMode);
            } catch (IOException ioex) {
                state = State.ERROR_READING;
                logger.error("failed to read from file=" + file + " openFilesCount=" + openFilesCount, ioex);
                throw new RuntimeException("Failed to close file=" + file + " openFilesCount=" + openFilesCount, ioex);
            } catch (Exception ex) {
                state = State.ERROR_READING;
                logger.error("failed to read from file=" + file + " openFilesCount=" + openFilesCount, ex);
                throw new RuntimeException("Failed to close file=" + file + " openFilesCount=" + openFilesCount, ex);
            }
        }
    }
// assumes called is holding ioLock
private FileQueueEntry read(RandomAccessFile readFrom, long ifInThisMode, long afterReadSetToThisMode) throws IOException {
while (true) {
long fp = readFrom.getFilePointer();
int r = readFrom.read(modeLengthAndTimestamp);
if (r == -1) {
// likely end of file
return null;
}
if (r != headerSize) {
corruptionDetected();
return null;
}
long[] mlt = FilerIO.bytesLongs(modeLengthAndTimestamp);
long mode = mlt[0];
if (mlt[1] > ABSOLUTE_MAX_QUEUE_ENTRY_SIZE_IN_BYTES) {
corruptionDetected();
throw new IOException("single entry length=" + mlt[1] + " will out strip ram!");
}
if (fp + headerSize + mlt[1] > readFrom.length()) {
logger.warn("encoutered premature end of file:{}", getFile());
return null;
}
int l = (int) mlt[1];
long timestamp = mlt[2];
long beginingOfMessage = readFrom.getFilePointer();
if (mode == ifInThisMode && ifInThisMode != afterReadSetToThisMode) {
readFrom.seek(fp);
readFrom.writeLong(afterReadSetToThisMode); // should we fsync?
// todo: may want to make this configurable
// readFrom.getFD().sync();// this is hacky but hey we want this to happen a soon as possible
// readFrom.seek(fp); // put fp back to where it was
readFrom.seek(beginingOfMessage);
}
byte[] appended = new byte[l];
r = readFrom.read(appended);
if (r != l) {
corruptionDetected();
throw new IOException("expected " + l + " bytes");
}
r = readFrom.read(endOfDataMarker);
if (FilerIO.bytesInt(endOfDataMarker, 0) != END_OF_DATA_MARKER) {
corruptionDetected();
throw new IOException("expected END_OF_DATA_MARKER " + END_OF_DATA_MARKER + " == " + FilerIO.bytesInt(endOfDataMarker, 0));
}
if (mode != ifInThisMode) {
continue; //not in desired mode so continue to next
}
return new FileQueueEntry(fp, timestamp, appended) {
@Override
public void processed() {
setMode(this, REMOVED);
}
@Override
public void failed(long modeIfFailed) {
setMode(this, FAILED);
}
};
}
}
    /**
     * Quarantines a corrupt queue file: empty files are simply deleted; otherwise
     * the file is copied to a ".corrupt"-suffixed sibling and then deleted.
     * Refuses to copy when disk space is low (less than 2x the file size free) or
     * the file is not writable. All failures are logged, never thrown, and a
     * failed copy still attempts deletion of the original.
     */
    public void corruptionDetected() {
        try {
            File f = file;
            if (f.length() == 0) {
                // nothing worth preserving
                try {
                    file.delete();
                } catch (Exception ex) {
                    logger.error("Failed to delete corrupt file!", ex);
                }
                return;
            }
            long free = f.getFreeSpace();
            if (free < f.length() * 2) {
                logger.error("we are out of disk space.");
                throw new IOException("out of disk space");
            }
            if (!f.canWrite()) {
                logger.error("we don't have permissions to write to " + f);
                throw new IOException("invalid permissions");
            }
            // pick a ".corrupt" name that doesn't collide with earlier quarantines
            File corrupt = new File(file.getAbsolutePath() + ".corrupt");
            while (corrupt.exists()) {
                corrupt = new File(corrupt.getAbsolutePath() + ".corrupt");
            }
            copyTo(file, corrupt);
            try {
                file.delete();
            } catch (Exception ex) {
                logger.error("Failed to delete corrupt file!", ex);
            }
        } catch (Exception x) {
            logger.error("Failed to make a copy of the corrupt file!", x);
            try {
                file.delete();
            } catch (Exception ex) {
                logger.error("Failed to delete corrupt file!", x);
            }
        }
    }
public boolean copyTo(File _from, File _to) throws Exception {
boolean fromIsDir = _from.isDirectory();
boolean toIsDir = _to.isDirectory();
if (fromIsDir != toIsDir) {
throw new Exception(_from + " isn't the same type as " + _to);
}
if (_from.isDirectory()) {
File[] array = _from.listFiles();
if (array != null) {
for (int i = 0; i < array.length; i++) {
File copyTo = new File(_to, array[i].getName());
if (array[i].isDirectory()) {
copyTo.mkdir();
}
copyTo(array[i], copyTo); //!!recursion
}
}
} else {
if (_to.exists()) {
return true; // replace or skip
}
File parent = _to.getParentFile();
if (parent != null) {
parent.mkdirs();
//_to.createNewFile();
}
OutputStream to;
try (InputStream from = new FileInputStream(_from)) {
to = new FileOutputStream(_to);
BufferedInputStream f = new BufferedInputStream(from, 16384);
BufferedOutputStream t = new BufferedOutputStream(to, 16384);
int i = -1;
while ((i = f.read()) != -1) {
t.write(i);
}
t.flush();
}
to.close();
}
return true;
}
    /** Rewrites the on-disk mode header of the given entry (delegates to the fp-based overload). */
    public void setMode(FileQueueEntry entry, long mode) {
        setMode(entry.getFp(), mode);
    }
    /**
     * Overwrites the mode field of the entry header located at {@code entryFp}, leaving the
     * length and timestamp fields untouched, then restores the file pointer so an in-progress
     * sequential read is not disturbed. No-op if the queue was deleted or cannot be opened.
     *
     * @param entryFp byte offset of the entry's header within the queue file
     * @param mode    new mode value to stamp into the header (e.g. REMOVED, FAILED)
     */
    private void setMode(long entryFp, long mode) {
        synchronized (ioLock) {
            if (state == State.DELETED) {
                return; // all are already gone!
            }
            RandomAccessFile readFrom = read(); // this outside of the try so it doesn't effect the state if we are making a copy
            if (readFrom == null) {
                return;
            }
            try {
                // Remember the current position so we can restore it after the in-place update.
                long fp = readFrom.getFilePointer();
                readFrom.seek(entryFp);
                int r = readFrom.read(modeLengthAndTimestamp);
                if (r != headerSize || r == -1) {
                    close();
                    return; // EOF: header could not be fully read
                }
                // Header layout per the buffer name: [0]=mode, [1]=payload length, [2]=timestamp.
                long[] mlt = FilerIO.bytesLongs(modeLengthAndTimestamp);
                mlt[0] = mode;
                readFrom.seek(entryFp);
                readFrom.write(FilerIO.longsBytes(mlt));
                // todo: may want to make this configurable
                if (hardFlush) {
                    readFrom.getFD().sync(); // this is hacky but hey we want this to happen a soon as possible
                } else {
                    // let RandomAccessFile flush when it feels like
                }
                readFrom.seek(fp); // put fp back to where it was
            } catch (IOException ioex) {
                logger.error("failed to remove entryFp=" + entryFp + " from file=" + file + " openFilesCount=" + openFilesCount, ioex);
                throw new RuntimeException("Failed to close file=" + file + " openFilesCount=" + openFilesCount, ioex);
            }
        }
    }
    /**
     * Scans the entire queue file, counting entries whose mode is not REMOVED. Any entry with
     * a non-zero mode is re-stamped to {@code makeItThisMode}. If the queue was OFF before the
     * scan it is closed again afterwards, and an empty queue file can optionally be deleted.
     * Should only be called during startup.
     *
     * @param makeItThisMode      mode to stamp onto live entries whose current mode is non-zero
     * @param removeIfCountIsZero when true, delete the backing file if no live entries remain
     * @return the number of live (non-REMOVED) entries found; 0 if deleted/unopenable/empty
     */
    public long bruteForceCount(long makeItThisMode, boolean removeIfCountIsZero) {
        if (file.length() == 0) {
            return 0;
        }
        long bruteForceCount = 0;
        synchronized (ioLock) {
            if (state == State.DELETED) {
                return 0; // all are already gone!
            }
            // If we had to open the file just for this count, close it again when done.
            boolean closeAfterCounting = false;
            if (state == State.OFF) {
                closeAfterCounting = true;
            }
            RandomAccessFile readFrom = read(); // this outside of the try so it doesn't effect the state if we are making a copy
            if (readFrom == null) {
                return 0;
            }
            try {
                long fp = readFrom.getFilePointer();
                readFrom.seek(0);
                while (true) {
                    long entryFp = readFrom.getFilePointer();
                    int r = readFrom.read(modeLengthAndTimestamp);
                    if (r != headerSize || r == -1) {
                        break; // truncated or missing header: treat as end of file
                    }
                    // Header layout per the buffer name: [0]=mode, [1]=payload length, [2]=timestamp.
                    long[] mlt = FilerIO.bytesLongs(modeLengthAndTimestamp);
                    // Skip over the payload plus its end-of-data marker to the next header.
                    readFrom.seek(readFrom.getFilePointer() + mlt[1] + endOfDataMarkerSize);
                    if (mlt[0] == REMOVED) {
                        continue;
                    }
                    if (mlt[0] != 0) {
                        setMode(entryFp, makeItThisMode);
                    }
                    bruteForceCount++;
                }
                readFrom.seek(fp); // put fp back to where it was
                if (closeAfterCounting) {
                    close();
                }
                if (bruteForceCount == 0 && removeIfCountIsZero) {
                    logger.debug("Deleting " + file + " because bruteForceCount is zero");
                    delete();
                }
            } catch (IOException ioex) {
                throw new RuntimeException("Failed to count file=" + file + " openFilesCount=" + openFilesCount, ioex);
            }
        }
        return bruteForceCount;
    }
@Override
public int hashCode() {
return file.hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final FileQueue other = (FileQueue) obj;
if (this.file != other.file && (this.file == null || !this.file.equals(other.file))) {
return false;
}
return true;
}
    /**
     * Lazily opens (and caches) the RandomAccessFile used for appending to the queue file,
     * creating the file if needed and bumping {@code openFilesCount}.
     *
     * NOTE(review): this method guards on / transitions through OPENING_READING/READING while
     * read() uses OPENING_WRITING/WRITING — the state names appear swapped relative to the
     * method names. Confirm against the State enum and its callers before "fixing".
     *
     * @return the cached or newly opened writer handle (never null)
     * @throws RuntimeException if the file cannot be created/opened, or the queue state
     *                          does not permit opening a writer
     */
    private RandomAccessFile write() {
        synchronized (ioLock) {
            if (state == State.OFF || state == State.READING) {
                if (write != null) {
                    return write; // reuse the already-open handle
                }
                try {
                    state = State.OPENING_READING;
                    ensureFileExists();
                    write = new RandomAccessFile(file, "rw"); // open for appending
                    openFilesCount++;
                    state = State.READING;
                    return write;
                } catch (FileNotFoundException e) {
                    throw new RuntimeException("couldnt create writer FileNotFoundException " + e.getMessage(), e);
                } catch (IOException e) {
                    throw new RuntimeException("couldnt create writer " + e.getMessage() + " openFilesCount=" + openFilesCount, e);
                }
            } else {
                throw new RuntimeException("Queue is in a bad state=" + state + " expected " + State.OFF + " or " + State.READING);
            }
        }
    }
    /**
     * Lazily opens (and caches) the RandomAccessFile used for reading the queue file.
     * Unlike {@link #write()}, failures here are logged and reported as {@code null}
     * rather than thrown.
     *
     * NOTE(review): this method transitions through OPENING_WRITING/WRITING while write()
     * uses OPENING_READING/READING — the state names appear swapped relative to the method
     * names. Confirm against the State enum and its callers before "fixing".
     *
     * @return the cached or newly opened reader handle, or null if it cannot be opened
     *         or the queue state does not permit opening a reader
     */
    private RandomAccessFile read() {
        synchronized (ioLock) {
            if (state == State.OFF || state == State.WRITING) {
                if (read != null) {
                    return read; // reuse the already-open handle
                }
                try {
                    state = State.OPENING_WRITING;
                    ensureFileExists();
                    read = new RandomAccessFile(file, "rw");
                    openFilesCount++;
                    state = State.WRITING;
                    return read;
                } catch (FileNotFoundException e) {
                    logger.error("FileNotFoundException openFilesCount=" + openFilesCount, e);
                    return null;
                } catch (IOException e) {
                    logger.error("IOException openFilesCount=" + openFilesCount, e);
                    return null;
                }
            } else {
                logger.error("Queue is in a bad state=" + state + " expected " + State.OFF + " or " + State.WRITING);
                return null;
            }
        }
    }
public void close() {
synchronized (ioLock) {
if (state == State.FAILED_TO_DELETE || state == State.DELETED) {
logger.warn("WARNING you cannot close a file that was deleted!");
return;
}
try {
if (read != null) {
read.close();
openFilesCount--;
}
read = null;
if (write != null) {
write.getFD().sync(); // this is hacky but hey we want this to happen a soon as possible
write.close();
openFilesCount--;
}
write = null;
state = State.OFF;
} catch (IOException ex) {
state = State.OFF;
throw new RuntimeException("Failed to close file=" + file, ex);
}
}
}
public boolean delete() {
synchronized (ioLock) {
close();
if (!file.exists()) {
state = State.DELETED;
return true;
} else if (file.delete()) {
logger.debug("Deleted " + file);
state = State.DELETED;
return true;
} else {
state = State.FAILED_TO_DELETE;
throw new RuntimeException("Tried to delete but failed for file=" + file);
}
}
}
}
| |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.das.integration.tests.restservice.analytics;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.annotation.NotThreadSafe;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.util.EntityUtils;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.wso2.carbon.analytics.api.AnalyticsDataAPI;
import org.wso2.carbon.analytics.api.CarbonAnalyticsAPI;
import org.wso2.carbon.analytics.datasource.commons.AnalyticsSchema;
import org.wso2.carbon.analytics.datasource.commons.ColumnDefinition;
import org.wso2.carbon.analytics.datasource.commons.Record;
import org.wso2.carbon.automation.test.utils.http.client.HttpRequestUtil;
import org.wso2.carbon.automation.test.utils.http.client.HttpResponse;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import org.wso2.das.analytics.rest.beans.DrillDownPathBean;
import org.wso2.das.analytics.rest.beans.DrillDownRequestBean;
import org.wso2.das.analytics.rest.beans.QueryBean;
import org.wso2.das.analytics.rest.beans.RecordBean;
import org.wso2.das.integration.common.utils.DASIntegrationTest;
import org.wso2.das.integration.common.utils.TestConstants;
import org.wso2.das.integration.common.utils.Utils;
import java.io.File;
import java.lang.reflect.Type;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
public class AnalyticsRestTestCase extends DASIntegrationTest {
private static final Log log = LogFactory.getLog(AnalyticsRestTestCase.class);
private static final String TABLE_NAME = "testtable";
private static final String TABLE_NAME2 = "doesntExists";
private static final String INDICES = "indexData";
private static final long ONE_HOUR_MILLISECOND = 3600000;
private static final Gson gson = new Gson();
// private AnalyticsSchemaBean schemaBean;
private Map<String, String> headers;
private Map<String, Object> updateValueSet1, valueSet1;
private Map<String, Object> updateValueSet2, valueSet2;
private RecordBean record1;
private RecordBean record2;
/* private RecordBean record3;
private RecordBean record4;*/
AnalyticsDataAPI analyticsDataAPI;
    /**
     * One-time test fixture setup: builds the common HTTP headers (JSON + admin basic auth),
     * the two insert value sets, the two update value sets, the two RecordBeans used across
     * the test methods, and a CarbonAnalyticsAPI client configured from the bundled
     * analytics-data-config.xml resource.
     */
    @BeforeClass(alwaysRun = true)
    protected void init() throws Exception {
        super.init();
        // Common headers sent with every REST call in this suite.
        headers = new HashMap<>();
        headers.put("Content-Type", TestConstants.CONTENT_TYPE_JSON);
        headers.put("Accept", TestConstants.CONTENT_TYPE_JSON);
        headers.put("Authorization", TestConstants.BASE64_ADMIN_ADMIN);
        /*Map<String, ColumnDefinitionBean> indices = new HashMap<>();
        indices.put("key1@", new ColumnDefinitionBean(ColumnTypeBean.STRING, true, false));
        indices.put("key2@", new ColumnDefinitionBean(ColumnTypeBean.STRING, true, false));
        indices.put("key3", new ColumnDefinitionBean(ColumnTypeBean.STRING, true, false));
        indices.put("key4@", new ColumnDefinitionBean(ColumnTypeBean.STRING, true, false));
        indices.put("key5@", new ColumnDefinitionBean(ColumnTypeBean.STRING, true, false));
        indices.put("IndexedKey", new ColumnDefinitionBean(ColumnTypeBean.STRING, true, false));
        indices.put("facet", new ColumnDefinitionBean(ColumnTypeBean.FACET, true, false));
        schemaBean = new AnalyticsSchemaBean(indices, null);*/
        // Value sets for the two initial records; LinkedHashMap keeps key insertion order,
        // which some assertions rely on when matching serialized JSON.
        valueSet1 = new LinkedHashMap<>();
        valueSet1.put("key1@", "@value1");
        valueSet1.put("key2@", "@value2");
        valueSet1.put("key3", "value3");
        valueSet1.put("key4@", "@value4");
        valueSet1.put("key5@", "@value5");
        valueSet2 = new LinkedHashMap<>();
        valueSet2.put("key7@", "@value1");
        valueSet2.put("key6@", "@value2");
        valueSet2.put("key9@", "@value3");
        valueSet2.put("key0@", "@value4");
        valueSet2.put("key4@", "@value5");
        // Value sets used by the update tests; updateValueSet1 carries the indexed column.
        updateValueSet1 = new LinkedHashMap<>();
        updateValueSet1.put("updatedkey7@", "updated@value1");
        updateValueSet1.put("updatedkey6@", "updated@value2");
        updateValueSet1.put("IndexedKey", "IndexedValue");
        updateValueSet1.put("updatedkey0@", "updated@value4");
        updateValueSet1.put("updatedkey4@", "updated@value5");
        updateValueSet2 = new LinkedHashMap<>();
        updateValueSet2.put("key1@", "@value1");
        updateValueSet2.put("key2@", "@value2");
        updateValueSet2.put("key3", "value3");
        updateValueSet2.put("key4@", "@value4");
        updateValueSet2.put("key5@", "@value5");
        record1 = new RecordBean();
        record1.setTableName(TABLE_NAME);
        record1.setValues(valueSet1);
        record2 = new RecordBean();
        record2.setTableName(TABLE_NAME);
        record2.setValues(valueSet2);
        /* record3 = new RecordBean();
        record3.setTableName(TABLE_NAME);
        record3.setValues(valueSet1);
        record4 = new RecordBean();
        record4.setTableName(TABLE_NAME);
        record4.setValues(valueSet2);*/
        // Build the data-API client from the test resource config shipped with the suite.
        String apiConf =
                new File(this.getClass().getClassLoader().
                        getResource("dasconfig" + File.separator + "api" + File.separator + "analytics-data-config.xml").toURI())
                        .getAbsolutePath();
        analyticsDataAPI = new CarbonAnalyticsAPI(apiConf);
    }
/*@Test(groups = "wso2.das", description = "Create table")
public void createTable() throws Exception {
log.info("Executing create table test case ...");
URL restUrl = new URL(TestConstants.ANALYTICS_TABLES_ENDPOINT_URL);
TableBean tableBean = new TableBean();
tableBean.setTableName(TABLE_NAME);
HttpResponse response = HttpRequestUtil.doPost(restUrl,gson.toJson(tableBean), headers);
log.info("Response: " + response.getData());
Assert.assertEquals(response.getResponseCode(), 201, "Status code is different");
Assert.assertTrue(response.getData().
contains("Successfully created table: " + TABLE_NAME));
}*/
    /** Creates the test table directly through the Analytics data API (not the REST layer). */
    @Test(groups = "wso2.das", description = "Create table")
    public void createTable() throws Exception {
        log.info("Executing create table test case ...");
        analyticsDataAPI.createTable(MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME);
    }
@Test(groups = "wso2.das", description = "Checks if table exists", dependsOnMethods = "createTable")
public void tableExists() throws Exception {
log.info("Executing Table Exist test case ...");
HttpResponse response = Utils.doGet(TestConstants.ANALYTICS_ENDPOINT_URL +
TestConstants.TABLE_EXISTS + TABLE_NAME, headers);
log.info("Response: " + response.getData());
Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
}
@Test(groups = "wso2.das", description = "Checks if table doesnt exist", dependsOnMethods = "tableExists")
public void tableNotExist() throws Exception {
log.info("Executing TableNotExist test case ...");
HttpResponse response = Utils.doGet(TestConstants.ANALYTICS_ENDPOINT_URL +
TestConstants.TABLE_EXISTS + TABLE_NAME2, headers);
log.info("Response: " + response.getData());
Assert.assertEquals(response.getResponseCode(), 404, "Status code is different");
}
@Test(groups = "wso2.das", description = "lists all the tables", dependsOnMethods = "tableNotExist")
public void getAllTables() throws Exception {
log.info("Executing getAllTables test case ...");
HttpResponse response = Utils.doGet(TestConstants.ANALYTICS_TABLES_ENDPOINT_URL, headers);
log.info("Response: " + response.getData());
Type listType = new TypeToken<List<String>>(){}.getType();
List< String> tableNames = gson.fromJson(response.getData(), listType);
Assert.assertTrue(tableNames.contains("testtable".toUpperCase()), "Table : testtable not found");
Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
}
/*@Test(groups = "wso2.das", description = "Create table schema", dependsOnMethods = "createTable")
public void setTableSchema() throws Exception {
log.info("Executing createTableSchema test case ...");
URL restUrl = new URL(TestConstants.ANALYTICS_TABLES_ENDPOINT_URL + TABLE_NAME + TestConstants.SCHEMA);
HttpResponse response = HttpRequestUtil.doPost(restUrl, gson.toJson(schemaBean), headers);
log.info("Response: " + response.getData());
Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
}*/
@Test(groups = "wso2.das", description = "Create table schema", dependsOnMethods = "getAllTables")
public void setTableSchema() throws Exception {
log.info("Executing createTableSchema test case ...");
List<ColumnDefinition> columns = new ArrayList<>();
columns.add(new ColumnDefinition("key1@", AnalyticsSchema.ColumnType.STRING, true, false));
columns.add(new ColumnDefinition("key2@", AnalyticsSchema.ColumnType.STRING, true, false));
columns.add(new ColumnDefinition("key3", AnalyticsSchema.ColumnType.STRING, true, false));
columns.add(new ColumnDefinition("key4@", AnalyticsSchema.ColumnType.STRING, true, false));
columns.add(new ColumnDefinition("key5@", AnalyticsSchema.ColumnType.STRING, true, false));
columns.add(new ColumnDefinition("IndexedKey", AnalyticsSchema.ColumnType.STRING, true, false));
columns.add(new ColumnDefinition("facet", AnalyticsSchema.ColumnType.FACET, true, false));
AnalyticsSchema analyticsSchema = new AnalyticsSchema(columns, null);
analyticsDataAPI.setTableSchema(MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, analyticsSchema);
}
@Test(groups = "wso2.das", description = "Get table schema", dependsOnMethods = "setTableSchema")
public void getTableSchema() throws Exception {
log.info("Executing getTableSchema test case ...");
HttpResponse response = Utils.doGet(TestConstants.ANALYTICS_TABLES_ENDPOINT_URL +
TABLE_NAME + TestConstants.SCHEMA, headers);
log.info("Response: " + response.getData());
Assert.assertFalse(response.getData().contains("{}"), "Schema is not set");
Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
}
/*@Test(groups = "wso2.das", description = "Create records without optional parameters", dependsOnMethods = "getAllTables")
public void createRecordsWithoutOptionalParams() throws Exception {
log.info("Executing create records without Optional Parameters test case ...");
URL restUrl = new URL(TestConstants.ANALYTICS_RECORDS_ENDPOINT_URL);
List<RecordBean> recordList = new ArrayList<>();
recordList.add(record1);
recordList.add(record2);
HttpResponse response = HttpRequestUtil.doPost(restUrl, gson.toJson(recordList), headers);
log.info("Response: " + response.getData());
Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
Assert.assertFalse(response.getData().contains("[]"));
}*/
@Test(groups = "wso2.das", description = "Create records without optional parameters", dependsOnMethods = "getTableSchema")
public void createRecordsWithoutOptionalParams() throws Exception {
log.info("Executing create records without Optional Parameters test case ...");
List<Record> records = new ArrayList<>();
records.add(new Record(MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, valueSet1));
records.add(new Record(MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, valueSet2));
analyticsDataAPI.put(records);
}
/*@Test(groups = "wso2.das", description = "Create records with optional params", dependsOnMethods = "createRecordsWithoutOptionalParams")
public void createRecordsWithOptionalParams() throws Exception {
log.info("Executing create records test case ...");
long currentTime = System.currentTimeMillis();
URL restUrl = new URL(TestConstants.ANALYTICS_RECORDS_ENDPOINT_URL);
List<RecordBean> recordList = new ArrayList<>();
record3.setId("id1");
record3.setTableName(TABLE_NAME);
record3.setTimestamp(currentTime);
record4.setId("id2");
record4.setTableName(TABLE_NAME);
record4.setTimestamp(currentTime);
recordList.add(record3);
recordList.add(record4);
HttpResponse response = HttpRequestUtil.doPost(restUrl, gson.toJson(recordList), headers);
log.info("Response: " + response.getData());
Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
Assert.assertTrue(response.getData().contains("id1"));
Assert.assertTrue(response.getData().contains("id2"));
}*/
@Test(groups = "wso2.das", description = "Create records with optional params", dependsOnMethods = "createRecordsWithoutOptionalParams")
public void createRecordsWithOptionalParams() throws Exception {
log.info("Executing create records test case ...");
List<Record> records = new ArrayList<>();
records.add(new Record("id1", MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, valueSet1));
records.add(new Record("id2", MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, valueSet2));
analyticsDataAPI.put(records);
}
@Test(groups = "wso2.das", description = "Get the record count of a table", dependsOnMethods = "createRecordsWithOptionalParams")
public void getRecordCount() throws Exception {
log.info("Executing getRecordCount test case ...");
HttpResponse response = Utils.doGet(TestConstants.ANALYTICS_TABLES_ENDPOINT_URL + TABLE_NAME +
"/recordcount", headers);
log.info("Response: " + response.getData());
if (!response.getData().equals("-1")) {
Assert.assertEquals(response.getData(), "4", "record count is different");
}
Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
}
@Test(groups = "wso2.das", description = "Get records without pagination", dependsOnMethods = "createRecordsWithoutOptionalParams")
public void getRecordsWithoutPagination() throws Exception {
log.info("Executing get records without pagination test case ...");
long currentTime = System.currentTimeMillis();
HttpResponse response = Utils.doGet(TestConstants.ANALYTICS_TABLES_ENDPOINT_URL + TABLE_NAME + "/" +
(currentTime - ONE_HOUR_MILLISECOND) + "/" +
(currentTime + ONE_HOUR_MILLISECOND), headers);
Type listType = new TypeToken<List<RecordBean>>(){}.getType();
List< RecordBean> recordList = gson.fromJson(response.getData(), listType);
Assert.assertTrue(recordList.size() == 4,
"Size mismatch!");
Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
}
@Test(groups = "wso2.das", description = "Get records with pagination", dependsOnMethods =
"createRecordsWithOptionalParams")
public void getRecordsWithPagination() throws Exception {
log.info("Executing get records with pagination test case ...");
long currentTime = System.currentTimeMillis();
HttpResponse response = Utils.doGet(TestConstants.ANALYTICS_TABLES_ENDPOINT_URL + TABLE_NAME +
"/" +
(currentTime - ONE_HOUR_MILLISECOND) + "/" +
(currentTime + ONE_HOUR_MILLISECOND) + "/" +
"0" + "/" + "2", headers);
log.info("Response: " + response.getData());
Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
Assert.assertTrue(response.getData().contains("\"values\":{\"key1@\":\"@value1\",\"key2@\":\"@value2\"," +
"\"key3\":\"value3\",\"key4@\":\"@value4\",\"key5@\":\"@value5\"}"));
Assert.assertTrue(response.getData().contains("\"values\":{\"key7@\":\"@value1\",\"key6@\":\"@value2\"," +
"\"key9@\":\"@value3\",\"key0@\":\"@value4\",\"key4@\":\"@value5\"}"));
}
@Test(groups = "wso2.das", description = "Get all records", dependsOnMethods = "getRecordCount")
public void getAllRecords() throws Exception {
log.info("Executing get All records test case ...");
HttpResponse response = Utils.doGet(TestConstants.ANALYTICS_TABLES_ENDPOINT_URL +
TABLE_NAME, headers);
Type listType = new TypeToken<List<RecordBean>>(){}.getType();
List< RecordBean> recordList = gson.fromJson(response.getData(), listType);
log.info("Response :" + response.getData());
Assert.assertTrue(recordList.size() == 4,
"Size mismatch!");
Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
}
/*@Test(groups = "wso2.das", description = "update existing records", dependsOnMethods = "getRecordCount")
public void updateRecords() throws Exception {
log.info("Executing updateRecords test case ...");
URL restUrl = new URL(TestConstants.ANALYTICS_RECORDS_ENDPOINT_URL);
List<RecordBean> recordList = new ArrayList<>();
record3.setId("id1");
record3.setTableName(TABLE_NAME);
record3.setValues(updateValueSet1);
record4.setId("id2");
record4.setTableName(TABLE_NAME);
record4.setValues(updateValueSet2);
recordList.add(record3);
recordList.add(record4);
HttpResponse response = HttpRequestUtil.doPost(restUrl, gson.toJson(recordList), headers);
log.info("Response: " + response.getData());
Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
Assert.assertTrue(response.getData().contains("id1"));
Assert.assertTrue(response.getData().contains("id2"));
}*/
@Test(groups = "wso2.das", description = "update existing records", dependsOnMethods = "search")
public void updateRecords() throws Exception {
log.info("Executing updateRecords test case ...");
List<Record> records = new ArrayList<>();
records.add(new Record("id1", MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, updateValueSet1));
records.add(new Record("id2", MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, updateValueSet2));
analyticsDataAPI.put(records);
}
/*@Test(groups = "wso2.das", description = "update existing records in a specific table", dependsOnMethods = "insertRecordsToTable")
public void updateRecordsInTable() throws Exception {
log.info("Executing updateRecordsInTable test case ...");
URL restUrl = new URL(TestConstants.ANALYTICS_TABLES_ENDPOINT_URL + TABLE_NAME);
List<RecordBean> recordList = new ArrayList<>();
updateValueSet1 = new LinkedHashMap<>();
updateValueSet1.put("newupdatedkey7@", "newupdated@value1");
updateValueSet1.put("newupdatedkey6@", "newupdated@value2");
updateValueSet1.put("newupdatedkey9@", "newupdated@value3");
updateValueSet1.put("newupdatedkey0@", "newupdated@value4");
updateValueSet1.put("newupdatedkey4@", "newupdated@value5");
updateValueSet2 = new LinkedHashMap<>();
updateValueSet2.put("newkey1@", "new@value1");
updateValueSet2.put("newkey2@", "new@value2");
updateValueSet2.put("newkey3@", "new@value3");
updateValueSet2.put("newkey4@", "new@value4");
updateValueSet2.put("newkey5@", "new@value5");
record3 = new RecordBean();
record3.setId("id1");
record3.setValues(updateValueSet1);
record4 = new RecordBean();
record4.setId("id2");
record4.setValues(updateValueSet2);
recordList.add(record3);
recordList.add(record4);
HttpResponse response = HttpRequestUtil.doPost(restUrl, gson.toJson(recordList), headers);
log.info("Response: " + response.getData());
Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
Assert.assertTrue(response.getData().contains("id1"));
Assert.assertTrue(response.getData().contains("id2"));
}*/
    /**
     * Rebuilds both update value sets with fresh key/value pairs, overwrites records
     * "id1"/"id2" with them, and blocks until indexing has caught up (10s timeout).
     */
    @Test(groups = "wso2.das", description = "update existing records in a specific table", dependsOnMethods = "insertRecordsToTable")
    public void updateRecordsInTable() throws Exception {
        log.info("Executing updateRecordsInTable test case ...");
        // Replace the shared update maps entirely — earlier tests used different keys.
        updateValueSet1 = new LinkedHashMap<>();
        updateValueSet1.put("newupdatedkey7@", "newupdated@value1");
        updateValueSet1.put("newupdatedkey6@", "newupdated@value2");
        updateValueSet1.put("newupdatedkey9@", "newupdated@value3");
        updateValueSet1.put("newupdatedkey0@", "newupdated@value4");
        updateValueSet1.put("newupdatedkey4@", "newupdated@value5");
        updateValueSet2 = new LinkedHashMap<>();
        updateValueSet2.put("newkey1@", "new@value1");
        updateValueSet2.put("newkey2@", "new@value2");
        updateValueSet2.put("newkey3@", "new@value3");
        updateValueSet2.put("newkey4@", "new@value4");
        updateValueSet2.put("newkey5@", "new@value5");
        List<Record> records = new ArrayList<>();
        records.add(new Record("id1", MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, updateValueSet1));
        records.add(new Record("id2", MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, updateValueSet2));
        analyticsDataAPI.put(records);
        // Wait so that later search/delete tests observe the updated index.
        analyticsDataAPI.waitForIndexing(MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, 10000L);
    }
@Test(groups = "wso2.das", description = "Insert records in a specific table", dependsOnMethods = "updateRecords")
public void insertRecordsToTable() throws Exception {
log.info("Executing insertRecordsInTable test case ...");
updateValueSet1 = new LinkedHashMap<>();
updateValueSet1.put("newKey1", "new Value1");
updateValueSet1.put("newKey2", "new Value2");
updateValueSet2 = new LinkedHashMap<>();
updateValueSet2.put("newKey3", "new value3");
updateValueSet2.put("newKey4", "new value4");
List<Record> records = new ArrayList<>();
records.add(new Record("id3", MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, updateValueSet1));
records.add(new Record("id4", MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, updateValueSet2));
analyticsDataAPI.put(records);
}
@Test(groups = "wso2.das", description = "search records in a specific table", dependsOnMethods = "getAllRecords")
public void search() throws Exception {
log.info("Executing search test case ...");
HttpResponse response = Utils.doGet(TestConstants.ANALYTICS_WAITFOR_INDEXING_ENDPOINT_URL,
headers); //wait till indexing finishes
Assert.assertEquals(response.getResponseCode(), 200, "Waiting till indexing finished - failed");
URL restUrl = new URL(TestConstants.ANALYTICS_SEARCH_ENDPOINT_URL);
QueryBean query = new QueryBean();
query.setTableName(TABLE_NAME);
query.setQuery("key3:value3");
query.setStart(0);
query.setCount(10);
response = HttpRequestUtil.doPost(restUrl, gson.toJson(query), headers);
log.info("Response: " + response.getData());
Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
Assert.assertTrue(response.getData().contains("\"key3\":\"value3\""), "Search result not found");
}
@Test(groups = "wso2.das", description = "get the search record count in a specific table", dependsOnMethods = "search")
public void searchCount() throws Exception {
log.info("Executing searchCount test case ...");
URL restUrl = new URL(TestConstants.ANALYTICS_SEARCH_COUNT_ENDPOINT_URL);
QueryBean query = new QueryBean();
query.setTableName(TABLE_NAME);
query.setQuery("key3:value3");
HttpResponse response = HttpRequestUtil.doPost(restUrl, gson.toJson(query), headers);
log.info("Response: " + response.getData());
Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
Assert.assertTrue(response.getData().contains("2"), "Search Count mismatch!");
}
/* @Test(groups = "wso2.das", description = "delete records by ids in a specific table", dependsOnMethods = "searchCount")
public void deleteRecordsByIds() throws Exception {
log.info("Executing deleteRecordsByIds test case ...");
List<String> recordList = new ArrayList<>();
recordList.add("id3");
recordList.add("id4");
HttpClient httpClient = new DefaultHttpClient();
HttpDeleteWithBody httpDelete = new HttpDeleteWithBody(TestConstants.ANALYTICS_TABLES_ENDPOINT_URL +
TABLE_NAME);
httpDelete.setHeader("Content-Type", TestConstants.CONTENT_TYPE_JSON);
httpDelete.setHeader("Authorization", TestConstants.BASE64_ADMIN_ADMIN);
HttpEntity entity = new StringEntity(gson.toJson(recordList));
httpDelete.setEntity(entity);
org.apache.http.HttpResponse response = httpClient.execute(httpDelete);
String responseBody = EntityUtils.toString(response.getEntity());
log.info("Response: " + responseBody);
Assert.assertEquals(response.getStatusLine().getStatusCode(), 200, "Status code is different");
Assert.assertTrue(responseBody.contains("Successfully deleted records"), "Record deletion by IDs failed");
EntityUtils.consume(response.getEntity()); //ensures the http connection is closed
}*/
@Test(groups = "wso2.das", description = "delete records by ids in a specific table", dependsOnMethods = "updateRecordsInTable")
public void deleteRecordsByIds() throws Exception {
log.info("Executing deleteRecordsByIds test case ...");
List<String> recordList = new ArrayList<>();
recordList.add("id3");
recordList.add("id4");
analyticsDataAPI.delete(MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, recordList);
}
/*@Test(groups = "wso2.das", description = "delete records given a time range in a specific table"
, dependsOnMethods = "deleteRecordsByIds")
public void deleteRecordsByTimeRange() throws Exception {
log.info("Executing deleteRecordsByTimeRange test case ...");
StringBuilder url = new StringBuilder(TestConstants.ANALYTICS_TABLES_ENDPOINT_URL);
long currentTime = System.currentTimeMillis();
url.append(TABLE_NAME);
url.append("/");
url.append(currentTime - ONE_HOUR_MILLISECOND);
url.append("/");
url.append(currentTime + ONE_HOUR_MILLISECOND);
HttpClient httpClient = new DefaultHttpClient();
HttpDeleteWithBody httpDelete = new HttpDeleteWithBody(url.toString());
httpDelete.setHeader("Authorization", TestConstants.BASE64_ADMIN_ADMIN);
org.apache.http.HttpResponse response = httpClient.execute(httpDelete);
String responseBody = EntityUtils.toString(response.getEntity());
log.info("Response: " + responseBody);
Assert.assertEquals(response.getStatusLine().getStatusCode(), 200, "Status code is different");
Assert.assertTrue(responseBody.contains("Successfully deleted records"), "Record deletion by timeRange failed");
EntityUtils.consume(response.getEntity()); //ensures the http connection is closed
}*/
@Test(groups = "wso2.das", description = "delete records given a time range in a specific table"
, dependsOnMethods = "deleteRecordsByIds")
public void deleteRecordsByTimeRange() throws Exception {
log.info("Executing deleteRecordsByTimeRange test case ...");
long currentTime = System.currentTimeMillis();
analyticsDataAPI.delete(MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME,
currentTime - ONE_HOUR_MILLISECOND, currentTime + ONE_HOUR_MILLISECOND);
}
/*@Test(groups = "wso2.das", description = "Add records which have facet fields", dependsOnMethods = "deleteRecordsByTimeRange")
public void addFacetRecords() throws Exception {
log.info("Executing addFacetRecords test case ...");
URL restUrl = new URL(TestConstants.ANALYTICS_RECORDS_ENDPOINT_URL);
List<RecordBean> recordList = new ArrayList<>();
Map<String, Object> values1 = record1.getValues();
values1.put("facet", new String[]{"SriLanka", "Colombo", "Maradana"});
Map<String, Object> values2 = record2.getValues();
values2.put("facet", new String[]{"2015", "April", "28"});
recordList.add(record1);
recordList.add(record2);
HttpResponse response = HttpRequestUtil.doPost(restUrl, gson.toJson(recordList), headers);
log.info("Response: " + response.getData());
Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
Assert.assertFalse(response.getData().contains("[]"));
}*/
/**
 * Inserts two records whose "facet" field is a comma-separated category path,
 * then blocks until indexing completes so later drill-down tests can see them.
 */
@Test(groups = "wso2.das", description = "Add records which have facet fields", dependsOnMethods = "deleteRecordsByTimeRange")
public void addFacetRecords() throws Exception {
    log.info("Executing addFacetRecords test case ...");
    // Facet paths are encoded here as comma-separated strings.
    Map<String, Object> firstValues = record1.getValues();
    firstValues.put("facet", "SriLanka,Colombo");
    Map<String, Object> secondValues = record2.getValues();
    secondValues.put("facet", "2015,April,28,12,34,24");
    List<Record> toInsert = new ArrayList<>();
    toInsert.add(new Record(MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, firstValues));
    toInsert.add(new Record(MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, secondValues));
    analyticsDataAPI.put(toInsert);
    // Wait (up to 10s) for the index to absorb the new records.
    analyticsDataAPI.waitForIndexing(MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME.toUpperCase(), 10000L);
}
/*@Test(groups = "wso2.das", description = "Add records which have facet fields to a table",
dependsOnMethods = "addFacetRecords")
public void addFacetRecordsToTable() throws Exception {
log.info("Executing addFacetRecordsToTable test case ...");
URL restUrl = new URL(TestConstants.ANALYTICS_TABLES_ENDPOINT_URL + TABLE_NAME);
List<RecordBean> recordList = new ArrayList<>();
Map<String, Object> values1 = record1.getValues();
values1.put("facet", new String[]{"SriLanka", "Colombo"});
Map<String, Object> values2 = record2.getValues();
values2.put("facet", new String[]{"2015", "April", "28", "12", "34", "24"});
record1.setTableName(null);
record2.setTableName(null);
recordList.add(record1);
recordList.add(record2);
HttpResponse response = HttpRequestUtil.doPost(restUrl, gson.toJson(recordList), headers);
log.info("Response: " + response.getData());
Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
Assert.assertFalse(response.getData().contains("[]"));
}*/
/**
 * Inserts two records whose "facet" field is a list of category path segments
 * and waits for them to be indexed so the following drill-down test can find
 * them.
 *
 * @throws Exception on any analytics API failure.
 */
@Test(groups = "wso2.das", description = "Add records which have facet fields to a table",
dependsOnMethods = "addFacetRecords")
public void addFacetRecordsToTable() throws Exception {
    log.info("Executing addFacetRecordsToTable test case ...");
    Map<String, Object> values1 = record1.getValues();
    // Must be an ArrayList: the facet list needs a no-arg constructor to work
    // with Kryo serialization.
    values1.put("facet", new ArrayList<String>(Arrays.asList("SriLanka", "Colombo")));
    Map<String, Object> values2 = record2.getValues();
    values2.put("facet", new ArrayList<String>(Arrays.asList("2015", "April", "28", "12", "34", "24")));
    List<Record> records = new ArrayList<>();
    records.add(new Record(MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, values1));
    records.add(new Record(MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME, values2));
    // BUG FIX: put the records first, THEN wait for indexing.  The original
    // waited before inserting, so the wait could not cover the new records
    // (the sibling addFacetRecords test already uses the correct order).
    analyticsDataAPI.put(records);
    analyticsDataAPI.waitForIndexing(MultitenantConstants.SUPER_TENANT_ID, TABLE_NAME.toUpperCase(), 10000L);
}
/**
 * Drills down through the indexed "facet" field (path SriLanka/Colombo)
 * without a search query and expects at least one matching record back.
 */
@Test(groups = "wso2.das", description = "drilldown through the faceted fields",
dependsOnMethods = "addFacetRecordsToTable")
public void drillDownSearchWithoutSearchQuery() throws Exception {
    log.info("Executing drillDownSearch test case ...");
    // Block until the indexer reports completion, otherwise the drill-down may race it.
    HttpResponse indexingResponse = Utils.doGet(TestConstants.ANALYTICS_WAITFOR_INDEXING_ENDPOINT_URL,
            headers);
    Assert.assertEquals(indexingResponse.getResponseCode(), 200, "Waiting till indexing finished - failed");
    // Build the drill-down request: one category path over the "facet" field.
    DrillDownPathBean facetPath = new DrillDownPathBean();
    facetPath.setPath(new String[]{"SriLanka", "Colombo"});
    facetPath.setFieldName("facet");
    List<DrillDownPathBean> categoryPaths = new ArrayList<>();
    categoryPaths.add(facetPath);
    DrillDownRequestBean drillDownRequest = new DrillDownRequestBean();
    drillDownRequest.setTableName(TABLE_NAME);
    drillDownRequest.setRecordStart(0);
    drillDownRequest.setRecordCount(1);
    drillDownRequest.setCategories(categoryPaths);
    URL drillDownUrl = new URL(TestConstants.ANALYTICS_DRILLDOWN_ENDPOINT_URL);
    String postBody = gson.toJson(drillDownRequest);
    HttpResponse response = HttpRequestUtil.doPost(drillDownUrl, postBody, headers);
    log.info("Response: " + response.getData());
    Assert.assertEquals(response.getResponseCode(), 200, "Status code is different");
    // An empty JSON array would mean the drill-down matched nothing.
    Assert.assertFalse(response.getData().contains("[]"));
}
/**
 * Clears the index data of {@code TABLE_NAME} through the tables REST endpoint
 * (DELETE .../tableName/indices) and verifies the operation succeeded.
 *
 * @throws Exception on HTTP or assertion failure.
 */
@Test(groups = "wso2.das", description = "clear indexData in a specific table"
, dependsOnMethods = "drillDownSearchWithoutSearchQuery")
public void clearIndices() throws Exception {
    log.info("Executing clearIndices test case ...");
    HttpClient httpClient = new DefaultHttpClient();
    HttpDeleteWithBody httpDelete = new HttpDeleteWithBody(TestConstants.ANALYTICS_TABLES_ENDPOINT_URL +
            TABLE_NAME + "/" + INDICES);
    httpDelete.setHeader("Authorization", TestConstants.BASE64_ADMIN_ADMIN);
    org.apache.http.HttpResponse response = httpClient.execute(httpDelete);
    String responseBody = EntityUtils.toString(response.getEntity());
    log.info("Response: " + responseBody);
    Assert.assertEquals(response.getStatusLine().getStatusCode(), 200, "Status code is different");
    // BUG FIX: failure message used to read "Record deletion by IDs failed",
    // a copy/paste leftover from another test case.
    Assert.assertTrue(responseBody.contains("Successfully cleared indices"), "Clearing indices failed");
    EntityUtils.consume(response.getEntity()); // ensures the http connection is closed
}
/*@Test(groups = "wso2.das", description = "deletes a specific table"
, dependsOnMethods = "clearIndices")
public void deleteTable() throws Exception {
log.info("Executing deleteTable test case ...");
TableBean table = new TableBean();
table.setTableName(TABLE_NAME);
HttpClient httpClient = new DefaultHttpClient();
HttpDeleteWithBody httpDelete = new HttpDeleteWithBody(TestConstants.ANALYTICS_TABLES_ENDPOINT_URL);
httpDelete.setHeader("Content-Type", TestConstants.CONTENT_TYPE_JSON);
httpDelete.setHeader("Authorization", TestConstants.BASE64_ADMIN_ADMIN);
httpDelete.setEntity(new StringEntity(gson.toJson(table)));
org.apache.http.HttpResponse response = httpClient.execute(httpDelete);
String responseBody = EntityUtils.toString(response.getEntity());
log.info("Response: " + responseBody);
Assert.assertEquals(response.getStatusLine().getStatusCode(), 200, "Status code is different");
Assert.assertTrue(responseBody.contains("Successfully deleted table"), "Table deletion failed");
EntityUtils.consume(response.getEntity()); //ensures the http connection is closed
}*/
/**
 * Drops {@code TABLE_NAME} for the super tenant through the analytics data API.
 */
@Test(groups = "wso2.das", description = "deletes a specific table"
, dependsOnMethods = "clearIndices")
public void deleteTable() throws Exception {
    log.info("Executing deleteTable test case ...");
    final int tenantId = MultitenantConstants.SUPER_TENANT_ID;
    analyticsDataAPI.deleteTable(tenantId, TABLE_NAME);
}
}
@NotThreadSafe
class HttpDeleteWithBody extends HttpEntityEnclosingRequestBase {
public static final String METHOD_NAME = "DELETE";
public String getMethod() { return METHOD_NAME; }
public HttpDeleteWithBody(final String uri) {
super();
setURI(URI.create(uri));
}
}
| |
package com.audacious_software.pennyworth.activities;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import com.audacious_software.passive_data_kit.activities.DiagnosticsActivity;
import com.audacious_software.passive_data_kit.generators.device.ForegroundApplication;
import com.audacious_software.pennyworth.PennyworthApplication;
import com.audacious_software.pennyworth.R;
import com.audacious_software.pennyworth.ScheduleManager;
import net.hockeyapp.android.UpdateManager;
import org.apache.commons.lang3.time.DurationFormatUtils;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import androidx.cardview.widget.CardView;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
/**
 * Lists the applications the user has spent the most time in, aggregated over
 * a selectable period (today, the last week, or all time).  While the activity
 * is resumed the list refreshes every ten seconds.
 */
public class AppsUsedActivity extends AppCompatActivity {
    private RecyclerView mAppsList = null;
    private LinearLayoutManager mLayoutManager;
    private ArrayList<ForegroundApplication.ForegroundApplicationUsage> mAppsUsed = new ArrayList<>();
    private AppsAdapter mAdapter = null;
    private Menu mMenu = null;
    private Handler mHandler;
    private Runnable mRefreshRunnable = null;

    /** Aggregation window used by {@link #refreshApps}. */
    enum AppPeriod {
        DAY,
        WEEK,
        ALL
    }

    private AppPeriod mCurrentPeriod = AppPeriod.DAY;

    @Override // added: was missing (also on onDestroy/onOptionsItemSelected below)
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        this.setContentView(R.layout.activity_apps_used);
        this.getSupportActionBar().setTitle(R.string.app_name);

        this.mAppsList = this.findViewById(R.id.apps_list);
        this.mAppsList.setHasFixedSize(true);

        this.mLayoutManager = new LinearLayoutManager(this);
        this.mAppsList.setLayoutManager(mLayoutManager);

        this.mAdapter = new AppsAdapter(this.mAppsUsed);
        this.mAppsList.setAdapter(this.mAdapter);

        final AppsUsedActivity me = this;

        // Periodic refresh; scheduled in onResume, cancelled in onPause.
        this.mRefreshRunnable = new Runnable() {
            @Override
            public void run() {
                if (me.mMenu != null) {
                    DiagnosticsActivity.setUpDiagnosticsItem(me, me.mMenu, true, true);
                }

                me.refreshApps(me.mCurrentPeriod);

                me.mHandler.postDelayed(this, 10000);
            }
        };

        this.mHandler = new Handler(Looper.getMainLooper());
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
    }

    @Override
    protected void onResume() {
        super.onResume();

        if (this.mMenu != null) {
            DiagnosticsActivity.setUpDiagnosticsItem(this, this.mMenu, true, true);
        }

        PennyworthApplication app = (PennyworthApplication) this.getApplication();

        final AppsUsedActivity me = this;

        String userId = app.getIdentifier();

        if (userId != null) {
            ScheduleManager.getInstance(this).setUserId(userId);
        } else {
            // No identifier yet: prompt the user, then refresh shortly after they answer.
            app.promptForIdentifier(this, new Runnable() {
                @Override
                public void run() {
                    Handler handler = new Handler(Looper.getMainLooper());

                    handler.postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            if (me.mMenu != null) {
                                DiagnosticsActivity.setUpDiagnosticsItem(me, me.mMenu, true, true);
                            }

                            me.refreshApps(me.mCurrentPeriod);
                        }
                    }, 1000);
                }
            });
        }

        this.mHandler.postDelayed(this.mRefreshRunnable, 1000);
    }

    @Override
    protected void onPause() {
        super.onPause();

        this.mHandler.removeCallbacks(this.mRefreshRunnable);
    }

    /**
     * Rebuilds the per-application usage totals for the requested period,
     * updates the action-bar subtitle, and notifies the adapter.
     */
    private void refreshApps(AppPeriod period) {
        this.mCurrentPeriod = period;

        // Default lower bound: midnight today, local time.
        Calendar cal = Calendar.getInstance();
        cal.set(Calendar.HOUR_OF_DAY, 0);
        cal.set(Calendar.MINUTE, 0);
        cal.set(Calendar.SECOND, 0);
        cal.set(Calendar.MILLISECOND, 0);

        long when = cal.getTimeInMillis();

        if (this.mCurrentPeriod == AppPeriod.WEEK) {
            // Seven calendar days, including today.
            cal.add(Calendar.DATE, -6);
            when = cal.getTimeInMillis();
            this.getSupportActionBar().setSubtitle(R.string.subtitle_app_usage_week);
        } else if (this.mCurrentPeriod == AppPeriod.ALL) {
            when = 0;
            this.getSupportActionBar().setSubtitle(R.string.subtitle_app_usage_all);
        } else {
            this.getSupportActionBar().setSubtitle(R.string.subtitle_app_usage_day);
        }

        // Collapse individual usage events into one running total per package.
        HashMap<String, ForegroundApplication.ForegroundApplicationUsage> totals = new HashMap<>();

        this.mAppsUsed.clear();

        List<ForegroundApplication.ForegroundApplicationUsage> usages = ForegroundApplication.getInstance(this).fetchUsagesBetween(when, System.currentTimeMillis(), true);

        for (ForegroundApplication.ForegroundApplicationUsage usage : usages) {
            ForegroundApplication.ForegroundApplicationUsage total = null;

            if (totals.containsKey(usage.packageName)) {
                total = totals.get(usage.packageName);
            } else {
                total = new ForegroundApplication.ForegroundApplicationUsage();
                total.packageName = usage.packageName;
                total.duration = 0;
                total.start = usage.start;

                totals.put(usage.packageName, total);

                this.mAppsUsed.add(total);
            }

            // Track the earliest start seen for this package.
            if (total.start > usage.start) {
                total.start = usage.start;
            }

            total.duration += usage.duration;
        }

        // Longest total duration first.
        Collections.sort(this.mAppsUsed, new Comparator<ForegroundApplication.ForegroundApplicationUsage>() {
            @Override
            public int compare(ForegroundApplication.ForegroundApplicationUsage one, ForegroundApplication.ForegroundApplicationUsage two) {
                // Descending order; Long.compare avoids the hand-rolled branch ladder.
                return Long.compare(two.duration, one.duration);
            }
        });

        this.mAdapter.notifyDataSetChanged();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        this.getMenuInflater().inflate(R.menu.menu_apps_used, menu);

        this.mMenu = menu;

        DiagnosticsActivity.setUpDiagnosticsItem(this, this.mMenu, true, true);

        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int id = item.getItemId();

        final AppsUsedActivity me = this;

        if (DiagnosticsActivity.diagnosticItemSelected(this, item)) {
            // Already handled by the diagnostics machinery; nothing more to do.
        } else if (id == R.id.action_change_filter) {
            AlertDialog.Builder builder = new AlertDialog.Builder(this);
            builder.setTitle(R.string.title_select_duration);

            String[] items = {
                    this.getString(R.string.duration_day),
                    this.getString(R.string.duration_week),
                    this.getString(R.string.duration_all),
            };

            builder.setItems(items, new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialogInterface, int i) {
                    if (i == 0) {
                        me.refreshApps(AppPeriod.DAY);
                    } else if (i == 1) {
                        me.refreshApps(AppPeriod.WEEK);
                    } else if (i == 2) {
                        me.refreshApps(AppPeriod.ALL);
                    }
                }
            });

            builder.create().show();
        } else if (id == R.id.action_transmit_data) {
            ScheduleManager.getInstance(this).transmitData();
        }

        return super.onOptionsItemSelected(item);
    }

    /** RecyclerView adapter that shows up to the top twenty applications. */
    public static class AppsAdapter extends RecyclerView.Adapter<AppsAdapter.AppViewHolder> {
        private ArrayList<ForegroundApplication.ForegroundApplicationUsage> mAppsList;

        /** Holds the card view for a single application row. */
        public static class AppViewHolder extends RecyclerView.ViewHolder {
            public CardView mCardView;

            public AppViewHolder(CardView v) {
                super(v);

                this.mCardView = v;
            }
        }

        public AppsAdapter(ArrayList<ForegroundApplication.ForegroundApplicationUsage> list) {
            this.mAppsList = list;
        }

        @Override
        public AppsAdapter.AppViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
            // create a new view
            CardView card = (CardView) LayoutInflater.from(parent.getContext()).inflate(R.layout.card_app_used, parent, false);

            return new AppViewHolder(card);
        }

        @Override
        public void onBindViewHolder(final AppViewHolder holder, int position) {
            ForegroundApplication.ForegroundApplicationUsage usage = this.mAppsList.get(position);

            Context context = holder.mCardView.getContext();

            TextView appName = holder.mCardView.findViewById(R.id.app_name);
            ImageView appIcon = holder.mCardView.findViewById(R.id.app_icon);

            try {
                PackageManager packageManager = context.getPackageManager();

                ApplicationInfo info = packageManager.getApplicationInfo(usage.packageName, PackageManager.GET_META_DATA);

                appName.setText(context.getString(R.string.numbered_list_app, holder.getAdapterPosition() + 1, packageManager.getApplicationLabel(info)));
                appIcon.setImageDrawable(packageManager.getApplicationIcon(usage.packageName));
            } catch (PackageManager.NameNotFoundException e) {
                // Package was uninstalled since the usage was recorded; fall back to its raw name.
                appName.setText(usage.packageName);
            }

            TextView appDuration = holder.mCardView.findViewById(R.id.app_used_duration);
            appDuration.setText(DurationFormatUtils.formatDurationWords(usage.duration, true, true));
        }

        @Override
        public int getItemCount() {
            // Cap the list at the top twenty entries.
            int size = this.mAppsList.size();

            if (size > 20) {
                size = 20;
            }

            return size;
        }
    }
}
| |
package org.cagrid.gaards.dorian.federation;
import gov.nih.nci.cagrid.common.FaultUtil;
import gov.nih.nci.cagrid.dorian.common.CommonUtils;
import java.math.BigInteger;
import java.security.KeyPair;
import java.util.ArrayList;
import java.util.List;
import junit.framework.TestCase;
import org.cagrid.gaards.dorian.X509Certificate;
import org.cagrid.gaards.dorian.ca.CertificateAuthority;
import org.cagrid.gaards.dorian.common.Lifetime;
import org.cagrid.gaards.dorian.stubs.types.InvalidHostCertificateFault;
import org.cagrid.gaards.dorian.stubs.types.InvalidHostCertificateRequestFault;
import org.cagrid.gaards.dorian.test.Utils;
import org.cagrid.gaards.pki.KeyUtil;
import org.cagrid.tools.database.Database;
/**
* @author <A href="mailto:langella@bmi.osu.edu">Stephen Langella </A>
* @author <A href="mailto:oster@bmi.osu.edu">Scott Oster </A>
* @author <A href="mailto:hastings@bmi.osu.edu">Shannon Hastings </A>
* @version $Id: ArgumentManagerTable.java,v 1.2 2004/10/15 16:35:16 langella
* Exp $
*/
public class TestHostCertificateManager extends TestCase implements Publisher {
public final static String OWNER = "owner";
private Database db;
private CertificateAuthority ca;
private CertificateBlacklistManager blackList;
/**
 * {@code Publisher} hook invoked when the CA's CRL should be republished.
 * CRL publishing is not exercised by these tests, so this is a no-op.
 */
public void publishCRL() {
    // intentionally a no-op; CRL publishing is outside the scope of these tests
}
/**
 * Smoke test: a HostCertificateManager can be constructed against the test
 * database and its tables wiped without error.
 */
public void testCreateAndDestroy() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/**
 * Requests, approves, and renews a host certificate.  Renewal must preserve
 * the record's identity fields (id, host, owner, public key, subject, status)
 * while issuing a fresh certificate under a new serial number.
 */
public void testRenewHostCertificate() {
    try {
        HostCertificateManager hcm = new HostCertificateManager(db, getConf(), ca, this, blackList);
        hcm.clearDatabase(); // start from an empty store
        HostCertificateRequest req = getHostCertificateRequest("localhost");
        long id = hcm.requestHostCertifcate(OWNER, req);
        validateAfterCertificateRequest(hcm, req, id);
        HostCertificateRecord record = hcm.approveHostCertifcate(id);
        validateAfterCertificateApproval(hcm, id, OWNER, req, record);
        HostCertificateRecord renewed = hcm.renewHostCertificate(id);
        // Identity fields are carried over unchanged by renewal.
        assertEquals(record.getId(), renewed.getId());
        assertEquals(record.getHost(), renewed.getHost());
        assertEquals(record.getOwner(), renewed.getOwner());
        assertEquals(record.getPublicKey(), renewed.getPublicKey());
        assertEquals(record.getSubject(), renewed.getSubject());
        assertEquals(record.getStatus(), renewed.getStatus());
        // The issued certificate itself must be new: a different serial number ...
        if (record.getSerialNumber() == renewed.getSerialNumber()) {
            fail("Serial number should not equal.");
        }
        // ... and different certificate content.
        if (record.getCertificate().equals(renewed.getCertificate())) {
            fail("Certificates should not equal.");
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/**
 * Renewal must be refused with InvalidHostCertificateFault whenever the
 * certificate is not Active: while the request is still pending, after the
 * certificate is suspended, after it is marked compromised, and for a
 * rejected request.
 */
public void testRenewHostCertificateInvalidStatus() {
    try {
        HostCertificateManager hcm = new HostCertificateManager(db, getConf(), ca, this, blackList);
        hcm.clearDatabase();
        HostCertificateRequest req = getHostCertificateRequest("localhost");
        long id = hcm.requestHostCertifcate(OWNER, req);
        validateAfterCertificateRequest(hcm, req, id);
        // Still pending: renewal is not allowed.
        try {
            hcm.renewHostCertificate(id);
            fail("Should have failed");
        } catch (InvalidHostCertificateFault f) {
            // expected
        }
        HostCertificateRecord record = hcm.approveHostCertifcate(id);
        validateAfterCertificateApproval(hcm, id, OWNER, req, record);
        // Suspended: renewal is not allowed.
        setHostCertificateStatus(hcm, id, HostCertificateStatus.Suspended);
        try {
            hcm.renewHostCertificate(id);
            fail("Should have failed");
        } catch (InvalidHostCertificateFault f) {
            // expected
        }
        // Compromised: renewal is not allowed.
        setHostCertificateStatus(hcm, id, HostCertificateStatus.Compromised);
        try {
            hcm.renewHostCertificate(id);
            fail("Should have failed");
        } catch (InvalidHostCertificateFault f) {
            // expected
        }
        // Rejected (a fresh request, since the first record is now compromised):
        // renewal is not allowed either.
        req = getHostCertificateRequest("localhost");
        id = hcm.requestHostCertifcate(OWNER, req);
        setHostCertificateStatus(hcm, id, HostCertificateStatus.Rejected);
        try {
            hcm.renewHostCertificate(id);
            fail("Should have failed");
        } catch (InvalidHostCertificateFault f) {
            // expected
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/**
 * getHostCertificateRecordsSerialNumbers(owner) must report serial numbers
 * only for approved certificates: none while requests are pending, then one
 * per approval, and the serial of every record approved so far must appear.
 */
public void testGetHostCertificateSerialNumbers() {
    try {
        int total = 5;
        String hostPrefix = "localhost";
        HostCertificateManager hcm = new HostCertificateManager(db, getConf(), ca, this, blackList);
        hcm.clearDatabase();
        List<Long> ids = new ArrayList<Long>();
        String owner = OWNER;
        // Pending requests have no certificate yet, hence no serial numbers.
        for (int i = 0; i < total; i++) {
            String host = hostPrefix + i;
            HostCertificateRequest req = getHostCertificateRequest(host);
            long id = hcm.requestHostCertifcate(owner, req);
            assertEquals(0, hcm.getHostCertificateRecordsSerialNumbers(owner).size());
            ids.add(Long.valueOf(id));
        }
        // Approve one at a time; the serial-number list grows by one each time
        // and must contain the serial of every record approved so far.
        for (int i = 0; i < total; i++) {
            long id = ids.get(i).longValue();
            hcm.approveHostCertifcate(id);
            List<Long> sn = hcm.getHostCertificateRecordsSerialNumbers(owner);
            assertEquals((i + 1), sn.size());
            for (int j = 0; j < (i + 1); j++) {
                HostCertificateRecord r = hcm.getHostCertificateRecord(ids.get(j));
                boolean found = false;
                for (int x = 0; x < sn.size(); x++) {
                    if (r.getSerialNumber() == sn.get(x).longValue()) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    fail("Serial Number not returned.");
                }
            }
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/**
 * Verifies getDisabledHostCertificatesSerialNumbers(): pending and active
 * certificates are never reported as disabled, while certificates moved to
 * Suspended or Compromised are, and every disabled record's serial number is
 * returned.
 */
public void testGetDisabledHostCertificates() {
    try {
        int total = 5;
        String hostPrefix = "localhost";
        HostCertificateManager hcm = new HostCertificateManager(db, getConf(), ca, this, blackList);
        hcm.clearDatabase();
        List<Long> ids = new ArrayList<Long>();
        String owner = OWNER;
        // Pending requests must not show up as disabled.
        for (int i = 0; i < total; i++) {
            String host = hostPrefix + i;
            HostCertificateRequest req = getHostCertificateRequest(host);
            long id = hcm.requestHostCertifcate(owner, req);
            assertEquals(0, hcm.getDisabledHostCertificatesSerialNumbers().size());
            ids.add(Long.valueOf(id));
        }
        // Active (approved) certificates must not show up as disabled either.
        for (int i = 0; i < total; i++) {
            long id = ids.get(i).longValue();
            hcm.approveHostCertifcate(id);
            assertEquals(0, hcm.getDisabledHostCertificatesSerialNumbers().size());
            List<Long> sn = hcm.getHostCertificateRecordsSerialNumbers(owner);
            assertEquals((i + 1), sn.size());
        }
        // Suspending the first three records disables each in turn ...
        for (int i = 0; i < 3; i++) {
            updateStatusAndVerifyDisabled(hcm, ids, i, HostCertificateStatus.Suspended);
        }
        // ... and marking the remaining two compromised disables them too.
        for (int i = 3; i < 5; i++) {
            updateStatusAndVerifyDisabled(hcm, ids, i, HostCertificateStatus.Compromised);
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}

/**
 * Sets the status of record {@code ids.get(index)} to {@code status}, then
 * asserts the disabled-serial-number list contains exactly {@code index + 1}
 * entries including the serial numbers of all records updated so far.
 * (Extracted from the previously duplicated suspend/compromise loops.)
 */
private void updateStatusAndVerifyDisabled(HostCertificateManager hcm, List<Long> ids, int index,
        HostCertificateStatus status) throws Exception {
    long id = ids.get(index).longValue();
    HostCertificateUpdate update = new HostCertificateUpdate();
    update.setId(id);
    update.setStatus(status);
    hcm.updateHostCertificateRecord(update);
    List<Long> sn = hcm.getDisabledHostCertificatesSerialNumbers();
    assertEquals((index + 1), sn.size());
    for (int j = 0; j < (index + 1); j++) {
        HostCertificateRecord r = hcm.getHostCertificateRecord(ids.get(j));
        boolean found = false;
        for (int x = 0; x < sn.size(); x++) {
            if (r.getSerialNumber() == sn.get(x).longValue()) {
                found = true;
                break;
            }
        }
        if (!found) {
            fail("Serial Number not returned.");
        }
    }
}
/**
 * Exercises getHostRecords(HostSearchCriteria): pending requests are never
 * returned; active certificates are found by hostname, identity, certificate
 * subject, owner, and by all criteria combined; disabled (suspended or
 * compromised) certificates drop out of every search.
 */
public void testHostSearch() {
    try {
        int total = 5;
        String hostPrefix = "localhost";
        HostCertificateManager hcm = new HostCertificateManager(db, getConf(), ca, this, blackList);
        hcm.clearDatabase();
        List<Long> ids = new ArrayList<Long>();
        String owner = OWNER;
        // Phase 1: pending requests are invisible to host search.
        for (int i = 0; i < total; i++) {
            String host = hostPrefix + i;
            HostCertificateRequest req = getHostCertificateRequest(host);
            long id = hcm.requestHostCertifcate(owner, req);
            assertEquals(0, hcm.getHostRecords(new HostSearchCriteria()).size());
            HostSearchCriteria hs = new HostSearchCriteria();
            hs.setHostname(host);
            assertEquals(0, hcm.getHostRecords(hs).size());
            ids.add(Long.valueOf(id));
        }
        // Phase 2: approve one at a time; each becomes searchable by every criterion.
        for (int i = 0; i < total; i++) {
            long id = ids.get(i).longValue();
            HostCertificateRecord hr = hcm.approveHostCertifcate(id);
            assertEquals((i + 1), hcm.getHostRecords(new HostSearchCriteria()).size());
            // by hostname
            HostSearchCriteria hs = new HostSearchCriteria();
            hs.setHostname(hr.getHost());
            assertEquals(1, hcm.getHostRecords(hs).size());
            // by identity derived from the certificate subject
            hs = new HostSearchCriteria();
            hs.setIdentity(CommonUtils.subjectToIdentity(hr.getSubject()));
            assertEquals(1, hcm.getHostRecords(hs).size());
            // by certificate subject
            hs = new HostSearchCriteria();
            hs.setHostCertificateSubject(hr.getSubject());
            assertEquals(1, hcm.getHostRecords(hs).size());
            // by owner — matches every record approved so far (all share one owner)
            hs = new HostSearchCriteria();
            hs.setOwner(hr.getOwner());
            assertEquals((i + 1), hcm.getHostRecords(hs).size());
            // all criteria combined narrow the result back to this single record
            hs = new HostSearchCriteria();
            hs.setHostname(hr.getHost());
            hs.setIdentity(CommonUtils.subjectToIdentity(hr.getSubject()));
            hs.setHostCertificateSubject(hr.getSubject());
            hs.setOwner(hr.getOwner());
            assertEquals(1, hcm.getHostRecords(hs).size());
        }
        // Phase 3: disabling a certificate (alternating suspended/compromised)
        // removes it from every search result.
        for (int i = 0; i < total; i++) {
            long id = ids.get(i).longValue();
            HostCertificateUpdate update = new HostCertificateUpdate();
            update.setId(id);
            if (i % 2 == 0) {
                update.setStatus(HostCertificateStatus.Suspended);
            } else {
                update.setStatus(HostCertificateStatus.Compromised);
            }
            hcm.updateHostCertificateRecord(update);
            HostCertificateRecord hr = hcm.getHostCertificateRecord(id);
            assertEquals((total - (i + 1)), hcm.getHostRecords(new HostSearchCriteria()).size());
            HostSearchCriteria hs = new HostSearchCriteria();
            hs.setHostname(hr.getHost());
            assertEquals(0, hcm.getHostRecords(hs).size());
            hs = new HostSearchCriteria();
            hs.setIdentity(CommonUtils.subjectToIdentity(hr.getSubject()));
            assertEquals(0, hcm.getHostRecords(hs).size());
            hs = new HostSearchCriteria();
            hs.setHostCertificateSubject(hr.getSubject());
            assertEquals(0, hcm.getHostRecords(hs).size());
            hs = new HostSearchCriteria();
            hs.setOwner(hr.getOwner());
            assertEquals((total - (i + 1)), hcm.getHostRecords(hs).size());
            hs = new HostSearchCriteria();
            hs.setHostname(hr.getHost());
            hs.setIdentity(CommonUtils.subjectToIdentity(hr.getSubject()));
            hs.setHostCertificateSubject(hr.getSubject());
            hs.setOwner(hr.getOwner());
            assertEquals(0, hcm.getHostRecords(hs).size());
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/**
 * Bulk request/approve flow across five hosts, each with its own owner, then
 * exercises findHostCertificates() by host, owner, subject, and a combination
 * of criteria, and finally verifies that owner/status updates are reflected
 * in searches.
 */
public void testCreateAndApproveManyHostCertificate() {
    try {
        int total = 5;
        String hostPrefix = "localhost";
        HostCertificateManager hcm = new HostCertificateManager(db, getConf(), ca, this, blackList);
        hcm.clearDatabase();
        List<HostCertificateRequest> requests = new ArrayList<HostCertificateRequest>();
        List<Long> ids = new ArrayList<Long>();
        // Request one certificate per host, each under a distinct owner.
        for (int i = 0; i < total; i++) {
            String host = hostPrefix + i;
            HostCertificateRequest req = getHostCertificateRequest(host);
            String owner = OWNER + i;
            long id = hcm.requestHostCertifcate(owner, req);
            validateAfterCertificateRequest((i + 1), (i + 1), hcm, owner, req, id);
            requests.add(req);
            ids.add(Long.valueOf(id));
            // The bare OWNER string owns nothing here; each numbered owner sees
            // exactly its own record.
            assertEquals(0, hcm.getHostCertificateRecords(OWNER).size());
            List<HostCertificateRecord> records = hcm.getHostCertificateRecords(owner);
            assertEquals(1, records.size());
            assertEquals(host, records.get(0).getHost());
            assertEquals(owner, records.get(0).getOwner());
        }
        // Approve each request; the pending count shrinks by one per approval.
        for (int i = 0; i < total; i++) {
            long id = ids.get(i).longValue();
            HostCertificateRequest req = requests.get(i);
            String owner = OWNER + i;
            HostCertificateRecord record = hcm.approveHostCertifcate(id);
            validateAfterCertificateApproval(total, (i + 1), hcm, id, owner, req, record);
            HostCertificateFilter f = new HostCertificateFilter();
            f.setStatus(HostCertificateStatus.Pending);
            assertEquals(total - (i + 1), hcm.findHostCertificates(f).size());
            List<HostCertificateRecord> records = hcm.getHostCertificateRecords(owner);
            assertEquals(1, records.size());
            assertEquals(record, records.get(0));
        }
        // Test find by host: "localhost" matches all five hosts (prefix match,
        // as demonstrated by the assertion below).
        try {
            HostCertificateFilter f = new HostCertificateFilter();
            f.setHost("foobar");
            assertEquals(0, hcm.findHostCertificates(f).size());
            f.setHost("localhost");
            assertEquals(5, hcm.findHostCertificates(f).size());
        } catch (Exception e) {
            FaultUtil.printFault(e);
            fail(e.getMessage());
        }
        // Test find by owner: OWNER is a prefix of every numbered owner above,
        // so it matches all five records.
        try {
            HostCertificateFilter f = new HostCertificateFilter();
            f.setOwner("foobar");
            assertEquals(0, hcm.findHostCertificates(f).size());
            f.setOwner(OWNER);
            assertEquals(5, hcm.findHostCertificates(f).size());
        } catch (Exception e) {
            FaultUtil.printFault(e);
            fail(e.getMessage());
        }
        // Test find by subject: the CA subject minus its last RDN is shared by
        // every issued host certificate subject.
        try {
            HostCertificateFilter f = new HostCertificateFilter();
            f.setSubject("foobar");
            assertEquals(0, hcm.findHostCertificates(f).size());
            String caSubject = ca.getCACertificate().getSubjectDN().getName();
            int caindex = caSubject.lastIndexOf(",");
            String caPreSub = caSubject.substring(0, caindex);
            f.setSubject(caPreSub);
            assertEquals(5, hcm.findHostCertificates(f).size());
        } catch (Exception e) {
            FaultUtil.printFault(e);
            fail(e.getMessage());
        }
        // Test Find by Multiple criteria combined; all five still match.
        try {
            HostCertificateFilter f = new HostCertificateFilter();
            String caSubject = ca.getCACertificate().getSubjectDN().getName();
            int caindex = caSubject.lastIndexOf(",");
            String caPreSub = caSubject.substring(0, caindex);
            f.setStatus(HostCertificateStatus.Active);
            f.setHost(hostPrefix);
            f.setOwner(OWNER);
            f.setSubject(caPreSub);
            assertEquals(5, hcm.findHostCertificates(f).size());
        } catch (Exception e) {
            FaultUtil.printFault(e);
            fail(e.getMessage());
        }
        // Update the owner and status of each record; searches must reflect both.
        for (int i = 0; i < total; i++) {
            long id = ids.get(i).longValue();
            String newOwner = "new";
            HostCertificateUpdate update = new HostCertificateUpdate();
            update.setId(id);
            update.setOwner(newOwner);
            update.setStatus(HostCertificateStatus.Suspended);
            hcm.updateHostCertificateRecord(update);
            HostCertificateFilter f = new HostCertificateFilter();
            f.setStatus(HostCertificateStatus.Suspended);
            f.setOwner(newOwner);
            assertEquals((i + 1), hcm.findHostCertificates(f).size());
            List<HostCertificateRecord> records = hcm.getHostCertificateRecords(newOwner);
            assertEquals((i + 1), records.size());
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/**
 * Uses a configuration with a very short issued-certificate lifetime to verify
 * that the isExpired filter distinguishes expired from still-valid
 * certificates as time passes.  Timing sensitive: thread priority is raised to
 * reduce scheduling jitter.
 */
public void testFindExpiredHostCertificates() {
    try {
        IdentityFederationProperties conf = getExpiringCredentialsConf();
        HostCertificateManager hcm = new HostCertificateManager(db, conf, ca, this, blackList);
        hcm.clearDatabase();
        Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
        // BUG FIX: yield() is static — calling it through Thread.currentThread()
        // misleadingly suggested a per-thread effect that does not exist.
        Thread.yield();
        long id1 = hcm.requestHostCertifcate(OWNER, getHostCertificateRequest("localhost1"));
        hcm.approveHostCertifcate(id1);
        long id2 = hcm.requestHostCertifcate(OWNER, getHostCertificateRequest("localhost2"));
        HostCertificateFilter f1 = new HostCertificateFilter();
        f1.setIsExpired(Boolean.TRUE);
        HostCertificateFilter f2 = new HostCertificateFilter();
        f2.setIsExpired(Boolean.FALSE);
        // First certificate is freshly issued; second request is still pending.
        assertEquals(0, hcm.findHostCertificates(f1).size());
        assertEquals(1, hcm.findHostCertificates(f2).size());
        // Sleep past the configured lifetime so the first certificate expires.
        Thread.sleep((conf.getIssuedCertificateLifetime().getSeconds() * 1000) + 100);
        assertEquals(1, hcm.findHostCertificates(f1).size());
        assertEquals(0, hcm.findHostCertificates(f2).size());
        // Approving the second yields one fresh (unexpired) certificate.
        hcm.approveHostCertifcate(id2);
        assertEquals(1, hcm.findHostCertificates(f1).size());
        assertEquals(1, hcm.findHostCertificates(f2).size());
        // After another lifetime both certificates are expired.
        Thread.sleep((conf.getIssuedCertificateLifetime().getSeconds() * 1000) + 100);
        assertEquals(2, hcm.findHostCertificates(f1).size());
        assertEquals(0, hcm.findHostCertificates(f2).size());
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/**
 * Happy path: request a host certificate and approve it, validating the
 * manager's state after each step.
 */
public void testCreateAndApproveHostCertificate() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        HostCertificateRequest request = getHostCertificateRequest("localhost");
        long requestId = manager.requestHostCertifcate(OWNER, request);
        validateAfterCertificateRequest(manager, request, requestId);
        HostCertificateRecord approved = manager.approveHostCertifcate(requestId);
        validateAfterCertificateApproval(manager, requestId, OWNER, request, approved);
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/** Approving an already-active certificate must raise InvalidHostCertificateFault. */
public void testApproveActiveHostCertificate() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        HostCertificateRequest request = getHostCertificateRequest("localhost");
        long certId = manager.requestHostCertifcate(OWNER, request);
        validateAfterCertificateRequest(manager, request, certId);
        HostCertificateRecord approved = manager.approveHostCertifcate(certId);
        validateAfterCertificateApproval(manager, certId, OWNER, request, approved);
        try {
            manager.approveHostCertifcate(certId);
            fail("Should have failed.");
        } catch (InvalidHostCertificateFault f) {
            // expected: certificate is already active
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/** Approving a rejected certificate request must raise InvalidHostCertificateFault. */
public void testApproveRejectedHostCertificate() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        HostCertificateRequest request = getHostCertificateRequest("localhost");
        long certId = manager.requestHostCertifcate(OWNER, request);
        validateAfterCertificateRequest(manager, request, certId);
        // Reject the pending request.
        HostCertificateUpdate rejection = new HostCertificateUpdate();
        rejection.setId(certId);
        rejection.setStatus(HostCertificateStatus.Rejected);
        manager.updateHostCertificateRecord(rejection);
        try {
            manager.approveHostCertifcate(certId);
            fail("Should have failed.");
        } catch (InvalidHostCertificateFault f) {
            // expected: rejected requests cannot be approved
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/** Approving a suspended certificate must raise InvalidHostCertificateFault. */
public void testApproveSuspendedHostCertificate() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        HostCertificateRequest request = getHostCertificateRequest("localhost");
        long certId = manager.requestHostCertifcate(OWNER, request);
        validateAfterCertificateRequest(manager, request, certId);
        HostCertificateRecord approved = manager.approveHostCertifcate(certId);
        validateAfterCertificateApproval(manager, certId, OWNER, request, approved);
        // Suspend the active certificate.
        HostCertificateUpdate suspension = new HostCertificateUpdate();
        suspension.setId(certId);
        suspension.setStatus(HostCertificateStatus.Suspended);
        manager.updateHostCertificateRecord(suspension);
        try {
            manager.approveHostCertifcate(certId);
            fail("Should have failed.");
        } catch (InvalidHostCertificateFault f) {
            // expected: suspended certificates cannot be (re)approved
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/** Approving a compromised certificate must raise InvalidHostCertificateFault. */
public void testApproveCompromisedHostCertificate() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        HostCertificateRequest request = getHostCertificateRequest("localhost");
        long certId = manager.requestHostCertifcate(OWNER, request);
        validateAfterCertificateRequest(manager, request, certId);
        HostCertificateRecord approved = manager.approveHostCertifcate(certId);
        validateAfterCertificateApproval(manager, certId, OWNER, request, approved);
        // Mark the active certificate compromised.
        HostCertificateUpdate compromise = new HostCertificateUpdate();
        compromise.setId(certId);
        compromise.setStatus(HostCertificateStatus.Compromised);
        manager.updateHostCertificateRecord(compromise);
        try {
            manager.approveHostCertifcate(certId);
            fail("Should have failed.");
        } catch (InvalidHostCertificateFault f) {
            // expected: compromised certificates cannot be (re)approved
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/**
 * A second request for a host that already has an active certificate must be
 * refused; once that certificate is marked compromised, a new request for the
 * same host becomes valid again.
 */
public void testCreateDuplicateHostCertificate() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        HostCertificateRequest request = getHostCertificateRequest("localhost");
        long certId = manager.requestHostCertifcate(OWNER, request);
        validateAfterCertificateRequest(manager, request, certId);
        HostCertificateRecord approved = manager.approveHostCertifcate(certId);
        validateAfterCertificateApproval(manager, certId, OWNER, request, approved);
        try {
            manager.requestHostCertifcate(OWNER, getHostCertificateRequest("localhost"));
            fail("Should have Failed!!");
        } catch (InvalidHostCertificateRequestFault f) {
            // expected: host already has an active certificate
        }
        HostCertificateUpdate compromise = new HostCertificateUpdate();
        compromise.setId(certId);
        compromise.setStatus(HostCertificateStatus.Compromised);
        manager.updateHostCertificateRecord(compromise);
        // A replacement request for the same host is now accepted and approvable.
        HostCertificateRequest replacement = getHostCertificateRequest("localhost");
        long replacementId = manager.requestHostCertifcate(OWNER, replacement);
        manager.approveHostCertifcate(replacementId);
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/**
 * A request that reuses the public key of a compromised certificate must be
 * refused even for a different hostname; a freshly generated key for that
 * hostname is accepted.
 */
public void testCreateHostCertificateWithACompromisedKey() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        HostCertificateRequest request = getHostCertificateRequest("localhost");
        long certId = manager.requestHostCertifcate(OWNER, request);
        validateAfterCertificateRequest(manager, request, certId);
        HostCertificateRecord approved = manager.approveHostCertifcate(certId);
        validateAfterCertificateApproval(manager, certId, OWNER, request, approved);
        HostCertificateUpdate compromise = new HostCertificateUpdate();
        compromise.setId(certId);
        compromise.setStatus(HostCertificateStatus.Compromised);
        manager.updateHostCertificateRecord(compromise);
        try {
            // Same (compromised) key, different hostname: must be rejected.
            request.setHostname("newhost");
            manager.requestHostCertifcate(OWNER, request);
            fail("Should have Failed!!");
        } catch (InvalidHostCertificateRequestFault f) {
            // expected: key belongs to a compromised certificate
        }
        // A brand-new key for the same hostname is accepted.
        manager.requestHostCertifcate(OWNER, getHostCertificateRequest("newhost"));
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/** Requests with a null or blank hostname must be refused. */
public void testCreateHostCertificateBadHostname() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        try {
            manager.requestHostCertifcate(OWNER, getHostCertificateRequest(null));
            fail("Should have Failed!!");
        } catch (InvalidHostCertificateRequestFault f) {
            // expected: null hostname
        }
        try {
            manager.requestHostCertifcate(OWNER, getHostCertificateRequest(" "));
            fail("Should have Failed!!");
        } catch (InvalidHostCertificateRequestFault f) {
            // expected: blank hostname
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/** Requests whose public key is missing, blank, or unparsable must be refused. */
public void testCreateHostCertificateInvalidPublicKey() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        try {
            // No public key holder at all.
            HostCertificateRequest bad = getHostCertificateRequest("localhost");
            bad.setPublicKey(null);
            manager.requestHostCertifcate(OWNER, bad);
            fail("Should have Failed!!");
        } catch (InvalidHostCertificateRequestFault f) {
            // expected
        }
        try {
            // Key holder present but key string is null.
            HostCertificateRequest bad = getHostCertificateRequest("localhost");
            bad.getPublicKey().setKeyAsString(null);
            manager.requestHostCertifcate(OWNER, bad);
            fail("Should have Failed!!");
        } catch (InvalidHostCertificateRequestFault f) {
            // expected
        }
        try {
            // Blank key string.
            HostCertificateRequest bad = getHostCertificateRequest("localhost");
            bad.getPublicKey().setKeyAsString(" ");
            manager.requestHostCertifcate(OWNER, bad);
            fail("Should have Failed!!");
        } catch (InvalidHostCertificateRequestFault f) {
            // expected
        }
        try {
            // Garbage that cannot be parsed as a key.
            HostCertificateRequest bad = getHostCertificateRequest("localhost");
            bad.getPublicKey().setKeyAsString("foobar");
            manager.requestHostCertifcate(OWNER, bad);
            fail("Should have Failed!!");
        } catch (InvalidHostCertificateRequestFault f) {
            // expected
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/** A request carrying an undersized (512-bit) key must be refused. */
public void testCreateHostCertificateInvalidPublicKeySize() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        try {
            HostCertificateRequest undersized = getHostCertificateRequest("localhost", 512);
            manager.requestHostCertifcate(OWNER, undersized);
            fail("Should have Failed!!");
        } catch (InvalidHostCertificateRequestFault f) {
            // expected: key smaller than the CA's configured size
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/**
 * A status update is rejected while the certificate is still pending, but the
 * very same update succeeds after the certificate has been approved.
 */
public void testUpdateHostCertificateStatusBeforeApproval() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        HostCertificateRequest request = getHostCertificateRequest("localhost");
        long certId = manager.requestHostCertifcate(OWNER, request);
        validateAfterCertificateRequest(manager, request, certId);
        HostCertificateUpdate suspension = new HostCertificateUpdate();
        suspension.setId(certId);
        suspension.setStatus(HostCertificateStatus.Suspended);
        try {
            manager.updateHostCertificateRecord(suspension);
            fail("Should have failed");
        } catch (InvalidHostCertificateFault f) {
            // expected: status cannot change before approval
        }
        HostCertificateRecord approved = manager.approveHostCertifcate(certId);
        validateAfterCertificateApproval(manager, certId, OWNER, request, approved);
        // The identical update is now accepted.
        manager.updateHostCertificateRecord(suspension);
        assertEquals(HostCertificateStatus.Suspended, manager.getHostCertificateRecord(certId).getStatus());
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/** Changing the owner is allowed even while the certificate is still pending. */
public void testUpdateHostCertificateOwnerBeforeApproval() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        HostCertificateRequest request = getHostCertificateRequest("localhost");
        long certId = manager.requestHostCertifcate(OWNER, request);
        validateAfterCertificateRequest(manager, request, certId);
        String expectedOwner = "newowner";
        HostCertificateUpdate ownerChange = new HostCertificateUpdate();
        ownerChange.setId(certId);
        ownerChange.setOwner(expectedOwner);
        manager.updateHostCertificateRecord(ownerChange);
        assertEquals(expectedOwner, manager.getHostCertificateRecord(certId).getOwner());
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/** The owner of an active certificate can be changed. */
public void testUpdateHostCertificateOwner() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        HostCertificateRequest request = getHostCertificateRequest("localhost");
        long certId = manager.requestHostCertifcate(OWNER, request);
        validateAfterCertificateRequest(manager, request, certId);
        HostCertificateRecord approved = manager.approveHostCertifcate(certId);
        validateAfterCertificateApproval(manager, certId, OWNER, request, approved);
        String expectedOwner = "newowner";
        HostCertificateUpdate ownerChange = new HostCertificateUpdate();
        ownerChange.setId(certId);
        ownerChange.setOwner(expectedOwner);
        manager.updateHostCertificateRecord(ownerChange);
        assertEquals(expectedOwner, manager.getHostCertificateRecord(certId).getOwner());
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/** Owner and status can be changed together in a single update. */
public void testUpdateAllHostCertificate() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        HostCertificateRequest request = getHostCertificateRequest("localhost");
        long certId = manager.requestHostCertifcate(OWNER, request);
        validateAfterCertificateRequest(manager, request, certId);
        HostCertificateRecord approved = manager.approveHostCertifcate(certId);
        validateAfterCertificateApproval(manager, certId, OWNER, request, approved);
        String expectedOwner = "newowner";
        HostCertificateUpdate combinedUpdate = new HostCertificateUpdate();
        combinedUpdate.setId(certId);
        combinedUpdate.setOwner(expectedOwner);
        combinedUpdate.setStatus(HostCertificateStatus.Suspended);
        manager.updateHostCertificateRecord(combinedUpdate);
        HostCertificateRecord current = manager.getHostCertificateRecord(certId);
        assertEquals(expectedOwner, current.getOwner());
        assertEquals(HostCertificateStatus.Suspended, current.getStatus());
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/** Updating a record id that does not exist must raise InvalidHostCertificateFault. */
public void testUpdateNonExistingHostCertificate() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        HostCertificateRequest request = getHostCertificateRequest("localhost");
        long certId = manager.requestHostCertifcate(OWNER, request);
        validateAfterCertificateRequest(manager, request, certId);
        HostCertificateRecord approved = manager.approveHostCertifcate(certId);
        validateAfterCertificateApproval(manager, certId, OWNER, request, approved);
        try {
            HostCertificateUpdate bogusUpdate = new HostCertificateUpdate();
            bogusUpdate.setId(100);
            bogusUpdate.setOwner("newowner");
            manager.updateHostCertificateRecord(bogusUpdate);
            fail("Should have failed");
        } catch (InvalidHostCertificateFault f) {
            // expected: no record with id 100
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/** Once compromised, a certificate record can no longer be updated. */
public void testUpdateCompromisedHostCertificate() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        HostCertificateRequest request = getHostCertificateRequest("localhost");
        long certId = manager.requestHostCertifcate(OWNER, request);
        validateAfterCertificateRequest(manager, request, certId);
        HostCertificateRecord approved = manager.approveHostCertifcate(certId);
        validateAfterCertificateApproval(manager, certId, OWNER, request, approved);
        HostCertificateUpdate compromise = new HostCertificateUpdate();
        compromise.setId(certId);
        compromise.setStatus(HostCertificateStatus.Compromised);
        manager.updateHostCertificateRecord(compromise);
        try {
            HostCertificateUpdate ownerChange = new HostCertificateUpdate();
            ownerChange.setId(certId);
            ownerChange.setOwner("newowner");
            manager.updateHostCertificateRecord(ownerChange);
            fail("Should have failed");
        } catch (InvalidHostCertificateFault f) {
            // expected: compromised records are frozen
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/** An approved certificate cannot be moved back to the Pending status. */
public void testUpdateApprovedHostCertificateToPending() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        HostCertificateRequest request = getHostCertificateRequest("localhost");
        long certId = manager.requestHostCertifcate(OWNER, request);
        validateAfterCertificateRequest(manager, request, certId);
        HostCertificateRecord approved = manager.approveHostCertifcate(certId);
        validateAfterCertificateApproval(manager, certId, OWNER, request, approved);
        try {
            HostCertificateUpdate revert = new HostCertificateUpdate();
            revert.setId(certId);
            revert.setStatus(HostCertificateStatus.Pending);
            manager.updateHostCertificateRecord(revert);
            fail("Should have failed");
        } catch (InvalidHostCertificateFault f) {
            // expected: cannot revert an active certificate to Pending
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/** An active certificate can be suspended via a status update. */
public void testUpdateHostCertificateStatus() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        HostCertificateRequest request = getHostCertificateRequest("localhost");
        long certId = manager.requestHostCertifcate(OWNER, request);
        validateAfterCertificateRequest(manager, request, certId);
        HostCertificateRecord approved = manager.approveHostCertifcate(certId);
        validateAfterCertificateApproval(manager, certId, OWNER, request, approved);
        HostCertificateUpdate suspension = new HostCertificateUpdate();
        suspension.setId(certId);
        suspension.setStatus(HostCertificateStatus.Suspended);
        manager.updateHostCertificateRecord(suspension);
        assertEquals(HostCertificateStatus.Suspended, manager.getHostCertificateRecord(certId).getStatus());
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/** Once rejected, a certificate record can no longer be updated. */
public void testUpdateRejectedHostCertificate() {
    try {
        HostCertificateManager manager = new HostCertificateManager(db, getConf(), ca, this, blackList);
        manager.clearDatabase();
        HostCertificateRequest request = getHostCertificateRequest("localhost");
        long certId = manager.requestHostCertifcate(OWNER, request);
        validateAfterCertificateRequest(manager, request, certId);
        HostCertificateUpdate rejection = new HostCertificateUpdate();
        rejection.setId(certId);
        rejection.setStatus(HostCertificateStatus.Rejected);
        manager.updateHostCertificateRecord(rejection);
        try {
            HostCertificateUpdate ownerChange = new HostCertificateUpdate();
            ownerChange.setId(certId);
            ownerChange.setOwner("newowner");
            manager.updateHostCertificateRecord(ownerChange);
            fail("Should have failed");
        } catch (InvalidHostCertificateFault f) {
            // expected: rejected records are frozen
        }
    } catch (Exception e) {
        FaultUtil.printFault(e);
        fail(e.getMessage());
    }
}
/**
 * Builds a host certificate request for the given host using the CA's
 * configured issued-certificate key size.
 */
private HostCertificateRequest getHostCertificateRequest(String host) throws Exception {
return getHostCertificateRequest(host, ca.getProperties().getIssuedCertificateKeySize());
}
/**
 * Builds a host certificate request for the given hostname, backed by a
 * freshly generated RSA key pair of the requested size.
 */
private HostCertificateRequest getHostCertificateRequest(String host, int keySize) throws Exception {
    KeyPair generated = KeyUtil.generateRSAKeyPair(keySize);
    PublicKey publicKey = new PublicKey();
    publicKey.setKeyAsString(KeyUtil.writePublicKey(generated.getPublic()));
    HostCertificateRequest request = new HostCertificateRequest();
    request.setHostname(host);
    request.setPublicKey(publicKey);
    return request;
}
/**
 * Convenience overload: validates a just-requested certificate assuming it is
 * the only record (and only Pending record) in the database, owned by OWNER.
 */
private void validateAfterCertificateRequest(HostCertificateManager hcm, HostCertificateRequest req, long id)
throws Exception {
validateAfterCertificateRequest(1, 1, hcm, OWNER, req, id);
}
/**
 * Validates a newly requested (still Pending, not yet signed) certificate
 * through every findHostCertificates filter variant. A serial number of -1
 * and a null certificate mean "not issued yet"; the subject is still empty.
 */
private void validateAfterCertificateRequest(int count, int statusCount, HostCertificateManager hcm, String owner,
HostCertificateRequest req, long id) throws Exception {
validateFindHostCertificates(count, statusCount, hcm, id, -1, null, req.getHostname(), owner, req
.getPublicKey(), HostCertificateStatus.Pending, "");
}
/**
 * Convenience overload: validates an approval assuming the approved record is
 * the only record (and only Active record) in the database.
 */
private void validateAfterCertificateApproval(HostCertificateManager hcm, long id, String owner,
HostCertificateRequest req, HostCertificateRecord record) throws Exception {
validateAfterCertificateApproval(1, 1, hcm, id, owner, req, record);
}
/**
 * Validates a freshly approved certificate: the returned record must echo the
 * request, carry the Active status and the CA-derived subject, be retrievable
 * by id, and be visible through every findHostCertificates filter variant.
 */
private void validateAfterCertificateApproval(int count, int statusCount, HostCertificateManager hcm, long id,
String owner, HostCertificateRequest req, HostCertificateRecord record) throws Exception {
// The record returned by approval must reflect the original request.
assertEquals(req.getHostname(), record.getHost());
assertEquals(req.getPublicKey(), record.getPublicKey());
assertEquals(owner, record.getOwner());
assertEquals(HostCertificateStatus.Active, record.getStatus());
String subject = org.cagrid.gaards.dorian.service.util.Utils.getHostCertificateSubject(ca.getCACertificate(),
req.getHostname());
assertEquals(subject, record.getSubject());
// Re-reading the record by id must yield the same data, field by field.
HostCertificateRecord r = hcm.getHostCertificateRecord(id);
assertEquals(record.getPublicKey(), r.getPublicKey());
assertEquals(record.getCertificate(), r.getCertificate());
assertEquals(record.getSerialNumber(), r.getSerialNumber());
assertEquals(record.getId(), r.getId());
assertEquals(record.getOwner(), r.getOwner());
assertEquals(record.getHost(), r.getHost());
assertEquals(record.getStatus(), r.getStatus());
assertEquals(record.getSubject(), r.getSubject());
assertEquals(record, r);
validateFindHostCertificates(count, statusCount, hcm, id, record.getSerialNumber(), record.getCertificate(),
req.getHostname(), owner, req.getPublicKey(), HostCertificateStatus.Active, subject);
}
/**
 * Exercises findHostCertificates with every supported filter (none, empty,
 * host, id, owner, serial number, status, subject) and checks that each one
 * returns the expected record.
 *
 * @param count expected total number of records in the database
 * @param statusCount expected number of records with the given status
 * @param sn expected serial number, or negative when the certificate has not
 * been issued yet (the serial-number filter is then skipped)
 * @param subject expected subject; the subject filter is skipped when blank
 */
private void validateFindHostCertificates(int count, int statusCount, HostCertificateManager hcm, long id, long sn,
X509Certificate cert, String host, String owner, PublicKey key, HostCertificateStatus status, String subject)
throws Exception {
// A null filter returns every record.
List<HostCertificateRecord> l1 = hcm.findHostCertificates(null);
assertEquals(count, l1.size());
if (count == 1) {
validateHostCertificateRecord(l1.get(count - 1), id, sn, cert, host, owner, key, status, subject);
}
// An empty filter behaves like a null filter.
if (count == 1) {
List<HostCertificateRecord> l2 = hcm.findHostCertificates(new HostCertificateFilter());
assertEquals(count, l2.size());
validateHostCertificateRecord(l2.get(count - 1), id, sn, cert, host, owner, key, status, subject);
}
// Filter by host.
HostCertificateFilter f3 = new HostCertificateFilter();
f3.setHost(host);
List<HostCertificateRecord> l3 = hcm.findHostCertificates(f3);
assertEquals(1, l3.size());
validateHostCertificateRecord(l3.get(0), id, sn, cert, host, owner, key, status, subject);
// Filter by record id.
HostCertificateFilter f4 = new HostCertificateFilter();
f4.setId(new BigInteger(String.valueOf(id)));
List<HostCertificateRecord> l4 = hcm.findHostCertificates(f4);
assertEquals(1, l4.size());
validateHostCertificateRecord(l4.get(0), id, sn, cert, host, owner, key, status, subject);
// Filter by owner.
HostCertificateFilter f5 = new HostCertificateFilter();
f5.setOwner(owner);
List<HostCertificateRecord> l5 = hcm.findHostCertificates(f5);
assertEquals(1, l5.size());
validateHostCertificateRecord(l5.get(0), id, sn, cert, host, owner, key, status, subject);
// Filter by serial number (only meaningful once a certificate was issued).
if (sn >= 0) {
HostCertificateFilter f6 = new HostCertificateFilter();
f6.setSerialNumber(new BigInteger(String.valueOf(sn)));
List<HostCertificateRecord> l6 = hcm.findHostCertificates(f6);
assertEquals(1, l6.size());
validateHostCertificateRecord(l6.get(0), id, sn, cert, host, owner, key, status, subject);
}
// Filter by status; narrow further by id when several records share it.
HostCertificateFilter f7 = new HostCertificateFilter();
f7.setStatus(status);
List<HostCertificateRecord> l7 = hcm.findHostCertificates(f7);
assertEquals(statusCount, l7.size());
if (statusCount == 1) {
validateHostCertificateRecord(l7.get(0), id, sn, cert, host, owner, key, status, subject);
} else if (statusCount > 1) {
f7.setId(new BigInteger(String.valueOf(id)));
l7 = hcm.findHostCertificates(f7);
assertEquals(1, l7.size());
validateHostCertificateRecord(l7.get(0), id, sn, cert, host, owner, key, status, subject);
}
// Filter by subject (skipped while the certificate is pending and has none).
if (gov.nih.nci.cagrid.common.Utils.clean(subject) != null) {
HostCertificateFilter f8 = new HostCertificateFilter();
f8.setSubject(subject);
List<HostCertificateRecord> l8 = hcm.findHostCertificates(f8);
assertEquals(1, l8.size());
validateHostCertificateRecord(l8.get(0), id, sn, cert, host, owner, key, status, subject);
}
}
/**
 * Asserts that every field of the given record matches the expected values.
 */
private void validateHostCertificateRecord(HostCertificateRecord record, long id, long sn, X509Certificate cert,
String host, String owner, PublicKey key, HostCertificateStatus status, String subject) {
assertEquals(id, record.getId());
assertEquals(sn, record.getSerialNumber());
assertEquals(cert, record.getCertificate());
assertEquals(host, record.getHost());
assertEquals(owner, record.getOwner());
assertEquals(key, record.getPublicKey());
assertEquals(status, record.getStatus());
assertEquals(subject, record.getSubject());
}
/** Applies the given status to the certificate record identified by id. */
public void setHostCertificateStatus(HostCertificateManager hcm, long id, HostCertificateStatus status)
throws Exception {
    HostCertificateUpdate statusChange = new HostCertificateUpdate();
    statusChange.setId(id);
    statusChange.setStatus(status);
    hcm.updateHostCertificateRecord(statusChange);
}
/** Returns the default identity federation configuration used by most tests. */
private IdentityFederationProperties getConf() throws Exception {
    return Utils.getIdentityFederationProperties();
}
/**
 * Returns a configuration whose issued certificates live for only 35 seconds,
 * so expiration behavior can be exercised within a single test run.
 */
private IdentityFederationProperties getExpiringCredentialsConf() throws Exception {
    IdentityFederationProperties conf = Utils.getIdentityFederationProperties();
    Lifetime shortLifetime = new Lifetime();
    shortLifetime.setYears(0);
    shortLifetime.setMonths(0);
    shortLifetime.setDays(0);
    shortLifetime.setHours(0);
    shortLifetime.setMinutes(0);
    shortLifetime.setSeconds(35);
    conf.setIssuedCertificateLifetime(shortLifetime);
    return conf;
}
/**
 * Acquires the shared database, CA and blacklist fixtures and clears the
 * blacklist. Fails the test (with the underlying fault printed) if any
 * fixture cannot be created.
 */
protected void setUp() throws Exception {
    super.setUp();
    try {
        db = Utils.getDB();
        // A non-zero count here means a previous test leaked connections.
        assertEquals(0, db.getUsedConnectionCount());
        ca = Utils.getCA();
        blackList = new CertificateBlacklistManager(db);
        blackList.clearDatabase();
    } catch (Exception e) {
        FaultUtil.printFault(e);
        // fail() with the message is more informative than assertTrue(false).
        fail(e.getMessage());
    }
}
/**
 * Cleans up CA state and the blacklist and verifies that the test leaked no
 * database connections.
 */
protected void tearDown() throws Exception {
    // BUG FIX: this previously called super.setUp() by mistake.
    super.tearDown();
    try {
        ca.clearCertificateAuthority();
        blackList.clearDatabase();
        assertEquals(0, db.getUsedConnectionCount());
    } catch (Exception e) {
        FaultUtil.printFault(e);
        // fail() with the message is more informative than assertTrue(false).
        fail(e.getMessage());
    }
}
}
| |
package com.rho.rhoelements.apd;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.security.InvalidParameterException;
import java.util.Timer;
import java.util.TimerTask;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import android.bluetooth.BluetoothAdapter;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.widget.Toast;
import com.rho.rhoelements.Common;
import com.rho.rhoelements.LogEntry;
import com.rho.rhoelements.apd.ApdCommands;
import com.rho.rhoelements.apd.ApdConfiguration;
import com.rho.rhoelements.apd.ApdConfiguration.Language;
import com.rho.rhoelements.apd.ApdConfiguration.Transport;
import com.rho.rhoelements.apd.transport.ApdBluetooth;
import com.rho.rhoelements.apd.transport.ApdTransport;
import com.rho.rhoelements.apd.transport.ApdTransportFactory;
import com.rho.rhoelements.apd.transport.ApdTransport.ApdTransportError;
import com.rho.rhoelements.common.UnzipUtility;
import com.rhomobile.rhodes.extmanager.RhoExtManager;
public class ApdEngine
{
// Version string reported by Cmd_PSGetVersion.
private static final String APD_VERSION = "Symbol APD V1.00.00A";
// Line terminator used to split multi-line printer data.
private static final String EOL = "\r\n";
// Well-known APD working files under the application data path.
private static final String RESPONSE_FILE = Common.getDataPath() + "/APD/prtdrv.rsp";
private static final String DATA_FILE = Common.getDataPath() + "/APD/prtdrv.lbl";
private static final String PRINTER_DATA_FILE = Common.getDataPath() + "/APD/prtdrv.prn";
// Default search paths for format, template and ID-label files.
public static final String DEFAULT_FORMAT_PATH = Common.getDataPath() + "/APD/Formats/";
public static final String DEFAULT_TEMPLATE_PATH = Common.getDataPath() + "/APD/Template/";
public static final String DEFAULT_ID_PATH = Common.getDataPath() + "/APD/ID_Label/";
private static final String PRN_ID_LABEL = "ID";
// Default ports for the WLAN (515) and Bluetooth (1) transports.
private static final int APD_WLAN_DEFAULT_PORT = 515;
private static final int APD_BT_DEFAULT_PORT = 1;
private static final int ASCII_A = 65;
// Matches an embedded command of the form $$[CMD:hhhh,params]$$ where hhhh is a 4-digit hex code.
private static final String EMBEDDEDCOMMANDPATTERN = "\\$\\$\\[CMD:([0-9A-Fa-f]{4})(,.*)*\\]\\$\\$";
private static final int MAX_DATASTRING = 1024;
private static BluetoothAdapter adapter;
private boolean mTimeoutFlag = false;
// Human-readable result of the last command; returned by Cmd_PSGetLastMessage.
private String mLastMessage;
// Configuration loaded from apdconfig.xml by the constructor.
private static ApdConfiguration mApdConfiguration;
// Transport used to reach the printer, built from the configuration.
private ApdTransport mApdTransport;
private int mPrnDataCount = 0;
private String mPrnDataList[] = null;
// Data lines captured after an embedded command, split on EOL.
public String mPrnStringList[] = null;
private String mPrnDataFormat = null;
//Acceptable data types in variable data
private enum ApdDataType {
CHAR,
STRING,
HEX,
DECIMAL,
UNSIGNED,
INVALID
}
//Variable data type formatting
String prnFormatting[] = {"%c", "%s", "%x", "%d", "%u"};
// Indexes the three configurable file search paths.
private enum ApdPathIndex {
FORMAT_PATH,
TEMPLATE_PATH,
IDLABEL_PATH
}
// Result codes for APD commands; PSExternalEx exposes their ordinals to callers.
public enum ApdError {
ERR_OK,
ERR_INV_ID,
ERR_FILE,
ERR_PORT,
ERR_NODATA,
ERR_NOFILE,
ERR_NOQTY,
ERR_WINDOW,
ERR_SKT,
ERR_CON,
ERR_SEND,
ERR_CTS,
ERR_SETUP,
ERR_BUFF,
ERR_CREATE,
ERR_INIT,
ERR_CLOSE,
ERR_DESTROY,
ERR_DEINIT,
ERR_LOST
}
// Distinguishes the two printer-address formats the engine handles.
private enum AddressType {
MAC_ADDRESS,
IP_ADDRESS
}
/**
 * Creates the APD engine: unpacks the bundled APD resources (formats and
 * templates) on first run, reads apdconfig.xml and builds the transport
 * described by that configuration. Construction errors are logged but not
 * rethrown; mApdTransport may remain null on failure.
 */
public ApdEngine()
{
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
    //Unzip the APD files if they haven't been unzipped yet.
    boolean isApdUnzippedOk = UnzipUtility.unzipFileIfRequired(Common.getDataPath() + "/apd.zip",
            Common.getDataPath() + "/",
            Common.getDataPath() + "/APD");
    // Idiom fix: use !flag instead of "== false", and brace the body.
    if (!isApdUnzippedOk)
    {
        Common.logger.add(new LogEntry(LogEntry.PB_LOG_WARNING, "an error occured while unzipping apd files (formats and templates)"));
    }
    mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_ok"));
    try
    {
        mApdConfiguration = ApdConfigurator.readConfiguration();
        Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "apdconfig.xml read"));
        mApdTransport = ApdTransportFactory.createTransport(mApdConfiguration);
    }
    catch (Exception e)
    {
        Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, "Error while constructing ApdEngine"));
        e.printStackTrace();
    }
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
}
/**
 * Releases the configuration and transport created by the constructor.
 */
public void destroy()
{
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
    mApdConfiguration = null;
    // The transport may be null if construction failed.
    ApdTransport transport = mApdTransport;
    if (transport != null)
    {
        transport.destroy();
    }
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
}
/**
 * Generic command interface (This should not be called directly).
 * Integer-returning wrapper around {@link #PSExternal} for callers that
 * expect the numeric APD error code rather than the enum.
 * @param command is the command to be executed
 * @param params is a string coding the parameters to the command
 * @return the ordinal of the {@code ApdError} produced by the command
 */
public synchronized int PSExternalEx(int command, String params)
{
return PSExternal(command, params).ordinal();
}
/**
 * Generic command interface (This should not be called directly).
 * Dispatches an APD command code to its implementation.
 * @param command is the command to be executed, one of the {@code ApdCommands} codes
 * @param params is a string coding the parameters to the command; literal
 * "\r" and "\n" escape sequences are expanded to real CR/LF before dispatch
 * @return the {@code ApdError} produced by the command (ERR_OK on success;
 * ERR_INIT for unrecognized command codes)
 */
public synchronized ApdError PSExternal(int command, String params)
{
Common.log("PSExternal");//testing
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
ApdError res = ApdError.ERR_OK;
// Expand escaped CR/LF sequences so callers can pass them as plain text.
if (params != null)
{
params = params.replaceAll("\\\\r", "\r");
params = params.replaceAll("\\\\n", "\n");
}
switch (command)
{
case ApdCommands.Cmd_PSSendString:
{
res = PSSendStringIndirect(params);
break;
}
case ApdCommands.Cmd_PSSendFormat:
{
res = PSSendFormatIndirect(params);
break;
}
case ApdCommands.Cmd_PSPrintIdLabel:
{
res = PSPrintIdLabelIndirect(params);
break;
}
case ApdCommands.Cmd_PSGetVersion:
{
// Reports the fixed driver version string via the response file.
res = writeApdResponse(APD_VERSION);
break;
}
case ApdCommands.Cmd_PSBthPowerOn:
{
res = switchBthPower(true);
break;
}
case ApdCommands.Cmd_PSBthPowerOff:
{
res = switchBthPower(false);
break;
}
case ApdCommands.Cmd_PSGetLastMessage:
{
res = writeApdResponse(mLastMessage);
break;
}
case ApdCommands.Cmd_PSSetFormatPath:
{
res = changeCurrentPath(ApdPathIndex.FORMAT_PATH, params);
break;
}
case ApdCommands.Cmd_PSSetTemplatePath:
{
res = changeCurrentPath(ApdPathIndex.TEMPLATE_PATH, params);
break;
}
case ApdCommands.Cmd_PSSetIDLabelPath:
{
res = changeCurrentPath(ApdPathIndex.IDLABEL_PATH, params);
break;
}
case ApdCommands.Cmd_PSGetPrinterList:
{
res = writeApdResponse(PSGetPrinterList());
break;
}
case ApdCommands.Cmd_PSOpenPort:
{
res = openPort(true);
break;
}
case ApdCommands.Cmd_PSClosePort:
{
res = openPort(false);
break;
}
case ApdCommands.Cmd_PSPowerOn:
case ApdCommands.Cmd_PSPowerOff:
{
// Printer power control is not supported on this platform.
res = ApdError.ERR_CREATE;
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_functionnotsupported"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
break;
}
case ApdCommands.Cmd_PSShowProgress:
{
if (mApdTransport != null)
mApdTransport.setProgressEnabled(true);
break;
}
case ApdCommands.Cmd_PSHideProgress:
{
if (mApdTransport != null)
mApdTransport.setProgressEnabled(false);
break;
}
case ApdCommands.Cmd_PSSetPrinter:
{
res = setCurrentPrinter(params);
break;
}
case ApdCommands.Cmd_PSSendData:
{
res = PSSendDataIndirect(params);
break;
}
default:
{
// Unknown command code.
res = ApdError.ERR_INIT;
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_unknownerror"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
break;
}
}
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
return res;
}
/**
 * This method implements the Cmd_PSSendString command, which allows data to be sent directly without needing to place
 * it in a file first.
 *
 * @param data is a string containing either data to be sent to the printer or an 'embedded' command followed by data.
 * @return ERR_OK on success, ERR_NODATA when data is null, ERR_SEND when the
 * transport write fails, or the result of the embedded command if one
 * was found in the data.
 */
private ApdError PSSendStringIndirect(String data)
{
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
ApdError res = ApdError.ERR_OK;
//mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_ok"));
if (data != null)
{
Pattern embeddedCommandPattern = Pattern.compile(EMBEDDEDCOMMANDPATTERN);
Matcher embeddedCommandMatcher = embeddedCommandPattern.matcher(data);
if (embeddedCommandMatcher.find()) //If data is an embedded command...
{
// Parse the $$[CMD:...]$$ token and re-dispatch through PSExternal.
ApdCommand embeddedCommand = apdCommandFromEmbedded(data.substring(embeddedCommandMatcher.start(), embeddedCommandMatcher.end()));
if (embeddedCommand.getCode() != ApdCommands.Cmd_PSSendString) //The APD specs say that Cmd_PSSendString cannot be embedded
{
// The +2 skips the two-character EOL ("\r\n") that follows the
// embedded command token; the remainder is variable data.
if ( (embeddedCommandMatcher.end() + 2) <= data.length() )
mPrnStringList = data.substring(embeddedCommandMatcher.end() + 2).split(EOL);
res = PSExternal(embeddedCommand.getCode(), embeddedCommand.getParams());
}
}
else
{
// Plain data: push the raw bytes straight through the transport.
try
{
if (mApdTransport.write(data.getBytes()).ordinal() > ApdTransportError.OK.ordinal())
{
res = ApdError.ERR_SEND;
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_sendfailed"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
}
}
catch (Exception e)
{
// Any transport failure (including a null transport) maps to ERR_SEND.
res = ApdError.ERR_SEND;
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_sendfailed"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
}
}
}
else
{
res = ApdError.ERR_NODATA;
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_nodata"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
}
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
return res;
}
/**
 * Implements the Cmd_PSSendFormat command: sends a format file to the printer.
 * The file is first resolved against the configured format path, then scanned
 * for an embedded command; if one is found it is dispatched through PSExternal,
 * otherwise the raw file is streamed to the printer.
 *
 * @param formatFileName the format filename including the path. Generally the extension is not specified
 * @return ERR_OK on success; ERR_INIT when the name is null or cannot be resolved;
 *         ERR_NOFILE when the file does not exist; ERR_BUFF on a read error
 */
private ApdError PSSendFormatIndirect(String formatFileName)
{
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
ApdError res = ApdError.ERR_OK;
//mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_ok"));
if (formatFileName != null)
{
/*
* The first thing to check is whether the filename contains an extension or not. Generally there is not extension
* as this is determined by the printer type (the content of the format file is printer dependent).
* There might be situations where the extension may be passed, for instance when a format file can be used with
* any printer.
*/
formatFileName = getFileName(ApdPathIndex.FORMAT_PATH, formatFileName);
if (formatFileName == null)
{
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_configurationerror"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, mLastMessage));
return ApdError.ERR_INIT;
}
/*
* Check whether the format file contains embedded command
*/
FileReader formatFileReader = null;
char[] buffer = null;
try
{
File formatFile = new File(formatFileName);
formatFileReader = new FileReader(formatFile);
// NOTE(review): FileReader uses the platform default charset, and a single
// read() is not guaranteed to fill the buffer completely — TODO confirm
// format files are small enough / ASCII so this never truncates in practice.
buffer = new char[(int)formatFile.length()];
formatFileReader.read(buffer);
String bufferString = new String(buffer);
Pattern embeddedCommandPattern = Pattern.compile(EMBEDDEDCOMMANDPATTERN);
Matcher embeddedCommandMatcher = embeddedCommandPattern.matcher(bufferString);
if (embeddedCommandMatcher.find()) //If data is an embedded command...
{
ApdCommand embeddedCommand = apdCommandFromEmbedded(bufferString.substring(embeddedCommandMatcher.start(), embeddedCommandMatcher.end()));
if (embeddedCommand.getCode() != ApdCommands.Cmd_PSSendString) //The APD specs say that Cmd_PSSendString cannot be embedded
{
// NOTE(review): unlike the direct-data path, there is no
// "(end() + 2) <= length()" guard here, so a command sitting at the
// very end of the file would make this substring throw — confirm.
mPrnStringList = bufferString.substring(embeddedCommandMatcher.end() + 2).split(EOL);
res = PSExternal(embeddedCommand.getCode(), embeddedCommand.getParams());
}
}
else
res = prnSendData(formatFileName);
}
catch (FileNotFoundException e)
{
res = ApdError.ERR_NOFILE;
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_filenotfound"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, mLastMessage));
}
catch (IOException e)
{
res = ApdError.ERR_BUFF;
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_ioerror"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
e.printStackTrace();
}
finally
{
// Always release the reader, even on error paths.
if (formatFileReader != null)
try
{
formatFileReader.close();
}
catch (IOException e)
{
e.printStackTrace();
}
}
}
else
{
res = ApdError.ERR_INIT;
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_nodata"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, mLastMessage));
}
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
return res;
}
/**
 * Commands all active printers in the vicinity of the terminal to print a label showing their PID.
 * TODO: the C implementation of this doesn't do what the description says. Yet, it sounds like it
 * makes sense only for BT printers, maybe because it's only meant to work with SPAN printers
 * (SPAN = Symbol Personal Area Network).
 *
 * @param labelFileName the id filename; may be null/empty (default label used) or extension-less
 * @return the outcome of sending the label file to the printer
 */
private ApdError PSPrintIdLabelIndirect(String labelFileName)
{
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
    // The extension is a single letter derived from the configured printer language.
    String languageExtension = "." + (char)(mApdConfiguration.getLanguage().ordinal() + ASCII_A);
    String fileToSend;
    if ( (labelFileName == null) || labelFileName.isEmpty() )
        fileToSend = PRN_ID_LABEL + languageExtension;  // filename omitted: use the default label
    else if (labelFileName.indexOf('.') < 0)
        fileToSend = labelFileName + languageExtension; // no extension supplied: append it
    else
        fileToSend = labelFileName;                     // filename already complete
    ApdError res = prnSendData(mApdConfiguration.getLabelIdPath() + fileToSend);
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
    return res;
}
/**
 * Persists an APD response string to the response file (RESPONSE_FILE),
 * overwriting any previous content.
 *
 * @param response the response text to write
 * @return ERR_OK on success, ERR_FILE when the file cannot be written
 */
private ApdError writeApdResponse(String response)
{
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
    ApdError res = ApdError.ERR_OK;
    FileWriter fstream = null;
    BufferedWriter out = null;
    try
    {
        // Create (or truncate) the response file and write the payload.
        fstream = new FileWriter(RESPONSE_FILE);
        out = new BufferedWriter(fstream);
        out.write(response);
        out.flush();
    }
    catch (IOException e)
    {
        res = ApdError.ERR_FILE;
        mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_ioerror"));
        Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
    }
    finally
    {
        try
        {
            // Closing the BufferedWriter also closes the wrapped FileWriter;
            // close fstream directly only if the wrapper was never constructed.
            // (Previously fstream was closed first, which could make out.close()
            // fail while flushing a non-empty buffer.)
            if (out != null)
                out.close();
            else if (fstream != null)
                fstream.close();
        } catch (IOException e)
        {
            Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_ioerror"))));
        }
    }
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
    return res;
}
/**
 * Opens or closes the transport channel towards the printer.
 *
 * @param open true to open the channel, false to close it
 * @return ERR_OK on success, ERR_PORT when the transport reports a failure or throws
 */
private ApdError openPort(boolean open)
{
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
    ApdError res = ApdError.ERR_OK;
    //mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_ok"));
    try
    {
        ApdTransportError transportResult;
        if (open)
        {
            Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Attempting to open channel"));
            transportResult = mApdTransport.open();
        }
        else
        {
            Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Attempting to close channel"));
            transportResult = mApdTransport.close();
        }
        // Any transport code ranked above OK is treated as a port failure.
        if (transportResult.ordinal() > ApdTransportError.OK.ordinal())
        {
            res = ApdError.ERR_PORT;
            mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_porterror"));
            Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
        }
    }
    catch (Exception e)
    {
        res = ApdError.ERR_PORT;
        mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_porterror"));
        Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
    }
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
    return res;
}
/**
 * Direct version of PSGetVersion.
 *
 * @return the current version of the APD driver
 */
public String PSGetVersion()
{
    // Nothing to compute: the version is a compile-time constant.
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, null));
    return APD_VERSION;
}
/**
 * Direct version of PSGetLastMessage.
 *
 * @return the last error message recorded by APD
 */
synchronized public String PSGetLastMessage()
{
    // Snapshot the message so the value logged and the value returned agree.
    String lastMessage = mLastMessage;
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, lastMessage));
    return lastMessage;
}
/**
 * Switches the Bluetooth radio on or off, waiting (bounded by
 * ApdBluetooth.TIMEOUT via TimerAction/mTimeoutFlag) for the adapter to
 * become available and to reach the requested state.
 *
 * @param powerOn true to enable the adapter, false to disable it
 * @return ERR_OK on success, ERR_INIT when the state change timed out
 */
private ApdError switchBthPower(boolean powerOn)
{
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
    ApdError res = ApdError.ERR_OK;
    IntentFilter filter = new IntentFilter(BluetoothAdapter.ACTION_STATE_CHANGED);
    BroadcastReceiver btActivationReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent)
        {
            if (intent.getIntExtra(BluetoothAdapter.EXTRA_STATE, BluetoothAdapter.STATE_OFF) == BluetoothAdapter.STATE_ON)
            {
                Toast.makeText(Common.mainActivity, Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_bton")), Toast.LENGTH_SHORT).show();
                Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "BT enabled"));
            }
            //It's purposely STATE_TURNING_OFF rather than STATE_OFF
            else if (intent.getIntExtra(BluetoothAdapter.EXTRA_STATE, BluetoothAdapter.STATE_OFF) == BluetoothAdapter.STATE_TURNING_OFF)
            {
                Toast.makeText(Common.mainActivity, Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_btoff")), Toast.LENGTH_SHORT).show();
                Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "BT disabled"));
            }
        }
    };
    Common.mainActivity.registerReceiver(btActivationReceiver, filter);
    try
    {
        if (adapter == null)
        {
            // The default adapter is fetched on the UI thread.
            Common.mainActivity.runOnUiThread(new Runnable() {
                @Override
                public void run()
                {
                    adapter = BluetoothAdapter.getDefaultAdapter();
                }
            });
        }
        Timer adapterTimer = new Timer();
        adapterTimer.schedule(new TimerAction(), ApdBluetooth.TIMEOUT);
        // Wait for the UI thread to publish the adapter; sleep instead of the
        // previous empty busy-wait, which pegged a CPU core.
        while ( (adapter == null) && (mTimeoutFlag == false) )
            pollSleep();
        adapterTimer.cancel();
        mTimeoutFlag = false;
        if (adapter != null)
        {
            Timer stateTimer = new Timer();
            stateTimer.schedule(new TimerAction(), ApdBluetooth.TIMEOUT);
            if ( powerOn && (adapter.isEnabled() == false) )
                adapter.enable();
            else if ( (powerOn == false) && (adapter.isEnabled() == true) )
                adapter.disable();
            // Wait (bounded by the timer) for the adapter to reach the requested state.
            while ( (adapter.isEnabled() != powerOn) && (mTimeoutFlag == false) )
                pollSleep();
            stateTimer.cancel();
            if (mTimeoutFlag)
            {
                Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, "BT operation took too long"));
                mTimeoutFlag = false;
                res = ApdError.ERR_INIT;
            }
        }
    }
    finally
    {
        // Always release the receiver, even if a wait is aborted by an exception
        // (previously an exception would have leaked the registration).
        Common.mainActivity.unregisterReceiver(btActivationReceiver);
    }
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
    return res;
}

/** Sleeps briefly while polling; restores the interrupt flag if interrupted. */
private void pollSleep()
{
    try
    {
        Thread.sleep(10);
    }
    catch (InterruptedException e)
    {
        Thread.currentThread().interrupt();
    }
}
/**
 * TimerTask used to bound the polling loops in switchBthPower: when the
 * scheduled timeout fires it raises mTimeoutFlag, which those loops test.
 */
private class TimerAction extends TimerTask
{
@Override
public void run()
{
mTimeoutFlag = true;
}
}
/**
 * Parses an embedded command token into an ApdCommand. The characters at
 * indices 7-10 are read as a four-digit hexadecimal command code; an optional
 * parameter list follows the first ',' and ends at ']'.
 * The input validation for this method is carried out by the calling method.
 *
 * @param embeddedCommand the matched embedded-command text
 * @return the decoded command and its (possibly null) parameters
 */
private ApdCommand apdCommandFromEmbedded(String embeddedCommand)
{
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
    // Convert the command code from hexadecimal to decimal.
    int commandCode = Integer.parseInt(embeddedCommand.substring(7, 11), 16);
    String params = null;
    int commaPosition = embeddedCommand.indexOf(",");
    if (commaPosition >= 0)
        params = embeddedCommand.substring(commaPosition + 1, embeddedCommand.indexOf("]"));
    ApdCommand res = new ApdCommand(commandCode, params);
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
    return res;
}
/**
 * This method changes the default path for either Format, Template or IdLabel files
 * and persists the updated configuration.
 *
 * @param index FORMAT_PATH to change the current Format path, TEMPLATE_PATH or IDLABEL_PATH
 * @param path the new path to be set
 * @return ERR_OK on success; ERR_NODATA for a null/empty path; ERR_FILE when the
 *         path is invalid/unparsable or the configuration cannot be written
 */
private ApdError changeCurrentPath(ApdPathIndex index, String path)
{
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
    ApdError res = ApdError.ERR_OK;
    //mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_ok"));
    if ( (path != null) && (path.compareTo("") != 0) )
    {
        try
        {
            File newPathFile = new File(Common.parseAndroidURI(path).getPath());
            if ( (newPathFile.isDirectory() == false) || (newPathFile.exists() == false) )
            {
                mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_pathnotvalid"));
                Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
                return ApdError.ERR_FILE;
            }
            // Configured paths are always stored with a trailing slash.
            if (path.endsWith("/") == false)
                path = path + "/";
            if (index == ApdPathIndex.FORMAT_PATH)
                mApdConfiguration.setFormatPath(path);
            else if (index == ApdPathIndex.TEMPLATE_PATH)
                mApdConfiguration.setTemplatePath(path);
            else
                mApdConfiguration.setLabelIdPath(path);
        }
        catch (InvalidParameterException e1)
        {
            // The path could not be parsed: report it rather than silently
            // persisting an unchanged configuration and returning ERR_OK
            // (which is what the previous empty catch block did).
            mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_pathnotvalid"));
            Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
            return ApdError.ERR_FILE;
        }
        catch (URISyntaxException e1)
        {
            // Same rationale as above: an unparsable URI is a caller error.
            mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_pathnotvalid"));
            Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
            return ApdError.ERR_FILE;
        }
        try
        {
            ApdConfigurator.writeConfiguration(mApdConfiguration);
        }
        catch (IOException e)
        {
            res = ApdError.ERR_FILE;
            mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_ioerror"));
            Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
        }
    }
    else
    {
        res = ApdError.ERR_NODATA;
        mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_pathnotvalid"));
        Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
    }
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
    return res;
}
/**
 * Makes the printer identified by the given PID the current printer, rebuilding
 * the configuration and the transport. Only 4-char wireless PIDs
 * ("B..." Bluetooth or "W..." WLAN) are supported; every other recognized PID
 * shape is rejected with ERR_SETUP.
 *
 * @param apdPrinterId the PID of the device to be set as current printer
 * @return ERR_OK on success; ERR_INV_ID for a malformed PID; ERR_SETUP for an
 *         unsupported PID family; ERR_FILE/ERR_INIT for persistence/transport errors
 */
private ApdError setCurrentPrinter(String apdPrinterId)
{
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
ApdError res = ApdError.ERR_INV_ID;
ApdConfiguration newPrinter = new ApdConfiguration();
String address;
StringBuffer apdPrinterIdBuffer = new StringBuffer(apdPrinterId);
openPort(false); //Closes the current connection if any is active
if (isPatternMatching(apdPrinterIdBuffer.toString(), ApdConfiguration.FOUR_CHAR_ID_WIRELESS_REGEX) >= 0) //If the pid is a 4-char one
{
//Check whether the PID contains an address (either MAC or IP)
int barPosition = apdPrinterIdBuffer.toString().indexOf('|');
// NOTE(review): the comment below says the bar is misplaced, but the code
// tests barPosition == 4 (bar directly after the 4-char PID, i.e. the port
// is missing) and inserts a ':' to normalize the shape — confirm intent.
// Also, if no '|' exists at all, barPosition stays -1 and the substring
// below degenerates to the whole buffer; presumably the regex guarantees
// a '|' is present — TODO confirm.
if (barPosition == 4) //there is a bar but it's not where it should be
{
apdPrinterIdBuffer.insert(4, ':');
barPosition++;
}
address = apdPrinterIdBuffer.substring(barPosition + 1, apdPrinterIdBuffer.length());
//Check whether the PID is for a BT or WLAN printer
boolean validAddress;
if (apdPrinterIdBuffer.charAt(0) == 'B')
{
//A mac address is expected
//Check whether the address is a valid MAC address
validAddress = false;
if (isPatternMatching(address, ApdConfiguration.MAC_ADDRESS_COLUMNS_REGEX) >= 0)
validAddress = true;
else if (isPatternMatching(address, ApdConfiguration.MAC_ADDRESS_NOCOLUMNS_REGEX) >= 0)
{
validAddress = true;
// Colon-less form: insert the ':' separators.
address = fixAddress(address, AddressType.MAC_ADDRESS);
}
if (validAddress == false)
{
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_pidnotvalid"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
return ApdError.ERR_INV_ID; //BT PID does not contain a valid MAC address
}
newPrinter.setBtMac(address);
newPrinter.setRfComm(APD_BT_DEFAULT_PORT);
newPrinter.setTransport(Transport.Bluetooth);
}
else if (apdPrinterIdBuffer.charAt(0) == 'W')
{
//An IP address is expected
//Check whether the address is a valid IP address
validAddress = false;
if (isPatternMatching(address, ApdConfiguration.IP_ADDRESS_DOTS_REGEX) >= 0)
validAddress = true;
else
{
if (isPatternMatching(address, ApdConfiguration.IP_ADDRESS_NODOTS_REGEX) >= 0)
{
validAddress = true;
//Need to add the columns to the mac address
address = fixAddress(address, AddressType.IP_ADDRESS);
}
}
if (validAddress == false)
{
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_pidnotvalid"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
return ApdError.ERR_INV_ID; //BT PID does not contain a valid MAC address
}
newPrinter.setIpAddress(address);
newPrinter.setIpPort(APD_WLAN_DEFAULT_PORT);
newPrinter.setTransport(Transport.Wlan);
}
// Check the port number
int columnPosition = apdPrinterIdBuffer.toString().indexOf(':');
if (columnPosition >= barPosition) //first column is not where expected
{
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_pidnotvalid"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
return ApdError.ERR_INV_ID;
}
//Check that the port number is valid
// NOTE(review): the validity check runs the regex on substring(columnPosition+1,
// barPosition - 1) but the parsed value uses substring(columnPosition+1,
// barPosition) — the differing upper bounds look like an off-by-one; confirm
// which end index is intended.
if ( (barPosition - columnPosition > 1) && (isPatternMatching(apdPrinterIdBuffer.substring(columnPosition + 1, barPosition - 1), ApdConfiguration.PORT_REGEX) >= 0) )
newPrinter.setIpPort(Integer.valueOf(apdPrinterIdBuffer.substring(columnPosition + 1, barPosition)));
newPrinter.setModel(Integer.valueOf(apdPrinterIdBuffer.substring(2, 4)));
newPrinter.setId(0);
// Char 1 encodes the printer language as a letter offset from 'A'.
newPrinter.setLanguage(Language.values()[apdPrinterIdBuffer.charAt(1) - ASCII_A]);
// Carry the existing paths over to the new configuration.
newPrinter.setFormatPath(mApdConfiguration.getFormatPath());
newPrinter.setTemplatePath(mApdConfiguration.getTemplatePath());
newPrinter.setLabelIdPath(mApdConfiguration.getLabelIdPath());
mApdConfiguration = newPrinter;
res = ApdError.ERR_OK;
//mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_ok"));
try
{
// Persist the new configuration and rebuild the transport for it.
ApdConfigurator.writeConfiguration(mApdConfiguration);
mApdTransport.destroy();
mApdTransport = null;
mApdTransport = ApdTransportFactory.createTransport(mApdConfiguration);
}
catch (IOException e)
{
res = ApdError.ERR_FILE;
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_ioerror"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
}
catch (Exception e)
{
res = ApdError.ERR_INIT;
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_unknownerror"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
e.printStackTrace();
}
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
return res;
}
// Anything past this point is a recognized-but-unsupported PID family.
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_pidnotsupported"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
if (isPatternMatching(apdPrinterId, ApdConfiguration.FIVE_CHAR_ID_TOSHIBA_REGEX) >= 0) //If the pid is a 5-char one
return ApdError.ERR_SETUP; //this pid is not supported
if (isPatternMatching(apdPrinterId, ApdConfiguration.FOUR_CHAR_ID_WIRED_REGEX) >= 0)
return ApdError.ERR_SETUP; //this pid is not supported
if (isPatternMatching(apdPrinterId, ApdConfiguration.FIVE_CHAR_ID_SPAN_REGEX) >= 0)
return ApdError.ERR_SETUP; //this pid is not supported
if (isPatternMatching(apdPrinterId, ApdConfiguration.FIVE_CHAR_ID_WIRED_REGEX) >= 0)
return ApdError.ERR_SETUP; //this pid is not supported
if (isPatternMatching(apdPrinterId, ApdConfiguration.SEVEN_CHAR_ID_WIRED_REGEX) >= 0)
return ApdError.ERR_SETUP; //this pid is not supported
if (isPatternMatching(apdPrinterId, ApdConfiguration.SEVEN_CHAR_ID_SPAN_REGEX) >= 0)
return ApdError.ERR_SETUP; //this pid is not supported
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
return res;
}
/**
 * Requests the PID of the current printer.
 *
 * @return the string coding the printer PID, or null when the configuration is inconsistent
 */
synchronized public String PSGetPrinterList()
{
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
    String res = null;
    boolean hasBtMac = mApdConfiguration.getBtMac() != null;
    boolean hasIpAddress = mApdConfiguration.getIpAddress() != null;
    char languageLetter = (char)(mApdConfiguration.getLanguage().ordinal() + ASCII_A);
    if (!hasBtMac && hasIpAddress)
    {
        // WLAN printer.
        res = String.format("W%c%02d:%d|%S", languageLetter,
                            mApdConfiguration.getModel(),
                            mApdConfiguration.getIpPort(),
                            removeSeparatorFromAddress(mApdConfiguration.getIpAddress(), AddressType.IP_ADDRESS));
    }
    else if (hasBtMac && !hasIpAddress)
    {
        // Bluetooth printer. For maintaining compatibility with the Windows
        // version, the colons are stripped from the BT MAC address.
        res = String.format("B%c%02d:%d|%S", languageLetter,
                            mApdConfiguration.getModel(),
                            mApdConfiguration.getRfComm(),
                            removeSeparatorFromAddress(mApdConfiguration.getBtMac(), AddressType.MAC_ADDRESS));
    }
    // Any other combination means the printer configuration is inconsistent: res stays null.
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
    return res;
}
/**
 * Looks for the first occurrence of a regular expression inside the input.
 *
 * @param input the string to scan
 * @param patternString the regular expression to search for
 * @return the start index of the first match, or -1 when there is none
 */
private int isPatternMatching(String input, String patternString)
{
    Matcher patternMatcher = Pattern.compile(patternString).matcher(input);
    return patternMatcher.find() ? patternMatcher.start() : -1;
}
/**
 * Inserts the conventional separators into a separator-less address:
 * ':' every two characters for MAC addresses, '.' every three characters
 * for IP addresses. Any other address type is returned unmodified.
 *
 * @param address the bare address string
 * @param addressType MAC_ADDRESS or IP_ADDRESS
 * @return the address with separators inserted
 */
private String fixAddress(String address, AddressType addressType)
{
    final int upperBound;
    final int increment;
    final char separator;
    // Switch on the enum constants themselves instead of ordinal() with magic
    // case numbers, so reordering the enum cannot silently break this mapping.
    switch (addressType)
    {
        case MAC_ADDRESS:
            upperBound = 10;
            increment = 2;
            separator = ':';
            break;
        case IP_ADDRESS:
            upperBound = 9;
            increment = 3;
            separator = '.';
            break;
        default:
            return address;
    }
    StringBuffer addressBuffer = new StringBuffer(address);
    int inserted = 0;
    for (int i = increment; i <= upperBound; i += increment)
    {
        // Every prior insertion shifts the remaining positions right by one.
        addressBuffer.insert(i + inserted, separator);
        inserted++;
    }
    return addressBuffer.toString();
}
/**
 * Strips the separator characters from an address: ':' for MAC addresses,
 * '.' for every other address type.
 *
 * @param address the address to clean up
 * @param addressType selects which separator character is removed
 * @return the address without separators
 */
private String removeSeparatorFromAddress(String address, AddressType addressType)
{
    String separator;
    if (addressType == AddressType.MAC_ADDRESS)
        separator = ":";
    else
        separator = ".";
    // String.replace removes every occurrence; no need for a char-by-char
    // substring loop (the previous version allocated two substrings per char).
    return address.replace(separator, "");
}
/**
 * Resolves a user-supplied file name into a full path: prepends the configured
 * Format/Template/IdLabel directory when no path is given, and appends a
 * language/model extension when none is present, then normalizes the result
 * through Common.parseAndroidURI.
 *
 * @param pathIndex which configured base path to use when the name has no path
 * @param fileName the raw file name, possibly with path and/or extension
 * @return the resolved path, or null when the name cannot be resolved
 */
private String getFileName(ApdPathIndex pathIndex, String fileName)
{
String res = null;
String ext = "";
int posOfFinalSlash = fileName.lastIndexOf("/");
try
{
// A '.' before the last slash belongs to a directory name, not an extension.
if ( (fileName.indexOf(".") == -1 ) || (fileName.lastIndexOf(".") < posOfFinalSlash) ) //no extension is given
{
// The extension encodes the language letter and, when known, the model number.
if (mApdConfiguration.getModel() > 0)
ext = "." + String.format("%c%02d", (char)(mApdConfiguration.getLanguage().ordinal() + ASCII_A), mApdConfiguration.getModel());
else
ext = "." + String.format("%c", (char)(mApdConfiguration.getLanguage().ordinal() + ASCII_A));
}
if (posOfFinalSlash == -1) //Path is not in the parameter...
{
if (pathIndex == ApdPathIndex.FORMAT_PATH)
res = mApdConfiguration.getFormatPath() + fileName;
else if (pathIndex == ApdPathIndex.TEMPLATE_PATH)
res = mApdConfiguration.getTemplatePath() + fileName;
else
res = mApdConfiguration.getLabelIdPath() + fileName;
}
else
res = fileName;
try
{
res = Common.parseAndroidURI(res + ext).getPath();
}
catch (InvalidParameterException e)
{
res = null;
}
catch (URISyntaxException e)
{
res = null;
}
}
catch (NullPointerException e)
{
// NOTE(review): the NPE catch acts as a guard for missing configuration
// values (presumably mApdConfiguration or one of its paths being null);
// explicit null checks would make the failure mode clearer — confirm.
Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, "Error while reading APD configuration"));
res = null;
}
return res;
}
/**
 * This direct command may be used to send data to the printer without having to put data into a file.
 *
 * @param pcData is an array of strings containing the data to send to the printer
 * @param iQty is the number of times that the data have to be sent
 * @return the result of dispatching Cmd_PSSendData through PSExternal
 */
synchronized public ApdError PSSendData(String pcData[], int iQty)
{
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, null));
    // Stash the payload where the indirect handler expects it, then dispatch.
    mPrnStringList = pcData;
    return PSExternal(ApdCommands.Cmd_PSSendData, String.valueOf(iQty));
}
/**
 * Indirect handler for Cmd_PSSendData: renders the previously staged data
 * (mPrnStringList or PRINTER_DATA_FILE) through a template or stored format
 * and sends the result to the printer.
 *
 * @param quantity the number of copies, as a decimal string
 * @return ERR_OK on success; ERR_NOQTY for a non-numeric quantity; ERR_INIT when
 *         the template name cannot be resolved; ERR_NOFILE when no usable template exists
 */
private ApdError PSSendDataIndirect(String quantity)
{
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
ApdError res = ApdError.ERR_OK;
//mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_ok"));
ApdError intermediateRes;
String templateFileName;
int intQuantity = 0;
try
{
intQuantity = Integer.valueOf(quantity);
}
catch (NumberFormatException e)
{
res = ApdError.ERR_NOQTY;
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_nodata"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
}
// NOTE(review): a literal quantity of "0" falls through here and returns
// ERR_OK without printing anything — confirm that is the intended contract.
if (intQuantity == 0)
return res;
if (prnTextToList() > 0)
{
// The first staged line (mPrnDataFormat) names the template to use.
templateFileName = getFileName(ApdPathIndex.TEMPLATE_PATH, mPrnDataFormat);
if (templateFileName == null)
{
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_configurationerror"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, mLastMessage));
return ApdError.ERR_INIT;
}
//Check whether the template file exists
File templateFile = new File(templateFileName);
if (templateFile.exists() == false)
{
// No template file: fall back to the printer's stored-format mode if available.
if (isStoredFormatSupported())
intermediateRes = prnStoredFormat(mApdConfiguration.getLanguage(), intQuantity);
else
intermediateRes = ApdError.ERR_NOFILE;
}
else
intermediateRes = prnGenericDrv(templateFileName, intQuantity);
if (intermediateRes == ApdError.ERR_OK)
res = prnSendData(DATA_FILE);
prnDestroyList();
}
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
return res;
}
/** Releases the staged print-data list and resets its element counter. */
private void prnDestroyList()
{
mPrnDataList = null;
mPrnDataCount = 0;
}
/**
 * Builds mPrnDataList either from mPrnStringList (preferred) or, when that
 * yields nothing, from the lines of PRINTER_DATA_FILE. When mPrnStringList is
 * used, its first entry is also captured as the format name (mPrnDataFormat).
 * mPrnStringList is consumed (set to null) in either case.
 *
 * @return the number of data lines loaded (mPrnDataCount)
 */
private int prnTextToList()
{
    int wLoop;
    mPrnDataCount = 0;
    prnDestroyList();
    if (mPrnStringList != null)
    {
        mPrnDataCount = mPrnStringList.length;
        if (mPrnDataCount > 0)
        {
            mPrnDataList = new String[mPrnDataCount];
            for (wLoop = 0; wLoop < mPrnDataCount; wLoop++)
            {
                // Capture the first field (format name).
                if (wLoop == 0)
                    mPrnDataFormat = mPrnStringList[wLoop];
                mPrnDataList[wLoop] = mPrnStringList[wLoop];
            }
        }
    }
    mPrnStringList = null;
    // If no data came from mPrnStringList, try the file.
    if (mPrnDataCount == 0)
    {
        // Two passes over the file: first count the lines, then fill the array.
        // This replaces the previous mark()/reset() scheme, whose read-ahead
        // limit was taken from a *different* stream on the same file and whose
        // BufferedReader was never closed (resource leak).
        BufferedReader countReader = null;
        BufferedReader fillReader = null;
        try
        {
            countReader = new BufferedReader(new FileReader(PRINTER_DATA_FILE));
            while (countReader.readLine() != null)
                mPrnDataCount++;
            mPrnDataList = new String[mPrnDataCount];
            fillReader = new BufferedReader(new FileReader(PRINTER_DATA_FILE));
            int i = 0;
            String line;
            // Guard against the file growing between the two passes.
            while ( (i < mPrnDataCount) && ((line = fillReader.readLine()) != null) )
                mPrnDataList[i++] = line;
        }
        catch (FileNotFoundException e)
        {
            e.printStackTrace();
        }
        catch (IOException e)
        {
            e.printStackTrace();
        }
        finally
        {
            closeReaderQuietly(countReader);
            closeReaderQuietly(fillReader);
        }
    }
    return mPrnDataCount;
}

/** Closes a reader if non-null, reporting rather than propagating close-time errors. */
private void closeReaderQuietly(BufferedReader reader)
{
    if (reader != null)
    {
        try
        {
            reader.close();
        }
        catch (IOException e)
        {
            e.printStackTrace();
        }
    }
}
/**
 * Returns the portion of the input that precedes the first NUL ('\0')
 * character, wrapped in a StringBuffer. When the input contains no NUL the
 * whole string is returned; a null input yields null.
 *
 * @param input the possibly NUL-terminated string
 * @return the truncated buffer, or null when input is null
 */
private StringBuffer getNullTerminatedStringBuffer(String input)
{
    if (input == null)
        return null;
    int nullCharPos = input.indexOf('\0');
    if (nullCharPos < 0)
        // No terminator present: previously substring(0, -1) threw
        // StringIndexOutOfBoundsException in this case.
        return new StringBuffer(input);
    return new StringBuffer(input.substring(0, nullCharPos));
}
/**
 * Renders a template/format file into DATA_FILE. The file is scanned for
 * "$$[ ... ]$$" placeholders; literal text is copied through, while each
 * placeholder is expanded: 'Q' fields emit the quantity (optionally formatted
 * via a ':'-separated format spec with C-style escapes), 'N' fields emit all
 * remaining data lines, and numeric fields emit the corresponding entry of
 * mPrnDataList. This is a close port of the original C state machine.
 *
 * @param formatFileName the template file to expand
 * @param quantity the number of copies, substituted for 'Q' fields
 * @return ERR_OK on success; ERR_NOFILE when the template is missing; ERR_BUFF on I/O errors
 */
private ApdError prnGenericDrv(String formatFileName, int quantity)
{
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
// bLooking: still searching for the opening "$$["; bFound: inside a candidate token.
boolean bLooking = true;
boolean bFound = false;
int iLoop;
int iLen = 0;
int iPos = 0;
int iField = 0;
int iPcStr;
int iFormat;
int iHere;
int iNext;
StringBuffer outputContent = new StringBuffer();
StringBuffer pcNumber = new StringBuffer();
FileWriter outputFileStream = null;
StringBuffer nullTerminatedOutput = null;
FileReader fileReader = null;
ApdError res = ApdError.ERR_OK;
//mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_ok"));
try
{
File formatFile = new File(formatFileName);
fileReader = new FileReader(new File(formatFileName));
char[] formatContent = new char[(int)formatFile.length()];
fileReader.read(formatContent);
outputFileStream = new FileWriter(DATA_FILE);
outputContent.setLength(MAX_DATASTRING);
for (iLoop = 0; iLoop < formatContent.length; iLoop++)
{
if (formatContent[iLoop] == '$')
{
if ( bFound && (iLen == 1) )
{
//Reached the end of ']$$'
outputContent.setCharAt(iPos, '\0');
iLen = 0;
iPos = 0;
bFound = false;
bLooking = true;
nullTerminatedOutput = getNullTerminatedStringBuffer(outputContent.toString());
// A ':' inside the placeholder separates the field id from a format spec.
if ((iPcStr = (nullTerminatedOutput.indexOf(":"))) > 0)
{
outputContent.setCharAt(iPcStr, '\0');
iPcStr++;
String temp = nullTerminatedOutput.toString().substring(iPcStr);
nullTerminatedOutput.replace(0, temp.length() - 1, temp);
nullTerminatedOutput.delete(temp.length(), nullTerminatedOutput.length() - 1);
nullTerminatedOutput.setLength(temp.length());
// Identify which formatting directive the spec contains.
for (iFormat = 0; iFormat < ApdDataType.INVALID.ordinal(); iFormat++)
if (nullTerminatedOutput.toString().contains(prnFormatting[iFormat]))
break;
if (iFormat < ApdDataType.INVALID.ordinal())
{
// Translate C-style backslash escapes in the format spec in place.
for (iHere = iNext = 0;
iHere < nullTerminatedOutput.length();
iHere++, iNext++)
{
if (nullTerminatedOutput.charAt(iHere) == '\\')
{
iHere++;
switch (nullTerminatedOutput.charAt(iHere))
{
case '0':
{
nullTerminatedOutput.setCharAt(iHere, '\0');
break;
}
case 'a':
{
nullTerminatedOutput.setCharAt(iHere, '\007');
break;
}
case 'b':
{
nullTerminatedOutput.setCharAt(iHere, '\b');
break;
}
case 'f':
{
nullTerminatedOutput.setCharAt(iHere, '\f');
break;
}
case 'r':
{
nullTerminatedOutput.setCharAt(iHere, '\r');
break;
}
case 't':
{
nullTerminatedOutput.setCharAt(iHere, '\t');
break;
}
case 'n':
{
nullTerminatedOutput.setCharAt(iHere, '\n');
break;
}
case 'x':
{
// NOTE(review): pcNumber is a freshly constructed empty StringBuffer,
// so setCharAt(0, ...) throws IndexOutOfBoundsException the first time
// a "\xNN" escape is encountered — this path looks broken; confirm.
pcNumber.setCharAt(0, nullTerminatedOutput.charAt(iHere + 1));
pcNumber.setCharAt(1, nullTerminatedOutput.charAt(iHere + 2));
//pcNumber.setCharAt(2, '\0');
// NOTE(review): in a Java string literal "\b" is the backspace character,
// not the regex word-boundary "\\b", so this pattern can never match the
// intended hex-digit check — confirm and fix the escaping.
if (isPatternMatching(pcNumber.toString(), "\b[0-9a-fA-F]+\b") >= 0)
{
iHere += 2;
nullTerminatedOutput.setCharAt(iHere, (char)(Integer.valueOf(pcNumber.toString(), 16).intValue()));
}
else
{
iHere--;
}
break;
}
case '\\':
{
nullTerminatedOutput.setCharAt(iHere, '\\');
break;
}
default:
{
iHere--;
break;
}
}
if (iHere > iNext)
nullTerminatedOutput.setCharAt(iNext, outputContent.charAt(iHere));
}
}
if (iNext < nullTerminatedOutput.length())
nullTerminatedOutput.setCharAt(iNext, '\0');
}
}
else
{
iFormat = ApdDataType.INVALID.ordinal();
}
// 'Q' placeholder: emit the quantity using the detected format.
if (outputContent.charAt(0) == 'Q')
{
String formattedQuantity = null;
if (iFormat == ApdDataType.CHAR.ordinal())
formattedQuantity = String.format(nullTerminatedOutput.toString(), (char)quantity);
else if (iFormat == ApdDataType.HEX.ordinal())
formattedQuantity = String.format(nullTerminatedOutput.toString(), quantity);
else if (iFormat == ApdDataType.STRING.ordinal())
formattedQuantity = String.valueOf(quantity);
else
formattedQuantity = String.format("%d", quantity); //Java doesn't support %u
if (formattedQuantity != null)
outputFileStream.write(formattedQuantity);
}
// 'N' placeholder: emit all data lines starting at the given field index.
else if (outputContent.charAt(0) == 'N')
{
iField = Integer.valueOf(outputContent.substring(1,2));
while ( (iField > 0) && (iField < mPrnDataCount) )
{
if (mPrnDataList[iField].length() < MAX_DATASTRING)
{
outputFileStream.write(mPrnDataList[iField] + EOL);
}
iField++;
}
}
// Numeric placeholder: emit the single referenced data line.
else
{
iField = Integer.valueOf(nullTerminatedOutput.toString());
//If field number refers to valid data include it!
if ( (iField > 0) && (iField < mPrnDataCount) )
outputFileStream.write(mPrnDataList[iField]);
}
}
else
{
iLen++;
}
}
else
{
if (bFound) // ']' without a "$$" so put it with the data
{
if (iPos < outputContent.length())
{
outputContent.setCharAt(iPos++, ']');
}
else if (bLooking) // Buffer full, but no $$[, so send it anyway
{
outputFileStream.write(outputContent.toString(), 0, iPos);
iPos = 0;
}
}
if (bLooking) // If looking for the '[' then consider it found if more than two '$'
{
if ( (bFound = (formatContent[iLoop] == '[')) && (iLen > 1) != false)
iLen -= 2;
}
else // If within the "$$[" look for the ']'
{
bFound = formatContent[iLoop] == ']';
}
while (iLen > 0) // Put any surplus '$' in the data.
{
if (iPos < outputContent.length())
outputContent.setCharAt(iPos++, '$');
else
if (bLooking) // Buffer full, but no $$[, so send it anyway
{
outputFileStream.write(outputContent.toString(), 0, iPos);
iPos = 0;
}
iLen--;
}
if (bFound)
{
if (bLooking)
{
if (iPos > 0)
outputFileStream.write(outputContent.toString(), 0, iPos);
iLen = 0;
iPos = 0;
bFound = false;
bLooking = false;
}
else
iLen = 0;
}
else
{
if (iPos < outputContent.length())
outputContent.setCharAt(iPos++, formatContent[iLoop]);
else
{
if (bLooking) // Buffer full, but no $$[, so send it anyway
{
outputFileStream.write(outputContent.toString(), 0, iPos);
iPos = 0;
}
}
}
}
}
// Flush whatever literal text is still pending when the file ends.
if (iPos > 0)
outputFileStream.write(outputContent.toString(), 0, iPos);
}
catch (FileNotFoundException e)
{
res = ApdError.ERR_NOFILE;
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_filenotfound"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
}
catch (IOException e)
{
res = ApdError.ERR_BUFF;
mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_ioerror"));
Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
}
finally
{
try
{
if (outputFileStream != null)
outputFileStream.close();
if (fileReader != null)
fileReader.close();
}
catch (IOException e)
{
e.printStackTrace();
}
}
Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
return res;
}
/**
 * Tells whether stored-format printing is available for the currently
 * configured printer language.
 *
 * @return true when the configured language is one of C, E, M, R, S, T or Z
 */
private boolean isStoredFormatSupported()
{
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
    boolean res = false;
    if (mApdConfiguration != null)
    {
        Language language = mApdConfiguration.getLanguage();
        // Assign instead of returning early so the trailing "End" trace log
        // always runs (the previous early return skipped it).
        res = (language == Language.C) ||
              (language == Language.E) ||
              (language == Language.M) ||
              (language == Language.R) ||
              (language == Language.S) ||
              (language == Language.T) ||
              (language == Language.Z);
    }
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
    return res;
}
/**
 * Builds the printer job file (DATA_FILE) that invokes a format stored on the
 * printer, in the syntax of the given printer language: a format header
 * selecting the stored format, one entry per data field from mPrnDataList,
 * and a trailer that sets the print quantity where the language requires it.
 *
 * <p>mPrnDataList[0] is the stored format name/number; entries 1..mPrnDataCount-1
 * are the field values.</p>
 *
 * @param language the printer command language to emit
 * @param quantity number of labels/copies to print
 * @return ERR_OK on success, ERR_NOFILE if the data file could not be created,
 *         ERR_INIT on any other I/O failure
 */
private ApdError prnStoredFormat(Language language, int quantity)
{
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
    ApdError res = ApdError.ERR_OK;
    FileWriter fileOutputStream = null;
    StringBuffer pcTxBuff;
    try
    {
        pcTxBuff = new StringBuffer();
        // Recreate the job file from scratch for every print request.
        File dataFile = new File(DATA_FILE);
        if (dataFile.exists())
            dataFile.delete();
        fileOutputStream = new FileWriter(DATA_FILE);
        // --- Format header: select the stored format (mPrnDataList[0]).
        if (language == Language.R)
        {
            pcTxBuff.append(String.format("^P|%s|%d", mPrnDataList[0], quantity));
        }
        else if (language == Language.M)
        {
            pcTxBuff.append(String.format("{\r\nB,%s,N,%d |\r\n", mPrnDataList[0], quantity));
        }
        else if (language == Language.T)
        {
            String format = null;
            // NOTE(review): "\27" is an OCTAL escape in Java (0x17/ETB), not
            // ESC (0x1B). If ESC was intended this should be "\33" — TODO confirm
            // against the printer manual (original comment also flagged this).
            if (mApdConfiguration.getId() == 0)
                format = "\27X%c\01%c"; //TODO: This has to be re-checked
            else
                format = "X%c\01%c";
            pcTxBuff.append(String.format(format, (char)(Integer.valueOf(mPrnDataList[0]).intValue()), (char)quantity));
        }
        else if (language == Language.E)
        {
            pcTxBuff.append(String.format("FR\"%s\"\r\n?\r\n", mPrnDataList[0]));
        }
        else if (language == Language.C)
        {
            pcTxBuff.append(String.format("! UF F%s.FMT\r\n%d\r\n", mPrnDataList[0], quantity));
        }
        else if (language == Language.S)
        {
            // NOTE(review): "\27" is octal 0x17 here too — TODO confirm (see above).
            pcTxBuff.append(String.format("\27A\r\n\27TB%02d\r\n", Integer.valueOf(mPrnDataList[0]).intValue()));
        }
        else if (language == Language.Z)
        {
            pcTxBuff.append(String.format("^XA^XF%s^FS\r\n", mPrnDataList[0]));
        }
        if (pcTxBuff.length() > 0)
            fileOutputStream.write(pcTxBuff.toString());
        fileOutputStream.flush();
        pcTxBuff.delete(0, pcTxBuff.length()); //Reset the string buffer
        // --- Data fields: one entry per variable field of the stored format.
        for (int i=1; i < mPrnDataCount; i++)
        {
            if (language == Language.R)
            {
                pcTxBuff.append(String.format("|%s", mPrnDataList[i]));
            }
            else if (language == Language.M)
            {
                pcTxBuff.append(String.format("%d,\"%s\" |\r\n", i, mPrnDataList[i]));
            }
            else if (language == Language.T)
            {
                pcTxBuff.append(String.format("%s\n", mPrnDataList[i]));
            }
            else if ( (language == Language.E) || (language == Language.C) )
            {
                pcTxBuff.append(String.format("%s\r\n", mPrnDataList[i]));
            }
            else if (language == Language.S)
            {
                pcTxBuff.append(String.format("\27D%s\r\n", mPrnDataList[i]));
            }
            else if (language == Language.Z)
            {
                pcTxBuff.append(String.format("^FN%d^FD%s^FS\r\n", i, mPrnDataList[i]));
            }
        }
        if (pcTxBuff.length() > 0)
            fileOutputStream.write(pcTxBuff.toString());
        fileOutputStream.flush();
        pcTxBuff.delete(0, pcTxBuff.length()); //Reset the string buffer
        // --- Format trailer: quantity / end-of-job marker where applicable.
        if (language == Language.R)
        {
            pcTxBuff.append("|^");
        }
        else if (language == Language.M)
        {
            pcTxBuff.append("}\r\n");
        }
        else if (language == Language.T)
        {
            if (mApdConfiguration.getId() > 0)
            {
                // NOTE(review): Writer.write(int) emits a single char whose code
                // is the configuration id — looks intentional but verify.
                fileOutputStream.write(mApdConfiguration.getId());
                fileOutputStream.flush();
            }
        }
        else if (language == Language.E)
        {
            // Fixed: "%u" is not a valid Java format conversion and threw
            // UnknownFormatConversionException at runtime; quantity is an int.
            pcTxBuff.append(String.format("P%d,1\r\n", quantity));
        }
        else if (language == Language.C)
        {
            // CPCL needs no trailer: quantity was emitted in the header.
        }
        else if (language == Language.S)
        {
            pcTxBuff.append(String.format("\27Q%04d\r\n\27Z\r\n", quantity));
        }
        else if (language == Language.Z)
        {
            pcTxBuff.append(String.format("^PQ%d^XZ\r\n", quantity));
        }
        if (pcTxBuff.length() > 0)
            fileOutputStream.write(pcTxBuff.toString());
        fileOutputStream.flush();
    }
    catch (FileNotFoundException e)
    {
        res = ApdError.ERR_NOFILE;
        mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_filenotfound"));
        Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
    }
    catch (IOException e)
    {
        res = ApdError.ERR_INIT;
        mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_unknownerror"));
        Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
    }
    finally
    {
        try
        {
            if (fileOutputStream != null)
                fileOutputStream.close();
        }
        catch (IOException e)
        {
            e.printStackTrace();
        }
    }
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "End"));
    return res;
}
/**
 * Reads the given file fully into memory and sends its contents to the
 * printer through mApdTransport.
 *
 * @param fileName path of the job file to transmit
 * @return ERR_OK on success, ERR_NOFILE if the file does not exist or
 *         vanished before it could be opened, ERR_SEND on transmission or
 *         read failure
 */
private ApdError prnSendData(String fileName)
{
    Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, "Start"));
    ApdError res = ApdError.ERR_OK;
    byte buffer[] = null;
    InputStream is = null;
    File file = new File(fileName);
    if (file.exists() == false)
    {
        mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_filenotfound"));
        Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
        return ApdError.ERR_NOFILE;
    }
    try
    {
        is = new FileInputStream(file);
        buffer = new byte[(int)file.length()];
        // InputStream.read may return fewer bytes than requested; loop until
        // the buffer is full (or EOF) instead of trusting a single call.
        int offset = 0;
        while (offset < buffer.length)
        {
            int read = is.read(buffer, offset, buffer.length - offset);
            if (read < 0)
                break; // unexpected EOF: send what was read
            offset += read;
        }
        if (mApdTransport.write(buffer).ordinal() > ApdTransportError.OK.ordinal())
        {
            res = ApdError.ERR_SEND;
            mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_sendfailed"));
            Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
        }
    }
    catch (FileNotFoundException e)
    {
        // Fixed: this path previously left res == ERR_OK, reporting success
        // even though the file could not be opened.
        res = ApdError.ERR_NOFILE;
        mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_filenotfound"));
        Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
    }
    catch (NullPointerException e)
    {
        res = ApdError.ERR_SEND;
        mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_sendfailed"));
        Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
        e.printStackTrace();
    }
    catch (IOException e)
    {
        res = ApdError.ERR_SEND;
        mLastMessage = Common.mainActivity.getString(RhoExtManager.getResourceId("string","apd_sendfailed"));
        Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, mLastMessage));
    }
    finally
    {
        if (is != null)
            try
            {
                is.close();
            }
            catch (IOException e)
            {
                e.printStackTrace();
            }
    }
    return res;
}
/**
 * Immutable value holder pairing an APD command code with its raw
 * parameter string.
 */
private class ApdCommand
{
    private final int code;
    private final String params;

    /**
     * Creates a command.
     *
     * @param code   numeric command identifier
     * @param params raw parameter string for the command
     */
    public ApdCommand(int code, String params)
    {
        this.code = code;
        this.params = params;
    }

    /** @return the numeric command identifier */
    public int getCode() {
        return code;
    }

    /** @return the raw parameter string */
    public String getParams() {
        return params;
    }
}
}
| |
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) 1999-2006 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <p>
*/
package org.olat.core.gui.components.form.flexible.impl.elements;
import java.util.List;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.form.flexible.elements.FormLink;
import org.olat.core.gui.components.form.flexible.impl.FormEvent;
import org.olat.core.gui.components.form.flexible.impl.FormItemImpl;
import org.olat.core.gui.components.link.FormLinkFactory;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.control.Event;
import org.olat.core.util.StringHelper;
/**
* Description:<br>
* Links / Buttons within the flexi form have to be form items. This implementation of the {@link FormLink} interface adapts the {@link Link} component from the GUI
* Framework to the flexi form substructure.
* <p>
* implementation note:<br>
* although a form link is created by <code>new ...</code> the corresponding {@link Link} creation is deferred until the root form is available. E.g. this is typically
* the case when the form link is added to a layout or form.
* <P>
* Initial Date: 10.12.2006 <br>
*
* @author patrickb
*/
public class FormLinkImpl extends FormItemImpl implements FormLink {
    private Link component;
    private int presentation = Link.LINK;
    private String i18n = null;
    private String cmd = null;
    private boolean hasCustomEnabledCss = false;
    private boolean hasCustomDisabledCss = false;
    private String customEnabledLinkCSS = null;
    private String customDisabledLinkCSS = null;

    /**
     * creates a form link with the given name which acts also as command, i18n and component name.
     *
     * @param name
     */
    public FormLinkImpl(String name) {
        super(name);
    }

    /**
     * creates a form link with a given name and a i18n key. The name is used also as component name and as command string.
     *
     * @param name
     * @param i18n
     */
    public FormLinkImpl(String name, String i18n) {
        this(name);
        this.i18n = i18n;
    }

    /**
     * creates a form link with a different representation as they are defined by {@link Link#BUTTON} {@link Link#BUTTON_SMALL} {@link Link#BUTTON_XSMALL}
     * {@link Link#LINK_BACK} etc.
     *
     * @param name
     * @param cmd
     * @param i18n
     * @param presentation
     */
    public FormLinkImpl(String name, String cmd, String i18n, int presentation) {
        this(name, i18n);
        hasCustomEnabledCss = true;
        this.cmd = cmd;
        this.presentation = presentation;
    }

    /*
     * uses the FormLinkFactory to create the link associated with this formlink it is deferred to have the translator, and most of all the form id where the link must
     * dispatch to.
     */
    @Override
    protected void rootFormAvailable() {
        // create component if we have the root form
        String name = getName();
        cmd = cmd == null ? name : cmd;
        i18n = i18n == null ? name : i18n;
        if (hasCustomEnabledCss || hasCustomDisabledCss) {
            this.component = FormLinkFactory.createCustomFormLink(name, cmd, i18n, presentation, this.getRootForm());
            if (customEnabledLinkCSS != null) {
                this.component.setCustomEnabledLinkCSS(customEnabledLinkCSS);
            }
            if (customDisabledLinkCSS != null) {
                this.component.setCustomDisabledLinkCSS(customDisabledLinkCSS);
            }
        } else {
            this.component = FormLinkFactory.createFormLink(name, this.getRootForm());
            // set link text (shared logic with setI18nKey)
            updateDisplayText();
        }
        // if enabled or not must be set now in case it was set during construction time
        this.component.setEnabled(isEnabled());
    }

    /**
     * Applies the current i18n key to the underlying link component: non-translated
     * presentations get the key verbatim, all others get the translated text.
     * Extracted because the same rule was duplicated in {@link #rootFormAvailable()}
     * and {@link #setI18nKey(String)}.
     */
    private void updateDisplayText() {
        if ((presentation - Link.FLEXIBLEFORMLNK - Link.NONTRANSLATED) >= 0) {
            // don't translate non-translated links
            this.component.setCustomDisplayText(i18n);
        } else {
            // translate other links
            if (StringHelper.containsNonWhitespace(i18n)) {
                this.component.setCustomDisplayText(getTranslator().translate(i18n));
            }
        }
    }

    @Override
    public void dispatchFormRequest(UserRequest ureq) {
        getRootForm().fireFormEvent(ureq, new FormEvent(Event.DONE_EVENT, this, FormEvent.ONCLICK));
    }

    @Override
    public void evalFormRequest(UserRequest ureq) {
        // a link has no data to remember
    }

    @Override
    public void validate(List validationResults) {
        // typically a link does not validate its data
    }

    @Override
    public void reset() {
        // a link can not be resetted
    }

    @Override
    protected Component getFormItemComponent() {
        return component;
    }

    /**
     * @see org.olat.core.gui.components.form.flexible.elements.FormLink#setCustomEnabledLinkCSS(java.lang.String)
     */
    @Override
    public void setCustomEnabledLinkCSS(String customEnabledLinkCSS) {
        hasCustomEnabledCss = true;
        this.customEnabledLinkCSS = customEnabledLinkCSS;
        if (customEnabledLinkCSS != null && this.component != null) {
            this.component.setCustomEnabledLinkCSS(customEnabledLinkCSS);
        }
    }

    /**
     * @see org.olat.core.gui.components.form.flexible.elements.FormLink#setCustomDisabledLinkCSS(java.lang.String)
     */
    @Override
    public void setCustomDisabledLinkCSS(String customDisabledLinkCSS) {
        hasCustomDisabledCss = true;
        this.customDisabledLinkCSS = customDisabledLinkCSS;
        if (customDisabledLinkCSS != null && this.component != null) {
            this.component.setCustomDisabledLinkCSS(customDisabledLinkCSS);
        }
    }

    /**
     * @see org.olat.core.gui.components.form.flexible.elements.FormLink#setI18nKey(java.lang.String)
     */
    @Override
    public void setI18nKey(String i18n) {
        this.i18n = i18n;
        if (this.component != null) {
            updateDisplayText();
        }
    }

    @Override
    protected boolean translateLabel() {
        if (presentation == Link.NONTRANSLATED || (presentation == (Link.NONTRANSLATED + Link.FLEXIBLEFORMLNK))) {
            return false;
        } else {
            return true;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.planner.physical;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableMap;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.plan.RelOptRuleOperand;
import org.apache.calcite.rel.core.AggregateCall;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rel.type.RelDataTypeFieldImpl;
import org.apache.calcite.rel.type.RelRecordType;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.physical.base.GroupScan;
import org.apache.drill.exec.physical.base.ScanStats;
import org.apache.drill.exec.planner.logical.DrillAggregateRel;
import org.apache.drill.exec.planner.logical.DrillProjectRel;
import org.apache.drill.exec.planner.logical.DrillScanRel;
import org.apache.drill.exec.planner.logical.RelOptHelper;
import org.apache.drill.exec.store.ColumnExplorer;
import org.apache.drill.exec.store.direct.MetadataDirectGroupScan;
import org.apache.drill.exec.store.pojo.DynamicPojoRecordReader;
/**
* <p>
* This rule will convert <b>" select count(*) as mycount from table "</b>
* or <b>" select count(not-nullable-expr) as mycount from table "</b> into
* <pre>
* Project(mycount)
* \
* DirectGroupScan ( PojoRecordReader ( rowCount ))
*</pre>
* or <b>" select count(column) as mycount from table "</b> into
* <pre>
* Project(mycount)
* \
* DirectGroupScan (PojoRecordReader (columnValueCount))
*</pre>
* Rule can be applied if query contains multiple count expressions.
* <b>" select count(column1), count(column2), count(*) from table "</b>
* </p>
*
* <p>
* Currently, only parquet group scan has the exact row count and column value count,
* obtained from parquet row group info. This will save the cost to
* scan the whole parquet files.
* </p>
*/
public class ConvertCountToDirectScan extends Prule {
  public static final RelOptRule AGG_ON_PROJ_ON_SCAN = new ConvertCountToDirectScan(
      RelOptHelper.some(DrillAggregateRel.class,
                        RelOptHelper.some(DrillProjectRel.class,
                            RelOptHelper.any(DrillScanRel.class))), "Agg_on_proj_on_scan");
  public static final RelOptRule AGG_ON_SCAN = new ConvertCountToDirectScan(
      RelOptHelper.some(DrillAggregateRel.class,
                            RelOptHelper.any(DrillScanRel.class)), "Agg_on_scan");
  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ConvertCountToDirectScan.class);

  /** Creates a SplunkPushDownRule. */
  protected ConvertCountToDirectScan(RelOptRuleOperand rule, String id) {
    super(rule, "ConvertCountToDirectScan:" + id);
  }

  @Override
  public void onMatch(RelOptRuleCall call) {
    final DrillAggregateRel agg = (DrillAggregateRel) call.rel(0);
    final DrillScanRel scan = (DrillScanRel) call.rel(call.rels.length - 1);
    final DrillProjectRel project = call.rels.length == 3 ? (DrillProjectRel) call.rel(1) : null;
    final GroupScan oldGrpScan = scan.getGroupScan();
    final PlannerSettings settings = PrelUtil.getPlannerSettings(call.getPlanner());
    // Only apply the rule when:
    // 1) scan knows the exact row count in getSize() call,
    // 2) No GroupBY key,
    // 3) No distinct agg call.
    if (!(oldGrpScan.getScanStats(settings).getGroupScanProperty().hasExactRowCount()
        && agg.getGroupCount() == 0
        && !agg.containsDistinctCall())) {
      return;
    }
    Map<String, Long> result = collectCounts(settings, agg, scan, project);
    // Fixed: the message was missing its {} placeholder, so the counts were
    // never actually logged.
    logger.trace("Calculated the following aggregate counts: {}", result);
    // if could not determine the counts, rule won't be applied
    if (result.isEmpty()) {
      return;
    }
    final RelDataType scanRowType = constructDataType(agg, result.keySet());
    final DynamicPojoRecordReader<Long> reader = new DynamicPojoRecordReader<>(
        buildSchema(scanRowType.getFieldNames()),
        Collections.singletonList((List<Long>) new ArrayList<>(result.values())));
    final ScanStats scanStats = new ScanStats(ScanStats.GroupScanProperty.EXACT_ROW_COUNT, 1, 1, scanRowType.getFieldCount());
    final GroupScan directScan = new MetadataDirectGroupScan(reader, oldGrpScan.getFiles(), scanStats);
    final ScanPrel newScan = ScanPrel.create(scan,
        scan.getTraitSet().plus(Prel.DRILL_PHYSICAL).plus(DrillDistributionTrait.SINGLETON), directScan,
        scanRowType);
    final ProjectPrel newProject = new ProjectPrel(agg.getCluster(), agg.getTraitSet().plus(Prel.DRILL_PHYSICAL)
        .plus(DrillDistributionTrait.SINGLETON), newScan, prepareFieldExpressions(scanRowType), agg.getRowType());
    call.transformTo(newProject);
  }

  /**
   * Collects counts for each aggregation call.
   * Will return empty result map if was not able to determine count for at least one aggregation call,
   *
   * For each aggregate call will determine if count can be calculated. Collects counts only for COUNT function.
   * For star, not null expressions and implicit columns sets count to total record number.
   * For other cases obtains counts from group scan operator. Also count can not be calculated for partition columns.
   *
   * @param agg aggregate relational expression
   * @param scan scan relational expression
   * @param project project relational expression
   * @return result map where key is count column name, value is count value
   */
  private Map<String, Long> collectCounts(PlannerSettings settings, DrillAggregateRel agg, DrillScanRel scan, DrillProjectRel project) {
    final Set<String> implicitColumnsNames = ColumnExplorer.initImplicitFileColumns(settings.getOptions()).keySet();
    final GroupScan oldGrpScan = scan.getGroupScan();
    final long totalRecordCount = (long)oldGrpScan.getScanStats(settings).getRecordCount();
    final LinkedHashMap<String, Long> result = new LinkedHashMap<>();
    // Indexed loop because the generated result-column name embeds the call index.
    for (int i = 0; i < agg.getAggCallList().size(); i++) {
      AggregateCall aggCall = agg.getAggCallList().get(i);
      long cnt;
      // rule can be applied only for count function, return empty counts
      if (!"count".equalsIgnoreCase(aggCall.getAggregation().getName()) ) {
        return ImmutableMap.of();
      }
      if (containsStarOrNotNullInput(aggCall, agg)) {
        cnt = totalRecordCount;
      } else if (aggCall.getArgList().size() == 1) {
        // count(columnName) ==> Agg ( Scan )) ==> columnValueCount
        int index = aggCall.getArgList().get(0);
        if (project != null) {
          // project in the middle of Agg and Scan : Only when input of AggCall is a RexInputRef in Project, we find the index of Scan's field.
          // For instance,
          // Agg - count($0)
          //  \
          //  Proj - Exp={$1}
          //    \
          //   Scan (col1, col2).
          // return count of "col2" in Scan's metadata, if found.
          if (!(project.getProjects().get(index) instanceof RexInputRef)) {
            return ImmutableMap.of(); // do not apply for all other cases.
          }
          index = ((RexInputRef) project.getProjects().get(index)).getIndex();
        }
        String columnName = scan.getRowType().getFieldNames().get(index).toLowerCase();
        // for implicit column count will the same as total record count
        if (implicitColumnsNames.contains(columnName)) {
          cnt = totalRecordCount;
        } else {
          SchemaPath simplePath = SchemaPath.getSimplePath(columnName);
          if (ColumnExplorer.isPartitionColumn(settings.getOptions(), simplePath)) {
            return ImmutableMap.of();
          }
          cnt = oldGrpScan.getColumnValueCount(simplePath);
          if (cnt == GroupScan.NO_COLUMN_STATS) {
            // if column stats is not available don't apply this rule, return empty counts
            return ImmutableMap.of();
          }
        }
      } else {
        return ImmutableMap.of();
      }
      String name = "count" + i + "$" + (aggCall.getName() == null ? aggCall.toString() : aggCall.getName());
      result.put(name, cnt);
    }
    return ImmutableMap.copyOf(result);
  }

  /**
   * Checks if aggregate call contains star or non-null expression:
   * <pre>
   * count(*)  == >  empty arg  ==>  rowCount
   * count(Not-null-input) ==> rowCount
   * </pre>
   *
   * @param aggregateCall aggregate call
   * @param aggregate aggregate relation expression
   * @return true of aggregate call contains star or non-null expression
   */
  private boolean containsStarOrNotNullInput(AggregateCall aggregateCall, DrillAggregateRel aggregate) {
    return aggregateCall.getArgList().isEmpty() ||
        (aggregateCall.getArgList().size() == 1 &&
            !aggregate.getInput().getRowType().getFieldList().get(aggregateCall.getArgList().get(0)).getType().isNullable());
  }

  /**
   * For each aggregate call creates field based on its name with bigint type.
   * Constructs record type for created fields.
   *
   * @param aggregateRel aggregate relation expression
   * @param fieldNames field names
   * @return record type
   */
  private RelDataType constructDataType(DrillAggregateRel aggregateRel, Collection<String> fieldNames) {
    List<RelDataTypeField> fields = new ArrayList<>();
    Iterator<String> filedNamesIterator = fieldNames.iterator();
    int fieldIndex = 0;
    while (filedNamesIterator.hasNext()) {
      RelDataTypeField field = new RelDataTypeFieldImpl(
          filedNamesIterator.next(),
          fieldIndex++,
          aggregateRel.getCluster().getTypeFactory().createSqlType(SqlTypeName.BIGINT));
      fields.add(field);
    }
    return new RelRecordType(fields);
  }

  /**
   * Builds schema based on given field names.
   * Type for each schema is set to long.class.
   *
   * @param fieldNames field names
   * @return schema
   */
  private LinkedHashMap<String, Class<?>> buildSchema(List<String> fieldNames) {
    LinkedHashMap<String, Class<?>> schema = new LinkedHashMap<>();
    for (String fieldName: fieldNames) {
      schema.put(fieldName, long.class);
    }
    return schema;
  }

  /**
   * For each field creates row expression.
   *
   * @param rowType row type
   * @return list of row expressions
   */
  private List<RexNode> prepareFieldExpressions(RelDataType rowType) {
    List<RexNode> expressions = new ArrayList<>();
    for (int i = 0; i < rowType.getFieldCount(); i++) {
      expressions.add(RexInputRef.of(i, rowType));
    }
    return expressions;
  }
}
| |
/*******************************************************************************
* Copyright 2013 Open mHealth
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.openmhealth.reference.domain;
import java.util.regex.Pattern;
import name.jenkins.paul.john.concordia.Concordia;
import name.jenkins.paul.john.concordia.exception.ConcordiaException;
import name.jenkins.paul.john.concordia.validator.ValidationController;
import org.openmhealth.reference.exception.OmhException;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.JsonNode;
/**
* <p>
* A schema as defined by the Open mHealth specification.
* </p>
*
* <p>
* This class is immutable.
* </p>
*
* @author John Jenkins
*/
public class Schema implements OmhObject {
	/**
	 * The version of this class for serialization purposes.
	 */
	private static final long serialVersionUID = 1L;
	/**
	 * The pattern to use for validating schema IDs.
	 */
	private static final Pattern PATTERN_ID =
		Pattern.compile("omh(:[a-zA-Z0-9_]+)+");
	/**
	 * The JSON key for the ID of a schema.
	 */
	public static final String JSON_KEY_ID = "schema_id";
	/**
	 * The JSON key for the version of the schema.
	 */
	public static final String JSON_KEY_VERSION = "schema_version";
	/**
	 * The JSON key for the Concordia schema.
	 */
	public static final String JSON_KEY_SCHEMA = "schema";
	/**
	 * The JSON key for the {@link ValidationController} that will be used to
	 * build the underlying {@link Concordia} object. If not set or not given,
	 * the default one, {@link ValidationController#BASIC_CONTROLLER} will be
	 * used.
	 */
	public static final String JSON_KEY_VALIDATION_CONTROLLER =
		"validation_controller";
	/**
	 * The schema's ID.
	 */
	@JsonProperty(JSON_KEY_ID)
	private final String id;
	/**
	 * The schema's version.
	 */
	@JsonProperty(JSON_KEY_VERSION)
	private final long version;
	/**
	 * The actual schema for this {@link Schema} object.
	 */
	@JsonProperty(JSON_KEY_SCHEMA)
	private final Concordia schema;
	/**
	 * Creates a new schema (registry entry).
	 *
	 * @param id
	 *        The ID for this schema.
	 *
	 * @param version
	 *        The version of this schema.
	 *
	 * @param schema
	 *        The Concordia schema that defines this Schema object.
	 *
	 * @throws OmhException
	 *         A parameter was invalid.
	 */
	@JsonCreator
	public Schema(
		@JsonProperty(JSON_KEY_ID) final String id,
		@JsonProperty(JSON_KEY_VERSION) final long version,
		@JsonProperty(JSON_KEY_SCHEMA) final Concordia schema)
		throws OmhException {
		// Validate the ID. validateId() already rejects null, empty and
		// malformed IDs, so the checks are not duplicated here.
		this.id = validateId(id);
		// Validate the version.
		this.version = validateVersion(version);
		// Make sure the schema is not null.
		if(schema == null) {
			throw new OmhException("The schema is null.");
		}
		else {
			this.schema = schema;
		}
	}
	/**
	 * Returns the unique identifier for this schema.
	 *
	 * @return The unique identifier for this schema.
	 */
	public String getId() {
		return id;
	}
	/**
	 * Returns the version of this schema.
	 *
	 * @return The version of this schema.
	 */
	public long getVersion() {
		return version;
	}
	/**
	 * Returns the schema.
	 *
	 * @return The schema.
	 */
	public Concordia getSchema() {
		return schema;
	}
	/**
	 * Validates some data.
	 *
	 * @param owner
	 *        The owner of the data that is being validated. This is needed to
	 *        build the {@link Data} object.
	 *
	 * @param metaData
	 *        The meta-data for the data that is being validated. This is
	 *        needed to build the {@link Data} object.
	 *
	 * @param data
	 *        The data to be validated.
	 *
	 * @return The validated data as a {@link Data} object.
	 *
	 * @throws OmhException
	 *         The data was null or invalid.
	 */
	public Data validateData(
		final String owner,
		final MetaData metaData,
		final JsonNode data)
		throws OmhException {
		// Ensure the data is not null.
		if(data == null) {
			throw new OmhException("The data field is null.");
		}
		// Validate the data.
		try {
			schema.validateData(data);
		}
		catch(ConcordiaException e) {
			throw new OmhException("The data is invalid.", e);
		}
		// Return the result.
		return new Data(owner, id, version, metaData, data);
	}
	/**
	 * Validates that the ID follows our rules.
	 *
	 * @param id
	 *        The ID to be validated.
	 *
	 * @return The validated and, potentially, simplified schema, e.g. trimmed.
	 *
	 * @throws OmhException
	 *         The ID is invalid.
	 */
	public static String validateId(final String id) throws OmhException {
		// Validate that the ID is not null.
		if(id == null) {
			throw new OmhException("The ID is null.");
		}
		// Remove surrounding whitespace.
		String idTrimmed = id.trim();
		// Validate that the ID is not empty or only whitespace.
		if(idTrimmed.length() == 0) {
			throw new OmhException("The ID is empty or only whitespace.");
		}
		// Validate that the trimmed ID matches the pattern.
		if(! PATTERN_ID.matcher(idTrimmed).matches()) {
			throw
				new OmhException(
					"The schema ID is invalid. It must be colon " +
						"deliminated, alphanumeric sections, with or " +
						"without underscores, where the first section is " +
						"\"omh\": " +
						idTrimmed);
		}
		// Return the trimmed ID.
		return idTrimmed;
	}
	/**
	 * Validates that the version follows our rules.
	 *
	 * @param version
	 *        The version to be validated.
	 *
	 * @return The version as it was given.
	 *
	 * @throws OmhException
	 *         The version is invalid.
	 */
	public static long validateVersion(
		final long version)
		throws OmhException {
		// The version must be positive.
		if(version <= 0) {
			throw new OmhException("The version must be positive.");
		}
		return version;
	}
	/**
	 * Validates that the chunk size follows our rules.
	 *
	 * @param chunkSize
	 *        The chunk size to validate.
	 *
	 * @return The chunk size as it was given.
	 *
	 * @throws OmhException
	 *         The chunk size is invalid.
	 */
	public static long validateChunkSize(
		final long chunkSize)
		throws OmhException {
		// The chunk size must be positive.
		if(chunkSize <= 0) {
			throw new OmhException("The chunk size must be positive.");
		}
		return chunkSize;
	}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/bigtable/admin/table/v1/bigtable_table_service_messages.proto
package com.google.bigtable.admin.table.v1;
/**
* Protobuf type {@code google.bigtable.admin.table.v1.ListTablesRequest}
*/
public final class ListTablesRequest extends
com.google.protobuf.GeneratedMessage implements
// @@protoc_insertion_point(message_implements:google.bigtable.admin.table.v1.ListTablesRequest)
ListTablesRequestOrBuilder {
// Use ListTablesRequest.newBuilder() to construct.
private ListTablesRequest(com.google.protobuf.GeneratedMessage.Builder builder) {
super(builder);
}
private ListTablesRequest() {
name_ = "";
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
private ListTablesRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) {
this();
int mutable_bitField0_ = 0;
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!input.skipField(tag)) {
done = true;
}
break;
}
case 10: {
com.google.protobuf.ByteString bs = input.readBytes();
name_ = bs;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw new RuntimeException(e.setUnfinishedMessage(this));
} catch (java.io.IOException e) {
throw new RuntimeException(
new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this));
} finally {
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.bigtable.admin.table.v1.BigtableTableServiceMessagesProto.internal_static_google_bigtable_admin_table_v1_ListTablesRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.bigtable.admin.table.v1.BigtableTableServiceMessagesProto.internal_static_google_bigtable_admin_table_v1_ListTablesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.bigtable.admin.table.v1.ListTablesRequest.class, com.google.bigtable.admin.table.v1.ListTablesRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
// Holds either a java.lang.String or a ByteString; converted lazily on access
// and cached in whichever form was last requested. volatile for safe publication.
private volatile java.lang.Object name_;
/**
 * <code>optional string name = 1;</code>
 *
 * <pre>
 * The unique name of the cluster for which tables should be listed.
 * </pre>
 */
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (bs.isValidUtf8()) {
      // Cache the decoded form only when the bytes are valid UTF-8, so the
      // original bytes are preserved for re-serialization otherwise.
      name_ = s;
    }
    return s;
  }
}
/**
 * <code>optional string name = 1;</code>
 *
 * <pre>
 * The unique name of the cluster for which tables should be listed.
 * </pre>
 */
public com.google.protobuf.ByteString
    getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    name_ = b;  // cache the encoded form
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
// Always initialized: proto3 message with no required fields. Result memoized.
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes to the wire format. The name field is omitted when empty
// (proto3 default values are not written).
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  if (!getNameBytes().isEmpty()) {
    output.writeBytes(1, getNameBytes());
  }
}
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the serialized byte size; must mirror writeTo.
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;
  size = 0;
  if (!getNameBytes().isEmpty()) {
    size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(1, getNameBytes());
  }
  memoizedSerializedSize = size;
  return size;
}
private static final long serialVersionUID = 0L;
// Static parse entry points, one per input representation; all delegate to PARSER.
public static com.google.bigtable.admin.table.v1.ListTablesRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.bigtable.admin.table.v1.ListTablesRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.bigtable.admin.table.v1.ListTablesRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.bigtable.admin.table.v1.ListTablesRequest parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.bigtable.admin.table.v1.ListTablesRequest parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static com.google.bigtable.admin.table.v1.ListTablesRequest parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes.
public static com.google.bigtable.admin.table.v1.ListTablesRequest parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input);
}
public static com.google.bigtable.admin.table.v1.ListTablesRequest parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static com.google.bigtable.admin.table.v1.ListTablesRequest parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static com.google.bigtable.admin.table.v1.ListTablesRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
// Builder factory methods.
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated with the given prototype's field values.
public static Builder newBuilder(com.google.bigtable.admin.table.v1.ListTablesRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
  // Skip the mergeFrom copy when this is the (all-defaults) default instance.
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Protobuf type {@code google.bigtable.admin.table.v1.ListTablesRequest}
 *
 * <p>Mutable builder for {@link ListTablesRequest}; obtained via
 * {@code ListTablesRequest.newBuilder()}.
 */
public static final class Builder extends
    com.google.protobuf.GeneratedMessage.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:google.bigtable.admin.table.v1.ListTablesRequest)
    com.google.bigtable.admin.table.v1.ListTablesRequestOrBuilder {
  // Static descriptor for the message type this builder produces.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.bigtable.admin.table.v1.BigtableTableServiceMessagesProto.internal_static_google_bigtable_admin_table_v1_ListTablesRequest_descriptor;
  }
  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.bigtable.admin.table.v1.BigtableTableServiceMessagesProto.internal_static_google_bigtable_admin_table_v1_ListTablesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.bigtable.admin.table.v1.ListTablesRequest.class, com.google.bigtable.admin.table.v1.ListTablesRequest.Builder.class);
  }
  // Construct using com.google.bigtable.admin.table.v1.ListTablesRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }
  private Builder(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    // No sub-message fields to eagerly initialize for this message type.
    if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
    }
  }
  // Resets all fields to their proto3 defaults.
  public Builder clear() {
    super.clear();
    name_ = "";
    return this;
  }
  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return com.google.bigtable.admin.table.v1.BigtableTableServiceMessagesProto.internal_static_google_bigtable_admin_table_v1_ListTablesRequest_descriptor;
  }
  public com.google.bigtable.admin.table.v1.ListTablesRequest getDefaultInstanceForType() {
    return com.google.bigtable.admin.table.v1.ListTablesRequest.getDefaultInstance();
  }
  // Builds the message, verifying isInitialized (always true for proto3).
  public com.google.bigtable.admin.table.v1.ListTablesRequest build() {
    com.google.bigtable.admin.table.v1.ListTablesRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  // Copies builder state into a new immutable message without validation.
  public com.google.bigtable.admin.table.v1.ListTablesRequest buildPartial() {
    com.google.bigtable.admin.table.v1.ListTablesRequest result = new com.google.bigtable.admin.table.v1.ListTablesRequest(this);
    result.name_ = name_;
    onBuilt();
    return result;
  }
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.bigtable.admin.table.v1.ListTablesRequest) {
      return mergeFrom((com.google.bigtable.admin.table.v1.ListTablesRequest)other);
    } else {
      // Fall back to reflective, field-by-field merge for foreign message types.
      super.mergeFrom(other);
      return this;
    }
  }
  public Builder mergeFrom(com.google.bigtable.admin.table.v1.ListTablesRequest other) {
    if (other == com.google.bigtable.admin.table.v1.ListTablesRequest.getDefaultInstance()) return this;
    if (!other.getName().isEmpty()) {
      name_ = other.name_;
      onChanged();
    }
    // NOTE: onChanged() may fire twice here (once above, once below); the call
    // only marks the builder dirty, so the duplication is harmless.
    onChanged();
    return this;
  }
  public final boolean isInitialized() {
    return true;
  }
  // Merges fields parsed from a wire-format stream; on parse failure the
  // partially-parsed message is still merged in (see finally block).
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    com.google.bigtable.admin.table.v1.ListTablesRequest parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage = (com.google.bigtable.admin.table.v1.ListTablesRequest) e.getUnfinishedMessage();
      throw e;
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  // Builder-side storage: String or ByteString, same lazy scheme as the message.
  private java.lang.Object name_ = "";
  /**
   * <code>optional string name = 1;</code>
   *
   * <pre>
   * The unique name of the cluster for which tables should be listed.
   * </pre>
   */
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (bs.isValidUtf8()) {
        name_ = s;  // cache decoded form only for valid UTF-8
      }
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   * <code>optional string name = 1;</code>
   *
   * <pre>
   * The unique name of the cluster for which tables should be listed.
   * </pre>
   */
  public com.google.protobuf.ByteString
      getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      name_ = b;  // cache encoded form
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   * <code>optional string name = 1;</code>
   *
   * <pre>
   * The unique name of the cluster for which tables should be listed.
   * </pre>
   */
  public Builder setName(
      java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    name_ = value;
    onChanged();
    return this;
  }
  /**
   * <code>optional string name = 1;</code>
   *
   * <pre>
   * The unique name of the cluster for which tables should be listed.
   * </pre>
   */
  public Builder clearName() {
    name_ = getDefaultInstance().getName();
    onChanged();
    return this;
  }
  /**
   * <code>optional string name = 1;</code>
   *
   * <pre>
   * The unique name of the cluster for which tables should be listed.
   * </pre>
   */
  public Builder setNameBytes(
      com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    name_ = value;
    onChanged();
    return this;
  }
  // Unknown fields are intentionally discarded by this generated class,
  // so both unknown-field mutators are no-ops.
  public final Builder setUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return this;
  }
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return this;
  }
  // @@protoc_insertion_point(builder_scope:google.bigtable.admin.table.v1.ListTablesRequest)
}
// @@protoc_insertion_point(class_scope:google.bigtable.admin.table.v1.ListTablesRequest)
// Singleton default instance with all fields at proto3 defaults.
private static final com.google.bigtable.admin.table.v1.ListTablesRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.bigtable.admin.table.v1.ListTablesRequest();
}
public static com.google.bigtable.admin.table.v1.ListTablesRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser that delegates to the wire-format constructor; unwraps the
// RuntimeException wrapper the constructor uses for checked parse errors.
public static final com.google.protobuf.Parser<ListTablesRequest> PARSER =
    new com.google.protobuf.AbstractParser<ListTablesRequest>() {
  public ListTablesRequest parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    try {
      return new ListTablesRequest(input, extensionRegistry);
    } catch (RuntimeException e) {
      if (e.getCause() instanceof
          com.google.protobuf.InvalidProtocolBufferException) {
        throw (com.google.protobuf.InvalidProtocolBufferException)
            e.getCause();
      }
      throw e;
    }
  }
};
public static com.google.protobuf.Parser<ListTablesRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListTablesRequest> getParserForType() {
  return PARSER;
}
public com.google.bigtable.admin.table.v1.ListTablesRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dataflow.sdk.transforms;
import com.google.cloud.dataflow.sdk.coders.AtomicCoder;
import com.google.cloud.dataflow.sdk.coders.BigEndianLongCoder;
import com.google.cloud.dataflow.sdk.coders.Coder;
import com.google.cloud.dataflow.sdk.coders.CoderException;
import com.google.cloud.dataflow.sdk.coders.CoderRegistry;
import com.google.cloud.dataflow.sdk.coders.DoubleCoder;
import com.google.cloud.dataflow.sdk.transforms.Combine.AccumulatingCombineFn.Accumulator;
import com.google.common.base.MoreObjects;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Objects;
/**
 * {@code PTransform}s for computing the arithmetic mean
 * (a.k.a. average) of the elements in a {@code PCollection}, or the
 * mean of the values associated with each key in a
 * {@code PCollection} of {@code KV}s.
 *
 * <p>Example 1: get the mean of a {@code PCollection} of {@code Long}s.
 * <pre> {@code
 * PCollection<Long> input = ...;
 * PCollection<Double> mean = input.apply(Mean.<Long>globally());
 * } </pre>
 *
 * <p>Example 2: calculate the mean of the {@code Integer}s
 * associated with each unique key (which is of type {@code String}).
 * <pre> {@code
 * PCollection<KV<String, Integer>> input = ...;
 * PCollection<KV<String, Double>> meanPerKey =
 *     input.apply(Mean.<String, Integer>perKey());
 * } </pre>
 */
public class Mean {
  private Mean() { } // Namespace only

  /**
   * Returns a {@code PTransform} that takes an input
   * {@code PCollection<NumT>} and returns a
   * {@code PCollection<Double>} whose contents is the mean of the
   * input {@code PCollection}'s elements, or
   * {@code Double.NaN} if there are no elements.
   *
   * <p>(Fixed doc: {@link CountSum#extractOutput} yields {@code Double.NaN},
   * not {@code 0}, when zero elements are combined.)
   *
   * @param <NumT> the type of the {@code Number}s being combined
   */
  public static <NumT extends Number> Combine.Globally<NumT, Double> globally() {
    return Combine.<NumT, Double>globally(new MeanFn<>()).named("Mean.Globally");
  }

  /**
   * Returns a {@code PTransform} that takes an input
   * {@code PCollection<KV<K, N>>} and returns a
   * {@code PCollection<KV<K, Double>>} that contains an output
   * element mapping each distinct key in the input
   * {@code PCollection} to the mean of the values associated with
   * that key in the input {@code PCollection}.
   *
   * <p>See {@link Combine.PerKey} for how this affects timestamps and bucketing.
   *
   * @param <K> the type of the keys
   * @param <NumT> the type of the {@code Number}s being combined
   */
  public static <K, NumT extends Number> Combine.PerKey<K, NumT, Double> perKey() {
    return Combine.<K, NumT, Double>perKey(new MeanFn<>()).named("Mean.PerKey");
  }

  /////////////////////////////////////////////////////////////////////////////

  /**
   * A {@code Combine.CombineFn} that computes the arithmetic mean
   * (a.k.a. average) of an {@code Iterable} of numbers of type
   * {@code N}, useful as an argument to {@link Combine#globally} or
   * {@link Combine#perKey}.
   *
   * <p>Returns {@code Double.NaN} if combining zero elements.
   *
   * @param <NumT> the type of the {@code Number}s being combined
   */
  static class MeanFn<NumT extends Number>
      extends Combine.AccumulatingCombineFn<NumT, CountSum<NumT>, Double> {
    /**
     * Constructs a combining function that computes the mean over
     * a collection of values of type {@code N}.
     */
    public MeanFn() {}

    @Override
    public CountSum<NumT> createAccumulator() {
      return new CountSum<>();
    }

    @Override
    public Coder<CountSum<NumT>> getAccumulatorCoder(
        CoderRegistry registry, Coder<NumT> inputCoder) {
      return new CountSumCoder<>();
    }
  }

  /**
   * Accumulator class for {@link MeanFn}: tracks the running element
   * count and running sum; the mean is derived in {@link #extractOutput}.
   */
  static class CountSum<NumT extends Number>
      implements Accumulator<NumT, CountSum<NumT>, Double> {

    long count = 0;
    double sum = 0.0;

    public CountSum() {
      this(0, 0);
    }

    public CountSum(long count, double sum) {
      this.count = count;
      this.sum = sum;
    }

    @Override
    public void addInput(NumT element) {
      count++;
      sum += element.doubleValue();
    }

    @Override
    public void mergeAccumulator(CountSum<NumT> accumulator) {
      count += accumulator.count;
      sum += accumulator.sum;
    }

    /** Returns the mean, or {@code Double.NaN} if no elements were added. */
    @Override
    public Double extractOutput() {
      return count == 0 ? Double.NaN : sum / count;
    }

    @Override
    public boolean equals(Object other) {
      if (!(other instanceof CountSum)) {
        return false;
      }
      CountSum<?> otherCountSum = (CountSum<?>) other;
      // Compare doubles by bit pattern rather than with ==, so that equals()
      // stays reflexive when sum is NaN and stays consistent with hashCode()
      // (Objects.hash uses Double.hashCode, which distinguishes 0.0 from -0.0).
      return (count == otherCountSum.count)
          && (Double.doubleToLongBits(sum)
              == Double.doubleToLongBits(otherCountSum.sum));
    }

    @Override
    public int hashCode() {
      return Objects.hash(count, sum);
    }

    @Override
    public String toString() {
      return MoreObjects.toStringHelper(this)
          .add("count", count)
          .add("sum", sum)
          .toString();
    }
  }

  /**
   * Coder for {@link CountSum}: encodes the count as a big-endian long
   * followed by the sum as a double, both in nested context.
   */
  static class CountSumCoder<NumT extends Number>
      extends AtomicCoder<CountSum<NumT>> {
    private static final Coder<Long> LONG_CODER = BigEndianLongCoder.of();
    private static final Coder<Double> DOUBLE_CODER = DoubleCoder.of();

    @Override
    public void encode(CountSum<NumT> value, OutputStream outStream, Coder.Context context)
        throws CoderException, IOException {
      Coder.Context nestedContext = context.nested();
      LONG_CODER.encode(value.count, outStream, nestedContext);
      DOUBLE_CODER.encode(value.sum, outStream, nestedContext);
    }

    @Override
    public CountSum<NumT> decode(InputStream inStream, Coder.Context context)
        throws CoderException, IOException {
      Coder.Context nestedContext = context.nested();
      // Decode in the same order as encode: count first, then sum.
      return new CountSum<>(
          LONG_CODER.decode(inStream, nestedContext),
          DOUBLE_CODER.decode(inStream, nestedContext));
    }
  }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/tasks/v2beta2/cloudtasks.proto
package com.google.cloud.tasks.v2beta2;
/**
*
*
* <pre>
* Request message for forcing a task to run now using
* [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask].
* </pre>
*
* Protobuf type {@code google.cloud.tasks.v2beta2.RunTaskRequest}
*/
public final class RunTaskRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.tasks.v2beta2.RunTaskRequest)
RunTaskRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use RunTaskRequest.newBuilder() to construct.
private RunTaskRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor: proto3 defaults (empty name, enum number 0).
private RunTaskRequest() {
  name_ = "";
  responseView_ = 0;
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new RunTaskRequest();
}

@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  // This message preserves unknown fields captured during parsing.
  return this.unknownFields;
}
// Wire-format constructor: reads tagged fields until end of stream,
// retaining any unrecognized fields in unknownFields.
private RunTaskRequest(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:  // end of stream
          done = true;
          break;
        case 10:
          {
            // Field 1 (name), length-delimited; proto3 enforces valid UTF-8.
            java.lang.String s = input.readStringRequireUtf8();
            name_ = s;
            break;
          }
        case 16:
          {
            // Field 2 (response_view), varint enum; raw number stored as int
            // so unrecognized enum values round-trip.
            int rawValue = input.readEnum();
            responseView_ = rawValue;
            break;
          }
        default:
          {
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Returns the static protobuf descriptor (message-type metadata) for this type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.tasks.v2beta2.CloudTasksProto
      .internal_static_google_cloud_tasks_v2beta2_RunTaskRequest_descriptor;
}

// Wires the reflection-based field accessors to this class and its Builder.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.tasks.v2beta2.CloudTasksProto
      .internal_static_google_cloud_tasks_v2beta2_RunTaskRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.tasks.v2beta2.RunTaskRequest.class,
          com.google.cloud.tasks.v2beta2.RunTaskRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
// Holds either a java.lang.String or a ByteString; converted lazily and
// cached on access. volatile for safe publication across threads.
private volatile java.lang.Object name_;
/**
 *
 *
 * <pre>
 * Required. The task name. For example:
 * `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The name.
 */
@java.lang.Override
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // UTF-8 validity was already enforced at parse time (readStringRequireUtf8),
    // so the decoded form can be cached unconditionally.
    name_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Required. The task name. For example:
 * `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for name.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    name_ = b;  // cache the encoded form
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int RESPONSE_VIEW_FIELD_NUMBER = 2;
// Stored as the raw wire number so unrecognized enum values are preserved.
private int responseView_;
/**
 *
 *
 * <pre>
 * The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta2.Task] will be
 * returned.
 * By default response_view is [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
 * information is retrieved by default because some data, such as
 * payloads, might be desirable to return only when needed because
 * of its large size or because of the sensitivity of data that it
 * contains.
 * Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
 * `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
 * permission on the [Task][google.cloud.tasks.v2beta2.Task] resource.
 * </pre>
 *
 * <code>.google.cloud.tasks.v2beta2.Task.View response_view = 2;</code>
 *
 * @return The enum numeric value on the wire for responseView.
 */
@java.lang.Override
public int getResponseViewValue() {
  return responseView_;
}
/**
 *
 *
 * <pre>
 * The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta2.Task] will be
 * returned.
 * By default response_view is [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
 * information is retrieved by default because some data, such as
 * payloads, might be desirable to return only when needed because
 * of its large size or because of the sensitivity of data that it
 * contains.
 * Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
 * `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
 * permission on the [Task][google.cloud.tasks.v2beta2.Task] resource.
 * </pre>
 *
 * <code>.google.cloud.tasks.v2beta2.Task.View response_view = 2;</code>
 *
 * @return The responseView.
 */
@java.lang.Override
public com.google.cloud.tasks.v2beta2.Task.View getResponseView() {
  @SuppressWarnings("deprecation")
  com.google.cloud.tasks.v2beta2.Task.View result =
      com.google.cloud.tasks.v2beta2.Task.View.valueOf(responseView_);
  // valueOf returns null for wire numbers not in the enum; map to UNRECOGNIZED.
  return result == null ? com.google.cloud.tasks.v2beta2.Task.View.UNRECOGNIZED : result;
}
// -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

// Always initialized: proto3 message with no required fields. Result memoized.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}

// Serializes to the wire format; proto3 default values (empty name,
// VIEW_UNSPECIFIED) are omitted, unknown fields are written last.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
  }
  if (responseView_ != com.google.cloud.tasks.v2beta2.Task.View.VIEW_UNSPECIFIED.getNumber()) {
    output.writeEnum(2, responseView_);
  }
  unknownFields.writeTo(output);
}

// Computes (and memoizes in memoizedSize) the serialized byte size; mirrors writeTo.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
  }
  if (responseView_ != com.google.cloud.tasks.v2beta2.Task.View.VIEW_UNSPECIFIED.getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, responseView_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over name, raw responseView number, and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.tasks.v2beta2.RunTaskRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.tasks.v2beta2.RunTaskRequest other =
      (com.google.cloud.tasks.v2beta2.RunTaskRequest) obj;

  if (!getName().equals(other.getName())) return false;
  if (responseView_ != other.responseView_) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}

// Standard generated hash: seeded with the descriptor hash, folding in each
// field number/value pair. Memoized in memoizedHashCode.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  hash = (37 * hash) + RESPONSE_VIEW_FIELD_NUMBER;
  hash = (53 * hash) + responseView_;
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Static parse entry points, one per input representation; byte-based
// overloads delegate to PARSER, stream-based overloads go through the
// GeneratedMessageV3 IOException-translating helpers.
public static com.google.cloud.tasks.v2beta2.RunTaskRequest parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.tasks.v2beta2.RunTaskRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.tasks.v2beta2.RunTaskRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.tasks.v2beta2.RunTaskRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.tasks.v2beta2.RunTaskRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.tasks.v2beta2.RunTaskRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.tasks.v2beta2.RunTaskRequest parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.tasks.v2beta2.RunTaskRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message bytes.
public static com.google.cloud.tasks.v2beta2.RunTaskRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.tasks.v2beta2.RunTaskRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.tasks.v2beta2.RunTaskRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.tasks.v2beta2.RunTaskRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Creates a builder pre-populated with the given prototype's field values.
public static Builder newBuilder(com.google.cloud.tasks.v2beta2.RunTaskRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Skip the mergeFrom copy when this is the (all-defaults) default instance.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Request message for forcing a task to run now using
* [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask].
* </pre>
*
* Protobuf type {@code google.cloud.tasks.v2beta2.RunTaskRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.tasks.v2beta2.RunTaskRequest)
com.google.cloud.tasks.v2beta2.RunTaskRequestOrBuilder {
// Static descriptor for the message type this builder produces.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.tasks.v2beta2.CloudTasksProto
      .internal_static_google_cloud_tasks_v2beta2_RunTaskRequest_descriptor;
}

// Wires the reflection-based field accessors to the message class and builder.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.tasks.v2beta2.CloudTasksProto
      .internal_static_google_cloud_tasks_v2beta2_RunTaskRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.tasks.v2beta2.RunTaskRequest.class,
          com.google.cloud.tasks.v2beta2.RunTaskRequest.Builder.class);
}

// Construct using com.google.cloud.tasks.v2beta2.RunTaskRequest.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}

private void maybeForceBuilderInitialization() {
  // No sub-message fields to eagerly initialize for this message type.
  if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
// Resets all fields to their proto3 defaults.
@java.lang.Override
public Builder clear() {
  super.clear();
  name_ = "";

  responseView_ = 0;

  return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.tasks.v2beta2.CloudTasksProto
      .internal_static_google_cloud_tasks_v2beta2_RunTaskRequest_descriptor;
}

@java.lang.Override
public com.google.cloud.tasks.v2beta2.RunTaskRequest getDefaultInstanceForType() {
  return com.google.cloud.tasks.v2beta2.RunTaskRequest.getDefaultInstance();
}

// Builds the message, verifying isInitialized (always true for proto3).
@java.lang.Override
public com.google.cloud.tasks.v2beta2.RunTaskRequest build() {
  com.google.cloud.tasks.v2beta2.RunTaskRequest result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

// Copies builder state into a new immutable message without validation.
@java.lang.Override
public com.google.cloud.tasks.v2beta2.RunTaskRequest buildPartial() {
  com.google.cloud.tasks.v2beta2.RunTaskRequest result =
      new com.google.cloud.tasks.v2beta2.RunTaskRequest(this);
  result.name_ = name_;
  result.responseView_ = responseView_;
  onBuilt();
  return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.tasks.v2beta2.RunTaskRequest) {
return mergeFrom((com.google.cloud.tasks.v2beta2.RunTaskRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.tasks.v2beta2.RunTaskRequest other) {
if (other == com.google.cloud.tasks.v2beta2.RunTaskRequest.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
if (other.responseView_ != 0) {
setResponseViewValue(other.getResponseViewValue());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.tasks.v2beta2.RunTaskRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.cloud.tasks.v2beta2.RunTaskRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The task name. For example:
* `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The task name. For example:
* `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The task name. For example:
* `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The task name. For example:
* `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The task name. For example:
* `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
private int responseView_ = 0;
/**
*
*
* <pre>
* The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta2.Task] will be
* returned.
* By default response_view is [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
* information is retrieved by default because some data, such as
* payloads, might be desirable to return only when needed because
* of its large size or because of the sensitivity of data that it
* contains.
* Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
* `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
* permission on the [Task][google.cloud.tasks.v2beta2.Task] resource.
* </pre>
*
* <code>.google.cloud.tasks.v2beta2.Task.View response_view = 2;</code>
*
* @return The enum numeric value on the wire for responseView.
*/
@java.lang.Override
public int getResponseViewValue() {
return responseView_;
}
/**
*
*
* <pre>
* The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta2.Task] will be
* returned.
* By default response_view is [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
* information is retrieved by default because some data, such as
* payloads, might be desirable to return only when needed because
* of its large size or because of the sensitivity of data that it
* contains.
* Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
* `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
* permission on the [Task][google.cloud.tasks.v2beta2.Task] resource.
* </pre>
*
* <code>.google.cloud.tasks.v2beta2.Task.View response_view = 2;</code>
*
* @param value The enum numeric value on the wire for responseView to set.
* @return This builder for chaining.
*/
public Builder setResponseViewValue(int value) {
responseView_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta2.Task] will be
* returned.
* By default response_view is [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
* information is retrieved by default because some data, such as
* payloads, might be desirable to return only when needed because
* of its large size or because of the sensitivity of data that it
* contains.
* Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
* `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
* permission on the [Task][google.cloud.tasks.v2beta2.Task] resource.
* </pre>
*
* <code>.google.cloud.tasks.v2beta2.Task.View response_view = 2;</code>
*
* @return The responseView.
*/
@java.lang.Override
public com.google.cloud.tasks.v2beta2.Task.View getResponseView() {
@SuppressWarnings("deprecation")
com.google.cloud.tasks.v2beta2.Task.View result =
com.google.cloud.tasks.v2beta2.Task.View.valueOf(responseView_);
return result == null ? com.google.cloud.tasks.v2beta2.Task.View.UNRECOGNIZED : result;
}
/**
*
*
* <pre>
* The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta2.Task] will be
* returned.
* By default response_view is [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
* information is retrieved by default because some data, such as
* payloads, might be desirable to return only when needed because
* of its large size or because of the sensitivity of data that it
* contains.
* Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
* `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
* permission on the [Task][google.cloud.tasks.v2beta2.Task] resource.
* </pre>
*
* <code>.google.cloud.tasks.v2beta2.Task.View response_view = 2;</code>
*
* @param value The responseView to set.
* @return This builder for chaining.
*/
public Builder setResponseView(com.google.cloud.tasks.v2beta2.Task.View value) {
if (value == null) {
throw new NullPointerException();
}
responseView_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta2.Task] will be
* returned.
* By default response_view is [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
* information is retrieved by default because some data, such as
* payloads, might be desirable to return only when needed because
* of its large size or because of the sensitivity of data that it
* contains.
* Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
* `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/)
* permission on the [Task][google.cloud.tasks.v2beta2.Task] resource.
* </pre>
*
* <code>.google.cloud.tasks.v2beta2.Task.View response_view = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearResponseView() {
responseView_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.tasks.v2beta2.RunTaskRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.RunTaskRequest)
// Shared singleton default instance; all fields hold proto3 default values.
// Created eagerly at class-load time.
private static final com.google.cloud.tasks.v2beta2.RunTaskRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.tasks.v2beta2.RunTaskRequest();
}

/** Returns the shared immutable default instance of {@code RunTaskRequest}. */
public static com.google.cloud.tasks.v2beta2.RunTaskRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless parser shared by parser() and getParserForType(); delegates to the
// CodedInputStream-based message constructor.
private static final com.google.protobuf.Parser<RunTaskRequest> PARSER =
    new com.google.protobuf.AbstractParser<RunTaskRequest>() {
      @java.lang.Override
      public RunTaskRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RunTaskRequest(input, extensionRegistry);
      }
    };
/** Returns the shared parser for {@code RunTaskRequest} messages. */
public static com.google.protobuf.Parser<RunTaskRequest> parser() {
  return PARSER;
}
// Instance-level accessor required by the Message interface; returns the
// same shared PARSER as the static parser() method.
@java.lang.Override
public com.google.protobuf.Parser<RunTaskRequest> getParserForType() {
  return PARSER;
}
// Instance-level accessor required by the Message interface; returns the
// shared singleton default instance.
@java.lang.Override
public com.google.cloud.tasks.v2beta2.RunTaskRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/*
* Copyright 2002-2004 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.beans;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.springframework.util.StringUtils;
/**
* Default implementation of the PropertyValues interface.
* Allows simple manipulation of properties, and provides constructors
* to support deep copy and construction from a Map.
* @author Rod Johnson
* @since 13 May 2001
*/
public class MutablePropertyValues implements PropertyValues, Serializable {

	/** List of PropertyValue objects, in registration order. */
	private List propertyValueList = new ArrayList();

	/**
	 * Cached array view of {@link #propertyValueList} for quick, allocation-free
	 * reads. Rebuilt via {@link #recache()} after every structural write.
	 */
	private PropertyValue[] propertyValueArray = new PropertyValue[0];

	/**
	 * Creates a new empty MutablePropertyValues object.
	 * Property values can be added with the addPropertyValue methods.
	 * @see #addPropertyValue(PropertyValue)
	 * @see #addPropertyValue(String, Object)
	 */
	public MutablePropertyValues() {
	}

	/**
	 * Deep copy constructor. Guarantees PropertyValue references
	 * are independent, although it can't deep copy objects currently
	 * referenced by individual PropertyValue objects.
	 * @param source the PropertyValues to copy (may be null, resulting in
	 * an empty object)
	 * @see #addPropertyValues(PropertyValues)
	 */
	public MutablePropertyValues(PropertyValues source) {
		// We can optimize this because it's all new:
		// there is no replacement of existing property values,
		// so the array can be filled directly instead of via recache().
		if (source != null) {
			PropertyValue[] pvs = source.getPropertyValues();
			this.propertyValueArray = new PropertyValue[pvs.length];
			for (int i = 0; i < pvs.length; i++) {
				PropertyValue newPv = new PropertyValue(pvs[i].getName(), pvs[i].getValue());
				this.propertyValueArray[i] = newPv;
				this.propertyValueList.add(newPv);
			}
		}
	}

	/**
	 * Construct a new PropertyValues object from a Map.
	 * @param source Map with property values keyed by property name,
	 * which must be a String (may be null, resulting in an empty object)
	 * @see #addPropertyValues(Map)
	 */
	public MutablePropertyValues(Map source) {
		// addPropertyValues already rebuilds the cached array,
		// so no additional recache() call is needed here.
		addPropertyValues(source);
	}

	/**
	 * Rebuild the cached array from the backing list.
	 * Must be called after every write to {@link #propertyValueList}.
	 */
	private void recache() {
		this.propertyValueArray =
				(PropertyValue[]) this.propertyValueList.toArray(new PropertyValue[this.propertyValueList.size()]);
	}

	/**
	 * Copy all given PropertyValues into this object. Guarantees PropertyValue
	 * references are independent, although it can't deep copy objects currently
	 * referenced by individual PropertyValue objects.
	 * @param source the PropertyValues to copy (null is ignored)
	 * @return this object to allow creating objects, adding multiple PropertyValues
	 * in a single statement
	 */
	public MutablePropertyValues addPropertyValues(PropertyValues source) {
		if (source != null) {
			PropertyValue[] pvs = source.getPropertyValues();
			for (int i = 0; i < pvs.length; i++) {
				addPropertyValue(new PropertyValue(pvs[i].getName(), pvs[i].getValue()));
			}
			recache();
		}
		return this;
	}

	/**
	 * Add all property values from the given Map.
	 * @param source Map with property values keyed by property name,
	 * which must be a String (null is ignored)
	 * @return this object to allow creating objects, adding multiple PropertyValues
	 * in a single statement
	 */
	public MutablePropertyValues addPropertyValues(Map source) {
		if (source != null) {
			// Iterate over entries rather than keys to avoid a second
			// map lookup per property.
			Iterator it = source.entrySet().iterator();
			while (it.hasNext()) {
				Map.Entry entry = (Map.Entry) it.next();
				addPropertyValue(new PropertyValue((String) entry.getKey(), entry.getValue()));
			}
			recache();
		}
		return this;
	}

	/**
	 * Add a PropertyValue object, replacing any existing one
	 * for the respective property.
	 * @param pv PropertyValue object to add
	 * @return this object to allow creating objects, adding multiple PropertyValues
	 * in a single statement
	 */
	public MutablePropertyValues addPropertyValue(PropertyValue pv) {
		for (int i = 0; i < this.propertyValueList.size(); i++) {
			PropertyValue currentPv = (PropertyValue) this.propertyValueList.get(i);
			if (currentPv.getName().equals(pv.getName())) {
				// In-place replacement: setPropertyValueAt updates both the
				// list and the cached array, so no recache() is needed.
				setPropertyValueAt(pv, i);
				return this;
			}
		}
		this.propertyValueList.add(pv);
		recache();
		return this;
	}

	/**
	 * Overloaded version of addPropertyValue that takes
	 * a property name and a property value.
	 * @param propertyName name of the property
	 * @param propertyValue value of the property
	 * @see #addPropertyValue(PropertyValue)
	 */
	public void addPropertyValue(String propertyName, Object propertyValue) {
		addPropertyValue(new PropertyValue(propertyName, propertyValue));
	}

	/**
	 * Remove the given PropertyValue, if contained.
	 * @param pv the PropertyValue to remove
	 */
	public void removePropertyValue(PropertyValue pv) {
		this.propertyValueList.remove(pv);
		recache();
	}

	/**
	 * Overloaded version of removePropertyValue that takes
	 * a property name. A no-op if no such property is present.
	 * @param propertyName name of the property
	 * @see #removePropertyValue(PropertyValue)
	 */
	public void removePropertyValue(String propertyName) {
		// getPropertyValue may return null; List.remove(null) is then a no-op.
		removePropertyValue(getPropertyValue(propertyName));
	}

	/**
	 * Modify a PropertyValue object held in this object.
	 * Indexed from 0. Updates both the list and the cached array directly.
	 */
	public void setPropertyValueAt(PropertyValue pv, int i) {
		this.propertyValueList.set(i, pv);
		this.propertyValueArray[i] = pv;
	}

	public PropertyValue[] getPropertyValues() {
		return this.propertyValueArray;
	}

	public PropertyValue getPropertyValue(String propertyName) {
		for (int i = 0; i < this.propertyValueArray.length; i++) {
			if (this.propertyValueArray[i].getName().equals(propertyName)) {
				return this.propertyValueArray[i];
			}
		}
		return null;
	}

	public boolean contains(String propertyName) {
		return getPropertyValue(propertyName) != null;
	}

	public PropertyValues changesSince(PropertyValues old) {
		MutablePropertyValues changes = new MutablePropertyValues();
		if (old == this)
			return changes;
		// for each property value in the new set
		for (int i = 0; i < this.propertyValueArray.length; i++) {
			PropertyValue newPv = this.propertyValueArray[i];
			PropertyValue pvOld = old.getPropertyValue(newPv.getName());
			if (pvOld == null) {
				// if there wasn't an old one, it has been added
				changes.addPropertyValue(newPv);
			}
			else if (!pvOld.equals(newPv)) {
				// it's changed
				changes.addPropertyValue(newPv);
			}
		}
		return changes;
	}

	public String toString() {
		PropertyValue[] pvs = getPropertyValues();
		StringBuffer sb = new StringBuffer("MutablePropertyValues: length=" + pvs.length + "; ");
		sb.append(StringUtils.arrayToDelimitedString(pvs, ","));
		return sb.toString();
	}

}
| |
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.provider;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Context;
import android.content.SearchRecentSuggestionsProvider;
import android.net.Uri;
import android.text.TextUtils;
import android.util.Log;
import java.util.concurrent.Semaphore;
/**
* This is a utility class providing access to
* {@link android.content.SearchRecentSuggestionsProvider}.
*
* <p>Unlike some utility classes, this one must be instantiated and properly initialized, so that
* it can be configured to operate with the search suggestions provider that you have created.
*
* <p>Typically, you will do this in your searchable activity, each time you receive an incoming
* {@link android.content.Intent#ACTION_SEARCH ACTION_SEARCH} Intent. The code to record each
* incoming query is as follows:
* <pre class="prettyprint">
* SearchSuggestions suggestions = new SearchSuggestions(this,
* MySuggestionsProvider.AUTHORITY, MySuggestionsProvider.MODE);
* suggestions.saveRecentQuery(queryString, null);
* </pre>
*
* <p>For a working example, see SearchSuggestionSampleProvider and SearchQueryResults in
* samples/ApiDemos/app.
*
* <div class="special reference">
* <h3>Developer Guides</h3>
* <p>For information about using search suggestions in your application, read the
* <a href="{@docRoot}guide/topics/search/adding-recent-query-suggestions.html">Adding Recent Query
* Suggestions</a> developer guide.</p>
* </div>
*/
public class SearchRecentSuggestions {
    // debugging support
    private static final String LOG_TAG = "SearchSuggestions";

    // This is a superset of all possible column names (need not all be in table)
    private static class SuggestionColumns implements BaseColumns {
        public static final String DISPLAY1 = "display1";
        public static final String DISPLAY2 = "display2";
        public static final String QUERY = "query";
        public static final String DATE = "date";
    }

    /* if you change column order you must also change indices below */
    /**
     * This is the database projection that can be used to view saved queries, when
     * configured for one-line operation.
     */
    public static final String[] QUERIES_PROJECTION_1LINE = new String[] {
        SuggestionColumns._ID,
        SuggestionColumns.DATE,
        SuggestionColumns.QUERY,
        SuggestionColumns.DISPLAY1,
    };

    /* if you change column order you must also change indices below */
    /**
     * This is the database projection that can be used to view saved queries, when
     * configured for two-line operation.
     */
    public static final String[] QUERIES_PROJECTION_2LINE = new String[] {
        SuggestionColumns._ID,
        SuggestionColumns.DATE,
        SuggestionColumns.QUERY,
        SuggestionColumns.DISPLAY1,
        SuggestionColumns.DISPLAY2,
    };

    /* these indices depend on QUERIES_PROJECTION_xxx */
    /** Index into the provided query projections. For use with Cursor.update methods. */
    public static final int QUERIES_PROJECTION_DATE_INDEX = 1;
    /** Index into the provided query projections. For use with Cursor.update methods. */
    public static final int QUERIES_PROJECTION_QUERY_INDEX = 2;
    /** Index into the provided query projections. For use with Cursor.update methods. */
    public static final int QUERIES_PROJECTION_DISPLAY1_INDEX = 3;
    /** Index into the provided query projections. For use with Cursor.update methods. */
    public static final int QUERIES_PROJECTION_DISPLAY2_INDEX = 4; // only when 2line active

    /*
     * Set a cap on the count of items in the suggestions table, to
     * prevent db and layout operations from dragging to a crawl. Revisit this
     * cap when/if db/layout performance improvements are made.
     */
    private static final int MAX_HISTORY_COUNT = 250;

    // client-provided configuration values
    private final Context mContext;
    private final String mAuthority;
    private final boolean mTwoLineDisplay;
    private final Uri mSuggestionsUri;

    /** Released once per completion of async write. Used for tests. */
    private static final Semaphore sWritesInProgress = new Semaphore(0);

    /**
     * Although provider utility classes are typically static, this one must be constructed
     * because it needs to be initialized using the same values that you provided in your
     * {@link android.content.SearchRecentSuggestionsProvider}.
     *
     * @param authority This must match the authority that you've declared in your manifest.
     * @param mode You can use mode flags here to determine certain functional aspects of your
     * database. Note, this value should not change from run to run, because when it does change,
     * your suggestions database may be wiped.
     *
     * @see android.content.SearchRecentSuggestionsProvider
     * @see android.content.SearchRecentSuggestionsProvider#setupSuggestions
     */
    public SearchRecentSuggestions(Context context, String authority, int mode) {
        if (TextUtils.isEmpty(authority) ||
                ((mode & SearchRecentSuggestionsProvider.DATABASE_MODE_QUERIES) == 0)) {
            throw new IllegalArgumentException();
        }
        // unpack mode flags
        mTwoLineDisplay = (0 != (mode & SearchRecentSuggestionsProvider.DATABASE_MODE_2LINES));
        // saved values. Strings are immutable, so no defensive copy is needed.
        mContext = context;
        mAuthority = authority;
        // derived values
        mSuggestionsUri = Uri.parse("content://" + mAuthority + "/suggestions");
    }

    /**
     * Add a query to the recent queries list. Returns immediately, performing the save
     * in the background.
     *
     * @param queryString The string as typed by the user. This string will be displayed as
     * the suggestion, and if the user clicks on the suggestion, this string will be sent to your
     * searchable activity (as a new search query).
     * @param line2 If you have configured your recent suggestions provider with
     * {@link android.content.SearchRecentSuggestionsProvider#DATABASE_MODE_2LINES}, you can
     * pass a second line of text here. It will be shown in a smaller font, below the primary
     * suggestion. When typing, matches in either line of text will be displayed in the list.
     * If you did not configure two-line mode, or if a given suggestion does not have any
     * additional text to display, you can pass null here.
     */
    public void saveRecentQuery(final String queryString, final String line2) {
        if (TextUtils.isEmpty(queryString)) {
            return;
        }
        if (!mTwoLineDisplay && !TextUtils.isEmpty(line2)) {
            throw new IllegalArgumentException();
        }
        new Thread("saveRecentQuery") {
            @Override
            public void run() {
                saveRecentQueryBlocking(queryString, line2);
                // Signal completion for waitForSave() (test support).
                sWritesInProgress.release();
            }
        }.start();
    }

    // Visible for testing.
    void waitForSave() {
        // Acquire writes semaphore until there is nothing available.
        // This is to clean up after any previous callers to saveRecentQuery
        // who did not also call waitForSave().
        do {
            sWritesInProgress.acquireUninterruptibly();
        } while (sWritesInProgress.availablePermits() > 0);
    }

    /**
     * Performs the actual insert of one recent query, then trims the table to
     * {@link #MAX_HISTORY_COUNT}. Runs on the background thread started by
     * {@link #saveRecentQuery}.
     */
    private void saveRecentQueryBlocking(String queryString, String line2) {
        ContentResolver cr = mContext.getContentResolver();
        long now = System.currentTimeMillis();

        // Use content resolver (not cursor) to insert/update this query
        try {
            ContentValues values = new ContentValues();
            values.put(SuggestionColumns.DISPLAY1, queryString);
            if (mTwoLineDisplay) {
                values.put(SuggestionColumns.DISPLAY2, line2);
            }
            values.put(SuggestionColumns.QUERY, queryString);
            values.put(SuggestionColumns.DATE, now);
            cr.insert(mSuggestionsUri, values);
        } catch (RuntimeException e) {
            // Best-effort: a provider failure should not crash the caller.
            Log.e(LOG_TAG, "saveRecentQuery", e);
        }

        // Shorten the list (if it has become too long)
        truncateHistory(cr, MAX_HISTORY_COUNT);
    }

    /**
     * Completely delete the history. Use this call to implement a "clear history" UI.
     *
     * Any application that implements search suggestions based on previous actions (such as
     * recent queries, page/items viewed, etc.) should provide a way for the user to clear the
     * history. This gives the user a measure of privacy, if they do not wish for their recent
     * searches to be replayed by other users of the device (via suggestions).
     */
    public void clearHistory() {
        ContentResolver cr = mContext.getContentResolver();
        truncateHistory(cr, 0);
    }

    /**
     * Reduces the length of the history table, to prevent it from growing too large.
     *
     * @param cr Convenience copy of the content resolver.
     * @param maxEntries Max entries to leave in the table. 0 means remove all entries.
     */
    protected void truncateHistory(ContentResolver cr, int maxEntries) {
        if (maxEntries < 0) {
            throw new IllegalArgumentException();
        }

        try {
            // null means "delete all". otherwise "delete but leave n newest"
            String selection = null;
            if (maxEntries > 0) {
                selection = "_id IN " +
                        "(SELECT _id FROM suggestions" +
                        " ORDER BY " + SuggestionColumns.DATE + " DESC" +
                        " LIMIT -1 OFFSET " + String.valueOf(maxEntries) + ")";
            }
            cr.delete(mSuggestionsUri, selection, null);
        } catch (RuntimeException e) {
            // Best-effort: a provider failure should not crash the caller.
            Log.e(LOG_TAG, "truncateHistory", e);
        }
    }
}
| |
package org.fiteagle.core.reservation.dm;
import java.util.Iterator;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.ActivationConfigProperty;
import javax.ejb.MessageDriven;
import javax.inject.Inject;
import javax.jms.JMSContext;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.Topic;
import org.fiteagle.api.core.IMessageBus;
import org.fiteagle.api.core.MessageFilters;
import org.fiteagle.api.core.MessageUtil;
import org.fiteagle.api.tripletStoreAccessor.TripletStoreAccessor;
import org.fiteagle.api.tripletStoreAccessor.TripletStoreAccessor.ResourceRepositoryException;
import org.fiteagle.core.reservation.ReservationHandler;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.ResIterator;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.SimpleSelector;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.vocabulary.RDF;
import info.openmultinet.ontology.exceptions.InvalidModelException;
import info.openmultinet.ontology.vocabulary.Omn;
import info.openmultinet.ontology.vocabulary.Omn_lifecycle;
@MessageDriven(name = "ReservationMDBListener", activationConfig = {
@ActivationConfigProperty(propertyName = "destinationType", propertyValue = "javax.jms.Topic"),
@ActivationConfigProperty(propertyName = "destination", propertyValue = IMessageBus.TOPIC_CORE),
@ActivationConfigProperty(propertyName = "messageSelector", propertyValue = MessageFilters.FILTER_RESERVATION),
@ActivationConfigProperty(propertyName = "acknowledgeMode", propertyValue = "Auto-acknowledge")})
public class ReservationMDBListener implements MessageListener {
@Inject
private JMSContext context;
@javax.annotation.Resource(mappedName = IMessageBus.TOPIC_CORE_NAME)
private Topic topic;
/**
 * JMS entry point: inspects the message type header and dispatches CREATE,
 * CONFIGURE and GET reservation messages to the corresponding handler.
 * Messages without a type or without an RDF body are silently ignored.
 *
 * @param message the incoming JMS message carrying a serialized RDF model
 */
@Override
public void onMessage(final Message message) {
  String messageType = MessageUtil.getMessageType(message);
  String serialization = MessageUtil.getMessageSerialization(message);
  String rdfString = MessageUtil.getStringBody(message);

  try {
    if (messageType != null && rdfString != null) {
      Model messageModel = MessageUtil.parseSerializedModel(rdfString, serialization);
      String correlationId = MessageUtil.getJMSCorrelationID(message);
      // A message carries exactly one type, so the checks are exclusive.
      if (messageType.equals(IMessageBus.TYPE_CREATE)) {
        handleCreate(messageModel, serialization, correlationId);
      } else if (messageType.equals(IMessageBus.TYPE_CONFIGURE)) {
        handleConfigure(messageModel, serialization, correlationId);
      } else if (messageType.equals(IMessageBus.TYPE_GET)) {
        handleGet(messageModel, serialization, correlationId);
      }
    }
  } catch (ResourceRepositoryException e) {
    // Log instead of printStackTrace so failures reach the server log.
    Logger.getLogger(ReservationMDBListener.class.getName())
        .log(Level.SEVERE, "Triplet store access failed while handling a reservation message", e);
  } catch (InvalidModelException e) {
    Logger.getLogger(ReservationMDBListener.class.getName())
        .log(Level.SEVERE, "Received an invalid RDF model in a reservation message", e);
  }
}
/**
 * Handles a CONFIGURE message: writes the incoming model to the triplet store
 * and replies on the topic with an INFORM message describing the result.
 *
 * If the message describes a Topology, only the store update is performed and
 * the incoming model is echoed back. Otherwise every Reservation resource in
 * the message is updated and its stored state (plus its reserved resources,
 * wrapped in a topology) is merged into the reply.
 *
 * @param messageModel     parsed RDF model from the incoming message; note that
 *                         {@code resultModel} aliases it, so additions below
 *                         mutate the incoming model as well
 * @param serialization    RDF serialization format to use for the reply
 * @param jmsCorrelationID correlation id copied onto the reply message
 * @throws InvalidModelException if the store rejects the model
 * @throws ResourceRepositoryException on triplet store access failure
 */
private void handleConfigure(Model messageModel, String serialization, String jmsCorrelationID) throws InvalidModelException, ResourceRepositoryException {
  Message responseMessage = null;
  // Alias, not a copy: the reply is accumulated on top of the incoming model.
  Model resultModel = messageModel;
  ResIterator iterator = messageModel.listResourcesWithProperty(RDF.type, Omn.Topology);
  if (iterator.hasNext()) {
    //should be only one resource
    TripletStoreAccessor.updateModel(messageModel);
  } else {
    iterator = messageModel.listResourcesWithProperty(RDF.type, Omn.Reservation);
    // Persist first, then read the stored state back for the reply.
    TripletStoreAccessor.updateModel(messageModel);
    while (iterator.hasNext()) {
      Resource reservation = iterator.nextResource();
      String uri = reservation.getURI();
      resultModel.add(TripletStoreAccessor.getResource(uri));
      addResourcesForReservations(resultModel);
      addWrappingTopology(resultModel);
    }
  }
  String serializedResponse = MessageUtil.serializeModel(resultModel, serialization);
  responseMessage = MessageUtil.createRDFMessage(serializedResponse, IMessageBus.TYPE_INFORM, null, serialization, jmsCorrelationID, context);
  context.createProducer().send(topic, responseMessage);
}
/**
 * For every omn:isReservationOf statement in the model, fetches the full
 * description of the reserved resource from the triplet store and merges
 * it into {@code resultModel}.
 */
private void addResourcesForReservations(Model resultModel) {
  StmtIterator links = resultModel.listStatements(new SimpleSelector(null, Omn.isReservationOf, (Object) null));
  while (links.hasNext()) {
    Statement link = links.nextStatement();
    Resource reserved = link.getObject().asResource();
    resultModel.add(TripletStoreAccessor.getResource(reserved.getURI()));
  }
}
/**
 * Handles a GET message: reads the requested Topology or Resource(s) from the
 * triplet store, enriches the result with related resources, reservations and
 * services, and replies on the topic with an INFORM message.
 *
 * @param messageModel     parsed RDF model naming the topology or resources to fetch
 * @param serialization    RDF serialization format to use for the reply
 * @param jmsCorrelationID correlation id copied onto the reply message
 */
private void handleGet(Model messageModel, String serialization, String jmsCorrelationID) {
  Message responseMessage = null;
  Model resultModel = ModelFactory.createDefaultModel();
  //getInfrastructure Slice URN or Sliver URNS
  ResIterator iterator = messageModel.listResourcesWithProperty(RDF.type, Omn.Topology);
  if (iterator.hasNext()) {
    //should be only one resource
    Resource r = iterator.nextResource();
    String uri = r.getURI();
    resultModel = TripletStoreAccessor.getResource(uri);
    addResources(resultModel);
    addReservations(resultModel);
    addServices(resultModel);
  } else {
    iterator = messageModel.listResourcesWithProperty(RDF.type, Omn.Resource);
    while (iterator.hasNext()) {
      Resource r = iterator.nextResource();
      String uri = r.getURI();
      // NOTE(review): each iteration replaces resultModel, so only the last
      // requested resource ends up in the reply — confirm this is intended.
      resultModel = TripletStoreAccessor.getResource(uri);
      addReservations(resultModel);
      addServices(resultModel);
      addWrappingTopology(resultModel);
    }
  }
  String serializedResponse = MessageUtil.serializeModel(resultModel, serialization);
  responseMessage = MessageUtil.createRDFMessage(serializedResponse, IMessageBus.TYPE_INFORM, null, serialization, jmsCorrelationID, context);
  context.createProducer().send(topic, responseMessage);
}
/**
 * For every {@code omn:hasService} statement in the model, fetches the
 * referenced service description from the triplet store and merges it in.
 *
 * @param resultModel model to enrich; also the source of the statements
 */
private void addServices(Model resultModel) {
    StmtIterator serviceLinks = resultModel.listStatements(new SimpleSelector(null, Omn.hasService, (Object) null));
    while (serviceLinks.hasNext()) {
        String serviceUri = serviceLinks.nextStatement().getObject().asResource().getURI();
        resultModel.add(TripletStoreAccessor.getResource(serviceUri));
    }
}
/**
 * Marks every subject of an {@code omn:hasResource} statement as a topology
 * and copies its properties into the model, so the result is wrapped in a
 * topology for the client.
 * NOTE(review): statements are added to {@code resultModel} while iterating
 * over statements of the same model - confirm the Jena iterator tolerates
 * this for the selectors used here.
 *
 * @param resultModel model to enrich in place
 */
private void addWrappingTopology(Model resultModel) {
StmtIterator stmtIterator = resultModel.listStatements(new SimpleSelector(null, Omn.hasResource,(Object)null));
while(stmtIterator.hasNext()){
// The subject (the wrapping node), not the object, is promoted here.
Resource resource = stmtIterator.nextStatement().getSubject().asResource();
resource.addProperty(RDF.type,Omn.Topology);
resultModel.add(resource.listProperties());
//resultModel.add(TripletStoreAccessor.getResource(resource.getURI()));
}
}
/**
 * For every {@code omn:hasReservation} statement in the model, fetches the
 * referenced reservation from the triplet store and merges it in.
 *
 * @param resultModel model to enrich; also the source of the statements
 */
private void addReservations(Model resultModel) {
    StmtIterator reservationLinks = resultModel.listStatements(new SimpleSelector(null, Omn.hasReservation, (Object) null));
    while (reservationLinks.hasNext()) {
        String reservationUri = reservationLinks.nextStatement().getObject().asResource().getURI();
        resultModel.add(TripletStoreAccessor.getResource(reservationUri));
    }
}
/**
 * Fetches every resource referenced via {@code omn:hasResource} from the
 * triplet store and merges it into the model, together with one further level
 * of resources those resources link to (skipping omn-lifecycle
 * {@code implementedBy} links).
 *
 * @param resultModel model to enrich; also the source of the hasResource statements
 */
private void addResources(Model resultModel) {
    StmtIterator stmtIterator = resultModel.listStatements(new SimpleSelector(null, Omn.hasResource, (Object) null));
    // Materialize the statements before adding to resultModel so the
    // additions cannot interfere with the iteration. toList() already returns
    // a fresh list, so the former element-by-element copy into a
    // CopyOnWriteArrayList was redundant and has been removed.
    for (Statement s : stmtIterator.toList()) {
        Resource resource = s.getObject().asResource();
        Model resourceModel = TripletStoreAccessor.getResource(resource.getURI());
        resultModel.add(resourceModel);
        // Walk the fetched resource's properties and follow one level of
        // resource links that exist in the store.
        Resource res = resourceModel.getResource(resource.getURI());
        StmtIterator stmtIter = res.listProperties();
        while (stmtIter.hasNext()) {
            Statement statement = stmtIter.nextStatement();
            if (statement.getObject().isResource()
                    && TripletStoreAccessor.exists(statement.getObject().asResource().getURI())
                    && !Omn_lifecycle.implementedBy.getLocalName().equals(statement.getPredicate().getLocalName())) {
                resultModel.add(TripletStoreAccessor.getResource(statement.getObject().asResource().getURI()));
            }
        }
    }
}
/**
 * Handles a CREATE request by delegating to {@link ReservationHandler} and
 * publishing its response message on the topic.
 *
 * @param requestModel  RDF model parsed from the incoming message
 * @param serialization RDF serialization format for the response
 * @param requestID     correlation id copied onto the response message
 * @throws ResourceRepositoryException if the reservation cannot be stored
 */
private void handleCreate(Model requestModel, String serialization, String requestID) throws ResourceRepositoryException {
    // FIX: the throws clause previously listed ResourceRepositoryException twice.
    ReservationHandler reservationHandler = new ReservationHandler();
    Message responseMessage = reservationHandler.handleReservation(requestModel, serialization, requestID, context);
    context.createProducer().send(topic, responseMessage);
}
}
| |
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.product.fra.type;
import static com.opengamma.strata.basics.currency.Currency.AUD;
import static com.opengamma.strata.basics.currency.Currency.GBP;
import static com.opengamma.strata.basics.currency.Currency.NZD;
import static com.opengamma.strata.basics.currency.Currency.USD;
import static com.opengamma.strata.basics.date.BusinessDayConventions.FOLLOWING;
import static com.opengamma.strata.basics.date.BusinessDayConventions.MODIFIED_FOLLOWING;
import static com.opengamma.strata.basics.date.DayCounts.ACT_360;
import static com.opengamma.strata.basics.date.DayCounts.ACT_365F;
import static com.opengamma.strata.basics.date.HolidayCalendarIds.GBLO;
import static com.opengamma.strata.basics.date.HolidayCalendarIds.SAT_SUN;
import static com.opengamma.strata.basics.date.Tenor.TENOR_3M;
import static com.opengamma.strata.basics.index.IborIndices.GBP_LIBOR_3M;
import static com.opengamma.strata.basics.index.IborIndices.USD_LIBOR_3M;
import static com.opengamma.strata.collect.TestHelper.assertSerialization;
import static com.opengamma.strata.collect.TestHelper.assertThrowsIllegalArg;
import static com.opengamma.strata.collect.TestHelper.coverBeanEquals;
import static com.opengamma.strata.collect.TestHelper.coverImmutableBean;
import static com.opengamma.strata.collect.TestHelper.coverPrivateConstructor;
import static com.opengamma.strata.collect.TestHelper.date;
import static com.opengamma.strata.product.common.BuySell.BUY;
import static com.opengamma.strata.product.fra.FraDiscountingMethod.AFMA;
import static com.opengamma.strata.product.fra.FraDiscountingMethod.ISDA;
import static org.testng.Assert.assertEquals;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.Period;
import java.time.ZoneId;
import java.util.Optional;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.opengamma.strata.basics.ReferenceData;
import com.opengamma.strata.basics.date.AdjustableDate;
import com.opengamma.strata.basics.date.BusinessDayAdjustment;
import com.opengamma.strata.basics.date.DaysAdjustment;
import com.opengamma.strata.basics.date.TenorAdjustment;
import com.opengamma.strata.basics.index.ImmutableIborIndex;
import com.opengamma.strata.product.fra.Fra;
import com.opengamma.strata.product.fra.FraDiscountingMethod;
import com.opengamma.strata.product.fra.FraTrade;
/**
* Test {@link FraConvention}.
*/
@Test
public class FraConventionTest {
// Shared reference data, notionals and date adjustments reused by the tests below.
private static final ReferenceData REF_DATA = ReferenceData.standard();
private static final double NOTIONAL_2M = 2_000_000d;
private static final BusinessDayAdjustment BDA_FOLLOW = BusinessDayAdjustment.of(FOLLOWING, GBLO);
private static final BusinessDayAdjustment BDA_MOD_FOLLOW = BusinessDayAdjustment.of(MODIFIED_FOLLOWING, GBLO);
private static final DaysAdjustment NEXT_SAME_BUS_DAY = DaysAdjustment.ofCalendarDays(0, BDA_FOLLOW);
private static final DaysAdjustment PLUS_ONE_DAY = DaysAdjustment.ofBusinessDays(1, GBLO);
private static final DaysAdjustment PLUS_TWO_DAYS = DaysAdjustment.ofBusinessDays(2, GBLO);
private static final DaysAdjustment MINUS_TWO_DAYS = DaysAdjustment.ofBusinessDays(-2, GBLO);
private static final DaysAdjustment MINUS_FIVE_DAYS = DaysAdjustment.ofBusinessDays(-5, GBLO);
// Synthetic AUD index: used to verify that AFMA discounting is chosen by default for AUD.
private static final ImmutableIborIndex AUD_INDEX = ImmutableIborIndex.builder()
.name("AUD-INDEX-3M")
.currency(AUD)
.dayCount(ACT_360)
.fixingDateOffset(MINUS_TWO_DAYS)
.effectiveDateOffset(PLUS_TWO_DAYS)
.maturityDateOffset(TenorAdjustment.ofLastDay(TENOR_3M, BDA_MOD_FOLLOW))
.fixingCalendar(SAT_SUN)
.fixingTime(LocalTime.NOON)
.fixingZone(ZoneId.of("Australia/Sydney"))
.build();
// Synthetic NZD index: used to verify that AFMA discounting is chosen by default for NZD.
private static final ImmutableIborIndex NZD_INDEX = ImmutableIborIndex.builder()
.name("NZD-INDEX-3M")
.currency(NZD)
.dayCount(ACT_360)
.fixingDateOffset(MINUS_TWO_DAYS)
.effectiveDateOffset(PLUS_TWO_DAYS)
.maturityDateOffset(TenorAdjustment.ofLastDay(TENOR_3M, BDA_MOD_FOLLOW))
.fixingCalendar(SAT_SUN)
.fixingTime(LocalTime.NOON)
.fixingZone(ZoneId.of("NZ"))
.build();
//-------------------------------------------------------------------------
// Verifies all defaults derived from the index by the of() factory, and that
// the other factory methods produce an equal convention.
public void test_of_index() {
ImmutableFraConvention test = ImmutableFraConvention.of(GBP_LIBOR_3M);
assertEquals(test.getIndex(), GBP_LIBOR_3M);
assertEquals(test.getName(), GBP_LIBOR_3M.getName());
assertEquals(test.getCurrency(), GBP);
assertEquals(test.getSpotDateOffset(), GBP_LIBOR_3M.getEffectiveDateOffset());
assertEquals(test.getBusinessDayAdjustment(), BDA_MOD_FOLLOW);
assertEquals(test.getPaymentDateOffset(), DaysAdjustment.NONE);
assertEquals(test.getFixingDateOffset(), GBP_LIBOR_3M.getFixingDateOffset());
assertEquals(test.getDayCount(), ACT_365F);
assertEquals(test.getDiscounting(), ISDA);
// ensure other factories match
assertEquals(FraConvention.of(GBP_LIBOR_3M), test);
assertEquals(FraConventions.of(GBP_LIBOR_3M), test);
}
//-------------------------------------------------------------------------
// The index is the only mandatory builder property.
public void test_builder_noIndex() {
assertThrowsIllegalArg(() -> ImmutableFraConvention.builder()
.spotDateOffset(NEXT_SAME_BUS_DAY)
.build());
}
//-------------------------------------------------------------------------
public void test_builder_minSpecified() {
ImmutableFraConvention test = ImmutableFraConvention.builder()
.index(GBP_LIBOR_3M)
.build();
assertEquals(test.getName(), GBP_LIBOR_3M.getName());
assertEquals(test.getIndex(), GBP_LIBOR_3M);
assertEquals(test.getCurrency(), GBP);
assertEquals(test.getSpotDateOffset(), GBP_LIBOR_3M.getEffectiveDateOffset());
assertEquals(test.getBusinessDayAdjustment(), BDA_MOD_FOLLOW);
assertEquals(test.getPaymentDateOffset(), DaysAdjustment.NONE);
assertEquals(test.getFixingDateOffset(), GBP_LIBOR_3M.getFixingDateOffset());
assertEquals(test.getDayCount(), GBP_LIBOR_3M.getDayCount());
assertEquals(test.getDiscounting(), ISDA);
}
// Every builder property overridden: no defaults should leak through.
public void test_builder_allSpecified() {
ImmutableFraConvention test = ImmutableFraConvention.builder()
.name(GBP_LIBOR_3M.getName())
.index(GBP_LIBOR_3M)
.currency(GBP)
.spotDateOffset(PLUS_ONE_DAY)
.businessDayAdjustment(BDA_FOLLOW)
.paymentDateOffset(PLUS_TWO_DAYS)
.fixingDateOffset(MINUS_FIVE_DAYS)
.dayCount(ACT_360)
.discounting(FraDiscountingMethod.NONE)
.build();
assertEquals(test.getName(), GBP_LIBOR_3M.getName());
assertEquals(test.getIndex(), GBP_LIBOR_3M);
assertEquals(test.getCurrency(), GBP);
assertEquals(test.getSpotDateOffset(), PLUS_ONE_DAY);
assertEquals(test.getBusinessDayAdjustment(), BDA_FOLLOW);
assertEquals(test.getPaymentDateOffset(), PLUS_TWO_DAYS);
assertEquals(test.getFixingDateOffset(), MINUS_FIVE_DAYS);
assertEquals(test.getDayCount(), ACT_360);
assertEquals(test.getDiscounting(), FraDiscountingMethod.NONE);
}
public void test_builder_AUD() {
ImmutableFraConvention test = ImmutableFraConvention.of(AUD_INDEX);
assertEquals(test.getIndex(), AUD_INDEX);
assertEquals(test.getDiscounting(), AFMA);
}
public void test_builder_NZD() {
ImmutableFraConvention test = ImmutableFraConvention.of(NZD_INDEX);
assertEquals(test.getIndex(), NZD_INDEX);
assertEquals(test.getDiscounting(), AFMA);
}
//-------------------------------------------------------------------------
// createTrade with a single period: start = spot + period, end = start + index tenor.
public void test_createTrade_period() {
FraConvention base = ImmutableFraConvention.builder()
.index(GBP_LIBOR_3M)
.spotDateOffset(NEXT_SAME_BUS_DAY)
.build();
LocalDate tradeDate = LocalDate.of(2015, 5, 5);
FraTrade test = base.createTrade(tradeDate, Period.ofMonths(3), BUY, NOTIONAL_2M, 0.25d, REF_DATA);
Fra expected = Fra.builder()
.buySell(BUY)
.notional(NOTIONAL_2M)
.startDate(date(2015, 8, 5))
.endDate(date(2015, 11, 5))
.fixedRate(0.25d)
.index(GBP_LIBOR_3M)
.build();
assertEquals(test.getInfo().getTradeDate(), Optional.of(tradeDate));
assertEquals(test.getProduct(), expected);
}
//-------------------------------------------------------------------------
// createTrade with explicit start and end periods.
public void test_createTrade_periods() {
FraConvention base = ImmutableFraConvention.builder()
.index(GBP_LIBOR_3M)
.spotDateOffset(NEXT_SAME_BUS_DAY)
.build();
LocalDate tradeDate = LocalDate.of(2015, 5, 5);
FraTrade test = base.createTrade(tradeDate, Period.ofMonths(3), Period.ofMonths(6), BUY, NOTIONAL_2M, 0.25d, REF_DATA);
Fra expected = Fra.builder()
.buySell(BUY)
.notional(NOTIONAL_2M)
.startDate(date(2015, 8, 5))
.endDate(date(2015, 11, 5))
.fixedRate(0.25d)
.index(GBP_LIBOR_3M)
.build();
assertEquals(test.getInfo().getTradeDate(), Optional.of(tradeDate));
assertEquals(test.getProduct(), expected);
}
// 2016-09-11 falls on a weekend, so dates roll to Monday the 12th.
public void test_createTrade_periods_adjust() {
FraConvention base = ImmutableFraConvention.builder()
.index(GBP_LIBOR_3M)
.spotDateOffset(NEXT_SAME_BUS_DAY)
.paymentDateOffset(DaysAdjustment.ofCalendarDays(0, BDA_FOLLOW))
.build();
LocalDate tradeDate = LocalDate.of(2016, 8, 11);
FraTrade test = base.createTrade(tradeDate, Period.ofMonths(1), Period.ofMonths(4), BUY, NOTIONAL_2M, 0.25d, REF_DATA);
Fra expected = Fra.builder()
.buySell(BUY)
.notional(NOTIONAL_2M)
.startDate(date(2016, 9, 12))
.endDate(date(2016, 12, 12))
.paymentDate(AdjustableDate.of(date(2016, 9, 12), BDA_FOLLOW))
.fixedRate(0.25d)
.index(GBP_LIBOR_3M)
.build();
assertEquals(test.getInfo().getTradeDate(), Optional.of(tradeDate));
assertEquals(test.getProduct(), expected);
}
public void test_createTrade_periods_adjust_payOffset() {
FraConvention base = ImmutableFraConvention.builder()
.index(GBP_LIBOR_3M)
.spotDateOffset(NEXT_SAME_BUS_DAY)
.paymentDateOffset(PLUS_TWO_DAYS)
.build();
LocalDate tradeDate = LocalDate.of(2016, 8, 11);
FraTrade test = base.createTrade(tradeDate, Period.ofMonths(1), Period.ofMonths(4), BUY, NOTIONAL_2M, 0.25d, REF_DATA);
Fra expected = Fra.builder()
.buySell(BUY)
.notional(NOTIONAL_2M)
.startDate(date(2016, 9, 12))
.endDate(date(2016, 12, 12))
.paymentDate(AdjustableDate.of(date(2016, 9, 14), PLUS_TWO_DAYS.getAdjustment()))
.fixedRate(0.25d)
.index(GBP_LIBOR_3M)
.build();
assertEquals(test.getInfo().getTradeDate(), Optional.of(tradeDate));
assertEquals(test.getProduct(), expected);
}
//-------------------------------------------------------------------------
public void test_toTrade_dates() {
FraConvention base = ImmutableFraConvention.builder()
.index(GBP_LIBOR_3M)
.spotDateOffset(NEXT_SAME_BUS_DAY)
.build();
LocalDate tradeDate = LocalDate.of(2015, 5, 5);
LocalDate startDate = date(2015, 8, 5);
LocalDate endDate = date(2015, 11, 5);
LocalDate paymentDate = startDate;
// NOTE(review): startDate is passed as the payment-date argument here;
// it equals paymentDate, but using the paymentDate variable would be clearer.
FraTrade test = base.toTrade(tradeDate, startDate, endDate, startDate, BUY, NOTIONAL_2M, 0.25d);
Fra expected = Fra.builder()
.buySell(BUY)
.notional(NOTIONAL_2M)
.startDate(startDate)
.endDate(endDate)
.paymentDate(AdjustableDate.of(paymentDate))
.fixedRate(0.25d)
.index(GBP_LIBOR_3M)
.build();
assertEquals(test.getInfo().getTradeDate(), Optional.of(tradeDate));
assertEquals(test.getProduct(), expected);
}
public void test_toTrade_dates_paymentOffset() {
FraConvention base = ImmutableFraConvention.builder()
.index(GBP_LIBOR_3M)
.spotDateOffset(NEXT_SAME_BUS_DAY)
.paymentDateOffset(PLUS_TWO_DAYS)
.build();
LocalDate tradeDate = LocalDate.of(2015, 5, 5);
LocalDate startDate = date(2015, 8, 5);
LocalDate endDate = date(2015, 11, 5);
LocalDate paymentDate = date(2015, 8, 7);
FraTrade test = base.toTrade(tradeDate, startDate, endDate, paymentDate, BUY, NOTIONAL_2M, 0.25d);
Fra expected = Fra.builder()
.buySell(BUY)
.notional(NOTIONAL_2M)
.startDate(date(2015, 8, 5))
.endDate(date(2015, 11, 5))
.paymentDate(AdjustableDate.of(paymentDate, PLUS_TWO_DAYS.getAdjustment()))
.fixedRate(0.25d)
.index(GBP_LIBOR_3M)
.build();
assertEquals(test.getInfo().getTradeDate(), Optional.of(tradeDate));
assertEquals(test.getProduct(), expected);
}
// NOTE(review): duplicates test_of_lookup_notFound below - consider keeping only one.
public void test_unknownIndex() {
assertThrowsIllegalArg(() -> FraConvention.of("Rubbish"));
}
public void test_toTemplate_badDateOrder() {
FraConvention base = FraConvention.of(GBP_LIBOR_3M);
LocalDate tradeDate = LocalDate.of(2015, 5, 5);
LocalDate startDate = date(2015, 4, 5);
LocalDate endDate = date(2015, 7, 5);
LocalDate paymentDate = date(2015, 8, 7);
assertThrowsIllegalArg(() -> base.toTrade(tradeDate, startDate, endDate, paymentDate, BUY, NOTIONAL_2M, 0.25d));
}
//-------------------------------------------------------------------------
// Name round-trip tests driven by the data provider below.
@DataProvider(name = "name")
static Object[][] data_name() {
return new Object[][] {
{ImmutableFraConvention.of(GBP_LIBOR_3M), "GBP-LIBOR-3M"},
{ImmutableFraConvention.of(USD_LIBOR_3M), "USD-LIBOR-3M"},
};
}
@Test(dataProvider = "name")
public void test_name(FraConvention convention, String name) {
assertEquals(convention.getName(), name);
}
@Test(dataProvider = "name")
public void test_toString(FraConvention convention, String name) {
assertEquals(convention.toString(), name);
}
@Test(dataProvider = "name")
public void test_of_lookup(FraConvention convention, String name) {
assertEquals(FraConvention.of(name), convention);
}
@Test(dataProvider = "name")
public void test_extendedEnum(FraConvention convention, String name) {
FraConvention.of(name); // ensures map is populated
ImmutableMap<String, FraConvention> map = FraConvention.extendedEnum().lookupAll();
assertEquals(map.get(name), convention);
}
public void test_of_lookup_notFound() {
assertThrowsIllegalArg(() -> FraConvention.of("Rubbish"));
}
public void test_of_lookup_null() {
assertThrowsIllegalArg(() -> FraConvention.of((String) null));
}
//-------------------------------------------------------------------------
// Joda-Beans coverage boilerplate.
public void coverage() {
ImmutableFraConvention test = ImmutableFraConvention.builder()
.index(GBP_LIBOR_3M)
.build();
coverImmutableBean(test);
ImmutableFraConvention test2 = ImmutableFraConvention.builder()
.index(GBP_LIBOR_3M)
.name("Test")
.currency(USD)
.spotDateOffset(PLUS_ONE_DAY)
.businessDayAdjustment(BDA_FOLLOW)
.paymentDateOffset(PLUS_TWO_DAYS)
.fixingDateOffset(MINUS_FIVE_DAYS)
.dayCount(ACT_360)
.discounting(FraDiscountingMethod.NONE)
.build();
coverBeanEquals(test, test2);
coverPrivateConstructor(FraConventions.class);
coverPrivateConstructor(FraConventionLookup.class);
}
public void test_serialization() {
ImmutableFraConvention test = ImmutableFraConvention.builder()
.index(GBP_LIBOR_3M)
.build();
assertSerialization(test);
}
}
| |
/**
* Copyright (C) [2013] [The FURTHeR Project]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.utah.further.ds.further.model.impl.domain;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.EmbeddedId;
import javax.persistence.Entity;
import javax.persistence.Table;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.commons.lang.builder.ReflectionToStringBuilder;
import edu.utah.further.core.api.context.Implementation;
import edu.utah.further.core.api.data.PersistentEntity;
/**
 * Persistent entity implementation of {@link Location}: a typed location
 * record attached to a person, valid over an optional date range.
 * <p>
 * -----------------------------------------------------------------------------------<br>
 * (c) 2008-2013 FURTHeR Project, Health Sciences IT, University of Utah<br>
 * Contact: {@code <further@utah.edu>}<br>
 * Biomedical Informatics, 26 South 2000 East<br>
 * Room 5775 HSEB, Salt Lake City, UT 84112<br>
 * Day Phone: 1-801-581-4080<br>
 * -----------------------------------------------------------------------------------
 *
 * @author N. Dustin Schultz {@code <dustin.schultz@utah.edu>}
 * @version Aug 4, 2010
 */
@Entity
@Implementation
@Table(name = "FPERSON_LCTN")
@XmlRootElement(name = "Location")
@XmlAccessorType(XmlAccessType.FIELD)
public final class Location implements PersistentEntity<LocationId>
{
	// ========================= CONSTANTS ===================================

	/** Generated serial version identifier. */
	private static final long serialVersionUID = -4566890844457489875L;

	// ========================= FIELDS ===================================

	/** Composite primary key of this entity. */
	@EmbeddedId
	private LocationId id;

	/** Identifier of the person this location belongs to. */
	@Column(name = "fperson_id")
	private Long personId;

	/** Composite identifier of the owning person. */
	@Column(name = "FPERSON_COMPOSITE_ID")
	private String personCompositeId;

	/** Code describing the person-location relationship. */
	@Column(name = "person_lctn_type_cd")
	private String personLocationType;

	/** Code describing the kind of location. */
	@Column(name = "lctn_type_cd")
	private String locationType;

	/** Namespace identifier qualifying the location concept. */
	@Column(name = "lctn_nmspc_id")
	private Long locationNamespaceId;

	/** Concept identifier of the location. */
	@Column(name = "lctn_cid")
	private String location;

	/** Free-text description of the location. */
	@Column(name = "lctn_txt")
	private String locationText;

	/** Start of the validity period. */
	@Column(name = "start_dt")
	private Date startDateTime;

	/** End of the validity period. */
	@Column(name = "end_dt")
	private Date endDateTime;

	// ========================= GET & SET ===================================

	/**
	 * @see edu.utah.further.core.api.discrete.HasIdentifier#getId()
	 * @return the composite identifier of this entity
	 */
	@Override
	public LocationId getId()
	{
		return id;
	}

	/** @param id the new composite identifier */
	public void setId(LocationId id)
	{
		this.id = id;
	}

	/** @return the person identifier */
	public Long getPersonId()
	{
		return personId;
	}

	/** @param personId the person identifier to set */
	public void setPersonId(Long personId)
	{
		this.personId = personId;
	}

	/** @return the composite person identifier */
	public String getPersonCompositeId()
	{
		return personCompositeId;
	}

	/** @param personCompositeId the composite person identifier to set */
	public void setPersonCompositeId(String personCompositeId)
	{
		this.personCompositeId = personCompositeId;
	}

	/** @return the person-location type code */
	public String getPersonLocationType()
	{
		return personLocationType;
	}

	/** @param personLocationType the person-location type code to set */
	public void setPersonLocationType(String personLocationType)
	{
		this.personLocationType = personLocationType;
	}

	/** @return the location type code */
	public String getLocationType()
	{
		return locationType;
	}

	/** @param locationType the location type code to set */
	public void setLocationType(String locationType)
	{
		this.locationType = locationType;
	}

	/** @return the location namespace identifier */
	public Long getLocationNamespaceId()
	{
		return locationNamespaceId;
	}

	/** @param locationNamespaceId the location namespace identifier to set */
	public void setLocationNamespaceId(Long locationNamespaceId)
	{
		this.locationNamespaceId = locationNamespaceId;
	}

	/** @return the location concept identifier */
	public String getLocation()
	{
		return location;
	}

	/** @param location the location concept identifier to set */
	public void setLocation(String location)
	{
		this.location = location;
	}

	/** @return the free-text location description */
	public String getLocationText()
	{
		return locationText;
	}

	/** @param locationText the free-text location description to set */
	public void setLocationText(String locationText)
	{
		this.locationText = locationText;
	}

	/** @return the start of the validity period */
	public Date getStartDateTime()
	{
		return startDateTime;
	}

	/** @param startDateTime the start of the validity period to set */
	public void setStartDateTime(Date startDateTime)
	{
		this.startDateTime = startDateTime;
	}

	/** @return the end of the validity period */
	public Date getEndDateTime()
	{
		return endDateTime;
	}

	/** @param endDateTime the end of the validity period to set */
	public void setEndDateTime(Date endDateTime)
	{
		this.endDateTime = endDateTime;
	}

	// ====================== IMPLEMENTATION: Object =====================

	/**
	 * Two locations are equal when they are of the same class and share the
	 * same identifier.
	 *
	 * @see edu.utah.further.fqe.model.api.domain.Location#equals(java.lang.Object)
	 */
	@Override
	public boolean equals(final Object obj)
	{
		if (obj == this)
		{
			return true;
		}
		if ((obj == null) || (getClass() != obj.getClass()))
		{
			return false;
		}
		final Location other = (Location) obj;
		return new EqualsBuilder().append(getId(), other.getId()).isEquals();
	}

	/**
	 * Hash code based on the identifier only, consistent with equals. The
	 * commons-lang builder is retained so existing hash values are unchanged.
	 *
	 * @see edu.utah.further.fqe.model.api.domain.Location#hashCode()
	 */
	@Override
	public int hashCode()
	{
		return new HashCodeBuilder().append(getId()).toHashCode();
	}

	/**
	 * Reflective dump of all fields, for debugging.
	 *
	 * @see edu.utah.further.fqe.model.api.domain.Location#toString()
	 */
	@Override
	public String toString()
	{
		return ReflectionToStringBuilder.toString(this);
	}
}
| |
package de.dhbw.sort.util;
import de.dhbw.sort.algorithms.SortingAlgorithm;
import de.dhbw.sort.util.AlgorithmCommand.Direction;
import de.dhbw.sort.visualize.SplitGraphics;
import java.util.Arrays;
public class OutOfPlaceAlgorithmHelper extends AbstractAlgorithmHelper {
private int[] output;
private int[] graphicsOutput;
private int graphMoves = 0;
private int graphComp = 0;
private SplitGraphics splitScreen;
/**
 * Creates a helper for out-of-place sorting algorithms: the main array is
 * visualized on sub-screen 0 and the (initially empty) output array on
 * sub-screen 1.
 *
 * @param splitGraphics split-screen drawing surface with two sub-screens
 * @param theArray      the values to sort
 * @param sort          the algorithm to drive; it is wired back to this helper
 */
public OutOfPlaceAlgorithmHelper(SplitGraphics splitGraphics, int[] theArray, SortingAlgorithm sort) {
super(splitGraphics, theArray);
this.sort = sort;
this.sort.setHelper(this);
this.splitScreen = splitGraphics;
// Output starts as all zeros; graphicsOutput mirrors it for drawing.
this.output = new int[values.length];
this.graphicsOutput = Arrays.copyOf(output, output.length);
// Bar height per value unit must be computed before the first draw.
this.height = getHeight();
this.drawValues();
splitGraphics.addFrame();
}
/**
 * Pixel height of one value unit: the drawable height of a single sub-screen
 * (total height over screen count, minus the info-bar height) divided by the
 * largest value {@code big}.
 * NOTE(review): if getHeight() and getAmountOfScreens() are both ints, the
 * first division truncates - confirm that is intended.
 */
private float getHeight() {
return (splitScreen.getHeight() / splitScreen.getAmountOfScreens() - liableHeight) / big;
}
/**
 * Repaints both sub-screens from scratch: a black background, then white
 * bars for the main array (screen 0) and the output array (screen 1).
 */
public void drawValues() {
    splitScreen.drawBackground(0, 0, 0);
    splitScreen.fill(255, 255, 255);
    for (int index = 0; index < graphicsValues.length; index++) {
        float barHeight = height * graphicsValues[index];
        splitScreen.drawRect(index * width, (splitScreen.getHeight(0) - barHeight), width, barHeight, 0);
    }
    for (int index = 0; index < graphicsOutput.length; index++) {
        float barHeight = height * graphicsOutput[index];
        splitScreen.drawRect(index * width, (splitScreen.getHeight(1) - barHeight), width, barHeight, 1);
    }
}
/**
 * Erases the bar at the given index by painting a black column over it, then
 * restores the default white fill.
 *
 * @param index bar index to erase
 * @param i     target screen: 0 = main array, 1 = output array
 */
private void blackout(int index, int i) {
    screen.fill(0, 0, 0);
    if (i == 0) {
        splitScreen.drawRect(index * width, liableHeight, width, splitScreen.getHeight(0) - liableHeight, 0);
    } else {
        // FIX: the erase rectangle on the output screen must use that
        // screen's own height; it previously used getHeight(0).
        splitScreen.drawRect(index * width, liableHeight, width, splitScreen.getHeight(1) - liableHeight, 1);
    }
    // Restore the fill colour so the next draw operation is not black.
    screen.fill(255, 255, 255);
}
/**
 * Clears the column at {@code index} and redraws its bar from the backing
 * graphics array of the selected screen (0 = main, 1 = output). Other
 * screen numbers only clear the column.
 */
protected void reDraw(int index, int i) {
    screen.fill(255, 255, 255);
    blackout(index, i);
    switch (i) {
    case 0:
        splitScreen.drawRect(index * width, (splitScreen.getHeight(0) - (height * graphicsValues[index])), width, height * graphicsValues[index], i);
        break;
    case 1:
        splitScreen.drawRect(index * width, (splitScreen.getHeight(1) - (height * graphicsOutput[index])), width, height * graphicsOutput[index], i);
        break;
    default:
        break;
    }
}
/**
 * Renders the next queued animation command (compare / swap / move / ready):
 * highlights the bars involved, applies the corresponding change to the
 * graphics arrays, waits for the frame to be accepted and finally redraws
 * the touched columns on both screens in the default colour.
 */
public void nextFrame() {
    int firstIndex = 0;
    int secondIndex = 0;
    drawInfo();
    int directionOrdinal;
    if (!ready && mov.peek() != null) {
        switch (mov.poll()) {
        case COMPARE:
            graphComp++;
            firstIndex = indexes.poll();
            secondIndex = indexes.poll();
            directionOrdinal = indexes.poll();
            switch (Direction.values()[directionOrdinal]) {
            case IN_MAIN:
                // Highlight both compared bars on the main screen in green.
                blackout(firstIndex, 0);
                blackout(secondIndex, 0);
                splitScreen.fill(0, 255, 0);
                splitScreen.drawRect(firstIndex * width,
                        (splitScreen.getHeight(0) - (height * graphicsValues[firstIndex])), width,
                        (height * graphicsValues[firstIndex]), 0);
                splitScreen.drawRect(secondIndex * width,
                        (splitScreen.getHeight(0) - (height * graphicsValues[secondIndex])), width,
                        (height * graphicsValues[secondIndex]), 0);
                break;
            case IN_OUTPUT:
                // Highlight both compared bars on the output screen in green.
                blackout(firstIndex, 1);
                blackout(secondIndex, 1);
                splitScreen.fill(0, 255, 0);
                splitScreen.drawRect(firstIndex * width,
                        (splitScreen.getHeight(1) - (height * graphicsOutput[firstIndex])), width,
                        (height * graphicsOutput[firstIndex]), 1);
                splitScreen.drawRect(secondIndex * width,
                        (splitScreen.getHeight(1) - (height * graphicsOutput[secondIndex])), width,
                        (height * graphicsOutput[secondIndex]), 1);
                break;
            case MAIN_TO_OUTPUT:
                // TODO visualize cross-array comparisons.
                break;
            case OUTPUT_TO_MAIN:
                // TODO visualize cross-array comparisons.
                break;
            }
            break;
        case SWAP:
            graphMoves += 3;
            firstIndex = indexes.poll();
            secondIndex = indexes.poll();
            directionOrdinal = indexes.poll();
            // TODO: honour directionOrdinal and apply the highlight/swap to the
            // correct array pair (see the MOVE case below); currently only the
            // main-array representation is swapped.
            int temp = graphicsValues[firstIndex];
            graphicsValues[firstIndex] = graphicsValues[secondIndex];
            graphicsValues[secondIndex] = temp;
            break;
        case MOVE:
            graphMoves++;
            firstIndex = indexes.poll();
            secondIndex = indexes.poll();
            directionOrdinal = indexes.poll();
            // TODO: IN_MAIN / IN_OUTPUT only highlight the bars; the graphics
            // arrays are not yet updated for in-array moves.
            switch (Direction.values()[directionOrdinal]) {
            case IN_MAIN:
                blackout(firstIndex, 0);
                blackout(secondIndex, 0);
                splitScreen.fill(255, 0, 0);
                splitScreen.drawRect(firstIndex * width,
                        (splitScreen.getHeight(0) - (height * graphicsValues[firstIndex])), width,
                        (height * graphicsValues[firstIndex]), 0);
                splitScreen.drawRect(secondIndex * width,
                        (splitScreen.getHeight(0) - (height * graphicsValues[secondIndex])), width,
                        (height * graphicsValues[secondIndex]), 0);
                break;
            case IN_OUTPUT:
                blackout(firstIndex, 1);
                blackout(secondIndex, 1);
                splitScreen.fill(255, 0, 0);
                // FIX: these rectangles previously used graphicsValues heights
                // and screen 0 although the move happens in the output array.
                splitScreen.drawRect(firstIndex * width,
                        (splitScreen.getHeight(1) - (height * graphicsOutput[firstIndex])), width,
                        (height * graphicsOutput[firstIndex]), 1);
                splitScreen.drawRect(secondIndex * width,
                        (splitScreen.getHeight(1) - (height * graphicsOutput[secondIndex])), width,
                        (height * graphicsOutput[secondIndex]), 1);
                break;
            case MAIN_TO_OUTPUT:
                splitScreen.fill(255, 0, 0);
                splitScreen.drawRect(firstIndex * width,
                        (splitScreen.getHeight(0) - (height * graphicsValues[firstIndex])), width,
                        (height * graphicsValues[firstIndex]), 0);
                splitScreen.drawRect(secondIndex * width,
                        (splitScreen.getHeight(1) - (height * graphicsOutput[secondIndex])), width,
                        (height * graphicsOutput[secondIndex]), 1);
                // Mirror the data move in the graphics arrays.
                graphicsOutput[secondIndex] = graphicsValues[firstIndex];
                graphicsValues[firstIndex] = 0;
                break;
            case OUTPUT_TO_MAIN:
                blackout(secondIndex, 0);
                blackout(firstIndex, 1);
                splitScreen.fill(255, 0, 0);
                splitScreen.drawRect(firstIndex * width,
                        (splitScreen.getHeight(1) - (height * graphicsOutput[firstIndex])), width,
                        (height * graphicsOutput[firstIndex]), 1);
                splitScreen.drawRect(secondIndex * width,
                        (splitScreen.getHeight(0) - (height * graphicsValues[secondIndex])), width,
                        (height * graphicsValues[secondIndex]), 0);
                // Mirror the data move in the graphics arrays.
                graphicsValues[secondIndex] = graphicsOutput[firstIndex];
                graphicsOutput[firstIndex] = 0;
                break;
            }
            break;
        case READY:
            this.ready = true;
            // FIX: previously fell through to default (harmless but misleading).
            break;
        default:
            break;
        }
        // Busy-wait until the frame is accepted by the recorder.
        while (!this.splitScreen.addFrame())
            ;
        // Repaint the touched columns on both screens in the default colour.
        reDraw(firstIndex, 0);
        reDraw(firstIndex, 1);
        reDraw(secondIndex, 0);
        reDraw(secondIndex, 1);
    }
}
/**
 * Records a swap for the animation queue and applies it to the backing data.
 * The elements are exchanged between the arrays identified by
 * {@code fromArray} and {@code toArray}.
 *
 * @param firstIndex  index of the first element (in {@code fromArray})
 * @param fromArray   array holding the first element
 * @param secondIndex index of the second element (in {@code toArray})
 * @param toArray     array holding the second element
 */
public void swap(int firstIndex, ArrayType fromArray, int secondIndex, ArrayType toArray) {
    moves += 3;
    AlgorithmCommand.Direction direction = calculateDirection(fromArray, toArray);
    mov.add(Moves.SWAP);
    indexes.add(firstIndex);
    indexes.add(secondIndex);
    indexes.add(direction.ordinal());
    // FIX: the swap previously always mutated the main array, even when the
    // direction indicated the output array or a cross-array swap. Handle all
    // four directions, consistent with compare() and move().
    int temp;
    switch (direction) {
    case IN_MAIN:
        temp = values[firstIndex];
        values[firstIndex] = values[secondIndex];
        values[secondIndex] = temp;
        break;
    case IN_OUTPUT:
        temp = output[firstIndex];
        output[firstIndex] = output[secondIndex];
        output[secondIndex] = temp;
        break;
    case MAIN_TO_OUTPUT:
        temp = values[firstIndex];
        values[firstIndex] = output[secondIndex];
        output[secondIndex] = temp;
        break;
    case OUTPUT_TO_MAIN:
        temp = output[firstIndex];
        output[firstIndex] = values[secondIndex];
        values[secondIndex] = temp;
        break;
    }
}
/**
 * Compares two elements and records the comparison for graphics playback.
 *
 * Completes the previously TODO-marked cross-array cases, which fell
 * through to {@code return 0} and made main-vs-output comparisons
 * silently report equality.
 *
 * @param firstIndex  index of the first element (in {@code fromArray})
 * @param fromArray   array holding the first element
 * @param secondIndex index of the second element (in {@code toArray})
 * @param toArray     array holding the second element
 * @return a negative, zero or positive value as the first element is
 *         smaller than, equal to or greater than the second
 */
public int compare(int firstIndex, ArrayType fromArray, int secondIndex, ArrayType toArray) {
    comparisons++;
    AlgorithmCommand.Direction direction = calculateDirection(fromArray, toArray);
    // Record the command so the playback/graphics side can replay it.
    mov.add(Moves.COMPARE);
    indexes.add(firstIndex);
    indexes.add(secondIndex);
    indexes.add(direction.ordinal()); // Adding the direction information
    // NOTE: plain subtraction can overflow for values near Integer.MIN/MAX;
    // kept for compatibility with existing callers that may rely on magnitude.
    switch (direction) {
    case IN_MAIN:
        return (values[firstIndex] - values[secondIndex]);
    case IN_OUTPUT:
        return (output[firstIndex] - output[secondIndex]);
    case MAIN_TO_OUTPUT:
        // Previously unimplemented: first element in main, second in output.
        return (values[firstIndex] - output[secondIndex]);
    case OUTPUT_TO_MAIN:
        // Previously unimplemented: first element in output, second in main.
        return (output[firstIndex] - values[secondIndex]);
    default:
        return 0;
    }
}
/**
 * Moves one element between positions and/or arrays, zeroing the source
 * slot, and records the operation for graphics playback.
 *
 * Previously same-array moves (IN_MAIN / IN_OUTPUT) were recorded but the
 * data arrays were left untouched; they are now applied as well.
 *
 * @param fromIndex index to move from (in {@code fromArray})
 * @param fromArray source array
 * @param toIndex   index to move to (in {@code toArray})
 * @param toArray   destination array
 */
public void move(int fromIndex, ArrayType fromArray, int toIndex, ArrayType toArray) {
    moves++;
    AlgorithmCommand.Direction direction = calculateDirection(fromArray, toArray);
    // Record the command so the playback/graphics side can replay it.
    mov.add(Moves.MOVE);
    indexes.add(fromIndex);
    indexes.add(toIndex);
    indexes.add(direction.ordinal()); // Adding the direction information
    switch (direction) {
    case IN_MAIN:
        values[toIndex] = values[fromIndex];
        values[fromIndex] = 0;
        break;
    case IN_OUTPUT:
        output[toIndex] = output[fromIndex];
        output[fromIndex] = 0;
        break;
    case MAIN_TO_OUTPUT:
        output[toIndex] = values[fromIndex];
        values[fromIndex] = 0;
        break;
    case OUTPUT_TO_MAIN:
        values[toIndex] = output[fromIndex];
        output[fromIndex] = 0;
        break;
    default:
        break;
    }
}
/**
 * Resets the algorithm with a new input array, reallocating the auxiliary
 * output array to match its length before delegating to the superclass.
 *
 * @param theArray the new input values
 */
@Override
public void resetAlgorithm(int[] theArray) {
    output = new int[theArray.length];
    super.resetAlgorithm(theArray);
}
/**
 * Draws the textual info strip: algorithm name and value count on the
 * first line, comparison and move counters below it.
 */
@Override
public void drawInfo() {
// Clear a 40px-high strip at the top with black before drawing the text.
screen.fill(0, 0, 0);
screen.drawRect(0, 0, splitScreen.getWidth(), 40);
// Switch to white for the text.
// NOTE(review): the fill colour is set on `splitScreen` while the first
// text line is drawn via `screen` -- confirm both handles refer to the
// same drawing surface.
splitScreen.fill(255, 255, 255);
screen.text(algorithmName + " ValueCount: " + this.graphicsValues.length, 0, 10);
splitScreen.text("Comparisons: " + graphComp, 0, 20);
splitScreen.text("Moves: " + graphMoves, 0, 30);
}
/**
 * Maps a (source array, destination array) pair onto the corresponding
 * playback direction.
 *
 * @param fromArray the array the operation reads from
 * @param toArray   the array the operation writes to
 * @return IN_MAIN / IN_OUTPUT for same-array operations, otherwise
 *         MAIN_TO_OUTPUT or OUTPUT_TO_MAIN depending on the source
 */
protected AlgorithmCommand.Direction calculateDirection(ArrayType fromArray, ArrayType toArray) {
    final boolean startsInMain = (fromArray == ArrayType.MAIN);
    if (fromArray == toArray) {
        return startsInMain
                ? AlgorithmCommand.Direction.IN_MAIN
                : AlgorithmCommand.Direction.IN_OUTPUT;
    }
    return startsInMain
            ? AlgorithmCommand.Direction.MAIN_TO_OUTPUT
            : AlgorithmCommand.Direction.OUTPUT_TO_MAIN;
}
/**
 * Resets the graphics state for a new run: clears the drawing counters,
 * snapshots the values to render, scales the bar geometry to the largest
 * value, redraws, and wakes any thread waiting on this object.
 *
 * @param peek the values that will be visualised
 */
public synchronized void resetGraphics(int[] peek) {
    // Reset the per-run drawing statistics.
    graphMoves = 0;
    graphComp = 0;
    graphSwaps = 0;
    // Snapshot the input and clear the auxiliary (output) bars.
    this.graphicsValues = Arrays.copyOf(peek, peek.length);
    this.graphicsOutput = new int[peek.length];
    // Largest value drives the vertical scaling; never drops below 0.
    int largest = 0;
    for (final int value : this.graphicsValues) {
        largest = Math.max(largest, value);
    }
    this.big = largest;
    width = screen.getWidth() / graphicsValues.length;
    height = getHeight();
    this.drawValues();
    this.ready = false;
    this.notify(); // wake a thread waiting for the graphics reset
}
/**
 * Identifies which of the two arrays an index refers to: MAIN is the
 * primary working array ({@code values}), OUTPUT is the auxiliary array
 * ({@code output}).
 */
public enum ArrayType {
MAIN, OUTPUT
}
}
| |
/* $Id$ */
package ibis.io;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
/**
* The <code>SunSerializationInputStream</code> class is the "glue" between
* <code>SerializationInputStream</code> and <code>ObjectInputStream</code>.
* It provides implementations for the abstract methods in
* <code>SerializationInputStream</code>, built on methods in
* <code>ObjectInputStream</code>.
*/
public final class SunSerializationInputStream
        extends java.io.ObjectInputStream implements SerializationInput {

    /** The underlying stream; kept so {@link #realClose()} can close it. */
    private final InputStream in;

    /**
     * Constructor. Calls constructor of superclass.
     *
     * @param s the underlying <code>InputStream</code>
     * @exception IOException when an IO error occurs.
     */
    public SunSerializationInputStream(InputStream s) throws IOException {
        super(new DummyInputStream(s));
        this.in = s;
    }

    /**
     * Constructor. Calls constructor of superclass.
     *
     * @param s the underlying <code>DataInputStream</code>
     * @exception IOException when an IO error occurs.
     */
    public SunSerializationInputStream(DataInputStream s) throws IOException {
        super(new DummyInputStream(s));
        this.in = s;
    }

    /**
     * Returns the name of the current serialization implementation: "sun".
     *
     * @return the name of the current serialization implementation.
     */
    public String serializationImplName() {
        return "sun";
    }

    /**
     * Sun serialization must be re-initialized on a new connection.
     *
     * @return always <code>true</code>.
     */
    public boolean reInitOnNewConnection() {
        return true;
    }

    /**
     * Dummy reset. For Ibis, we want to be able to remove the object table in
     * a SerializationInputStream.
     * With Sun serialization, this is accomplished by sending a RESET to it.
     * For Ibis serialization, we cannot do this because we can only send
     * a RESET when a handle is expected.
     */
    public void clear() {
        // Not needed for Sun serialization.
    }

    /**
     * No statistics are printed for the Sun serialization version.
     */
    public void statistics() {
        // No statistics for Sun serialization.
    }

    /**
     * Verifies that a received array has exactly the requested length.
     *
     * @param actual the length of the array that was actually read
     * @param expected the number of elements the caller asked for
     * @exception ArrayIndexOutOfBoundsException when the lengths differ.
     */
    private static void checkLength(int actual, int expected) {
        if (actual != expected) {
            throw new ArrayIndexOutOfBoundsException(
                    "Received sub array has wrong len");
        }
    }

    /**
     * Read a slice of an array of booleans. A consequence of the Ibis
     * <code>ReadMessage</code> interface is that a copy has to be made here,
     * because <code>ObjectInputStream</code> has no mechanism to read an array
     * "in place".
     *
     * @param ref the array to be read
     * @param off offset in the array from where reading starts
     * @param len the number of elements to be read
     *
     * @exception IOException when something is wrong.
     */
    public void readArray(boolean[] ref, int off, int len) throws IOException {
        try {
            boolean[] temp = (boolean[]) readObject();
            checkLength(temp.length, len);
            System.arraycopy(temp, 0, ref, off, len);
        } catch (ClassNotFoundException f) {
            throw new SerializationError("class 'boolean[]' not found", f);
        }
    }

    /**
     * Read a slice of an array of bytes.
     * See {@link #readArray(boolean[], int, int)} for a description.
     * A raw read()-loop fast path would be faster here, but the data was
     * written such that it must be read back with readObject(). (Ceriel)
     */
    public void readArray(byte[] ref, int off, int len) throws IOException {
        try {
            byte[] temp = (byte[]) readObject();
            checkLength(temp.length, len);
            System.arraycopy(temp, 0, ref, off, len);
        } catch (ClassNotFoundException f) {
            throw new SerializationError("class 'byte[]' not found", f);
        }
    }

    /**
     * Reads bytes into the given buffer, filling it from its current
     * position up to its limit.
     *
     * @param b the buffer to fill
     * @exception IOException when something is wrong.
     */
    public void readByteBuffer(ByteBuffer b) throws IOException {
        int len = b.remaining(); // equivalent to limit() - position()
        byte[] temp;
        try {
            temp = (byte[]) readObject();
            checkLength(temp.length, len);
        } catch (ClassNotFoundException f) {
            throw new SerializationError("class 'byte[]' not found", f);
        }
        b.put(temp);
    }

    /**
     * Read a slice of an array of chars.
     * See {@link #readArray(boolean[], int, int)} for a description.
     */
    public void readArray(char[] ref, int off, int len) throws IOException {
        try {
            char[] temp = (char[]) readObject();
            checkLength(temp.length, len);
            System.arraycopy(temp, 0, ref, off, len);
        } catch (ClassNotFoundException f) {
            throw new SerializationError("class 'char[]' not found", f);
        }
    }

    /**
     * Read a slice of an array of shorts.
     * See {@link #readArray(boolean[], int, int)} for a description.
     */
    public void readArray(short[] ref, int off, int len) throws IOException {
        try {
            short[] temp = (short[]) readObject();
            checkLength(temp.length, len);
            System.arraycopy(temp, 0, ref, off, len);
        } catch (ClassNotFoundException f) {
            throw new SerializationError("class 'short[]' not found", f);
        }
    }

    /**
     * Read a slice of an array of ints.
     * See {@link #readArray(boolean[], int, int)} for a description.
     */
    public void readArray(int[] ref, int off, int len) throws IOException {
        try {
            int[] temp = (int[]) readObject();
            checkLength(temp.length, len);
            System.arraycopy(temp, 0, ref, off, len);
        } catch (ClassNotFoundException f) {
            throw new SerializationError("class 'int[]' not found", f);
        }
    }

    /**
     * Read a slice of an array of longs.
     * See {@link #readArray(boolean[], int, int)} for a description.
     */
    public void readArray(long[] ref, int off, int len) throws IOException {
        try {
            long[] temp = (long[]) readObject();
            checkLength(temp.length, len);
            System.arraycopy(temp, 0, ref, off, len);
        } catch (ClassNotFoundException f) {
            throw new SerializationError("class 'long[]' not found", f);
        }
    }

    /**
     * Read a slice of an array of floats.
     * See {@link #readArray(boolean[], int, int)} for a description.
     */
    public void readArray(float[] ref, int off, int len) throws IOException {
        try {
            float[] temp = (float[]) readObject();
            checkLength(temp.length, len);
            System.arraycopy(temp, 0, ref, off, len);
        } catch (ClassNotFoundException f) {
            throw new SerializationError("class 'float[]' not found", f);
        }
    }

    /**
     * Read a slice of an array of doubles.
     * See {@link #readArray(boolean[], int, int)} for a description.
     */
    public void readArray(double[] ref, int off, int len) throws IOException {
        try {
            double[] temp = (double[]) readObject();
            checkLength(temp.length, len);
            System.arraycopy(temp, 0, ref, off, len);
        } catch (ClassNotFoundException f) {
            throw new SerializationError("class 'double[]' not found", f);
        }
    }

    /**
     * Read a slice of an array of Objects.
     * See {@link #readArray(boolean[], int, int)} for a description.
     */
    public void readArray(Object[] ref, int off, int len) throws IOException,
            ClassNotFoundException {
        Object[] temp = (Object[]) readObject();
        checkLength(temp.length, len);
        System.arraycopy(temp, 0, ref, off, len);
    }

    // The following overloads read a complete array of the given type.

    public void readArray(boolean[] ref) throws IOException {
        readArray(ref, 0, ref.length);
    }

    public void readArray(byte[] ref) throws IOException {
        readArray(ref, 0, ref.length);
    }

    public void readArray(short[] ref) throws IOException {
        readArray(ref, 0, ref.length);
    }

    public void readArray(char[] ref) throws IOException {
        readArray(ref, 0, ref.length);
    }

    public void readArray(int[] ref) throws IOException {
        readArray(ref, 0, ref.length);
    }

    public void readArray(long[] ref) throws IOException {
        readArray(ref, 0, ref.length);
    }

    public void readArray(float[] ref) throws IOException {
        readArray(ref, 0, ref.length);
    }

    public void readArray(double[] ref) throws IOException {
        readArray(ref, 0, ref.length);
    }

    public void readArray(Object[] ref)
            throws IOException, ClassNotFoundException {
        readArray(ref, 0, ref.length);
    }

    /**
     * Reads a <code>String</code> from the stream.
     *
     * @return the string that was read.
     * @exception IOException when something is wrong.
     */
    public String readString() throws IOException {
        try {
            return (String) readObject();
        } catch (ClassNotFoundException e) {
            throw new SerializationError("class 'String' not found", e);
        }
    }

    /**
     * Closes this stream and the underlying stream as well.
     *
     * @exception IOException when something is wrong.
     */
    public void realClose() throws IOException {
        close();
        in.close();
    }
}
| |
/*
* The compilation of software known as V4L4J is distributed under the
* following terms:
*
* Copyright (c) 2015 Christopher Friedt. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
package org.v4l4j;
import java.io.*;
import java.util.*;
import org.jruby.ext.posix.*;
import com.sun.jna.*;
/**
 * V4L2 ioctl request codes, mirroring a subset of the VIDIOC_ macros from
 * the Linux kernel header videodev2.h, together with the request-encoding
 * helpers (_IO, _IOR, _IOW, _IOWR) needed to compute their integer values.
 *
 * NOTE(review): member order is significant. The `to`/`fro` maps must be
 * declared before the first static block that populates them, and
 * `handler`/`posix` are assigned in the second static block; reordering
 * members can break class initialization.
 */
public enum Ioctl {
    VIDIOC_QUERYCAP,
    VIDIOC_RESERVED,
    VIDIOC_ENUM_FMT,
    VIDIOC_G_FMT,
    VIDIOC_S_FMT,
    VIDIOC_REQBUFS,
    VIDIOC_QUERYBUF,
    VIDIOC_QBUF,
    VIDIOC_DQBUF,
    VIDIOC_STREAMON,
    VIDIOC_STREAMOFF,
    ;

    /** Returns the encoded ioctl request value for this constant. */
    public int toInteger() {
        return toInteger( this );
    }

    // Bidirectional mapping between encoded request values and constants.
    static final TreeMap<Integer,Ioctl> to = new TreeMap<Integer,Ioctl>();
    static final TreeMap<Ioctl,Integer> fro = new TreeMap<Ioctl,Integer>();

    /** Returns the encoded ioctl request value for the given constant. */
    public static int toInteger( Ioctl e ) {
        return fro.get( e );
    }

    /** Returns the constant for an encoded request value, or null when unknown. */
    public static Ioctl toEnum( int e ) {
        return to.get( e );
    }

    static {
        // Encode each request the way videodev2.h does; the size argument is
        // taken from the JNA structure that models the corresponding kernel
        // struct, and the request numbers match the kernel macros.
        to.put( _IOR( 'V', 0, ( new V4L4JCapability.ByReference() ).size() ), VIDIOC_QUERYCAP );
        to.put( _IO( 'V', 1 ), VIDIOC_RESERVED );
        to.put( _IOWR( 'V', 2, ( new V4L4JFormatDescriptor.ByReference() ).size() ), VIDIOC_ENUM_FMT );
        to.put( _IOWR( 'V', 4, ( new V4L4JFormat.ByReference() ).size() ), VIDIOC_G_FMT );
        to.put( _IOWR('V', 5, ( new V4L4JFormat.ByReference() ).size() ), VIDIOC_S_FMT );
        to.put( _IOWR( 'V', 8, ( new V4L4JRequestBuffers.ByReference() ).size() ), VIDIOC_REQBUFS );
        to.put( _IOWR( 'V', 9, ( new V4L4JBuffer.ByReference() ).size() ), VIDIOC_QUERYBUF );
        to.put( _IOWR( 'V', 15, ( new V4L4JBuffer.ByReference() ).size() ), VIDIOC_QBUF );
        to.put( _IOWR( 'V', 17, ( new V4L4JBuffer.ByReference() ).size() ), VIDIOC_DQBUF );
        to.put( _IOW( 'V', 18, ( new IntPointer.ByReference() ).size() ), VIDIOC_STREAMON );
        to.put( _IOW( 'V', 19, ( new IntPointer.ByReference() ).size() ), VIDIOC_STREAMOFF );
        // Build the reverse lookup from the forward one.
        for( Map.Entry<Integer,Ioctl> e: to.entrySet() ) {
            fro.put( e.getValue(), e.getKey() );
        }
    }

    //
    // To extend the usual integer / enum mapping to also support NativeLong queries
    //

    // Mask used to treat the encoded 32-bit request as an unsigned value.
    private static final long unsigned_int_mask = (1L << Integer.SIZE) - 1;

    /** Returns this constant's request value as an unsigned NativeLong. */
    public NativeLong toNativeLong() {
        return toNativeLong( this );
    }

    /** Returns the given constant's request value as an unsigned NativeLong. */
    public static NativeLong toNativeLong( Ioctl e ) {
        return new NativeLong( unsigned_int_mask & fro.get( e ) );
    }

    /** Returns the constant for an encoded NativeLong value, or null when unknown. */
    public static Ioctl toEnum( NativeLong e ) {
        return to.get( e.intValue() );
    }

    //
    // V4L2 IOCTLS
    //

    // POSIX bindings used to issue the actual ioctl system call; assigned in
    // the static block further down.
    static final POSIXHandler handler;
    static final POSIX posix;

    /**
     * Issues this ioctl on the given file descriptor.
     *
     * @param fd the device file descriptor
     * @param arg ioctl-specific argument(s), passed through to the binding
     * @return the non-negative return value of the underlying ioctl call
     * @throws IOException when the underlying call returns a negative value;
     *         the message includes the return value and errno
     */
    public int ioctl( FileDescriptor fd, Object... arg )
    throws IOException
    {
        int r = -1;
        r = posix.ioctl( fd, this.toNativeLong(), arg );
        if ( r < 0 ) {
            throw new IOException( "ioctl " + this + " failed with return value " + r + " and errno " + posix.errno() );
        }
        return r;
    }

    static {
        handler = new V4L4JPOSIXHandler();
        posix = POSIXFactory.getPOSIX( handler, true );
    }

    // Field widths of the encoded ioctl request (see the kernel's ioctl.h):
    // number, type, size and direction are packed into one 32-bit value.
    static final int _IOC_NRBITS = 8;
    static final int _IOC_TYPEBITS = 8;
    static final int _IOC_SIZEBITS = 14;
    static final int _IOC_DIRBITS = 2;
    static final int _IOC_NRMASK = ((1 << _IOC_NRBITS)-1);
    static final int _IOC_TYPEMASK = ((1 << _IOC_TYPEBITS)-1);
    static final int _IOC_SIZEMASK = ((1 << _IOC_SIZEBITS)-1);
    static final int _IOC_DIRMASK = ((1 << _IOC_DIRBITS)-1);
    static final int _IOC_NRSHIFT = 0;
    static final int _IOC_TYPESHIFT = _IOC_NRSHIFT + _IOC_NRBITS;
    static final int _IOC_SIZESHIFT = _IOC_TYPESHIFT + _IOC_TYPEBITS;
    static final int _IOC_DIRSHIFT = _IOC_SIZESHIFT + _IOC_SIZEBITS;
    // Direction flags: no transfer, write (user to kernel), read (kernel to user).
    static final int _IOC_NONE = 0;
    static final int _IOC_WRITE = 1;
    static final int _IOC_READ = 2;

    /** Packs direction, type, number and size into one request value. */
    static int _IOC( int dir, int type, int nr, int size ) {
        int r = 0;
        r |= ( dir & _IOC_DIRMASK ) << _IOC_DIRSHIFT;
        r |= ( type & _IOC_TYPEMASK ) << _IOC_TYPESHIFT;
        r |= ( nr & _IOC_NRMASK ) << _IOC_NRSHIFT;
        r |= ( size & _IOC_SIZEMASK ) << _IOC_SIZESHIFT;
        return r;
    }

    /** Encodes a request that transfers no data. */
    static int _IO( int type, int nr ) {
        return _IOC( _IOC_NONE, type , nr , 0 );
    }

    /** Encodes a request that both writes to and reads from the kernel. */
    static int _IOWR( int type, int nr, int size ) {
        return _IOC( _IOC_READ | _IOC_WRITE, type, nr, size );
    }

    /** Encodes a read-only request. */
    static int _IOR( int type, int nr, int size ) {
        return _IOC( _IOC_READ, type, nr, size );
    }

    /** Encodes a write-only request. */
    static int _IOW( int type, int nr, int size ) {
        return _IOC( _IOC_WRITE, type, nr, size );
    }

    // Reference list of the remaining VIDIOC_ requests not yet wrapped above,
    // kept as documentation for future additions.
    /*
    public static final int VIDIOC_G_FBUF = _IOR('V', 10, struct v4l2_framebuffer)
    public static final int VIDIOC_S_FBUF = _IOW('V', 11, struct v4l2_framebuffer)
    public static final int VIDIOC_OVERLAY = _IOW('V', 14, int)
    public static final int VIDIOC_QBUF = _IOWR('V', 15, struct v4l2_buffer)
    public static final int VIDIOC_EXPBUF = _IOWR('V', 16, struct v4l2_exportbuffer)
    public static final int VIDIOC_DQBUF = _IOWR('V', 17, struct v4l2_buffer)
    public static final int VIDIOC_STREAMON = _IOW('V', 18, int)
    public static final int VIDIOC_STREAMOFF = _IOW('V', 19, int)
    public static final int VIDIOC_G_PARM = _IOWR('V', 21, struct v4l2_streamparm)
    public static final int VIDIOC_S_PARM = _IOWR('V', 22, struct v4l2_streamparm)
    public static final int VIDIOC_G_STD = _IOR('V', 23, v4l2_std_id)
    public static final int VIDIOC_S_STD = _IOW('V', 24, v4l2_std_id)
    public static final int VIDIOC_ENUMSTD = _IOWR('V', 25, struct v4l2_standard)
    public static final int VIDIOC_ENUMINPUT = _IOWR('V', 26, struct v4l2_input)
    public static final int VIDIOC_G_CTRL = _IOWR('V', 27, struct v4l2_control)
    public static final int VIDIOC_S_CTRL = _IOWR('V', 28, struct v4l2_control)
    public static final int VIDIOC_G_TUNER = _IOWR('V', 29, struct v4l2_tuner)
    public static final int VIDIOC_S_TUNER = _IOW('V', 30, struct v4l2_tuner)
    public static final int VIDIOC_G_AUDIO = _IOR('V', 33, struct v4l2_audio)
    public static final int VIDIOC_S_AUDIO = _IOW('V', 34, struct v4l2_audio)
    public static final int VIDIOC_QUERYCTRL = _IOWR('V', 36, struct v4l2_queryctrl)
    public static final int VIDIOC_QUERYMENU = _IOWR('V', 37, struct v4l2_querymenu)
    public static final int VIDIOC_G_INPUT = _IOR('V', 38, int)
    public static final int VIDIOC_S_INPUT = _IOWR('V', 39, int)
    public static final int VIDIOC_G_EDID = _IOWR('V', 40, struct v4l2_edid)
    public static final int VIDIOC_S_EDID = _IOWR('V', 41, struct v4l2_edid)
    public static final int VIDIOC_G_OUTPUT = _IOR('V', 46, int)
    public static final int VIDIOC_S_OUTPUT = _IOWR('V', 47, int)
    public static final int VIDIOC_ENUMOUTPUT = _IOWR('V', 48, struct v4l2_output)
    public static final int VIDIOC_G_AUDOUT = _IOR('V', 49, struct v4l2_audioout)
    public static final int VIDIOC_S_AUDOUT = _IOW('V', 50, struct v4l2_audioout)
    public static final int VIDIOC_G_MODULATOR = _IOWR('V', 54, struct v4l2_modulator)
    public static final int VIDIOC_S_MODULATOR = _IOW('V', 55, struct v4l2_modulator)
    public static final int VIDIOC_G_FREQUENCY = _IOWR('V', 56, struct v4l2_frequency)
    public static final int VIDIOC_S_FREQUENCY = _IOW('V', 57, struct v4l2_frequency)
    public static final int VIDIOC_CROPCAP = _IOWR('V', 58, struct v4l2_cropcap)
    public static final int VIDIOC_G_CROP = _IOWR('V', 59, struct v4l2_crop)
    public static final int VIDIOC_S_CROP = _IOW('V', 60, struct v4l2_crop)
    public static final int VIDIOC_G_JPEGCOMP = _IOR('V', 61, struct v4l2_jpegcompression)
    public static final int VIDIOC_S_JPEGCOMP = _IOW('V', 62, struct v4l2_jpegcompression)
    public static final int VIDIOC_QUERYSTD = _IOR('V', 63, v4l2_std_id)
    public static final int VIDIOC_TRY_FMT = _IOWR('V', 64, struct v4l2_format)
    public static final int VIDIOC_ENUMAUDIO = _IOWR('V', 65, struct v4l2_audio)
    public static final int VIDIOC_ENUMAUDOUT = _IOWR('V', 66, struct v4l2_audioout)
    public static final int VIDIOC_G_PRIORITY = _IOR('V', 67, __u32) // enum v4l2_priority
    public static final int VIDIOC_S_PRIORITY = _IOW('V', 68, __u32) // enum v4l2_priority
    public static final int VIDIOC_G_SLICED_VBI_CAP = _IOWR('V', 69, struct v4l2_sliced_vbi_cap)
    public static final int VIDIOC_LOG_STATUS = _IO('V', 70)
    public static final int VIDIOC_G_EXT_CTRLS = _IOWR('V', 71, struct v4l2_ext_controls)
    public static final int VIDIOC_S_EXT_CTRLS = _IOWR('V', 72, struct v4l2_ext_controls)
    public static final int VIDIOC_TRY_EXT_CTRLS = _IOWR('V', 73, struct v4l2_ext_controls)
    public static final int VIDIOC_ENUM_FRAMESIZES = _IOWR('V', 74, struct v4l2_frmsizeenum)
    public static final int VIDIOC_ENUM_FRAMEINTERVALS = _IOWR('V', 75, struct v4l2_frmivalenum)
    public static final int VIDIOC_G_ENC_INDEX = _IOR('V', 76, struct v4l2_enc_idx)
    public static final int VIDIOC_ENCODER_CMD = _IOWR('V', 77, struct v4l2_encoder_cmd)
    public static final int VIDIOC_TRY_ENCODER_CMD = _IOWR('V', 78, struct v4l2_encoder_cmd)
    // Experimental, meant for debugging, testing and internal use.
    // Only implemented if CONFIG_VIDEO_ADV_DEBUG is defined.
    // You must be root to use these ioctls. Never use these in applications!
    public static final int VIDIOC_DBG_S_REGISTER = _IOW('V', 79, struct v4l2_dbg_register)
    public static final int VIDIOC_DBG_G_REGISTER = _IOWR('V', 80, struct v4l2_dbg_register)
    public static final int VIDIOC_S_HW_FREQ_SEEK = _IOW('V', 82, struct v4l2_hw_freq_seek)
    public static final int VIDIOC_S_DV_TIMINGS = _IOWR('V', 87, struct v4l2_dv_timings)
    public static final int VIDIOC_G_DV_TIMINGS = _IOWR('V', 88, struct v4l2_dv_timings)
    public static final int VIDIOC_DQEVENT = _IOR('V', 89, struct v4l2_event)
    public static final int VIDIOC_SUBSCRIBE_EVENT = _IOW('V', 90, struct v4l2_event_subscription)
    public static final int VIDIOC_UNSUBSCRIBE_EVENT = _IOW('V', 91, struct v4l2_event_subscription)
    // Experimental, the below two ioctls may change over the next couple of kernel
    // versions
    public static final int VIDIOC_CREATE_BUFS = _IOWR('V', 92, struct v4l2_create_buffers)
    public static final int VIDIOC_PREPARE_BUF = _IOWR('V', 93, struct v4l2_buffer)
    // Experimental selection API
    public static final int VIDIOC_G_SELECTION = _IOWR('V', 94, struct v4l2_selection)
    public static final int VIDIOC_S_SELECTION = _IOWR('V', 95, struct v4l2_selection)
    // Experimental, these two ioctls may change over the next couple of kernel
    // versions.
    public static final int VIDIOC_DECODER_CMD = _IOWR('V', 96, struct v4l2_decoder_cmd)
    public static final int VIDIOC_TRY_DECODER_CMD = _IOWR('V', 97, struct v4l2_decoder_cmd)
    // Experimental, these three ioctls may change over the next couple of kernel
    // versions.
    public static final int VIDIOC_ENUM_DV_TIMINGS = _IOWR('V', 98, struct v4l2_enum_dv_timings)
    public static final int VIDIOC_QUERY_DV_TIMINGS = _IOR('V', 99, struct v4l2_dv_timings)
    public static final int VIDIOC_DV_TIMINGS_CAP = _IOWR('V', 100, struct v4l2_dv_timings_cap)
    // Experimental, this ioctl may change over the next couple of kernel
    // versions.
    public static final int VIDIOC_ENUM_FREQ_BANDS = _IOWR('V', 101, struct v4l2_frequency_band)
    // Experimental, meant for debugging, testing and internal use.
    // Never use these in applications!
    public static final int VIDIOC_DBG_G_CHIP_INFO = _IOWR('V', 102, struct v4l2_dbg_chip_info)
    public static final int VIDIOC_QUERY_EXT_CTRL = _IOWR('V', 103, struct v4l2_query_ext_ctrl)
    // Reminder: when adding new ioctls please add support for them to
    // drivers/media/video/v4l2-compat-ioctl32.c as well!
    public static final int BASE_VIDIOC_PRIVATE 192 // 192-255 are private
    */
}
| |
package ml.alternet.discover;
import javax.inject.Inject;
import org.assertj.core.api.Assertions;
import org.jboss.weld.environment.se.Weld;
import org.jboss.weld.environment.se.WeldContainer;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
import ml.alternet.discover.Injection;
/**
 * TestNG tests exercising the {@code @LookupKey} / {@code @Injection.Producer}
 * annotation processing: each nested scenario verifies that CDI injection
 * (through generated producers) and the DiscoveryService both resolve the
 * expected implementation class, with and without variants.
 *
 * The class-level annotations below register H2 as the implementation of H
 * from a class other than H itself (exercised by the G/H scenario).
 */
@LookupKey(forClass = LookupKeyProducerTest.H.class, implClass = LookupKeyProducerTest.H2.class)
@Injection.Producer(forClass = LookupKeyProducerTest.H.class)
public class LookupKeyProducerTest {

    Weld weld;
    WeldContainer container;

    /** Boots a Weld (CDI SE) container before the tests run. */
    @BeforeTest
    public void startCDI() {
        weld = new Weld();
        container = weld.initialize();
    }

    /** Shuts the CDI container down after the tests. */
    @AfterTest
    public void stopCDI() {
        weld.shutdown();
    }

    // ====================================
    // Scenario: the injection point asks for an explicit variant ("var2").

    public static class A {
        B b;
        @Inject
        public A(@Injection.LookupKey(variant = "var2") B b) {
            this.b = b;
        }
    }

    public static interface B {
    }

    @LookupKey(forClass = B.class, variant = "var1")
    @Injection.Producer(forClass = B.class, variant = "var1")
    public static class B1 implements B {
    }

    @LookupKey(forClass = B.class, variant = "var2")
    @Injection.Producer(forClass = B.class, variant = "var2")
    public static class B2 implements B {
    }

    @Test
    public void injectedInstanceWithVariant_ShouldBe_producedWithVariant() {
        A a = container.instance().select(A.class).get();
        // a.b injected by the generated producer
        Assertions.assertThat(a.b).isInstanceOf(B2.class);
    }

    @Test
    public void discoveryService_Should_createInstanceWithVariant() throws InstantiationException,
        IllegalAccessException, ClassNotFoundException {
        // Variant lookups are keyed as "<class name>/<variant>".
        B b1 = DiscoveryService.lookupSingleton(B.class.getName() + "/var1");
        Assertions.assertThat(b1).isInstanceOf(B1.class);
        B b2 = DiscoveryService.lookupSingleton(B.class.getName() + "/var2");
        Assertions.assertThat(b2).isInstanceOf(B2.class);
    }

    // ====================================
    // Scenario: the injection point gives no variant; only D2 declares a
    // producer (with lookupVariant), so it should be selected.

    public static class C {
        D d;
        @Inject
        public C(@Injection.LookupKey D d) {
            this.d = d;
        }
    }

    public static interface D {
    }

    @LookupKey(forClass = D.class, variant = "var1")
    public static class D1 implements D {
    }

    @LookupKey(forClass = D.class, variant = "var2")
    @Injection.Producer(forClass = D.class, lookupVariant = "var2")
    public static class D2 implements D {
    }

    @Test
    public void injectedInstanceWithoutVariant_ShouldBe_producedWithVariant() {
        C c = container.instance().select(C.class).get();
        // c.d injected by the generated producer
        Assertions.assertThat(c.d).isInstanceOf(D2.class);
    }

    // ====================================
    // Scenario: no variant anywhere; the single annotated implementation wins.

    public static class E {
        F f;
        @Inject
        public E(@Injection.LookupKey F f) {
            this.f = f;
        }
    }

    public static interface F {
    }

    @LookupKey(forClass = F.class)
    @Injection.Producer(forClass = F.class)
    public static class F1 implements F {
    }

    public static class F2 implements F {
    }

    @Test
    public void injectedInstanceWithoutVariant_ShouldBe_producedWithoutVariant() {
        E e = container.instance().select(E.class).get();
        // e.f injected by the generated producer
        Assertions.assertThat(e.f).isInstanceOf(F1.class);
    }

    @Test
    public void discoveryService_Should_createInstanceWithoutVariant() throws InstantiationException,
        IllegalAccessException, ClassNotFoundException {
        F f = DiscoveryService.lookupSingleton(F.class);
        Assertions.assertThat(f).isInstanceOf(F1.class);
    }

    // ====================================
    // Scenario: the lookup key for H is declared on the enclosing test class
    // (see the class-level annotations above), not on H or its implementations.

    public static class G {
        H h;
        @Inject
        public G(@Injection.LookupKey H h) {
            this.h = h;
        }
    }

    public static interface H {
    }

    public static class H1 implements H {
    }

    public static class H2 implements H {
    }

    @Test
    public void injectedInstance_ShouldBe_produced() {
        G g = container.instance().select(G.class).get();
        // g.h injected by the generated producer
        Assertions.assertThat(g.h).isInstanceOf(H2.class);
    }

    @Test
    public void discoveryService_Should_findKeyDefinedOnADifferentClass() throws InstantiationException,
        IllegalAccessException, ClassNotFoundException {
        H h = DiscoveryService.lookupSingleton(H.class);
        Assertions.assertThat(h).isInstanceOf(H2.class);
    }

    // ====================================
    // Scenario: repeated annotations on one interface generate several entries,
    // one per variant.

    public static class I {
        J j;
        @Inject
        public I(@Injection.LookupKey(variant = "var2") J j) {
            this.j = j;
        }
    }

    public static class K {
        J j;
        @Inject
        public K(@Injection.LookupKey(variant = "var1") J j) {
            this.j = j;
        }
    }

    @Injection.Producer(forClass = J.class, variant = "var1")
    @Injection.Producer(forClass = J.class, variant = "var2")
    @LookupKey(forClass = J.class, implClass=J1.class, variant = "var1")
    @LookupKey(forClass = J.class, implClass=J2.class, variant = "var2")
    public static interface J {
    }

    public static class J1 implements J {
    }

    public static class J2 implements J {
    }

    @Test
    public void multipleAnnotation_Should_generateEntries() throws InstantiationException, IllegalAccessException, ClassNotFoundException {
        I i = container.instance().select(I.class).get();
        // i.j injected by the generated producer
        Assertions.assertThat(i.j).isInstanceOf(J2.class);
        K k = container.instance().select(K.class).get();
        // k.j injected by the generated producer
        Assertions.assertThat(k.j).isInstanceOf(J1.class);
        // NOTE(review): these lookups use getCanonicalName() while the B tests
        // above use getName(); for nested types the two differ ('.' vs '$') --
        // confirm DiscoveryService accepts both forms.
        J j1 = DiscoveryService.lookupSingleton(J.class.getCanonicalName() + "/var1");
        Assertions.assertThat(j1).isInstanceOf(J1.class);
        J j2 = DiscoveryService.lookupSingleton(J.class.getCanonicalName() + "/var2");
        Assertions.assertThat(j2).isInstanceOf(J2.class);
    }
}
| |
package com.bitdubai.fermat_ccp_plugin.layer.network_service.crypto_addresses.developer.bitdubai.version_1.network_service;
import com.bitdubai.fermat_api.layer.all_definition.crypto.asymmetric.AsymmectricCryptography;
import com.bitdubai.fermat_api.layer.all_definition.crypto.asymmetric.ECCKeyPair;
import com.bitdubai.fermat_api.layer.all_definition.enums.Plugins;
import com.bitdubai.fermat_ccp_plugin.layer.network_service.crypto_addresses.developer.bitdubai.version_1.network_service.exceptions.CantInsertRecordDatabaseException;
import com.bitdubai.fermat_ccp_plugin.layer.network_service.crypto_addresses.developer.bitdubai.version_1.network_service.exceptions.CantReadRecordDatabaseException;
import com.bitdubai.fermat_ccp_plugin.layer.network_service.crypto_addresses.developer.bitdubai.version_1.network_service.exceptions.CantUpdateRecordDatabaseException;
import com.bitdubai.fermat_ccp_plugin.layer.network_service.crypto_addresses.developer.bitdubai.version_1.network_service.database.IncomingMessageDAO;
import com.bitdubai.fermat_ccp_plugin.layer.network_service.crypto_addresses.developer.bitdubai.version_1.network_service.database.OutgoingMessageDAO;
import com.bitdubai.fermat_ccp_plugin.layer.network_service.crypto_addresses.developer.bitdubai.version_1.network_service.database.CryptoAddressesNetworkServiceDatabaseConstants;
import com.bitdubai.fermat_p2p_api.layer.p2p_communication.CantSendMessageException;
import com.bitdubai.fermat_p2p_api.layer.p2p_communication.ConnectionStatus;
import com.bitdubai.fermat_p2p_api.layer.p2p_communication.Message;
import com.bitdubai.fermat_p2p_api.layer.p2p_communication.MessagesStatus;
import com.bitdubai.fermat_p2p_api.layer.p2p_communication.ServiceToServiceOnlineConnection;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.ErrorManager;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.UnexpectedPluginExceptionSeverity;
import java.util.List;
import java.util.Observable;
/**
* Created by Leon Acosta - (laion.cj91@gmail.com) on 15/09/15.
*
* @version 1.0
* @since Java JDK 1.7
*/
public class CryptoAddressesNetworkServiceRemoteAgent extends Observable {

    /*
     * Sleep time between read/send polling iterations (2000 milliseconds).
     */
    private static final long SLEEP_TIME = 2000;

    /**
     * The online connection used to read incoming and send outgoing messages.
     */
    private ServiceToServiceOnlineConnection serviceToServiceOnlineConnection;

    /**
     * Error manager used to report unexpected exceptions.
     */
    private ErrorManager errorManager;

    /**
     * DAO for persisting received messages (incoming_messages table).
     */
    private IncomingMessageDAO incomingMessageDao;

    /**
     * DAO holding messages waiting to be sent.
     */
    private OutgoingMessageDAO outgoingMessageDao;

    /**
     * Whether the worker threads should keep looping.
     * NOTE(review): this flag is written and read from different threads but
     * is not volatile -- visibility of updates is not guaranteed; consider
     * changing it to a {@code volatile boolean}.
     */
    private Boolean running;

    /**
     * Thread that receives and processes incoming messages.
     */
    private Thread toReceive;

    /**
     * Thread that sends pending outgoing messages.
     */
    private Thread toSend;

    /**
     * Key pair of the plugin root; its private key is used to decrypt
     * incoming message content.
     */
    private ECCKeyPair eccKeyPair;

    /**
     * Public key of the remote network service; used to verify the signature
     * of incoming messages.
     */
    private String remoteNetworkServicePublicKey;
    /**
     * Constructor with parameters. Builds (but does not start) the two worker
     * threads; call {@link #start()} to begin processing.
     *
     * @param eccKeyPair                       key pair of the plugin root (private key decrypts incoming messages)
     * @param remoteNetworkServicePublicKey    public key used to verify incoming message signatures
     * @param serviceToServiceOnlineConnection the connection to read from and send through
     * @param errorManager                     instance used to report unexpected failures
     * @param incomingMessageDao               DAO for persisting received messages
     * @param outgoingMessageDao               DAO holding messages waiting to be sent
     */
    public CryptoAddressesNetworkServiceRemoteAgent(ECCKeyPair eccKeyPair, String remoteNetworkServicePublicKey, ServiceToServiceOnlineConnection serviceToServiceOnlineConnection, ErrorManager errorManager, IncomingMessageDAO incomingMessageDao, OutgoingMessageDAO outgoingMessageDao) {
        super();
        this.eccKeyPair = eccKeyPair;
        this.remoteNetworkServicePublicKey = remoteNetworkServicePublicKey;
        this.serviceToServiceOnlineConnection = serviceToServiceOnlineConnection;
        this.errorManager = errorManager;
        this.running = Boolean.FALSE;
        this.incomingMessageDao = incomingMessageDao;
        this.outgoingMessageDao = outgoingMessageDao;
        // Receiver thread: loops on the running flag, processing incoming
        // messages. NOTE(review): `running` is non-volatile yet read here from
        // a different thread than the one that writes it -- confirm visibility.
        this.toReceive = new Thread(new Runnable() {
            @Override
            public void run() {
                while (running)
                    processMessageReceived();
            }
        });
        // Sender thread: loops on the running flag, pushing pending outgoing
        // messages.
        this.toSend = new Thread(new Runnable() {
            @Override
            public void run() {
                while (running)
                    processMessageToSend();
            }
        });
    }
/**
* Start the internal threads to make the job
*/
public void start() {
//Set to running
this.running = Boolean.TRUE;
//Start the Threads
toReceive.start();
toSend.start();
}
/**
* Pause the internal threads
*/
public void pause() {
this.running = Boolean.FALSE;
}
/**
* Resume the internal threads
*/
public void resume() {
this.running = Boolean.TRUE;
}
/**
* Stop the internal threads
*/
public void stop() {
//Stop the Threads
toReceive.interrupt();
toSend.interrupt();
//Disconnect from the service
serviceToServiceOnlineConnection.disconnect();
}
/**
* This method process the message received and save on the
* data base in the table <code>incoming_messages</code> and notify all observers
* to the new messages received
*/
private void processMessageReceived() {
try {
/**
* Verified the status of the connection
*/
if (serviceToServiceOnlineConnection.getStatus() == ConnectionStatus.CONNECTED) {
/**
* process all pending messages
*/
for (int i = 0; i < serviceToServiceOnlineConnection.getUnreadMessagesCount(); i++) {
/*
* Read the next message in the queue
*/
Message message = serviceToServiceOnlineConnection.readNextMessage();
/*
* Cast the message to IncomingTemplateNetworkServiceMessage
*/
CryptoAddressesNetworkServiceMessage incomingTemplateNetworkServiceMessage = (CryptoAddressesNetworkServiceMessage) message;
/*
* Validate the message signature
*/
AsymmectricCryptography.verifyMessageSignature(incomingTemplateNetworkServiceMessage.getSignature(), incomingTemplateNetworkServiceMessage.getTextContent(), remoteNetworkServicePublicKey);
/*
* Decrypt the message content
*/
incomingTemplateNetworkServiceMessage.setTextContent(AsymmectricCryptography.decryptMessagePrivateKey(incomingTemplateNetworkServiceMessage.getTextContent(), eccKeyPair.getPrivateKey()));
/*
* Change to the new status
*/
incomingTemplateNetworkServiceMessage.setStatus(MessagesStatus.NEW_RECEIVED);
/*
* Save to the data base table
*/
incomingMessageDao.create(incomingTemplateNetworkServiceMessage);
/*
* Remove the message from the queue
*/
serviceToServiceOnlineConnection.clearMessage(message);
/**
* Notify all observer of this agent that Received a new message
*/
notifyObservers(message);
}
}
//Sleep for a time
toReceive.sleep(this.SLEEP_TIME);
} catch (InterruptedException e) {
errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_TEMPLATE_NETWORK_SERVICE, UnexpectedPluginExceptionSeverity.DISABLES_SOME_FUNCTIONALITY_WITHIN_THIS_PLUGIN, new Exception("Can not sleep"));
} catch (CantInsertRecordDatabaseException e) {
errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_TEMPLATE_NETWORK_SERVICE, UnexpectedPluginExceptionSeverity.DISABLES_SOME_FUNCTIONALITY_WITHIN_THIS_PLUGIN, new Exception("Can not process message received. Error reason: " + e.getMessage()));
}
}
/**
* This method read for new messages pending to send on the data base in
* the table <code>outbox_messages</code> and encrypt the message content,
* sing the message and send it
*/
public void processMessageToSend() {
try {
if (serviceToServiceOnlineConnection.getStatus() == ConnectionStatus.CONNECTED) {
try {
/*
* Read all pending message from database
*/
List<CryptoAddressesNetworkServiceMessage> messages = outgoingMessageDao.findAll(CryptoAddressesNetworkServiceDatabaseConstants.OUTGOING_MESSAGES_TABLE_STATUS_COLUMN_NAME,
MessagesStatus.PENDING_TO_SEND.getCode());
/*
* For each message
*/
for (CryptoAddressesNetworkServiceMessage message : messages) {
/*
* Encrypt the content of the message whit the remote public key
*/
message.setTextContent(AsymmectricCryptography.encryptMessagePublicKey(message.getTextContent(), remoteNetworkServicePublicKey));
/*
* Sing the message
*/
AsymmectricCryptography.createMessageSignature(message.getTextContent(), eccKeyPair.getPrivateKey());
/*
* Send the message
*/
serviceToServiceOnlineConnection.sendMessage(message);
/*
* Change the message and update in the data base
*/
message.setStatus(MessagesStatus.SENT);
outgoingMessageDao.update(message);
}
} catch (CantSendMessageException e) {
errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_TEMPLATE_NETWORK_SERVICE, UnexpectedPluginExceptionSeverity.DISABLES_SOME_FUNCTIONALITY_WITHIN_THIS_PLUGIN, new Exception("Can not send message to remote network service "));
} catch (CantUpdateRecordDatabaseException | CantReadRecordDatabaseException e) {
errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_TEMPLATE_NETWORK_SERVICE, UnexpectedPluginExceptionSeverity.DISABLES_SOME_FUNCTIONALITY_WITHIN_THIS_PLUGIN, new Exception("Can not process messages to send. Error reason: " + e.getMessage()));
}
}
//Sleep for a time
toSend.sleep(this.SLEEP_TIME);
} catch (InterruptedException e) {
errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_TEMPLATE_NETWORK_SERVICE, UnexpectedPluginExceptionSeverity.DISABLES_SOME_FUNCTIONALITY_WITHIN_THIS_PLUGIN, new Exception("Can not sleep"));
}
}
}
| |
/* Copyright 2017 Alfa Financial Software
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.alfasoftware.morf.jdbc;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.alfasoftware.morf.metadata.Column;
import org.alfasoftware.morf.metadata.DataType;
import org.alfasoftware.morf.metadata.Schema;
import org.alfasoftware.morf.metadata.Table;
import org.alfasoftware.morf.sql.InsertStatement;
import org.alfasoftware.morf.sql.element.AliasedField;
import org.alfasoftware.morf.sql.element.AliasedFieldBuilder;
import org.alfasoftware.morf.sql.element.FieldLiteral;
import org.alfasoftware.morf.sql.element.NullFieldLiteral;
import org.alfasoftware.morf.sql.element.TableReference;
import org.apache.commons.lang.StringUtils;
import com.google.common.collect.Lists;
/**
 * Adds field defaults for columns that are missing from an insert statement.
 *
 * @author Copyright (c) Alfa Financial Software 2011
 */
public class InsertStatementDefaulter {

  /**
   * The database schema.
   */
  private final Schema metadata;

  /**
   * Constructor.
   *
   * @param metadata the database schema; must not be null.
   */
  protected InsertStatementDefaulter(Schema metadata) {
    super();
    // Fail fast before initialising the instance with an unusable schema
    if (metadata == null) {
      throw new IllegalArgumentException("MetaData must be provided");
    }
    this.metadata = metadata;
  }

  /**
   * Inserts default values for missing fields into the insert statement.
   *
   * @param statement the {@link InsertStatement} to add defaults for.
   * @return an insert statement with appropriate defaults added.
   */
  public InsertStatement defaultMissingFields(InsertStatement statement) {
    // Don't fiddle with parameterised statements
    if (statement.isParameterisedInsert()) {
      return statement;
    }

    Set<String> columnsWithValues = getColumnsWithValues(statement);
    return addColumnDefaults(statement, columnsWithValues);
  }

  /**
   * Gets a set of columns for which values have been provided.
   *
   * @param statement the statement to parse.
   * @return a set of upper-cased column names for which values have been provided.
   */
  private Set<String> getColumnsWithValues(InsertStatement statement) {
    Set<String> columnsWithValues = new HashSet<>();

    addColumns(statement.getValues(), columnsWithValues);

    if (statement.getSelectStatement() != null) {
      addColumns(statement.getSelectStatement().getFields(), columnsWithValues);
    }

    if (statement.getFromTable() != null) {
      addColumnsFromSchema(statement.getFromTable(), columnsWithValues);
    }

    for (String columnName : statement.getFieldDefaults().keySet()) {
      columnsWithValues.add(columnName.toUpperCase());
    }

    return columnsWithValues;
  }

  /**
   * Adds the list of {@code fields} to the {@code columnsWithValues}.
   *
   * @param fields the fields to add.
   * @param columnsWithValues the set to add to.
   */
  private void addColumns(List<AliasedField> fields, Set<String> columnsWithValues) {
    for (AliasedField field : fields) {
      columnsWithValues.add(field.getAlias().toUpperCase());
    }
  }

  /**
   * Adds table columns from the schema.
   *
   * @param tableReference the table reference to add for.
   * @param columnsWithValues the set to add to.
   */
  private void addColumnsFromSchema(TableReference tableReference, Set<String> columnsWithValues) {
    Table table = metadata.getTable(tableReference.getName().toUpperCase());
    if (table == null) {
      throw new IllegalArgumentException("Could not find table in schema for: " + tableReference.getName());
    }

    for (Column column : table.columns()) {
      columnsWithValues.add(column.getName().toUpperCase());
    }
  }

  /**
   * Adds the column defaults for missing columns to the {@code statement}.
   *
   * @param statement the statement to add to.
   * @param columnsWithValues the columns for which we have values.
   */
  private InsertStatement addColumnDefaults(InsertStatement statement, Set<String> columnsWithValues) {
    Table table = metadata.getTable(statement.getTable().getName().toUpperCase());
    if (table == null) {
      throw new IllegalArgumentException("Could not find table in schema for: " + statement.getTable().getName());
    }

    List<AliasedFieldBuilder> aliasedFieldBuilders = Lists.newArrayList();
    for (Column currentColumn : table.columns()) {
      // Default date columns to null and skip columns we've already added.
      if (columnsWithValues.contains(currentColumn.getName().toUpperCase())) {
        continue;
      }

      AliasedField fieldDefault = getFieldDefault(currentColumn);
      if (fieldDefault == null) {
        continue;
      }

      if (AliasedField.immutableDslEnabled()) {
        aliasedFieldBuilders.add(fieldDefault.as(currentColumn.getName()));
      } else {
        statement.getFieldDefaults().put(currentColumn.getName(), fieldDefault);
      }
    }

    if (AliasedField.immutableDslEnabled()) {
      return statement.shallowCopy().withDefaults(aliasedFieldBuilders).build();
    }
    return statement;
  }

  /**
   * Gets the default value for the {@code column}. If the column has a default
   * value associated with it, this is returned. Otherwise a standard default is
   * given.
   *
   * @param column The column to get the default for.
   * @return the default value to use.
   */
  private AliasedField getFieldDefault(Column column) {
    if (isNullDefaultType(column)) {
      return new NullFieldLiteral().as(column.getName());
    }

    if (StringUtils.isNotEmpty(column.getDefaultValue())) {
      switch (column.getType()) {
        case STRING:
          return new FieldLiteral(column.getDefaultValue()).as(column.getName());
        case BOOLEAN:
          return new FieldLiteral(Boolean.valueOf(column.getDefaultValue())).as(column.getName());
        case BIG_INTEGER:
          // Parse as long: a BIG_INTEGER default may exceed the int range,
          // which Integer.valueOf would reject with a NumberFormatException.
          return new FieldLiteral(Long.valueOf(column.getDefaultValue())).as(column.getName());
        case INTEGER:
          return new FieldLiteral(Integer.valueOf(column.getDefaultValue())).as(column.getName());
        case DECIMAL:
          return new FieldLiteral(Double.valueOf(column.getDefaultValue())).as(column.getName());
        default:
          throw new UnsupportedOperationException("Cannot determine the default value for data of type " + column.getType());
      }
    } else {
      switch (column.getType()) {
        case STRING:
          return new FieldLiteral("").as(column.getName());
        case BOOLEAN:
          return new FieldLiteral(false).as(column.getName());
        case DECIMAL:
        case INTEGER:
        case BIG_INTEGER:
          return new FieldLiteral(0).as(column.getName());
        default:
          throw new UnsupportedOperationException("Cannot determine the default value for data of type " + column.getType());
      }
    }
  }

  /**
   * Determines whether the column is a type that will always be defaulted to
   * 'null'. i.e. dates, BLOBs and CLOBs.
   *
   * @param column the column to check for.
   * @return true if the column should default to null.
   */
  private boolean isNullDefaultType(Column column) {
    return column.getType() == DataType.DATE || column.getType() == DataType.BLOB || column.getType() == DataType.CLOB;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.query.netty.message;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.UnpooledByteBufAllocator;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.runtime.query.KvStateID;
import org.apache.flink.runtime.query.TaskKvStateRegistry;
import org.apache.flink.runtime.state.KeyGroupRange;
import org.apache.flink.runtime.state.VoidNamespace;
import org.apache.flink.runtime.state.VoidNamespaceSerializer;
import org.apache.flink.runtime.state.heap.HeapKeyedStateBackend;
import org.apache.flink.runtime.state.internal.InternalKvState;
import org.apache.flink.runtime.state.internal.InternalListState;
import org.apache.flink.runtime.state.internal.InternalMapState;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
/**
 * Tests for the serialization and deserialization utilities of
 * {@link KvStateRequestSerializer}: request/result/failure messages, key and
 * namespace encoding, and value/list/map state encodings. Parameterized on
 * the {@code async} flag, which is forwarded to the heap keyed state backend.
 */
@RunWith(Parameterized.class)
public class KvStateRequestSerializerTest {

    // Allocator shared by all tests; unpooled to avoid lifecycle management.
    private final ByteBufAllocator alloc = UnpooledByteBufAllocator.DEFAULT;

    @Parameterized.Parameters
    public static Collection<Boolean> parameters() {
        return Arrays.asList(false, true);
    }

    @Parameterized.Parameter
    public boolean async;

    /**
     * Tests KvState request serialization.
     */
    @Test
    public void testKvStateRequestSerialization() throws Exception {
        long requestId = Integer.MAX_VALUE + 1337L;
        KvStateID kvStateId = new KvStateID();
        byte[] serializedKeyAndNamespace = randomByteArray(1024);

        ByteBuf buf = KvStateRequestSerializer.serializeKvStateRequest(
                alloc,
                requestId,
                kvStateId,
                serializedKeyAndNamespace);

        int frameLength = buf.readInt();
        assertEquals(KvStateRequestType.REQUEST, KvStateRequestSerializer.deserializeHeader(buf));
        KvStateRequest request = KvStateRequestSerializer.deserializeKvStateRequest(buf);
        // frame length excludes the 4-byte length field itself
        assertEquals(buf.readerIndex(), frameLength + 4);

        assertEquals(requestId, request.getRequestId());
        assertEquals(kvStateId, request.getKvStateId());
        assertArrayEquals(serializedKeyAndNamespace, request.getSerializedKeyAndNamespace());
    }

    /**
     * Tests KvState request serialization with zero-length serialized key and namespace.
     */
    @Test
    public void testKvStateRequestSerializationWithZeroLengthKeyAndNamespace() throws Exception {
        byte[] serializedKeyAndNamespace = new byte[0];

        ByteBuf buf = KvStateRequestSerializer.serializeKvStateRequest(
                alloc,
                1823,
                new KvStateID(),
                serializedKeyAndNamespace);

        int frameLength = buf.readInt();
        assertEquals(KvStateRequestType.REQUEST, KvStateRequestSerializer.deserializeHeader(buf));
        KvStateRequest request = KvStateRequestSerializer.deserializeKvStateRequest(buf);
        assertEquals(buf.readerIndex(), frameLength + 4);

        assertArrayEquals(serializedKeyAndNamespace, request.getSerializedKeyAndNamespace());
    }

    /**
     * Tests that we don't try to be smart about <code>null</code> key and namespace.
     * They should be treated explicitly.
     */
    @Test(expected = NullPointerException.class)
    public void testNullPointerExceptionOnNullSerializedKeyAndNamepsace() throws Exception {
        new KvStateRequest(0, new KvStateID(), null);
    }

    /**
     * Tests KvState request result serialization.
     */
    @Test
    public void testKvStateRequestResultSerialization() throws Exception {
        long requestId = Integer.MAX_VALUE + 72727278L;
        byte[] serializedResult = randomByteArray(1024);

        ByteBuf buf = KvStateRequestSerializer.serializeKvStateRequestResult(
                alloc,
                requestId,
                serializedResult);

        int frameLength = buf.readInt();
        assertEquals(KvStateRequestType.REQUEST_RESULT, KvStateRequestSerializer.deserializeHeader(buf));
        KvStateRequestResult request = KvStateRequestSerializer.deserializeKvStateRequestResult(buf);
        assertEquals(buf.readerIndex(), frameLength + 4);

        assertEquals(requestId, request.getRequestId());
        assertArrayEquals(serializedResult, request.getSerializedResult());
    }

    /**
     * Tests KvState request result serialization with zero-length serialized result.
     */
    @Test
    public void testKvStateRequestResultSerializationWithZeroLengthSerializedResult() throws Exception {
        byte[] serializedResult = new byte[0];

        ByteBuf buf = KvStateRequestSerializer.serializeKvStateRequestResult(
                alloc,
                72727278,
                serializedResult);

        int frameLength = buf.readInt();
        assertEquals(KvStateRequestType.REQUEST_RESULT, KvStateRequestSerializer.deserializeHeader(buf));
        KvStateRequestResult request = KvStateRequestSerializer.deserializeKvStateRequestResult(buf);
        assertEquals(buf.readerIndex(), frameLength + 4);

        assertArrayEquals(serializedResult, request.getSerializedResult());
    }

    /**
     * Tests that we don't try to be smart about <code>null</code> results.
     * They should be treated explicitly.
     */
    @Test(expected = NullPointerException.class)
    public void testNullPointerExceptionOnNullSerializedResult() throws Exception {
        new KvStateRequestResult(0, null);
    }

    /**
     * Tests KvState request failure serialization.
     */
    @Test
    public void testKvStateRequestFailureSerialization() throws Exception {
        long requestId = Integer.MAX_VALUE + 1111222L;
        IllegalStateException cause = new IllegalStateException("Expected test");

        ByteBuf buf = KvStateRequestSerializer.serializeKvStateRequestFailure(
                alloc,
                requestId,
                cause);

        int frameLength = buf.readInt();
        assertEquals(KvStateRequestType.REQUEST_FAILURE, KvStateRequestSerializer.deserializeHeader(buf));
        KvStateRequestFailure request = KvStateRequestSerializer.deserializeKvStateRequestFailure(buf);
        assertEquals(buf.readerIndex(), frameLength + 4);

        assertEquals(requestId, request.getRequestId());
        // the Throwable is round-tripped; compare type and message rather than identity
        assertEquals(cause.getClass(), request.getCause().getClass());
        assertEquals(cause.getMessage(), request.getCause().getMessage());
    }

    /**
     * Tests KvState server failure serialization.
     */
    @Test
    public void testServerFailureSerialization() throws Exception {
        IllegalStateException cause = new IllegalStateException("Expected test");

        ByteBuf buf = KvStateRequestSerializer.serializeServerFailure(alloc, cause);

        int frameLength = buf.readInt();
        assertEquals(KvStateRequestType.SERVER_FAILURE, KvStateRequestSerializer.deserializeHeader(buf));
        Throwable request = KvStateRequestSerializer.deserializeServerFailure(buf);
        assertEquals(buf.readerIndex(), frameLength + 4);

        assertEquals(cause.getClass(), request.getClass());
        assertEquals(cause.getMessage(), request.getMessage());
    }

    /**
     * Tests key and namespace serialization utils.
     */
    @Test
    public void testKeyAndNamespaceSerialization() throws Exception {
        TypeSerializer<Long> keySerializer = LongSerializer.INSTANCE;
        TypeSerializer<String> namespaceSerializer = StringSerializer.INSTANCE;

        long expectedKey = Integer.MAX_VALUE + 12323L;
        String expectedNamespace = "knilf";

        byte[] serializedKeyAndNamespace = KvStateRequestSerializer.serializeKeyAndNamespace(
                expectedKey, keySerializer, expectedNamespace, namespaceSerializer);

        Tuple2<Long, String> actual = KvStateRequestSerializer.deserializeKeyAndNamespace(
                serializedKeyAndNamespace, keySerializer, namespaceSerializer);

        assertEquals(expectedKey, actual.f0.longValue());
        assertEquals(expectedNamespace, actual.f1);
    }

    /**
     * Tests key and namespace deserialization utils with too few bytes.
     */
    @Test(expected = IOException.class)
    public void testKeyAndNamespaceDeserializationEmpty() throws Exception {
        KvStateRequestSerializer.deserializeKeyAndNamespace(
                new byte[] {}, LongSerializer.INSTANCE, StringSerializer.INSTANCE);
    }

    /**
     * Tests key and namespace deserialization utils with too few bytes.
     */
    @Test(expected = IOException.class)
    public void testKeyAndNamespaceDeserializationTooShort() throws Exception {
        KvStateRequestSerializer.deserializeKeyAndNamespace(
                new byte[] {1}, LongSerializer.INSTANCE, StringSerializer.INSTANCE);
    }

    /**
     * Tests key and namespace deserialization utils with too many bytes.
     */
    @Test(expected = IOException.class)
    public void testKeyAndNamespaceDeserializationTooMany1() throws Exception {
        // Long + null String + 1 byte
        KvStateRequestSerializer.deserializeKeyAndNamespace(
                new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 42, 0, 2}, LongSerializer.INSTANCE,
                StringSerializer.INSTANCE);
    }

    /**
     * Tests key and namespace deserialization utils with too many bytes.
     */
    @Test(expected = IOException.class)
    public void testKeyAndNamespaceDeserializationTooMany2() throws Exception {
        // Long + null String + 2 bytes
        KvStateRequestSerializer.deserializeKeyAndNamespace(
                new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 42, 0, 2, 2}, LongSerializer.INSTANCE,
                StringSerializer.INSTANCE);
    }

    /**
     * Tests value serialization utils.
     */
    @Test
    public void testValueSerialization() throws Exception {
        TypeSerializer<Long> valueSerializer = LongSerializer.INSTANCE;
        long expectedValue = Long.MAX_VALUE - 1292929292L;

        byte[] serializedValue = KvStateRequestSerializer.serializeValue(expectedValue, valueSerializer);
        long actualValue = KvStateRequestSerializer.deserializeValue(serializedValue, valueSerializer);

        assertEquals(expectedValue, actualValue);
    }

    /**
     * Tests value deserialization with too few bytes.
     */
    @Test(expected = IOException.class)
    public void testDeserializeValueEmpty() throws Exception {
        KvStateRequestSerializer.deserializeValue(new byte[] {}, LongSerializer.INSTANCE);
    }

    /**
     * Tests value deserialization with too few bytes.
     */
    @Test(expected = IOException.class)
    public void testDeserializeValueTooShort() throws Exception {
        // 1 byte (incomplete Long)
        KvStateRequestSerializer.deserializeValue(new byte[] {1}, LongSerializer.INSTANCE);
    }

    /**
     * Tests value deserialization with too many bytes.
     */
    @Test(expected = IOException.class)
    public void testDeserializeValueTooMany1() throws Exception {
        // Long + 1 byte
        KvStateRequestSerializer.deserializeValue(new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 2},
                LongSerializer.INSTANCE);
    }

    /**
     * Tests value deserialization with too many bytes.
     */
    @Test(expected = IOException.class)
    public void testDeserializeValueTooMany2() throws Exception {
        // Long + 2 bytes
        KvStateRequestSerializer.deserializeValue(new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 2, 2},
                LongSerializer.INSTANCE);
    }

    /**
     * Tests list serialization utils.
     */
    @Test
    public void testListSerialization() throws Exception {
        final long key = 0L;

        // objects for heap state list serialisation
        final HeapKeyedStateBackend<Long> longHeapKeyedStateBackend =
                new HeapKeyedStateBackend<>(
                        mock(TaskKvStateRegistry.class),
                        LongSerializer.INSTANCE,
                        ClassLoader.getSystemClassLoader(),
                        1,
                        new KeyGroupRange(0, 0),
                        async,
                        new ExecutionConfig()
                );
        longHeapKeyedStateBackend.setCurrentKey(key);

        final InternalListState<VoidNamespace, Long> listState = longHeapKeyedStateBackend.createListState(
                VoidNamespaceSerializer.INSTANCE,
                new ListStateDescriptor<>("test", LongSerializer.INSTANCE));

        testListSerialization(key, listState);
    }

    /**
     * Verifies that the serialization of a list using the given list state
     * matches the deserialization with {@link KvStateRequestSerializer#deserializeList}.
     *
     * @param key
     * 		key of the list state
     * @param listState
     * 		list state using the {@link VoidNamespace}, must also be a {@link InternalKvState} instance
     *
     * @throws Exception
     */
    public static void testListSerialization(
            final long key,
            final InternalListState<VoidNamespace, Long> listState) throws Exception {

        TypeSerializer<Long> valueSerializer = LongSerializer.INSTANCE;
        listState.setCurrentNamespace(VoidNamespace.INSTANCE);

        // List
        final int numElements = 10;

        final List<Long> expectedValues = new ArrayList<>();
        for (int i = 0; i < numElements; i++) {
            final long value = ThreadLocalRandom.current().nextLong();
            expectedValues.add(value);
            listState.add(value);
        }

        final byte[] serializedKey =
                KvStateRequestSerializer.serializeKeyAndNamespace(
                        key, LongSerializer.INSTANCE,
                        VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE);

        final byte[] serializedValues = listState.getSerializedValue(serializedKey);

        List<Long> actualValues = KvStateRequestSerializer.deserializeList(serializedValues, valueSerializer);
        assertEquals(expectedValues, actualValues);

        // Single value
        long expectedValue = ThreadLocalRandom.current().nextLong();
        byte[] serializedValue = KvStateRequestSerializer.serializeValue(expectedValue, valueSerializer);
        List<Long> actualValue = KvStateRequestSerializer.deserializeList(serializedValue, valueSerializer);
        assertEquals(1, actualValue.size());
        assertEquals(expectedValue, actualValue.get(0).longValue());
    }

    /**
     * Tests list deserialization with too few bytes.
     */
    @Test
    public void testDeserializeListEmpty() throws Exception {
        List<Long> actualValue = KvStateRequestSerializer
                .deserializeList(new byte[] {}, LongSerializer.INSTANCE);
        assertEquals(0, actualValue.size());
    }

    /**
     * Tests list deserialization with too few bytes.
     */
    @Test(expected = IOException.class)
    public void testDeserializeListTooShort1() throws Exception {
        // 1 byte (incomplete Long)
        KvStateRequestSerializer.deserializeList(new byte[] {1}, LongSerializer.INSTANCE);
    }

    /**
     * Tests list deserialization with too few bytes.
     */
    @Test(expected = IOException.class)
    public void testDeserializeListTooShort2() throws Exception {
        // Long + 1 byte (separator) + 1 byte (incomplete Long)
        KvStateRequestSerializer.deserializeList(new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 2, 3},
                LongSerializer.INSTANCE);
    }

    /**
     * Tests map serialization utils.
     */
    @Test
    public void testMapSerialization() throws Exception {
        final long key = 0L;

        // objects for heap state list serialisation
        final HeapKeyedStateBackend<Long> longHeapKeyedStateBackend =
                new HeapKeyedStateBackend<>(
                        mock(TaskKvStateRegistry.class),
                        LongSerializer.INSTANCE,
                        ClassLoader.getSystemClassLoader(),
                        1,
                        new KeyGroupRange(0, 0),
                        async,
                        new ExecutionConfig()
                );
        longHeapKeyedStateBackend.setCurrentKey(key);

        final InternalMapState<VoidNamespace, Long, String> mapState = (InternalMapState<VoidNamespace, Long, String>) longHeapKeyedStateBackend.getPartitionedState(
                VoidNamespace.INSTANCE,
                VoidNamespaceSerializer.INSTANCE,
                new MapStateDescriptor<>("test", LongSerializer.INSTANCE, StringSerializer.INSTANCE));

        testMapSerialization(key, mapState);
    }

    /**
     * Verifies that the serialization of a map using the given map state
     * matches the deserialization with {@link KvStateRequestSerializer#deserializeMap}.
     *
     * @param key
     * 		key of the map state
     * @param mapState
     * 		map state using the {@link VoidNamespace}, must also be a {@link InternalKvState} instance
     *
     * @throws Exception
     */
    public static void testMapSerialization(
            final long key,
            final InternalMapState<VoidNamespace, Long, String> mapState) throws Exception {

        TypeSerializer<Long> userKeySerializer = LongSerializer.INSTANCE;
        TypeSerializer<String> userValueSerializer = StringSerializer.INSTANCE;
        mapState.setCurrentNamespace(VoidNamespace.INSTANCE);

        // Map
        final int numElements = 10;

        final Map<Long, String> expectedValues = new HashMap<>();
        for (int i = 1; i <= numElements; i++) {
            final long value = ThreadLocalRandom.current().nextLong();
            expectedValues.put(value, Long.toString(value));
            mapState.put(value, Long.toString(value));
        }

        // also cover a null map value
        expectedValues.put(0L, null);
        mapState.put(0L, null);

        final byte[] serializedKey =
                KvStateRequestSerializer.serializeKeyAndNamespace(
                        key, LongSerializer.INSTANCE,
                        VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE);

        final byte[] serializedValues = mapState.getSerializedValue(serializedKey);

        Map<Long, String> actualValues = KvStateRequestSerializer.deserializeMap(serializedValues, userKeySerializer, userValueSerializer);
        assertEquals(expectedValues.size(), actualValues.size());
        for (Map.Entry<Long, String> actualEntry : actualValues.entrySet()) {
            assertEquals(expectedValues.get(actualEntry.getKey()), actualEntry.getValue());
        }

        // Single value: hand-build one serialized entry (key | isNull flag | value)
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        long expectedKey = ThreadLocalRandom.current().nextLong();
        String expectedValue = Long.toString(expectedKey);
        byte[] isNull = {0};

        baos.write(KvStateRequestSerializer.serializeValue(expectedKey, userKeySerializer));
        baos.write(isNull);
        baos.write(KvStateRequestSerializer.serializeValue(expectedValue, userValueSerializer));
        byte[] serializedValue = baos.toByteArray();

        Map<Long, String> actualValue = KvStateRequestSerializer.deserializeMap(serializedValue, userKeySerializer, userValueSerializer);
        assertEquals(1, actualValue.size());
        assertEquals(expectedValue, actualValue.get(expectedKey));
    }

    /**
     * Tests map deserialization with too few bytes.
     */
    @Test
    public void testDeserializeMapEmpty() throws Exception {
        Map<Long, String> actualValue = KvStateRequestSerializer
                .deserializeMap(new byte[] {}, LongSerializer.INSTANCE, StringSerializer.INSTANCE);
        assertEquals(0, actualValue.size());
    }

    /**
     * Tests map deserialization with too few bytes.
     */
    @Test(expected = IOException.class)
    public void testDeserializeMapTooShort1() throws Exception {
        // 1 byte (incomplete Key)
        KvStateRequestSerializer.deserializeMap(new byte[] {1}, LongSerializer.INSTANCE, StringSerializer.INSTANCE);
    }

    /**
     * Tests map deserialization with too few bytes.
     */
    @Test(expected = IOException.class)
    public void testDeserializeMapTooShort2() throws Exception {
        // Long (Key) + 1 byte (incomplete Value)
        KvStateRequestSerializer.deserializeMap(new byte[]{1, 1, 1, 1, 1, 1, 1, 1, 0},
                LongSerializer.INSTANCE, LongSerializer.INSTANCE);
    }

    /**
     * Tests map deserialization with too few bytes.
     */
    @Test(expected = IOException.class)
    public void testDeserializeMapTooShort3() throws Exception {
        // Long (Key1) + Boolean (false) + Long (Value1) + 1 byte (incomplete Key2)
        KvStateRequestSerializer.deserializeMap(new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 3},
                LongSerializer.INSTANCE, LongSerializer.INSTANCE);
    }

    // Returns a byte array of the given length filled with random bytes.
    private byte[] randomByteArray(int capacity) {
        byte[] bytes = new byte[capacity];
        ThreadLocalRandom.current().nextBytes(bytes);
        return bytes;
    }
}
| |
/**
* Copyright Microsoft Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.microsoft.azure.storage.table;
import com.microsoft.azure.storage.LocationMode;
import com.microsoft.azure.storage.RetryNoRetry;
import com.microsoft.azure.storage.SecondaryTests;
import com.microsoft.azure.storage.StorageErrorCodeStrings;
import com.microsoft.azure.storage.StorageException;
import com.microsoft.azure.storage.core.SR;
import com.microsoft.azure.storage.table.TableTestHelper.Class1;
import com.microsoft.azure.storage.table.TableTestHelper.Class2;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.net.HttpURLConnection;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Random;
import java.util.UUID;
import com.microsoft.azure.storage.TestRunners.CloudTests;
import com.microsoft.azure.storage.TestRunners.DevFabricTests;
import com.microsoft.azure.storage.TestRunners.DevStoreTests;
import static org.junit.Assert.*;
@Category({ DevFabricTests.class, DevStoreTests.class, CloudTests.class })
public class TableBatchOperationTests {
private CloudTable table;
@Before
public void tableTestMethodSetUp() throws URISyntaxException, StorageException {
    // Each test runs against a fresh, randomly named table so tests cannot
    // interfere with one another.
    this.table = TableTestHelper.getRandomTableReference();
    this.table.createIfNotExists();
}
@After
public void tableTestMethodTearDown() throws StorageException {
    // Best-effort cleanup of the per-test table.
    this.table.deleteIfExists();
}
/**
 * Verifies that addAll accepts a full collection of operations, that the batch
 * executes, and that the populated batch then rejects (a) a retrieve mixed with
 * other operations and (b) an operation with a different partition key.
 */
@Test
public void testBatchAddAll() throws StorageException {
    ArrayList<TableOperation> ops = allOpsList();
    TableBatchOperation batch = new TableBatchOperation();
    boolean added = batch.addAll(ops);
    assertTrue(added);
    ArrayList<TableResult> results = this.table.execute(batch, null, null);
    assertEquals(8, results.size());
    Iterator<TableResult> iter = results.iterator();
    // NOTE(review): only the first 7 of the 8 results are status-checked here —
    // confirm whether the 8th result intentionally has a different status code.
    for (int i = 0; i < 7; i++) {
        assertEquals(HttpURLConnection.HTTP_NO_CONTENT, iter.next().getHttpStatusCode());
    }
    // test to make sure we can't now add a query with addAll()
    ops.clear();
    Class1 ref = TableTestHelper.generateRandomEntity("jxscl_odata");
    ops.add(TableOperation.retrieve(ref.partitionKey, ref.rowKey, ref.getClass()));
    try {
        batch.addAll(ops);
        fail(SR.RETRIEVE_MUST_BE_ONLY_OPERATION_IN_BATCH);
    }
    catch (IllegalArgumentException e) {
        // Narrowed from Exception: only the documented rejection is expected,
        // matching testBatchAddAllWithRetrieveShouldThrow.
        assertEquals(SR.RETRIEVE_MUST_BE_ONLY_OPERATION_IN_BATCH, e.getMessage());
    }
    // test to make sure we can't now add an operation with a different partition key with addAll()
    ops.clear();
    ref.partitionKey = "jxscl_odata_different";
    ops.add(TableOperation.insert(ref));
    try {
        batch.addAll(ops);
        fail(SR.OPS_IN_BATCH_MUST_HAVE_SAME_PARTITION_KEY);
    }
    catch (IllegalArgumentException e) {
        assertEquals(SR.OPS_IN_BATCH_MUST_HAVE_SAME_PARTITION_KEY, e.getMessage());
    }
}
/**
 * Same scenario as testBatchAddAll but via the index-based addAll(int, Collection)
 * overload: a full collection is accepted and executes; afterwards a retrieve
 * mixed with other operations and a mismatched partition key are both rejected.
 */
@Test
public void testBatchAddAllIndex() throws StorageException {
    ArrayList<TableOperation> ops = allOpsList();
    TableBatchOperation batch = new TableBatchOperation();
    boolean added = batch.addAll(0, ops);
    assertTrue(added);
    ArrayList<TableResult> results = this.table.execute(batch, null, null);
    assertEquals(8, results.size());
    Iterator<TableResult> iter = results.iterator();
    // NOTE(review): only the first 7 of the 8 results are status-checked here —
    // confirm whether the 8th result intentionally has a different status code.
    for (int i = 0; i < 7; i++) {
        assertEquals(HttpURLConnection.HTTP_NO_CONTENT, iter.next().getHttpStatusCode());
    }
    // test to make sure we can't now add a query with addAll()
    ops.clear();
    Class1 ref = TableTestHelper.generateRandomEntity("jxscl_odata");
    ops.add(TableOperation.retrieve(ref.partitionKey, ref.rowKey, ref.getClass()));
    try {
        batch.addAll(0, ops);
        fail(SR.RETRIEVE_MUST_BE_ONLY_OPERATION_IN_BATCH);
    }
    catch (IllegalArgumentException e) {
        // Narrowed from Exception: only the documented rejection is expected.
        assertEquals(SR.RETRIEVE_MUST_BE_ONLY_OPERATION_IN_BATCH, e.getMessage());
    }
    // test to make sure we can't now add an operation with a different partition key with addAll()
    ops.clear();
    ref.partitionKey = "jxscl_odata_different";
    ops.add(TableOperation.insert(ref));
    try {
        batch.addAll(0, ops);
        fail(SR.OPS_IN_BATCH_MUST_HAVE_SAME_PARTITION_KEY);
    }
    catch (IllegalArgumentException e) {
        assertEquals(SR.OPS_IN_BATCH_MUST_HAVE_SAME_PARTITION_KEY, e.getMessage());
    }
}
/**
 * A retrieve mixed into a collection of other operations must be rejected by
 * addAll with RETRIEVE_MUST_BE_ONLY_OPERATION_IN_BATCH.
 */
@Test
public void testBatchAddAllWithRetrieveShouldThrow() throws StorageException {
    final ArrayList<TableOperation> ops = allOpsList();
    final TableBatchOperation batch = new TableBatchOperation();
    // Insert the entity the retrieve will target.
    final Class1 target = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(target), null, null);
    // Appending a retrieve to the mixed operation list makes addAll illegal.
    ops.add(TableOperation.retrieve(target.getPartitionKey(), target.getRowKey(), Class1.class));
    try {
        batch.addAll(ops);
        fail(SR.RETRIEVE_MUST_BE_ONLY_OPERATION_IN_BATCH);
    }
    catch (IllegalArgumentException e) {
        assertEquals(SR.RETRIEVE_MUST_BE_ONLY_OPERATION_IN_BATCH, e.getMessage());
    }
}
/**
 * Exercises index-based add/remove: a retrieve or an insert added at index 0
 * and then removed leaves the batch unconstrained for subsequent additions.
 */
@Test
@Category(SecondaryTests.class)
public void testBatchAddIndex() {
    final TableBatchOperation batch = new TableBatchOperation();
    // Add a retrieve at index 0, then remove it again.
    Class1 entity = TableTestHelper.generateRandomEntity("jxscl_odata");
    TableOperation retrieveOp = TableOperation.retrieve(entity.getPartitionKey(), entity.getRowKey(), entity.getClass());
    batch.add(0, retrieveOp);
    batch.remove(0);
    assertEquals(0, batch.size());
    // With the batch empty again, an insert with a different partition key is legal.
    final Class1 other = TableTestHelper.generateRandomEntity("jxscl_odata_2");
    final TableOperation insertOp = TableOperation.insert(other);
    batch.add(0, insertOp);
    batch.remove(0);
    assertEquals(0, batch.size());
    // And a retrieve may be added once more.
    entity = TableTestHelper.generateRandomEntity("jxscl_odata");
    retrieveOp = TableOperation.retrieve(entity.getPartitionKey(), entity.getRowKey(), entity.getClass());
    batch.add(0, retrieveOp);
}
/**
 * removeAll with every queued operation empties the batch and lifts its
 * partition-key lock.
 */
@Test
public void testBatchRemoveAll() throws StorageException {
    final ArrayList<TableOperation> ops = allOpsList();
    final TableBatchOperation batch = new TableBatchOperation();
    batch.addAll(ops);
    assertTrue(batch.removeAll(ops));
    assertEquals(0, batch.size());
    // The emptied batch accepts an entity with a different partition key.
    batch.insert(TableTestHelper.generateRandomEntity("jxscl_odata_2"));
}
/**
 * Exercises removeRange: clearing all operations lifts the batch's
 * partition-key lock, while leaving even one operation keeps it in force.
 */
@Test
@Category(SecondaryTests.class)
public void testBatchRemoveRange() throws StorageException {
    ArrayList<TableOperation> ops = allOpsList();
    TableBatchOperation batch = new TableBatchOperation();
    batch.addAll(ops);
    // Removing the full range empties the batch...
    batch.removeRange(0, ops.size());
    assertEquals(0, batch.size());
    // ...so an entity with a different partition key is now accepted.
    Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata_2");
    batch.insert(baseEntity);
    batch.removeRange(0, 1);
    batch.addAll(ops);
    // Leave one operation behind: the partition-key lock stays in force.
    batch.removeRange(0, ops.size() - 1);
    // should not be able to add an entity with a different partition key
    baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata_2");
    try {
        batch.insert(baseEntity);
        fail(SR.OPS_IN_BATCH_MUST_HAVE_SAME_PARTITION_KEY);
    }
    catch (IllegalArgumentException e) {
        assertEquals(SR.OPS_IN_BATCH_MUST_HAVE_SAME_PARTITION_KEY, e.getMessage());
    }
    batch.removeRange(0, 1);
    // Batch is empty again: a retrieve may be added.
    Class1 ref = TableTestHelper.generateRandomEntity("jxscl_odata");
    TableOperation queryOp = TableOperation.retrieve(ref.getPartitionKey(), ref.getRowKey(), ref.getClass());
    batch.add(queryOp);
}
/**
 * Exercises object-based remove: removing the only queued operation empties
 * the batch and lifts its retrieve/partition-key constraints.
 */
@Test
@Category(SecondaryTests.class)
public void testBatchRemove() {
    final TableBatchOperation batch = new TableBatchOperation();
    // Queue a retrieve, then take it back out.
    Class1 entity = TableTestHelper.generateRandomEntity("jxscl_odata");
    TableOperation retrieveOp = TableOperation.retrieve(entity.getPartitionKey(), entity.getRowKey(), entity.getClass());
    batch.add(retrieveOp);
    batch.remove(retrieveOp);
    assertEquals(0, batch.size());
    // Now an insert with a different partition key is legal.
    final TableOperation insertOp = TableOperation.insert(TableTestHelper.generateRandomEntity("jxscl_odata_2"));
    batch.add(insertOp);
    batch.remove(insertOp);
    assertEquals(0, batch.size());
    // And a retrieve may be queued once more.
    entity = TableTestHelper.generateRandomEntity("jxscl_odata");
    retrieveOp = TableOperation.retrieve(entity.getPartitionKey(), entity.getRowKey(), entity.getClass());
    batch.add(retrieveOp);
}
/**
 * A batch is locked to the partition key of its first operation: inserting an
 * entity with a different partition key must throw IllegalArgumentException.
 */
@Test
public void testBatchLockToPartitionKey() {
    try {
        TableBatchOperation batch = new TableBatchOperation();
        batch.insert(TableTestHelper.generateRandomEntity("jxscl_odata"));
        batch.insert(TableTestHelper.generateRandomEntity("jxscl_odata2"));
        // Bug fix: without this fail() the test silently passed when no
        // exception was thrown.
        fail(SR.OPS_IN_BATCH_MUST_HAVE_SAME_PARTITION_KEY);
    }
    catch (IllegalArgumentException ex) {
        // expected value first per JUnit convention
        assertEquals(SR.OPS_IN_BATCH_MUST_HAVE_SAME_PARTITION_KEY, ex.getMessage());
    }
}
/**
 * A retrieve must be the only operation in a batch: adding a retrieve after an
 * insert, or an insert after a retrieve, must throw in both orders.
 */
@Test
public void testBatchRetrieveAndOneMoreOperationShouldThrow() {
    Class1 ref2 = TableTestHelper.generateRandomEntity("jxscl_odata");
    try {
        TableBatchOperation batch = new TableBatchOperation();
        batch.insert(TableTestHelper.generateRandomEntity("jxscl_odata"));
        batch.retrieve(ref2.getPartitionKey(), ref2.getRowKey(), ref2.getClass());
        fail();
    }
    catch (IllegalArgumentException ex) {
        // expected value first per JUnit convention
        assertEquals(SR.RETRIEVE_MUST_BE_ONLY_OPERATION_IN_BATCH, ex.getMessage());
    }
    try {
        TableBatchOperation batch = new TableBatchOperation();
        batch.retrieve(ref2.getPartitionKey(), ref2.getRowKey(), ref2.getClass());
        batch.insert(TableTestHelper.generateRandomEntity("jxscl_odata"));
        fail();
    }
    catch (IllegalArgumentException ex) {
        assertEquals(SR.RETRIEVE_MUST_BE_ONLY_OPERATION_IN_BATCH, ex.getMessage());
    }
}
/** Adding a null element to a batch must throw IllegalArgumentException. */
@Test
public void testBatchAddNullShouldThrow() {
    try {
        TableBatchOperation batch = new TableBatchOperation();
        batch.add(null);
        fail();
    }
    catch (IllegalArgumentException ex) {
        // expected value first per JUnit convention
        assertEquals(String.format(SR.ARGUMENT_NULL_OR_EMPTY, "element"), ex.getMessage());
    }
}
/** retrieve() with a null EntityResolver must throw IllegalArgumentException. */
@Test
public void testBatchRetrieveWithNullResolverShouldThrow() {
    try {
        TableBatchOperation batch = new TableBatchOperation();
        batch.retrieve("foo", "blah", (EntityResolver<?>) null);
        fail();
    }
    catch (IllegalArgumentException ex) {
        // expected value first per JUnit convention
        assertEquals(String.format(SR.ARGUMENT_NULL_OR_EMPTY, SR.QUERY_REQUIRES_VALID_CLASSTYPE_OR_RESOLVER),
                ex.getMessage());
    }
}
/** Executing an empty batch must be rejected client-side. */
@Test
public void testEmptyBatchShouldThrow() throws StorageException {
    TableBatchOperation batch = new TableBatchOperation();
    try {
        this.table.execute(batch, null, null);
        fail(SR.EMPTY_BATCH_NOT_ALLOWED);
    }
    catch (IllegalArgumentException ex) {
        // expected value first per JUnit convention
        assertEquals(SR.EMPTY_BATCH_NOT_ALLOWED, ex.getMessage());
    }
}
/** Two retrieves in one batch must be rejected. */
@Test
public void testBatchMultiQueryShouldThrow() {
    Class1 ref = TableTestHelper.generateRandomEntity("jxscl_odata");
    Class1 ref2 = TableTestHelper.generateRandomEntity("jxscl_odata");
    try {
        TableBatchOperation batch = new TableBatchOperation();
        batch.retrieve(ref.getPartitionKey(), ref.getRowKey(), ref.getClass());
        batch.retrieve(ref2.getPartitionKey(), ref2.getRowKey(), ref2.getClass());
        fail(SR.RETRIEVE_MUST_BE_ONLY_OPERATION_IN_BATCH);
    }
    catch (IllegalArgumentException ex) {
        // expected value first per JUnit convention
        assertEquals(SR.RETRIEVE_MUST_BE_ONLY_OPERATION_IN_BATCH, ex.getMessage());
    }
}
/**
 * A batch containing a write may not be sent to the secondary endpoint: the
 * client rejects it locally with an IllegalArgumentException wrapped in a
 * StorageException, before any request is made.
 */
@Test
// don't need the category secondary as the request will fail before being sent
public void testBatchSecondaryWriteShouldThrow() {
    // Batch with a single insert — a write operation.
    final TableBatchOperation batch = new TableBatchOperation();
    batch.add(TableOperation.insert(TableTestHelper.generateRandomEntity("jxscl_odata")));
    final TableRequestOptions options = new TableRequestOptions();
    options.setLocationMode(LocationMode.SECONDARY_ONLY);
    options.setRetryPolicyFactory(new RetryNoRetry());
    try {
        this.table.execute(batch, options, null);
        fail("Should not be able to make a request to secondary as there are writes.");
    }
    catch (StorageException e) {
        assertEquals(IllegalArgumentException.class, e.getCause().getClass());
        assertEquals(SR.PRIMARY_ONLY_COMMAND, e.getCause().getMessage());
    }
}
/**
 * A read-only batch (single retrieve) may be executed against the secondary
 * endpoint.
 */
@Test
@Category(SecondaryTests.class)
public void testBatchSecondaryNoWrite() throws StorageException {
    // Create and insert the entity the batch will read back.
    final Class1 entity = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(entity));
    // Batch containing only a retrieve — no writes.
    final TableBatchOperation batch = new TableBatchOperation();
    batch.add(TableOperation.retrieve(entity.getPartitionKey(), entity.getRowKey(), entity.getClass()));
    // Secondary-only execution succeeds because the batch is read-only.
    final TableRequestOptions options = new TableRequestOptions();
    options.setLocationMode(LocationMode.SECONDARY_ONLY);
    options.setRetryPolicyFactory(new RetryNoRetry());
    this.table.execute(batch, options, null);
}
/** The service rejects a batch of more than 100 operations with InvalidInput. */
@Test
public void testBatchOver100Entities() throws StorageException {
    TableRequestOptions options = new TableRequestOptions();
    options.setTablePayloadFormat(TablePayloadFormat.Json);
    TableBatchOperation batch = new TableBatchOperation();
    try {
        // One over the documented 100-operation batch limit.
        for (int m = 0; m < 101; m++) {
            batch.insert(TableTestHelper.generateRandomEntity("jxscl_odata"));
        }
        this.table.execute(batch, options, null);
        fail("Batch with over 100 entities should fail.");
    }
    catch (TableServiceException ex) {
        // expected values first per JUnit convention
        assertEquals("Bad Request", ex.getMessage());
        String errorAfterSemiColon = ex.getExtendedErrorInformation().getErrorMessage();
        errorAfterSemiColon = errorAfterSemiColon.substring(errorAfterSemiColon.indexOf(":") + 1);
        assertTrue(errorAfterSemiColon.startsWith("The batch request operation exceeds the maximum 100 changes per change set."));
        assertEquals(StorageErrorCodeStrings.INVALID_INPUT, ex.getErrorCode());
    }
}
/** The service rejects a batch containing an entity larger than 1 MB with EntityTooLarge. */
@Test
public void testBatchInsertEntityOver1MB() throws StorageException {
    TableRequestOptions options = new TableRequestOptions();
    options.setTablePayloadFormat(TablePayloadFormat.Json);
    TableBatchOperation batch = new TableBatchOperation();
    Class1 bigEnt = new Class1();
    bigEnt.setA("foo_A");
    bigEnt.setB("foo_B");
    bigEnt.setC("foo_C");
    // 1mb right here
    bigEnt.setD(new byte[1024 * 1024]);
    bigEnt.setPartitionKey("jxscl_odata");
    bigEnt.setRowKey(UUID.randomUUID().toString());
    batch.insert(bigEnt);
    // Pad the batch with a few normal-sized entities.
    for (int m = 0; m < 3; m++) {
        Class1 ref = new Class1();
        ref.setA("foo_A");
        ref.setB("foo_B");
        ref.setC("foo_C");
        ref.setPartitionKey("jxscl_odata");
        ref.setRowKey(UUID.randomUUID().toString());
        batch.insert(ref);
    }
    try {
        this.table.execute(batch, options, null);
        fail();
    }
    catch (TableServiceException ex) {
        // expected values first per JUnit convention
        assertEquals("Bad Request", ex.getMessage());
        String errorAfterSemiColon = ex.getExtendedErrorInformation().getErrorMessage();
        errorAfterSemiColon = errorAfterSemiColon.substring(errorAfterSemiColon.indexOf(":") + 1);
        assertTrue(errorAfterSemiColon.startsWith("The entity is larger than the maximum allowed size (1MB)."));
        assertEquals(StorageErrorCodeStrings.ENTITY_TOO_LARGE, ex.getErrorCode());
    }
}
/** A property name longer than 255 characters is rejected with PropertyNameTooLong. */
@Test
public void testBatchInsertEntityWithPropertyMoreThan255chars() throws StorageException {
    TableRequestOptions options = new TableRequestOptions();
    options.setTablePayloadFormat(TablePayloadFormat.Json);
    TableBatchOperation batch = new TableBatchOperation();
    DynamicTableEntity bigEnt = new DynamicTableEntity();
    // Build a 256-char property name (one past the 255 limit). StringBuilder
    // replaces the original O(n^2) String.concat loop.
    StringBuilder propName = new StringBuilder(256);
    for (int m = 0; m < 256; m++) {
        propName.append('a');
    }
    bigEnt.getProperties().put(propName.toString(), new EntityProperty("test"));
    bigEnt.setPartitionKey("jxscl_odata");
    bigEnt.setRowKey(UUID.randomUUID().toString());
    batch.insert(bigEnt);
    // Pad the batch with normal entities.
    for (int m = 0; m < 3; m++) {
        Class1 ref = new Class1();
        ref.setA("foo_A");
        ref.setB("foo_B");
        ref.setC("foo_C");
        ref.setPartitionKey("jxscl_odata");
        ref.setRowKey(UUID.randomUUID().toString());
        batch.insert(ref);
    }
    try {
        this.table.execute(batch, options, null);
        fail();
    }
    catch (TableServiceException ex) {
        // expected values first per JUnit convention
        assertEquals("Bad Request", ex.getMessage());
        String errorAfterSemiColon = ex.getExtendedErrorInformation().getErrorMessage();
        errorAfterSemiColon = errorAfterSemiColon.substring(errorAfterSemiColon.indexOf(":") + 1);
        assertTrue(errorAfterSemiColon.startsWith("The property name exceeds the maximum allowed length (255)."));
        assertEquals("PropertyNameTooLong", ex.getErrorCode());
    }
}
/** ~32 entities of ~128 KB each push the request body over 4 MB; the service returns 413. */
@Test
public void testBatchSizeOver4mb() {
    TableRequestOptions options = new TableRequestOptions();
    options.setTablePayloadFormat(TablePayloadFormat.Json);
    TableBatchOperation batch = new TableBatchOperation();
    byte[] datArr = new byte[1024 * 128];
    Random rand = new Random();
    rand.nextBytes(datArr);
    // Each entity is approx 128kb, meaning ~32 entities will result in a request over 4mb.
    try {
        for (int m = 0; m < 32; m++) {
            Class1 ref = new Class1();
            ref.setA("foo_A");
            ref.setB("foo_B");
            ref.setC("foo_C");
            ref.setD(datArr);
            ref.setPartitionKey("jxscl_odata");
            ref.setRowKey(UUID.randomUUID().toString());
            batch.insert(ref);
        }
        this.table.execute(batch, options, null);
        fail();
    }
    catch (StorageException ex) {
        // expected values first per JUnit convention
        assertEquals(HttpURLConnection.HTTP_ENTITY_TOO_LARGE, ex.getHttpStatusCode());
        assertEquals(StorageErrorCodeStrings.REQUEST_BODY_TOO_LARGE, ex.getErrorCode());
        assertTrue(ex.getMessage().startsWith(
                "The request body is too large and exceeds the maximum permissible limit."));
    }
}
/** A batch delete with a stale etag must fail with UpdateConditionNotSatisfied (412). */
@Test
public void testBatchDeleteFail() throws StorageException {
    TableRequestOptions options = new TableRequestOptions();
    options.setTablePayloadFormat(TablePayloadFormat.Json);
    TableBatchOperation batch = new TableBatchOperation();
    // Insert entity to delete
    Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(baseEntity), options, null);
    // Replace the entity out-of-band so baseEntity's etag goes stale.
    Class1 updatedEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
    updatedEntity.setPartitionKey(baseEntity.getPartitionKey());
    updatedEntity.setRowKey(baseEntity.getRowKey());
    updatedEntity.setEtag(baseEntity.getEtag());
    this.table.execute(TableOperation.replace(updatedEntity), options, null);
    // add delete to fail
    batch.delete(baseEntity);
    try {
        this.table.execute(batch, options, null);
        fail();
    }
    catch (TableServiceException ex) {
        // expected values first per JUnit convention
        assertEquals("Precondition Failed", ex.getMessage());
        assertTrue(ex.getExtendedErrorInformation().getErrorMessage()
                .startsWith("The update condition specified in the request was not satisfied."));
        assertEquals(StorageErrorCodeStrings.UPDATE_CONDITION_NOT_SATISFIED, ex.getErrorCode());
    }
}
/** Inserting an already-existing entity via batch must fail with EntityAlreadyExists (409). */
@Test
public void testBatchInsertFail() throws StorageException {
    TableRequestOptions options = new TableRequestOptions();
    options.setTablePayloadFormat(TablePayloadFormat.Json);
    // insert entity
    Class1 ref = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(ref), options, null);
    try {
        TableBatchOperation batch = new TableBatchOperation();
        batch.insert(ref);
        this.table.execute(batch, options, null);
        fail();
    }
    catch (TableServiceException ex) {
        // expected values first per JUnit convention
        assertEquals("Conflict", ex.getMessage());
        assertTrue(ex.getExtendedErrorInformation().getErrorMessage()
                .startsWith("The specified entity already exists"));
        assertEquals(StorageErrorCodeStrings.ENTITY_ALREADY_EXISTS, ex.getErrorCode());
    }
}
/**
 * A batch replace with a stale etag must fail with UpdateConditionNotSatisfied (412).
 * Fixed: the batch execution now passes {@code options} like the other *Fail tests,
 * so the configured payload format is actually used.
 */
@Test
public void testBatchReplaceFail() throws StorageException {
    TableRequestOptions options = new TableRequestOptions();
    options.setTablePayloadFormat(TablePayloadFormat.Json);
    TableBatchOperation batch = new TableBatchOperation();
    // Insert entity to replace
    Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(baseEntity), options, null);
    // Replace the entity out-of-band so baseEntity's etag goes stale.
    Class1 updatedEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
    updatedEntity.setPartitionKey(baseEntity.getPartitionKey());
    updatedEntity.setRowKey(baseEntity.getRowKey());
    updatedEntity.setEtag(baseEntity.getEtag());
    this.table.execute(TableOperation.replace(updatedEntity), options, null);
    // add replace to fail (original comment said "merge")
    addReplaceToBatch(baseEntity, batch);
    try {
        this.table.execute(batch, options, null);
        fail();
    }
    catch (TableServiceException ex) {
        // expected values first per JUnit convention
        assertEquals("Precondition Failed", ex.getMessage());
        assertTrue(ex.getExtendedErrorInformation().getErrorMessage()
                .startsWith("The update condition specified in the request was not satisfied."));
        assertEquals(StorageErrorCodeStrings.UPDATE_CONDITION_NOT_SATISFIED, ex.getErrorCode());
    }
}
/** A batch merge with a stale etag must fail with UpdateConditionNotSatisfied (412). */
@Test
public void testBatchMergeFail() throws StorageException {
    TableRequestOptions options = new TableRequestOptions();
    options.setTablePayloadFormat(TablePayloadFormat.Json);
    TableBatchOperation batch = new TableBatchOperation();
    addInsertBatch(batch);
    // Insert entity to merge
    Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(baseEntity), options, null);
    // Replace the entity out-of-band so baseEntity's etag goes stale.
    Class1 updatedEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
    updatedEntity.setPartitionKey(baseEntity.getPartitionKey());
    updatedEntity.setRowKey(baseEntity.getRowKey());
    updatedEntity.setEtag(baseEntity.getEtag());
    this.table.execute(TableOperation.replace(updatedEntity), options, null);
    // add merge to fail
    addMergeToBatch(baseEntity, batch);
    try {
        this.table.execute(batch, options, null);
        fail();
    }
    catch (TableServiceException ex) {
        // expected values first per JUnit convention
        assertEquals("Precondition Failed", ex.getMessage());
        String errorAfterSemiColon = ex.getExtendedErrorInformation().getErrorMessage();
        errorAfterSemiColon = errorAfterSemiColon.substring(errorAfterSemiColon.indexOf(":") + 1);
        assertTrue(errorAfterSemiColon
                .startsWith("The update condition specified in the request was not satisfied."));
        assertEquals(StorageErrorCodeStrings.UPDATE_CONDITION_NOT_SATISFIED, ex.getErrorCode());
    }
}
/** A batch retrieve for a nonexistent entity yields one result with status 404 and a null payload. */
@Test
public void testBatchEmptyQuery() throws StorageException {
    TableRequestOptions options = new TableRequestOptions();
    options.setTablePayloadFormat(TablePayloadFormat.Json);
    // The entity is never inserted, so the retrieve must miss.
    Class1 ref = TableTestHelper.generateRandomEntity("jxscl_odata");
    TableBatchOperation batch = new TableBatchOperation();
    batch.retrieve(ref.getPartitionKey(), ref.getRowKey(), ref.getClass());
    ArrayList<TableResult> results = this.table.execute(batch, options, null);
    // expected values first per JUnit convention
    assertEquals(1, results.size());
    assertNull(results.get(0).getResult());
    assertEquals(HttpURLConnection.HTTP_NOT_FOUND, results.get(0).getHttpStatusCode());
}
/** Runs the all-operations batch scenario under each JSON payload format. */
@Test
public void testBatchWithAllOperations() throws StorageException {
    final TableRequestOptions options = new TableRequestOptions();
    for (final TablePayloadFormat format : new TablePayloadFormat[] { TablePayloadFormat.JsonFullMetadata,
            TablePayloadFormat.Json, TablePayloadFormat.JsonNoMetadata }) {
        options.setTablePayloadFormat(format);
        testBatchWithAllOperations(options);
    }
}
/**
 * Runs one batch containing each write operation type — insert, delete,
 * replace, insert-or-replace, merge, insert-or-merge — and verifies every
 * sub-response is 204 No Content.
 */
private void testBatchWithAllOperations(TableRequestOptions options) throws StorageException {
    TableBatchOperation batch = new TableBatchOperation();
    // insert
    addInsertBatch(batch);
    {
        // insert entity to delete
        Class1 delRef = TableTestHelper.generateRandomEntity("jxscl_odata");
        this.table.execute(TableOperation.insert(delRef), options, null);
        batch.delete(delRef);
    }
    {
        // Insert entity to replace
        Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
        this.table.execute(TableOperation.insert(baseEntity), options, null);
        addReplaceToBatch(baseEntity, batch);
    }
    {
        // Insert entity to insert or replace
        Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
        this.table.execute(TableOperation.insert(baseEntity), options, null);
        addInsertOrReplaceToBatch(baseEntity, batch);
    }
    {
        // Insert entity to merge
        Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
        this.table.execute(TableOperation.insert(baseEntity), options, null);
        addMergeToBatch(baseEntity, batch);
    }
    {
        // Insert entity to insert-or-merge
        Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
        this.table.execute(TableOperation.insert(baseEntity), options, null);
        addInsertOrMergeToBatch(baseEntity, batch);
    }
    ArrayList<TableResult> results = this.table.execute(batch, options, null);
    // expected values first per JUnit convention
    assertEquals(6, results.size());
    Iterator<TableResult> iter = results.iterator();
    // insert
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, iter.next().getHttpStatusCode());
    // delete
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, iter.next().getHttpStatusCode());
    // replace
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, iter.next().getHttpStatusCode());
    // insert or replace
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, iter.next().getHttpStatusCode());
    // merge
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, iter.next().getHttpStatusCode());
    // insert or merge
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, iter.next().getHttpStatusCode());
}
/** Runs the delete-batch scenario under each JSON payload format. */
@Test
public void testBatchDelete() throws StorageException {
    final TableRequestOptions options = new TableRequestOptions();
    for (final TablePayloadFormat format : new TablePayloadFormat[] { TablePayloadFormat.JsonFullMetadata,
            TablePayloadFormat.Json, TablePayloadFormat.JsonNoMetadata }) {
        options.setTablePayloadFormat(format);
        testBatchDelete(options);
    }
}
/** Deletes an entity via batch (expecting 204) and verifies that deleting it again yields 404. */
private void testBatchDelete(TableRequestOptions options) throws StorageException {
    Class1 ref = TableTestHelper.generateRandomEntity("jxscl_odata");
    // insert entity
    this.table.execute(TableOperation.insert(ref), options, null);
    TableBatchOperation batch = new TableBatchOperation();
    batch.delete(ref);
    ArrayList<TableResult> delResults = this.table.execute(batch, options, null);
    for (TableResult r : delResults) {
        // expected status first per JUnit convention
        assertEquals(HttpURLConnection.HTTP_NO_CONTENT, r.getHttpStatusCode());
    }
    // Re-running the same delete must fail: the entity no longer exists.
    try {
        this.table.execute(batch, options, null);
        fail();
    }
    catch (StorageException ex) {
        assertEquals(HttpURLConnection.HTTP_NOT_FOUND, ex.getHttpStatusCode());
    }
}
/**
 * Runs the retrieve-batch scenario under each JSON payload format, then once
 * more with no-metadata plus an explicit property resolver.
 */
@Test
public void testBatchRetrieve() throws StorageException {
    final TableRequestOptions options = new TableRequestOptions();
    for (final TablePayloadFormat format : new TablePayloadFormat[] { TablePayloadFormat.JsonFullMetadata,
            TablePayloadFormat.Json, TablePayloadFormat.JsonNoMetadata }) {
        options.setTablePayloadFormat(format);
        testBatchRetrieve(options);
    }
    // No-metadata again, this time with a property resolver supplied.
    options.setTablePayloadFormat(TablePayloadFormat.JsonNoMetadata);
    options.setPropertyResolver(new Class1());
    testBatchRetrieve(options);
}
/**
 * Inserts an entity, retrieves it via a single-retrieve batch, validates all
 * fields round-trip, then deletes it so repeated runs don't collide.
 */
private void testBatchRetrieve(TableRequestOptions options) throws StorageException {
    // insert entity
    Class1 ref = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(ref), options, null);
    TableBatchOperation batch = new TableBatchOperation();
    batch.retrieve(ref.getPartitionKey(), ref.getRowKey(), ref.getClass());
    ArrayList<TableResult> results = this.table.execute(batch, options, null);
    // expected values first per JUnit convention
    assertEquals(1, results.size());
    assertEquals(HttpURLConnection.HTTP_OK, results.get(0).getHttpStatusCode());
    Class1 retrievedRef = results.get(0).getResultAsType();
    assertEquals(ref.getA(), retrievedRef.getA());
    assertEquals(ref.getB(), retrievedRef.getB());
    assertEquals(ref.getC(), retrievedRef.getC());
    assertTrue(Arrays.equals(ref.getD(), retrievedRef.getD()));
    // clean up the inserted entity
    this.table.execute(TableOperation.delete(ref), options, null);
}
/**
 * Runs the resolver-based retrieve scenario under each JSON payload format,
 * then once more with no-metadata plus an explicit property resolver.
 */
@Test
public void tableBatchRetrieveWithEntityResolver() throws StorageException {
    final TableRequestOptions options = new TableRequestOptions();
    for (final TablePayloadFormat format : new TablePayloadFormat[] { TablePayloadFormat.JsonFullMetadata,
            TablePayloadFormat.Json, TablePayloadFormat.JsonNoMetadata }) {
        options.setTablePayloadFormat(format);
        tableBatchRetrieveWithEntityResolver(options);
    }
    // No-metadata again, this time with a property resolver supplied.
    options.setTablePayloadFormat(TablePayloadFormat.JsonNoMetadata);
    options.setPropertyResolver(new Class1());
    tableBatchRetrieveWithEntityResolver(options);
}
/**
 * Retrieves an entity through a batch using a custom EntityResolver and checks
 * that the resolved fields match the inserted entity.
 */
private void tableBatchRetrieveWithEntityResolver(TableRequestOptions options) throws StorageException {
    // insert entity
    Class1 randEnt = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(randEnt), options, null);
    TableBatchOperation batch = new TableBatchOperation();
    batch.retrieve(randEnt.getPartitionKey(), randEnt.getRowKey(), new EntityResolver<Class1>() {
        @Override
        public Class1 resolve(String partitionKey, String rowKey, Date timeStamp,
                HashMap<String, EntityProperty> properties, String etag) {
            // expected value first per JUnit convention
            assertEquals(4, properties.size());
            Class1 ref = new Class1();
            ref.setA(properties.get("A").getValueAsString());
            ref.setB(properties.get("B").getValueAsString());
            ref.setC(properties.get("C").getValueAsString());
            ref.setD(properties.get("D").getValueAsByteArray());
            return ref;
        }
    });
    ArrayList<TableResult> results = this.table.execute(batch, options, null);
    assertEquals(1, results.size());
    Class1 ent = (Class1) results.get(0).getResult();
    // Validate results against the inserted entity (expected first).
    assertEquals(randEnt.getA(), ent.getA());
    assertEquals(randEnt.getB(), ent.getB());
    assertEquals(randEnt.getC(), ent.getC());
    assertTrue(Arrays.equals(randEnt.getD(), ent.getD()));
}
/** Runs the insert-batch scenario under each JSON payload format. */
@Test
public void testBatchInsert() throws StorageException {
    final TableRequestOptions options = new TableRequestOptions();
    for (final TablePayloadFormat format : new TablePayloadFormat[] { TablePayloadFormat.JsonFullMetadata,
            TablePayloadFormat.Json, TablePayloadFormat.JsonNoMetadata }) {
        options.setTablePayloadFormat(format);
        testBatchInsert(options);
    }
}
/**
 * Batch of three inserts with varying echo-content settings plus a delete.
 * A 201 Created response indicates the entity was echoed back; 204 No Content
 * indicates it was not.
 * NOTE(review): the original comment called the default echo content "true",
 * but the asserted 204 for the first insert implies no echo — confirm
 * addInsertBatch's default.
 */
private void testBatchInsert(TableRequestOptions options) throws StorageException {
    TableBatchOperation batch = new TableBatchOperation();
    // Add 3 inserts
    addInsertBatch(batch); // default echo content
    addInsertBatch(batch, true); // set echo content to true
    addInsertBatch(batch, false); // set echo content to false
    // insert entity
    Class1 ref = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(ref), options, null);
    batch.delete(ref);
    ArrayList<TableResult> results = this.table.execute(batch, options, null);
    // expected values first per JUnit convention
    assertEquals(4, results.size());
    Iterator<TableResult> iter = results.iterator();
    TableResult res = iter.next();
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, res.getHttpStatusCode()); // default
    res = iter.next();
    assertEquals(HttpURLConnection.HTTP_CREATED, res.getHttpStatusCode()); // echo true
    res = iter.next();
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, res.getHttpStatusCode()); // echo false
    // delete
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, iter.next().getHttpStatusCode());
}
/** Runs the merge-batch scenario under each JSON payload format. */
@Test
public void testBatchMerge() throws StorageException {
    final TableRequestOptions options = new TableRequestOptions();
    for (final TablePayloadFormat format : new TablePayloadFormat[] { TablePayloadFormat.JsonFullMetadata,
            TablePayloadFormat.Json, TablePayloadFormat.JsonNoMetadata }) {
        options.setTablePayloadFormat(format);
        testBatchMerge(options);
    }
}
/**
 * Batch of insert + delete + merge; every sub-response should be 204.
 * Fixed: the setup insert and the batch execution now pass {@code options},
 * so the payload format under test is actually exercised.
 */
private void testBatchMerge(TableRequestOptions options) throws StorageException {
    TableBatchOperation batch = new TableBatchOperation();
    addInsertBatch(batch);
    // insert entity to delete
    Class1 delRef = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(delRef), options, null);
    batch.delete(delRef);
    // Insert entity to merge
    Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(baseEntity), options, null);
    addMergeToBatch(baseEntity, batch);
    ArrayList<TableResult> results = this.table.execute(batch, options, null);
    // expected values first per JUnit convention
    assertEquals(3, results.size());
    Iterator<TableResult> iter = results.iterator();
    // insert
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, iter.next().getHttpStatusCode());
    // delete
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, iter.next().getHttpStatusCode());
    // merge
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, iter.next().getHttpStatusCode());
}
/** Runs the replace-batch scenario under each JSON payload format. */
@Test
public void testBatchReplace() throws StorageException {
    final TableRequestOptions options = new TableRequestOptions();
    for (final TablePayloadFormat format : new TablePayloadFormat[] { TablePayloadFormat.JsonFullMetadata,
            TablePayloadFormat.Json, TablePayloadFormat.JsonNoMetadata }) {
        options.setTablePayloadFormat(format);
        testBatchReplace(options);
    }
}
/**
 * Batch of insert + delete + replace; every sub-response should be 204.
 * Fixed: the batch execution now passes {@code options}, so the payload
 * format under test is actually exercised.
 */
private void testBatchReplace(TableRequestOptions options) throws StorageException {
    TableBatchOperation batch = new TableBatchOperation();
    addInsertBatch(batch);
    // insert entity to delete
    Class1 delRef = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(delRef), options, null);
    batch.delete(delRef);
    // Insert entity to replace
    Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(baseEntity), options, null);
    addReplaceToBatch(baseEntity, batch);
    ArrayList<TableResult> results = this.table.execute(batch, options, null);
    // expected values first per JUnit convention
    assertEquals(3, results.size());
    Iterator<TableResult> iter = results.iterator();
    // insert
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, iter.next().getHttpStatusCode());
    // delete
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, iter.next().getHttpStatusCode());
    // replace
    assertEquals(HttpURLConnection.HTTP_NO_CONTENT, iter.next().getHttpStatusCode());
}
@Test
public void testBatchInsertOrMerge() throws StorageException {
    // Run the insert-or-merge batch scenario once per supported JSON payload format.
    final TableRequestOptions options = new TableRequestOptions();
    for (TablePayloadFormat format : new TablePayloadFormat[] {
            TablePayloadFormat.JsonFullMetadata, TablePayloadFormat.Json,
            TablePayloadFormat.JsonNoMetadata }) {
        options.setTablePayloadFormat(format);
        testBatchInsertOrMerge(options);
    }
}
/**
 * Executes a batch containing an insert, a delete and an insert-or-merge of a
 * pre-existing entity, verifying that every sub-operation reports HTTP 204
 * (No Content).
 *
 * @param options request options carrying the payload format under test.
 * @throws StorageException if any table operation fails.
 */
private void testBatchInsertOrMerge(TableRequestOptions options) throws StorageException {
    TableBatchOperation batch = new TableBatchOperation();
    addInsertBatch(batch);

    // insert entity to delete
    Class1 delRef = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(delRef), options, null);
    batch.delete(delRef);

    // Insert entity to merge
    Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(baseEntity), options, null);
    addInsertOrMergeToBatch(baseEntity, batch);

    // BUGFIX: execute the batch with the supplied options. Previously this
    // used this.table.execute(batch), so the payload format under test was
    // never applied to the batch request itself (compare
    // testBatchInsertOrReplace, which passes the options through).
    ArrayList<TableResult> results = this.table.execute(batch, options, null);
    assertEquals(results.size(), 3);

    Iterator<TableResult> iter = results.iterator();
    // insert
    assertEquals(iter.next().getHttpStatusCode(), HttpURLConnection.HTTP_NO_CONTENT);
    // delete
    assertEquals(iter.next().getHttpStatusCode(), HttpURLConnection.HTTP_NO_CONTENT);
    // merge
    assertEquals(iter.next().getHttpStatusCode(), HttpURLConnection.HTTP_NO_CONTENT);
}
@Test
public void testBatchInsertOrReplace() throws StorageException {
    // Run the insert-or-replace batch scenario once per supported JSON payload format.
    final TableRequestOptions options = new TableRequestOptions();
    for (TablePayloadFormat format : new TablePayloadFormat[] {
            TablePayloadFormat.JsonFullMetadata, TablePayloadFormat.Json,
            TablePayloadFormat.JsonNoMetadata }) {
        options.setTablePayloadFormat(format);
        testBatchInsertOrReplace(options);
    }
}
/**
 * Executes a batch containing an insert, a delete and an insert-or-replace of
 * a pre-existing entity, verifying every sub-operation reports HTTP 204.
 *
 * @param options request options carrying the payload format under test.
 * @throws StorageException if any table operation fails.
 */
private void testBatchInsertOrReplace(TableRequestOptions options) throws StorageException {
    final TableBatchOperation batchOp = new TableBatchOperation();
    addInsertBatch(batchOp);

    // Seed an entity so the batch has something to delete.
    final Class1 entityToDelete = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(entityToDelete), options, null);
    batchOp.delete(entityToDelete);

    // Seed an entity so the batch has something to replace.
    final Class1 existing = TableTestHelper.generateRandomEntity("jxscl_odata");
    this.table.execute(TableOperation.insert(existing), options, null);
    addInsertOrReplaceToBatch(existing, batchOp);

    // The insert, delete and replace should each succeed with 204 No Content.
    final ArrayList<TableResult> results = this.table.execute(batchOp, options, null);
    assertEquals(results.size(), 3);
    for (TableResult result : results) {
        assertEquals(result.getHttpStatusCode(), HttpURLConnection.HTTP_NO_CONTENT);
    }
}
@Test
public void testInsertBatch1() throws StorageException {
    // Insert-then-delete round trip with a single-entity batch, per payload format.
    final TableRequestOptions options = new TableRequestOptions();
    for (TablePayloadFormat format : new TablePayloadFormat[] {
            TablePayloadFormat.JsonFullMetadata, TablePayloadFormat.Json,
            TablePayloadFormat.JsonNoMetadata }) {
        options.setTablePayloadFormat(format);
        insertAndDeleteBatchWithX(1, options);
    }
}
@Test
public void testInsertBatch10() throws StorageException {
    // Insert-then-delete round trip with a 10-entity batch, per payload format.
    final TableRequestOptions options = new TableRequestOptions();
    for (TablePayloadFormat format : new TablePayloadFormat[] {
            TablePayloadFormat.JsonFullMetadata, TablePayloadFormat.Json,
            TablePayloadFormat.JsonNoMetadata }) {
        options.setTablePayloadFormat(format);
        insertAndDeleteBatchWithX(10, options);
    }
}
@Test
public void testInsertBatch100() throws StorageException {
    // Insert-then-delete round trip with a 100-entity batch (the service
    // maximum for an entity group transaction), per payload format.
    final TableRequestOptions options = new TableRequestOptions();
    for (TablePayloadFormat format : new TablePayloadFormat[] {
            TablePayloadFormat.JsonFullMetadata, TablePayloadFormat.Json,
            TablePayloadFormat.JsonNoMetadata }) {
        options.setTablePayloadFormat(format);
        insertAndDeleteBatchWithX(100, options);
    }
}
@Test
public void testUpsertBatch1() throws StorageException {
    // Upsert-then-delete round trip with a single-entity batch, per payload format.
    final TableRequestOptions options = new TableRequestOptions();
    for (TablePayloadFormat format : new TablePayloadFormat[] {
            TablePayloadFormat.JsonFullMetadata, TablePayloadFormat.Json,
            TablePayloadFormat.JsonNoMetadata }) {
        options.setTablePayloadFormat(format);
        upsertAndDeleteBatchWithX(1, options);
    }
}
@Test
public void testUpsertBatch10() throws StorageException {
    // Upsert-then-delete round trip with a 10-entity batch, per payload format.
    final TableRequestOptions options = new TableRequestOptions();
    for (TablePayloadFormat format : new TablePayloadFormat[] {
            TablePayloadFormat.JsonFullMetadata, TablePayloadFormat.Json,
            TablePayloadFormat.JsonNoMetadata }) {
        options.setTablePayloadFormat(format);
        upsertAndDeleteBatchWithX(10, options);
    }
}
@Test
public void testUpsertBatch100() throws StorageException {
    // Upsert-then-delete round trip with a 100-entity batch, per payload format.
    final TableRequestOptions options = new TableRequestOptions();
    for (TablePayloadFormat format : new TablePayloadFormat[] {
            TablePayloadFormat.JsonFullMetadata, TablePayloadFormat.Json,
            TablePayloadFormat.JsonNoMetadata }) {
        options.setTablePayloadFormat(format);
        upsertAndDeleteBatchWithX(100, options);
    }
}
/**
 * Queues a freshly generated random entity for insertion in {@code batch}.
 *
 * @param batch the batch to append the insert operation to.
 * @return the entity that was queued, so callers can assert against it later.
 */
private Class1 addInsertBatch(TableBatchOperation batch) {
    final Class1 entity = TableTestHelper.generateRandomEntity("jxscl_odata");
    batch.insert(entity);
    return entity;
}
/**
 * Queues a freshly generated random entity for insertion in {@code batch},
 * controlling whether the service echoes the inserted content in its response.
 *
 * @param batch       the batch to append the insert operation to.
 * @param echoContent whether the insert should request content echo.
 * @return the entity that was queued.
 */
private Class1 addInsertBatch(TableBatchOperation batch, boolean echoContent) {
    final Class1 entity = TableTestHelper.generateRandomEntity("jxscl_odata");
    batch.insert(entity, echoContent);
    return entity;
}
/**
 * Queues an insert-or-merge of a {@link Class2} entity targeting the same row
 * as {@code baseEntity}.
 *
 * @param baseEntity entity supplying the partition key, row key and etag.
 * @param batch      the batch to append the operation to.
 * @return the entity that was queued.
 */
private Class2 addInsertOrMergeToBatch(Class1 baseEntity, TableBatchOperation batch) {
    final Class2 entity = createEntityToReplaceOrMerge(baseEntity);
    batch.insertOrMerge(entity);
    return entity;
}
/**
 * Queues an insert-or-replace of a {@link Class2} entity targeting the same
 * row as {@code baseEntity}.
 *
 * @param baseEntity entity supplying the partition key, row key and etag.
 * @param batch      the batch to append the operation to.
 * @return the entity that was queued.
 */
private Class2 addInsertOrReplaceToBatch(Class1 baseEntity, TableBatchOperation batch) {
    final Class2 entity = createEntityToReplaceOrMerge(baseEntity);
    batch.insertOrReplace(entity);
    return entity;
}
/**
 * Queues a merge of a {@link Class2} entity targeting the same row as
 * {@code baseEntity}.
 *
 * @param baseEntity entity supplying the partition key, row key and etag.
 * @param batch      the batch to append the operation to.
 * @return the entity that was queued.
 */
private Class2 addMergeToBatch(Class1 baseEntity, TableBatchOperation batch) {
    final Class2 entity = createEntityToReplaceOrMerge(baseEntity);
    batch.merge(entity);
    return entity;
}
/**
 * Queues a replace of a {@link Class2} entity targeting the same row as
 * {@code baseEntity}.
 *
 * @param baseEntity entity supplying the partition key, row key and etag.
 * @param batch      the batch to append the operation to.
 * @return the entity that was queued.
 */
private Class2 addReplaceToBatch(Class1 baseEntity, TableBatchOperation batch) {
    final Class2 entity = createEntityToReplaceOrMerge(baseEntity);
    batch.replace(entity);
    return entity;
}
/**
 * Inserts {@code x} random entities in one batch, asserts each sub-operation
 * returned 204 No Content, then deletes them all in a second batch with the
 * same assertions.
 *
 * @param x       number of entities to insert and delete.
 * @param options request options applied to both batch executions.
 * @throws StorageException if either batch fails.
 */
private void insertAndDeleteBatchWithX(int x, TableRequestOptions options) throws StorageException {
    // Queue x random inserts in a single batch.
    final TableBatchOperation insertBatch = new TableBatchOperation();
    for (int i = 0; i < x; i++) {
        addInsertBatch(insertBatch);
    }

    // Execute the inserts; verify each succeeded and queue its entity for deletion.
    final TableBatchOperation deleteBatch = new TableBatchOperation();
    for (TableResult result : this.table.execute(insertBatch, options, null)) {
        assertEquals(result.getHttpStatusCode(), HttpURLConnection.HTTP_NO_CONTENT);
        deleteBatch.delete((Class1) result.getResult());
    }

    // Execute the deletes and verify each succeeded as well.
    for (TableResult result : this.table.execute(deleteBatch, options, null)) {
        assertEquals(result.getHttpStatusCode(), HttpURLConnection.HTTP_NO_CONTENT);
    }
}
/**
 * Upserts (insert-or-merge) {@code x} random entities in one batch, asserts
 * each sub-operation returned 204 No Content, then deletes them all in a
 * second batch with the same assertions.
 *
 * @param x       number of entities to upsert and delete.
 * @param options request options applied to both batch executions.
 * @throws StorageException if either batch fails.
 */
private void upsertAndDeleteBatchWithX(int x, TableRequestOptions options) throws StorageException {
    // Queue x insert-or-merge operations in a single batch.
    final TableBatchOperation upsertBatch = new TableBatchOperation();
    for (int i = 0; i < x; i++) {
        addInsertOrMergeToBatch(TableTestHelper.generateRandomEntity("jxscl_odata"), upsertBatch);
    }

    // Execute the upserts; verify each succeeded and queue its entity for deletion.
    final TableBatchOperation deleteBatch = new TableBatchOperation();
    for (TableResult result : this.table.execute(upsertBatch, options, null)) {
        assertEquals(result.getHttpStatusCode(), HttpURLConnection.HTTP_NO_CONTENT);
        deleteBatch.delete((Class2) result.getResult());
    }

    // Execute the deletes and verify each succeeded as well.
    for (TableResult result : this.table.execute(deleteBatch, options, null)) {
        assertEquals(result.getHttpStatusCode(), HttpURLConnection.HTTP_NO_CONTENT);
    }
}
/**
 * Builds one standalone {@link TableOperation} of each kind: insert, delete,
 * replace, insert-or-replace (with and without a pre-existing entity), merge,
 * and insert-or-merge (with and without a pre-existing entity). Wherever an
 * operation requires the target row to already exist (delete, replace, merge),
 * this method first inserts a live entity into the table.
 *
 * @return the list of operations, ready to be executed one by one.
 * @throws StorageException if any of the seeding inserts fails.
 */
private ArrayList<TableOperation> allOpsList() throws StorageException {
ArrayList<TableOperation> ops = new ArrayList<TableOperation>();
// insert
ops.add(TableOperation.insert(TableTestHelper.generateRandomEntity("jxscl_odata")));
{
// Insert entity to delete
Class1 delRef = TableTestHelper.generateRandomEntity("jxscl_odata");
this.table.execute(TableOperation.insert(delRef));
ops.add(TableOperation.delete(delRef));
}
{
// Insert entity to replace
Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
this.table.execute(TableOperation.insert(baseEntity));
ops.add(TableOperation.replace(createEntityToReplaceOrMerge(baseEntity)));
}
{
// Insert entity to insert or replace
Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
this.table.execute(TableOperation.insert(baseEntity));
ops.add(TableOperation.insertOrReplace(createEntityToReplaceOrMerge(baseEntity)));
}
{
// Insert or replace, no pre-existing entity
Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
ops.add(TableOperation.insertOrReplace(createEntityToReplaceOrMerge(baseEntity)));
}
{
// Insert entity to merge
Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
this.table.execute(TableOperation.insert(baseEntity));
ops.add(TableOperation.merge(createEntityToReplaceOrMerge(baseEntity)));
}
{
// Insert entity to insert or merge
// NOTE(review): unlike the replace/merge cases above, this queues the
// original Class1 entity itself rather than a Class2 created via
// createEntityToReplaceOrMerge — confirm this asymmetry is intentional.
Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
this.table.execute(TableOperation.insert(baseEntity));
ops.add(TableOperation.insertOrMerge(baseEntity));
}
{
// Insert or merge, no pre-existing entity
Class1 baseEntity = TableTestHelper.generateRandomEntity("jxscl_odata");
ops.add(TableOperation.insertOrMerge(baseEntity));
}
return ops;
}
/**
 * Builds a {@link Class2} entity that targets the same table row as
 * {@code baseEntity} (same partition key, row key and etag) but carries a
 * different set of properties, suitable for replace/merge operations.
 *
 * @param baseEntity entity whose identity (keys and etag) is copied.
 * @return the new entity.
 */
private Class2 createEntityToReplaceOrMerge(Class1 baseEntity) {
    final Class2 replacement = new Class2();
    // Distinct property values so a merge/replace is observable.
    replacement.setL("foo_L");
    replacement.setM("foo_M");
    replacement.setN("foo_N");
    replacement.setO("foo_O");
    // Same identity as the base entity so the operation targets its row.
    replacement.setPartitionKey(baseEntity.getPartitionKey());
    replacement.setRowKey(baseEntity.getRowKey());
    replacement.setEtag(baseEntity.getEtag());
    return replacement;
}
}
| |
package apple.uikit;
import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.foundation.NSURL;
import apple.foundation.protocol.NSCopying;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.MappedReturn;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
/**
 * Machine-generated MOE (Multi-OS Engine) NatJ binding for UIKit's
 * {@code UIEventAttribution} Objective-C class. All methods are native
 * declarations that forward to the corresponding Objective-C selectors; the
 * field-level javadoc below was carried over from the native headers.
 * Do not edit by hand — regenerate from the framework headers instead.
 */
@Generated
@Library("UIKit")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class UIEventAttribution extends NSObject implements NSCopying {
    static {
        // Register this binding class with the NatJ runtime before first use.
        NatJ.register();
    }

    @Generated
    protected UIEventAttribution(Pointer peer) {
        super(peer);
    }

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native UIEventAttribution alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native UIEventAttribution allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Owned
    @Selector("copyWithZone:")
    @MappedReturn(ObjCObjectMapper.class)
    public native Object copyWithZone(VoidPtr zone);

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    /**
     * The destination URL of an attribution. For example, the link opened when an advertisement is tapped.
     * <p>
     * This field corresponds to `attributed_on_site` in the subsequent attribution report.
     */
    @Generated
    @Selector("destinationURL")
    public native NSURL destinationURL();

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("init")
    public native UIEventAttribution init();

    /**
     * Create a `UIEventAttribution` object.
     *
     * @param sourceIdentifier An identifier associated with the attribution.
     * @param destinationURL The destination URL of the attribution.
     * @param sourceDescription A high-level description of the content to be attributed. Pass in an empty string if no description is available.
     * @param purchaser The name of the party that purchased the content to be attributed. Pass in an empty string if no name is available.
     * @return An instance of `UIEventAttribution` with the specified values for each field.
     * <p>
     * The `sourceDescription` and `purchaser` fields may be truncated by the system if they are too long.
     */
    @Generated
    @Selector("initWithSourceIdentifier:destinationURL:sourceDescription:purchaser:")
    public native UIEventAttribution initWithSourceIdentifierDestinationURLSourceDescriptionPurchaser(
            byte sourceIdentifier, NSURL destinationURL, String sourceDescription, String purchaser);

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    // Named new_objc because `new` is a reserved word in Java.
    @Generated
    @Owned
    @Selector("new")
    public static native UIEventAttribution new_objc();

    /**
     * The name of the party that purchased the content to be attributed. For example, this could be the name of the party that purchased the placement of an advertisement.
     * <p>
     * The system may truncate this field if it is too long.
     */
    @Generated
    @Selector("purchaser")
    public native String purchaser();

    /**
     * The URL to which the attribution report will be sent. You do not provide this field at creation time. Instead, you must define the URL
     * as a string in your app's Info.plist under the `NSAdvertisingAttributionReportEndpoint` key.
     * <p>
     * This field corresponds to `source_site` in the subsequent attribution report.
     */
    @Generated
    @Selector("reportEndpoint")
    public native NSURL reportEndpoint();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    /**
     * A high-level description of the source of the attribution. For example, this could be a description of the content of an advertisement a user taps on.
     * <p>
     * The system may truncate this field if it is too long.
     */
    @Generated
    @Selector("sourceDescription")
    public native String sourceDescription();

    /**
     * An identifier that is associated with the source of the attribution. For example, you may choose to use this as a campaign identifier to measure the effectiveness of different advertisement campaigns.
     * <p>
     * This field corresponds to `source_id` in the subsequent attribution report.
     */
    @Generated
    @Selector("sourceIdentifier")
    public native byte sourceIdentifier();

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.prepare;
import org.apache.calcite.config.CalciteConnectionConfig;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.model.ModelHandler;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeFactoryImpl;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.runtime.PredicateImpl;
import org.apache.calcite.schema.AggregateFunction;
import org.apache.calcite.schema.Function;
import org.apache.calcite.schema.FunctionParameter;
import org.apache.calcite.schema.ScalarFunction;
import org.apache.calcite.schema.Table;
import org.apache.calcite.schema.TableFunction;
import org.apache.calcite.schema.TableMacro;
import org.apache.calcite.schema.Wrapper;
import org.apache.calcite.schema.impl.ScalarFunctionImpl;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlOperatorBinding;
import org.apache.calcite.sql.SqlOperatorTable;
import org.apache.calcite.sql.SqlSyntax;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.FamilyOperandTypeChecker;
import org.apache.calcite.sql.type.InferTypes;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.ReturnTypes;
import org.apache.calcite.sql.type.SqlReturnTypeInference;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.util.ListSqlOperatorTable;
import org.apache.calcite.sql.validate.SqlMoniker;
import org.apache.calcite.sql.validate.SqlMonikerImpl;
import org.apache.calcite.sql.validate.SqlMonikerType;
import org.apache.calcite.sql.validate.SqlNameMatcher;
import org.apache.calcite.sql.validate.SqlNameMatchers;
import org.apache.calcite.sql.validate.SqlUserDefinedAggFunction;
import org.apache.calcite.sql.validate.SqlUserDefinedFunction;
import org.apache.calcite.sql.validate.SqlUserDefinedTableFunction;
import org.apache.calcite.sql.validate.SqlUserDefinedTableMacro;
import org.apache.calcite.sql.validate.SqlValidatorUtil;
import org.apache.calcite.util.Util;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
/**
* Implementation of {@link org.apache.calcite.prepare.Prepare.CatalogReader}
* and also {@link org.apache.calcite.sql.SqlOperatorTable} based on tables and
* functions defined schemas.
*/
public class CalciteCatalogReader implements Prepare.CatalogReader {
    protected final CalciteSchema rootSchema;
    protected final RelDataTypeFactory typeFactory;
    // Search path: a distinct, ordered list of schema paths consulted when
    // resolving unqualified names.
    private final List<List<String>> schemaPaths;
    protected final SqlNameMatcher nameMatcher;
    protected final CalciteConnectionConfig config;

    /**
     * Creates a catalog reader whose search path is the given default schema
     * followed by the root schema. Case sensitivity is taken from
     * {@code config} (defaulting to insensitive when config is null).
     */
    public CalciteCatalogReader(CalciteSchema rootSchema,
        List<String> defaultSchema, RelDataTypeFactory typeFactory, CalciteConnectionConfig config) {
        this(rootSchema, SqlNameMatchers.withCaseSensitive(config != null && config.caseSensitive()),
            ImmutableList.of(Preconditions.checkNotNull(defaultSchema),
                ImmutableList.<String>of()),
            typeFactory, config);
    }

    protected CalciteCatalogReader(CalciteSchema rootSchema,
        SqlNameMatcher nameMatcher, List<List<String>> schemaPaths,
        RelDataTypeFactory typeFactory, CalciteConnectionConfig config) {
        this.rootSchema = Preconditions.checkNotNull(rootSchema);
        this.nameMatcher = nameMatcher;
        // De-duplicate the search path while preserving order.
        this.schemaPaths =
            Util.immutableCopy(Util.isDistinct(schemaPaths)
                ? schemaPaths
                : new LinkedHashSet<>(schemaPaths));
        this.typeFactory = typeFactory;
        this.config = config;
    }

    /** Returns a copy of this reader with a different default schema path. */
    public CalciteCatalogReader withSchemaPath(List<String> schemaPath) {
        return new CalciteCatalogReader(rootSchema, nameMatcher,
            ImmutableList.of(schemaPath, ImmutableList.<String>of()), typeFactory, config);
    }

    /**
     * Resolves a (possibly qualified) table name to a planner table, or null
     * if not found. If the underlying table wraps its own
     * {@link Prepare.PreparingTable}, that is returned directly; otherwise a
     * {@link RelOptTableImpl} is created around the table's row type.
     */
    public Prepare.PreparingTable getTable(final List<String> names) {
        // First look in the default schema, if any.
        // If not found, look in the root schema.
        CalciteSchema.TableEntry entry = SqlValidatorUtil.getTableEntry(this, names);
        if (entry != null) {
            final Table table = entry.getTable();
            if (table instanceof Wrapper) {
                final Prepare.PreparingTable relOptTable =
                    ((Wrapper) table).unwrap(Prepare.PreparingTable.class);
                if (relOptTable != null) {
                    return relOptTable;
                }
            }
            return RelOptTableImpl.create(this,
                table.getRowType(typeFactory), entry, null);
        }
        return null;
    }

    @Override public CalciteConnectionConfig getConfig() {
        return config;
    }

    /**
     * Collects all functions matching the last component of {@code names},
     * searching each schema on the effective search path. For qualified names
     * the configured path is ignored except for its last entries
     * ("/catalog" and "/").
     */
    private Collection<Function> getFunctionsFrom(List<String> names) {
        final List<Function> functions2 = Lists.newArrayList();
        final List<List<String>> schemaNameList = new ArrayList<>();
        if (names.size() > 1) {
            // Name qualified: ignore path. But we do look in "/catalog" and "/",
            // the last 2 items in the path.
            if (schemaPaths.size() > 1) {
                schemaNameList.addAll(Util.skip(schemaPaths));
            } else {
                schemaNameList.addAll(schemaPaths);
            }
        } else {
            for (List<String> schemaPath : schemaPaths) {
                CalciteSchema schema =
                    SqlValidatorUtil.getSchema(rootSchema, schemaPath, nameMatcher);
                if (schema != null) {
                    schemaNameList.addAll(schema.getPath());
                }
            }
        }
        for (List<String> schemaNames : schemaNameList) {
            CalciteSchema schema =
                SqlValidatorUtil.getSchema(rootSchema,
                    Iterables.concat(schemaNames, Util.skipLast(names)), nameMatcher);
            if (schema != null) {
                final String name = Util.last(names);
                functions2.addAll(schema.getFunctions(name, true));
            }
        }
        return functions2;
    }

    /** Resolves a user-defined type by name, or returns null if unknown. */
    public RelDataType getNamedType(SqlIdentifier typeName) {
        CalciteSchema.TypeEntry typeEntry = SqlValidatorUtil.getTypeEntry(getRootSchema(), typeName);
        if (typeEntry != null) {
            return typeEntry.getType().apply(typeFactory);
        } else {
            return null;
        }
    }

    /**
     * Lists monikers (schema, sub-schemas, tables, functions/views) visible
     * under the schema identified by {@code names}; empty if no such schema.
     */
    public List<SqlMoniker> getAllSchemaObjectNames(List<String> names) {
        final CalciteSchema schema =
            SqlValidatorUtil.getSchema(rootSchema, names, nameMatcher);
        if (schema == null) {
            return ImmutableList.of();
        }
        final List<SqlMoniker> result = new ArrayList<>();

        // Add root schema if not anonymous
        if (!schema.name.equals("")) {
            result.add(moniker(schema, null, SqlMonikerType.SCHEMA));
        }

        final Map<String, CalciteSchema> schemaMap = schema.getSubSchemaMap();

        for (String subSchema : schemaMap.keySet()) {
            result.add(moniker(schema, subSchema, SqlMonikerType.SCHEMA));
        }

        for (String table : schema.getTableNames()) {
            result.add(moniker(schema, table, SqlMonikerType.TABLE));
        }

        final NavigableSet<String> functions = schema.getFunctionNames();
        for (String function : functions) { // views are here as well
            result.add(moniker(schema, function, SqlMonikerType.FUNCTION));
        }
        return result;
    }

    /** Builds a moniker; single-component paths under a named root become CATALOG. */
    private SqlMonikerImpl moniker(CalciteSchema schema, String name,
        SqlMonikerType type) {
        final List<String> path = schema.path(name);
        if (path.size() == 1
            && !schema.root().name.equals("")
            && type == SqlMonikerType.SCHEMA) {
            type = SqlMonikerType.CATALOG;
        }
        return new SqlMonikerImpl(path, type);
    }

    public List<List<String>> getSchemaPaths() {
        return schemaPaths;
    }

    public Prepare.PreparingTable getTableForMember(List<String> names) {
        return getTable(names);
    }

    @SuppressWarnings("deprecation")
    public RelDataTypeField field(RelDataType rowType, String alias) {
        return nameMatcher.field(rowType, alias);
    }

    @SuppressWarnings("deprecation")
    public boolean matches(String string, String name) {
        return nameMatcher.matches(string, name);
    }

    public RelDataType createTypeFromProjection(final RelDataType type,
        final List<String> columnNameList) {
        return SqlValidatorUtil.createTypeFromProjection(type, columnNameList,
            typeFactory, nameMatcher.isCaseSensitive());
    }

    /**
     * Resolves schema-defined functions matching {@code opName} and appends
     * SqlOperators for them to {@code operatorList}. Only FUNCTION syntax is
     * handled; the category filter separates table functions/macros from
     * scalar and aggregate functions.
     */
    public void lookupOperatorOverloads(final SqlIdentifier opName,
        SqlFunctionCategory category,
        SqlSyntax syntax,
        List<SqlOperator> operatorList) {
        if (syntax != SqlSyntax.FUNCTION) {
            return;
        }

        final Predicate<Function> predicate;
        if (category == null) {
            predicate = Predicates.alwaysTrue();
        } else if (category.isTableFunction()) {
            predicate = new PredicateImpl<Function>() {
                public boolean test(Function function) {
                    return function instanceof TableMacro
                        || function instanceof TableFunction;
                }
            };
        } else {
            predicate = new PredicateImpl<Function>() {
                public boolean test(Function function) {
                    return !(function instanceof TableMacro
                        || function instanceof TableFunction);
                }
            };
        }
        final Collection<Function> functions =
            Collections2.filter(getFunctionsFrom(opName.names), predicate);
        if (functions.isEmpty()) {
            return;
        }
        operatorList.addAll(
            Collections2.transform(functions,
                new com.google.common.base.Function<Function, SqlOperator>() {
                    public SqlOperator apply(Function function) {
                        return toOp(opName, function);
                    }
                }));
    }

    /** Creates an operator table that contains functions in the given class.
     *
     * @see ModelHandler#addFunctions */
    public static SqlOperatorTable operatorTable(String className) {
        // Dummy schema to collect the functions
        final CalciteSchema schema =
            CalciteSchema.createRootSchema(false, false);
        ModelHandler.addFunctions(schema.plus(), null, ImmutableList.<String>of(),
            className, "*", true);

        // The following is technical debt; see [CALCITE-2082] Remove
        // RelDataTypeFactory argument from SqlUserDefinedAggFunction constructor
        final SqlTypeFactoryImpl typeFactory =
            new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);

        final ListSqlOperatorTable table = new ListSqlOperatorTable();
        for (String name : schema.getFunctionNames()) {
            for (Function function : schema.getFunctions(name, true)) {
                final SqlIdentifier id = new SqlIdentifier(name, SqlParserPos.ZERO);
                table.add(
                    toOp(typeFactory, id, function));
            }
        }
        return table;
    }

    private SqlOperator toOp(SqlIdentifier name, final Function function) {
        return toOp(typeFactory, name, function);
    }

    /** Converts a function to a {@link org.apache.calcite.sql.SqlOperator}.
     *
     * <p>The {@code typeFactory} argument is technical debt; see [CALCITE-2082]
     * Remove RelDataTypeFactory argument from SqlUserDefinedAggFunction
     * constructor. */
    private static SqlOperator toOp(RelDataTypeFactory typeFactory,
        SqlIdentifier name, final Function function) {
        // Gather parameter types and their SQL type families (ANY when a
        // parameter's type has no family) for operand type checking.
        List<RelDataType> argTypes = new ArrayList<>();
        List<SqlTypeFamily> typeFamilies = new ArrayList<>();
        for (FunctionParameter o : function.getParameters()) {
            final RelDataType type = o.getType(typeFactory);
            argTypes.add(type);
            typeFamilies.add(
                Util.first(type.getSqlTypeName().getFamily(), SqlTypeFamily.ANY));
        }
        final Predicate<Integer> optional =
            new PredicateImpl<Integer>() {
                public boolean test(Integer input) {
                    return function.getParameters().get(input).isOptional();
                }
            };
        final FamilyOperandTypeChecker typeChecker =
            OperandTypes.family(typeFamilies, optional);
        final List<RelDataType> paramTypes = toSql(typeFactory, argTypes);
        // Dispatch on the concrete function kind; table macros/functions use
        // CURSOR as the return type.
        if (function instanceof ScalarFunction) {
            return new SqlUserDefinedFunction(name, infer((ScalarFunction) function),
                InferTypes.explicit(argTypes), typeChecker, paramTypes, function);
        } else if (function instanceof AggregateFunction) {
            return new SqlUserDefinedAggFunction(name,
                infer((AggregateFunction) function), InferTypes.explicit(argTypes),
                typeChecker, (AggregateFunction) function, false, false, typeFactory);
        } else if (function instanceof TableMacro) {
            return new SqlUserDefinedTableMacro(name, ReturnTypes.CURSOR,
                InferTypes.explicit(argTypes), typeChecker, paramTypes,
                (TableMacro) function);
        } else if (function instanceof TableFunction) {
            return new SqlUserDefinedTableFunction(name, ReturnTypes.CURSOR,
                InferTypes.explicit(argTypes), typeChecker, paramTypes,
                (TableFunction) function);
        } else {
            throw new AssertionError("unknown function type " + function);
        }
    }

    /** Return-type inference for a scalar function, normalized to a SQL type. */
    private static SqlReturnTypeInference infer(final ScalarFunction function) {
        return new SqlReturnTypeInference() {
            public RelDataType inferReturnType(SqlOperatorBinding opBinding) {
                final RelDataTypeFactory typeFactory = opBinding.getTypeFactory();
                final RelDataType type;
                if (function instanceof ScalarFunctionImpl) {
                    type = ((ScalarFunctionImpl) function).getReturnType(typeFactory,
                        opBinding);
                } else {
                    type = function.getReturnType(typeFactory);
                }
                return toSql(typeFactory, type);
            }
        };
    }

    /** Return-type inference for an aggregate function, normalized to a SQL type. */
    private static SqlReturnTypeInference infer(
        final AggregateFunction function) {
        return new SqlReturnTypeInference() {
            public RelDataType inferReturnType(SqlOperatorBinding opBinding) {
                final RelDataTypeFactory typeFactory = opBinding.getTypeFactory();
                final RelDataType type = function.getReturnType(typeFactory);
                return toSql(typeFactory, type);
            }
        };
    }

    private static List<RelDataType> toSql(
        final RelDataTypeFactory typeFactory, List<RelDataType> types) {
        return Lists.transform(types,
            new com.google.common.base.Function<RelDataType, RelDataType>() {
                public RelDataType apply(RelDataType type) {
                    return toSql(typeFactory, type);
                }
            });
    }

    /** Converts a Java type to its SQL equivalent; java.lang.Object maps to nullable ANY. */
    private static RelDataType toSql(RelDataTypeFactory typeFactory,
        RelDataType type) {
        if (type instanceof RelDataTypeFactoryImpl.JavaType
            && ((RelDataTypeFactoryImpl.JavaType) type).getJavaClass()
            == Object.class) {
            return typeFactory.createTypeWithNullability(
                typeFactory.createSqlType(SqlTypeName.ANY), true);
        }
        return JavaTypeFactoryImpl.toSql(typeFactory, type);
    }

    // This reader does not enumerate its operators up front.
    public List<SqlOperator> getOperatorList() {
        return null;
    }

    public CalciteSchema getRootSchema() {
        return rootSchema;
    }

    public RelDataTypeFactory getTypeFactory() {
        return typeFactory;
    }

    // No planner rules are contributed by this catalog reader.
    public void registerRules(RelOptPlanner planner) throws Exception {
    }

    @SuppressWarnings("deprecation")
    @Override public boolean isCaseSensitive() {
        return nameMatcher.isCaseSensitive();
    }

    public SqlNameMatcher nameMatcher() {
        return nameMatcher;
    }

    @Override public <C> C unwrap(Class<C> aClass) {
        if (aClass.isInstance(this)) {
            return aClass.cast(this);
        }
        return null;
    }
}
// End CalciteCatalogReader.java
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.