gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2005-2012 Roger Kapsi, Sam Berlin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ardverk.collection;
import java.io.Serializable;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.SortedMap;
/**
* <h3>PATRICIA {@link Trie}</h3>
*
* <i>Practical Algorithm to Retrieve Information Coded in Alphanumeric</i>
*
* <p>A PATRICIA {@link Trie} is a compressed {@link Trie}. Instead of storing
* all data at the edges of the {@link Trie} (and having empty internal nodes),
* PATRICIA stores data in every node. This allows for very efficient traversal,
* insert, delete, predecessor, successor, prefix, range, and {@link #select(Object)}
* operations. All operations are performed at worst in O(K) time, where K
* is the number of bits in the largest item in the tree. In practice,
* operations actually take O(A(K)) time, where A(K) is the average number of
* bits of all items in the tree.
*
* <p>Most importantly, PATRICIA requires very few comparisons to keys while
* doing any operation. While performing a lookup, each comparison (at most
* K of them, described above) will perform a single bit comparison against
* the given key, instead of comparing the entire key to another key.
*
 * <p>The {@link Trie} can return its entries in lexicographical order using the
 * {@link #traverse(Cursor)}, 'prefix', 'submap', or 'iterator' methods. The
* {@link Trie} can also scan for items that are 'bitwise' (using an XOR
* metric) by the 'select' method. Bitwise closeness is determined by the
* {@link KeyAnalyzer} returning true or false for a bit being set or not in
* a given key.
*
* <p>Any methods here that take an {@link Object} argument may throw a
* {@link ClassCastException} if the method is expecting an instance of K
* and it isn't K.
*
* @see <a href="http://en.wikipedia.org/wiki/Radix_tree">Radix Tree</a>
* @see <a href="http://www.csse.monash.edu.au/~lloyd/tildeAlgDS/Tree/PATRICIA">PATRICIA</a>
* @see <a href="http://www.imperialviolet.org/binary/critbit.pdf">Crit-Bit Tree</a>
*
* @author Roger Kapsi
* @author Sam Berlin
*/
public class PatriciaTrie<K, V> extends AbstractPatriciaTrie<K, V>
implements Serializable {
private static final long serialVersionUID = -2246014692353432660L;
/**
 * Creates an empty {@link PatriciaTrie} using the superclass defaults.
 */
public PatriciaTrie() {
    super();
}
/**
 * Creates a {@link PatriciaTrie} pre-populated with the mappings of {@code m}.
 */
public PatriciaTrie(Map<? extends K, ? extends V> m) {
    super(m);
}
/**
 * Creates an empty {@link PatriciaTrie} that uses the given
 * {@link KeyAnalyzer} to interpret keys bitwise.
 */
public PatriciaTrie(KeyAnalyzer<? super K> keyAnalyzer) {
    super(keyAnalyzer);
}
/**
 * Creates a {@link PatriciaTrie} that uses the given {@link KeyAnalyzer}
 * and is pre-populated with the mappings of {@code m}.
 */
public PatriciaTrie(KeyAnalyzer<? super K> keyAnalyzer,
        Map<? extends K, ? extends V> m) {
    super(keyAnalyzer, m);
}
/**
 * {@inheritDoc}
 *
 * <p>The trie's {@link KeyAnalyzer} doubles as its {@link Comparator}.
 */
@Override
public Comparator<? super K> comparator() {
    return keyAnalyzer;
}
/**
 * Returns a {@link SortedMap} view of all entries whose keys start with
 * the given prefix. A zero-length prefix matches every key, in which
 * case the whole trie itself is returned as the view.
 */
@Override
public SortedMap<K, V> prefixMap(K prefix) {
    if (lengthInBits(prefix) == 0) {
        // Every key has the empty prefix: the view is the entire trie.
        return this;
    }
    return new PrefixRangeMap(prefix);
}
/**
 * {@inheritDoc}
 *
 * @throws NoSuchElementException if the {@link Trie} is empty
 */
@Override
public K firstKey() {
    TrieEntry<K, V> entry = firstEntry();
    if (entry == null) {
        // firstEntry() returns null on an empty trie; calling getKey()
        // on it threw a NullPointerException, but SortedMap.firstKey()
        // is specified to throw NoSuchElementException instead.
        throw new NoSuchElementException();
    }
    return entry.getKey();
}
/**
 * {@inheritDoc}
 *
 * @throws NoSuchElementException if the {@link Trie} is empty
 */
@Override
public K lastKey() {
    TrieEntry<K, V> entry = lastEntry();
    if (entry == null) {
        // Returning null here violated the SortedMap contract, which
        // requires NoSuchElementException when the map is empty. It was
        // also inconsistent with firstKey(), which never returns null.
        throw new NoSuchElementException();
    }
    return entry.getKey();
}
/**
 * {@inheritDoc}
 *
 * <p>Implemented as a {@link RangeEntryMap} with an open lower bound.
 */
@Override
public SortedMap<K, V> headMap(K toKey) {
    return new RangeEntryMap(null, toKey);
}
/**
 * {@inheritDoc}
 *
 * <p>Implemented as a {@link RangeEntryMap}; by the two-argument
 * constructor's convention, {@code fromKey} is inclusive and
 * {@code toKey} is exclusive.
 */
@Override
public SortedMap<K, V> subMap(K fromKey, K toKey) {
    return new RangeEntryMap(fromKey, toKey);
}
/**
 * {@inheritDoc}
 *
 * <p>Implemented as a {@link RangeEntryMap} with an open upper bound.
 */
@Override
public SortedMap<K, V> tailMap(K fromKey) {
    return new RangeEntryMap(fromKey, null);
}
/**
 * Returns the entry whose key is strictly greater than the given key,
 * or null if no such entry exists.
 *
 * <p>The general strategy: locate the nearest entry for the key; on an
 * exact match take its successor; otherwise temporarily insert the key,
 * take the successor of the inserted node, and undo the insertion.
 */
private TrieEntry<K,V> higherEntry(K key) {
    // TODO: Cleanup so that we don't actually have to add/remove from the
    // tree. (We do it here because there are other well-defined
    // functions to perform the search.)
    int lengthInBits = lengthInBits(key);
    if (lengthInBits == 0) {
        // A zero-length key sorts before every other key.
        if (!root.isEmpty()) {
            // If data in root, and more after -- return it.
            if (size() > 1) {
                return nextEntry(root);
            } else { // If no more after, no higher entry.
                return null;
            }
        } else {
            // Root is empty & we want something after empty, return first.
            return firstEntry();
        }
    }
    TrieEntry<K, V> found = getNearestEntryForKey(key);
    if (compareKeys(key, found.key)) {
        // Exact match: the strictly-higher entry is its successor.
        return nextEntry(found);
    }
    int bitIndex = bitIndex(key, found.key);
    if (Tries.isValidBitIndex(bitIndex)) {
        // Key is not present: insert it temporarily, take the successor
        // of the inserted node, then remove it again.
        TrieEntry<K, V> added = new TrieEntry<K, V>(key, null, bitIndex);
        addEntry(added);
        incrementSize(); // must increment because remove will decrement
        TrieEntry<K, V> ceil = nextEntry(added);
        removeEntry(added);
        modCount -= 2; // we didn't really modify it.
        return ceil;
    } else if (Tries.isNullBitKey(bitIndex)) {
        // Key is the null/empty bit key: every real entry is higher,
        // unless root itself holds the only (equal) entry.
        if (!root.isEmpty()) {
            return firstEntry();
        } else if (size() > 1) {
            return nextEntry(firstEntry());
        } else {
            return null;
        }
    } else if (Tries.isEqualBitKey(bitIndex)) {
        // Bitwise-equal key: treat exactly like the exact-match case.
        return nextEntry(found);
    }
    // we should have exited above.
    throw new IllegalStateException("invalid lookup: " + key);
}
/**
 * Returns a key-value mapping associated with the least key greater
 * than or equal to the given key, or null if there is no such key.
 */
TrieEntry<K,V> ceilingEntry(K key) {
    // Basically:
    // Follow the steps of adding an entry, but instead...
    //
    // - If we ever encounter a situation where we found an equal
    //   key, we return it immediately.
    //
    // - If we hit an empty root, return the first iterable item.
    //
    // - If we have to add a new item, we temporarily add it,
    //   find the successor to it, then remove the added item.
    //
    // These steps ensure that the returned value is either the
    // entry for the key itself, or the first entry directly after
    // the key.
    // TODO: Cleanup so that we don't actually have to add/remove from the
    // tree. (We do it here because there are other well-defined
    // functions to perform the search.)
    int lengthInBits = lengthInBits(key);
    if (lengthInBits == 0) {
        // Zero-length key: root (if occupied) is its ceiling, otherwise
        // the first real entry is.
        if (!root.isEmpty()) {
            return root;
        } else {
            return firstEntry();
        }
    }
    TrieEntry<K, V> found = getNearestEntryForKey(key);
    if (compareKeys(key, found.key)) {
        // Exact match is its own ceiling.
        return found;
    }
    int bitIndex = bitIndex(key, found.key);
    if (Tries.isValidBitIndex(bitIndex)) {
        // Key is absent: temporarily insert, take the successor, undo.
        TrieEntry<K, V> added = new TrieEntry<K, V>(key, null, bitIndex);
        addEntry(added);
        incrementSize(); // must increment because remove will decrement
        TrieEntry<K, V> ceil = nextEntry(added);
        removeEntry(added);
        modCount -= 2; // we didn't really modify it.
        return ceil;
    } else if (Tries.isNullBitKey(bitIndex)) {
        // Null bit key: root (if occupied) or the first entry.
        if (!root.isEmpty()) {
            return root;
        } else {
            return firstEntry();
        }
    } else if (Tries.isEqualBitKey(bitIndex)) {
        // Bitwise-equal key: treat like an exact match.
        return found;
    }
    // we should have exited above.
    throw new IllegalStateException("invalid lookup: " + key);
}
/**
 * Returns a key-value mapping associated with the greatest key
 * strictly less than the given key, or null if there is no such key.
 */
TrieEntry<K,V> lowerEntry(K key) {
    // Basically:
    // Follow the steps of adding an entry, but instead...
    //
    // - If we ever encounter a situation where we found an equal
    //   key, we return its previousEntry immediately.
    //
    // - If we hit root (empty or not), return null.
    //
    // - If we have to add a new item, we temporarily add it,
    //   find the previousEntry to it, then remove the added item.
    //
    // These steps ensure that the returned value is always just before
    // the key or null (if there was nothing before it).
    // TODO: Cleanup so that we don't actually have to add/remove from the
    // tree. (We do it here because there are other well-defined
    // functions to perform the search.)
    int lengthInBits = lengthInBits(key);
    if (lengthInBits == 0) {
        return null; // there can never be anything before root.
    }
    TrieEntry<K, V> found = getNearestEntryForKey(key);
    if (compareKeys(key, found.key)) {
        // Exact match: the strictly-lower entry is its predecessor.
        return previousEntry(found);
    }
    int bitIndex = bitIndex(key, found.key);
    if (Tries.isValidBitIndex(bitIndex)) {
        // Key is absent: temporarily insert, take the predecessor, undo.
        TrieEntry<K, V> added = new TrieEntry<K, V>(key, null, bitIndex);
        addEntry(added);
        incrementSize(); // must increment because remove will decrement
        TrieEntry<K, V> prior = previousEntry(added);
        removeEntry(added);
        modCount -= 2; // we didn't really modify it.
        return prior;
    } else if (Tries.isNullBitKey(bitIndex)) {
        // Nothing sorts before the null bit key.
        return null;
    } else if (Tries.isEqualBitKey(bitIndex)) {
        // Bitwise-equal key: treat like an exact match.
        return previousEntry(found);
    }
    // we should have exited above.
    throw new IllegalStateException("invalid lookup: " + key);
}
/**
 * Returns a key-value mapping associated with the greatest key
 * less than or equal to the given key, or null if there is no such key.
 *
 * <p>Mirror image of {@link #ceilingEntry(Object)}: an exact match is
 * its own floor; an absent key is temporarily inserted so its
 * predecessor can be found.
 */
TrieEntry<K,V> floorEntry(K key) {
    // TODO: Cleanup so that we don't actually have to add/remove from the
    // tree. (We do it here because there are other well-defined
    // functions to perform the search.)
    int lengthInBits = lengthInBits(key);
    if (lengthInBits == 0) {
        // Zero-length key: only an occupied root can be its floor.
        if (!root.isEmpty()) {
            return root;
        } else {
            return null;
        }
    }
    TrieEntry<K, V> found = getNearestEntryForKey(key);
    if (compareKeys(key, found.key)) {
        // Exact match is its own floor.
        return found;
    }
    int bitIndex = bitIndex(key, found.key);
    if (Tries.isValidBitIndex(bitIndex)) {
        // Key is absent: temporarily insert, take the predecessor, undo.
        TrieEntry<K, V> added = new TrieEntry<K, V>(key, null, bitIndex);
        addEntry(added);
        incrementSize(); // must increment because remove will decrement
        TrieEntry<K, V> floor = previousEntry(added);
        removeEntry(added);
        modCount -= 2; // we didn't really modify it.
        return floor;
    } else if (Tries.isNullBitKey(bitIndex)) {
        // Null bit key: only an occupied root can precede-or-equal it.
        if (!root.isEmpty()) {
            return root;
        } else {
            return null;
        }
    } else if (Tries.isEqualBitKey(bitIndex)) {
        // Bitwise-equal key: treat like an exact match.
        return found;
    }
    // we should have exited above.
    throw new IllegalStateException("invalid lookup: " + key);
}
/**
 * Returns the entry "nearest" to the given key without temporarily
 * modifying the trie, or null if the trie holds nothing comparable.
 *
 * <p>NOTE(review): unlike {@link #floorEntry(Object)}, the
 * isValidBitIndex branch returns the nearest entry even when it sorts
 * after the key — presumably intentional closest-match semantics;
 * confirm against callers before relying on strict floor behavior.
 */
public TrieEntry<K,V> getFloor(K key) {
    int lengthInBits = lengthInBits(key);
    if (lengthInBits == 0) {
        // Zero-length key: only an occupied root can be its floor.
        if (!root.isEmpty()) {
            return root;
        } else {
            return null;
        }
    }
    TrieEntry<K, V> found = getNearestEntryForKey(key);
    if (compareKeys(key, found.key)) {
        // Exact match is its own floor.
        return found;
    }
    int bitIndex = bitIndex(key, found.key);
    if (Tries.isValidBitIndex(bitIndex) || Tries.isEqualBitKey(bitIndex)) {
        return found;
    } else if (Tries.isNullBitKey(bitIndex)) {
        if (!root.isEmpty()) {
            return root;
        } else {
            return null;
        }
    }
    // we should have exited above.
    throw new IllegalStateException("invalid lookup: " + key);
}
/**
 * Finds the root of the subtree that contains all keys sharing the
 * given prefix, or null if no key has that prefix.
 *
 * This is very similar to getR but with the difference that
 * we stop the lookup if h.bitIndex > lengthInBits.
 *
 * @param prefix the prefix whose subtree is wanted
 */
private TrieEntry<K, V> subtree(K prefix) {
    int lengthInBits = lengthInBits(prefix);
    // Standard PATRICIA descent: follow the prefix's bits until either
    // an uplink (non-increasing bitIndex) or a bit index beyond the
    // prefix's length is reached.
    TrieEntry<K, V> current = root.left;
    TrieEntry<K, V> path = root;
    while(true) {
        if (current.bitIndex <= path.bitIndex
                || lengthInBits < current.bitIndex) {
            break;
        }
        path = current;
        if (!isBitSet(prefix, current.bitIndex)) {
            current = current.left;
        } else {
            current = current.right;
        }
    }
    // Make sure the entry is valid for a subtree.
    TrieEntry<K, V> entry = current.isEmpty() ? path : current;
    // If entry is root, it can't be empty.
    if (entry.isEmpty()) {
        return null;
    }
    // if root && length of root is less than length of lookup,
    // there's nothing.
    // (this prevents returning the whole subtree if root has an empty
    // string and we want to lookup things with "\0")
    if (entry == root && lengthInBits(entry.getKey()) < lengthInBits) {
        return null;
    }
    // Found key's length-th bit differs from our key
    // which means it cannot be the prefix...
    if (isBitSet(prefix, lengthInBits)
            != isBitSet(entry.key, lengthInBits)) {
        return null;
    }
    // ... or there are less than 'length' equal bits
    int bitIndex = bitIndex(prefix, entry.key);
    if (bitIndex >= 0 && bitIndex < lengthInBits) {
        return null;
    }
    return entry;
}
/**
 * Returns the last entry the {@link Trie} is storing, or null if the
 * trie is empty (followRight returns null in that case).
 *
 * <p>This is implemented by going always to the right until
 * we encounter a valid uplink. That uplink is the last key.
 */
private TrieEntry<K, V> lastEntry() {
    return followRight(root.left);
}
/**
 * Descends along right children until the right pointer becomes an
 * uplink (its bitIndex stops increasing) and returns that entry.
 *
 * @param node the entry to start descending from
 * @return the right-most entry reachable from {@code node}, or null if
 *         the trie is empty (i.e. {@code node.right} is null)
 */
private TrieEntry<K, V> followRight(TrieEntry<K, V> node) {
    TrieEntry<K, V> current = node;
    // A missing right child means the trie stores no entries at all.
    if (current.right == null) {
        return null;
    }
    // Keep stepping right while the pointer still descends (strictly
    // increasing bitIndex); the first non-increasing step is an uplink.
    while (current.right.bitIndex > current.bitIndex) {
        current = current.right;
    }
    return current.right;
}
/**
 * Returns the node lexicographically before the given node (or null if none).
 *
 * This follows four simple branches:
 * - If the uplink that returned us was a right uplink:
 *   - If predecessor's left is a valid uplink from predecessor, return it.
 *   - Else, follow the right path from the predecessor's left.
 * - If the uplink that returned us was a left uplink:
 *   - Loop back through parents until we encounter a node where
 *     node != node.parent.left.
 *   - If node.parent.left is uplink from node.parent:
 *     - If node.parent.left is not root, return it.
 *     - If it is root & root isEmpty, return null.
 *     - If it is root & root !isEmpty, return root.
 *   - If node.parent.left is not uplink from node.parent:
 *     - Follow right path for first right child from node.parent.left
 *
 * @param start the entry whose lexicographic predecessor is wanted;
 *              must have a non-null predecessor uplink
 */
private TrieEntry<K, V> previousEntry(TrieEntry<K, V> start) {
    if (start.predecessor == null) {
        throw new IllegalArgumentException("must have come from somewhere!");
    }
    if (start.predecessor.right == start) {
        // We were reached via a right uplink.
        if (isValidUplink(start.predecessor.left, start.predecessor)) {
            return start.predecessor.left;
        } else {
            return followRight(start.predecessor.left);
        }
    } else {
        // We were reached via a left uplink: climb until we stop being
        // a left child.
        TrieEntry<K, V> node = start.predecessor;
        while (node.parent != null && node == node.parent.left) {
            node = node.parent;
        }
        if (node.parent == null) { // can be null if we're looking up root.
            return null;
        }
        if (isValidUplink(node.parent.left, node.parent)) {
            if (node.parent.left == root) {
                // Root is only a real predecessor if it holds data.
                if (root.isEmpty()) {
                    return null;
                } else {
                    return root;
                }
            } else {
                return node.parent.left;
            }
        } else {
            return followRight(node.parent.left);
        }
    }
}
/**
 * Returns the entry lexicographically after the given entry, staying
 * within the subtree rooted at {@code parentOfSubtree}. A null
 * {@code node} yields the trie's first entry. Results are undefined if
 * {@code node} lies outside the subtree.
 */
private TrieEntry<K, V> nextEntryInSubtree(TrieEntry<K, V> node,
        TrieEntry<K, V> parentOfSubtree) {
    return (node == null)
            ? firstEntry()
            : nextEntryImpl(node.predecessor, node, parentOfSubtree);
}
/**
 * Delegates the prefix test for the two keys to the {@link KeyAnalyzer}.
 */
private boolean isPrefix(K key, K prefix) {
    return keyAnalyzer.isPrefix(key, prefix);
}
/**
 * A range view of the {@link Trie}. Subclasses define the FROM/TO
 * endpoints and the entry-set implementation; this base class supplies
 * the {@link SortedMap} plumbing and the range checks.
 */
private abstract class RangeMap extends AbstractMap<K, V>
        implements SortedMap<K, V> {
    /**
     * The {@link #entrySet()} view
     */
    // Lazily created; volatile so a published instance is visible across
    // threads (the benign race may create it more than once).
    private transient volatile Set<Map.Entry<K, V>> entrySet;
    /**
     * Creates and returns an {@link #entrySet()}
     * view of the {@link RangeMap}
     */
    protected abstract Set<Map.Entry<K, V>> createEntrySet();
    /**
     * Returns the FROM Key
     */
    protected abstract K getFromKey();
    /**
     * Whether or not the {@link #getFromKey()} is in the range
     */
    protected abstract boolean isFromInclusive();
    /**
     * Returns the TO Key
     */
    protected abstract K getToKey();
    /**
     * Whether or not the {@link #getToKey()} is in the range
     */
    protected abstract boolean isToInclusive();
    @Override
    public Comparator<? super K> comparator() {
        return PatriciaTrie.this.comparator();
    }
    @Override
    public boolean containsKey(Object key) {
        // Keys outside the range are simply not part of this view.
        if (!inRange(Tries.<K>cast(key))) {
            return false;
        }
        return PatriciaTrie.this.containsKey(key);
    }
    @Override
    public V remove(Object key) {
        // Out-of-range keys cannot be removed through this view.
        if (!inRange(Tries.<K>cast(key))) {
            return null;
        }
        return PatriciaTrie.this.remove(key);
    }
    @Override
    public V get(Object key) {
        if (!inRange(Tries.<K>cast(key))) {
            return null;
        }
        return PatriciaTrie.this.get(key);
    }
    @Override
    public V put(K key, V value) {
        // Unlike get/remove, inserting outside the range is an error per
        // the SortedMap subMap contract.
        if (!inRange(key)) {
            throw new IllegalArgumentException(
                    "Key is out of range: " + key);
        }
        return PatriciaTrie.this.put(key, value);
    }
    @Override
    public Set<Map.Entry<K, V>> entrySet() {
        if (entrySet == null) {
            entrySet = createEntrySet();
        }
        return entrySet;
    }
    @Override
    public SortedMap<K, V> subMap(K fromKey, K toKey) {
        if (!inRange2(fromKey)) {
            throw new IllegalArgumentException(
                    "FromKey is out of range: " + fromKey);
        }
        if (!inRange2(toKey)) {
            throw new IllegalArgumentException(
                    "ToKey is out of range: " + toKey);
        }
        return createRangeMap(fromKey, isFromInclusive(),
                toKey, isToInclusive());
    }
    @Override
    public SortedMap<K, V> headMap(K toKey) {
        if (!inRange2(toKey)) {
            throw new IllegalArgumentException(
                    "ToKey is out of range: " + toKey);
        }
        return createRangeMap(getFromKey(), isFromInclusive(),
                toKey, isToInclusive());
    }
    @Override
    public SortedMap<K, V> tailMap(K fromKey) {
        if (!inRange2(fromKey)) {
            throw new IllegalArgumentException(
                    "FromKey is out of range: " + fromKey);
        }
        return createRangeMap(fromKey, isFromInclusive(),
                getToKey(), isToInclusive());
    }
    /**
     * Returns true if the provided key lies between the FROM and TO
     * endpoints of this view (a null endpoint means unbounded on that
     * side).
     */
    protected boolean inRange(K key) {
        K fromKey = getFromKey();
        K toKey = getToKey();
        return (fromKey == null || inFromRange(key, false))
                && (toKey == null || inToRange(key, false));
    }
    /**
     * This form allows the high endpoint (as well as all legit keys)
     */
    protected boolean inRange2(K key) {
        K fromKey = getFromKey();
        K toKey = getToKey();
        return (fromKey == null || inFromRange(key, false))
                && (toKey == null || inToRange(key, true));
    }
    /**
     * Returns true if the provided key is in the FROM range
     * of the {@link RangeMap}
     */
    protected boolean inFromRange(K key, boolean forceInclusive) {
        K fromKey = getFromKey();
        boolean fromInclusive = isFromInclusive();
        int ret = keyAnalyzer.compare(key, fromKey);
        if (fromInclusive || forceInclusive) {
            return ret >= 0;
        } else {
            return ret > 0;
        }
    }
    /**
     * Returns true if the provided key is in the TO range
     * of the {@link RangeMap}
     */
    protected boolean inToRange(K key, boolean forceInclusive) {
        K toKey = getToKey();
        boolean toInclusive = isToInclusive();
        int ret = keyAnalyzer.compare(key, toKey);
        if (toInclusive || forceInclusive) {
            return ret <= 0;
        } else {
            return ret < 0;
        }
    }
    /**
     * Creates and returns a sub-range view of the current {@link RangeMap}
     */
    protected abstract SortedMap<K, V> createRangeMap(K fromKey,
            boolean fromInclusive, K toKey, boolean toInclusive);
}
/**
 * A {@link RangeMap} that deals with {@link Entry}s and is bounded by
 * two fixed keys (either of which may be null for "unbounded").
 */
private class RangeEntryMap extends RangeMap {
    /**
     * The key to start from, null if the beginning.
     */
    protected final K fromKey;
    /**
     * The key to end at, null if till the end.
     */
    protected final K toKey;
    /**
     * Whether or not the 'from' is inclusive.
     */
    protected final boolean fromInclusive;
    /**
     * Whether or not the 'to' is inclusive.
     */
    protected final boolean toInclusive;
    /**
     * Creates a {@link RangeEntryMap} with the fromKey included and
     * the toKey excluded from the range
     */
    protected RangeEntryMap(K fromKey, K toKey) {
        this(fromKey, true, toKey, false);
    }
    /**
     * Creates a {@link RangeEntryMap}
     *
     * @throws IllegalArgumentException if both keys are null, or if
     *         fromKey sorts after toKey
     */
    protected RangeEntryMap(K fromKey, boolean fromInclusive,
            K toKey, boolean toInclusive) {
        if (fromKey == null && toKey == null) {
            throw new IllegalArgumentException("must have a from or to!");
        }
        if (fromKey != null && toKey != null
                && keyAnalyzer.compare(fromKey, toKey) > 0) {
            throw new IllegalArgumentException("fromKey > toKey");
        }
        this.fromKey = fromKey;
        this.fromInclusive = fromInclusive;
        this.toKey = toKey;
        this.toInclusive = toInclusive;
    }
    @Override
    public K firstKey() {
        Map.Entry<K,V> e = null;
        if (fromKey == null) {
            e = firstEntry();
        } else {
            // Respect the inclusivity of the lower bound.
            if (fromInclusive) {
                e = ceilingEntry(fromKey);
            } else {
                e = higherEntry(fromKey);
            }
        }
        K first = e != null ? e.getKey() : null;
        // The candidate must also fall below the upper bound.
        if (e == null || toKey != null && !inToRange(first, false)) {
            throw new NoSuchElementException();
        }
        return first;
    }
    @Override
    public K lastKey() {
        Map.Entry<K,V> e;
        if (toKey == null) {
            e = lastEntry();
        } else {
            // Respect the inclusivity of the upper bound.
            if (toInclusive) {
                e = floorEntry(toKey);
            } else {
                e = lowerEntry(toKey);
            }
        }
        K last = e != null ? e.getKey() : null;
        // The candidate must also fall above the lower bound.
        if (e == null || fromKey != null && !inFromRange(last, false)) {
            throw new NoSuchElementException();
        }
        return last;
    }
    @Override
    protected Set<Entry<K, V>> createEntrySet() {
        return new RangeEntrySet(this);
    }
    @Override
    public K getFromKey() {
        return fromKey;
    }
    @Override
    public K getToKey() {
        return toKey;
    }
    @Override
    public boolean isFromInclusive() {
        return fromInclusive;
    }
    @Override
    public boolean isToInclusive() {
        return toInclusive;
    }
    @Override
    protected SortedMap<K, V> createRangeMap(K fromKey, boolean fromInclusive,
            K toKey, boolean toInclusive) {
        return new RangeEntryMap(fromKey, fromInclusive, toKey, toInclusive);
    }
}
/**
 * A {@link Set} view of a {@link RangeMap}
 */
private class RangeEntrySet extends AbstractSet<Map.Entry<K, V>> {
    // The range this set is a view of.
    private final RangeEntrySet delegate_never; // placeholder removed
}
/**
 * A submap used for prefix views over the {@link Trie}: its range is
 * exactly the keys that start with {@link #prefix}.
 */
private class PrefixRangeMap extends RangeMap {
    // The prefix every key in this view must start with.
    private final K prefix;
    // Key just before the range (exclusive), recomputed by fixup().
    private K fromKey = null;
    // Key just after the range (exclusive), recomputed by fixup().
    private K toKey = null;
    // modCount of the enclosing trie when fixup() last ran.
    private int expectedModCount = -1;
    // Cached number of entries in the range; -1 means "stale".
    private int size = -1;
    /**
     * Creates a {@link PrefixRangeMap}
     */
    private PrefixRangeMap(K prefix) {
        this.prefix = prefix;
    }
    /**
     * This method does two things. It determinates the FROM
     * and TO range of the {@link PrefixRangeMap} and the number
     * of elements in the range. This method must be called every
     * time the {@link Trie} has changed.
     *
     * @return the number of entries whose key has the prefix
     */
    private int fixup() {
        // The trie has changed since we last
        // found our toKey / fromKey
        if (size == - 1 || PatriciaTrie.this.modCount != expectedModCount) {
            Iterator<Map.Entry<K, V>> it = entrySet().iterator();
            size = 0;
            Map.Entry<K, V> entry = null;
            if (it.hasNext()) {
                entry = it.next();
                size = 1;
            }
            fromKey = entry == null ? null : entry.getKey();
            if (fromKey != null) {
                // Range is exclusive, so FROM is the entry just before
                // the first prefixed entry (null if there is none).
                // NOTE(review): assumes the entry-set iterator yields
                // TrieEntry instances — confirm against RangeEntrySet.
                TrieEntry<K, V> prior = previousEntry((TrieEntry<K, V>)entry);
                fromKey = prior == null ? null : prior.getKey();
            }
            toKey = fromKey;
            // NOTE(review): the assignment above is dead — toKey is
            // unconditionally overwritten below.
            while (it.hasNext()) {
                ++size;
                entry = it.next();
            }
            toKey = entry == null ? null : entry.getKey();
            if (toKey != null) {
                // Likewise TO is the entry just after the last prefixed
                // entry (null if there is none).
                entry = nextEntry((TrieEntry<K, V>)entry);
                toKey = entry == null ? null : entry.getKey();
            }
            expectedModCount = PatriciaTrie.this.modCount;
        }
        return size;
    }
    @Override
    public K firstKey() {
        fixup();
        Map.Entry<K,V> e = null;
        if (fromKey == null) {
            e = firstEntry();
        } else {
            // fromKey is exclusive, so take the next entry after it.
            e = higherEntry(fromKey);
        }
        K first = e != null ? e.getKey() : null;
        if (e == null || !isPrefix(first, prefix)) {
            throw new NoSuchElementException();
        }
        return first;
    }
    @Override
    public K lastKey() {
        fixup();
        Map.Entry<K,V> e = null;
        if (toKey == null) {
            e = lastEntry();
        } else {
            // toKey is exclusive, so take the entry just before it.
            e = lowerEntry(toKey);
        }
        K last = e != null ? e.getKey() : null;
        if (e == null || !isPrefix(last, prefix)) {
            throw new NoSuchElementException();
        }
        return last;
    }
    /**
     * Returns true if this {@link PrefixRangeMap}'s key is a prefix
     * of the provided key.
     */
    @Override
    protected boolean inRange(K key) {
        return isPrefix(key, prefix);
    }
    /**
     * Same as {@link #inRange(Object)}
     */
    @Override
    protected boolean inRange2(K key) {
        return inRange(key);
    }
    /**
     * Returns true if the provided Key is in the FROM range
     * of the {@link PrefixRangeMap}
     */
    @Override
    protected boolean inFromRange(K key, boolean forceInclusive) {
        return isPrefix(key, prefix);
    }
    /**
     * Returns true if the provided Key is in the TO range
     * of the {@link PrefixRangeMap}
     */
    @Override
    protected boolean inToRange(K key, boolean forceInclusive) {
        return isPrefix(key, prefix);
    }
    @Override
    protected Set<Map.Entry<K, V>> createEntrySet() {
        return new PrefixRangeEntrySet(this);
    }
    @Override
    public K getFromKey() {
        return fromKey;
    }
    @Override
    public K getToKey() {
        return toKey;
    }
    @Override
    public boolean isFromInclusive() {
        return false;
    }
    @Override
    public boolean isToInclusive() {
        return false;
    }
    @Override
    protected SortedMap<K, V> createRangeMap(
            K fromKey, boolean fromInclusive,
            K toKey, boolean toInclusive) {
        return new RangeEntryMap(fromKey, fromInclusive, toKey, toInclusive);
    }
}
/**
 * A prefix {@link RangeEntrySet} view of the {@link Trie}
 */
private final class PrefixRangeEntrySet extends RangeEntrySet {
    // The prefix map this set is a view of.
    private final PrefixRangeMap delegate;
    // Cached root of the subtree holding all prefixed keys.
    private TrieEntry<K, V> prefixStart;
    // modCount of the enclosing trie when prefixStart was cached.
    private int expectedModCount = -1;
    /**
     * Creates a {@link PrefixRangeEntrySet}
     */
    public PrefixRangeEntrySet(PrefixRangeMap delegate) {
        super(delegate);
        this.delegate = delegate;
    }
    @Override
    public int size() {
        return delegate.fixup();
    }
    @Override
    public Iterator<Map.Entry<K,V>> iterator() {
        // Refresh the cached subtree root if the trie has changed.
        if (PatriciaTrie.this.modCount != expectedModCount) {
            prefixStart = subtree(delegate.prefix);
            expectedModCount = PatriciaTrie.this.modCount;
        }
        if (prefixStart == null) {
            // No key carries the prefix: empty iteration.
            Set<Map.Entry<K,V>> empty = Collections.emptySet();
            return empty.iterator();
        } else if (lengthInBits(delegate.prefix) >= prefixStart.bitIndex) {
            // The subtree is a single entry.
            return new SingletonIterator(prefixStart);
        } else {
            return new EntryIterator(prefixStart, delegate.prefix);
        }
    }
    /**
     * An {@link Iterator} that holds a single {@link TrieEntry}.
     */
    private final class SingletonIterator implements Iterator<Map.Entry<K, V>> {
        private final TrieEntry<K, V> entry;
        // 0 = not yet returned, 1 = returned, 2 = removed.
        private int hit = 0;
        public SingletonIterator(TrieEntry<K, V> entry) {
            this.entry = entry;
        }
        @Override
        public boolean hasNext() {
            return hit == 0;
        }
        @Override
        public Map.Entry<K, V> next() {
            if (hit != 0) {
                throw new NoSuchElementException();
            }
            ++hit;
            return entry;
        }
        @Override
        public void remove() {
            // Only legal immediately after next().
            if (hit != 1) {
                throw new IllegalStateException();
            }
            ++hit;
            PatriciaTrie.this.removeEntry(entry);
        }
    }
    /**
     * An {@link Iterator} for iterating over a prefix search.
     */
    private final class EntryIterator extends TrieIterator<Map.Entry<K, V>> {
        // values to reset the subtree if we remove it.
        protected final K prefix;
        protected boolean lastOne;
        protected TrieEntry<K, V> subtree; // the subtree to search within
        /**
         * Starts iteration at the given entry & search only
         * within the given subtree.
         */
        EntryIterator(TrieEntry<K, V> startScan, K prefix) {
            subtree = startScan;
            next = PatriciaTrie.this.followLeft(startScan);
            this.prefix = prefix;
        }
        @Override
        public Map.Entry<K,V> next() {
            Map.Entry<K, V> entry = nextEntry();
            // After the last remaining prefixed entry, stop iterating.
            if (lastOne) {
                next = null;
            }
            return entry;
        }
        @Override
        protected TrieEntry<K, V> findNext(TrieEntry<K, V> prior) {
            return PatriciaTrie.this.nextEntryInSubtree(prior, subtree);
        }
        @Override
        public void remove() {
            // If the current entry we're removing is the subtree
            // then we need to find a new subtree parent.
            boolean needsFixing = false;
            int bitIdx = subtree.bitIndex;
            if (current == subtree) {
                needsFixing = true;
            }
            super.remove();
            // If the subtree changed its bitIndex or we
            // removed the old subtree, get a new one.
            if (bitIdx != subtree.bitIndex || needsFixing) {
                subtree = subtree(prefix);
            }
            // If the subtree's bitIndex is less than the
            // length of our prefix, it's the last item
            // in the prefix tree.
            if (lengthInBits(prefix) >= subtree.bitIndex) {
                lastOne = true;
            }
        }
    }
}
}
| |
/*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.reteoo;
import org.drools.core.InitialFact;
import org.drools.core.RuleBaseConfiguration;
import org.drools.core.base.ClassObjectType;
import org.drools.core.base.ValueType;
import org.drools.core.common.ClassAwareObjectStore;
import org.drools.core.common.DroolsObjectInputStream;
import org.drools.core.common.EventFactHandle;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.common.InternalWorkingMemoryEntryPoint;
import org.drools.core.common.Memory;
import org.drools.core.common.MemoryFactory;
import org.drools.core.common.UpdateContext;
import org.drools.core.impl.StatefulKnowledgeSessionImpl.WorkingMemoryReteExpireAction;
import org.drools.core.marshalling.impl.MarshallerReaderContext;
import org.drools.core.marshalling.impl.MarshallerWriteContext;
import org.drools.core.marshalling.impl.PersisterEnums;
import org.drools.core.marshalling.impl.ProtobufMessages;
import org.drools.core.marshalling.impl.ProtobufMessages.Timers.ExpireTimer;
import org.drools.core.marshalling.impl.ProtobufMessages.Timers.Timer;
import org.drools.core.marshalling.impl.TimersInputMarshaller;
import org.drools.core.marshalling.impl.TimersOutputMarshaller;
import org.drools.core.reteoo.RuleRemovalContext.CleanupAdapter;
import org.drools.core.reteoo.builder.BuildContext;
import org.drools.core.reteoo.compiled.CompiledNetwork;
import org.drools.core.rule.Declaration;
import org.drools.core.rule.EntryPointId;
import org.drools.core.rule.EvalCondition;
import org.drools.core.spi.Constraint;
import org.drools.core.spi.ObjectType;
import org.drools.core.spi.PropagationContext;
import org.drools.core.time.Job;
import org.drools.core.time.JobContext;
import org.drools.core.time.JobHandle;
import org.drools.core.time.TimerService;
import org.drools.core.time.impl.DefaultJobHandle;
import org.drools.core.time.impl.PointInTimeTrigger;
import org.drools.core.util.bitmask.BitMask;
import org.drools.core.util.bitmask.EmptyBitMask;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
/**
 * <code>ObjectTypeNode</code>s are responsible for filtering and propagating the matching
 * fact assertions propagated from the <code>Rete</code> node using the <code>ObjectType</code> interface.
 * <p/>
 * The assert and retract methods do not attempt to filter as this is the role of the <code>Rete</code>
 * node which builds up a cache of matching <code>ObjectTypeNode</code>s for each asserted object, using
 * the <code>matches(Object object)</code> method. Incorrect propagation in these methods is not checked and
 * will result in <code>ClassCastException</code>s later on in the network.
 * <p/>
 * Filters <code>Objects</code> coming from the <code>Rete</code> using an
 * <code>ObjectType</code> semantic module.
 *
 * @see Rete
 */
public class ObjectTypeNode extends ObjectSource
    implements
    ObjectSink,
    Externalizable,
    MemoryFactory {

    // ------------------------------------------------------------
    // Instance members
    // ------------------------------------------------------------
    private static final long serialVersionUID = 510l;

    /**
     * The <code>ObjectType</code> semantic module.
     */
    protected ObjectType objectType;

    // when disabled, this node keeps no per-session fact store (see createMemory/updateSink)
    private boolean objectMemoryEnabled;

    // stateless shared expiration Job; per-fact state travels in an ExpireJobContext
    private static final transient ExpireJob job = new ExpireJob();

    // event expiration offset in ms; -1 means "no expiration configured"
    private long expirationOffset = -1;

    // true when this node's type is assignable from DroolsQuery
    private boolean queryNode;

    protected CompiledNetwork compiledNetwork;

    /* always dirty after serialisation */
    protected transient boolean dirty;

    /* reset counter when dirty */
    protected transient IdGenerator idGenerator;

    /** @return how many per-sink OTN ids this node's generator has handed out so far */
    public int getOtnIdCounter() {
        return idGenerator.otnIdCounter;
    }

    /** Public no-arg constructor required by {@link Externalizable}. */
    public ObjectTypeNode() {
    }

    /**
     * Construct given a semantic <code>ObjectType</code> and the provided
     * unique id. All <code>ObjectTypeNode</code> have node memory.
     *
     * @param id The unique id for the node.
     * @param objectType The semantic object-type differentiator.
     */
    public ObjectTypeNode(final int id,
                          final EntryPointNode source,
                          final ObjectType objectType,
                          final BuildContext context) {
        super(id,
              context.getPartitionId(),
              context.getKnowledgeBase().getConfiguration().isMultithreadEvaluation(),
              source,
              context.getKnowledgeBase().getConfiguration().getAlphaNodeHashingThreshold());
        this.objectType = objectType;
        idGenerator = new IdGenerator(id);
        setObjectMemoryEnabled(context.isObjectTypeNodeMemoryEnabled());
        if (ClassObjectType.DroolsQuery_ObjectType.isAssignableFrom(objectType)) {
            queryNode = true;
        }
        // new nodes always start dirty so sink ids get (re)assigned on first use
        this.dirty = true;
    }

    /**
     * Hands out monotonically increasing {@link Id}s scoped to this node.
     * The counter is reset whenever the node is marked dirty (see checkDirty()).
     */
    private static class IdGenerator {
        private final int otnId;
        private int otnIdCounter;

        private IdGenerator(int otnId) {
            this.otnId = otnId;
        }

        private Id nextId() {
            return new Id(otnId, otnIdCounter++);
        }

        private void reset() {
            otnIdCounter = 0;
        }
    }

    public static final Id DEFAULT_ID = new Id(-1, 0);

    /**
     * Immutable compound identifier (owning OTN id + per-sink sequence number)
     * used to order sinks attached below this node.
     */
    public static class Id {

        private final int otnId;
        private final int id;

        public Id(int otnId, int id) {
            this.otnId = otnId;
            this.id = id;
        }

        @Override
        public String toString() {
            return "ObjectTypeNode.Id[" + otnId + "#" + id + "]";
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || !(o instanceof Id)) return false;
            Id otherId = (Id) o;
            return id == otherId.id && otnId == otherId.otnId;
        }

        @Override
        public int hashCode() {
            return 31 * otnId + 37 * id;
        }

        /** Lexicographic ordering: first by owning OTN id, then by sequence number. */
        public boolean before(Id otherId) {
            return otherId != null && (otnId < otherId.otnId || (otnId == otherId.otnId && id < otherId.id));
        }

        public int getId() {
            return id;
        }
    }

    public void readExternal(ObjectInput in) throws IOException,
                                                    ClassNotFoundException {
        super.readExternal(in);
        objectType = (ObjectType) in.readObject();
        // this is here as not all objectTypeNodes used ClassObjectTypes in packages (i.e. rules with those nodes did not exist yet)
        // and thus have no wiring targets
        if (objectType instanceof ClassObjectType) {
            objectType = ((DroolsObjectInputStream) in).getKnowledgeBase().getClassFieldAccessorCache().getClassObjectType((ClassObjectType) objectType, true);
        }
        objectMemoryEnabled = in.readBoolean();
        expirationOffset = in.readLong();
        queryNode = in.readBoolean();
        // dirty and idGenerator are transient: always dirty after deserialisation
        dirty = true;
        idGenerator = new IdGenerator(id);
    }

    public void writeExternal(ObjectOutput out) throws IOException {
        super.writeExternal(out);
        out.writeObject(objectType);
        out.writeBoolean(objectMemoryEnabled);
        out.writeLong(expirationOffset);
        out.writeBoolean(queryNode);
    }

    public short getType() {
        return NodeTypeEnums.ObjectTypeNode;
    }

    /**
     * Retrieve the semantic <code>ObjectType</code> differentiator.
     *
     * @return The semantic <code>ObjectType</code> differentiator.
     */
    public ObjectType getObjectType() {
        return this.objectType;
    }

    @Override
    public BitMask calculateDeclaredMask(List<String> settableProperties) {
        // an OTN itself declares no property-reactivity mask
        return EmptyBitMask.get();
    }

    public boolean isAssignableFrom(final ObjectType objectType) {
        return this.objectType.isAssignableFrom(objectType);
    }

    public void setCompiledNetwork(CompiledNetwork compiledNetwork) {
        this.compiledNetwork = compiledNetwork;
        this.compiledNetwork.setObjectTypeNode(this);
    }

    /**
     * Asserts the special InitialFact, remembering it in this node's memory
     * (when enabled) before propagating to the sinks.
     */
    public void assertInitialFact(final InternalFactHandle factHandle,
                                  final PropagationContext context,
                                  final InternalWorkingMemory workingMemory) {
        if (objectMemoryEnabled) {
            InitialFactObjectTypeNodeMemory memory = (InitialFactObjectTypeNodeMemory) workingMemory.getNodeMemory(this);
            memory.add(factHandle);
        }
        checkDirty();
        propagateAssert(factHandle, context, workingMemory);
    }

    // lazily (re)assigns sink OTN ids after the network changed; runs at most once per dirty cycle
    private void checkDirty() {
        if (dirty) {
            resetIdGenerator();
            updateTupleSinkId(this, this);
            dirty = false;
        }
    }

    /**
     * Propagate the <code>FactHandleImpl</code> through the <code>Rete</code> network. All
     * <code>FactHandleImpl</code> should be remembered in the node memory, so that later runtime rule attachments
     * can have the matched facts propagated to them.
     *
     * @param factHandle The fact handle.
     * @param context The propagation context.
     * @param workingMemory The working memory session.
     */
    public void assertObject(final InternalFactHandle factHandle,
                             final PropagationContext context,
                             final InternalWorkingMemory workingMemory) {
        checkDirty();
        // schedule expiration only for live (non-deserializing) event assertions with a finite offset
        // NOTE(review): propagation to sinks is not performed here — presumably handled elsewhere
        // in this Drools version (e.g. via propagateAssert callers); confirm before relying on it
        if ( context.getReaderContext() == null && this.objectType.isEvent() && this.expirationOffset >= 0 && this.expirationOffset != Long.MAX_VALUE ) {
            scheduleExpiration(context, workingMemory, factHandle, expirationOffset, new WorkingMemoryReteExpireAction((EventFactHandle) factHandle, this));
        }
    }

    /** Propagates an assertion either through the compiled network or the regular sink chain. */
    public void propagateAssert(InternalFactHandle factHandle, PropagationContext context, InternalWorkingMemory workingMemory) {
        if (compiledNetwork != null) {
            compiledNetwork.assertObject(factHandle,
                                         context,
                                         workingMemory);
        } else {
            this.sink.propagateAssertObject(factHandle,
                                            context,
                                            workingMemory);
        }
    }

    /**
     * Schedules a timer job that will expire the given event handle once its
     * end timestamp plus the expiration offset has elapsed.
     */
    public static void scheduleExpiration(PropagationContext context, InternalWorkingMemory workingMemory, InternalFactHandle handle, long expirationOffset, WorkingMemoryReteExpireAction expireAction) {
        // schedule expiration
        TimerService clock = workingMemory.getTimerService();
        // DROOLS-455 the calculation of the effectiveEnd may overflow and become negative
        EventFactHandle eventFactHandle = (EventFactHandle) handle;
        long effectiveEnd = eventFactHandle.getEndTimestamp() + expirationOffset;
        long nextTimestamp = Math.max( clock.getCurrentTime(),
                                       effectiveEnd >= 0 ? effectiveEnd : Long.MAX_VALUE );
        JobContext jobctx = new ExpireJobContext( expireAction,
                                                 workingMemory );
        JobHandle jobHandle = clock.scheduleJob( job,
                                                 jobctx,
                                                 new PointInTimeTrigger( nextTimestamp, null, null ) );
        jobctx.setJobHandle( jobHandle );
        // remember the job on the handle so it can be cancelled/cleaned up with the fact
        eventFactHandle.addJob(jobHandle);
    }

    /**
     * Retract the <code>FactHandleImpl</code> from the <code>Rete</code> network. Also remove the
     * <code>FactHandleImpl</code> from the node memory.
     *
     * @param factHandle The fact handle.
     * @param context The propagation context.
     * @param workingMemory The working memory session.
     */
    public void retractObject(final InternalFactHandle factHandle,
                              final PropagationContext context,
                              final InternalWorkingMemory workingMemory) {
        checkDirty();
        doRetractObject(factHandle, context, workingMemory);
    }

    /**
     * Retracts all right and left tuples created from the handle, then clears
     * the tuple lists on the handle itself.
     */
    public static void doRetractObject(final InternalFactHandle factHandle,
                                       final PropagationContext context,
                                       final InternalWorkingMemory workingMemory) {
        // capture the next link before retracting: retraction unlinks the current tuple
        for (RightTuple rightTuple = factHandle.getFirstRightTuple(); rightTuple != null; ) {
            RightTuple nextRightTuple = rightTuple.getHandleNext();
            rightTuple.getRightTupleSink().retractRightTuple(rightTuple,
                                                             context,
                                                             workingMemory);
            rightTuple = nextRightTuple;
        }
        factHandle.clearRightTuples();
        for (LeftTuple leftTuple = factHandle.getFirstLeftTuple(); leftTuple != null; leftTuple = leftTuple.getLeftParentNext()) {
            // must go via the LiaNode, so that the fact counter is updated, for linking
            ((LeftInputAdapterNode) leftTuple.getLeftTupleSink().getLeftTupleSource()).retractLeftTuple(leftTuple,
                                                                                                       context,
                                                                                                       workingMemory);
        }
        factHandle.clearLeftTuples();
    }

    protected void resetIdGenerator() {
        idGenerator.reset();
    }

    /** Propagates a modification either through the compiled network or the regular sink chain. */
    public void modifyObject(InternalFactHandle factHandle,
                             ModifyPreviousTuples modifyPreviousTuples,
                             PropagationContext context,
                             InternalWorkingMemory workingMemory) {
        checkDirty();
        context.setObjectType(objectType);
        if (compiledNetwork != null) {
            compiledNetwork.modifyObject(factHandle,
                                         modifyPreviousTuples,
                                         context.adaptModificationMaskForObjectType(objectType, workingMemory),
                                         workingMemory);
        } else {
            this.sink.propagateModifyObject(factHandle,
                                            modifyPreviousTuples,
                                            context.adaptModificationMaskForObjectType(objectType, workingMemory),
                                            workingMemory);
        }
    }

    /** Re-asserts every fact remembered in this node's memory into a newly attached sink. */
    public void updateSink(final ObjectSink sink,
                           final PropagationContext context,
                           final InternalWorkingMemory workingMemory) {
        checkDirty();
        // Regular updateSink
        final ObjectTypeNodeMemory memory = (ObjectTypeNodeMemory) workingMemory.getNodeMemory(this);
        Iterator<InternalFactHandle> it = memory.iterator();
        while (it.hasNext()) {
            sink.assertObject(it.next(),
                              context,
                              workingMemory);
        }
    }

    /**
     * Rete needs to know that this ObjectTypeNode has been added
     */
    public void attach(BuildContext context) {
        this.source.addObjectSink(this);
        InternalWorkingMemory[] workingMemories = context.getWorkingMemories();
        InternalWorkingMemory workingMemory = workingMemories.length > 0 ? workingMemories[0] : null;
        if ( workingMemory != null ) {
            // invalidate the cached ObjectTypeConf so the new node is picked up on the next assert
            InternalWorkingMemoryEntryPoint wmEntryPoint = (InternalWorkingMemoryEntryPoint) workingMemory.getWorkingMemoryEntryPoint(((EntryPointNode) source).getEntryPoint().getEntryPointId());
            ObjectTypeConf objectTypeConf = wmEntryPoint.getObjectTypeConfigurationRegistry().getObjectTypeConfByClass( ((ClassObjectType) objectType).getClassType() );
            if (objectTypeConf != null) {
                objectTypeConf.resetCache();
            }
        }
    }

    public void networkUpdated(UpdateContext updateContext) {
        // sink ids will be lazily reassigned by checkDirty() on the next propagation
        this.dirty = true;
    }

    /**
     * Recursively walks the sink network below the OTN assigning fresh
     * {@link Id}s, descending through alpha and window nodes.
     */
    protected static void updateTupleSinkId(ObjectTypeNode otn,
                                            ObjectSource source) {
        for (ObjectSink sink : source.sink.getSinks()) {
            if (sink instanceof BetaNode) {
                ((BetaNode) sink).setRightInputOtnId(otn.nextOtnId());
            } else if (sink instanceof LeftInputAdapterNode) {
                for (LeftTupleSink liaChildSink : ((LeftInputAdapterNode) sink).getSinkPropagator().getSinks()) {
                    liaChildSink.setLeftInputOtnId(otn.nextOtnId());
                }
            } else if (sink instanceof WindowNode) {
                ((WindowNode) sink).setRightInputOtnId(otn.nextOtnId());
                updateTupleSinkId(otn, (WindowNode) sink);
            } else if (sink instanceof AlphaNode) {
                updateTupleSinkId(otn, (AlphaNode) sink);
            }
        }
    }

    public Id nextOtnId() {
        return idGenerator.nextId();
    }

    /**
     * OTN needs to override remove to avoid releasing the node ID, since OTN are
     * never removed from the rulebase in the current implementation
     */
    public void remove(RuleRemovalContext context,
                       ReteooBuilder builder,
                       InternalWorkingMemory[] workingMemories) {
        doRemove(context,
                 builder,
                 workingMemories);
    }

    /**
     * OTN needs to override remove to avoid releasing the node ID, since OTN are
     * never removed from the rulebase in the current implementation
     */
    protected void doRemove(final RuleRemovalContext context,
                            final ReteooBuilder builder,
                            final InternalWorkingMemory[] workingMemories) {
        // cleanup adapter is only relevant for the non-phreak (ReteOO) runtime
        if (!context.getKnowledgeBase().getConfiguration().isPhreakEnabled() && context.getCleanupAdapter() != null) {
            for (InternalWorkingMemory workingMemory : workingMemories) {
                CleanupAdapter adapter = context.getCleanupAdapter();
                final ObjectTypeNodeMemory memory = (ObjectTypeNodeMemory) workingMemory.getNodeMemory(this);
                Iterator<InternalFactHandle> it = memory.iterator();
                while (it.hasNext()) {
                    InternalFactHandle handle = it.next();
                    for (LeftTuple leftTuple = handle.getFirstLeftTuple(); leftTuple != null; leftTuple = leftTuple.getLeftParentNext()) {
                        adapter.cleanUp(leftTuple,
                                        workingMemory);
                    }
                }
            }
            context.setCleanupAdapter(null);
        }
    }

    /**
     * Creates memory for the node using PrimitiveLongMap as it's optimised for storage and retrievals of Longs.
     * However PrimitiveLongMap is not ideal for sparse data. So it should be monitored in case it's more optimal
     * to switch back to a standard HashMap.
     */
    public Memory createMemory(final RuleBaseConfiguration config, InternalWorkingMemory wm) {
        Class<?> classType = ((ClassObjectType) getObjectType()).getClassType();
        // the InitialFact gets its own dedicated (single-element) memory
        if (InitialFact.class.isAssignableFrom(classType)) {
            return new InitialFactObjectTypeNodeMemory(classType);
        }
        return new ObjectTypeNodeMemory(classType, wm);
    }

    public boolean isObjectMemoryEnabled() {
        return this.objectMemoryEnabled;
    }

    public void setObjectMemoryEnabled(boolean objectMemoryEnabled) {
        this.objectMemoryEnabled = objectMemoryEnabled;
    }

    public String toString() {
        return "[ObjectTypeNode(" + this.id + ")::" + ((EntryPointNode) this.source).getEntryPoint() + " objectType=" + this.objectType + " expiration=" + this.getExpirationOffset() + "ms ]";
    }

    /**
     * Uses the hashCode() of the underlying ObjectType implementation.
     */
    public int hashCode() {
        return this.objectType.hashCode() ^ this.source.hashCode();
    }

    public boolean equals(final Object object) {
        if (this == object) {
            return true;
        }
        if (object == null || !(object instanceof ObjectTypeNode)) {
            return false;
        }
        final ObjectTypeNode other = (ObjectTypeNode) object;
        return this.objectType.equals(other.objectType) && this.source.equals(other.source);
    }

    // true when any of the constraints requires a declaration of this node's object type
    private boolean usesDeclaration(final Constraint[] constraints) {
        boolean usesDecl = false;
        for (int i = 0; !usesDecl && i < constraints.length; i++) {
            usesDecl = this.usesDeclaration(constraints[i]);
        }
        return usesDecl;
    }

    // true when the constraint requires a declaration whose pattern has this node's object type
    private boolean usesDeclaration(final Constraint constraint) {
        boolean usesDecl = false;
        final Declaration[] declarations = constraint.getRequiredDeclarations();
        for (int j = 0; !usesDecl && j < declarations.length; j++) {
            usesDecl = (declarations[j].getPattern().getObjectType() == this.objectType);
        }
        return usesDecl;
    }

    // true when the eval condition requires a declaration whose pattern has this node's object type
    private boolean usesDeclaration(final EvalCondition condition) {
        boolean usesDecl = false;
        final Declaration[] declarations = condition.getRequiredDeclarations();
        for (int j = 0; !usesDecl && j < declarations.length; j++) {
            usesDecl = (declarations[j].getPattern().getObjectType() == this.objectType);
        }
        return usesDecl;
    }

    /**
     * @return the entryPoint
     */
    public EntryPointId getEntryPoint() {
        return ((EntryPointNode) this.source).getEntryPoint();
    }

    public long getExpirationOffset() {
        return expirationOffset;
    }

    /**
     * Sets the event expiration offset (ms) and, for non-query types, derives
     * the memory-enabled flag from it: a positive offset forces memory on,
     * a zero offset forces it off, a negative offset leaves it untouched.
     */
    public void setExpirationOffset(long expirationOffset) {
        this.expirationOffset = expirationOffset;
        if (!this.objectType.getValueType().equals(ValueType.QUERY_TYPE)) {
            if (expirationOffset > 0) {
                // override memory enabled settings
                this.setObjectMemoryEnabled(true);
            } else if (expirationOffset == 0) {
                // disable memory
                this.setObjectMemoryEnabled(false);
            }
        }
    }

    /**
     * Stateless timer job: queues the expire action on the working memory and
     * unregisters itself from the event handle.
     */
    public static class ExpireJob
        implements
        Job {

        public void execute(JobContext ctx) {
            ExpireJobContext context = (ExpireJobContext) ctx;
            context.workingMemory.queueWorkingMemoryAction(context.expireAction);
            ((EventFactHandle)context.getExpireAction().getFactHandle()).removeJob(context.getJobHandle());
        }
    }

    /**
     * Mutable context passed to {@link ExpireJob}: the action to queue, the
     * target working memory and the scheduled job handle.
     */
    public static class ExpireJobContext
        implements
        JobContext,
        Externalizable {

        public WorkingMemoryReteExpireAction expireAction;
        public InternalWorkingMemory workingMemory;
        public JobHandle handle;

        public ExpireJobContext(WorkingMemoryReteExpireAction expireAction,
                                InternalWorkingMemory workingMemory) {
            super();
            this.expireAction = expireAction;
            this.workingMemory = workingMemory;
        }

        public JobHandle getJobHandle() {
            return this.handle;
        }

        public void setJobHandle(JobHandle jobHandle) {
            this.handle = jobHandle;
        }

        public WorkingMemoryReteExpireAction getExpireAction() {
            return expireAction;
        }

        public void setExpireAction(WorkingMemoryReteExpireAction expireAction) {
            this.expireAction = expireAction;
        }

        public InternalWorkingMemory getWorkingMemory() {
            return workingMemory;
        }

        public void setWorkingMemory(InternalWorkingMemory workingMemory) {
            this.workingMemory = workingMemory;
        }

        public JobHandle getHandle() {
            return handle;
        }

        public void setHandle(JobHandle handle) {
            this.handle = handle;
        }

        // NOTE(review): no state is read here; the context appears to be rebuilt by the
        // dedicated timer marshallers below instead — confirm before relying on this no-op
        public void readExternal(ObjectInput in) throws IOException,
                                                        ClassNotFoundException {
            //this.behavior = (O)
        }

        // NOTE(review): intentionally writes nothing (see readExternal)
        public void writeExternal(ObjectOutput out) throws IOException {
            // TODO Auto-generated method stub
        }
    }

    /**
     * Serializes a pending expiration timer (fact handle id, entry point,
     * class name and next fire time) for session marshalling.
     */
    public static class ExpireJobContextTimerOutputMarshaller
        implements
        TimersOutputMarshaller {

        public void write(JobContext jobCtx,
                          MarshallerWriteContext outputCtx) throws IOException {
            outputCtx.writeShort( PersisterEnums.EXPIRE_TIMER );
            // ExpireJob, no state
            ExpireJobContext ejobCtx = (ExpireJobContext) jobCtx;
            WorkingMemoryReteExpireAction expireAction = ejobCtx.getExpireAction();
            outputCtx.writeInt( expireAction.getFactHandle().getId() );
            outputCtx.writeUTF( expireAction.getNode().getEntryPoint().getEntryPointId() );
            outputCtx.writeUTF( ((ClassObjectType) expireAction.getNode().getObjectType()).getClassType().getName() );
            DefaultJobHandle jobHandle = (DefaultJobHandle) ejobCtx.getJobHandle();
            PointInTimeTrigger trigger = (PointInTimeTrigger) jobHandle.getTimerJobInstance().getTrigger();
            outputCtx.writeLong( trigger.hasNextFireTime().getTime() );
        }

        public ProtobufMessages.Timers.Timer serialize(JobContext jobCtx,
                                                       MarshallerWriteContext outputCtx) {
            // ExpireJob, no state
            ExpireJobContext ejobCtx = ( ExpireJobContext ) jobCtx;
            WorkingMemoryReteExpireAction expireAction = ejobCtx.getExpireAction();
            DefaultJobHandle jobHandle = ( DefaultJobHandle ) ejobCtx.getJobHandle();
            PointInTimeTrigger trigger = ( PointInTimeTrigger ) jobHandle.getTimerJobInstance().getTrigger();
            return ProtobufMessages.Timers.Timer.newBuilder()
                    .setType( ProtobufMessages.Timers.TimerType.EXPIRE )
                    .setExpire( ProtobufMessages.Timers.ExpireTimer.newBuilder()
                            .setHandleId( expireAction.getFactHandle().getId() )
                            .setEntryPointId( expireAction.getNode().getEntryPoint().getEntryPointId() )
                            .setClassName( ((ClassObjectType)expireAction.getNode().getObjectType()).getClassType().getName() )
                            .setNextFireTimestamp( trigger.hasNextFireTime().getTime() )
                            .build() )
                    .build();
        }
    }

    /**
     * Deserializes a pending expiration timer and reschedules it on the
     * session's timer service.
     */
    public static class ExpireJobContextTimerInputMarshaller
        implements
        TimersInputMarshaller {

        public void read(MarshallerReaderContext inCtx) throws IOException,
                                                               ClassNotFoundException {
            InternalFactHandle factHandle = inCtx.handles.get( inCtx.readInt() );
            String entryPointId = inCtx.readUTF();
            EntryPointNode epn = inCtx.wm.getKnowledgeBase().getRete().getEntryPointNode( new EntryPointId( entryPointId ) );
            String className = inCtx.readUTF();
            Class< ? > cls = inCtx.wm.getKnowledgeBase().getRootClassLoader().loadClass( className );
            ObjectTypeNode otn = epn.getObjectTypeNodes().get( new ClassObjectType( cls ) );
            long nextTimeStamp = inCtx.readLong();
            TimerService clock = inCtx.wm.getTimerService();
            JobContext jobctx = new ExpireJobContext( new WorkingMemoryReteExpireAction( (EventFactHandle) factHandle, otn ),
                                                      inCtx.wm );
            JobHandle handle = clock.scheduleJob( job,
                                                  jobctx,
                                                  new PointInTimeTrigger( nextTimeStamp,
                                                                          null,
                                                                          null ) );
            jobctx.setJobHandle( handle );
        }

        public void deserialize(MarshallerReaderContext inCtx,
                                Timer _timer) throws ClassNotFoundException {
            ExpireTimer _expire = _timer.getExpire();
            InternalFactHandle factHandle = inCtx.handles.get( _expire.getHandleId() );
            EntryPointNode epn = inCtx.wm.getKnowledgeBase().getRete().getEntryPointNode( new EntryPointId( _expire.getEntryPointId() ) );
            Class<?> cls = inCtx.wm.getKnowledgeBase().getRootClassLoader().loadClass( _expire.getClassName() );
            ObjectTypeNode otn = epn.getObjectTypeNodes().get( new ClassObjectType( cls ) );
            TimerService clock = inCtx.wm.getTimerService();
            JobContext jobctx = new ExpireJobContext( new WorkingMemoryReteExpireAction((EventFactHandle)factHandle, otn),
                                                      inCtx.wm );
            JobHandle jobHandle = clock.scheduleJob( job,
                                                     jobctx,
                                                     new PointInTimeTrigger( _expire.getNextFireTimestamp(), null, null ) );
            jobctx.setJobHandle( jobHandle );
            ((EventFactHandle) factHandle).addJob(jobHandle);
        }
    }

    public void byPassModifyToBetaNode(InternalFactHandle factHandle,
                                       ModifyPreviousTuples modifyPreviousTuples,
                                       PropagationContext context,
                                       InternalWorkingMemory workingMemory) {
        throw new UnsupportedOperationException("This should never get called, as the PropertyReactive first happens at the AlphaNode");
    }

    /**
     * Node memory backed by the session's class-aware object store; iterates
     * the fact handles of this node's class type.
     */
    public static class ObjectTypeNodeMemory implements Memory {
        private ClassAwareObjectStore.SingleClassStore store;
        private Class<?> classType;

        ObjectTypeNodeMemory(Class<?> classType) {
            this.classType = classType;
        }

        ObjectTypeNodeMemory(Class<?> classType, InternalWorkingMemory wm) {
            this(classType);
            store = ((ClassAwareObjectStore) wm.getObjectStore()).getOrCreateClassStore(classType);
        }

        public short getNodeType() {
            return NodeTypeEnums.ObjectTypeNode;
        }

        public Iterator<InternalFactHandle> iterator() {
            return store.factHandlesIterator(true);
        }

        // OTN memories do not take part in segmentation, hence the unsupported operations below
        public SegmentMemory getSegmentMemory() {
            return null;
        }

        public void setSegmentMemory(SegmentMemory segmentMemory) {
            throw new UnsupportedOperationException();
        }

        public Memory getPrevious() {
            throw new UnsupportedOperationException();
        }

        public void setPrevious(Memory previous) {
            throw new UnsupportedOperationException();
        }

        public void nullPrevNext() {
            throw new UnsupportedOperationException();
        }

        public void setNext(Memory next) {
            throw new UnsupportedOperationException();
        }

        public Memory getNext() {
            throw new UnsupportedOperationException();
        }

        public void reset() { }

        public String toString() {
            return "ObjectTypeMemory for " + classType;
        }
    }

    /**
     * Memory for the single InitialFact: holds at most one handle in a list.
     */
    public static class InitialFactObjectTypeNodeMemory extends ObjectTypeNodeMemory {
        private List<InternalFactHandle> list = Collections.emptyList();

        InitialFactObjectTypeNodeMemory(Class<?> classType) {
            super(classType);
        }

        public void add(InternalFactHandle factHandle) {
            list = Arrays.asList(factHandle);
        }

        @Override
        public Iterator<InternalFactHandle> iterator() {
            return list.iterator();
        }

        @Override
        public void reset() {
            list = Collections.emptyList();
        }
    }
}
| |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl.telemetry.reporter;
import static org.camunda.bpm.engine.impl.util.ConnectUtil.METHOD_NAME_POST;
import static org.camunda.bpm.engine.impl.util.ConnectUtil.PARAM_NAME_RESPONSE_STATUS_CODE;
import static org.camunda.bpm.engine.impl.util.ConnectUtil.addRequestTimeoutConfiguration;
import static org.camunda.bpm.engine.impl.util.ConnectUtil.assembleRequestParameters;
import static org.camunda.bpm.engine.impl.util.StringUtil.hasText;
import static org.camunda.bpm.engine.management.Metrics.ACTIVTY_INSTANCE_START;
import static org.camunda.bpm.engine.management.Metrics.EXECUTED_DECISION_ELEMENTS;
import static org.camunda.bpm.engine.management.Metrics.EXECUTED_DECISION_INSTANCES;
import static org.camunda.bpm.engine.management.Metrics.ROOT_PROCESS_INSTANCE_START;
import java.net.HttpURLConnection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TimerTask;
import javax.ws.rs.core.MediaType;
import org.camunda.bpm.engine.ProcessEngineException;
import org.camunda.bpm.engine.impl.ProcessEngineLogger;
import org.camunda.bpm.engine.impl.cmd.IsTelemetryEnabledCmd;
import org.camunda.bpm.engine.impl.interceptor.CommandContext;
import org.camunda.bpm.engine.impl.interceptor.CommandExecutor;
import org.camunda.bpm.engine.impl.metrics.Meter;
import org.camunda.bpm.engine.impl.metrics.MetricsRegistry;
import org.camunda.bpm.engine.impl.metrics.util.MetricsUtil;
import org.camunda.bpm.engine.impl.persistence.entity.PropertyEntity;
import org.camunda.bpm.engine.impl.telemetry.CommandCounter;
import org.camunda.bpm.engine.impl.telemetry.TelemetryLogger;
import org.camunda.bpm.engine.impl.telemetry.TelemetryRegistry;
import org.camunda.bpm.engine.impl.telemetry.dto.ApplicationServerImpl;
import org.camunda.bpm.engine.impl.telemetry.dto.CommandImpl;
import org.camunda.bpm.engine.impl.telemetry.dto.TelemetryDataImpl;
import org.camunda.bpm.engine.impl.telemetry.dto.InternalsImpl;
import org.camunda.bpm.engine.impl.telemetry.dto.MetricImpl;
import org.camunda.bpm.engine.impl.telemetry.dto.ProductImpl;
import org.camunda.bpm.engine.impl.util.ExceptionUtil;
import org.camunda.bpm.engine.impl.util.JsonUtil;
import org.camunda.bpm.engine.impl.util.TelemetryUtil;
import org.camunda.bpm.engine.telemetry.Command;
import org.camunda.bpm.engine.telemetry.Metric;
import org.camunda.connect.spi.CloseableConnectorResponse;
import org.camunda.connect.spi.Connector;
import org.camunda.connect.spi.ConnectorRequest;
public class TelemetrySendingTask extends TimerTask {
  // subset of engine metrics that is included in the telemetry payload
  protected static final Set<String> METRICS_TO_REPORT = new HashSet<>();
  protected static final TelemetryLogger LOG = ProcessEngineLogger.TELEMETRY_LOGGER;
  // DB property acting as a cluster-wide marker that the one-time initial message was sent
  protected static final String TELEMETRY_INIT_MESSAGE_SENT_NAME = "camunda.telemetry.initial.message.sent";
  // matches a version-4 UUID (variant bits 89abAB in the fourth group)
  protected static final String UUID4_PATTERN = "[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}";

  static {
    METRICS_TO_REPORT.add(ROOT_PROCESS_INSTANCE_START);
    METRICS_TO_REPORT.add(EXECUTED_DECISION_INSTANCES);
    METRICS_TO_REPORT.add(EXECUTED_DECISION_ELEMENTS);
    METRICS_TO_REPORT.add(ACTIVTY_INSTANCE_START);
  }

  protected CommandExecutor commandExecutor;
  protected String telemetryEndpoint;
  // installation/product part of the payload; its internals are refreshed before each send
  protected TelemetryDataImpl staticData;
  protected Connector<? extends ConnectorRequest<?>> httpConnector;
  protected int telemetryRequestRetries;
  protected TelemetryRegistry telemetryRegistry;
  protected MetricsRegistry metricsRegistry;
  protected int telemetryRequestTimeout;
  // true until the one-time initial message has been confirmed as sent
  protected boolean sendInitialMessage;
  /**
   * Creates the periodic task that assembles and POSTs telemetry data.
   *
   * @param commandExecutor executes engine commands (telemetry flag check, init message)
   * @param telemetryEndpoint HTTP endpoint the JSON payload is sent to
   * @param telemetryRequestRetries number of retries on a failed send (used by performDataSend)
   * @param data static part of the payload (installation, product)
   * @param httpConnector connector used to issue the HTTP request
   * @param telemetryRegistry source of command counters, license key and webapp data
   * @param metricsRegistry source of the metric meters; may be null
   * @param telemetryRequestTimeout request timeout in milliseconds
   * @param sendInitialMessage whether the one-time initial message still needs to be sent
   */
  public TelemetrySendingTask(CommandExecutor commandExecutor,
                              String telemetryEndpoint,
                              int telemetryRequestRetries,
                              TelemetryDataImpl data,
                              Connector<? extends ConnectorRequest<?>> httpConnector,
                              TelemetryRegistry telemetryRegistry,
                              MetricsRegistry metricsRegistry,
                              int telemetryRequestTimeout,
                              boolean sendInitialMessage) {
    this.commandExecutor = commandExecutor;
    this.telemetryEndpoint = telemetryEndpoint;
    this.telemetryRequestRetries = telemetryRequestRetries;
    this.staticData = data;
    this.httpConnector = httpConnector;
    this.telemetryRegistry = telemetryRegistry;
    this.metricsRegistry = metricsRegistry;
    this.telemetryRequestTimeout = telemetryRequestTimeout;
    this.sendInitialMessage = sendInitialMessage;
  }
  /**
   * Timer callback: sends the one-time initial message if still pending, then,
   * when telemetry is enabled, collects and reports the current data.
   */
  @Override
  public void run() {
    LOG.startTelemetrySendingTask();
    // guarded by a DB property so the initial message goes out at most once per cluster
    if (sendInitialMessage) {
      sendInitialMessage();
    }
    if (!isTelemetryEnabled()) {
      LOG.telemetryDisabled();
      return;
    }
    // make sure local counting is switched on before data is gathered
    TelemetryUtil.toggleLocalTelemetry(true, telemetryRegistry, metricsRegistry);
    performDataSend(false, () -> updateAndSendData(true, true));
  }
  /**
   * Assembles the full telemetry payload (refreshed static part merged with a
   * fresh snapshot of dynamic counters) and optionally sends it.
   *
   * @param sendData when true the merged data is POSTed and the counters are reset
   * @param addLegacyNames when true metrics are additionally reported under their internal names
   * @return the merged telemetry data
   */
  public TelemetryDataImpl updateAndSendData(boolean sendData, boolean addLegacyNames) {
    updateStaticData();
    InternalsImpl dynamicData = resolveDynamicData(sendData, addLegacyNames);
    TelemetryDataImpl mergedData = new TelemetryDataImpl(staticData);
    mergedData.mergeInternals(dynamicData);
    if(sendData) {
      try {
        sendData(mergedData, false);
      } catch (Exception e) {
        // so that we send it again the next time
        restoreDynamicData(dynamicData);
        throw e;
      }
    }
    return mergedData;
  }
  /**
   * Runs the initial-message command, tolerating the race where another
   * cluster node inserted the marker property first.
   */
  protected void sendInitialMessage() {
    try {
      commandExecutor.execute(new SendInitialMsgCmd());
    } catch (ProcessEngineException pex) {
      // the property might have been inserted already by another cluster node after we checked it, ignore that
      if (!ExceptionUtil.checkConstraintViolationException(pex)) {
        LOG.exceptionWhileSendingTelemetryData(pex, true);
      }
    } catch (Exception e) {
      // best-effort: a failed initial message is only logged, never rethrown
      LOG.exceptionWhileSendingTelemetryData(e, true);
    }
  }
  /**
   * Command-context variant: sends the initial message only when the marker
   * property is absent, then persists the marker so other nodes skip it.
   */
  protected void sendInitialMessage(CommandContext commandContext) {
    /*
     * check on init message property to minimize the risk of sending the
     * message twice in case another node in the cluster toggled the value
     * and successfully sent the message already - it is not 100% safe but
     * good enough as sending the message twice is still OK
     */
    if (null == commandContext.getPropertyManager().findPropertyById(TELEMETRY_INIT_MESSAGE_SENT_NAME)) {
      // message has not been sent yet
      performDataSend(true, () -> {
        // minimal payload: installation + product with only the telemetry-enabled flag
        TelemetryDataImpl initData = new TelemetryDataImpl(staticData.getInstallation(), new ProductImpl(staticData.getProduct()));
        InternalsImpl internals = new InternalsImpl();
        internals.setTelemetryEnabled(new IsTelemetryEnabledCmd().execute(commandContext));
        initData.getProduct().setInternals(internals);
        sendData(initData, true);
        sendInitialMessage = false;
        // marker insert may raise a constraint violation if another node won the race
        commandContext.getPropertyManager().insert(new PropertyEntity(TELEMETRY_INIT_MESSAGE_SENT_NAME, "true"));
      });
    } else {
      // message has already been sent by another node
      sendInitialMessage = false;
    }
  }
  /**
   * Refreshes the mutable parts of the static payload from the registry
   * (application server, license key, webapps) before a send.
   */
  protected void updateStaticData() {
    InternalsImpl internals = staticData.getProduct().getInternals();
    if (internals.getApplicationServer() == null) {
      // the server info is registered lazily from the outside; pick it up once available
      ApplicationServerImpl applicationServer = telemetryRegistry.getApplicationServer();
      internals.setApplicationServer(applicationServer);
    }
    if (internals.isTelemetryEnabled() == null) {
      internals.setTelemetryEnabled(true);// this can only be true, otherwise we would not collect data to send
    }
    // license key and Webapps data is fed from the outside to the registry but needs to be constantly updated
    internals.setLicenseKey(telemetryRegistry.getLicenseKey());
    internals.setWebapps(telemetryRegistry.getWebapps());
  }
protected boolean isTelemetryEnabled() {
Boolean telemetryEnabled = commandExecutor.execute(new IsTelemetryEnabledCmd());
return telemetryEnabled != null && telemetryEnabled.booleanValue();
}
  /**
   * Serializes the payload to JSON and POSTs it to the telemetry endpoint.
   * Non-2xx responses are raised as exceptions so callers can restore the
   * already-consumed counters.
   *
   * @param dataToSend the payload to serialize and send
   * @param isInitialMessage only influences how log messages are phrased
   */
  protected void sendData(TelemetryDataImpl dataToSend, boolean isInitialMessage) {
    String telemetryData = JsonUtil.asString(dataToSend);
    Map<String, Object> requestParams = assembleRequestParameters(METHOD_NAME_POST,
        telemetryEndpoint,
        MediaType.APPLICATION_JSON,
        telemetryData);
    requestParams = addRequestTimeoutConfiguration(requestParams, telemetryRequestTimeout);

    ConnectorRequest<?> request = httpConnector.createRequest();
    request.setRequestParameters(requestParams);

    LOG.sendingTelemetryData(telemetryData, isInitialMessage);
    CloseableConnectorResponse response = (CloseableConnectorResponse) request.execute();

    if (response == null) {
      LOG.unexpectedResponseWhileSendingTelemetryData(isInitialMessage);
    } else {
      int responseCode = (int) response.getResponseParameter(PARAM_NAME_RESPONSE_STATUS_CODE);

      if (isSuccessStatusCode(responseCode)) {
        // any 2xx is accepted, but only 202 is the expected code from the endpoint
        if (responseCode != HttpURLConnection.HTTP_ACCEPTED) {
          LOG.unexpectedResponseSuccessCode(responseCode, isInitialMessage);
        }

        LOG.telemetrySentSuccessfully(isInitialMessage);

      } else {
        // raising here triggers restoreDynamicData() in updateAndSendData()
        throw LOG.unexpectedResponseWhileSendingTelemetryData(responseCode, isInitialMessage);
      }
    }
  }
/**
* @return true if status code is 2xx
*/
protected boolean isSuccessStatusCode(int statusCode) {
return (statusCode / 100) == 2;
}
/**
 * Replays previously collected dynamic data (command and metric counts) back
 * into the in-memory registries, e.g. after a failed send so the counts are
 * not lost.
 *
 * @param internals the internals snapshot whose counts should be restored
 */
protected void restoreDynamicData(InternalsImpl internals) {
    // re-apply every recorded command occurrence count
    for (Map.Entry<String, Command> commandEntry : internals.getCommands().entrySet()) {
        telemetryRegistry.markOccurrence(commandEntry.getKey(), commandEntry.getValue().getCount());
    }
    // re-apply the metric counts, but only for the metrics we report on
    if (metricsRegistry != null) {
        Map<String, Metric> metricsByName = internals.getMetrics();
        for (String metricName : METRICS_TO_REPORT) {
            metricsRegistry.markTelemetryOccurrence(metricName, metricsByName.get(metricName).getCount());
        }
    }
}
/**
 * Collects the current dynamic telemetry data (metrics and command counts)
 * into a fresh internals object.
 *
 * @param reset          whether collecting should also reset the live counters
 * @param addLegacyNames whether metrics should additionally be reported under
 *                       their internal (legacy) names
 * @return a new internals instance carrying the collected metrics and commands
 */
protected InternalsImpl resolveDynamicData(boolean reset, boolean addLegacyNames) {
    InternalsImpl internals = new InternalsImpl();
    // Metrics are resolved first on purpose: metric retrieval can fail, and
    // resetting the command counters is a side effect of fetching them that
    // we would otherwise have to undo on failure.
    internals.setMetrics(calculateMetrics(reset, addLegacyNames));
    internals.setCommands(fetchAndResetCommandCounts(reset));
    return internals;
}
/**
 * Takes a snapshot of the per-command occurrence counters from the telemetry
 * registry.
 *
 * @param reset whether reading a counter should also reset it
 * @return immutable-value snapshot mapping command name to its occurrence count
 */
protected Map<String, Command> fetchAndResetCommandCounts(boolean reset) {
    Map<String, CommandCounter> counters = telemetryRegistry.getCommands();
    Map<String, Command> snapshot = new HashMap<>();
    // lock the live counter map so reads (and optional resets) are consistent
    // with concurrent command executions
    synchronized (counters) {
        for (Map.Entry<String, CommandCounter> entry : counters.entrySet()) {
            snapshot.put(entry.getKey(), new CommandImpl(entry.getValue().get(reset)));
        }
    }
    return snapshot;
}
/**
 * Reads the telemetry meters for all reported metrics.
 *
 * @param reset          whether reading a meter should also reset it
 * @param addLegacyNames whether each value is additionally stored under its
 *                       internal (legacy) metric name next to the public name
 * @return metric values keyed by public name (and legacy name if requested);
 *         empty if no metrics registry is available
 */
protected Map<String, Metric> calculateMetrics(boolean reset, boolean addLegacyNames) {
    Map<String, Metric> result = new HashMap<>();
    if (metricsRegistry == null) {
        return result;
    }
    Map<String, Meter> meters = metricsRegistry.getTelemetryMeters();
    for (String internalName : METRICS_TO_REPORT) {
        long count = meters.get(internalName).get(reset);
        if (addLegacyNames) {
            result.put(internalName, new MetricImpl(count));
        }
        // always report under the public metric name
        result.put(MetricsUtil.resolvePublicName(internalName), new MetricImpl(count));
    }
    return result;
}
/**
 * Engine command that sends the one-time initial telemetry message from
 * within a command context.
 */
protected class SendInitialMsgCmd implements org.camunda.bpm.engine.impl.interceptor.Command<Void> {

    @Override
    public Void execute(CommandContext context) {
        sendInitialMessage(context);
        return null;
    }
}
/**
 * Runs the given send action, retrying on failure up to the configured
 * number of retries. Nothing is sent if the static telemetry data fails
 * validation.
 *
 * @param isInitialMessage whether this send is the initial message (logging only)
 * @param runnable         the actual send action to execute
 */
protected void performDataSend(Boolean isInitialMessage, Runnable runnable) {
    if (!validateData(staticData)) {
        LOG.sendingTelemetryDataFails(staticData);
        return;
    }
    int attemptsRemaining = telemetryRequestRetries + 1;
    boolean sent = false;
    // at-least-once attempt loop; each failed attempt is logged, not rethrown
    do {
        attemptsRemaining--;
        try {
            runnable.run();
            sent = true;
        } catch (Exception e) {
            LOG.exceptionWhileSendingTelemetryData(e, isInitialMessage);
        }
    } while (!sent && attemptsRemaining > 0);
}
/**
 * Checks that the telemetry payload carries usable product data: product
 * name, version, edition and installation id must all be non-empty, and the
 * installation id must be a valid UUIDv4.
 *
 * @param dataToSend the payload to validate
 * @return {@code true} if the payload may be sent
 */
protected Boolean validateData(TelemetryDataImpl dataToSend) {
    ProductImpl product = dataToSend.getProduct();
    String installationId = dataToSend.getInstallation();
    // all product fields must be present and non-blank
    if (!hasText(product.getName())
        || !hasText(product.getVersion())
        || !hasText(product.getEdition())
        || !hasText(installationId)) {
        return false;
    }
    // the installation id additionally has to look like a UUIDv4
    return installationId.matches(UUID4_PATTERN);
}
}
| |
/**
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.guice;
import static com.google.inject.matcher.Matchers.any;
import com.google.inject.AbstractModule;
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.googlecode.guice.PackageVisibilityTestModule.PublicUserOfPackagePrivate;
import junit.framework.TestCase;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import java.io.File;
import java.lang.ref.Reference;
import java.lang.ref.WeakReference;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.concurrent.TimeoutException;
/**
* This test is in a separate package so we can test package-level visibility
* with confidence.
*
* @author mcculls@gmail.com (Stuart McCulloch)
*/
/**
 * Tests Guice's runtime bytecode generation for method-interception proxies:
 * package-private visibility, proxy class naming, which classloader proxies
 * are defined in, proxy class unloading, and "bridging" when parameter or
 * return types are hidden from the proxy's classloader.
 */
public class BytecodeGenTest extends TestCase {

  private final ClassLoader systemClassLoader = ClassLoader.getSystemClassLoader();

  /** Intercepts every method on every class and appends " WORLD" to the result. */
  private final Module interceptorModule = new AbstractModule() {
    protected void configure() {
      bindInterceptor(any(), any(), new MethodInterceptor() {
        public Object invoke(MethodInvocation chain)
            throws Throwable {
          return chain.proceed() + " WORLD";
        }
      });
    }
  };

  /**
   * Intercepts every method but simply delegates; this still forces Guice to
   * generate proxy classes without changing observable behavior.
   */
  private final Module noopInterceptorModule = new AbstractModule() {
    protected void configure() {
      bindInterceptor(any(), any(), new MethodInterceptor() {
        public Object invoke(MethodInvocation chain)
            throws Throwable {
          return chain.proceed();
        }
      });
    }
  };

  /** Un-intercepted injection of a public class depending on a package-private one. */
  public void testPackageVisibility() {
    Injector injector = Guice.createInjector(new PackageVisibilityTestModule());
    injector.getInstance(PublicUserOfPackagePrivate.class); // This must pass.
  }

  /** Same as above but with interception, so a proxy class must be generated. */
  public void testInterceptedPackageVisibility() {
    Injector injector = Guice.createInjector(interceptorModule, new PackageVisibilityTestModule());
    injector.getInstance(PublicUserOfPackagePrivate.class); // This must pass.
  }

  /** Generated proxy classes carry the "$$EnhancerByGuice$$" name marker. */
  public void testEnhancerNaming() {
    Injector injector = Guice.createInjector(interceptorModule, new PackageVisibilityTestModule());
    PublicUserOfPackagePrivate pupp = injector.getInstance(PublicUserOfPackagePrivate.class);
    assertTrue(pupp.getClass().getName().startsWith(
        PublicUserOfPackagePrivate.class.getName() + "$$EnhancerByGuice$$"));
  }

  // TODO(sameb): Figure out how to test FastClass naming tests.

  /**
   * Custom URL classloader with basic visibility rules
   */
  static class TestVisibilityClassLoader
      extends URLClassLoader {

    // when true, classes whose names contain ".internal." or ".cglib." are
    // refused instead of delegated to the parent loader
    boolean hideInternals;

    public TestVisibilityClassLoader(boolean hideInternals) {
      super(new URL[0]);
      this.hideInternals = hideInternals;
      // mirror the JVM classpath into this loader so it can define the same
      // classes itself instead of delegating to the parent
      final String[] classpath = System.getProperty("java.class.path").split(File.pathSeparator);
      for (final String element : classpath) {
        try {
          // is it a remote/local URL?
          addURL(new URL(element));
        } catch (final MalformedURLException e1) {
          try {
            // nope - perhaps it's a filename?
            addURL(new File(element).toURI().toURL());
          } catch (final MalformedURLException e2) {
            // NOTE(review): this wraps e1 (the URL failure), not e2 — confirm
            // whether the original URL error is intentionally preferred here
            throw new RuntimeException(e1);
          }
        }
      }
    }

    /**
     * Classic parent-delegating classloaders are meant to override findClass.
     * However, non-delegating classloaders (as used in OSGi) instead override
     * loadClass to provide support for "class-space" separation.
     */
    @Override
    protected Class<?> loadClass(final String name, final boolean resolve)
        throws ClassNotFoundException {
      synchronized (this) {
        // check our local cache to avoid duplicates
        final Class<?> clazz = findLoadedClass(name);
        if (clazz != null) {
          return clazz;
        }
      }
      if (name.startsWith("java.")) {
        // standard bootdelegation of java.*
        return super.loadClass(name, resolve);
      } else if (!name.contains(".internal.") && !name.contains(".cglib.")) {
        /*
         * load public and test classes directly from the classpath - we don't
         * delegate to our parent because then the loaded classes would also be
         * able to see private internal Guice classes, as they are also loaded
         * by the parent classloader.
         */
        final Class<?> clazz = findClass(name);
        if (resolve) {
          resolveClass(clazz);
        }
        return clazz;
      }
      // hide internal non-test classes
      if (hideInternals) {
        throw new ClassNotFoundException();
      }
      return super.loadClass(name, resolve);
    }
  }

  /** as loaded by another class loader */
  private Class<ProxyTest> proxyTestClass;
  private Class<ProxyTestImpl> realClass;
  private Module testModule;

  @SuppressWarnings("unchecked")
  protected void setUp() throws Exception {
    super.setUp();
    // load the test types through the isolating loader so they are NOT the
    // same Class objects as ProxyTest/ProxyTestImpl in this class's loader
    ClassLoader testClassLoader = new TestVisibilityClassLoader(true);
    proxyTestClass = (Class<ProxyTest>) testClassLoader.loadClass(ProxyTest.class.getName());
    realClass = (Class<ProxyTestImpl>) testClassLoader.loadClass(ProxyTestImpl.class.getName());
    testModule = new AbstractModule() {
      public void configure() {
        bind(proxyTestClass).to(realClass);
      }
    };
  }

  interface ProxyTest {
    String sayHello();
  }

  /**
   * Note: this class must be marked as public or protected so that the Guice
   * custom classloader will intercept it. Private and implementation classes
   * are not intercepted by the custom classloader.
   *
   * @see com.google.inject.internal.BytecodeGen.Visibility
   */
  public static class ProxyTestImpl implements ProxyTest {

    static {
      //System.out.println(ProxyTestImpl.class.getClassLoader());
    }

    public String sayHello() {
      return "HELLO";
    }
  }

  /** Interception must work on a class loaded by a foreign classloader. */
  public void testProxyClassLoading() throws Exception {
    Object testObject = Guice.createInjector(interceptorModule, testModule)
        .getInstance(proxyTestClass);

    // verify method interception still works
    Method m = realClass.getMethod("sayHello");
    assertEquals("HELLO WORLD", m.invoke(testObject));
  }

  /** Proxies of system-classloader classes should live in the system classloader. */
  public void testSystemClassLoaderIsUsedIfProxiedClassUsesIt() {
    ProxyTest testProxy = Guice.createInjector(interceptorModule, new Module() {
      public void configure(Binder binder) {
        binder.bind(ProxyTest.class).to(ProxyTestImpl.class);
      }
    }).getInstance(ProxyTest.class);

    if (ProxyTest.class.getClassLoader() == systemClassLoader) {
      assertSame(testProxy.getClass().getClassLoader(), systemClassLoader);
    } else {
      assertNotSame(testProxy.getClass().getClassLoader(), systemClassLoader);
    }
  }

  /**
   * Verifies a generated proxy class can be garbage-collected once the proxy
   * instance is unreachable. Inherently GC-dependent and may be flaky.
   */
  public void testProxyClassUnloading() {
    Object testObject = Guice.createInjector(interceptorModule, testModule)
        .getInstance(proxyTestClass);
    assertNotNull(testObject.getClass().getClassLoader());
    assertNotSame(testObject.getClass().getClassLoader(), systemClassLoader);

    // take a weak reference to the generated proxy class
    Reference<Class<?>> clazzRef = new WeakReference<Class<?>>(testObject.getClass());

    assertNotNull(clazzRef.get());

    // null the proxy
    testObject = null;

    /*
     * this should be enough to queue the weak reference
     * unless something is holding onto it accidentally.
     */
    final int MAX_COUNT = 100;
    String[] buf;
    System.gc();
    //TODO(cgruber): Use com.google.common.testing.GcFinalization and a countdown latch to un-flake.
    for (int count = 0 ; clazzRef.get() != null ; count++) {
      // allocate ~8M references per pass to apply memory pressure and
      // encourage the collector to unload the class
      buf = new String[8 * 1024 * 1024];
      buf = null;
      System.gc();
      assertTrue("Timeout waiting for class to be unloaded.  This may be a flaky result.",
          count <= MAX_COUNT);
    }

    // This test could be somewhat flaky when the GC isn't working.
    // If it fails, run the test again to make sure it's failing reliably.
    assertNull("Proxy class was not unloaded.", clazzRef.get());
  }

  /** Interception must also apply to package-private and protected methods. */
  public void testProxyingPackagePrivateMethods() {
    Injector injector = Guice.createInjector(interceptorModule);
    assertEquals("HI WORLD", injector.getInstance(PackageClassPackageMethod.class).sayHi());
    assertEquals("HI WORLD", injector.getInstance(PublicClassPackageMethod.class).sayHi());
    assertEquals("HI WORLD", injector.getInstance(ProtectedClassProtectedMethod.class).sayHi());
  }

  static class PackageClassPackageMethod {
    String sayHi() {
      return "HI";
    }
  }

  public static class PublicClassPackageMethod {
    String sayHi() {
      return "HI";
    }
  }

  protected static class ProtectedClassProtectedMethod {
    protected String sayHi() {
      return "HI";
    }
  }

  // a type deliberately invisible to the hiding classloader below
  static class Hidden {
  }

  public static class HiddenMethodReturn {
    public Hidden method() {
      return new Hidden();
    }
  }

  public static class HiddenMethodParameter {
    public void method(Hidden h) {
    }
  }

  /**
   * Proxies whose methods use types the proxy classloader cannot see must not
   * use classloader bridging — the un-bridged declared methods must remain
   * invocable with the foreign-loaded types.
   */
  public void testClassLoaderBridging() throws Exception {
    ClassLoader testClassLoader = new TestVisibilityClassLoader(false);

    Class hiddenMethodReturnClass = testClassLoader.loadClass(HiddenMethodReturn.class.getName());
    Class hiddenMethodParameterClass = testClassLoader.loadClass(HiddenMethodParameter.class.getName());

    Injector injector = Guice.createInjector(noopInterceptorModule);

    Class hiddenClass = testClassLoader.loadClass(Hidden.class.getName());
    Constructor ctor = hiddenClass.getDeclaredConstructor();

    ctor.setAccessible(true);

    // don't use bridging for proxies with private parameters
    Object o1 = injector.getInstance(hiddenMethodParameterClass);
    o1.getClass().getDeclaredMethod("method", hiddenClass).invoke(o1, ctor.newInstance());

    // don't use bridging for proxies with private return types
    Object o2 = injector.getInstance(hiddenMethodReturnClass);
    o2.getClass().getDeclaredMethod("method").invoke(o2);
  }
}
| |
/*
* Copyright (c) 2000 David Flanagan. All rights reserved.
* This code is from the book Java Examples in a Nutshell, 2nd Edition.
* It is provided AS-IS, WITHOUT ANY WARRANTY either expressed or implied.
* You may study, use, and modify it for any non-commercial purpose.
* You may distribute it non-commercially as long as you retain this notice.
* For a commercial use license, or to purchase the book (recommended),
* visit http://www.davidflanagan.com/javaexamples2.
*/
package com.davidflanagan.examples.gui;
import java.awt.*;
import java.awt.print.*;
import java.awt.geom.*;
import java.awt.font.*;
import javax.swing.*;
import javax.swing.text.*;
import java.util.*;
/**
* This class implements the Pageable and Printable interfaces and allows
* the contents of any JTextComponent to be printed using the java.awt.print
* printing API.
**/
public class PrintableDocument implements Pageable, Printable {
    View root;             // The root View to be printed
    PageFormat format;     // Paper plus page orientation
    int numPages;          // How many pages in the document
    double printX, printY; // coordinates of upper-left of print area
    double printWidth;     // Width of the printable area
    double printHeight;    // Height of the printable area
    Rectangle drawRect;    // The rectangle in which the document is painted

    // How lenient are we with the bottom margin in widow/orphan prevention?
    static final double MARGIN_ADJUST = .97;

    // The font we use for printing page numbers
    static final Font headerFont = new Font("Serif", Font.PLAIN, 12);

    /**
     * This constructor allows printing the contents of any JTextComponent
     * using a default PageFormat
     */
    public PrintableDocument(JTextComponent textComponent) {
        this(textComponent, new PageFormat());
    }

    /**
     * This constructor allows the contents of any JTextComponent to be
     * printed, using any specified PageFormat object
     **/
    public PrintableDocument(JTextComponent textComponent, PageFormat format) {
        // Remember the page format, and ask it for the printable area
        this.format = format;
        this.printX = format.getImageableX();
        this.printY = format.getImageableY();
        this.printWidth = format.getImageableWidth();
        this.printHeight = format.getImageableHeight();
        // NOTE(review): paperWidth is computed but never used
        double paperWidth = format.getWidth();

        // Get the document and its root Element from the text component
        Document document = textComponent.getDocument();
        Element rootElement = document.getDefaultRootElement();
        // Get the EditorKit and its ViewFactory from the text component
        EditorKit editorKit = textComponent.getUI().getEditorKit(textComponent);
        ViewFactory viewFactory = editorKit.getViewFactory();

        // Use the ViewFactory to create a root View object for the document
        // This is the object we'll print.
        root = viewFactory.create(rootElement);

        // The Swing text architecture requires us to call setParent() on
        // our root View before we use it for anything.  In order to do this,
        // we need a View object that can serve as the parent.  We use a
        // custom implementation defined below.
        root.setParent(new ParentView(root, viewFactory, textComponent));

        // Tell the view how wide the page is; it has to format itself
        // to fit within this width.  The height doesn't really matter here
        root.setSize((float)printWidth, (float)printHeight);

        // Now that the view has formatted itself for the specified width,
        // Ask it how tall it is.
        double documentHeight = root.getPreferredSpan(View.Y_AXIS);

        // Set up the rectangle that tells the view where to draw itself
        // We'll use it in other methods of this class.
        drawRect = new Rectangle((int)printX, (int)printY,
                                 (int)printWidth, (int)documentHeight);

        // Now if the document is taller than one page, we have to
        // figure out where the page breaks are.
        if (documentHeight > printHeight) paginate(root, drawRect);
        // Once we've broken it into pages, figure out how man pages.
        // pageLengths holds one entry per break, so page count is breaks + 1.
        numPages = pageLengths.size() + 1;
    }

    // This is the starting offset of the page we're currently working on.
    // NOTE: mutable pagination state, advanced by breakPage() during the
    // recursive paginate() pass — this class is not safe to paginate from
    // multiple threads. Also shadowed by a local of the same name in print().
    double pageStart = 0;

    /**
     * This method loops through the children of the specified view,
     * recursing as necessary, and inserts pages breaks when needed.
     * It makes a rudimentary attempt to avoid "widows" and "orphans".
     **/
    protected void paginate(View v, Rectangle2D allocation) {
        // Figure out how tall this view is, and tell it to allocate
        // that space among its children
        double myheight = v.getPreferredSpan(View.Y_AXIS);
        v.setSize((float)printWidth, (float)myheight);

        // Now loop through each of the children
        int numkids = v.getViewCount();
        for(int i = 0; i < numkids; i++) {
            View kid = v.getView(i);  // this is the child we're working with
            // Figure out its size and location
            Shape kidshape = v.getChildAllocation(i, allocation);
            if (kidshape == null) continue;
            Rectangle2D kidbox = kidshape.getBounds2D();
            // This is the Y coordinate of the bottom of the child
            double kidpos = kidbox.getY() + kidbox.getHeight() - pageStart;
            // If this is the first child of a group, then we want to ensure
            // that it doesn't get left by itself at the bottom of a page.
            // I.e. we want to prevent "widows"
            if ((numkids > 1) && (i == 0)) {
                // If it is not near the end of the page, then just move
                // on to the next child
                if (kidpos < printY + printHeight*MARGIN_ADJUST) continue;

                // Otherwise, the child is near the bottom of the page, so
                // break the page before this child and place this child on
                // the new page.
                breakPage(kidbox.getY());
                continue;
            }

            // If this is the last child of a group, we don't want it to
            // appear by itself at the top of a new page, so allow it to
            // squeeze past the bottom margin if necessary.  This helps to
            // prevent "orphans"
            if ((numkids > 1) && (i == numkids-1)) {
                // If it fits normally, just move on to the next one
                if (kidpos < printY + printHeight) continue;

                // Otherwise, if it fits with extra space, then break the
                // at the end of the group
                if (kidpos < printY + printHeight/MARGIN_ADJUST) {
                    breakPage(allocation.getY() + allocation.getHeight());
                    continue;
                }
            }

            // If the child is not the first or last of a group, then we use
            // the bottom margin strictly.  If the child fits on the page,
            // then move on to the next child.
            if (kidpos < printY+printHeight) continue;

            // If we get here, the child doesn't fit on this page. If it has
            // no children, then break the page before this child and continue.
            if (kid.getViewCount() == 0) {
                breakPage(kidbox.getY());
                continue;
            }

            // If we get here, then the child did not fit on the page, but it
            // has kids of its own, so recurse to see if any of those kids
            // will fit on the page.
            paginate(kid, kidbox);
        }
    }

    // For a document of n pages, this list stores the lengths of pages
    // 0 through n-2.  The last page is assumed to have a full length
    ArrayList pageLengths = new ArrayList();

    // For a document of n pages, this list stores the starting offset of
    // pages 1 through n-1.  The offset of page 0 is always 0
    ArrayList pageOffsets = new ArrayList();

    /**
     * Break a page at the specified Y coordinate.  Store the necessary
     * information into the pageLengths and pageOffsets lists
     **/
    void breakPage(double y) {
        double pageLength = y-pageStart-printY;
        pageStart = y-printY;
        // (legacy code: uses the deprecated Double constructor and raw lists)
        pageLengths.add(new Double(pageLength));
        pageOffsets.add(new Double(pageStart));
    }

    /** Return the number of pages.  This is a Pageable method. */
    public int getNumberOfPages() { return numPages; }

    /**
     * Return the PageFormat object for the specified page.  This
     * implementation uses the computed length of the page in the returned
     * PageFormat object.  The PrinterJob will use this as a clipping region,
     * which will prevent extraneous parts of the document from being drawn
     * in the top and bottom margins.
     **/
    public PageFormat getPageFormat(int pagenum) {
        // On the last page, just return the user-specified page format
        if (pagenum == numPages-1) return format;

        // Otherwise, look up the height of this page and return an
        // appropriate PageFormat.
        double pageLength = ((Double)pageLengths.get(pagenum)).doubleValue();
        PageFormat f = (PageFormat) format.clone();
        Paper p = f.getPaper();
        // for landscape orientation the imageable x/y and width/length swap
        if (f.getOrientation() == PageFormat.PORTRAIT)
            p.setImageableArea(printX, printY, printWidth, pageLength);
        else
            p.setImageableArea(printY, printX, pageLength, printWidth);
        f.setPaper(p);
        return f;
    }

    /**
     * This Printable method returns the Printable object for the specified
     * page.  Since this class implements both Pageable and Printable, it just
     * returns this.  (The pagenum argument is intentionally ignored.)
     **/
    public Printable getPrintable(int pagenum) { return this; }

    /**
     * This is the basic Printable method that prints a specified page
     **/
    public int print(Graphics g, PageFormat format, int pageIndex) {
        // Return an error code on attempts to print past the end of the doc
        if (pageIndex >= numPages) return NO_SUCH_PAGE;

        // Cast the Graphics object so we can use Java2D operations
        Graphics2D g2 = (Graphics2D)g;

        // Display a page number centered in the area of the top margin.
        // Set a new clipping region so we can draw into the top margin
        // But remember the original clipping region so we can restore it
        Shape originalClip = g.getClip();
        g.setClip(new Rectangle(0, 0, (int)printWidth, (int)printY));
        // Compute the header to display, measure it, then display it
        String numString = "- " + (pageIndex+1) + " -";
        Rectangle2D numBounds =  // Get the width and height of the string
            headerFont.getStringBounds(numString, g2.getFontRenderContext());
        LineMetrics metrics =    // Get the ascent and descent of the font
            headerFont.getLineMetrics(numString, g2.getFontRenderContext());
        g.setFont(headerFont);          // Set the font
        g.setColor(Color.black);        // Print with black ink
        g.drawString(numString,         // Display the string
                     (int)(printX + (printWidth-numBounds.getWidth())/2),
                     (int)((printY-numBounds.getHeight())/2 + metrics.getAscent()));
        g.setClip(originalClip);        // Restore the clipping region

        // Figure out the staring position of the page within the document
        // (this local intentionally shadows the pagination field pageStart)
        double pageStart = 0.0;
        if (pageIndex > 0)
            pageStart = ((Double)pageOffsets.get(pageIndex-1)).doubleValue();

        // Scroll so that the appropriate part of the document is lined up
        // with the upper-left corner of the page
        g2.translate(0.0, -pageStart);

        // Now paint the entire document.  The PrinterJob will have
        // established a clipping region, so that only the desired portion
        // of the document will actually be drawn on this sheet of paper.
        root.paint(g, drawRect);

        // Finally return a success code
        return PAGE_EXISTS;
    }

    /**
     * This inner class is a concrete implementation of View, with a
     * couple of key method implementations.  An instance of this class
     * is used as the parent of the root View object we want to print
     **/
    static class ParentView extends View {
        ViewFactory viewFactory;  // The ViewFactory for the hierarchy of views
        Container container;      // The Container for the hierarchy of views

        public ParentView(View v, ViewFactory viewFactory, Container container)
        {
            super(v.getElement());
            this.viewFactory = viewFactory;
            this.container = container;
        }

        // These methods return key pieces of information required by
        // the View hierarchy.
        public ViewFactory getViewFactory() { return viewFactory; }
        public Container getContainer() { return container; }

        // These methods are abstract in View, so we've got to provide
        // dummy implementations of them here, even though they're never used.
        public void paint(Graphics g, Shape allocation) {}
        public float getPreferredSpan(int axis) { return 0.0f; }
        public int viewToModel(float x,float y,Shape a,Position.Bias[] bias) {
            return 0;
        }
        public Shape modelToView(int pos, Shape a, Position.Bias b)
            throws BadLocationException {
            return a;
        }
    }
}
| |
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.peterkuterna.android.apps.devoxxfrsched.ui.tablet;
import net.peterkuterna.android.apps.devoxxfrsched.R;
import net.peterkuterna.android.apps.devoxxfrsched.provider.CfpContract;
import net.peterkuterna.android.apps.devoxxfrsched.ui.BaseActivity;
import net.peterkuterna.android.apps.devoxxfrsched.ui.SessionsFragment;
import net.peterkuterna.android.apps.devoxxfrsched.ui.TracksAdapter;
import net.peterkuterna.android.apps.devoxxfrsched.util.ActivityHelper;
import net.peterkuterna.android.apps.devoxxfrsched.util.UIUtils;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.content.res.Resources;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.Fragment;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ListPopupWindow;
import android.widget.PopupWindow;
import android.widget.TextView;
/**
* A tablet-specific fragment that is a giant {@link android.widget.Spinner}
* -like widget. It shows a {@link ListPopupWindow} containing a list of tracks,
* using {@link TracksAdapter}.
*
* Requires API level 11 or later since {@link ListPopupWindow} is API level
* 11+.
*/
public class TracksDropdownFragment extends Fragment implements
LoaderManager.LoaderCallbacks<Cursor>, AdapterView.OnItemClickListener,
AdapterView.OnItemSelectedListener, PopupWindow.OnDismissListener {
private boolean mAutoloadTarget = true;
private Cursor mCursor;
private TracksAdapter mAdapter;
private Uri mTracksUri;
private String mNextType;
private ListPopupWindow mListPopupWindow;
private ViewGroup mRootView;
private TextView mTitle;
private TextView mAbstract;
private Handler mHandler = new Handler();
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mAdapter = new TracksAdapter(getActivity(), null);
mAdapter.setHasAllItem(true);
if (savedInstanceState != null) {
// Prevent auto-load behavior on orientation change.
mAutoloadTarget = false;
}
}
public void reloadFromArguments(Bundle arguments) {
if (mListPopupWindow != null) {
mListPopupWindow.setAdapter(mAdapter);
}
getLoaderManager().initLoader(TracksAdapter.TracksQuery._TOKEN,
arguments, this);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
mRootView = (ViewGroup) inflater.inflate(
R.layout.fragment_tracks_dropdown, null);
mTitle = (TextView) mRootView.findViewById(R.id.track_title);
mAbstract = (TextView) mRootView.findViewById(R.id.track_abstract);
mRootView.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
mListPopupWindow = new ListPopupWindow(getActivity());
mListPopupWindow.setAdapter(mAdapter);
mListPopupWindow.setModal(true);
mListPopupWindow.setContentWidth(400);
mListPopupWindow.setAnchorView(mRootView);
mListPopupWindow
.setOnItemClickListener(TracksDropdownFragment.this);
mListPopupWindow.show();
mListPopupWindow
.setOnDismissListener(TracksDropdownFragment.this);
}
});
return mRootView;
}
/** {@inheritDoc} */
public void onItemClick(AdapterView<?> parent, View view, int position,
long id) {
selectTrack(position);
}
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position,
long id) {
selectTrack(position);
}
@SuppressLint("NewApi") protected void selectTrack(int position) {
final Cursor cursor = (Cursor) mAdapter.getItem(position);
loadTrack(cursor, true);
if (cursor != null) {
UIUtils.setLastUsedTrackID(getActivity(),
cursor.getString(TracksAdapter.TracksQuery.TRACK_ID));
} else {
UIUtils.setLastUsedTrackID(getActivity(),
CfpContract.Tracks.ALL_TRACK_ID);
}
if (mListPopupWindow != null) {
mListPopupWindow.dismiss();
}
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
public void loadTrack(Cursor cursor, boolean loadTargetFragment) {
final Resources res = getResources();
final String trackId;
final int trackColor;
if (cursor != null) {
trackColor = cursor.getInt(TracksAdapter.TracksQuery.TRACK_COLOR);
trackId = cursor.getString(TracksAdapter.TracksQuery.TRACK_ID);
if (mTitle != null) {
mTitle.setText(cursor
.getString(TracksAdapter.TracksQuery.TRACK_NAME));
}
if (mAbstract != null) {
mAbstract.setText(cursor
.getString(TracksAdapter.TracksQuery.TRACK_ABSTRACT));
}
} else {
trackColor = res.getColor(R.color.all_track_color);
trackId = CfpContract.Tracks.ALL_TRACK_ID;
if (mTitle != null) {
mTitle.setText(R.string.all_sessions_title);
}
if (mAbstract != null) {
mAbstract.setText(R.string.all_sessions_subtitle);
}
}
boolean isDark = UIUtils.isColorDark(trackColor);
mRootView.setBackgroundColor(trackColor);
if (isDark) {
if (mTitle != null) {
mTitle.setTextColor(res.getColor(R.color.body_text_1_inverse));
}
if (mAbstract != null) {
mAbstract.setTextColor(res
.getColor(R.color.body_text_2_inverse));
}
mRootView.findViewById(R.id.track_dropdown_arrow)
.setBackgroundResource(
R.drawable.track_dropdown_arrow_light);
} else {
if (mTitle != null) {
mTitle.setTextColor(res.getColor(R.color.body_text_1));
}
if (mAbstract != null) {
mAbstract.setTextColor(res.getColor(R.color.body_text_2));
}
mRootView
.findViewById(R.id.track_dropdown_arrow)
.setBackgroundResource(R.drawable.track_dropdown_arrow_dark);
}
if (loadTargetFragment) {
final Intent intent = new Intent(Intent.ACTION_VIEW);
final Uri trackUri = CfpContract.Tracks.buildTrackUri(trackId);
intent.putExtra(SessionsFragment.EXTRA_TRACK, trackUri);
if (cursor == null) {
intent.setData(CfpContract.Sessions.CONTENT_URI);
} else {
intent.setData(CfpContract.Tracks.buildSessionsUri(trackId));
}
((BaseActivity) getActivity())
.openActivityOrFragment(intent);
}
}
public void onDismiss() {
mListPopupWindow = null;
}
@Override
public Loader<Cursor> onCreateLoader(int id, Bundle args) {
final Intent intent = ActivityHelper.fragmentArgumentsToIntent(args);
final Uri tracksUri = intent.getData();
if (tracksUri == null) {
return null;
}
final String[] projection = TracksAdapter.TracksQuery.PROJECTION_WITH_SESSIONS_COUNT;
return new CursorLoader(getActivity(), tracksUri, projection, null,
null, CfpContract.Tracks.DEFAULT_SORT);
}
@Override
public void onLoadFinished(Loader<Cursor> loader, final Cursor data) {
    if (getActivity() != null) {
        // If there was a last-opened track, try to position the cursor on it.
        // Otherwise (or if that track is no longer present) load the default
        // track by passing a null cursor to loadTrack().
        data.moveToFirst();
        boolean foundLastTrack = false;
        String lastTrackID = UIUtils.getLastUsedTrackID(getActivity());
        if (lastTrackID != null) {
            while (!data.isAfterLast()) {
                if (lastTrackID.equals(data
                        .getString(TracksAdapter.TracksQuery.TRACK_ID))) {
                    foundLastTrack = true;
                    break;
                }
                data.moveToNext();
            }
        }
        // Post a single runnable instead of duplicating an identical anonymous
        // Runnable in three branches; pass the cursor only when it is
        // positioned on the previously used track.
        final Cursor trackCursor = foundLastTrack ? data : null;
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                loadTrack(trackCursor, mAutoloadTarget);
            }
        });
    }
    mAdapter.swapCursor(data);
}
@Override
public void onLoaderReset(Loader<Cursor> loader) {
    // Loader's data is about to be released; detach the cursor from the adapter.
    mAdapter.swapCursor(null);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.support;
import java.io.InputStream;
import java.net.URL;
import java.util.Map;
import java.util.Properties;
import java.util.function.Consumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Factory to create {@link LRUCache} instances.
 */
public abstract class LRUCacheFactory {

    /**
     * Factory key used to locate the cache-factory service descriptor on the classpath.
     */
    public static final String FACTORY = "lru-cache-factory";

    private static final Logger LOG = LoggerFactory.getLogger(LRUCacheFactory.class);

    // Lazily created singleton. Declared volatile so the double-checked locking
    // in getInstance() publishes the instance safely across threads.
    private static volatile LRUCacheFactory instance;

    /**
     * Initializes and creates the cache factory if not explicitly set.
     */
    public static void init() {
        // Delegate to getInstance() so initialization is guarded by the same
        // double-checked lock. The previous unsynchronized check-then-act could
        // create two factory instances when invoked concurrently.
        getInstance();
    }

    /**
     * Use this to set a specific LRUCacheFactory instance, such as before starting Camel, that then avoids doing auto
     * discovery of the cache factory via classpath.
     */
    public static void setLRUCacheFactory(LRUCacheFactory cacheFactory) {
        instance = cacheFactory;
    }

    /**
     * Gets (and creates if needed) the LRUCacheFactory to use.
     */
    public static LRUCacheFactory getInstance() {
        if (instance == null) {
            synchronized (LRUCacheFactory.class) {
                if (instance == null) {
                    instance = createLRUCacheFactory();
                }
            }
        }
        return instance;
    }

    /**
     * Discovers the LRUCacheFactory implementation to use: loads the class named in the
     * META-INF service descriptor if present, falling back to {@link DefaultLRUCacheFactory}
     * on any error or when no descriptor is found.
     */
    private static LRUCacheFactory createLRUCacheFactory() {
        LOG.trace("createLRUCacheFactory");
        try {
            ClassLoader classLoader = LRUCacheFactory.class.getClassLoader();
            URL url = classLoader.getResource("META-INF/services/org/apache/camel/" + FACTORY);
            if (url != null) {
                Properties props = new Properties();
                try (InputStream is = url.openStream()) {
                    props.load(is);
                }
                String clazzName = props.getProperty("class");
                if (clazzName != null) {
                    LOG.trace("Loading class: {}", clazzName);
                    Class<?> clazz = classLoader.loadClass(clazzName);
                    LOG.trace("Creating LRUCacheFactory instance from class: {}", clazzName);
                    Object factory = clazz.getDeclaredConstructor().newInstance();
                    LOG.trace("Created LRUCacheFactory instance: {}", factory);
                    LOG.info("Detected and using LRUCacheFactory: {}", factory);
                    return (LRUCacheFactory) factory;
                }
            }
        } catch (Throwable t) {
            // auto-discovery is best-effort; fall back to the default factory
            LOG.warn("Error creating LRUCacheFactory. Will use DefaultLRUCacheFactory.", t);
        }
        // use default
        LOG.debug("Creating DefaultLRUCacheFactory");
        return new DefaultLRUCacheFactory();
    }

    /**
     * Constructs an empty <tt>LRUCache</tt> instance with the specified maximumCacheSize, and will stop on eviction.
     *
     * @param maximumCacheSize the max capacity.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public static <K, V> Map<K, V> newLRUCache(int maximumCacheSize) {
        return getInstance().createLRUCache(maximumCacheSize);
    }

    /**
     * Constructs an empty <tt>LRUCache</tt> instance with the specified maximumCacheSize, and will stop on eviction.
     *
     * @param maximumCacheSize the max capacity.
     * @param onEvict callback invoked with each evicted value.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public static <K, V> Map<K, V> newLRUCache(int maximumCacheSize, Consumer<V> onEvict) {
        return getInstance().createLRUCache(maximumCacheSize, onEvict);
    }

    /**
     * Constructs an empty <tt>LRUCache</tt> instance with the specified initial capacity, maximumCacheSize, and will
     * stop on eviction.
     *
     * @param initialCapacity the initial capacity.
     * @param maximumCacheSize the max capacity.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public static <K, V> Map<K, V> newLRUCache(int initialCapacity, int maximumCacheSize) {
        return getInstance().createLRUCache(initialCapacity, maximumCacheSize);
    }

    /**
     * Constructs an empty <tt>LRUCache</tt> instance with the specified initial capacity, maximumCacheSize and
     * eviction behaviour.
     *
     * @param initialCapacity the initial capacity.
     * @param maximumCacheSize the max capacity.
     * @param stopOnEviction whether to stop service on eviction.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public static <K, V> Map<K, V> newLRUCache(int initialCapacity, int maximumCacheSize, boolean stopOnEviction) {
        return getInstance().createLRUCache(initialCapacity, maximumCacheSize, stopOnEviction);
    }

    /**
     * Constructs an empty <tt>LRUSoftCache</tt> instance with the specified maximumCacheSize, and will stop on
     * eviction.
     *
     * @param maximumCacheSize the max capacity.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public static <K, V> Map<K, V> newLRUSoftCache(int maximumCacheSize) {
        return getInstance().createLRUSoftCache(maximumCacheSize);
    }

    /**
     * Constructs an empty <tt>LRUSoftCache</tt> instance with the specified initial capacity and maximumCacheSize,
     * and will stop on eviction.
     *
     * @param initialCapacity the initial capacity.
     * @param maximumCacheSize the max capacity.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public static <K, V> Map<K, V> newLRUSoftCache(int initialCapacity, int maximumCacheSize) {
        return getInstance().createLRUSoftCache(initialCapacity, maximumCacheSize);
    }

    /**
     * Constructs an empty <tt>LRUSoftCache</tt> instance with the specified initial capacity, maximumCacheSize and
     * eviction behaviour.
     *
     * @param initialCapacity the initial capacity.
     * @param maximumCacheSize the max capacity.
     * @param stopOnEviction whether to stop service on eviction.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public static <K, V> Map<K, V> newLRUSoftCache(int initialCapacity, int maximumCacheSize, boolean stopOnEviction) {
        return getInstance().createLRUSoftCache(initialCapacity, maximumCacheSize, stopOnEviction);
    }

    /**
     * Constructs an empty <tt>LRUWeakCache</tt> instance with the specified maximumCacheSize, and will stop on
     * eviction.
     *
     * @param maximumCacheSize the max capacity.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public static <K, V> Map<K, V> newLRUWeakCache(int maximumCacheSize) {
        return getInstance().createLRUWeakCache(maximumCacheSize);
    }

    /**
     * Constructs an empty <tt>LRUWeakCache</tt> instance with the specified initial capacity and maximumCacheSize,
     * and will stop on eviction.
     *
     * @param initialCapacity the initial capacity.
     * @param maximumCacheSize the max capacity.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public static <K, V> Map<K, V> newLRUWeakCache(int initialCapacity, int maximumCacheSize) {
        return getInstance().createLRUWeakCache(initialCapacity, maximumCacheSize);
    }

    /**
     * Constructs an empty <tt>LRUWeakCache</tt> instance with the specified initial capacity, maximumCacheSize and
     * eviction behaviour.
     *
     * @param initialCapacity the initial capacity.
     * @param maximumCacheSize the max capacity.
     * @param stopOnEviction whether to stop service on eviction.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public static <K, V> Map<K, V> newLRUWeakCache(int initialCapacity, int maximumCacheSize, boolean stopOnEviction) {
        return getInstance().createLRUWeakCache(initialCapacity, maximumCacheSize, stopOnEviction);
    }

    /**
     * Constructs an empty <tt>LRUCache</tt> instance with the specified maximumCacheSize, and will stop on eviction.
     *
     * @param maximumCacheSize the max capacity.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public abstract <K, V> Map<K, V> createLRUCache(int maximumCacheSize);

    /**
     * Constructs an empty <tt>LRUCache</tt> instance with the specified maximumCacheSize, and will stop on eviction.
     *
     * @param maximumCacheSize the max capacity.
     * @param onEvict callback invoked with each evicted value.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public abstract <K, V> Map<K, V> createLRUCache(int maximumCacheSize, Consumer<V> onEvict);

    /**
     * Constructs an empty <tt>LRUCache</tt> instance with the specified initial capacity, maximumCacheSize, and will
     * stop on eviction.
     *
     * @param initialCapacity the initial capacity.
     * @param maximumCacheSize the max capacity.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public abstract <K, V> Map<K, V> createLRUCache(int initialCapacity, int maximumCacheSize);

    /**
     * Constructs an empty <tt>LRUCache</tt> instance with the specified initial capacity, maximumCacheSize and
     * eviction behaviour.
     *
     * @param initialCapacity the initial capacity.
     * @param maximumCacheSize the max capacity.
     * @param stopOnEviction whether to stop service on eviction.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public abstract <K, V> Map<K, V> createLRUCache(int initialCapacity, int maximumCacheSize, boolean stopOnEviction);

    /**
     * Constructs an empty <tt>LRUSoftCache</tt> instance with the specified maximumCacheSize, and will stop on
     * eviction.
     *
     * @param maximumCacheSize the max capacity.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public abstract <K, V> Map<K, V> createLRUSoftCache(int maximumCacheSize);

    /**
     * Constructs an empty <tt>LRUSoftCache</tt> instance with the specified initial capacity and maximumCacheSize,
     * and will stop on eviction.
     *
     * @param initialCapacity the initial capacity.
     * @param maximumCacheSize the max capacity.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public abstract <K, V> Map<K, V> createLRUSoftCache(int initialCapacity, int maximumCacheSize);

    /**
     * Constructs an empty <tt>LRUSoftCache</tt> instance with the specified initial capacity, maximumCacheSize and
     * eviction behaviour.
     *
     * @param initialCapacity the initial capacity.
     * @param maximumCacheSize the max capacity.
     * @param stopOnEviction whether to stop service on eviction.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public abstract <K, V> Map<K, V> createLRUSoftCache(int initialCapacity, int maximumCacheSize, boolean stopOnEviction);

    /**
     * Constructs an empty <tt>LRUWeakCache</tt> instance with the specified maximumCacheSize, and will stop on
     * eviction.
     *
     * @param maximumCacheSize the max capacity.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public abstract <K, V> Map<K, V> createLRUWeakCache(int maximumCacheSize);

    /**
     * Constructs an empty <tt>LRUWeakCache</tt> instance with the specified initial capacity and maximumCacheSize,
     * and will stop on eviction.
     *
     * @param initialCapacity the initial capacity.
     * @param maximumCacheSize the max capacity.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public abstract <K, V> Map<K, V> createLRUWeakCache(int initialCapacity, int maximumCacheSize);

    /**
     * Constructs an empty <tt>LRUWeakCache</tt> instance with the specified initial capacity, maximumCacheSize and
     * eviction behaviour.
     *
     * @param initialCapacity the initial capacity.
     * @param maximumCacheSize the max capacity.
     * @param stopOnEviction whether to stop service on eviction.
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public abstract <K, V> Map<K, V> createLRUWeakCache(int initialCapacity, int maximumCacheSize, boolean stopOnEviction);
}
| |
/**
* Copyright 2016 Otto (GmbH & Co KG)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ottogroup.bi.streaming.operator.json;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.NoSuchElementException;
import org.apache.sling.commons.json.JSONArray;
import org.apache.sling.commons.json.JSONException;
import org.apache.sling.commons.json.JSONObject;
import org.junit.Assert;
import org.junit.Test;
/**
* Test case for {@link JsonProcessingUtils}
* @author mnxfst
* @since Jan 13, 2016
*/
public class JsonProcessingUtilsTest {
/**
 * Test case for {@link JsonProcessingUtils#toPathArray(String)} being provided null as input
 */
@Test
public void testToPathArray_withNullInput() {
    Assert.assertArrayEquals(new String[0], JsonProcessingUtils.toPathArray(null));
}

/**
 * Test case for {@link JsonProcessingUtils#toPathArray(String)} being provided a string without dots
 * as input
 */
@Test
public void testToPathArray_withStringWithoutDots() {
    Assert.assertArrayEquals(new String[]{"test"}, JsonProcessingUtils.toPathArray("test"));
}

/**
 * Test case for {@link JsonProcessingUtils#toPathArray(String)} being provided a string with
 * dot-separated input
 */
@Test
public void testToPathArray_withValidInput() {
    Assert.assertArrayEquals(new String[]{"test", "me", "now"}, JsonProcessingUtils.toPathArray("test.me.now"));
}

/**
 * Test case for {@link JsonProcessingUtils#toPathList(String, String)} being provided null
 * as input
 */
@Test
public void testToPathList_withEmptyStringInput() {
    Assert.assertTrue(JsonProcessingUtils.toPathList(null, ",").isEmpty());
}

/**
 * Test case for {@link JsonProcessingUtils#toPathList(String, String)} being provided a dot
 * as separator string
 */
@Test
public void testToPathList_withDotStringAsSeparator() {
    Assert.assertTrue(JsonProcessingUtils.toPathList("path.to.content", ".").isEmpty());
}

/**
 * Test case for {@link JsonProcessingUtils#toPathList(String, String)} being provided
 * a string holding only a separator
 */
@Test
public void testToPathList_withOnlySeparator() {
    Assert.assertTrue(JsonProcessingUtils.toPathList(",", ",").isEmpty());
}

/**
 * Test case for {@link JsonProcessingUtils#toPathList(String, String)} being provided
 * a string holding a separator and two empty path elements
 */
@Test
public void testToPathList_withOnlySeparatorTwoEmptyPathElements() {
    Assert.assertTrue(JsonProcessingUtils.toPathList(" , ", ",").isEmpty());
}

/**
 * Test case for {@link JsonProcessingUtils#toPathList(String, String)} being provided
 * a string holding a character which is not set as separator
 */
@Test
public void testToPathList_withValueAndSeparatorNotContainedInValue() {
    List<String[]> result = JsonProcessingUtils.toPathList(" wh;ww ", ",");
    Assert.assertEquals(1, result.size());
    Assert.assertArrayEquals(new String[]{"wh;ww"}, result.get(0));
}

/**
 * Test case for {@link JsonProcessingUtils#toPathList(String, String)} being provided
 * a string holding one element and a valid content separator
 */
@Test
public void testToPathList_withOnePathAndValidSeparator() {
    List<String[]> paths = JsonProcessingUtils.toPathList("path.to.content", ",");
    Assert.assertEquals(1, paths.size());
    Assert.assertArrayEquals(new String[]{"path","to","content"}, paths.get(0));
}
/**
 * Verifies that {@link JsonProcessingUtils#getFieldValue(org.apache.sling.commons.json.JSONObject, String[])}
 * rejects a missing json object parameter
 */
@Test(expected=IllegalArgumentException.class)
public void testGetFieldValue_withNullObject() throws Exception {
    new JsonProcessingUtils().getFieldValue(null, new String[]{"queues"});
}

/**
 * Verifies that {@link JsonProcessingUtils#getFieldValue(org.apache.sling.commons.json.JSONObject, String[])}
 * rejects a missing path parameter
 */
@Test(expected=IllegalArgumentException.class)
public void testGetFieldValue_withNullPath() throws Exception {
    final String rawJson = "{\"values\":[[value-1, value-2, value-3],[value-a,value-b,value-c],[[value-00,value-11]]]}";
    new JsonProcessingUtils().getFieldValue(new JSONObject(rawJson), null);
}

/**
 * Verifies that {@link JsonProcessingUtils#getFieldValue(org.apache.sling.commons.json.JSONObject, String[])}
 * returns the unchanged input object when the path array is empty
 */
@Test
public void testGetFieldValue_withEmptyPath() throws Exception {
    final String rawJson = "{\"values\":[[value-1, value-2, value-3],[value-a,value-b,value-c],[[value-00,value-11]]]}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    Assert.assertEquals(jsonObject, new JsonProcessingUtils().getFieldValue(jsonObject, new String[0]));
}
/**
 * Verifies that {@link JsonProcessingUtils#getFieldValue(JSONObject, String[])} raises an error
 * when the one-element path references a non-existing key
 */
@Test(expected=NoSuchElementException.class)
public void testGetFieldValue_withNonExistingElementPath() throws Exception {
    final JSONObject flatJson = new JSONObject();
    flatJson.accumulate("key", "value");
    new JsonProcessingUtils().getFieldValue(flatJson, new String[]{"non-existing"});
}

/**
 * Verifies that {@link JsonProcessingUtils#getFieldValue(JSONObject, String[])} resolves a
 * one-element path against a flat json object holding that key
 */
@Test
public void testGetFieldValue_withFlatJSONAndExistingElementPath() throws Exception {
    final JSONObject flatJson = new JSONObject();
    flatJson.accumulate("key", "value");
    Assert.assertEquals("value", new JsonProcessingUtils().getFieldValue(flatJson, new String[]{"key"}));
}

/**
 * Verifies that {@link JsonProcessingUtils#getFieldValue(org.apache.sling.commons.json.JSONObject, String[])}
 * raises an error for a path whose first element does not exist in the nested json object
 */
@Test(expected=NoSuchElementException.class)
public void testGetFieldValue_withNestedJSONAndInvalidPath() throws Exception {
    final String rawJson = "{\"values\": { \"key-1\":\"value-1\"}}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    new JsonProcessingUtils().getFieldValue(jsonObject, new String[]{"value","key-2"});
}

/**
 * Verifies that {@link JsonProcessingUtils#getFieldValue(org.apache.sling.commons.json.JSONObject, String[])}
 * raises an error for a path whose last element does not exist in the nested json object
 */
@Test(expected=NoSuchElementException.class)
public void testGetFieldValue_withNestedJSONAndInvalidPath2() throws Exception {
    final String rawJson = "{\"values\": { \"key-1\":\"value-1\"}}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    new JsonProcessingUtils().getFieldValue(jsonObject, new String[]{"values","key-2"});
}

/**
 * Verifies that {@link JsonProcessingUtils#getFieldValue(org.apache.sling.commons.json.JSONObject, String[])}
 * raises an error when the path tries to step through an array without an index
 */
@Test(expected=NoSuchElementException.class)
public void testGetFieldValue_withNestedJSONAndPathIntoArray() throws Exception {
    final String rawJson = "{\"values\": { \"key-1\":[value-1]}}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    new JsonProcessingUtils().getFieldValue(jsonObject, new String[]{"values","key-1", "another"});
}

/**
 * Verifies that {@link JsonProcessingUtils#getFieldValue(org.apache.sling.commons.json.JSONObject, String[])}
 * resolves a valid two-element path against a nested json object
 */
@Test
public void testGetFieldValue_withNestedJSONAndValidPath() throws Exception {
    final String rawJson = "{\"values\": { \"key-1\":\"value-1\"}}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    Assert.assertEquals("value-1", new JsonProcessingUtils().getFieldValue(jsonObject, new String[]{"values","key-1"}));
}

/**
 * Verifies that {@link JsonProcessingUtils#getFieldValue(org.apache.sling.commons.json.JSONObject, String[])}
 * raises an error when a path element indexes into an array of arrays
 */
@Test(expected=NoSuchElementException.class)
public void testGetFieldValue_withNestedJSONAndValidPathToArrayOfArrays() throws Exception {
    final String rawJson = "{\"values\": { \"key-1\":[[value-1]]}}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    new JsonProcessingUtils().getFieldValue(jsonObject, new String[]{"values","key-1[0]","test"});
}

/**
 * Verifies that {@link JsonProcessingUtils#getFieldValue(org.apache.sling.commons.json.JSONObject, String[])}
 * resolves a path pointing into an array that holds an object structure
 */
@Test
public void testGetFieldValue_withNestedJSONAndValidPathToArrayHoldingObject() throws Exception {
    final String rawJson = "{\"values\": { \"key-1\":[{\"test\":\"value-1\"}]}}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    Assert.assertEquals("value-1", new JsonProcessingUtils().getFieldValue(jsonObject, new String[]{"values","key-1[0]","test"}));
}

/**
 * Verifies that {@link JsonProcessingUtils#getFieldValue(org.apache.sling.commons.json.JSONObject, String[])}
 * resolves a single path element carrying two array indices
 */
@Test
public void testGetFieldValue_withSingleElementPathPointingToArrayElement() throws Exception {
    final String rawJson = "{\"values\":[[value-1, value-2, value-3],[value-a,value-b,value-c],[[00,value-11]]]}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    Assert.assertEquals("value-2", new JsonProcessingUtils().getFieldValue(jsonObject, new String[]{"values[0][1]"}));
}
/**
 * Verifies that {@link JsonProcessingUtils#getTextFieldValue(org.apache.sling.commons.json.JSONObject, String[])}
 * renders numbers, objects and null values as their textual representation when referenced
 * through a single path element carrying array indices
 */
@Test
public void testGetTextFieldValue_withSingleElementPathPointingToArrayElement() throws Exception {
    final String rawJson = "{\"values\":[[value-1, value-2, value-3],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    final JsonProcessingUtils utils = new JsonProcessingUtils();
    Assert.assertEquals("0", utils.getTextFieldValue(jsonObject, new String[]{"values[2][0][0]"}));
    Assert.assertEquals("11", utils.getTextFieldValue(jsonObject, new String[]{"values[2][0][1]"}));
    Assert.assertEquals("{\"text\":\"val\"}", utils.getTextFieldValue(jsonObject, new String[]{"values[2][0][2]"}));
    Assert.assertEquals("null", utils.getTextFieldValue(jsonObject, new String[]{"values[2][0][3]"}));
}
/**
 * Test case for {@link JsonProcessingUtils#getIntegerFieldValue(JSONObject, String[])} being provided
 * values but a path pointing towards a non-integer string element
 */
@Test(expected=NumberFormatException.class)
public void testGetIntegerFieldValue_withReferencePointingTowardsStringElement() throws Exception {
    String s = "{\"values\":[[value-1, value-2, value-3],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    new JsonProcessingUtils().getIntegerFieldValue(input, new String[]{"values[0][0]"});
}

/**
 * Test case for {@link JsonProcessingUtils#getIntegerFieldValue(JSONObject, String[])} being provided
 * values but a path pointing towards a non-integer boolean element
 */
@Test(expected=NumberFormatException.class)
public void testGetIntegerFieldValue_withReferencePointingTowardsBooleanElement() throws Exception {
    String s = "{\"values\":[[value-1, value-2, value-3, true],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    new JsonProcessingUtils().getIntegerFieldValue(input, new String[]{"values[0][3]"});
}

/**
 * Test case for {@link JsonProcessingUtils#getIntegerFieldValue(JSONObject, String[])} being provided
 * values and a path pointing towards an integer element
 */
@Test
public void testGetIntegerFieldValue_withReferencePointingTowardsIntegerElement() throws Exception {
    String s = "{\"values\":[[value-1, value-2, value-3, true, 5],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    Assert.assertEquals(Integer.valueOf(5), new JsonProcessingUtils().getIntegerFieldValue(input, new String[]{"values[0][4]"}));
}

/**
 * Test case for {@link JsonProcessingUtils#getIntegerFieldValue(JSONObject, String[])} being provided
 * values and a path pointing towards an integer element inside a string
 */
@Test
public void testGetIntegerFieldValue_withReferencePointingTowardsIntegerInsideStringElement() throws Exception {
    String s = "{\"values\":[[value-1, value-2, value-3, true, 5, \"12345\"],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    Assert.assertEquals(Integer.valueOf(12345), new JsonProcessingUtils().getIntegerFieldValue(input, new String[]{"values[0][5]"}));
}

/**
 * Test case for {@link JsonProcessingUtils#getIntegerFieldValue(JSONObject, String[], boolean)} being provided
 * valid json but request for field that does not exist (field is required)
 */
@Test(expected=NoSuchElementException.class)
public void testGetIntegerFieldValue_withRequiredFieldWhichDoesNotExist() throws Exception {
    String s = "{\"values\":[[value-1, value-2, value-3, true, 5, \"12345\"],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    new JsonProcessingUtils().getIntegerFieldValue(input, new String[]{"does", "not", "exit"}, true);
}

/**
 * Test case for {@link JsonProcessingUtils#getIntegerFieldValue(JSONObject, String[], boolean)} being provided
 * valid json and request for field that holds non-string/non-integer value
 */
@Test(expected=NumberFormatException.class)
public void testGetIntegerFieldValue_withFieldNonStringNonInteger() throws Exception {
    String s = "{\"values\":[[value-1, value-2, value-3, true, 5, \"18188ddd\"],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    new JsonProcessingUtils().getIntegerFieldValue(input, new String[]{"values[0][5]"}, true);
}

/**
 * Test case for {@link JsonProcessingUtils#getIntegerFieldValue(JSONObject, String[], boolean)} being provided
 * valid json but request for field that does not exist (field is not required)
 */
@Test
public void testGetIntegerFieldValue_withNonRequiredFieldWhichDoesNotExist() throws Exception {
    String s = "{\"values\":[[value-1, value-2, value-3, true, 5, \"12345\"],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    Assert.assertNull(new JsonProcessingUtils().getIntegerFieldValue(input, new String[]{"does", "not", "exit"}, false));
}
/**
 * Verifies that {@link JsonProcessingUtils#getDoubleFieldValue(JSONObject, String[])} raises a
 * format error when the path references a non-double string element
 */
@Test(expected=NumberFormatException.class)
public void testGetDoubleFieldValue_withReferencePointingTowardsStringElement() throws Exception {
    final String rawJson = "{\"values\":[[value-1, value-2, value-3],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    new JsonProcessingUtils().getDoubleFieldValue(jsonObject, new String[]{"values[0][0]"});
}

/**
 * Verifies that {@link JsonProcessingUtils#getDoubleFieldValue(JSONObject, String[])} raises a
 * format error when the path references a boolean element
 */
@Test(expected=NumberFormatException.class)
public void testGetDoubleFieldValue_withReferencePointingTowardsBooleanElement() throws Exception {
    final String rawJson = "{\"values\":[[value-1, value-2, value-3, true],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    new JsonProcessingUtils().getDoubleFieldValue(jsonObject, new String[]{"values[0][3]"});
}

/**
 * Verifies that {@link JsonProcessingUtils#getDoubleFieldValue(JSONObject, String[])} extracts a
 * plain double element referenced by the path
 */
@Test
public void testGetDoubleFieldValue_withReferencePointingTowardsIntegerElement() throws Exception {
    final String rawJson = "{\"values\":[[value-1, value-2, value-3, true, 5.1],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    Assert.assertEquals(Double.valueOf(5.1), new JsonProcessingUtils().getDoubleFieldValue(jsonObject, new String[]{"values[0][4]"}));
}

/**
 * Verifies that {@link JsonProcessingUtils#getDoubleFieldValue(JSONObject, String[])} extracts a
 * double element embedded inside a string value
 */
@Test
public void testGetDoubleFieldValue_withReferencePointingTowardsIntegerInsideStringElement() throws Exception {
    final String rawJson = "{\"values\":[[value-1, value-2, value-3, true, 5, \"1.23\"],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    Assert.assertEquals(Double.valueOf(1.23), new JsonProcessingUtils().getDoubleFieldValue(jsonObject, new String[]{"values[0][5]"}));
}
/**
 * Test case for {@link JsonProcessingUtils#getBooleanFieldValue(JSONObject, String[])} being provided
 * values but a path pointing towards a non-boolean string element
 */
@Test
public void testGetBooleanFieldValue_withReferencePointingTowardsStringElement() throws Exception {
    String s = "{\"values\":[[value-1, value-2, value-3],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    Assert.assertFalse((Boolean)new JsonProcessingUtils().getBooleanFieldValue(input, new String[]{"values[0][0]"}));
}

/**
 * Test case for {@link JsonProcessingUtils#getBooleanFieldValue(JSONObject, String[])} being provided
 * values but a path pointing towards an integer element
 */
@Test(expected=ParseException.class)
public void testGetBooleanFieldValue_withReferencePointingTowardsIntegerElement() throws Exception {
    String s = "{\"values\":[[value-1, value-2, value-3, 5],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    Assert.assertTrue((Boolean)new JsonProcessingUtils().getBooleanFieldValue(input, new String[]{"values[0][3]"}));
}

/**
 * Test case for {@link JsonProcessingUtils#getBooleanFieldValue(JSONObject, String[])} being provided
 * values and a path pointing towards a boolean element
 */
@Test
public void testGetBooleanFieldValue_withReferencePointingTowardsBooleanElement() throws Exception {
    String s = "{\"values\":[[value-1, value-2, value-3, true],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    Assert.assertTrue((Boolean)new JsonProcessingUtils().getBooleanFieldValue(input, new String[]{"values[0][3]"}));
}

/**
 * Test case for {@link JsonProcessingUtils#getBooleanFieldValue(JSONObject, String[])} being provided
 * values and a path pointing towards a boolean element inside a string
 */
@Test
public void testGetBooleanFieldValue_withReferencePointingTowardsBooleanInsideStringElement() throws Exception {
    String s = "{\"values\":[[value-1, value-2, value-3, true, 5, \"true\"],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    Assert.assertTrue((Boolean)new JsonProcessingUtils().getBooleanFieldValue(input, new String[]{"values[0][5]"}));
}
/**
 * Verifies that {@link JsonProcessingUtils#getZonedDateTimeFieldValue(JSONObject, String[])} parses
 * a date value held inside a string element
 */
@Test
public void testGetZonedDateTimeValue_withDateInStringContent() throws Exception {
    final ZonedDateTime referenceTime = ZonedDateTime.now();
    final String rawJson = "{\"values\":[[\""+referenceTime.toString()+"\", value-2, value-3, true, 5, \"true\"],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    Assert.assertEquals(referenceTime, new JsonProcessingUtils().getZonedDateTimeFieldValue(jsonObject, new String[]{"values[0][0]"}));
}

/**
 * Verifies that {@link JsonProcessingUtils#getZonedDateTimeFieldValue(JSONObject, String[], String)} parses
 * a date value held inside a string element when no pattern is given
 */
@Test
public void testGetZonedDateTimeValue_withDateInStringContentNullPattern() throws Exception {
    final ZonedDateTime referenceTime = ZonedDateTime.now();
    final String rawJson = "{\"values\":[[\""+referenceTime.toString()+"\", value-2, value-3, true, 5, \"true\"],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    Assert.assertEquals(referenceTime, new JsonProcessingUtils().getZonedDateTimeFieldValue(jsonObject, new String[]{"values[0][0]"}, null));
}

/**
 * Verifies that {@link JsonProcessingUtils#getZonedDateTimeFieldValue(JSONObject, String[])} raises a
 * parse error when the referenced location holds a number
 */
@Test(expected=ParseException.class)
public void testGetZonedDateTimeValue_withNumberInLocation() throws Exception {
    final String rawJson = "{\"values\":[[10, value-2, value-3, true, 5, \"true\"],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    new JsonProcessingUtils().getZonedDateTimeFieldValue(jsonObject, new String[]{"values[0][0]"});
}

/**
 * Verifies that {@link JsonProcessingUtils#getZonedDateTimeFieldValue(JSONObject, String[], String)} raises a
 * parse error when the referenced location holds a number and no pattern is given
 */
@Test(expected=ParseException.class)
public void testGetZonedDateTimeValue_withNumberInLocationAndNullPattern() throws Exception {
    final String rawJson = "{\"values\":[[10, value-2, value-3, true, 5, \"true\"],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    final JSONObject jsonObject = new JSONObject(rawJson);
    new JsonProcessingUtils().getZonedDateTimeFieldValue(jsonObject, new String[]{"values[0][0]"}, null);
}
/**
 * Test case for {@link JsonProcessingUtils#getDateTimeFieldValue(JSONObject, String[], java.text.SimpleDateFormat)} being provided
 * null as formatter
 */
@Test(expected=IllegalArgumentException.class)
public void testGetDateTimeValue_withNullFormatter() throws Exception {
    String s = "{\"values\":[[\"2016-01-16\", value-2, value-3, true, 5, \"true\"],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    // typed local required so the SimpleDateFormat overload (not the String one) is selected
    SimpleDateFormat f = null;
    new JsonProcessingUtils().getDateTimeFieldValue(input, new String[]{"values[0][0]"}, f);
}
/**
 * Test case for {@link JsonProcessingUtils#getDateTimeFieldValue(JSONObject, String[], String)} being provided
 * an empty string as formatter
 */
@Test(expected=ParseException.class)
public void testGetDateTimeValue_withEmptyFormatterString() throws Exception {
    String s = "{\"values\":[[\"2016-01-16\", value-2, value-3, true, 5, \"true\"],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    new JsonProcessingUtils().getDateTimeFieldValue(input, new String[]{"values[0][0]"}, "");
}
/**
 * Test case for {@link JsonProcessingUtils#getDateTimeFieldValue(JSONObject, String[], java.text.SimpleDateFormat)} being provided
 * a valid formatter
 */
@Test
public void testGetDateTimeValue_withValidFormatter() throws Exception {
    String s = "{\"values\":[[\"2016-01-16\", value-2, value-3, true, 5, \"true\"],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
    // single invocation: the previous version called the method twice and discarded the first result
    Assert.assertEquals(new SimpleDateFormat("yyyy-MM-dd").parse("2016-01-16"), new JsonProcessingUtils().getDateTimeFieldValue(input, new String[]{"values[0][0]"}, f));
}
/**
 * Test case for {@link JsonProcessingUtils#getDateTimeFieldValue(JSONObject, String[], String)} being provided
 * a valid formatter string
 */
@Test
public void testGetDateTimeValue_withValidFormatterString() throws Exception {
    String s = "{\"values\":[[\"2016-01-16\", value-2, value-3, true, 5, \"true\"],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    Assert.assertEquals(new SimpleDateFormat("yyyy-MM-dd").parse("2016-01-16"), new JsonProcessingUtils().getDateTimeFieldValue(input, new String[]{"values[0][0]"}, "yyyy-MM-dd"));
}
/**
 * Test case for {@link JsonProcessingUtils#getDateTimeFieldValue(JSONObject, String[], java.text.SimpleDateFormat)} being provided
 * a valid formatter but non-matching content
 */
@Test(expected=ParseException.class)
public void testGetDateTimeValue_withValidFormatterNonMatchingContet() throws Exception {
    // "20160116" deliberately lacks the dashes required by the yyyy-MM-dd pattern
    String s = "{\"values\":[[\"20160116\", value-2, value-3, true, 5, \"true\"],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
    new JsonProcessingUtils().getDateTimeFieldValue(input, new String[]{"values[0][0]"}, f);
}
/**
 * Test case for {@link JsonProcessingUtils#getDateTimeFieldValue(JSONObject, String[], String)} being provided
 * a valid formatter string and non-matching content
 */
@Test(expected=ParseException.class)
public void testGetDateTimeValue_withValidFormatterStringNonMatchingContent() throws Exception {
    String s = "{\"values\":[[\"20160116\", value-2, value-3, true, 5, \"true\"],[value-a,value-b,value-c],[[00,11,{\"text\":\"val\"}, null]]]}";
    JSONObject input = new JSONObject(s);
    new JsonProcessingUtils().getDateTimeFieldValue(input, new String[]{"values[0][0]"}, "yyyy-MM-dd");
}
/**
 * Test case for {@link JsonProcessingUtils#getArrayElement(org.apache.sling.commons.json.JSONArray, int)} being provided
 * null as input to array parameter
 */
@Test(expected = IllegalArgumentException.class)
public void testGetArrayElement_withNullArray() throws Exception {
    new JsonProcessingUtils().getArrayElement(null, 1);
}
/**
 * Test case for {@link JsonProcessingUtils#getArrayElement(org.apache.sling.commons.json.JSONArray, int)} being provided
 * -1 as input to position parameter
 */
@Test(expected = NoSuchElementException.class)
public void testGetArrayElement_withValueArrayAndNegativePosition() throws Exception {
    // element order inside the HashSet is irrelevant here: only the negative index matters
    Collection<String> testCollection = new HashSet<>();
    testCollection.add("value-1");
    testCollection.add("value-2");
    testCollection.add("value-3");
    new JsonProcessingUtils().getArrayElement(new JSONArray(testCollection), -1);
}
/**
 * Test case for {@link JsonProcessingUtils#getArrayElement(org.apache.sling.commons.json.JSONArray, int)} being provided
 * a position after max. allowed value
 */
@Test(expected = NoSuchElementException.class)
public void testGetArrayElement_withValueArrayAndInvalidPosition() throws Exception {
    Collection<String> testCollection = new ArrayList<>();
    testCollection.add("value-1");
    testCollection.add("value-2");
    testCollection.add("value-3");
    // index 3 is the first out-of-bounds position for a three-element array
    new JsonProcessingUtils().getArrayElement(new JSONArray(testCollection), 3);
}
/**
 * Test case for {@link JsonProcessingUtils#getArrayElement(org.apache.sling.commons.json.JSONArray, int)} being provided
 * valid positions inside the array bounds: each element must be retrievable by its index
 */
@Test
public void testGetArrayElement_withValueArrayAndValidPosition() throws Exception {
    Collection<String> elements = new ArrayList<>();
    elements.add("value-1");
    elements.add("value-2");
    elements.add("value-3");
    JsonProcessingUtils utils = new JsonProcessingUtils();
    JSONArray array = new JSONArray(elements);
    // ArrayList preserves insertion order, so index i must yield the i-th added value
    int position = 0;
    for (String expected : elements) {
        Assert.assertEquals(expected, utils.getArrayElement(array, position++));
    }
}
/**
 * Test case for {@link JsonProcessingUtils#containsArrayReference(String)} being provided null
 * as input
 */
@Test
public void testContainsArrayReference_withNullInput() {
    Assert.assertFalse(new JsonProcessingUtils().containsArrayReference(null));
}
/**
 * Test case for {@link JsonProcessingUtils#containsArrayReference(String)} being provided invalid input
 */
@Test
public void testContainsArrayReference_withInvalidInput() {
    // trailing ".ddk" after the index brackets makes this an invalid array reference
    Assert.assertFalse(new JsonProcessingUtils().containsArrayReference("d[100][1][100].ddk"));
}
/**
 * Test case for {@link JsonProcessingUtils#containsArrayReference(String)} being provided a valid input
 */
@Test
public void testContainsArrayReference_withValidInput() {
    Assert.assertTrue(new JsonProcessingUtils().containsArrayReference("va[100][1][100]"));
}
/**
 * Test case for {@link JsonProcessingUtils#getArrayElement(JSONObject, String)} being provided
 * null as input to json parameter
 */
@Test(expected=IllegalArgumentException.class)
public void testGetArrayElement_withNullJSON() throws Exception {
    new JsonProcessingUtils().getArrayElement(null, "field");
}
/**
 * Test case for {@link JsonProcessingUtils#getArrayElement(JSONObject, String)} being provided
 * null as input to path parameter
 */
@Test(expected=IllegalArgumentException.class)
public void testGetArrayElement_withNullPath() throws Exception {
    new JsonProcessingUtils().getArrayElement(new JSONObject(), null);
}
/**
 * Test case for {@link JsonProcessingUtils#getArrayElement(JSONObject, String)} being provided
 * an empty path
 */
@Test
public void testGetArrayElement_withEmptyPath() throws Exception {
    // an empty path is expected to return the input object itself, unchanged
    JSONObject input = new JSONObject();
    Assert.assertEquals(input, new JsonProcessingUtils().getArrayElement(input, ""));
}
/**
 * Test case for {@link JsonProcessingUtils#getArrayElement(JSONObject, String)} being provided
 * a path pointing to an unknown element
 */
@Test(expected=NoSuchElementException.class)
public void testGetArrayElement_withUnknownElementPath() throws Exception {
    JSONObject input = new JSONObject();
    // the call is expected to throw; the former Assert.assertEquals wrapper was unreachable and misleading
    new JsonProcessingUtils().getArrayElement(input, "test[1]");
}
/**
 * Test case for {@link JsonProcessingUtils#getArrayElement(JSONObject, String)} being provided
 * a path pointing to an element which is not an array
 */
@Test(expected=NoSuchElementException.class)
public void testGetArrayElement_withNonArrayElementPath() throws Exception {
    JSONObject input = new JSONObject();
    input.put("test", "value");
    // "test" holds a plain string, so the array dereference must fail
    new JsonProcessingUtils().getArrayElement(input, "test[1]");
}
/**
 * Test case for {@link JsonProcessingUtils#getArrayElement(JSONObject, String)} being provided
 * a path which points to an non-existing element behind the array
 */
@Test(expected=NoSuchElementException.class)
public void testGetArrayElement_withPathBehindArray() throws Exception {
    // "values" has exactly one element, so the second index [1] points past the end
    String s = "{\"values\":[value-1]}";
    JSONObject object = new JSONObject(s);
    new JsonProcessingUtils().getArrayElement(object, "values[0][1]");
}
/**
 * Test case for {@link JsonProcessingUtils#getArrayElement(JSONObject, String)} being provided
 * valid input
 */
@Test
public void testGetArrayElement_withObjectAndPath() throws Exception {
    // nested arrays three levels deep; every leaf must be addressable via chained indices
    String s = "{\"values\":[[value-1, value-2, value-3],[value-a,value-b,value-c],[[value-00,value-11]]]}";
    JSONObject object = new JSONObject(s);
    Assert.assertEquals("value-00", new JsonProcessingUtils().getArrayElement(object, "values[2][0][0]"));
    Assert.assertEquals("value-11", new JsonProcessingUtils().getArrayElement(object, "values[2][0][1]"));
    Assert.assertEquals("value-1", new JsonProcessingUtils().getArrayElement(object, "values[0][0]"));
    Assert.assertEquals("value-2", new JsonProcessingUtils().getArrayElement(object, "values[0][1]"));
    Assert.assertEquals("value-3", new JsonProcessingUtils().getArrayElement(object, "values[0][2]"));
    Assert.assertEquals("value-a", new JsonProcessingUtils().getArrayElement(object, "values[1][0]"));
    Assert.assertEquals("value-b", new JsonProcessingUtils().getArrayElement(object, "values[1][1]"));
    Assert.assertEquals("value-c", new JsonProcessingUtils().getArrayElement(object, "values[1][2]"));
}
/**
 * Test case for {@link JsonProcessingUtils#insertField(JSONObject, String[], java.io.Serializable)}
 * being provided null as input to JSON object parameter
 */
@Test(expected=IllegalArgumentException.class)
public void testInsertField_withNullJSONObject() throws Exception {
    new JsonProcessingUtils().insertField(null, new String[]{"test"}, "value");
}
/**
 * Test case for {@link JsonProcessingUtils#insertField(JSONObject, String[], java.io.Serializable)}
 * being provided null as input to path parameter
 */
@Test(expected=IllegalArgumentException.class)
public void testInsertField_withNullPathParameter() throws Exception {
    new JsonProcessingUtils().insertField(new JSONObject(), null, "value");
}
/**
 * Test case for {@link JsonProcessingUtils#insertField(JSONObject, String[], java.io.Serializable)}
 * being provided null as input to value parameter
 */
@Test(expected=IllegalArgumentException.class)
public void testInsertField_withNullValueParameter() throws Exception {
    new JsonProcessingUtils().insertField(new JSONObject(), new String[]{"test"}, null);
}
/**
 * Test case for {@link JsonProcessingUtils#insertField(JSONObject, String[], java.io.Serializable)}
 * being provided an empty path: the document must be returned unchanged
 */
@Test
public void testInsertField_withEmptyValueParameter() throws Exception {
    // NOTE(review): the method name suggests an empty value, but the test actually provides an empty path
    Assert.assertEquals("{}", new JsonProcessingUtils().insertField(new JSONObject(), new String[0], "test").toString());
}
/**
 * Test case for {@link JsonProcessingUtils#insertField(JSONObject, String[], java.io.Serializable)}
 * being provided a one-element path
 */
@Test
public void testInsertField_withOneElementPath() throws Exception {
    Assert.assertEquals("{\"key\":\"test\"}", new JsonProcessingUtils().insertField(new JSONObject(), new String[]{"key"}, "test").toString());
}
/**
 * Test case for {@link JsonProcessingUtils#insertField(JSONObject, String[], java.io.Serializable)}
 * being provided a one-element path but existing element at that position
 */
@Test(expected=JSONException.class)
public void testInsertField_withOneElementPathAndExistingElement() throws Exception {
    new JsonProcessingUtils().insertField(new JSONObject("{\"key\":\"test\"}"), new String[]{"key"}, "test");
}
/**
 * Test case for {@link JsonProcessingUtils#insertField(JSONObject, String[], java.io.Serializable)}
 * being provided a one-element path but existing element at that position (override flag explicitly false)
 */
@Test(expected=JSONException.class)
public void testInsertField_withOneElementPathAndExistingElementOverrideFalse() throws Exception {
    new JsonProcessingUtils().insertField(new JSONObject("{\"key\":\"test\"}"), new String[]{"key"}, "test", false);
}
/**
 * Test case for {@link JsonProcessingUtils#insertField(JSONObject, String[], java.io.Serializable)}
 * being provided a one-element path but existing element at that position (override activated)
 */
@Test
public void testInsertField_withOneElementPathAndExistingElementButOverrideActivated() throws Exception {
    // with override=true an existing value at the destination must be replaced, both at the
    // top level and for a nested path
    Assert.assertEquals("{\"key\":\"new-value\"}",
            new JsonProcessingUtils().insertField(new JSONObject("{\"key\":\"test\"}"), new String[]{"key"}, "new-value", true).toString());
    Assert.assertEquals("{\"data\":{\"field\":{\"id\":\"1\"},\"query\":\"v2\"}}",
            new JsonProcessingUtils().insertField(new JSONObject("{\"data\":{\"field\":{\"id\":\"1\"},\"query\":\"q2=v1\"}}"), new String[]{"data","query"}, "v2", true).toString());
}
/**
 * Test case for {@link JsonProcessingUtils#insertField(JSONObject, String[], java.io.Serializable)}
 * being provided a two-element path but existing element at that position
 */
@Test(expected=JSONException.class)
public void testInsertField_withTwoElementPathAndExistingElement() throws Exception {
    final String content = "{\"firstLevel\":{\"key\":\"test\"}}";
    new JsonProcessingUtils().insertField(new JSONObject(content), new String[]{"firstLevel", "key"}, "test");
}
/**
 * Test case for {@link JsonProcessingUtils#insertField(JSONObject, String[], java.io.Serializable)}
 * being provided a two-element path and valid destination position
 */
@Test
public void testInsertField_withTwoElementPathAndValidDestinationElement() throws Exception {
    final String content = "{\"firstLevel\":{\"key\":\"test\"}}";
    final JSONObject input = new JSONObject(content);
    // the insertion mutates the passed-in object, so the assertion checks input directly
    new JsonProcessingUtils().insertField(input, new String[]{"firstLevel","secondLevel"}, "no-test");
    Assert.assertEquals("{\"firstLevel\":{\"key\":\"test\",\"secondLevel\":\"no-test\"}}", input.toString());
}
/**
 * Test case for {@link JsonProcessingUtils#insertField(JSONObject, String[], java.io.Serializable)}
 * being provided a three-element path and content that has only two levels and second level references value
 */
@Test(expected=JSONException.class)
public void testInsertField_withThreeElementPathAndExistingTwoLevelElement() throws Exception {
    // "firstLevel.key" is a plain value, so it cannot serve as parent of "thirdLevel"
    final String content = "{\"firstLevel\":{\"key\":\"test\"}}";
    new JsonProcessingUtils().insertField(new JSONObject(content), new String[]{"firstLevel", "key", "thirdLevel"}, "test");
}
/**
 * Test case for {@link JsonProcessingUtils#insertField(JSONObject, String[], java.io.Serializable)}
 * being provided a three-element path and content that has only two levels: missing intermediate
 * objects are expected to be created on the fly
 */
@Test
public void testInsertField_withThreeElementPathTwoLevelDocumentExpectInsertion() throws Exception {
    final String content = "{\"firstLevel\":{\"key\":\"test\"}}";
    final String expected = "{\"firstLevel\":{\"key\":\"test\",\"secondLevel\":{\"thirdLevel\":\"another\"}}}";
    String result = new JsonProcessingUtils().insertField(new JSONObject(content), new String[]{"firstLevel", "secondLevel", "thirdLevel"}, "another").toString();
    Assert.assertEquals(expected, result);
}
/**
 * Test case for {@link JsonProcessingUtils#updateArrayElement(JSONArray, int, java.io.Serializable)} being
 * provided null as input to array element parameter
 */
@Test(expected=IllegalArgumentException.class)
public void testUpdateArrayElement_withNullArray() throws Exception {
    new JsonProcessingUtils().updateArrayElement(null, 1, "10");
}
/**
 * Test case for {@link JsonProcessingUtils#updateArrayElement(JSONArray, int, java.io.Serializable)} being
 * provided a position less than zero
 */
@Test(expected=NoSuchElementException.class)
public void testUpdateArrayElement_withPositionLessZero() throws Exception {
    new JsonProcessingUtils().updateArrayElement(new JSONArray(), -1, "10");
}
/**
 * Test case for {@link JsonProcessingUtils#updateArrayElement(JSONArray, int, java.io.Serializable)} being
 * provided a position outside the array (larger)
 */
@Test(expected=NoSuchElementException.class)
public void testUpdateArrayElement_withPositionLargerThanArray() throws Exception {
    List<String> content = new ArrayList<>();
    content.add("test-1");
    content.add("test-2");
    content.add("test-3");
    // index 3 is the first out-of-bounds position for a three-element array
    new JsonProcessingUtils().updateArrayElement(new JSONArray(content), 3, "10");
}
/**
 * Test case for {@link JsonProcessingUtils#updateArrayElement(JSONArray, int, java.io.Serializable)} being
 * provided a valid position inside the array
 */
@Test
public void testUpdateArrayElement_withPositionInsideArray() throws Exception {
    List<String> content = new ArrayList<>();
    content.add("test-1");
    content.add("test-2");
    content.add("test-3");
    JSONArray array = new JSONArray(content);
    // verify the pre-update value, perform the in-place update, then verify the replacement
    Assert.assertEquals("test-2", new JsonProcessingUtils().getArrayElement(array, 1));
    new JsonProcessingUtils().updateArrayElement(array, 1, "test-4");
    Assert.assertEquals("test-4", new JsonProcessingUtils().getArrayElement(array, 1));
}
}
| |
// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
package com.google.crypto.tink.signature;
import com.google.crypto.tink.KeyTemplate;
import com.google.crypto.tink.KeyTypeManager;
import com.google.crypto.tink.PrivateKeyTypeManager;
import com.google.crypto.tink.PublicKeySign;
import com.google.crypto.tink.Registry;
import com.google.crypto.tink.proto.Ed25519KeyFormat;
import com.google.crypto.tink.proto.Ed25519PrivateKey;
import com.google.crypto.tink.proto.Ed25519PublicKey;
import com.google.crypto.tink.proto.KeyData.KeyMaterialType;
import com.google.crypto.tink.subtle.Ed25519Sign;
import com.google.crypto.tink.subtle.Validators;
import com.google.protobuf.ByteString;
import com.google.protobuf.ExtensionRegistryLite;
import com.google.protobuf.InvalidProtocolBufferException;
import java.io.IOException;
import java.io.InputStream;
import java.security.GeneralSecurityException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
* This instance of {@code KeyManager} generates new {@code Ed25519PrivateKey} keys and produces new
* instances of {@code Ed25519Sign}.
*/
public final class Ed25519PrivateKeyManager
extends PrivateKeyTypeManager<Ed25519PrivateKey, Ed25519PublicKey> {
Ed25519PrivateKeyManager() {
super(
Ed25519PrivateKey.class,
Ed25519PublicKey.class,
new KeyTypeManager.PrimitiveFactory<PublicKeySign, Ed25519PrivateKey>(PublicKeySign.class) {
@Override
public PublicKeySign getPrimitive(Ed25519PrivateKey keyProto)
throws GeneralSecurityException {
return new Ed25519Sign(keyProto.getKeyValue().toByteArray());
}
});
}
@Override
public String getKeyType() {
return "type.googleapis.com/google.crypto.tink.Ed25519PrivateKey";
}
@Override
public int getVersion() {
return 0;
}
@Override
public Ed25519PublicKey getPublicKey(Ed25519PrivateKey key) throws GeneralSecurityException {
return key.getPublicKey();
}
@Override
public KeyMaterialType keyMaterialType() {
return KeyMaterialType.ASYMMETRIC_PRIVATE;
}
@Override
public Ed25519PrivateKey parseKey(ByteString byteString) throws InvalidProtocolBufferException {
return Ed25519PrivateKey.parseFrom(byteString, ExtensionRegistryLite.getEmptyRegistry());
}
@Override
public void validateKey(Ed25519PrivateKey keyProto) throws GeneralSecurityException {
Validators.validateVersion(keyProto.getVersion(), getVersion());
new Ed25519PublicKeyManager().validateKey(keyProto.getPublicKey());
if (keyProto.getKeyValue().size() != Ed25519Sign.SECRET_KEY_LEN) {
throw new GeneralSecurityException("invalid Ed25519 private key: incorrect key length");
}
}
@Override
public KeyFactory<Ed25519KeyFormat, Ed25519PrivateKey> keyFactory() {
return new KeyFactory<Ed25519KeyFormat, Ed25519PrivateKey>(Ed25519KeyFormat.class) {
@Override
public void validateKeyFormat(Ed25519KeyFormat format) throws GeneralSecurityException {}
@Override
public Ed25519KeyFormat parseKeyFormat(ByteString byteString)
throws InvalidProtocolBufferException {
return Ed25519KeyFormat.parseFrom(byteString, ExtensionRegistryLite.getEmptyRegistry());
}
@Override
public Ed25519PrivateKey createKey(Ed25519KeyFormat format) throws GeneralSecurityException {
Ed25519Sign.KeyPair keyPair = Ed25519Sign.KeyPair.newKeyPair();
Ed25519PublicKey publicKey =
Ed25519PublicKey.newBuilder()
.setVersion(getVersion())
.setKeyValue(ByteString.copyFrom(keyPair.getPublicKey()))
.build();
return Ed25519PrivateKey.newBuilder()
.setVersion(getVersion())
.setKeyValue(ByteString.copyFrom(keyPair.getPrivateKey()))
.setPublicKey(publicKey)
.build();
}
@Override
public Ed25519PrivateKey deriveKey(Ed25519KeyFormat format, InputStream inputStream)
throws GeneralSecurityException {
Validators.validateVersion(format.getVersion(), getVersion());
byte[] pseudorandomness = new byte[Ed25519Sign.SECRET_KEY_LEN];
try {
int read = inputStream.read(pseudorandomness);
if (read != Ed25519Sign.SECRET_KEY_LEN) {
throw new GeneralSecurityException("Not enough pseudorandomness given");
}
Ed25519Sign.KeyPair keyPair = Ed25519Sign.KeyPair.newKeyPairFromSeed(pseudorandomness);
Ed25519PublicKey publicKey =
Ed25519PublicKey.newBuilder()
.setVersion(getVersion())
.setKeyValue(ByteString.copyFrom(keyPair.getPublicKey()))
.build();
return Ed25519PrivateKey.newBuilder()
.setVersion(getVersion())
.setKeyValue(ByteString.copyFrom(keyPair.getPrivateKey()))
.setPublicKey(publicKey)
.build();
} catch (IOException e) {
throw new GeneralSecurityException("Reading pseudorandomness failed", e);
}
}
@Override
public Map<String, KeyFactory.KeyFormat<Ed25519KeyFormat>> keyFormats()
throws GeneralSecurityException {
Map<String, KeyFactory.KeyFormat<Ed25519KeyFormat>> result = new HashMap<>();
result.put(
"ED25519",
new KeyFormat<>(
Ed25519KeyFormat.getDefaultInstance(), KeyTemplate.OutputPrefixType.TINK));
result.put(
"ED25519_RAW",
new KeyFormat<>(
Ed25519KeyFormat.getDefaultInstance(), KeyTemplate.OutputPrefixType.RAW));
// This is identical to ED25519_RAW.
// It is needed to maintain backward compatibility with SignatureKeyTemplates.
// TODO(b/185475349): remove this in 2.0.0.
result.put(
"ED25519WithRawOutput",
new KeyFormat<>(
Ed25519KeyFormat.getDefaultInstance(), KeyTemplate.OutputPrefixType.RAW));
return Collections.unmodifiableMap(result);
}
};
}
/**
* Registers the {@link Ed25519PrivateKeyManager} and the {@link Ed25519PublicKeyManager} with the
* registry, so that the the Ed25519-Keys can be used with Tink.
*/
public static void registerPair(boolean newKeyAllowed) throws GeneralSecurityException {
Registry.registerAsymmetricKeyManagers(
new Ed25519PrivateKeyManager(), new Ed25519PublicKeyManager(), newKeyAllowed);
}
/**
* @return A {@link KeyTemplate} that generates new instances of ED25519 keys.
* @deprecated use {@code KeyTemplates.get("ED25519")}
*/
@Deprecated
public static final KeyTemplate ed25519Template() {
return KeyTemplate.create(
new Ed25519PrivateKeyManager().getKeyType(),
/*value=*/ new byte[0],
KeyTemplate.OutputPrefixType.TINK);
}
/**
* @return A {@link KeyTemplate} that generates new instances of Ed25519 keys. Keys generated from
* this template creates raw signatures of exactly 64 bytes. It's compatible with most other
* libraries.
* @deprecated use {@code KeyTemplates.get("ED25519_RAW")}
*/
@Deprecated
public static final KeyTemplate rawEd25519Template() {
return KeyTemplate.create(
new Ed25519PrivateKeyManager().getKeyType(),
/*value=*/ new byte[0],
KeyTemplate.OutputPrefixType.RAW);
}
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.client.cache;
import java.util.AbstractQueue;
import java.util.Collection;
import java.util.Iterator;
import java.util.Queue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* Abstract implementation of a BlockingQueue bounded by the size of elements,
* works similar to LinkedBlockingQueue except that capacity is bounded by the size in bytes of the elements in the queue.
*/
public abstract class BytesBoundedLinkedQueue<E> extends AbstractQueue<E> implements BlockingQueue<E>
{
private final Queue<E> delegate;
private final AtomicLong currentSize = new AtomicLong(0);
private final Lock putLock = new ReentrantLock();
private final Condition notFull = putLock.newCondition();
private final Lock takeLock = new ReentrantLock();
private final Condition notEmpty = takeLock.newCondition();
private final AtomicInteger elementCount = new AtomicInteger(0);
private long capacity;
public BytesBoundedLinkedQueue(long capacity)
{
delegate = new ConcurrentLinkedQueue<>();
this.capacity = capacity;
}
private static void checkNotNull(Object v)
{
if (v == null) {
throw new NullPointerException();
}
}
private void checkSize(E e)
{
if (getBytesSize(e) > capacity) {
throw new IllegalArgumentException(
String.format("cannot add element of size[%d] greater than capacity[%d]", getBytesSize(e), capacity)
);
}
}
public abstract long getBytesSize(E e);
public void elementAdded(E e)
{
currentSize.addAndGet(getBytesSize(e));
elementCount.getAndIncrement();
}
public void elementRemoved(E e)
{
currentSize.addAndGet(-1 * getBytesSize(e));
elementCount.getAndDecrement();
}
private void fullyUnlock()
{
takeLock.unlock();
putLock.unlock();
}
private void fullyLock()
{
takeLock.lock();
putLock.lock();
}
private void signalNotEmpty()
{
takeLock.lock();
try {
notEmpty.signal();
}
finally {
takeLock.unlock();
}
}
private void signalNotFull()
{
putLock.lock();
try {
notFull.signal();
}
finally {
putLock.unlock();
}
}
@Override
public int size()
{
return elementCount.get();
}
@Override
public void put(E e) throws InterruptedException
{
while (!offer(e, Long.MAX_VALUE, TimeUnit.NANOSECONDS)) {
// keep trying until added successfully
}
}
@Override
public boolean offer(
E e, long timeout, TimeUnit unit
) throws InterruptedException
{
checkNotNull(e);
checkSize(e);
long nanos = unit.toNanos(timeout);
boolean added = false;
putLock.lockInterruptibly();
try {
while (currentSize.get() + getBytesSize(e) > capacity) {
if (nanos <= 0) {
return false;
}
nanos = notFull.awaitNanos(nanos);
}
delegate.add(e);
elementAdded(e);
added = true;
}
finally {
putLock.unlock();
}
if (added) {
signalNotEmpty();
}
return added;
}
@Override
public E take() throws InterruptedException
{
E e;
takeLock.lockInterruptibly();
try {
while (elementCount.get() == 0) {
notEmpty.await();
}
e = delegate.remove();
elementRemoved(e);
}
finally {
takeLock.unlock();
}
if (e != null) {
signalNotFull();
}
return e;
}
@Override
public int remainingCapacity()
{
int delegateSize;
long currentByteSize;
fullyLock();
try {
delegateSize = elementCount.get();
currentByteSize = currentSize.get();
}
finally {
fullyUnlock();
}
// return approximate remaining capacity based on current data
if (delegateSize == 0) {
return (int) Math.min(capacity, Integer.MAX_VALUE);
} else if (capacity > currentByteSize) {
long averageElementSize = currentByteSize / delegateSize;
return (int) ((capacity - currentByteSize) / averageElementSize);
} else {
// queue full
return 0;
}
}
@Override
public int drainTo(Collection<? super E> c)
{
return drainTo(c, Integer.MAX_VALUE);
}
@Override
public int drainTo(Collection<? super E> c, int maxElements)
{
if (c == null) {
throw new NullPointerException();
}
if (c == this) {
throw new IllegalArgumentException();
}
int n = 0;
takeLock.lock();
try {
// elementCount.get provides visibility to first n Nodes
n = Math.min(maxElements, elementCount.get());
if (n < 0) {
return 0;
}
for (int i = 0; i < n; i++) {
E e = delegate.remove();
elementRemoved(e);
c.add(e);
}
}
finally {
takeLock.unlock();
}
if (n > 0) {
signalNotFull();
}
return n;
}
@Override
public boolean offer(E e)
{
checkNotNull(e);
checkSize(e);
boolean added = false;
putLock.lock();
try {
if (currentSize.get() + getBytesSize(e) > capacity) {
return false;
} else {
added = delegate.add(e);
if (added) {
elementAdded(e);
}
}
}
finally {
putLock.unlock();
}
if (added) {
signalNotEmpty();
}
return added;
}
@Override
public E poll()
{
E e = null;
takeLock.lock();
try {
e = delegate.poll();
if (e != null) {
elementRemoved(e);
}
}
finally {
takeLock.unlock();
}
if (e != null) {
signalNotFull();
}
return e;
}
@Override
public E poll(long timeout, TimeUnit unit) throws InterruptedException
{
long nanos = unit.toNanos(timeout);
E e = null;
takeLock.lockInterruptibly();
try {
while (elementCount.get() == 0) {
if (nanos <= 0) {
return null;
}
nanos = notEmpty.awaitNanos(nanos);
}
e = delegate.poll();
if (e != null) {
elementRemoved(e);
}
}
finally {
takeLock.unlock();
}
if (e != null) {
signalNotFull();
}
return e;
}
@Override
public E peek()
{
takeLock.lock();
try {
return delegate.peek();
}
finally {
takeLock.unlock();
}
}
    /**
     * Returns an iterator over the elements in this queue; each iterator
     * operation acquires both locks (see {@code Itr}).
     */
    @Override
    public Iterator<E> iterator()
    {
        return new Itr(delegate.iterator());
    }
    /**
     * Iterator wrapper that performs every operation under the fully-locked
     * state (both put and take locks), so the underlying delegate iterator is
     * never advanced or mutated concurrently with queue operations.
     */
    private class Itr implements Iterator<E>
    {
        // iterator of the underlying delegate queue
        private final Iterator<E> delegate;
        // last element handed out by next(); needed by remove() for accounting
        private E lastReturned;
        Itr(Iterator<E> delegate)
        {
            this.delegate = delegate;
        }
        @Override
        public boolean hasNext()
        {
            fullyLock();
            try {
                return delegate.hasNext();
            }
            finally {
                fullyUnlock();
            }
        }
        @Override
        public E next()
        {
            fullyLock();
            try {
                this.lastReturned = delegate.next();
                return lastReturned;
            }
            finally {
                fullyUnlock();
            }
        }
        @Override
        public void remove()
        {
            fullyLock();
            try {
                // standard Iterator contract: remove() before next() is illegal
                if (this.lastReturned == null) {
                    throw new IllegalStateException();
                }
                delegate.remove();
                // keep byte/element counters in sync and wake blocked producers
                elementRemoved(lastReturned);
                signalNotFull();
                lastReturned = null;
            }
            finally {
                fullyUnlock();
            }
        }
    }
}
| |
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package br.org.fiscal65.bitmaps;
import java.io.BufferedInputStream;
import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.reflect.Array;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
******************************************************************************
* Taken from the JB source code, can be found in:
* libcore/luni/src/main/java/libcore/io/DiskLruCache.java
* or direct link:
* https://android.googlesource.com/platform/libcore/+/android-4.1.1_r1/luni/src/main/java/libcore/io/DiskLruCache.java
******************************************************************************
*
* A cache that uses a bounded amount of space on a filesystem. Each cache
* entry has a string key and a fixed number of values. Values are byte
* sequences, accessible as streams or files. Each value must be between {@code
* 0} and {@code Integer.MAX_VALUE} bytes in length.
*
* <p>The cache stores its data in a directory on the filesystem. This
* directory must be exclusive to the cache; the cache may delete or overwrite
* files from its directory. It is an error for multiple processes to use the
* same cache directory at the same time.
*
* <p>This cache limits the number of bytes that it will store on the
* filesystem. When the number of stored bytes exceeds the limit, the cache will
* remove entries in the background until the limit is satisfied. The limit is
* not strict: the cache may temporarily exceed it while waiting for files to be
* deleted. The limit does not include filesystem overhead or the cache
* journal so space-sensitive applications should set a conservative limit.
*
* <p>Clients call {@link #edit} to create or update the values of an entry. An
* entry may have only one editor at one time; if a value is not available to be
* edited then {@link #edit} will return null.
* <ul>
* <li>When an entry is being <strong>created</strong> it is necessary to
* supply a full set of values; the empty value should be used as a
* placeholder if necessary.
* <li>When an entry is being <strong>edited</strong>, it is not necessary
* to supply data for every value; values default to their previous
* value.
* </ul>
* Every {@link #edit} call must be matched by a call to {@link Editor#commit}
* or {@link Editor#abort}. Committing is atomic: a read observes the full set
* of values as they were before or after the commit, but never a mix of values.
*
* <p>Clients call {@link #get} to read a snapshot of an entry. The read will
* observe the value at the time that {@link #get} was called. Updates and
* removals after the call do not impact ongoing reads.
*
* <p>This class is tolerant of some I/O errors. If files are missing from the
* filesystem, the corresponding entries will be dropped from the cache. If
* an error occurs while writing a cache value, the edit will fail silently.
* Callers should handle other problems by catching {@code IOException} and
* responding appropriately.
*/
public final class DiskLruCache implements Closeable {
    static final String JOURNAL_FILE = "journal";
    static final String JOURNAL_FILE_TMP = "journal.tmp";
    static final String MAGIC = "libcore.io.DiskLruCache";
    static final String VERSION_1 = "1";
    static final long ANY_SEQUENCE_NUMBER = -1;
    private static final String CLEAN = "CLEAN";
    private static final String DIRTY = "DIRTY";
    private static final String REMOVE = "REMOVE";
    private static final String READ = "READ";
    private static final Charset UTF_8 = Charset.forName("UTF-8");
    private static final int IO_BUFFER_SIZE = 8 * 1024;
    /*
     * This cache uses a journal file named "journal". A typical journal file
     * looks like this:
     *     libcore.io.DiskLruCache
     *     1
     *     100
     *     2
     *
     *     CLEAN 3400330d1dfc7f3f7f4b8d4d803dfcf6 832 21054
     *     DIRTY 335c4c6028171cfddfbaae1a9c313c52
     *     CLEAN 335c4c6028171cfddfbaae1a9c313c52 3934 2342
     *     REMOVE 335c4c6028171cfddfbaae1a9c313c52
     *     DIRTY 1ab96a171faeeee38496d8b330771a7a
     *     CLEAN 1ab96a171faeeee38496d8b330771a7a 1600 234
     *     READ 335c4c6028171cfddfbaae1a9c313c52
     *     READ 3400330d1dfc7f3f7f4b8d4d803dfcf6
     *
     * The first five lines of the journal form its header. They are the
     * constant string "libcore.io.DiskLruCache", the disk cache's version,
     * the application's version, the value count, and a blank line.
     *
     * Each of the subsequent lines in the file is a record of the state of a
     * cache entry. Each line contains space-separated values: a state, a key,
     * and optional state-specific values.
     *   o DIRTY lines track that an entry is actively being created or updated.
     *     Every successful DIRTY action should be followed by a CLEAN or REMOVE
     *     action. DIRTY lines without a matching CLEAN or REMOVE indicate that
     *     temporary files may need to be deleted.
     *   o CLEAN lines track a cache entry that has been successfully published
     *     and may be read. A publish line is followed by the lengths of each of
     *     its values.
     *   o READ lines track accesses for LRU.
     *   o REMOVE lines track entries that have been deleted.
     *
     * The journal file is appended to as cache operations occur. The journal may
     * occasionally be compacted by dropping redundant lines. A temporary file named
     * "journal.tmp" will be used during compaction; that file should be deleted if
     * it exists when the cache is opened.
     */
    private final File directory;
    private final File journalFile;
    private final File journalFileTmp;
    private final int appVersion;
    private final long maxSize;
    private final int valueCount;
    /** Total bytes currently stored, as tracked from journal/commit accounting. */
    private long size = 0;
    /** Null once the cache is closed; used as the "is closed" sentinel. */
    private Writer journalWriter;
    // accessOrder=true makes iteration order the LRU order (eldest first)
    private final LinkedHashMap<String, Entry> lruEntries
            = new LinkedHashMap<String, Entry>(0, 0.75f, true);
    /** Journal lines written since the last rebuild; drives compaction. */
    private int redundantOpCount;
    /**
     * To differentiate between old and current snapshots, each entry is given
     * a sequence number each time an edit is committed. A snapshot is stale if
     * its sequence number is not equal to its entry's sequence number.
     */
    private long nextSequenceNumber = 0;
    /* From java.util.Arrays */
    @SuppressWarnings("unchecked")
    private static <T> T[] copyOfRange(T[] original, int start, int end) {
        final int originalLength = original.length; // For exception priority compatibility.
        if (start > end) {
            throw new IllegalArgumentException();
        }
        if (start < 0 || start > originalLength) {
            throw new ArrayIndexOutOfBoundsException();
        }
        final int resultLength = end - start;
        final int copyLength = Math.min(resultLength, originalLength - start);
        final T[] result = (T[]) Array
                .newInstance(original.getClass().getComponentType(), resultLength);
        System.arraycopy(original, start, result, 0, copyLength);
        return result;
    }
    /**
     * Returns the remainder of 'reader' as a string, closing it when done.
     */
    public static String readFully(Reader reader) throws IOException {
        try {
            StringWriter writer = new StringWriter();
            char[] buffer = new char[1024];
            int count;
            while ((count = reader.read(buffer)) != -1) {
                writer.write(buffer, 0, count);
            }
            return writer.toString();
        } finally {
            reader.close();
        }
    }
    /**
     * Returns the ASCII characters up to but not including the next "\r\n", or
     * "\n".
     *
     * @throws java.io.EOFException if the stream is exhausted before the next newline
     *     character.
     */
    public static String readAsciiLine(InputStream in) throws IOException {
        // TODO: support UTF-8 here instead
        StringBuilder result = new StringBuilder(80);
        while (true) {
            int c = in.read();
            if (c == -1) {
                throw new EOFException();
            } else if (c == '\n') {
                break;
            }
            result.append((char) c);
        }
        // strip a trailing '\r' so "\r\n" and "\n" terminated lines look alike
        int length = result.length();
        if (length > 0 && result.charAt(length - 1) == '\r') {
            result.setLength(length - 1);
        }
        return result.toString();
    }
    /**
     * Closes 'closeable', ignoring any checked exceptions. Does nothing if 'closeable' is null.
     */
    public static void closeQuietly(Closeable closeable) {
        if (closeable != null) {
            try {
                closeable.close();
            } catch (RuntimeException rethrown) {
                throw rethrown;
            } catch (Exception ignored) {
                // best-effort close: checked failures are deliberately swallowed
            }
        }
    }
    /**
     * Recursively delete everything in {@code dir}.
     *
     * @throws IOException if {@code dir} is not a directory or a delete fails
     */
    // TODO: this should specify paths as Strings rather than as Files
    public static void deleteContents(File dir) throws IOException {
        File[] files = dir.listFiles();
        if (files == null) {
            throw new IllegalArgumentException("not a directory: " + dir);
        }
        for (File file : files) {
            if (file.isDirectory()) {
                deleteContents(file);
            }
            if (!file.delete()) {
                throw new IOException("failed to delete file: " + file);
            }
        }
    }
    /** This cache uses a single background thread to evict entries. */
    private final ExecutorService executorService = new ThreadPoolExecutor(0, 1,
            60L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
    private final Callable<Void> cleanupCallable = new Callable<Void>() {
        @Override public Void call() throws Exception {
            synchronized (DiskLruCache.this) {
                if (journalWriter == null) {
                    return null; // closed
                }
                trimToSize();
                if (journalRebuildRequired()) {
                    rebuildJournal();
                    redundantOpCount = 0;
                }
            }
            return null;
        }
    };
    private DiskLruCache(File directory, int appVersion, int valueCount, long maxSize) {
        this.directory = directory;
        this.appVersion = appVersion;
        this.journalFile = new File(directory, JOURNAL_FILE);
        this.journalFileTmp = new File(directory, JOURNAL_FILE_TMP);
        this.valueCount = valueCount;
        this.maxSize = maxSize;
    }
    /**
     * Opens the cache in {@code directory}, creating a cache if none exists
     * there.
     *
     * @param directory a writable directory
     * @param appVersion the application's version; a mismatch with the journal
     *     header invalidates the existing cache
     * @param valueCount the number of values per cache entry. Must be positive.
     * @param maxSize the maximum number of bytes this cache should use to store
     * @throws java.io.IOException if reading or writing the cache directory fails
     */
    public static DiskLruCache open(File directory, int appVersion, int valueCount, long maxSize)
            throws IOException {
        if (maxSize <= 0) {
            throw new IllegalArgumentException("maxSize <= 0");
        }
        if (valueCount <= 0) {
            throw new IllegalArgumentException("valueCount <= 0");
        }
        // prefer to pick up where we left off
        DiskLruCache cache = new DiskLruCache(directory, appVersion, valueCount, maxSize);
        if (cache.journalFile.exists()) {
            try {
                cache.readJournal();
                cache.processJournal();
                cache.journalWriter = new BufferedWriter(new FileWriter(cache.journalFile, true),
                        IO_BUFFER_SIZE);
                return cache;
            } catch (IOException journalIsCorrupt) {
                // a corrupt journal means the on-disk state is untrustworthy:
                // wipe it and start over below
                cache.delete();
            }
        }
        // create a new empty cache
        directory.mkdirs();
        cache = new DiskLruCache(directory, appVersion, valueCount, maxSize);
        cache.rebuildJournal();
        return cache;
    }
    /** Reads and validates the journal header, then replays every record. */
    private void readJournal() throws IOException {
        InputStream in = new BufferedInputStream(new FileInputStream(journalFile), IO_BUFFER_SIZE);
        try {
            String magic = readAsciiLine(in);
            String version = readAsciiLine(in);
            String appVersionString = readAsciiLine(in);
            String valueCountString = readAsciiLine(in);
            String blank = readAsciiLine(in);
            if (!MAGIC.equals(magic)
                    || !VERSION_1.equals(version)
                    || !Integer.toString(appVersion).equals(appVersionString)
                    || !Integer.toString(valueCount).equals(valueCountString)
                    || !"".equals(blank)) {
                // include every validated header field (the original message
                // omitted appVersionString, hiding app-version mismatches)
                throw new IOException("unexpected journal header: ["
                        + magic + ", " + version + ", " + appVersionString + ", "
                        + valueCountString + ", " + blank + "]");
            }
            while (true) {
                try {
                    readJournalLine(readAsciiLine(in));
                } catch (EOFException endOfJournal) {
                    break; // normal termination: journal is fully consumed
                }
            }
        } finally {
            closeQuietly(in);
        }
    }
    /** Applies a single journal record to {@link #lruEntries}. */
    private void readJournalLine(String line) throws IOException {
        String[] parts = line.split(" ");
        if (parts.length < 2) {
            throw new IOException("unexpected journal line: " + line);
        }
        String key = parts[1];
        if (parts[0].equals(REMOVE) && parts.length == 2) {
            lruEntries.remove(key);
            return;
        }
        Entry entry = lruEntries.get(key);
        if (entry == null) {
            entry = new Entry(key);
            lruEntries.put(key, entry);
        }
        if (parts[0].equals(CLEAN) && parts.length == 2 + valueCount) {
            entry.readable = true;
            entry.currentEditor = null;
            entry.setLengths(copyOfRange(parts, 2, parts.length));
        } else if (parts[0].equals(DIRTY) && parts.length == 2) {
            entry.currentEditor = new Editor(entry);
        } else if (parts[0].equals(READ) && parts.length == 2) {
            // this work was already done by calling lruEntries.get()
        } else {
            throw new IOException("unexpected journal line: " + line);
        }
    }
    /**
     * Computes the initial size and collects garbage as a part of opening the
     * cache. Dirty entries are assumed to be inconsistent and will be deleted.
     */
    private void processJournal() throws IOException {
        deleteIfExists(journalFileTmp);
        for (Iterator<Entry> i = lruEntries.values().iterator(); i.hasNext(); ) {
            Entry entry = i.next();
            if (entry.currentEditor == null) {
                for (int t = 0; t < valueCount; t++) {
                    size += entry.lengths[t];
                }
            } else {
                // entry was mid-edit at shutdown: discard its files entirely
                entry.currentEditor = null;
                for (int t = 0; t < valueCount; t++) {
                    deleteIfExists(entry.getCleanFile(t));
                    deleteIfExists(entry.getDirtyFile(t));
                }
                i.remove();
            }
        }
    }
    /**
     * Creates a new journal that omits redundant information. This replaces the
     * current journal if it exists.
     */
    private synchronized void rebuildJournal() throws IOException {
        if (journalWriter != null) {
            journalWriter.close();
        }
        Writer writer = new BufferedWriter(new FileWriter(journalFileTmp), IO_BUFFER_SIZE);
        // close the temp writer even if a write fails, so the file handle
        // is not leaked (the original closed it only on the success path)
        try {
            writer.write(MAGIC);
            writer.write("\n");
            writer.write(VERSION_1);
            writer.write("\n");
            writer.write(Integer.toString(appVersion));
            writer.write("\n");
            writer.write(Integer.toString(valueCount));
            writer.write("\n");
            writer.write("\n");
            for (Entry entry : lruEntries.values()) {
                if (entry.currentEditor != null) {
                    writer.write(DIRTY + ' ' + entry.key + '\n');
                } else {
                    writer.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n');
                }
            }
        } finally {
            writer.close();
        }
        journalFileTmp.renameTo(journalFile);
        journalWriter = new BufferedWriter(new FileWriter(journalFile, true), IO_BUFFER_SIZE);
    }
    /** Deletes {@code file} if present; throws if the delete attempt fails. */
    private static void deleteIfExists(File file) throws IOException {
        if (file.exists() && !file.delete()) {
            throw new IOException("failed to delete file: " + file);
        }
    }
    /**
     * Returns a snapshot of the entry named {@code key}, or null if it doesn't
     * exist is not currently readable. If a value is returned, it is moved to
     * the head of the LRU queue.
     */
    public synchronized Snapshot get(String key) throws IOException {
        checkNotClosed();
        validateKey(key);
        Entry entry = lruEntries.get(key);
        if (entry == null) {
            return null;
        }
        if (!entry.readable) {
            return null;
        }
        /*
         * Open all streams eagerly to guarantee that we see a single published
         * snapshot. If we opened streams lazily then the streams could come
         * from different edits.
         */
        InputStream[] ins = new InputStream[valueCount];
        try {
            for (int i = 0; i < valueCount; i++) {
                ins[i] = new FileInputStream(entry.getCleanFile(i));
            }
        } catch (FileNotFoundException e) {
            // a file must have been deleted manually!
            return null;
        }
        redundantOpCount++;
        journalWriter.append(READ + ' ' + key + '\n');
        if (journalRebuildRequired()) {
            executorService.submit(cleanupCallable);
        }
        return new Snapshot(key, entry.sequenceNumber, ins);
    }
    /**
     * Returns an editor for the entry named {@code key}, or null if another
     * edit is in progress.
     */
    public Editor edit(String key) throws IOException {
        return edit(key, ANY_SEQUENCE_NUMBER);
    }
    private synchronized Editor edit(String key, long expectedSequenceNumber) throws IOException {
        checkNotClosed();
        validateKey(key);
        Entry entry = lruEntries.get(key);
        if (expectedSequenceNumber != ANY_SEQUENCE_NUMBER
                && (entry == null || entry.sequenceNumber != expectedSequenceNumber)) {
            return null; // snapshot is stale
        }
        if (entry == null) {
            entry = new Entry(key);
            lruEntries.put(key, entry);
        } else if (entry.currentEditor != null) {
            return null; // another edit is in progress
        }
        Editor editor = new Editor(entry);
        entry.currentEditor = editor;
        // flush the journal before creating files to prevent file leaks
        journalWriter.write(DIRTY + ' ' + key + '\n');
        journalWriter.flush();
        return editor;
    }
    /**
     * Returns the directory where this cache stores its data.
     */
    public File getDirectory() {
        return directory;
    }
    /**
     * Returns the maximum number of bytes that this cache should use to store
     * its data.
     */
    public long maxSize() {
        return maxSize;
    }
    /**
     * Returns the number of bytes currently being used to store the values in
     * this cache. This may be greater than the max size if a background
     * deletion is pending.
     */
    public synchronized long size() {
        return size;
    }
    /**
     * Finishes an edit: publishes dirty files as clean on success, discards
     * them on failure, updates size accounting, and appends the journal record.
     */
    private synchronized void completeEdit(Editor editor, boolean success) throws IOException {
        Entry entry = editor.entry;
        if (entry.currentEditor != editor) {
            throw new IllegalStateException();
        }
        // if this edit is creating the entry for the first time, every index must have a value
        if (success && !entry.readable) {
            for (int i = 0; i < valueCount; i++) {
                if (!entry.getDirtyFile(i).exists()) {
                    editor.abort();
                    throw new IllegalStateException("edit didn't create file " + i);
                }
            }
        }
        for (int i = 0; i < valueCount; i++) {
            File dirty = entry.getDirtyFile(i);
            if (success) {
                if (dirty.exists()) {
                    File clean = entry.getCleanFile(i);
                    dirty.renameTo(clean);
                    long oldLength = entry.lengths[i];
                    long newLength = clean.length();
                    entry.lengths[i] = newLength;
                    size = size - oldLength + newLength;
                }
            } else {
                deleteIfExists(dirty);
            }
        }
        redundantOpCount++;
        entry.currentEditor = null;
        // use logical OR (the original's bitwise '|' behaves the same on
        // booleans but short-circuit '||' is the idiomatic form)
        if (entry.readable || success) {
            entry.readable = true;
            journalWriter.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n');
            if (success) {
                entry.sequenceNumber = nextSequenceNumber++;
            }
        } else {
            lruEntries.remove(entry.key);
            journalWriter.write(REMOVE + ' ' + entry.key + '\n');
        }
        if (size > maxSize || journalRebuildRequired()) {
            executorService.submit(cleanupCallable);
        }
    }
    /**
     * We only rebuild the journal when it will halve the size of the journal
     * and eliminate at least 2000 ops.
     */
    private boolean journalRebuildRequired() {
        final int REDUNDANT_OP_COMPACT_THRESHOLD = 2000;
        return redundantOpCount >= REDUNDANT_OP_COMPACT_THRESHOLD
                && redundantOpCount >= lruEntries.size();
    }
    /**
     * Drops the entry for {@code key} if it exists and can be removed. Entries
     * actively being edited cannot be removed.
     *
     * @return true if an entry was removed.
     */
    public synchronized boolean remove(String key) throws IOException {
        checkNotClosed();
        validateKey(key);
        Entry entry = lruEntries.get(key);
        if (entry == null || entry.currentEditor != null) {
            return false;
        }
        for (int i = 0; i < valueCount; i++) {
            File file = entry.getCleanFile(i);
            if (!file.delete()) {
                throw new IOException("failed to delete " + file);
            }
            size -= entry.lengths[i];
            entry.lengths[i] = 0;
        }
        redundantOpCount++;
        journalWriter.append(REMOVE + ' ' + key + '\n');
        lruEntries.remove(key);
        if (journalRebuildRequired()) {
            executorService.submit(cleanupCallable);
        }
        return true;
    }
    /**
     * Returns true if this cache has been closed.
     */
    public boolean isClosed() {
        return journalWriter == null;
    }
    private void checkNotClosed() {
        if (journalWriter == null) {
            throw new IllegalStateException("cache is closed");
        }
    }
    /**
     * Force buffered operations to the filesystem.
     */
    public synchronized void flush() throws IOException {
        checkNotClosed();
        trimToSize();
        journalWriter.flush();
    }
    /**
     * Closes this cache. Stored values will remain on the filesystem.
     */
    @Override
    public synchronized void close() throws IOException {
        if (journalWriter == null) {
            return; // already closed
        }
        // copy to avoid ConcurrentModificationException: abort() mutates lruEntries
        for (Entry entry : new ArrayList<Entry>(lruEntries.values())) {
            if (entry.currentEditor != null) {
                entry.currentEditor.abort();
            }
        }
        trimToSize();
        journalWriter.close();
        journalWriter = null;
    }
    /** Evicts eldest entries until {@link #size} is within {@link #maxSize}. */
    private void trimToSize() throws IOException {
        while (size > maxSize) {
            // lruEntries is access-ordered, so the first entry is the eldest
            final Map.Entry<String, Entry> toEvict = lruEntries.entrySet().iterator().next();
            remove(toEvict.getKey());
        }
    }
    /**
     * Closes the cache and deletes all of its stored values. This will delete
     * all files in the cache directory including files that weren't created by
     * the cache.
     */
    public void delete() throws IOException {
        close();
        deleteContents(directory);
    }
    /** Keys become journal tokens, so whitespace/newlines would corrupt records. */
    private void validateKey(String key) {
        if (key.contains(" ") || key.contains("\n") || key.contains("\r")) {
            throw new IllegalArgumentException(
                    "keys must not contain spaces or newlines: \"" + key + "\"");
        }
    }
    private static String inputStreamToString(InputStream in) throws IOException {
        return readFully(new InputStreamReader(in, UTF_8));
    }
    /**
     * A snapshot of the values for an entry.
     */
    public final class Snapshot implements Closeable {
        private final String key;
        private final long sequenceNumber;
        private final InputStream[] ins;
        private Snapshot(String key, long sequenceNumber, InputStream[] ins) {
            this.key = key;
            this.sequenceNumber = sequenceNumber;
            this.ins = ins;
        }
        /**
         * Returns an editor for this snapshot's entry, or null if either the
         * entry has changed since this snapshot was created or if another edit
         * is in progress.
         */
        public Editor edit() throws IOException {
            return DiskLruCache.this.edit(key, sequenceNumber);
        }
        /**
         * Returns the unbuffered stream with the value for {@code index}.
         */
        public InputStream getInputStream(int index) {
            return ins[index];
        }
        /**
         * Returns the string value for {@code index}.
         */
        public String getString(int index) throws IOException {
            return inputStreamToString(getInputStream(index));
        }
        @Override public void close() {
            for (InputStream in : ins) {
                closeQuietly(in);
            }
        }
    }
    /**
     * Edits the values for an entry.
     */
    public final class Editor {
        private final Entry entry;
        // set by FaultHidingOutputStream when any write/flush/close fails;
        // checked by commit() to decide whether the edit is publishable
        private boolean hasErrors;
        private Editor(Entry entry) {
            this.entry = entry;
        }
        /**
         * Returns an unbuffered input stream to read the last committed value,
         * or null if no value has been committed.
         */
        public InputStream newInputStream(int index) throws IOException {
            synchronized (DiskLruCache.this) {
                if (entry.currentEditor != this) {
                    throw new IllegalStateException();
                }
                if (!entry.readable) {
                    return null;
                }
                return new FileInputStream(entry.getCleanFile(index));
            }
        }
        /**
         * Returns the last committed value as a string, or null if no value
         * has been committed.
         */
        public String getString(int index) throws IOException {
            InputStream in = newInputStream(index);
            return in != null ? inputStreamToString(in) : null;
        }
        /**
         * Returns a new unbuffered output stream to write the value at
         * {@code index}. If the underlying output stream encounters errors
         * when writing to the filesystem, this edit will be aborted when
         * {@link #commit} is called. The returned output stream does not throw
         * IOExceptions.
         */
        public OutputStream newOutputStream(int index) throws IOException {
            synchronized (DiskLruCache.this) {
                if (entry.currentEditor != this) {
                    throw new IllegalStateException();
                }
                return new FaultHidingOutputStream(new FileOutputStream(entry.getDirtyFile(index)));
            }
        }
        /**
         * Sets the value at {@code index} to {@code value}.
         */
        public void set(int index, String value) throws IOException {
            Writer writer = null;
            try {
                writer = new OutputStreamWriter(newOutputStream(index), UTF_8);
                writer.write(value);
            } finally {
                closeQuietly(writer);
            }
        }
        /**
         * Commits this edit so it is visible to readers. This releases the
         * edit lock so another edit may be started on the same key.
         */
        public void commit() throws IOException {
            if (hasErrors) {
                completeEdit(this, false);
                remove(entry.key); // the previous entry is stale
            } else {
                completeEdit(this, true);
            }
        }
        /**
         * Aborts this edit. This releases the edit lock so another edit may be
         * started on the same key.
         */
        public void abort() throws IOException {
            completeEdit(this, false);
        }
        /** Records write failures instead of throwing, per the newOutputStream contract. */
        private class FaultHidingOutputStream extends FilterOutputStream {
            private FaultHidingOutputStream(OutputStream out) {
                super(out);
            }
            @Override public void write(int oneByte) {
                try {
                    out.write(oneByte);
                } catch (IOException e) {
                    hasErrors = true;
                }
            }
            @Override public void write(byte[] buffer, int offset, int length) {
                try {
                    out.write(buffer, offset, length);
                } catch (IOException e) {
                    hasErrors = true;
                }
            }
            @Override public void close() {
                try {
                    out.close();
                } catch (IOException e) {
                    hasErrors = true;
                }
            }
            @Override public void flush() {
                try {
                    out.flush();
                } catch (IOException e) {
                    hasErrors = true;
                }
            }
        }
    }
    private final class Entry {
        private final String key;
        /** Lengths of this entry's files. */
        private final long[] lengths;
        /** True if this entry has ever been published */
        private boolean readable;
        /** The ongoing edit or null if this entry is not being edited. */
        private Editor currentEditor;
        /** The sequence number of the most recently committed edit to this entry. */
        private long sequenceNumber;
        private Entry(String key) {
            this.key = key;
            this.lengths = new long[valueCount];
        }
        /** Formats lengths as the journal's " len0 len1 ..." suffix for CLEAN lines. */
        public String getLengths() throws IOException {
            StringBuilder result = new StringBuilder();
            for (long size : lengths) {
                result.append(' ').append(size);
            }
            return result.toString();
        }
        /**
         * Set lengths using decimal numbers like "10123".
         */
        private void setLengths(String[] strings) throws IOException {
            if (strings.length != valueCount) {
                throw invalidLengths(strings);
            }
            try {
                for (int i = 0; i < strings.length; i++) {
                    lengths[i] = Long.parseLong(strings[i]);
                }
            } catch (NumberFormatException e) {
                throw invalidLengths(strings);
            }
        }
        private IOException invalidLengths(String[] strings) throws IOException {
            throw new IOException("unexpected journal line: " + Arrays.toString(strings));
        }
        public File getCleanFile(int i) {
            return new File(directory, key + "." + i);
        }
        public File getDirtyFile(int i) {
            return new File(directory, key + "." + i + ".tmp");
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.tests.integration.xa;
import javax.transaction.xa.XAException;
import javax.transaction.xa.XAResource;
import javax.transaction.xa.Xid;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.client.ClientConsumer;
import org.apache.activemq.artemis.api.core.client.ClientMessage;
import org.apache.activemq.artemis.api.core.client.ClientProducer;
import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.api.core.client.ClientSessionFactory;
import org.apache.activemq.artemis.api.core.client.MessageHandler;
import org.apache.activemq.artemis.api.core.client.ServerLocator;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.core.config.StoreConfiguration;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.core.transaction.impl.XidImpl;
import org.apache.activemq.artemis.ra.ActiveMQRAXAResource;
import org.apache.activemq.artemis.tests.integration.IntegrationTestLogger;
import org.apache.activemq.artemis.tests.util.ActiveMQTestBase;
import org.apache.activemq.artemis.utils.UUIDGenerator;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class BasicXaTest extends ActiveMQTestBase {
private static IntegrationTestLogger log = IntegrationTestLogger.LOGGER;
private final Map<String, AddressSettings> addressSettings = new HashMap<>();
private ActiveMQServer messagingService;
private ClientSession clientSession;
private ClientSessionFactory sessionFactory;
private Configuration configuration;
private final SimpleString atestq = new SimpleString("BasicXaTestq");
private ServerLocator locator;
private StoreConfiguration.StoreType storeType;
    /**
     * @param storeType the persistence store (FILE or DATABASE) this
     *     parameterized run exercises; see {@code data()}
     */
    public BasicXaTest(StoreConfiguration.StoreType storeType) {
        this.storeType = storeType;
    }
@Parameterized.Parameters(name = "storeType={0}")
public static Collection<Object[]> data() {
Object[][] params = new Object[][]{{StoreConfiguration.StoreType.FILE}, {StoreConfiguration.StoreType.DATABASE}};
return Arrays.asList(params);
}
    /**
     * Boots an ActiveMQ server with the parameterized store type, creates a
     * session factory and an XA client session, and declares the shared
     * durable test queue.
     */
    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        addressSettings.clear();
        if (storeType == StoreConfiguration.StoreType.DATABASE) {
            configuration = createDefaultJDBCConfig(true);
        } else {
            configuration = createDefaultNettyConfig();
        }
        messagingService = createServer(true, configuration, -1, -1, addressSettings);
        // start the server
        messagingService.start();
        locator = createInVMNonHALocator();
        sessionFactory = createSessionFactory(locator);
        // xa=true, autoCommitSends=false, autoCommitAcks=false
        clientSession = addClientSession(sessionFactory.createSession(true, false, false));
        clientSession.createQueue(atestq, atestq, null, true);
    }
@Test
public void testSendWithoutXID() throws Exception {
// Since both resources have same RM, TM will probably use 1PC optimization
ServerLocator locator = createInVMNonHALocator();
ClientSessionFactory factory = createSessionFactory(locator);
ClientSession session = addClientSession(factory.createSession(true, false, false));
session.createQueue("Test", "Test");
ClientProducer prod = session.createProducer("Test");
prod.send(session.createMessage(true));
session.start();
ClientConsumer cons = session.createConsumer("Test");
assertNotNull("Send went through an invalid XA Session", cons.receiveImmediate());
}
// An acknowledge on an XA session with no XID in progress must take effect
// immediately: after reconnecting, the message must be gone.
@Test
public void testACKWithoutXID() throws Exception {
// Since both resources have same RM, TM will probably use 1PC optimization
ClientSessionFactory factory = createSessionFactory(locator);
// Non-XA, auto-commit session just to seed the queue with one message.
ClientSession session = addClientSession(factory.createSession(false, true, true));
session.createQueue("Test", "Test");
ClientProducer prod = session.createProducer("Test");
prod.send(session.createMessage(true));
session.close();
// Consume and ack on an XA session without ever calling start(xid, ...).
session = addClientSession(factory.createSession(true, false, false));
session.start();
ClientConsumer cons = session.createConsumer("Test");
ClientMessage msg = cons.receive(5000);
assertNotNull(msg);
msg.acknowledge();
session.close();
// Verify the ack stuck: a fresh session must not see the message again.
session = addClientSession(factory.createSession(false, false, false));
session.start();
cons = session.createConsumer("Test");
msg = cons.receiveImmediate();
assertNull("Acknowledge went through invalid XA Session", msg);
}
// Two sessions created through distinct locators/factories against the same
// broker must report the same resource manager; this is what allows a real
// TM to use TMJOIN across them.
@Test
public void testIsSameRM() throws Exception {
try (ServerLocator locator = createNettyNonHALocator();
ServerLocator locator2 = createNettyNonHALocator()) {
ClientSessionFactory nettyFactory = createSessionFactory(locator);
ClientSessionFactory nettyFactory2 = createSessionFactory(locator2);
ClientSession session1 = nettyFactory.createSession(true, false, false);
ClientSession session2 = nettyFactory2.createSession(true, false, false);
assertTrue(session1.isSameRM(session2));
// isSameRM must also see through the resource-adapter XAResource wrapper.
ActiveMQRAXAResource activeMQRAXAResource = new ActiveMQRAXAResource(null, session2);
assertTrue(session1.isSameRM(activeMQRAXAResource));
}
}
// Interleaves two transactions on one session: xid is suspended, xid2 does
// work, then xid is completed first. Committing xid must deliver only m1;
// m2 stays invisible until xid2 is committed.
@Test
public void testXAInterleaveResourceSuspendWorkCommit() throws Exception {
Xid xid = newXID();
Xid xid2 = newXID();
ClientProducer clientProducer = clientSession.createProducer(atestq);
ClientSession recSession = sessionFactory.createSession();
recSession.start();
ClientConsumer clientConsumer = recSession.createConsumer(atestq);
ClientMessage m1 = createTextMessage(clientSession, "m1");
ClientMessage m2 = createTextMessage(clientSession, "m2");
clientSession.start(xid, XAResource.TMNOFLAGS);
clientProducer.send(m1);
// Suspend xid and start a second, interleaved transaction on the session.
clientSession.end(xid, XAResource.TMSUSPEND);
clientSession.start(xid2, XAResource.TMNOFLAGS);
clientProducer.send(m2);
// End the suspended xid (TMSUCCESS on a suspended branch) and 1PC-commit it.
clientSession.end(xid, XAResource.TMSUCCESS);
clientSession.commit(xid, true);
// Only m1 (from xid) is visible; m2 is still pending under xid2.
ClientMessage message = clientConsumer.receiveImmediate();
assertNotNull(message);
message = clientConsumer.receiveImmediate();
assertNull(message);
clientSession.end(xid2, XAResource.TMSUCCESS);
clientSession.commit(xid2, true);
// Now xid2's message arrives.
message = clientConsumer.receiveImmediate();
assertNotNull(message);
}
// After a prepared consume transaction is rolled back, the message must be
// redelivered and consumable/committable under a new XID.
@Test
public void testXAInterleaveResourceRollbackAfterPrepare() throws Exception {
Xid xid = newXID();
Xid xid2 = newXID();
Xid xid3 = newXID();
ClientProducer clientProducer = clientSession.createProducer(atestq);
ClientConsumer clientConsumer = clientSession.createConsumer(atestq);
ClientMessage m1 = createTextMessage(clientSession, "m1");
// Send m1 under xid with a full 2PC (prepare + commit).
clientSession.start(xid, XAResource.TMNOFLAGS);
clientProducer.send(m1);
clientSession.end(xid, XAResource.TMSUCCESS);
clientSession.prepare(xid);
clientSession.commit(xid, false);
clientSession.start();
// Consume under xid2 but roll back after prepare.
clientSession.start(xid2, XAResource.TMNOFLAGS);
ClientMessage m2 = clientConsumer.receiveImmediate();
assertNotNull(m2);
clientSession.end(xid2, XAResource.TMSUCCESS);
clientSession.prepare(xid2);
clientSession.rollback(xid2);
// The rollback must make the message available again for xid3.
clientSession.start(xid3, XAResource.TMNOFLAGS);
m2 = clientConsumer.receiveImmediate();
assertNotNull(m2);
clientSession.end(xid3, XAResource.TMSUCCESS);
clientSession.prepare(xid3);
clientSession.commit(xid3, false);
}
// A prepared send transaction must survive its session being closed: a new
// session can still commit the XID and the messages are then delivered in order.
@Test
public void testSendPrepareDoesntRollbackOnClose() throws Exception {
Xid xid = newXID();
ClientMessage m1 = createTextMessage(clientSession, "m1");
ClientMessage m2 = createTextMessage(clientSession, "m2");
ClientMessage m3 = createTextMessage(clientSession, "m3");
ClientMessage m4 = createTextMessage(clientSession, "m4");
ClientProducer clientProducer = clientSession.createProducer(atestq);
clientSession.start(xid, XAResource.TMNOFLAGS);
clientProducer.send(m1);
clientProducer.send(m2);
clientProducer.send(m3);
clientProducer.send(m4);
clientSession.end(xid, XAResource.TMSUCCESS);
clientSession.prepare(xid);
// Close the session while the transaction is in the prepared state.
clientSession.close();
clientSession = sessionFactory.createSession(true, false, false);
log.info("committing");
// The new session commits the XID prepared by the old (now closed) session.
clientSession.commit(xid, false);
clientSession.start();
ClientConsumer clientConsumer = clientSession.createConsumer(atestq);
// All four messages must arrive, in send order.
ClientMessage m = clientConsumer.receive(1000);
Assert.assertNotNull(m);
Assert.assertEquals(m.getBodyBuffer().readString(), "m1");
m = clientConsumer.receive(1000);
Assert.assertNotNull(m);
Assert.assertEquals(m.getBodyBuffer().readString(), "m2");
m = clientConsumer.receive(1000);
Assert.assertNotNull(m);
Assert.assertEquals(m.getBodyBuffer().readString(), "m3");
m = clientConsumer.receive(1000);
Assert.assertNotNull(m);
Assert.assertEquals(m.getBodyBuffer().readString(), "m4");
}
// Mirror of the previous test for the consume side: acks made inside a
// prepared transaction must survive the session's close and become final
// once another session commits the XID.
@Test
public void testReceivePrepareDoesntRollbackOnClose() throws Exception {
Xid xid = newXID();
// Seed four messages through a plain auto-commit session.
ClientSession clientSession2 = sessionFactory.createSession(false, true, true);
ClientProducer clientProducer = clientSession2.createProducer(atestq);
ClientMessage m1 = createTextMessage(clientSession2, "m1");
ClientMessage m2 = createTextMessage(clientSession2, "m2");
ClientMessage m3 = createTextMessage(clientSession2, "m3");
ClientMessage m4 = createTextMessage(clientSession2, "m4");
clientProducer.send(m1);
clientProducer.send(m2);
clientProducer.send(m3);
clientProducer.send(m4);
// Consume and ack all four inside the XA transaction.
clientSession.start(xid, XAResource.TMNOFLAGS);
clientSession.start();
ClientConsumer clientConsumer = clientSession.createConsumer(atestq);
ClientMessage m = clientConsumer.receive(1000);
Assert.assertNotNull(m);
m.acknowledge();
Assert.assertEquals(m.getBodyBuffer().readString(), "m1");
m = clientConsumer.receive(1000);
Assert.assertNotNull(m);
m.acknowledge();
Assert.assertEquals(m.getBodyBuffer().readString(), "m2");
m = clientConsumer.receive(1000);
Assert.assertNotNull(m);
m.acknowledge();
Assert.assertEquals(m.getBodyBuffer().readString(), "m3");
m = clientConsumer.receive(1000);
Assert.assertNotNull(m);
m.acknowledge();
Assert.assertEquals(m.getBodyBuffer().readString(), "m4");
clientSession.end(xid, XAResource.TMSUCCESS);
clientSession.prepare(xid);
// Close while prepared, then commit from a brand-new session.
clientSession.close();
clientSession = sessionFactory.createSession(true, false, false);
clientSession.commit(xid, false);
clientSession.start();
clientConsumer = clientSession.createConsumer(atestq);
// The acks were committed: the queue must now be empty.
m = clientConsumer.receiveImmediate();
Assert.assertNull(m);
clientSession2.close();
}
// Stress test: many concurrent XA consumers each ack and then roll back
// every delivery; no handler may ever fail to ack, and redelivery must keep
// happening until the latch (sessions * max-delivery-attempts) drains.
@Test
public void testReceiveRollback() throws Exception {
int numSessions = 100;
ClientSession clientSession2 = sessionFactory.createSession(false, true, true);
ClientProducer clientProducer = clientSession2.createProducer(atestq);
for (int i = 0; i < numSessions; i++) {
clientProducer.send(createTextMessage(clientSession2, "m" + i));
}
ClientSession[] clientSessions = new ClientSession[numSessions];
ClientConsumer[] clientConsumers = new ClientConsumer[numSessions];
TxMessageHandler[] handlers = new TxMessageHandler[numSessions];
// One countdown per expected delivery attempt across all sessions.
CountDownLatch latch = new CountDownLatch(numSessions * AddressSettings.DEFAULT_MAX_DELIVERY_ATTEMPTS);
for (int i = 0; i < clientSessions.length; i++) {
clientSessions[i] = sessionFactory.createSession(true, false, false);
clientConsumers[i] = clientSessions[i].createConsumer(atestq);
handlers[i] = new TxMessageHandler(clientSessions[i], latch);
clientConsumers[i].setMessageHandler(handlers[i]);
}
for (ClientSession session : clientSessions) {
session.start();
}
boolean ok = latch.await(10, TimeUnit.SECONDS);
Assert.assertTrue(ok);
for (TxMessageHandler messageHandler : handlers) {
Assert.assertFalse(messageHandler.failedToAck);
}
clientSession2.close();
for (ClientSession session : clientSessions) {
session.stop();
session.close();
}
}
// The following tests all delegate to multipleQueuesInternalTest(createQueues,
// suspend, recreateSession, isJoinSession, onePhase); each variant toggles a
// different combination of XA behaviors (see that method for the flag meanings).
@Test
public void testSendMultipleQueues() throws Exception {
multipleQueuesInternalTest(true, false, false, false, false);
}
@Test
public void testSendMultipleQueuesOnePhase() throws Exception {
// Second run reuses the queues (createQueues=false) but recreates the session.
multipleQueuesInternalTest(true, false, false, false, true);
multipleQueuesInternalTest(false, false, true, false, true);
}
@Test
public void testSendMultipleQueuesOnePhaseJoin() throws Exception {
multipleQueuesInternalTest(true, false, false, true, true);
multipleQueuesInternalTest(false, false, true, true, true);
}
@Test
public void testSendMultipleQueuesTwoPhaseJoin() throws Exception {
multipleQueuesInternalTest(true, false, false, true, false);
multipleQueuesInternalTest(false, false, true, true, false);
}
@Test
public void testSendMultipleQueuesRecreate() throws Exception {
multipleQueuesInternalTest(true, false, true, false, false);
}
@Test
public void testSendMultipleSuspend() throws Exception {
multipleQueuesInternalTest(true, true, false, false, false);
}
@Test
public void testSendMultipleSuspendRecreate() throws Exception {
multipleQueuesInternalTest(true, true, true, false, false);
}
// TMRESUME on a branch that is active (not suspended) must be rejected
// with XAER_PROTO per the XA spec.
@Test
public void testSendMultipleSuspendErrorCheck() throws Exception {
ClientSession session = null;
session = sessionFactory.createSession(true, false, false);
Xid xid = newXID();
session.start(xid, XAResource.TMNOFLAGS);
try {
session.start(xid, XAResource.TMRESUME);
Assert.fail("XAException expected");
} catch (XAException e) {
Assert.assertEquals(XAException.XAER_PROTO, e.errorCode);
}
session.close();
}
// Transactions that did no work ("empty" XIDs) must be cleanly rollback-able
// and commit-able, must survive server restarts without leaving anything
// behind, and must not show up in recovery scans.
@Test
public void testEmptyXID() throws Exception {
Xid xid = newXID();
ClientSession session = sessionFactory.createSession(true, false, false);
session.start(xid, XAResource.TMNOFLAGS);
session.end(xid, XAResource.TMSUCCESS);
session.rollback(xid);
session.close();
messagingService.stop();
// Restart the server with the same configuration and repeat the checks.
// NOTE(review): the original comment said "with a file persistence now",
// but the same (possibly JDBC) configuration is reused — wording was stale.
messagingService = createServer(true, configuration, -1, -1, addressSettings);
messagingService.start();
sessionFactory = createSessionFactory(locator);
xid = newXID();
session = sessionFactory.createSession(true, false, false);
session.start(xid, XAResource.TMNOFLAGS);
session.end(xid, XAResource.TMSUCCESS);
session.rollback(xid);
// Empty transaction through the full 2PC path: prepare then commit.
xid = newXID();
session.start(xid, XAResource.TMNOFLAGS);
session.end(xid, XAResource.TMSUCCESS);
session.prepare(xid);
session.commit(xid, false);
session.close();
// Empty transaction: prepare then rollback.
xid = newXID();
session = sessionFactory.createSession(true, false, false);
session.start(xid, XAResource.TMNOFLAGS);
session.end(xid, XAResource.TMSUCCESS);
session.prepare(xid);
session.rollback(xid);
session.close();
// NOTE(review): start() here is not preceded by a stop(); the server is
// still running at this point, so this start looks like a no-op — confirm.
messagingService.start();
sessionFactory = createSessionFactory(locator);
xid = newXID();
session = sessionFactory.createSession(true, false, false);
session.start(xid, XAResource.TMNOFLAGS);
session.end(xid, XAResource.TMSUCCESS);
session.rollback(xid);
session.close();
messagingService.stop();
messagingService.start();
// This is not really necessary... But since the server has stopped, I would prefer to keep recreating the factory
sessionFactory = createSessionFactory(locator);
session = sessionFactory.createSession(true, false, false);
// After all restarts, recovery must find no in-doubt transactions.
Xid[] xids = session.recover(XAResource.TMSTARTRSCAN);
Assert.assertEquals(0, xids.length);
session.close();
}
@Test
public void testFailXID() throws Exception {
    // A branch ended with TMFAIL must still be rollback-able without error.
    ClientSession session = sessionFactory.createSession(true, false, false);
    Xid xid = newXID();
    session.start(xid, XAResource.TMNOFLAGS);
    session.end(xid, XAResource.TMFAIL);
    session.rollback(xid);
    session.close();
}
// forget() on an XID the broker has never seen must fail with XAER_NOTA.
@Test
public void testForgetUnknownXID() throws Exception {
try {
clientSession.forget(newXID());
Assert.fail("should throw a XAERR_NOTA XAException");
} catch (XAException e) {
Assert.assertEquals(XAException.XAER_NOTA, e.errorCode);
}
}
// After a prepared transaction is heuristically committed via the management
// API, forget() must remove it from the heuristic-committed list.
@Test
public void testForgetHeuristicallyCommittedXID() throws Exception {
Xid xid = newXID();
clientSession.start(xid, XAResource.TMNOFLAGS);
clientSession.end(xid, XAResource.TMSUCCESS);
clientSession.prepare(xid);
String[] preparedTransactions = messagingService.getActiveMQServerControl().listPreparedTransactions();
Assert.assertEquals(1, preparedTransactions.length);
System.out.println(preparedTransactions[0]);
// Heuristic commit through the server control, not through the session.
Assert.assertTrue(messagingService.getActiveMQServerControl().commitPreparedTransaction(XidImpl.toBase64String(xid)));
Assert.assertEquals(1, messagingService.getActiveMQServerControl().listHeuristicCommittedTransactions().length);
clientSession.forget(xid);
Assert.assertEquals(0, messagingService.getActiveMQServerControl().listHeuristicCommittedTransactions().length);
}
// Rollback counterpart of the previous test: forget() must clear the XID
// from the heuristic-rolled-back list.
@Test
public void testForgetHeuristicallyRolledBackXID() throws Exception {
Xid xid = newXID();
clientSession.start(xid, XAResource.TMNOFLAGS);
clientSession.end(xid, XAResource.TMSUCCESS);
clientSession.prepare(xid);
String[] preparedTransactions = messagingService.getActiveMQServerControl().listPreparedTransactions();
Assert.assertEquals(1, preparedTransactions.length);
System.out.println(preparedTransactions[0]);
// Heuristic rollback through the server control, not through the session.
Assert.assertTrue(messagingService.getActiveMQServerControl().rollbackPreparedTransaction(XidImpl.toBase64String(xid)));
Assert.assertEquals(1, messagingService.getActiveMQServerControl().listHeuristicRolledBackTransactions().length);
clientSession.forget(xid);
Assert.assertEquals(0, messagingService.getActiveMQServerControl().listHeuristicRolledBackTransactions().length);
}
// Matrix of doCompleteHeuristicallyCompletedXID(isCommit, heuristicCommit):
// attempting commit/rollback on a transaction already heuristically completed
// must fail with the matching XA_HEURCOM / XA_HEURRB error code.
@Test
public void testCommitHeuristicallyCommittedXID() throws Exception {
doCompleteHeuristicallyCompletedXID(true, true);
}
@Test
public void testCommitHeuristicallyRolledBackXID() throws Exception {
doCompleteHeuristicallyCompletedXID(true, false);
}
// NOTE(review): method name has a typo ("Rollbackt"); left unchanged since
// renaming a public test method alters the externally visible test id.
@Test
public void testRollbacktHeuristicallyCommittedXID() throws Exception {
doCompleteHeuristicallyCompletedXID(false, true);
}
@Test
public void testRollbackHeuristicallyRolledBackXID() throws Exception {
doCompleteHeuristicallyCompletedXID(false, false);
}
// Two sessions joined on one XID (TMJOIN) send to two different addresses;
// a single commit must make all messages from both sessions visible, in order.
@Test
public void testSimpleJoin() throws Exception {
SimpleString ADDRESS1 = new SimpleString("Address-1");
SimpleString ADDRESS2 = new SimpleString("Address-2");
clientSession.createQueue(ADDRESS1, ADDRESS1, true);
clientSession.createQueue(ADDRESS2, ADDRESS2, true);
Xid xid = newXID();
ClientSession sessionA = sessionFactory.createSession(true, false, false);
sessionA.start(xid, XAResource.TMNOFLAGS);
ClientSession sessionB = sessionFactory.createSession(true, false, false);
// sessionB joins the transaction sessionA started.
sessionB.start(xid, XAResource.TMJOIN);
ClientProducer prodA = sessionA.createProducer(ADDRESS1);
ClientProducer prodB = sessionB.createProducer(ADDRESS2);
for (int i = 0; i < 100; i++) {
prodA.send(createTextMessage(sessionA, "A" + i));
prodB.send(createTextMessage(sessionB, "B" + i));
}
sessionA.end(xid, XAResource.TMSUCCESS);
sessionB.end(xid, XAResource.TMSUCCESS);
sessionB.close();
// One-phase commit from sessionA covers the joined sessionB's work too.
sessionA.commit(xid, true);
sessionA.close();
// Consume everything back under a fresh XID to verify both streams.
xid = newXID();
clientSession.start(xid, XAResource.TMNOFLAGS);
ClientConsumer cons1 = clientSession.createConsumer(ADDRESS1);
ClientConsumer cons2 = clientSession.createConsumer(ADDRESS2);
clientSession.start();
for (int i = 0; i < 100; i++) {
ClientMessage msg = cons1.receive(1000);
Assert.assertNotNull(msg);
Assert.assertEquals("A" + i, getTextMessage(msg));
msg.acknowledge();
msg = cons2.receive(1000);
Assert.assertNotNull(msg);
Assert.assertEquals("B" + i, getTextMessage(msg));
msg.acknowledge();
}
Assert.assertNull(cons1.receiveImmediate());
Assert.assertNull(cons2.receiveImmediate());
clientSession.end(xid, XAResource.TMSUCCESS);
clientSession.commit(xid, true);
clientSession.close();
}
/**
* Core driver for the "multiple queues" family of tests above. Runs two send
* rounds (first rolled back, second committed) against an address with
* {@code NUMBER_OF_QUEUES} bound queues, then two consume rounds with the
* same rollback-then-commit pattern, verifying message content and ordering.
*
* @param createQueues    create the queues (first invocation of a test only)
* @param suspend         exercise TMSUSPEND/TMRESUME mid-transaction
* @param recreateSession close and recreate the session between end/prepare
*                        and the final commit/rollback
* @param isJoinSession   have a second session TMJOIN each transaction
* @param onePhase        commit with the one-phase optimization (no prepare)
*/
protected void multipleQueuesInternalTest(final boolean createQueues,
final boolean suspend,
final boolean recreateSession,
final boolean isJoinSession,
final boolean onePhase) throws Exception {
int NUMBER_OF_MSGS = 100;
int NUMBER_OF_QUEUES = 10;
ClientSession session = null;
SimpleString ADDRESS = new SimpleString("Address");
ClientSession newJoinSession = null;
try {
session = sessionFactory.createSession(true, false, false);
if (createQueues) {
for (int i = 0; i < NUMBER_OF_QUEUES; i++) {
session.createQueue(ADDRESS, ADDRESS.concat(Integer.toString(i)), true);
if (isJoinSession) {
clientSession.createQueue(ADDRESS.concat("-join"), ADDRESS.concat("-join." + i), true);
}
}
}
// Send rounds: tr==0 is rolled back, tr==1 is committed.
for (int tr = 0; tr < 2; tr++) {
Xid xid = newXID();
session.start(xid, XAResource.TMNOFLAGS);
ClientProducer prod = session.createProducer(ADDRESS);
for (int nmsg = 0; nmsg < NUMBER_OF_MSGS; nmsg++) {
ClientMessage msg = createTextMessage(session, "SimpleMessage" + nmsg);
prod.send(msg);
}
if (suspend) {
session.end(xid, XAResource.TMSUSPEND);
session.start(xid, XAResource.TMRESUME);
}
prod.send(createTextMessage(session, "one more"));
prod.close();
if (isJoinSession) {
newJoinSession = sessionFactory.createSession(true, false, false);
// This is a basic condition, or a real TM wouldn't be able to join both sessions in a single
// transactions
Assert.assertTrue(session.isSameRM(newJoinSession));
newJoinSession.start(xid, XAResource.TMJOIN);
// The Join Session will have its own queue, as it's not possible to guarantee ordering since this
// producer will be using a different session
ClientProducer newProd = newJoinSession.createProducer(ADDRESS.concat("-join"));
newProd.send(createTextMessage(newJoinSession, "After Join"));
}
session.end(xid, XAResource.TMSUCCESS);
if (isJoinSession) {
newJoinSession.end(xid, XAResource.TMSUCCESS);
newJoinSession.close();
}
if (!onePhase) {
session.prepare(xid);
}
// The XID must remain completable even across a session recreation.
if (recreateSession) {
session.close();
session = sessionFactory.createSession(true, false, false);
}
if (tr == 0) {
session.rollback(xid);
} else {
session.commit(xid, onePhase);
}
}
// Consume rounds: i==0 is rolled back (messages redelivered), i==1 committed.
for (int i = 0; i < 2; i++) {
Xid xid = newXID();
session.start(xid, XAResource.TMNOFLAGS);
for (int nqueues = 0; nqueues < NUMBER_OF_QUEUES; nqueues++) {
ClientConsumer consumer = session.createConsumer(ADDRESS.concat(Integer.toString(nqueues)));
session.start();
for (int nmsg = 0; nmsg < NUMBER_OF_MSGS; nmsg++) {
ClientMessage msg = consumer.receive(1000);
Assert.assertNotNull(msg);
Assert.assertEquals("SimpleMessage" + nmsg, getTextMessage(msg));
msg.acknowledge();
}
ClientMessage msg = consumer.receive(1000);
Assert.assertNotNull(msg);
Assert.assertEquals("one more", getTextMessage(msg));
msg.acknowledge();
if (suspend) {
session.end(xid, XAResource.TMSUSPEND);
session.start(xid, XAResource.TMRESUME);
}
// NOTE(review): duplicate of the assertion above — redundant but harmless.
Assert.assertEquals("one more", getTextMessage(msg));
if (isJoinSession) {
ClientSession newSession = sessionFactory.createSession(true, false, false);
newSession.start(xid, XAResource.TMJOIN);
newSession.start();
ClientConsumer newConsumer = newSession.createConsumer(ADDRESS.concat("-join." + nqueues));
msg = newConsumer.receive(1000);
Assert.assertNotNull(msg);
Assert.assertEquals("After Join", getTextMessage(msg));
msg.acknowledge();
newSession.end(xid, XAResource.TMSUCCESS);
newSession.close();
}
Assert.assertNull(consumer.receiveImmediate());
consumer.close();
}
session.end(xid, XAResource.TMSUCCESS);
session.prepare(xid);
if (recreateSession) {
session.close();
session = sessionFactory.createSession(true, false, false);
}
if (i == 0) {
session.rollback(xid);
} else {
session.commit(xid, false);
}
}
} finally {
if (session != null) {
session.close();
}
}
}
/**
 * Prepares an empty transaction, heuristically completes it through the
 * management API (commit or rollback per {@code heuristicCommit}), then
 * verifies that a subsequent session-level commit/rollback (per
 * {@code isCommit}) fails with the matching XA_HEURCOM / XA_HEURRB code and
 * that the XID stays on the corresponding heuristic list.
 *
 * @param isCommit        whether the session then attempts commit (true) or rollback
 * @param heuristicCommit whether the management API heuristically commits (true) or rolls back
 */
private void doCompleteHeuristicallyCompletedXID(final boolean isCommit,
                                                 final boolean heuristicCommit) throws Exception {
    Xid xid = newXID();
    clientSession.start(xid, XAResource.TMNOFLAGS);
    clientSession.end(xid, XAResource.TMSUCCESS);
    clientSession.prepare(xid);
    String[] preparedTransactions = messagingService.getActiveMQServerControl().listPreparedTransactions();
    Assert.assertEquals(1, preparedTransactions.length);
    // Heuristic completion goes through the server control, not the session.
    if (heuristicCommit) {
        Assert.assertTrue(messagingService.getActiveMQServerControl().commitPreparedTransaction(XidImpl.toBase64String(xid)));
        Assert.assertEquals(1, messagingService.getActiveMQServerControl().listHeuristicCommittedTransactions().length);
    } else {
        Assert.assertTrue(messagingService.getActiveMQServerControl().rollbackPreparedTransaction(XidImpl.toBase64String(xid)));
        Assert.assertEquals(1, messagingService.getActiveMQServerControl().listHeuristicRolledBackTransactions().length);
    }
    // The XID must no longer appear as in-doubt.
    Assert.assertEquals(0, messagingService.getActiveMQServerControl().listPreparedTransactions().length);
    try {
        if (isCommit) {
            clientSession.commit(xid, false);
        } else {
            clientSession.rollback(xid);
        }
        // Fixed failure message: "not" -> "nor".
        Assert.fail("neither commit nor rollback must succeed on a heuristically completed tx");
    } catch (XAException e) {
        // The error code reports how the transaction was heuristically completed.
        if (heuristicCommit) {
            Assert.assertEquals(XAException.XA_HEURCOM, e.errorCode);
        } else {
            Assert.assertEquals(XAException.XA_HEURRB, e.errorCode);
        }
    }
    // The heuristic record survives the failed completion attempt.
    if (heuristicCommit) {
        Assert.assertEquals(1, messagingService.getActiveMQServerControl().listHeuristicCommittedTransactions().length);
    } else {
        Assert.assertEquals(1, messagingService.getActiveMQServerControl().listHeuristicRolledBackTransactions().length);
    }
}
/**
 * Message handler used by {@link #testReceiveRollback()}: for every delivery
 * it starts a throwaway XA branch, acks the message inside it, then rolls the
 * branch back so the message is redelivered, counting down the latch once per
 * attempt. Any failure during end/rollback is recorded in {@code failedToAck}.
 */
class TxMessageHandler implements MessageHandler {

    // Written on the session's delivery thread, read by the test thread after
    // the latch releases -> volatile for cross-thread visibility (was plain).
    volatile boolean failedToAck = false;
    final ClientSession session;
    private final CountDownLatch latch;

    TxMessageHandler(final ClientSession session, final CountDownLatch latch) {
        this.latch = latch;
        this.session = session;
    }

    @Override
    public void onMessage(final ClientMessage message) {
        // Fresh random XID per delivery; the branch is always rolled back.
        Xid xid = new XidImpl(UUIDGenerator.getInstance().generateStringUUID().getBytes(), 1, UUIDGenerator.getInstance().generateStringUUID().getBytes());
        try {
            session.start(xid, XAResource.TMNOFLAGS);
        } catch (XAException e) {
            e.printStackTrace();
        }
        try {
            message.acknowledge();
        } catch (ActiveMQException e) {
            BasicXaTest.log.error("Failed to process message", e);
        }
        try {
            session.end(xid, XAResource.TMSUCCESS);
            session.rollback(xid);
        } catch (Exception e) {
            e.printStackTrace();
            failedToAck = true;
            try {
                session.close();
            } catch (ActiveMQException e1) {
                // best-effort close; nothing more we can do inside the handler
            }
        }
        // Count every delivery attempt, successful or not.
        latch.countDown();
    }
}
}
| |
package pttravis;
import java.awt.EventQueue;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.Authenticator;
import java.net.PasswordAuthentication;
import java.net.SocketTimeoutException;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.X509TrustManager;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.X509Certificate;
import java.util.Properties;
import java.util.TreeSet;
import javax.net.ssl.TrustManager;
import javax.swing.JFrame;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import javax.swing.JList;
import javax.swing.JLabel;
import javax.swing.JTextField;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JScrollPane;
import javax.swing.SwingConstants;
import javax.swing.ListSelectionModel;
import javax.swing.UIManager;
import javax.swing.UIManager.LookAndFeelInfo;
import java.awt.GridBagLayout;
import java.awt.GridBagConstraints;
import java.awt.Insets;
/**
 * Small Swing utility that scrapes a TiVo's HTTPS "Now Playing" pages and
 * shows the recording titles in a sorted list. Connection settings (IP, MAK
 * password, port) are persisted to an XML properties file on shutdown.
 */
public class TiVoNowPlaying {

    private static final String URL_GROUP_FORMAT = "https://%s:%s/nowplaying/index.html";

    private String m_cfgFilename = null;
    private String m_pass = null;
    private String m_tivoIP = null;
    private String m_tivoPort = null;
    // TreeSet keeps titles sorted and de-duplicated for display.
    private TreeSet<String> m_shows = new TreeSet<String>();

    private JFrame frmTivoNowPlaying;
    private JTextField m_ipTextField;
    private JTextField m_passTextField;
    private JList<Object> m_showsList;

    /**
     * Scrapes one Now Playing page: collects recording titles and recurses
     * into folder rows. Rows without the expected cells and per-row timeouts
     * are skipped silently (best-effort scraping of TiVo's HTML layout).
     */
    private void parseURL(String url) throws IOException {
        Document doc = Jsoup.connect(url).get();
        Elements trElements = doc.getElementsByTag("tr");
        for (Element tr : trElements) {
            try {
                Elements tdElements = tr.getElementsByTag("td");
                // Column layout assumed from TiVo's page: title in cell 2,
                // folder link (if any) in cell 5 — TODO confirm across models.
                Element titleElement = tdElements.get(2);
                Element linkElement = tdElements.get(5);
                String title = titleElement.getElementsByTag("b").get(0).text();
                if ("folder".equals(linkElement.text())) {
                    String link_url = linkElement.getElementsByTag("a").get(0).attr("abs:href");
                    parseURL(link_url);
                } else {
                    m_shows.add(title);
                }
            } catch (IndexOutOfBoundsException ignored) {
                // header/spacer rows lack the expected cells — skip
            } catch (SocketTimeoutException ignored) {
                // skip a slow folder page rather than aborting the whole scan
            }
        }
    }

    /** Re-reads the connection fields, rescrapes the TiVo and refreshes the list. */
    private void getNowPlaying() {
        try {
            m_tivoIP = m_ipTextField.getText();
            m_pass = m_passTextField.getText();
            m_shows.clear();
            parseURL(String.format(URL_GROUP_FORMAT, m_tivoIP, m_tivoPort));
            m_showsList.setListData(m_shows.toArray());
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Installs HTTP basic auth (user "tivo" + MAK) and disables certificate
     * and hostname checks globally: the TiVo serves a self-signed certificate.
     * NOTE: this weakens TLS for the entire JVM — acceptable only because this
     * is a single-purpose desktop tool.
     */
    private void setupAuthenticationAndSSL() throws NoSuchAlgorithmException, KeyManagementException {
        TrustManager[] trustAllCerts = new TrustManager[]{new X509TrustManager() {
            public java.security.cert.X509Certificate[] getAcceptedIssuers() { return null; }
            public void checkClientTrusted(X509Certificate[] certs, String authType) {}
            public void checkServerTrusted(X509Certificate[] certs, String authType) {}
        }
        };
        SSLContext sc = SSLContext.getInstance("SSL");
        sc.init(null, trustAllCerts, new java.security.SecureRandom());
        HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
        HostnameVerifier allHostsValid = new HostnameVerifier() {
            public boolean verify(String hostname, SSLSession session) { return true; }
        };
        HttpsURLConnection.setDefaultHostnameVerifier(allHostsValid);
        Authenticator.setDefault(new Authenticator() {
            public PasswordAuthentication getPasswordAuthentication() {
                return (new PasswordAuthentication("tivo", m_pass.toCharArray()));
            }
        });
    }

    /** Entry point: builds and shows the window on the EDT. */
    public static void main(String[] args) {
        EventQueue.invokeLater(new Runnable() {
            public void run() {
                try {
                    TiVoNowPlaying window = new TiVoNowPlaying();
                    window.launchApplication();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /**
     * Create the application with the default config location
     * {@code <user.home>/.TiVo.xml} (falling back to the current directory).
     */
    public TiVoNowPlaying() {
        // BUG FIX: the ternary previously swallowed the filename — when
        // user.home was null the whole path collapsed to "." because
        // "cond ? \".\" : home + sep + file" binds the concatenation to the
        // else-branch only. Parenthesize so the filename is always appended.
        this((System.getProperty("user.home") == null ? "." : System.getProperty("user.home"))
            + File.separator + ".TiVo.xml");
    }

    /** Create the application reading/writing the given config file. */
    public TiVoNowPlaying(String cfgFilename) {
        m_cfgFilename = cfgFilename;
    }

    /** Loads config, wires persistence-on-exit, builds the UI and shows it. */
    public void launchApplication() throws KeyManagementException, NoSuchAlgorithmException {
        loadConfig(m_cfgFilename);
        setupShutdownHook(m_cfgFilename);
        setupGUI();
        setupAuthenticationAndSSL();
        // Auto-refresh only when a previous session left usable credentials.
        if (m_tivoIP.length() > 0 && m_pass.length() > 0) getNowPlaying();
        frmTivoNowPlaying.setVisible(true);
    }

    /**
     * Registers a JVM shutdown hook that persists the current settings. A hook
     * (rather than a window listener) also covers platform-level quit paths,
     * e.g. the macOS application menu. Save failures are deliberately ignored.
     */
    private void setupShutdownHook(String filename) {
        final String cfg_filename = filename;
        Runnable runner = new Runnable() {
            public void run() {
                try {
                    Properties p = new Properties();
                    p.setProperty("ip", m_tivoIP);
                    p.setProperty("mak", m_pass);
                    p.setProperty("port", m_tivoPort);
                    p.storeToXML(new FileOutputStream(cfg_filename), null);
                } catch (Exception ignored) {
                }
            }
        };
        Runtime.getRuntime().addShutdownHook(new Thread(runner, "Shutdown"));
    }

    /** Loads settings from the XML properties file; missing file/keys fall back to defaults. */
    private void loadConfig(String filename) {
        Properties p = new Properties();
        try {
            p.loadFromXML(new FileInputStream(filename));
        } catch (Exception ignored) {
            // first run or unreadable config — defaults below apply
        }
        m_tivoIP = p.getProperty("ip", "");
        m_pass = p.getProperty("mak", "");
        m_tivoPort = p.getProperty("port", "443");
    }

    /**
     * Initialize the contents of the frame: Nimbus L&F when available, a
     * GridBag row of IP/MAK fields on top, and the scrolling show list below.
     * Pressing Enter in either text field triggers a refresh.
     */
    private void setupGUI() {
        try {
            for (LookAndFeelInfo info : UIManager.getInstalledLookAndFeels()) {
                if ("Nimbus".equals(info.getName())) {
                    UIManager.setLookAndFeel(info.getClassName());
                    break;
                }
            }
        } catch (Exception ignored) {
            // fall back to the default look and feel
        }
        frmTivoNowPlaying = new JFrame();
        frmTivoNowPlaying.setTitle("TiVo Now Playing List");
        frmTivoNowPlaying.setBounds(100, 100, 500, 450);
        frmTivoNowPlaying.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        GridBagLayout gridBagLayout = new GridBagLayout();
        gridBagLayout.columnWidths = new int[]{31, 134, 40, 134, 128, 0, 0};
        gridBagLayout.rowHeights = new int[]{28, 347, 0};
        gridBagLayout.columnWeights = new double[]{0.0, 0.0, 0.0, 0.0, 0.0, 1.0, Double.MIN_VALUE};
        gridBagLayout.rowWeights = new double[]{0.0, 1.0, Double.MIN_VALUE};
        frmTivoNowPlaying.getContentPane().setLayout(gridBagLayout);
        JLabel lblTivoIp = new JLabel("IP:");
        lblTivoIp.setHorizontalAlignment(SwingConstants.TRAILING);
        GridBagConstraints gbc_lblTivoIp = new GridBagConstraints();
        gbc_lblTivoIp.fill = GridBagConstraints.HORIZONTAL;
        gbc_lblTivoIp.insets = new Insets(0, 0, 5, 5);
        gbc_lblTivoIp.gridx = 0;
        gbc_lblTivoIp.gridy = 0;
        frmTivoNowPlaying.getContentPane().add(lblTivoIp, gbc_lblTivoIp);
        m_ipTextField = new JTextField();
        m_ipTextField.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent arg0) {
                try {
                    getNowPlaying();
                } catch (Exception ignored) {
                }
            }
        });
        m_ipTextField.setText(m_tivoIP);
        GridBagConstraints gbc_ipTextField = new GridBagConstraints();
        gbc_ipTextField.fill = GridBagConstraints.BOTH;
        gbc_ipTextField.insets = new Insets(0, 0, 5, 5);
        gbc_ipTextField.gridx = 1;
        gbc_ipTextField.gridy = 0;
        frmTivoNowPlaying.getContentPane().add(m_ipTextField, gbc_ipTextField);
        m_ipTextField.setColumns(10);
        JLabel lblPassword = new JLabel("MAK:");
        lblPassword.setHorizontalAlignment(SwingConstants.TRAILING);
        GridBagConstraints gbc_lblPassword = new GridBagConstraints();
        gbc_lblPassword.fill = GridBagConstraints.HORIZONTAL;
        gbc_lblPassword.insets = new Insets(0, 0, 5, 5);
        gbc_lblPassword.gridx = 2;
        gbc_lblPassword.gridy = 0;
        frmTivoNowPlaying.getContentPane().add(lblPassword, gbc_lblPassword);
        m_passTextField = new JTextField();
        m_passTextField.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent arg0) {
                try {
                    getNowPlaying();
                } catch (Exception ignored) {
                }
            }
        });
        m_passTextField.setText(m_pass);
        GridBagConstraints gbc_passTextField = new GridBagConstraints();
        gbc_passTextField.fill = GridBagConstraints.BOTH;
        gbc_passTextField.insets = new Insets(0, 0, 5, 5);
        gbc_passTextField.gridx = 3;
        gbc_passTextField.gridy = 0;
        frmTivoNowPlaying.getContentPane().add(m_passTextField, gbc_passTextField);
        m_passTextField.setColumns(10);
        JScrollPane scrollPane = new JScrollPane();
        GridBagConstraints gbc_scrollPane = new GridBagConstraints();
        gbc_scrollPane.insets = new Insets(0, 0, 0, 5);
        gbc_scrollPane.fill = GridBagConstraints.BOTH;
        gbc_scrollPane.gridwidth = 6;
        gbc_scrollPane.gridx = 0;
        gbc_scrollPane.gridy = 1;
        frmTivoNowPlaying.getContentPane().add(scrollPane, gbc_scrollPane);
        m_showsList = new JList<Object>();
        m_showsList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
        scrollPane.setViewportView(m_showsList);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.permission.PermissionStatus;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.DeprecatedUTF8;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.LayoutVersion;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoUnderConstruction;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState;
import org.apache.hadoop.hdfs.server.namenode.snapshot.INodeDirectorySnapshottable;
import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotFSImageFormat;
import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotFSImageFormat.ReferenceMap;
import org.apache.hadoop.hdfs.util.XMLUtils;
import org.apache.hadoop.hdfs.util.XMLUtils.InvalidXmlException;
import org.apache.hadoop.hdfs.util.XMLUtils.Stanza;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.ShortWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableUtils;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import com.google.common.base.Preconditions;
/**
 * Static utility functions for serializing various pieces of data in the correct
 * format for the FSImage file.
 *
 * Some members are currently public for the benefit of the Offline Image Viewer
 * which is located outside of this package. These members should be made
 * package-protected when the OIV is refactored.
 *
 * NOTE: the write/read pairs in this class define the on-disk FSImage and edit
 * log record layouts; field order and widths must not change without a layout
 * version bump.
 */
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class FSImageSerialization {
  // Static-only class; never instantiated.
  private FSImageSerialization() {}
  /**
   * In order to reduce allocation, we reuse some static objects. However, the methods
   * in this class should be thread-safe since image-saving is multithreaded, so
   * we need to keep the static objects in a thread-local.
   */
  static private final ThreadLocal<TLData> TL_DATA =
    new ThreadLocal<TLData>() {
    @Override
    protected TLData initialValue() {
      return new TLData();
    }
  };
  /**
   * Simple container "struct" for threadlocal data.
   */
  static private final class TLData {
    // Reusable scratch objects, one set per thread (see TL_DATA above).
    final DeprecatedUTF8 U_STR = new DeprecatedUTF8();
    final ShortWritable U_SHORT = new ShortWritable();
    final IntWritable U_INT = new IntWritable();
    final LongWritable U_LONG = new LongWritable();
    final FsPermission FILE_PERM = new FsPermission((short) 0);
  }
  /**
   * Write the user, group and permission bits of an inode.
   * Reuses the thread-local FsPermission to avoid allocation.
   */
  private static void writePermissionStatus(INodeAttributes inode,
      DataOutput out) throws IOException {
    final FsPermission p = TL_DATA.get().FILE_PERM;
    p.fromShort(inode.getFsPermissionShort());
    PermissionStatus.write(out, inode.getUserName(), inode.getGroupName(), p);
  }
  /**
   * Write a block array as a count followed by each block's fields.
   * A null array is written as a zero count.
   */
  private static void writeBlocks(final Block[] blocks,
      final DataOutput out) throws IOException {
    if (blocks == null) {
      out.writeInt(0);
    } else {
      out.writeInt(blocks.length);
      for (Block blk : blocks) {
        blk.write(out);
      }
    }
  }
  // Helper function that reads in an INodeUnderConstruction
  // from the input stream
  //
  static INodeFile readINodeUnderConstruction(
      DataInput in, FSNamesystem fsNamesys, int imgVersion)
      throws IOException {
    byte[] name = readBytes(in);
    // Older layouts did not persist inode IDs; allocate a fresh one then.
    long inodeId = NameNodeLayoutVersion.supports(
        LayoutVersion.Feature.ADD_INODE_ID, imgVersion) ? in.readLong()
        : fsNamesys.allocateNewInodeId();
    short blockReplication = in.readShort();
    long modificationTime = in.readLong();
    long preferredBlockSize = in.readLong();
    int numBlocks = in.readInt();
    BlockInfo[] blocks = new BlockInfo[numBlocks];
    Block blk = new Block();
    int i = 0;
    // All blocks but the last are complete.
    for (; i < numBlocks-1; i++) {
      blk.readFields(in);
      blocks[i] = new BlockInfo(blk, blockReplication);
    }
    // last block is UNDER_CONSTRUCTION
    if(numBlocks > 0) {
      blk.readFields(in);
      blocks[i] = new BlockInfoUnderConstruction(
          blk, blockReplication, BlockUCState.UNDER_CONSTRUCTION, null);
    }
    PermissionStatus perm = PermissionStatus.read(in);
    String clientName = readString(in);
    String clientMachine = readString(in);
    // We previously stored locations for the last block, now we
    // just record that there are none
    int numLocs = in.readInt();
    assert numLocs == 0 : "Unexpected block locations";
    // Images do not store access time for under-construction files;
    // the modification time is used for both.
    INodeFile file = new INodeFile(inodeId, name, perm, modificationTime,
        modificationTime, blocks, blockReplication, preferredBlockSize);
    file.toUnderConstruction(clientName, clientMachine);
    return file;
  }
  // Helper function that writes an INodeUnderConstruction
  // into the output stream (mirror of readINodeUnderConstruction)
  //
  static void writeINodeUnderConstruction(DataOutputStream out, INodeFile cons,
      String path) throws IOException {
    writeString(path, out);
    out.writeLong(cons.getId());
    out.writeShort(cons.getFileReplication());
    out.writeLong(cons.getModificationTime());
    out.writeLong(cons.getPreferredBlockSize());
    writeBlocks(cons.getBlocks(), out);
    cons.getPermissionStatus().write(out);
    FileUnderConstructionFeature uc = cons.getFileUnderConstructionFeature();
    writeString(uc.getClientName(), out);
    writeString(uc.getClientMachine(), out);
    out.writeInt(0); // do not store locations of last block
  }
  /**
   * Serialize a {@link INodeFile} node
   * @param file The node to write
   * @param out The {@link DataOutput} where the fields are written
   * @param writeUnderConstruction Whether to write under-construction
   *        (lease holder) information
   */
  public static void writeINodeFile(INodeFile file, DataOutput out,
      boolean writeUnderConstruction) throws IOException {
    writeLocalName(file, out);
    out.writeLong(file.getId());
    out.writeShort(file.getFileReplication());
    out.writeLong(file.getModificationTime());
    out.writeLong(file.getAccessTime());
    out.writeLong(file.getPreferredBlockSize());
    writeBlocks(file.getBlocks(), out);
    SnapshotFSImageFormat.saveFileDiffList(file, out);
    if (writeUnderConstruction) {
      // Boolean flag distinguishes UC files; client info follows only if true.
      if (file.isUnderConstruction()) {
        out.writeBoolean(true);
        final FileUnderConstructionFeature uc = file.getFileUnderConstructionFeature();
        writeString(uc.getClientName(), out);
        writeString(uc.getClientMachine(), out);
      } else {
        out.writeBoolean(false);
      }
    }
    writePermissionStatus(file, out);
  }
  /** Serialize an {@link INodeFileAttributes}. */
  public static void writeINodeFileAttributes(INodeFileAttributes file,
      DataOutput out) throws IOException {
    // NOTE: field order here differs from writeINodeFile (permissions come
    // before the times); the matching reader must agree with this layout.
    writeLocalName(file, out);
    writePermissionStatus(file, out);
    out.writeLong(file.getModificationTime());
    out.writeLong(file.getAccessTime());
    out.writeShort(file.getFileReplication());
    out.writeLong(file.getPreferredBlockSize());
  }
  /** Write namespace and diskspace quota counts, in that order. */
  private static void writeQuota(Quota.Counts quota, DataOutput out)
      throws IOException {
    out.writeLong(quota.get(Quota.NAMESPACE));
    out.writeLong(quota.get(Quota.DISKSPACE));
  }
  /**
   * Serialize a {@link INodeDirectory}
   * @param node The node to write
   * @param out The {@link DataOutput} where the fields are written
   */
  public static void writeINodeDirectory(INodeDirectory node, DataOutput out)
      throws IOException {
    writeLocalName(node, out);
    out.writeLong(node.getId());
    out.writeShort(0); // replication
    out.writeLong(node.getModificationTime());
    out.writeLong(0); // access time
    out.writeLong(0); // preferred block size
    out.writeInt(-1); // # of blocks: -1 marks this record as a directory
    writeQuota(node.getQuotaCounts(), out);
    if (node instanceof INodeDirectorySnapshottable) {
      out.writeBoolean(true);
    } else {
      out.writeBoolean(false);
      out.writeBoolean(node.isWithSnapshot());
    }
    writePermissionStatus(node, out);
  }
  /**
   * Serialize a {@link INodeDirectoryAttributes}
   * @param a The attributes to write
   * @param out The {@link DataOutput} where the fields are written
   */
  public static void writeINodeDirectoryAttributes(
      INodeDirectoryAttributes a, DataOutput out) throws IOException {
    writeLocalName(a, out);
    writePermissionStatus(a, out);
    out.writeLong(a.getModificationTime());
    writeQuota(a.getQuotaCounts(), out);
  }
  /**
   * Serialize a {@link INodeSymlink} node
   * @param node The node to write
   * @param out The {@link DataOutput} where the fields are written
   */
  private static void writeINodeSymlink(INodeSymlink node, DataOutput out)
      throws IOException {
    writeLocalName(node, out);
    out.writeLong(node.getId());
    out.writeShort(0); // replication
    out.writeLong(0); // modification time
    out.writeLong(0); // access time
    out.writeLong(0); // preferred block size
    out.writeInt(-2); // # of blocks: -2 marks this record as a symlink
    Text.writeString(out, node.getSymlinkString());
    writePermissionStatus(node, out);
  }
  /** Serialize a {@link INodeReference} node */
  private static void writeINodeReference(INodeReference ref, DataOutput out,
      boolean writeUnderConstruction, ReferenceMap referenceMap
      ) throws IOException {
    writeLocalName(ref, out);
    out.writeLong(ref.getId());
    out.writeShort(0); // replication
    out.writeLong(0); // modification time
    out.writeLong(0); // access time
    out.writeLong(0); // preferred block size
    out.writeInt(-3); // # of blocks: -3 marks this record as a reference
    final boolean isWithName = ref instanceof INodeReference.WithName;
    out.writeBoolean(isWithName);
    if (!isWithName) {
      Preconditions.checkState(ref instanceof INodeReference.DstReference);
      // dst snapshot id
      out.writeInt(((INodeReference.DstReference) ref).getDstSnapshotId());
    } else {
      out.writeInt(((INodeReference.WithName) ref).getLastSnapshotId());
    }
    // The referred inode is written (or back-referenced) via the ReferenceMap
    // so that shared WithCount nodes are serialized only once.
    final INodeReference.WithCount withCount
        = (INodeReference.WithCount)ref.getReferredINode();
    referenceMap.writeINodeReferenceWithCount(withCount, out,
        writeUnderConstruction);
  }
  /**
   * Save one inode's attributes to the image.
   */
  public static void saveINode2Image(INode node, DataOutput out,
      boolean writeUnderConstruction, ReferenceMap referenceMap)
      throws IOException {
    // Dispatch on the concrete inode kind; each writer emits its own marker.
    if (node.isReference()) {
      writeINodeReference(node.asReference(), out, writeUnderConstruction,
          referenceMap);
    } else if (node.isDirectory()) {
      writeINodeDirectory(node.asDirectory(), out);
    } else if (node.isSymlink()) {
      writeINodeSymlink(node.asSymlink(), out);
    } else if (node.isFile()) {
      writeINodeFile(node.asFile(), out, writeUnderConstruction);
    }
  }
  // This should be reverted to package private once the ImageLoader
  // code is moved into this package. This method should not be called
  // by other code.
  /** Read a UTF8-encoded string, validating the encoding. */
  @SuppressWarnings("deprecation")
  public static String readString(DataInput in) throws IOException {
    DeprecatedUTF8 ustr = TL_DATA.get().U_STR;
    ustr.readFields(in);
    return ustr.toStringChecked();
  }
  /** Like {@link #readString(DataInput)}, but maps "" to null. */
  static String readString_EmptyAsNull(DataInput in) throws IOException {
    final String s = readString(in);
    return s.isEmpty()? null: s;
  }
  /** Write a string in the legacy UTF8 wire format. */
  @SuppressWarnings("deprecation")
  public static void writeString(String str, DataOutput out) throws IOException {
    DeprecatedUTF8 ustr = TL_DATA.get().U_STR;
    ustr.set(str);
    ustr.write(out);
  }
  /** read the long value */
  static long readLong(DataInput in) throws IOException {
    LongWritable uLong = TL_DATA.get().U_LONG;
    uLong.readFields(in);
    return uLong.get();
  }
  /** write the long value */
  static void writeLong(long value, DataOutputStream out) throws IOException {
    LongWritable uLong = TL_DATA.get().U_LONG;
    uLong.set(value);
    uLong.write(out);
  }
  /** read the int value */
  static int readInt(DataInput in) throws IOException {
    IntWritable uInt = TL_DATA.get().U_INT;
    uInt.readFields(in);
    return uInt.get();
  }
  /** write the int value */
  static void writeInt(int value, DataOutputStream out) throws IOException {
    IntWritable uInt = TL_DATA.get().U_INT;
    uInt.set(value);
    uInt.write(out);
  }
  /** read short value */
  static short readShort(DataInput in) throws IOException {
    ShortWritable uShort = TL_DATA.get().U_SHORT;
    uShort.readFields(in);
    return uShort.get();
  }
  /** write short value */
  static void writeShort(short value, DataOutputStream out) throws IOException {
    ShortWritable uShort = TL_DATA.get().U_SHORT;
    uShort.set(value);
    uShort.write(out);
  }
  // Same comments apply for this method as for readString()
  /** Read a legacy-UTF8 field and return its raw bytes. */
  @SuppressWarnings("deprecation")
  public static byte[] readBytes(DataInput in) throws IOException {
    DeprecatedUTF8 ustr = TL_DATA.get().U_STR;
    ustr.readFields(in);
    int len = ustr.getLength();
    // Copy out of the reusable buffer so the caller owns the bytes.
    byte[] bytes = new byte[len];
    System.arraycopy(ustr.getBytes(), 0, bytes, 0, len);
    return bytes;
  }
  /**
   * Reading the path from the image and converting it to byte[][] directly
   * this saves us an array copy and conversions to and from String
   * @param in input to read from
   * @return the array each element of which is a byte[] representation
   *            of a path component
   * @throws IOException
   */
  @SuppressWarnings("deprecation")
  public static byte[][] readPathComponents(DataInput in)
      throws IOException {
    DeprecatedUTF8 ustr = TL_DATA.get().U_STR;
    ustr.readFields(in);
    return DFSUtil.bytes2byteArray(ustr.getBytes(),
      ustr.getLength(), (byte) Path.SEPARATOR_CHAR);
  }
  /** Read a short-length-prefixed byte array (inverse of writeBytes). */
  public static byte[] readLocalName(DataInput in) throws IOException {
    byte[] createdNodeName = new byte[in.readShort()];
    in.readFully(createdNodeName);
    return createdNodeName;
  }
  /** Write an inode's local name as a short-length-prefixed byte array. */
  private static void writeLocalName(INodeAttributes inode, DataOutput out)
      throws IOException {
    final byte[] name = inode.getLocalNameBytes();
    writeBytes(name, out);
  }
  /** Write a byte array prefixed by its length as a short. */
  public static void writeBytes(byte[] data, DataOutput out)
      throws IOException {
    out.writeShort(data.length);
    out.write(data);
  }
  /**
   * Write an array of blocks as compactly as possible. This uses
   * delta-encoding for the generation stamp and size, following
   * the principle that genstamp increases relatively slowly,
   * and size is equal for all but the last block of a file.
   */
  public static void writeCompactBlockArray(
      Block[] blocks, DataOutputStream out) throws IOException {
    WritableUtils.writeVInt(out, blocks.length);
    Block prev = null;
    for (Block b : blocks) {
      // Deltas relative to the previous block; first block is delta from 0.
      long szDelta = b.getNumBytes() -
          (prev != null ? prev.getNumBytes() : 0);
      long gsDelta = b.getGenerationStamp() -
          (prev != null ? prev.getGenerationStamp() : 0);
      out.writeLong(b.getBlockId()); // blockid is random
      WritableUtils.writeVLong(out, szDelta);
      WritableUtils.writeVLong(out, gsDelta);
      prev = b;
    }
  }
  /**
   * Inverse of {@link #writeCompactBlockArray}.
   * Note: logVersion is currently unused here.
   */
  public static Block[] readCompactBlockArray(
      DataInput in, int logVersion) throws IOException {
    int num = WritableUtils.readVInt(in);
    if (num < 0) {
      throw new IOException("Invalid block array length: " + num);
    }
    Block prev = null;
    Block[] ret = new Block[num];
    for (int i = 0; i < num; i++) {
      long id = in.readLong();
      // Undo the delta-encoding against the previous block.
      long sz = WritableUtils.readVLong(in) +
          ((prev != null) ? prev.getNumBytes() : 0);
      long gs = WritableUtils.readVLong(in) +
          ((prev != null) ? prev.getGenerationStamp() : 0);
      ret[i] = new Block(id, sz, gs);
      prev = ret[i];
    }
    return ret;
  }
  /**
   * Write a CacheDirectiveInfo. Optional fields are guarded by a bitmask
   * (0x1 path, 0x2 replication, 0x4 pool, 0x8 expiration).
   */
  public static void writeCacheDirectiveInfo(DataOutputStream out,
      CacheDirectiveInfo directive) throws IOException {
    writeLong(directive.getId(), out);
    int flags =
        ((directive.getPath() != null) ? 0x1 : 0) |
        ((directive.getReplication() != null) ? 0x2 : 0) |
        ((directive.getPool() != null) ? 0x4 : 0) |
        ((directive.getExpiration() != null) ? 0x8 : 0);
    out.writeInt(flags);
    if (directive.getPath() != null) {
      writeString(directive.getPath().toUri().getPath(), out);
    }
    if (directive.getReplication() != null) {
      writeShort(directive.getReplication(), out);
    }
    if (directive.getPool() != null) {
      writeString(directive.getPool(), out);
    }
    if (directive.getExpiration() != null) {
      writeLong(directive.getExpiration().getMillis(), out);
    }
  }
  /** Inverse of {@link #writeCacheDirectiveInfo(DataOutputStream, CacheDirectiveInfo)}. */
  public static CacheDirectiveInfo readCacheDirectiveInfo(DataInput in)
      throws IOException {
    CacheDirectiveInfo.Builder builder =
        new CacheDirectiveInfo.Builder();
    builder.setId(readLong(in));
    int flags = in.readInt();
    if ((flags & 0x1) != 0) {
      builder.setPath(new Path(readString(in)));
    }
    if ((flags & 0x2) != 0) {
      builder.setReplication(readShort(in));
    }
    if ((flags & 0x4) != 0) {
      builder.setPool(readString(in));
    }
    if ((flags & 0x8) != 0) {
      builder.setExpiration(
          CacheDirectiveInfo.Expiration.newAbsolute(readLong(in)));
    }
    // Reject unknown bits so format drift fails loudly.
    if ((flags & ~0xF) != 0) {
      throw new IOException("unknown flags set in " +
          "ModifyCacheDirectiveInfoOp: " + flags);
    }
    return builder.build();
  }
  /** Parse a CacheDirectiveInfo from an XML edit-log stanza. */
  public static CacheDirectiveInfo readCacheDirectiveInfo(Stanza st)
      throws InvalidXmlException {
    CacheDirectiveInfo.Builder builder =
        new CacheDirectiveInfo.Builder();
    builder.setId(Long.parseLong(st.getValue("ID")));
    String path = st.getValueOrNull("PATH");
    if (path != null) {
      builder.setPath(new Path(path));
    }
    String replicationString = st.getValueOrNull("REPLICATION");
    if (replicationString != null) {
      builder.setReplication(Short.parseShort(replicationString));
    }
    String pool = st.getValueOrNull("POOL");
    if (pool != null) {
      builder.setPool(pool);
    }
    String expiryTime = st.getValueOrNull("EXPIRATION");
    if (expiryTime != null) {
      builder.setExpiration(CacheDirectiveInfo.Expiration.newAbsolute(
          Long.parseLong(expiryTime)));
    }
    return builder.build();
  }
  /** Emit a CacheDirectiveInfo as XML (inverse of the Stanza reader above). */
  public static void writeCacheDirectiveInfo(ContentHandler contentHandler,
      CacheDirectiveInfo directive) throws SAXException {
    XMLUtils.addSaxString(contentHandler, "ID",
        Long.toString(directive.getId()));
    if (directive.getPath() != null) {
      XMLUtils.addSaxString(contentHandler, "PATH",
          directive.getPath().toUri().getPath());
    }
    if (directive.getReplication() != null) {
      XMLUtils.addSaxString(contentHandler, "REPLICATION",
          Short.toString(directive.getReplication()));
    }
    if (directive.getPool() != null) {
      XMLUtils.addSaxString(contentHandler, "POOL", directive.getPool());
    }
    if (directive.getExpiration() != null) {
      XMLUtils.addSaxString(contentHandler, "EXPIRATION",
          "" + directive.getExpiration().getMillis());
    }
  }
  /**
   * Write a CachePoolInfo. Optional fields are guarded by a bitmask
   * (0x1 owner, 0x2 group, 0x4 mode, 0x8 limit, 0x10 max relative expiry).
   */
  public static void writeCachePoolInfo(DataOutputStream out, CachePoolInfo info)
      throws IOException {
    writeString(info.getPoolName(), out);
    final String ownerName = info.getOwnerName();
    final String groupName = info.getGroupName();
    final Long limit = info.getLimit();
    final FsPermission mode = info.getMode();
    final Long maxRelativeExpiry = info.getMaxRelativeExpiryMs();
    boolean hasOwner, hasGroup, hasMode, hasLimit, hasMaxRelativeExpiry;
    hasOwner = ownerName != null;
    hasGroup = groupName != null;
    hasMode = mode != null;
    hasLimit = limit != null;
    hasMaxRelativeExpiry = maxRelativeExpiry != null;
    int flags =
        (hasOwner ? 0x1 : 0) |
        (hasGroup ? 0x2 : 0) |
        (hasMode ? 0x4 : 0) |
        (hasLimit ? 0x8 : 0) |
        (hasMaxRelativeExpiry ? 0x10 : 0);
    writeInt(flags, out);
    if (hasOwner) {
      writeString(ownerName, out);
    }
    if (hasGroup) {
      writeString(groupName, out);
    }
    if (hasMode) {
      mode.write(out);
    }
    if (hasLimit) {
      writeLong(limit, out);
    }
    if (hasMaxRelativeExpiry) {
      writeLong(maxRelativeExpiry, out);
    }
  }
  /** Inverse of {@link #writeCachePoolInfo(DataOutputStream, CachePoolInfo)}. */
  public static CachePoolInfo readCachePoolInfo(DataInput in)
      throws IOException {
    String poolName = readString(in);
    CachePoolInfo info = new CachePoolInfo(poolName);
    int flags = readInt(in);
    if ((flags & 0x1) != 0) {
      info.setOwnerName(readString(in));
    }
    if ((flags & 0x2) != 0) {
      info.setGroupName(readString(in));
    }
    if ((flags & 0x4) != 0) {
      info.setMode(FsPermission.read(in));
    }
    if ((flags & 0x8) != 0) {
      info.setLimit(readLong(in));
    }
    if ((flags & 0x10) != 0) {
      info.setMaxRelativeExpiryMs(readLong(in));
    }
    // Reject unknown bits so format drift fails loudly.
    if ((flags & ~0x1F) != 0) {
      throw new IOException("Unknown flag in CachePoolInfo: " + flags);
    }
    return info;
  }
  /** Emit a CachePoolInfo as XML; absent optional fields are omitted. */
  public static void writeCachePoolInfo(ContentHandler contentHandler,
      CachePoolInfo info) throws SAXException {
    XMLUtils.addSaxString(contentHandler, "POOLNAME", info.getPoolName());
    final String ownerName = info.getOwnerName();
    final String groupName = info.getGroupName();
    final Long limit = info.getLimit();
    final FsPermission mode = info.getMode();
    final Long maxRelativeExpiry = info.getMaxRelativeExpiryMs();
    if (ownerName != null) {
      XMLUtils.addSaxString(contentHandler, "OWNERNAME", ownerName);
    }
    if (groupName != null) {
      XMLUtils.addSaxString(contentHandler, "GROUPNAME", groupName);
    }
    if (mode != null) {
      FSEditLogOp.fsPermissionToXml(contentHandler, mode);
    }
    if (limit != null) {
      XMLUtils.addSaxString(contentHandler, "LIMIT",
          Long.toString(limit));
    }
    if (maxRelativeExpiry != null) {
      XMLUtils.addSaxString(contentHandler, "MAXRELATIVEEXPIRY",
          Long.toString(maxRelativeExpiry));
    }
  }
  /** Parse a CachePoolInfo from an XML edit-log stanza. */
  public static CachePoolInfo readCachePoolInfo(Stanza st)
      throws InvalidXmlException {
    String poolName = st.getValue("POOLNAME");
    CachePoolInfo info = new CachePoolInfo(poolName);
    if (st.hasChildren("OWNERNAME")) {
      info.setOwnerName(st.getValue("OWNERNAME"));
    }
    if (st.hasChildren("GROUPNAME")) {
      info.setGroupName(st.getValue("GROUPNAME"));
    }
    if (st.hasChildren("MODE")) {
      info.setMode(FSEditLogOp.fsPermissionFromXml(st));
    }
    if (st.hasChildren("LIMIT")) {
      info.setLimit(Long.parseLong(st.getValue("LIMIT")));
    }
    if (st.hasChildren("MAXRELATIVEEXPIRY")) {
      info.setMaxRelativeExpiryMs(
          Long.parseLong(st.getValue("MAXRELATIVEEXPIRY")));
    }
    return info;
  }
}
| |
/*
* <!--
* ~ Copyright 2015-2017 OpenCB
* ~
* ~ Licensed under the Apache License, Version 2.0 (the "License");
* ~ you may not use this file except in compliance with the License.
* ~ You may obtain a copy of the License at
* ~
* ~ http://www.apache.org/licenses/LICENSE-2.0
* ~
* ~ Unless required by applicable law or agreed to in writing, software
* ~ distributed under the License is distributed on an "AS IS" BASIS,
* ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* ~ See the License for the specific language governing permissions and
* ~ limitations under the License.
* -->
*
*/
package org.opencb.biodata.models.variant;
import org.opencb.biodata.models.feature.Genotype;
import org.opencb.biodata.models.variant.exceptions.NonStandardCompliantSampleField;
import org.apache.commons.lang3.StringUtils;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Variant factory for "aggregated" VCF files: files that carry cohort-level
 * statistics in the INFO column instead of per-sample genotype columns.
 * Sample data parsing is therefore a no-op, and INFO attributes are copied
 * (mostly as-is) onto the study entry.
 *
 * @author Alejandro Aleman Ramos &lt;aaleman@cipf.es&gt;
 * @author Cristina Yenyxe Gonzalez Garcia &lt;cyenyxe@ebi.ac.uk&gt;
 * @author Jose Miguel Mut Lopez &lt;jmmut@ebi.ac.uk&gt;
 */
public class VariantAggregatedVcfFactory extends VariantVcfFactory {
    // Patterns for the genotype shorthand used by aggregated files, e.g.
    // "A" (a literal base), "R" (reference), "A1R" (first alternate + ref), etc.
    private final static Pattern singleNuc = Pattern.compile("^[ACTG]$");
    private final static Pattern singleRef = Pattern.compile("^R$");
    private final static Pattern refAlt = Pattern.compile("^([ACTG])([ACTG])$");
    private final static Pattern refRef = Pattern.compile("^R{2}$");
    private final static Pattern altNum = Pattern.compile("^A(\\d+)$");
    private final static Pattern altNumaltNum = Pattern.compile("^A(\\d+)A(\\d+)$");
    private final static Pattern altNumRef = Pattern.compile("^A(\\d+)R$");
    protected final static String COMMA = ",";
    protected final static String DOT = "\\."; // a literal dot. extracted to avoid confusion and avoid using the wrong "." with split()
    /**
     * Aggregated files have no per-sample columns, so there is nothing to
     * parse; the study entry is just given an empty samples-position map.
     */
    @Override
    protected void parseSplitSampleData(StudyEntry variant, VariantSource source, String[] fields,
            String reference, String[] alternateAlleles, VariantNormalizer.VariantKeyFields variantKeyFields)
            throws NonStandardCompliantSampleField {
        // Nothing to do
        variant.setSamplesPosition(Collections.emptyMap());
    }
    /**
     * Copies QUAL, FILTER, the raw source line and all INFO attributes onto
     * the study entry for this variant.
     */
    @Override
    protected void setOtherFields(Variant variant, VariantSource source, List<String> ids, float quality, String filter,
            String info, String format, int numAllele, String[] alternateAlleles, String line) {
        // Fields not affected by the structure of REF and ALT fields
        variant.setIds(ids);
        StudyEntry sourceEntry = variant.getSourceEntry(source.getFileId(), source.getStudyId());
        // QUAL of -1 (or below) encodes a missing quality value, so skip it.
        if (quality > -1) {
            sourceEntry.addAttribute(source.getFileId(), "QUAL", String.valueOf(quality));
        }
        if (!filter.isEmpty()) {
            sourceEntry.addAttribute(source.getFileId(), "FILTER", filter);
        }
        Map<String, String> infoMap = getInfoMap(info);
        addInfo(variant, sourceEntry, numAllele, infoMap);
        sourceEntry.setFormatAsString(format);
        // Keep the original VCF line for provenance.
        sourceEntry.addAttribute(source.getFileId(), "src", line);
    }
    /**
     * Splits a raw INFO column ("K1=V1;K2=V2;FLAG") into a key-value map.
     * Flags (keys without "=") map to the empty string.
     */
    public static Map<String, String> getInfoMap(String info) {
        String[] splittedInfo = info.split(";");
        Map<String, String> map = new HashMap<>(splittedInfo.length);
        for (String attribute : splittedInfo) {
            String[] assignment = attribute.split("=");
            if (assignment.length == 2) {
                map.put(assignment[0], assignment[1]);
            } else {
                map.put(assignment[0], "");
            }
        }
        return map;
    }
    /**
     * Transfers INFO attributes onto the study entry, recomputing a few
     * well-known tags (NS, DP, MQ/MQ0) from sample data and selecting the
     * per-allele value for ACC; everything else is copied verbatim.
     */
    protected void addInfo(Variant variant, StudyEntry file, int numAllele, Map<String, String> info) {
        for (Map.Entry<String, String> infoElement : info.entrySet()) {
            String infoTag = infoElement.getKey();
            String infoValue = infoElement.getValue();
            switch (infoTag) {
                case "ACC":
                    // Managing accession ID for the allele
                    String[] ids = infoValue.split(COMMA);
                    file.addAttribute(infoTag, ids[numAllele]);
                    break;
                // next is commented to store the AC, AF and AN as-is, to be able to compute stats from the DB using the attributes, and "ori" tag
                // case "AC":
                // String[] counts = infoValue.split(COMMA);
                // file.addAttribute(infoTag, counts[numAllele]);
                // break;
                // case "AF":
                // String[] frequencies = infoValue.split(COMMA);
                // file.addAttribute(infoTag, frequencies[numAllele]);
                // break;
                // case "AN":
                // file.addAttribute(infoTag, "2");
                // break;
                case "NS":
                    // Count the number of samples that are associated with the allele
                    file.addAttribute(infoTag, String.valueOf(file.getSamplesData().size()));
                    break;
                case "DP":
                    // Recompute total depth as the sum of numeric per-sample DP values.
                    int dp = 0;
                    for (String sampleName : file.getSamplesName()) {
                        String sampleDp = file.getSampleData(sampleName, "DP");
                        if (StringUtils.isNumeric(sampleDp)) {
                            dp += Integer.parseInt(sampleDp);
                        }
                    }
                    file.addAttribute(infoTag, String.valueOf(dp));
                    break;
                case "MQ":
                case "MQ0":
                    // NOTE(review): MQ is accumulated as the sum of squared GQ
                    // values (not RMS of mapping quality, and reading GQ rather
                    // than MQ) — looks suspicious; confirm against the original
                    // intent before relying on this value.
                    int mq = 0;
                    int mq0 = 0;
                    for (String sampleName : file.getSamplesName()) {
                        if (StringUtils.isNumeric(file.getSampleData(sampleName, "GQ"))) {
                            int gq = Integer.parseInt(file.getSampleData(sampleName, "GQ"));
                            mq += gq * gq;
                            if (gq == 0) {
                                mq0++;
                            }
                        }
                    }
                    file.addAttribute("MQ", String.valueOf(mq));
                    file.addAttribute("MQ0", String.valueOf(mq0));
                    break;
                default:
                    // Unknown tags are copied through untouched.
                    file.addAttribute(infoTag, infoValue);
                    break;
            }
        }
    }
    // Extension hook for subclasses; intentionally empty here.
    protected void addAttributes(Variant variant, StudyEntry sourceEntry, int numAllele, String[] alternateAlleles,
            Map<String, String> infoMap) {
    }
    /**
     * Parses one genotype token of an aggregated file into a {@link Genotype}.
     * Supported shapes: a literal base ("A"), "R" (reference), two bases
     * ("AC"), "RR", "A&lt;n&gt;", "A&lt;n&gt;A&lt;m&gt;" and "A&lt;n&gt;R".
     * Allele numbers are remapped for the current alternate via
     * mapToMultiallelicIndex (inherited; presumably maps the global allele
     * index onto this alternate's biallelic view — confirm in superclass).
     *
     * @return the parsed genotype, or null if the token matches no known shape
     */
    public static Genotype parseGenotype(String gt, int numAllele, String reference, String[] alternateAlleles) {
        Genotype g;
        Matcher m;
        List<String> alternates = Arrays.asList(alternateAlleles);
        // String alternates = variant.getAlternate();
        m = singleNuc.matcher(gt);
        if (m.matches()) { // A,C,T,G
            g = new Genotype(gt, reference, alternates);
            return g;
        }
        m = singleRef.matcher(gt);
        if (m.matches()) { // R
            g = new Genotype("0", reference, alternates);
            return g;
        }
        m = refAlt.matcher(gt);
        if (m.matches()) { // AA,AC,TT,GT,...
            String ref = m.group(1);
            String alt = m.group(2);
            // indexOf returns -1 for a base absent from the alternates, so
            // "+ 1" maps it to 0, i.e. it is treated as the reference allele.
            int allele1 = (alternates.indexOf(ref) + 1);
            int allele2 = (alternates.indexOf(alt) + 1);
            int val1 = mapToMultiallelicIndex(allele1, numAllele);
            int val2 = mapToMultiallelicIndex(allele2, numAllele);
            return new Genotype(val1 + "/" + val2, reference, alternates);
            // if ((allele1 == 0 || allele1 == (numAllele + 1)) && (allele2 == 0 || allele2 == (numAllele + 1))) {
            //
            // allele1 = allele1 > 1 ? 1 : allele1;
            // allele2 = allele2 > 1 ? 1 : allele2;
            // g = new Genotype(allele1 + "/" + allele2, variant.getReference(), variant.getAlternate());
            //
            // return g;
            // } else {
            // return new Genotype("./.", variant.getReference(), variant.getAlternate());
            // }
        }
        m = refRef.matcher(gt);
        if (m.matches()) { // RR
            g = new Genotype(reference + "/" + reference, reference, alternates);
            return g;
        }
        m = altNum.matcher(gt);
        if (m.matches()) { // A1,A2,A3
            int val = Integer.parseInt(m.group(1));
            val = mapToMultiallelicIndex(val, numAllele);
            return new Genotype(Integer.toString(val), reference, alternates);
        }
        m = altNumaltNum.matcher(gt);
        if (m.matches()) { // A1A2,A1A3...
            int val1 = Integer.parseInt(m.group(1));
            int val2 = Integer.parseInt(m.group(2));
            val1 = mapToMultiallelicIndex(val1, numAllele);
            val2 = mapToMultiallelicIndex(val2, numAllele);
            return new Genotype(val1 + "/" + val2, reference, alternates);
        }
        m = altNumRef.matcher(gt);
        if (m.matches()) { // A1R, A2R
            int val1 = Integer.parseInt(m.group(1));
            val1 = mapToMultiallelicIndex(val1, numAllele);
            return new Genotype(val1 + "/" + 0, reference, alternates);
        }
        return null;
    }
}
| |
/*
* Copyright 2015 Goldman Sachs.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gs.collections.impl.map.mutable.primitive;
import java.io.IOException;
import com.gs.collections.api.BooleanIterable;
import com.gs.collections.api.LazyBooleanIterable;
import com.gs.collections.api.bag.primitive.MutableBooleanBag;
import com.gs.collections.api.block.function.primitive.BooleanToObjectFunction;
import com.gs.collections.api.block.function.primitive.ObjectBooleanToObjectFunction;
import com.gs.collections.api.block.predicate.primitive.BooleanPredicate;
import com.gs.collections.api.block.procedure.primitive.BooleanProcedure;
import com.gs.collections.api.collection.MutableCollection;
import com.gs.collections.api.collection.primitive.ImmutableBooleanCollection;
import com.gs.collections.api.collection.primitive.MutableBooleanCollection;
import com.gs.collections.api.iterator.BooleanIterator;
import com.gs.collections.api.list.primitive.MutableBooleanList;
import com.gs.collections.api.map.primitive.MutableBooleanValuesMap;
import com.gs.collections.api.set.primitive.MutableBooleanSet;
import com.gs.collections.impl.collection.mutable.primitive.SynchronizedBooleanCollection;
import com.gs.collections.impl.collection.mutable.primitive.UnmodifiableBooleanCollection;
import com.gs.collections.impl.factory.primitive.BooleanLists;
import com.gs.collections.impl.lazy.primitive.LazyBooleanIterableAdapter;
import com.gs.collections.impl.list.mutable.FastList;
import com.gs.collections.impl.primitive.AbstractBooleanIterable;
import com.gs.collections.impl.set.mutable.primitive.BooleanHashSet;
public abstract class AbstractMutableBooleanValuesMap extends AbstractBooleanIterable implements MutableBooleanValuesMap
{
    // Number of key/value slots occupied in the backing table, excluding the
    // two sentinel keys.
    protected abstract int getOccupiedWithData();
    // Container for the values mapped to the two sentinel keys (zero and one);
    // null when neither sentinel key is present.
    protected abstract SentinelValues getSentinelValues();
    // Drops the sentinel container entirely (both sentinel keys absent).
    protected abstract void setSentinelValuesNull();
    // The default value used when a sentinel slot is cleared.
    protected abstract boolean getEmptyValue();
    // Value stored at the given table slot.
    protected abstract boolean getValueAtIndex(int index);
    // Capacity of the backing table (iteration bound).
    protected abstract int getTableSize();
    // True when the slot holds real data (not empty and not a removed marker).
    protected abstract boolean isNonSentinelAtIndex(int index);
protected void addEmptyKeyValue(boolean value)
{
this.getSentinelValues().containsZeroKey = true;
this.getSentinelValues().zeroValue = value;
}
protected void removeEmptyKey()
{
if (this.getSentinelValues().containsOneKey)
{
this.getSentinelValues().containsZeroKey = false;
this.getSentinelValues().zeroValue = this.getEmptyValue();
}
else
{
this.setSentinelValuesNull();
}
}
protected void addRemovedKeyValue(boolean value)
{
this.getSentinelValues().containsOneKey = true;
this.getSentinelValues().oneValue = value;
}
protected void removeRemovedKey()
{
if (this.getSentinelValues().containsZeroKey)
{
this.getSentinelValues().containsOneKey = false;
this.getSentinelValues().oneValue = this.getEmptyValue();
}
else
{
this.setSentinelValuesNull();
}
}
    // For a values-map, containment means "some key maps to this value";
    // delegate straight to containsValue.
    public boolean contains(boolean value)
    {
        return this.containsValue(value);
    }
@Override
public boolean containsAll(BooleanIterable source)
{
return source.allSatisfy(new BooleanPredicate()
{
public boolean accept(boolean value)
{
return AbstractMutableBooleanValuesMap.this.contains(value);
}
});
}
public int size()
{
return this.getOccupiedWithData() + (this.getSentinelValues() == null ? 0 : this.getSentinelValues().size());
}
@Override
public boolean isEmpty()
{
return this.getOccupiedWithData() == 0 && (this.getSentinelValues() == null || this.getSentinelValues().size() == 0);
}
@Override
public boolean notEmpty()
{
return this.getOccupiedWithData() != 0 || (this.getSentinelValues() != null && this.getSentinelValues().size() != 0);
}
    // Iterating a values-map means iterating its values.
    public void forEach(BooleanProcedure procedure)
    {
        this.forEachValue(procedure);
    }
public void forEachValue(BooleanProcedure procedure)
{
if (this.getSentinelValues() != null)
{
if (this.getSentinelValues().containsZeroKey)
{
procedure.value(this.getSentinelValues().zeroValue);
}
if (this.getSentinelValues().containsOneKey)
{
procedure.value(this.getSentinelValues().oneValue);
}
}
for (int i = 0; i < this.getTableSize(); i++)
{
if (this.isNonSentinelAtIndex(i))
{
procedure.value(this.getValueAtIndex(i));
}
}
}
public <V> V injectInto(V injectedValue, ObjectBooleanToObjectFunction<? super V, ? extends V> function)
{
V result = injectedValue;
if (this.getSentinelValues() != null)
{
if (this.getSentinelValues().containsZeroKey)
{
result = function.valueOf(result, this.getSentinelValues().zeroValue);
}
if (this.getSentinelValues().containsOneKey)
{
result = function.valueOf(result, this.getSentinelValues().oneValue);
}
}
for (int i = 0; i < this.getTableSize(); i++)
{
if (this.isNonSentinelAtIndex(i))
{
result = function.valueOf(result, this.getValueAtIndex(i));
}
}
return result;
}
/**
 * Appends {@code start}, then all values separated by {@code separator},
 * then {@code end} to the given appendable.
 */
public void appendString(Appendable appendable, String start, String separator, String end)
{
    try
    {
        appendable.append(start);
        boolean isFirst = true;
        SentinelValues sentinels = this.getSentinelValues();
        if (sentinels != null)
        {
            if (sentinels.containsZeroKey)
            {
                appendable.append(String.valueOf(sentinels.zeroValue));
                isFirst = false;
            }
            if (sentinels.containsOneKey)
            {
                if (!isFirst)
                {
                    appendable.append(separator);
                }
                appendable.append(String.valueOf(sentinels.oneValue));
                isFirst = false;
            }
        }
        for (int index = 0; index < this.getTableSize(); index++)
        {
            if (this.isNonSentinelAtIndex(index))
            {
                if (!isFirst)
                {
                    appendable.append(separator);
                }
                appendable.append(String.valueOf(this.getValueAtIndex(index)));
                isFirst = false;
            }
        }
        appendable.append(end);
    }
    catch (IOException e)
    {
        // Appendable's contract forces a checked IOException; surface it unchecked.
        throw new RuntimeException(e);
    }
}
/**
 * Collects every value accepted by the predicate into a new mutable list,
 * preserving the map's iteration order (sentinels first).
 */
public MutableBooleanCollection select(BooleanPredicate predicate)
{
    MutableBooleanList selected = BooleanLists.mutable.empty();
    SentinelValues sentinels = this.getSentinelValues();
    if (sentinels != null)
    {
        if (sentinels.containsZeroKey && predicate.accept(sentinels.zeroValue))
        {
            selected.add(sentinels.zeroValue);
        }
        if (sentinels.containsOneKey && predicate.accept(sentinels.oneValue))
        {
            selected.add(sentinels.oneValue);
        }
    }
    for (int index = 0; index < this.getTableSize(); index++)
    {
        if (this.isNonSentinelAtIndex(index) && predicate.accept(this.getValueAtIndex(index)))
        {
            selected.add(this.getValueAtIndex(index));
        }
    }
    return selected;
}
/**
 * Collects every value NOT accepted by the predicate into a new mutable list,
 * preserving the map's iteration order (sentinels first).
 */
public MutableBooleanCollection reject(BooleanPredicate predicate)
{
    MutableBooleanList rejected = BooleanLists.mutable.empty();
    SentinelValues sentinels = this.getSentinelValues();
    if (sentinels != null)
    {
        if (sentinels.containsZeroKey && !predicate.accept(sentinels.zeroValue))
        {
            rejected.add(sentinels.zeroValue);
        }
        if (sentinels.containsOneKey && !predicate.accept(sentinels.oneValue))
        {
            rejected.add(sentinels.oneValue);
        }
    }
    for (int index = 0; index < this.getTableSize(); index++)
    {
        if (this.isNonSentinelAtIndex(index) && !predicate.accept(this.getValueAtIndex(index)))
        {
            rejected.add(this.getValueAtIndex(index));
        }
    }
    return rejected;
}
/**
 * Returns the first value accepted by the predicate (sentinels checked first),
 * or the supplied fallback {@code value} when nothing matches.
 */
public boolean detectIfNone(BooleanPredicate predicate, boolean value)
{
    SentinelValues sentinels = this.getSentinelValues();
    if (sentinels != null)
    {
        if (sentinels.containsZeroKey && predicate.accept(sentinels.zeroValue))
        {
            return sentinels.zeroValue;
        }
        if (sentinels.containsOneKey && predicate.accept(sentinels.oneValue))
        {
            return sentinels.oneValue;
        }
    }
    for (int index = 0; index < this.getTableSize(); index++)
    {
        if (this.isNonSentinelAtIndex(index) && predicate.accept(this.getValueAtIndex(index)))
        {
            return this.getValueAtIndex(index);
        }
    }
    return value;
}
/**
 * Transforms every value with the function into a new collection,
 * pre-sized to this map's size and preserving iteration order.
 */
public <V> MutableCollection<V> collect(BooleanToObjectFunction<? extends V> function)
{
    FastList<V> collected = FastList.newList(this.size());
    SentinelValues sentinels = this.getSentinelValues();
    if (sentinels != null)
    {
        if (sentinels.containsZeroKey)
        {
            collected.add(function.valueOf(sentinels.zeroValue));
        }
        if (sentinels.containsOneKey)
        {
            collected.add(function.valueOf(sentinels.oneValue));
        }
    }
    for (int index = 0; index < this.getTableSize(); index++)
    {
        if (this.isNonSentinelAtIndex(index))
        {
            collected.add(function.valueOf(this.getValueAtIndex(index)));
        }
    }
    return collected;
}
/**
 * Returns how many values satisfy the predicate.
 */
public int count(BooleanPredicate predicate)
{
    int matches = 0;
    SentinelValues sentinels = this.getSentinelValues();
    if (sentinels != null)
    {
        if (sentinels.containsZeroKey && predicate.accept(sentinels.zeroValue))
        {
            matches++;
        }
        if (sentinels.containsOneKey && predicate.accept(sentinels.oneValue))
        {
            matches++;
        }
    }
    for (int index = 0; index < this.getTableSize(); index++)
    {
        if (this.isNonSentinelAtIndex(index) && predicate.accept(this.getValueAtIndex(index)))
        {
            matches++;
        }
    }
    return matches;
}
/**
 * Returns true as soon as any value satisfies the predicate; false otherwise.
 */
public boolean anySatisfy(BooleanPredicate predicate)
{
    SentinelValues sentinels = this.getSentinelValues();
    if (sentinels != null)
    {
        if (sentinels.containsZeroKey && predicate.accept(sentinels.zeroValue))
        {
            return true;
        }
        if (sentinels.containsOneKey && predicate.accept(sentinels.oneValue))
        {
            return true;
        }
    }
    for (int index = 0; index < this.getTableSize(); index++)
    {
        if (this.isNonSentinelAtIndex(index) && predicate.accept(this.getValueAtIndex(index)))
        {
            return true;
        }
    }
    return false;
}
/**
 * Returns false as soon as any value fails the predicate; true otherwise
 * (vacuously true for an empty map).
 */
public boolean allSatisfy(BooleanPredicate predicate)
{
    SentinelValues sentinels = this.getSentinelValues();
    if (sentinels != null)
    {
        if (sentinels.containsZeroKey && !predicate.accept(sentinels.zeroValue))
        {
            return false;
        }
        if (sentinels.containsOneKey && !predicate.accept(sentinels.oneValue))
        {
            return false;
        }
    }
    for (int index = 0; index < this.getTableSize(); index++)
    {
        if (this.isNonSentinelAtIndex(index) && !predicate.accept(this.getValueAtIndex(index)))
        {
            return false;
        }
    }
    return true;
}
/**
 * Returns true when no value satisfies the predicate (negation of anySatisfy).
 */
public boolean noneSatisfy(BooleanPredicate predicate)
{
    return !this.anySatisfy(predicate);
}
/**
 * Copies all values into a new array: sentinel values first, then each
 * occupied table slot in index order.
 */
public boolean[] toArray()
{
    boolean[] result = new boolean[this.size()];
    int cursor = 0;
    SentinelValues sentinels = this.getSentinelValues();
    if (sentinels != null)
    {
        if (sentinels.containsZeroKey)
        {
            result[cursor++] = sentinels.zeroValue;
        }
        if (sentinels.containsOneKey)
        {
            result[cursor++] = sentinels.oneValue;
        }
    }
    for (int index = 0; index < this.getTableSize(); index++)
    {
        if (this.isNonSentinelAtIndex(index))
        {
            result[cursor++] = this.getValueAtIndex(index);
        }
    }
    return result;
}
/**
 * Holds the values mapped to the two sentinel keys (the keys the open-addressed
 * table cannot store directly).
 */
protected static class SentinelValues extends AbstractSentinelValues
{
    protected boolean zeroValue; // value mapped to the "zero" sentinel key
    protected boolean oneValue;  // value mapped to the "one" sentinel key

    public boolean containsValue(boolean value)
    {
        // Each sentinel slot counts only when its key is actually present.
        return (this.containsZeroKey && this.zeroValue == value)
                || (this.containsOneKey && this.oneValue == value);
    }
}
/**
 * View of this map's values as a MutableBooleanCollection. Queries and bulk
 * removals delegate to the enclosing map; element addition is unsupported
 * because values can only enter the map through a key.
 */
protected abstract class AbstractBooleanValuesCollection implements MutableBooleanCollection
{
    public void clear()
    {
        AbstractMutableBooleanValuesMap.this.clear();
    }

    // ---- queries delegated to the enclosing map ----

    public MutableBooleanCollection select(BooleanPredicate predicate)
    {
        return AbstractMutableBooleanValuesMap.this.select(predicate);
    }
    public MutableBooleanCollection reject(BooleanPredicate predicate)
    {
        return AbstractMutableBooleanValuesMap.this.reject(predicate);
    }
    public boolean detectIfNone(BooleanPredicate predicate, boolean ifNone)
    {
        return AbstractMutableBooleanValuesMap.this.detectIfNone(predicate, ifNone);
    }
    public <V> MutableCollection<V> collect(BooleanToObjectFunction<? extends V> function)
    {
        return AbstractMutableBooleanValuesMap.this.collect(function);
    }
    public <T> T injectInto(T injectedValue, ObjectBooleanToObjectFunction<? super T, ? extends T> function)
    {
        return AbstractMutableBooleanValuesMap.this.injectInto(injectedValue, function);
    }

    // ---- fluent mutation is unsupported on a values view ----

    public MutableBooleanCollection with(boolean element)
    {
        throw new UnsupportedOperationException("Cannot call with() on " + this.getClass().getSimpleName());
    }
    public MutableBooleanCollection without(boolean element)
    {
        throw new UnsupportedOperationException("Cannot call without() on " + this.getClass().getSimpleName());
    }
    public MutableBooleanCollection withAll(BooleanIterable elements)
    {
        throw new UnsupportedOperationException("Cannot call withAll() on " + this.getClass().getSimpleName());
    }
    public MutableBooleanCollection withoutAll(BooleanIterable elements)
    {
        throw new UnsupportedOperationException("Cannot call withoutAll() on " + this.getClass().getSimpleName());
    }

    // ---- wrappers and conversions ----

    public MutableBooleanCollection asUnmodifiable()
    {
        return UnmodifiableBooleanCollection.of(this);
    }
    public MutableBooleanCollection asSynchronized()
    {
        return SynchronizedBooleanCollection.of(this);
    }
    public ImmutableBooleanCollection toImmutable()
    {
        return BooleanLists.immutable.withAll(this);
    }
    public boolean contains(boolean value)
    {
        return AbstractMutableBooleanValuesMap.this.containsValue(value);
    }
    public boolean containsAll(boolean... source)
    {
        return AbstractMutableBooleanValuesMap.this.containsAll(source);
    }
    public boolean containsAll(BooleanIterable source)
    {
        return AbstractMutableBooleanValuesMap.this.containsAll(source);
    }
    public MutableBooleanList toList()
    {
        return AbstractMutableBooleanValuesMap.this.toList();
    }
    public MutableBooleanSet toSet()
    {
        return AbstractMutableBooleanValuesMap.this.toSet();
    }
    public MutableBooleanBag toBag()
    {
        return AbstractMutableBooleanValuesMap.this.toBag();
    }
    public LazyBooleanIterable asLazy()
    {
        return new LazyBooleanIterableAdapter(this);
    }
    public boolean isEmpty()
    {
        return AbstractMutableBooleanValuesMap.this.isEmpty();
    }
    public boolean notEmpty()
    {
        return AbstractMutableBooleanValuesMap.this.notEmpty();
    }

    // ---- string rendering ----

    public String makeString()
    {
        return AbstractMutableBooleanValuesMap.this.makeString();
    }
    public String makeString(String separator)
    {
        return AbstractMutableBooleanValuesMap.this.makeString(separator);
    }
    public String makeString(String start, String separator, String end)
    {
        return AbstractMutableBooleanValuesMap.this.makeString(start, separator, end);
    }
    public void appendString(Appendable appendable)
    {
        AbstractMutableBooleanValuesMap.this.appendString(appendable);
    }
    public void appendString(Appendable appendable, String separator)
    {
        AbstractMutableBooleanValuesMap.this.appendString(appendable, separator);
    }

    // ---- iteration and predicates ----

    public void forEach(BooleanProcedure procedure)
    {
        AbstractMutableBooleanValuesMap.this.forEach(procedure);
    }
    public int count(BooleanPredicate predicate)
    {
        return AbstractMutableBooleanValuesMap.this.count(predicate);
    }
    public boolean anySatisfy(BooleanPredicate predicate)
    {
        return AbstractMutableBooleanValuesMap.this.anySatisfy(predicate);
    }
    public boolean allSatisfy(BooleanPredicate predicate)
    {
        return AbstractMutableBooleanValuesMap.this.allSatisfy(predicate);
    }
    public boolean noneSatisfy(BooleanPredicate predicate)
    {
        return AbstractMutableBooleanValuesMap.this.noneSatisfy(predicate);
    }

    // ---- additions unsupported; removals supported (remove is subclass-defined) ----

    public boolean add(boolean element)
    {
        throw new UnsupportedOperationException("Cannot call add() on " + this.getClass().getSimpleName());
    }
    public boolean addAll(boolean... source)
    {
        throw new UnsupportedOperationException("Cannot call addAll() on " + this.getClass().getSimpleName());
    }
    public boolean addAll(BooleanIterable source)
    {
        throw new UnsupportedOperationException("Cannot call addAll() on " + this.getClass().getSimpleName());
    }
    public boolean removeAll(BooleanIterable source)
    {
        // Size comparison detects whether any remove() actually changed the map.
        int oldSize = AbstractMutableBooleanValuesMap.this.size();
        BooleanIterator iterator = source.booleanIterator();
        while (iterator.hasNext())
        {
            this.remove(iterator.next());
        }
        return oldSize != AbstractMutableBooleanValuesMap.this.size();
    }
    public boolean removeAll(boolean... source)
    {
        int oldSize = AbstractMutableBooleanValuesMap.this.size();
        for (boolean item : source)
        {
            this.remove(item);
        }
        return oldSize != AbstractMutableBooleanValuesMap.this.size();
    }
    public boolean retainAll(boolean... source)
    {
        // Delegates to the BooleanIterable overload (defined in a subclass).
        return this.retainAll(BooleanHashSet.newSetWith(source));
    }
    public int size()
    {
        return AbstractMutableBooleanValuesMap.this.size();
    }
    public boolean[] toArray()
    {
        return AbstractMutableBooleanValuesMap.this.toArray();
    }
}
}
| |
package com.sksamuel.jqm4gwt.list;
import java.util.ArrayList;
import java.util.List;
import com.google.gwt.dom.client.Document;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.EventTarget;
import com.google.gwt.dom.client.FieldSetElement;
import com.google.gwt.dom.client.ImageElement;
import com.google.gwt.dom.client.InputElement;
import com.google.gwt.dom.client.LIElement;
import com.google.gwt.dom.client.Node;
import com.google.gwt.dom.client.Style;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.HasClickHandlers;
import com.google.gwt.event.shared.GwtEvent.Type;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.uibinder.client.UiConstructor;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.ui.ComplexPanel;
import com.google.gwt.user.client.ui.TextBox;
import com.google.gwt.user.client.ui.Widget;
import com.sksamuel.jqm4gwt.DataIcon;
import com.sksamuel.jqm4gwt.HasRel;
import com.sksamuel.jqm4gwt.HasText;
import com.sksamuel.jqm4gwt.HasTransition;
import com.sksamuel.jqm4gwt.IconPos;
import com.sksamuel.jqm4gwt.JQMCommon;
import com.sksamuel.jqm4gwt.Mobile;
import com.sksamuel.jqm4gwt.Transition;
import com.sksamuel.jqm4gwt.events.HasTapHandlers;
import com.sksamuel.jqm4gwt.events.JQMComponentEvents;
import com.sksamuel.jqm4gwt.events.JQMHandlerRegistration;
import com.sksamuel.jqm4gwt.events.JQMHandlerRegistration.WidgetHandlerCounter;
import com.sksamuel.jqm4gwt.events.TapEvent;
import com.sksamuel.jqm4gwt.events.TapHandler;
import com.sksamuel.jqm4gwt.form.elements.JQMFilterable;
import com.sksamuel.jqm4gwt.html.CustomFlowPanel;
import com.sksamuel.jqm4gwt.panel.JQMControlGroup;
/**
* @author Stephen K Samuel samspade79@gmail.com 5 May 2011 11:21:29
*/
public class JQMListItem extends CustomFlowPanel implements HasText<JQMListItem>, HasClickHandlers,
HasTapHandlers, HasRel<JQMListItem>, HasTransition<JQMListItem> {
public static final String STYLE_UI_LI_HAS_THUMB = "ui-li-has-thumb";
/**
 * Element to hold the count bubble
 */
private Element countElem;
/**
 * Element to hold the image (thumbnail or icon)
 */
private Element imageElem;
/**
 * The element that contains the link, if any
 */
private Element anchor;
/** Panel wrapper around {@link #anchor}; created lazily when control-group mode needs it. */
private CustomFlowPanel anchorPanel;
/** Split button element */
private Element split;
/** Cached split theme so it survives split button re-creation. */
private String splitTheme;
/**
 * The element that holds the aside content
 */
private Element asideElem;
/**
 * The element that holds the content of the "main" text
 */
private Element headerElem;
/** Owning list; assigned via {@link #setList(JQMList)}. */
private JQMList list;
/** Control group specialized for hosting child widgets inside a list item. */
public class LiControlGroup extends JQMControlGroup {
    protected LiControlGroup(Element element, String styleName) {
        super(element, styleName);
    }
}
private LiControlGroup controlGroup;
private ComplexPanel controlGroupRoot;
/** Input used in checkbox mode; its type attribute is forced to "checkbox". */
private TextBox checkBoxInput;
private HandlerRegistration clickHandler;
private HandlerRegistration tapHandler;
// NOTE(review): no accessors for this field are visible in this chunk;
// presumably arbitrary user data attached to the item.
private Object tag;
/**
 * Create empty {@link JQMListItem}
 */
@UiConstructor
public JQMListItem() {
    super(DOM.createElement(LIElement.TAG));
    setStyleName("jqm4gwt-listitem");
    setId(); // every item gets a unique DOM id
}
/**
 * Create {@link JQMListItem} with the initial content set to the value of the text.
 */
public JQMListItem(String text) {
    this();
    setText(text);
}
/**
 * Create a linked {@link JQMListItem} with the initial content set to the
 * value of the param text and the link set to the value of the param url.
 */
public JQMListItem(String text, String url) {
    this(text);
    if (url != null) setUrl(url);
}
/**
 * Create {@link JQMListItem} with the given text, rendered in checkbox mode
 * with the checkbox icon at the given position.
 */
public JQMListItem(String text, IconPos checkBox) {
    this(text);
    setCheckBox(checkBox);
}
/** Registers a native click handler on this item's element. */
@Override
public HandlerRegistration addClickHandler(ClickHandler handler) {
    return addDomHandler(handler, ClickEvent.getType());
}
/** Registers a jQuery Mobile "tap" handler (not a native browser event). */
@Override
public HandlerRegistration addTapHandler(TapHandler handler) {
    // this is not a native browser event so we will have to manage it via JS
    return JQMHandlerRegistration.registerJQueryHandler(new WidgetHandlerCounter() {
        @Override
        public int getHandlerCountForWidget(Type<?> type) {
            return getHandlerCount(type);
        }
    }, this, handler, JQMComponentEvents.TAP_EVENT, TapEvent.getType());
}
/**
 * Returns true when the given element is the split button anchor or any
 * descendant of it (walks up the parent chain).
 */
private boolean isSplitClicked(Element elt) {
    if (split == null) return false;
    for (Element e = elt; e != null; e = e.getParentElement()) {
        if (e == split) return true;
    }
    return false;
}
/**
 * Returns true when the event target lies inside the split button anchor.
 */
public boolean isSplitClicked(EventTarget target) {
    return target != null && isSplitClicked(Element.as(target));
}
/**
 * Adds a header element containing the given text.
 *
 * @param n - the Hn element to use, e.g. if n is 2 then a {@code <h2>} element is created.
 * @param html - the value to set as the inner html of the {@code <hn>} element.
 */
public JQMListItem addHeaderText(int n, String html) {
    Element e = Document.get().createHElement(n);
    e.setInnerHTML(html);
    attachChild(e); // placed under the anchor/control group when one exists
    return this;
}
/**
 * Adds a paragraph element containing the given text.
 *
 * @param html - the value to set as the inner html of the p element.
 */
public JQMListItem addText(String html) {
    Element e = Document.get().createPElement();
    e.setInnerHTML(html);
    attachChild(e);
    return this;
}
/**
 * Adds a div element containing the given text.
 *
 * @param html - the value to set as the inner html of the div element.
 */
public JQMListItem addDiv(String html) {
    Element e = Document.get().createDivElement();
    e.setInnerHTML(html);
    attachChild(e);
    return this;
}
/** Inserts the element as the first child of the current content parent. */
private void insertFirstChild(Element elem) {
    contentParent().insertFirst(elem);
}
/** Appends the element to the current content parent. */
private void attachChild(Element elem) {
    contentParent().appendChild(elem);
}
/** Detaches the element from the current content parent. */
private void removeChild(Element elem) {
    contentParent().removeChild(elem);
}
/**
 * Resolves which element currently receives item content: the <li> itself
 * when there is no anchor, the control group when one exists, else the anchor.
 */
private Element contentParent() {
    if (anchor == null) return getElement();
    if (controlGroup != null) return controlGroup.getElement();
    return anchor;
}
/** Lazily creates the aside <p class="ui-li-aside"> element and attaches it. */
private void createAndAttachAsideElem() {
    asideElem = Document.get().createPElement();
    asideElem.setClassName("ui-li-aside");
    attachChild(asideElem);
}
/** Lazily creates the count bubble <span class="ui-li-count"> element and attaches it. */
private void createAndAttachCountElement() {
    countElem = Document.get().createSpanElement();
    countElem.setClassName("ui-li-count");
    attachChild(countElem);
}
/**
 * Returns the value of the count bubble or null if no count has been set
 */
public Integer getCount() {
    if (countElem == null) return null;
    // The bubble text is always written by setCount(Integer), so it parses as an int.
    return Integer.parseInt(countElem.getInnerText());
}
/**
 * Returns the value of the "main" text element
 */
@Override
public String getText() {
    return headerElem != null ? headerElem.getInnerText() : null;
}
/**
 * Moves the anchor's child nodes to the given element, skipping any node
 * listed in excludes. Nodes are collected first and detached afterwards,
 * because removing while iterating would shift child indexes.
 */
private void moveAnchorChildrenTo(Element elt, Element... excludes) {
    List<Node> move = new ArrayList<Node>();
    for (int k = 0; k < anchor.getChildCount(); k++) {
        Node node = anchor.getChild(k);
        if (excludes.length > 0) {
            boolean exclude = false;
            for (int n = 0; n < excludes.length; n++) {
                if (node == excludes[n]) {
                    exclude = true;
                    break;
                }
            }
            if (exclude) continue;
        }
        move.add(node);
    }
    for (int i = 0; i < move.size(); i++) {
        Node node = move.get(i);
        anchor.removeChild(node);
        elt.appendChild(node);
    }
}
/** Moves the anchor's children back directly under the <li> element. */
private void moveAnchorChildrenToThis() {
    moveAnchorChildrenTo(getElement());
}
/** Moves all direct children of the <li> under the anchor (same two-phase scheme as above). */
private void moveThisChildrenToAnchor() {
    Element elt = getElement();
    int cnt = elt.getChildCount();
    if (cnt == 0) return;
    List<Node> move = new ArrayList<Node>(cnt);
    for (int i = 0; i < cnt; i++) {
        move.add(elt.getChild(i));
    }
    for (int i = 0; i < move.size(); i++) {
        Node node = move.get(i);
        elt.removeChild(node);
        anchor.appendChild(node);
    }
}
/**
 * Removes the value of the aside element, if any. It is safe to call this
 * method regardless of if an aside has been set or not.
 */
public JQMListItem removeAside() {
    if (asideElem != null) {
        // Fix: the aside is attached via attachChild(), which may have parented
        // it under the anchor or control group instead of the <li> itself, so
        // detaching must go through the matching removeChild() helper (as
        // setText() already does); getElement().removeChild() would fail then.
        removeChild(asideElem);
        asideElem = null;
    }
    return this;
}
/**
 * Removes the value of the count element if any. It is safe to call this
 * method regardless of if a count has been set or not.
 */
public JQMListItem removeCount() {
    if (countElem != null) {
        // Fix: the count bubble is attached via attachChild(), which may have
        // parented it under the anchor or control group; detach through the
        // matching removeChild() helper instead of assuming it sits on the <li>.
        removeChild(countElem);
        countElem = null;
    }
    return this;
}
/**
 * Removes the value of the image element if any. It is safe to call this
 * method regardless of if an image has been set or not.
 */
public JQMListItem removeImage() {
    if (imageElem != null) {
        // removeFromParent() works wherever the image was attached (li or anchor).
        imageElem.removeFromParent();
        imageElem = null;
    }
    getElement().removeClassName(STYLE_UI_LI_HAS_THUMB);
    return this;
}
/**
 * Remove the url from this list item changing the item into a read only
 * item.
 */
public JQMListItem removeUrl() {
    if (anchor == null) return this;
    if (anchorPanel != null) {
        // Control-group case: detach widgets back-to-front (keeps indexes valid),
        // rebuild the bare <li>, then re-add the widgets in their original order.
        //!!! following code is semi-working, it's not reconstructed item correctly
        List<Widget> lst = new ArrayList<Widget>();
        for (int i = anchorPanel.getWidgetCount() - 1; i >= 0; i--) {
            Widget w = anchorPanel.getWidget(i);
            anchorPanel.remove(i);
            lst.add(0, w);
        }
        remove(anchorPanel);
        cleanUpLI();
        for (Widget w : lst) this.add(w);
    } else {
        moveAnchorChildrenToThis();
        getElement().removeChild(anchor);
    }
    anchor = null;
    anchorPanel = null;
    setSplitHref(null); // a split button cannot exist without a main anchor
    return this;
}
/** Detaches every DOM child of the <li> and restores the default style name. */
private void cleanUpLI() {
    Element elt = getElement();
    while (elt.getChildCount() > 0) {
        elt.removeChild(elt.getChild(elt.getChildCount() - 1));
    }
    setStyleName("jqm4gwt-listitem");
}
/**
 * Sets the content of the aside. The aside is supplemental content that
 * is positioned to the right of the main content.
 */
public JQMListItem setAside(String text) {
    if (text == null)
        throw new RuntimeException("Cannot set aside to null. Call removeAside() if you wanted to remove the aside text");
    if (asideElem == null)
        createAndAttachAsideElem(); // lazily created on first use
    asideElem.setInnerText(text);
    return this;
}
/**
 * Set the count bubble value. If null this will throw a runtime
 * exception. To remove a count bubble call removeCount()
 */
public JQMListItem setCount(Integer count) {
    if (count == null)
        throw new RuntimeException("Cannot set count to null. Call removeCount() if you wanted to remove the bubble");
    if (countElem == null)
        createAndAttachCountElement(); // lazily created on first use
    countElem.setInnerText(count.toString());
    return this;
}
/** Assigns a document-unique DOM id to this item's element. */
private JQMListItem setId() {
    getElement().setId(Document.get().createUniqueId());
    return this;
}
/** Returns the DOM id of this item's element. */
public String getId() {
    return getElement().getId();
}
/**
 * Sets the image to be used to the given source url.
 * <br> The same as setImage(), but image is marked as icon class.
 */
public void setIcon(String src) {
    setImage(src);
    if (imageElem != null) {
        // switch styling from thumbnail to icon
        imageElem.removeClassName("jqm4gwt-listitem-thumb");
        imageElem.addClassName("jqm4gwt-listitem-icon");
    }
}
/** The same as {@link JQMListItem#setIcon(String)} */
public JQMListItem withIcon(String src) {
    setIcon(src);
    return this;
}
/**
 * Sets the image to be used to the given source url.
 * <br> The same as setImage(), but image is marked as thumbnail class.
 */
public void setThumbnail(String src) {
    setImage(src);
    if (imageElem != null) {
        // switch styling from icon to thumbnail
        imageElem.removeClassName("jqm4gwt-listitem-icon");
        imageElem.addClassName("jqm4gwt-listitem-thumb");
    }
}
/** The same as {@link JQMListItem#setThumbnail(String)} */
public JQMListItem withThumbnail(String src) {
    setThumbnail(src);
    return this;
}
/**
 * Sets the image on this list item to the given source url.
 * <br> Neither 'jqm4gwt-listitem-thumb' nor 'jqm4gwt-listitem-icon' class is added.
 */
public void setImage(String src) {
    if (src == null) {
        throw new RuntimeException("Cannot set image to null. Call removeImage() if you wanted to remove the image");
    }
    if (imageElem == null) {
        imageElem = Document.get().createImageElement();
        // must be first child according to jquery.mobile-1.4.x.css
        // (anchor is preferred over the control group here for that reason)
        if (anchor != null) anchor.insertFirst(imageElem);
        else insertFirstChild(imageElem);
    }
    imageElem.setAttribute("src", src);
    getElement().addClassName(STYLE_UI_LI_HAS_THUMB);
}
/** The same as {@link JQMListItem#setImage(String)} */
public JQMListItem withImage(String src) {
    setImage(src);
    return this;
}
/**
 * Adds secondary image to this list item. It's forcefully added directly to <li> element.
 * <br> Additional CSS is needed to control appearance of this image, for example right side
 * icon on the static band can be implemented, see <b>jqm4gwt-list-static-item-img-right</b> CSS rule.
 */
public ImageElement addSecondaryImage(String src) {
    if (src == null) {
        throw new RuntimeException("Cannot set secondary image to null.");
    }
    ImageElement img = Document.get().createImageElement();
    img.setAttribute("src", src);
    getElement().appendChild(img); // bypasses attachChild() on purpose
    return img;
}
/** For UiBinder, the same as {@link JQMListItem#addSecondaryImage(String)} */
public void setSecondaryImage(String src) {
    addSecondaryImage(src);
}
/** (Re)registers click/tap handlers when the item is attached to the DOM. */
@Override
protected void onLoad() {
    super.onLoad();
    addItemActivationHandlers();
}
/** Unregisters click/tap handlers when the item is detached from the DOM. */
@Override
protected void onUnload() {
    removeItemActivationHandlers();
    super.onUnload();
}
/** Returns the list this item currently belongs to, or null. */
public JQMList getList() {
    return list;
}
/** Rebinds this item to the given list, refreshing activation handlers. */
protected JQMListItem setList(JQMList jqmList) {
    removeItemActivationHandlers();
    this.list = jqmList;
    addItemActivationHandlers();
    return this;
}
/** Detaches the click/tap handler registrations, if any were made. */
private void removeItemActivationHandlers() {
    if (clickHandler != null)
        clickHandler.removeHandler();
    if (tapHandler != null)
        tapHandler.removeHandler();
}
/**
 * Registers both click and tap handlers that notify the owning list of item
 * activation; only active when the item belongs to a list and is linkable.
 */
private void addItemActivationHandlers() {
    if (list != null && anchor != null) {
        // why 2 handlers for this?
        // 'tap' bubbles correctly but is not generated on all child widget types for bubbling usage;
        // on some devices 'tap' happens sooner then click event and can trigger actions
        // 'click' is native - generated by more widgets but it might come too late sometimes
        if (clickHandler == null) {
            clickHandler = addClickHandler(new ClickHandler() {
                @Override
                public void onClick(ClickEvent event) {
                    boolean isSplit = (event != null)
                            ? isSplitClicked(event.getNativeEvent().getEventTarget()) : false;
                    list.setClickItem(JQMListItem.this, isSplit);
                }
            });
        }
        if (tapHandler == null) {
            tapHandler = addTapHandler(new TapHandler() {
                @Override
                public void onTap(TapEvent event) {
                    boolean isSplit = (event != null) ? isSplitClicked(event
                            .getJQueryEvent().getEventTarget()) : false;
                    list.setClickItem(JQMListItem.this, isSplit);
                }
            });
        }
    }
}
/**
 * Sets the content of the "main" text to the given value.
 * Passing null removes the header element entirely.
 */
@Override
public void setText(String text) {
    if (text == null) {
        if (headerElem != null) {
            removeChild(headerElem);
            headerElem = null;
        }
        return;
    }
    if (headerElem == null) {
        headerElem = Document.get().createHElement(3); // main text lives in an <h3>
        attachChild(headerElem);
    }
    headerElem.setInnerText(text);
}
/**
 * Sets the url to link to for this item. If this item was a read only
 * item it automatically becomes linkable.
 */
public JQMListItem setUrl(String url) {
    if (url == null)
        throw new RuntimeException("Cannot set URL to null. Call removeUrl() if you wanted to remove the URL");
    if (anchor == null) {
        if (controlGroupRoot != null) {
            //!!! following code is semi-working, it's not reconstructed item correctly
            remove(controlGroupRoot);
            anchor = Document.get().createAnchorElement();
            anchor.setAttribute("href", url);
            moveThisChildrenToAnchor();
            cleanUpLI();
            prepareAnchorForControlGroup();
            getElement().appendChild(anchor);
            checkAnchorPanel();
        } else {
            // need to make anchor and move children to it
            anchor = Document.get().createAnchorElement();
            moveThisChildrenToAnchor();
            getElement().appendChild(anchor);
        }
        // handlers only attach when an anchor exists, so (re)register now
        addItemActivationHandlers();
    }
    anchor.setAttribute("href", url);
    return this;
}
/** Can be used in UiBinder */
public void setHref(String url) {
    setUrl(url);
}
/** Returns the anchor's href, or null when this item is read-only. */
public String getHref() {
    return anchor != null ? anchor.getAttribute("href") : null;
}
/**
 * Sets the url of the split button; null removes the split button.
 * Creating a split button forces the item to become linkable.
 */
public void setSplitHref(String url) {
    if (url == null) {
        if (split != null) {
            // split anchor is always attached directly to the <li>
            getElement().removeChild(split);
            split = null;
            checkSplitPadding();
        }
        return;
    }
    if (split != null) return; // already created; href is not updated by design
    if (anchor == null) setUrl("#"); // split requires a main anchor
    split = Document.get().createAnchorElement();
    split.setAttribute("href", url);
    getElement().insertAfter(split, anchor);
    setSplitTheme(splitTheme); // re-apply cached theme to the new anchor
    checkSplitPadding();
}
/** Sets the jQuery Mobile data-icon of this list item's element. */
public void setDataIcon(DataIcon icon) {
    JQMCommon.setIcon(getElement(), icon);
}
/** Returns the jQuery Mobile data-icon of this list item's element. */
public DataIcon getDataIcon() {
    return JQMCommon.getIcon(getElement());
}
/**
 * Sets the icon of the split button.
 * <p>Fix: the previous implementation duplicated {@link #setDataIcon(DataIcon)}
 * and wrote the icon onto the {@code <li>} element itself, clobbering the
 * item's own data-icon. The icon belongs on the split anchor. Call
 * {@link #setSplitHref(String)} first so the split button exists; the call
 * is a no-op while there is no split button.
 */
public void setSplitIcon(DataIcon icon) {
    if (split != null) JQMCommon.setIcon(split, icon);
}
/**
 * Returns the icon of the split button, or null when no split button exists.
 * <p>Fix: previously read the icon from the {@code <li>} element (same as
 * {@link #getDataIcon()}) instead of from the split anchor.
 */
public DataIcon getSplitIcon() {
    return split != null ? JQMCommon.getIcon(split) : null;
}
/** Sets the split button theme; cached so it survives split re-creation. */
public void setSplitTheme(String theme) {
    splitTheme = theme;
    if (split != null) JQMCommon.setTheme(split, theme);
}
/** Returns the split button theme (read back from the DOM when the split exists). */
public String getSplitTheme() {
    if (split == null) return splitTheme;
    splitTheme = JQMCommon.getTheme(split);
    return splitTheme;
}
/** Fluent alias for {@link #setText(String)}. */
@Override
public JQMListItem withText(String text) {
    setText(text);
    return this;
}
/** Historical hook for split-button padding; intentionally a no-op since jqm 1.4.x. */
private void checkSplitPadding() {
    // if (anchor == null || controlGroup == null) return;
    // Not needed anymore in jqm 1.4.x
    // anchor.getStyle().setPaddingRight(split == null ? 0 : 42, Unit.PX);
}
/** Strips anchor padding so hosted control-group widgets align flush. */
private void prepareAnchorForControlGroup() {
    if (anchor == null) return;
    anchor.getStyle().setPadding(0, Unit.PX);
    checkSplitPadding();
}
/**
 * Builds the control-group structure (root panel + fieldset group) that lets
 * arbitrary widgets be hosted inside this list item. No-op if already built.
 *
 * @param linkable - if true an anchor is forcefully created so the row stays clickable.
 */
private void createControlGroup(boolean linkable) {
    if (controlGroup != null) return;
    if (linkable) {
        if (anchor == null) setUrl("#");
        prepareAnchorForControlGroup();
    } else {
        removeUrl();
    }
    // groupRoot needs to be either "label" for checkbox or "div" for other elements (radio group for example)
    CustomFlowPanel groupRoot = new CustomFlowPanel(checkBoxInput == null
            ? DOM.createDiv() : DOM.createLabel());
    setStyleName(groupRoot.getElement(), "jqm4gwt-li-band");
    JQMCommon.setCorners(groupRoot, false);
    // flatten the band so it blends into the list item
    Style st = groupRoot.getElement().getStyle();
    st.setBorderWidth(0, Unit.PX);
    st.setMarginTop(0, Unit.PX);
    st.setMarginBottom(0, Unit.PX);
    st.setPaddingTop(0, Unit.PX);
    st.setPaddingBottom(0, Unit.PX);
    FieldSetElement fldSet = Document.get().createFieldSetElement();
    LiControlGroup grp = new LiControlGroup(fldSet, "jqm4gwt-li-controls");
    groupRoot.add(grp);
    if (anchor != null) {
        // keep the image directly under the anchor (css requires it first)
        if (imageElem != null && anchor.equals(imageElem.getParentElement())) {
            moveAnchorChildrenTo(fldSet, imageElem/*exclude*/);
        } else {
            moveAnchorChildrenTo(fldSet);
        }
    }
    controlGroupRoot = groupRoot;
    controlGroup = grp;
    if (anchor != null) checkAnchorPanel();
    else add(controlGroupRoot);
}
/**
 * Ensures the anchor is wrapped in a panel (inserted at the anchor's DOM
 * position) and that the control-group root is hosted inside that panel.
 */
private void checkAnchorPanel() {
    if (anchorPanel == null) {
        anchorPanel = new CustomFlowPanel(anchor);
        int anchorIdx = -1;
        Node parent = anchor.getParentNode();
        if (parent != null && parent == getElement()) {
            for (int i = 0; i < parent.getChildCount(); i++) {
                if (parent.getChild(i) == anchor) {
                    anchorIdx = i;
                    break;
                }
            }
        }
        if (anchorIdx >= 0) insert(anchorPanel, anchorIdx);
        else add(anchorPanel);
    }
    if (controlGroupRoot != null && controlGroupRoot.getParent() != anchorPanel) {
        anchorPanel.add(controlGroupRoot);
    }
}
/**
 * true - prepare and allow to add widgets to this list box item.
 *
 * @param linkable - if true <a> will be forcefully created, so row will be clickable.
 */
public void setControlGroup(boolean value, boolean linkable) {
    if (value) {
        createControlGroup(linkable);
    } else if (controlGroup != null) {
        // tear down the whole structure: panel/anchor, split button and checkbox state
        if (anchorPanel != null) remove(anchorPanel);
        else if (anchor != null) getElement().removeChild(anchor);
        anchor = null;
        anchorPanel = null;
        setSplitHref(null);
        controlGroupRoot = null;
        controlGroup = null;
        checkBoxInput = null;
    }
}
/** Same as {@link #setControlGroup(boolean, boolean)} with linkable defaulted to true. */
public void setControlGroup(boolean value) {
    setControlGroup(value, true/*linkable*/);
}
/** Returns true when this item is in control-group mode. */
public boolean isControlGroup() {
    return controlGroup != null;
}
/** Returns the hosted control group, or null when not in control-group mode. */
public LiControlGroup getControlGroup() {
    return controlGroup;
}
/**
 * CheckBox will be created for this list item; null removes checkbox mode.
 * <p>See <a href="http://stackoverflow.com/a/13931919">Checkbox in ListView</a></p>
 */
public void setCheckBox(IconPos iconPos) {
    if (checkBoxInput != null) {
        if (iconPos == null) {
            controlGroup.remove(checkBoxInput);
            checkBoxInput = null;
            // refresh control group
            setControlGroup(false);
            setControlGroup(true);
        } else {
            // checkbox already exists: only reposition its icon
            JQMCommon.setIconPos(controlGroupRoot, iconPos);
        }
        return;
    }
    if (iconPos == null) return;
    // TextBox is used as a generic <input>; its type is rewritten to checkbox
    TextBox cb = new TextBox();
    cb.getElement().setAttribute("type", "checkbox");
    checkBoxInput = cb;
    // controlGroupRoot needs to be either "label" for checkbox or "div" for other elements (radio group for example)
    setControlGroup(false);
    setControlGroup(true);
    JQMCommon.setIconPos(controlGroupRoot, iconPos);
    controlGroup.insert(cb, 0);
}
/** Returns the checkbox icon position, or null when not in checkbox mode. */
public IconPos getCheckBox() {
    if (checkBoxInput == null) return null;
    return JQMCommon.getIconPos(controlGroupRoot);
}
/** Returns true when this item is in checkbox mode. */
public boolean isCheckBox() {
    return checkBoxInput != null;
}
/** Returns the checked state of the checkbox; false when not in checkbox mode. */
public boolean isChecked() {
    if (checkBoxInput == null) return false;
    InputElement cb = checkBoxInput.getElement().cast();
    return cb.isChecked();
}
/**
 * JSNI: sets the checked property and, when jQuery Mobile has enhanced the
 * input, refreshes the checkboxradio widget so the UI reflects the change.
 */
private static native void setChecked(InputElement elt, boolean value) /*-{
    var w = $wnd.$(elt);
    if (w.data('mobile-checkboxradio') !== undefined) {
        w.prop('checked', value).checkboxradio('refresh');
    } else {
        w.prop('checked', value);
    }
}-*/;
/** Sets the checked state; no-op when not in checkbox mode or already at that state. */
public void setChecked(boolean value) {
    if (checkBoxInput == null || isChecked() == value) return;
    InputElement cb = checkBoxInput.getElement().cast();
    setChecked(cb, value);
}
/**
 * Currently possible only in checkBox and control group modes.
 * <p>See {@link JQMListItem#setCheckBox(IconPos)} and {@link JQMListItem#setControlGroup(boolean)}
 */
public void addWidget(Widget w) {
    if (w == null || controlGroup == null) return; // silently ignored outside group mode
    controlGroup.add(w);
}
/** Returns the number of hosted widgets; 0 outside control-group mode. */
@Override
public int getWidgetCount() {
    if (controlGroup == null) return 0;
    return controlGroup.getWidgetCount();
}
/** Returns the hosted widget at the index; null outside control-group mode. */
@Override
public Widget getWidget(int index) {
    if (controlGroup == null) return null;
    return controlGroup.getWidget(index);
}
public boolean isActiveHighlight() {
if (anchor == null) return false;
return JQMCommon.isBtnActive(anchor);
}
public void setActiveHighlight(boolean value) {
if (anchor == null) return;
JQMCommon.setBtnActive(anchor, value);
}
/**
* @return - true if this item was filtered out by {@link JQMFilterable}.
*/
public boolean isFilteredOut() {
return JQMCommon.hasStyle(this, "ui-screen-hidden");
}
@Override
public String getRel() {
return anchor != null ? JQMCommon.getAttribute(anchor, "data-rel") : null;
}
@Override
public void setRel(String rel) {
if (anchor == null) {
if (rel != null && !rel.isEmpty()) {
setUrl("#");
} else {
return;
}
}
if (anchor != null) JQMCommon.setAttribute(anchor, "data-rel", rel);
}
@Override
public JQMListItem withRel(String rel) {
setRel(rel);
return this;
}
public String getSplitRel() {
return split != null ? JQMCommon.getAttribute(split, "data-rel") : null;
}
public void setSplitRel(String rel) {
if (split == null) {
if (rel != null && !rel.isEmpty()) {
setSplitHref("#");
} else {
return;
}
}
if (split != null) JQMCommon.setAttribute(split, "data-rel", rel);
}
    /**
     * Returns true if this list item is set to load a popup
     */
    public boolean isPopup() {
        return "popup".equals(getRel());
    }
    /** Marks/unmarks this item's link as opening a popup (data-rel="popup"). */
    public void setPopup(boolean popup) {
        setRel(popup ? "popup" : null);
    }
    /** Fluent variant of {@link #setPopup(boolean)}. */
    public JQMListItem withPopup(boolean popup) {
        setPopup(popup);
        return this;
    }
    /** @return the popup position of the item's link, or null when there is no link. */
    public String getPopupPos() {
        return anchor != null ? JQMCommon.getPopupPos(anchor) : null;
    }
    /**
     * Sets the popup position on this item's link; a placeholder "#" link is
     * created first when a non-empty value is supplied for a link-less item.
     */
    public void setPopupPos(String pos) {
        if (anchor == null) {
            if (pos != null && !pos.isEmpty()) {
                setUrl("#"); // materialize the anchor so the position has somewhere to live
            } else {
                return;
            }
        }
        if (anchor != null) JQMCommon.setPopupPos(anchor, pos);
    }
    /**
     * Returns true if this list item is set to load the linked page as a dialog page
     */
    public boolean isDialog() {
        return Mobile.DATA_ROLE_DIALOG.equals(getRel());
    }
    /**
     * Sets this list item to call a dialog item. This changes the look and feel
     * of the page that is loaded as a consequence of clicking on this item.
     */
    public void setDialog(boolean dialog) {
        setRel(dialog ? Mobile.DATA_ROLE_DIALOG : null);
    }
    /** Fluent variant of {@link #setDialog(boolean)}. */
    public JQMListItem withDialog(boolean dialog) {
        setDialog(dialog);
        return this;
    }
    /** @return the page transition configured on the item's link, or null when there is no link. */
    @Override
    public Transition getTransition() {
        return anchor != null ? JQMCommon.getTransition(anchor) : null;
    }
    /**
     * Sets the transition to be used by this list item when loading the URL.
     * A placeholder "#" link is created first when a transition is supplied
     * for a link-less item.
     */
    @Override
    public void setTransition(Transition transition) {
        if (anchor == null) {
            if (transition != null) {
                setUrl("#");
            } else {
                return;
            }
        }
        if (anchor != null) JQMCommon.setTransition(anchor, transition);
    }
    /** Fluent variant of {@link #setTransition(Transition)}. */
    @Override
    public JQMListItem withTransition(Transition transition) {
        setTransition(transition);
        return this;
    }
    /**
     * Returns true if this list item split part is set to load a popup
     */
    public boolean isSplitPopup() {
        return "popup".equals(getSplitRel());
    }
    /** Marks/unmarks the split part as opening a popup (data-rel="popup"). */
    public void setSplitPopup(boolean popup) {
        setSplitRel(popup ? "popup" : null);
    }
    /** Fluent variant of {@link #setSplitPopup(boolean)}. */
    public JQMListItem withSplitPopup(boolean popup) {
        setSplitPopup(popup);
        return this;
    }
    /** @return the popup position of the split part, or null when there is no split part. */
    public String getSplitPopupPos() {
        return split != null ? JQMCommon.getPopupPos(split) : null;
    }
    /**
     * Sets the popup position on the split part; the split part is created
     * first (with a "#" href) when a non-empty value is supplied and no
     * split part exists yet.
     */
    public void setSplitPopupPos(String pos) {
        if (split == null) {
            if (pos != null && !pos.isEmpty()) {
                setSplitHref("#");
            } else {
                return;
            }
        }
        if (split != null) JQMCommon.setPopupPos(split, pos);
    }
    /**
     * Returns true if this list item split part is set to load the linked page as a dialog page
     */
    public boolean isSplitDialog() {
        return Mobile.DATA_ROLE_DIALOG.equals(getSplitRel());
    }
    /**
     * Sets this list item split part to call a dialog item. This changes the look and feel
     * of the page that is loaded as a consequence of clicking on this item.
     */
    public void setSplitDialog(boolean dialog) {
        setSplitRel(dialog ? Mobile.DATA_ROLE_DIALOG : null);
    }
    /** Fluent variant of {@link #setSplitDialog(boolean)}. */
    public JQMListItem withSplitDialog(boolean dialog) {
        setSplitDialog(dialog);
        return this;
    }
    /** @return the transition of the split part, or null when there is no split part. */
    public Transition getSplitTransition() {
        return split != null ? JQMCommon.getTransition(split) : null;
    }
    /**
     * Sets the transition to be used by this list item split part when loading the URL.
     * The split part is created first (with a "#" href) when needed.
     */
    public void setSplitTransition(Transition transition) {
        if (split == null) {
            if (transition != null) {
                setSplitHref("#");
            } else {
                return;
            }
        }
        if (split != null) JQMCommon.setTransition(split, transition);
    }
    /** Fluent variant of {@link #setSplitTransition(Transition)}. */
    public JQMListItem withSplitTransition(Transition transition) {
        setSplitTransition(transition);
        return this;
    }
public Object getTag() {
return tag;
}
/**
* Additional information can be attached to list item (for example linked JQMListDivider).
*/
public void setTag(Object tag) {
this.tag = tag;
}
public String getTagStr() {
return tag != null ? tag.toString() : null;
}
public void setTagStr(String value) {
setTag(value);
}
}
| |
package com.futurologeek.smartcrossing;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.text.InputFilter;
import android.text.Spanned;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.View;
import android.view.inputmethod.EditorInfo;
import android.widget.EditText;
import android.widget.RelativeLayout;
import android.widget.TableRow;
import android.widget.TextView;
import android.widget.Toast;
import org.json.JSONException;
import org.json.JSONObject;
public class SignUpActivity extends AppCompatActivity {
public static SignUpActivity instance;
private EditText emailView;
private EditText emailConfirmationView;
private EditText passwordView;
private EditText passwordConfirmationView;
private EditText usernameView;
private TableRow registerButton;
private TextValidator.ValidText email;
private TextValidator.ValidText emailConfirmation;
private TextValidator.ValidText password;
private TextValidator.ValidText passwordConfirmation;
private TextValidator.ValidText username;
private boolean usernameEdited = false;
private TableRow loadingTableRow;
private RelativeLayout mainLinearLayout;
TextView goToSignInButton;
DBHandler db;
JSONObject ob;
private class SignUpActionListener implements TextView.OnEditorActionListener {
@Override
public boolean onEditorAction(TextView view, int i, KeyEvent keyEvent) {
Boolean handled = false;
if (i == EditorInfo.IME_ACTION_NEXT) {
if (!email.valid) {
handled = true;
if (email.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_EMAIL_INVALID), Toast.LENGTH_LONG).show();
}
emailView.requestFocus();
} else if (!emailConfirmation.valid) {
handled = true;
if (emailConfirmation.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_EMAIL_INVALID), Toast.LENGTH_LONG).show();
}
emailConfirmationView.requestFocus();
} else if (!emailConfirmation.text.equals(email.text)) {
handled = true;
if (email.text.length() > 0 && emailConfirmation.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_EMAIL_MATCH), Toast.LENGTH_LONG).show();
}
if (email.text.length() == 0) {
emailView.requestFocus();
} else {
emailConfirmationView.requestFocus();
}
} else if (!password.valid) {
handled = true;
if (password.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_PASSWORD_INVALID), Toast.LENGTH_LONG).show();
}
passwordView.requestFocus();
} else if (!passwordConfirmation.valid) {
handled = true;
if (passwordConfirmation.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_PASSWORD_INVALID), Toast.LENGTH_LONG).show();
}
passwordConfirmationView.requestFocus();
} else if (!passwordConfirmation.text.equals(password.text)) {
handled = true;
if (password.text.length() > 0 && passwordConfirmation.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_PASSWORD_MATCH), Toast.LENGTH_LONG).show();
}
if (password.text.length() == 0) {
passwordView.requestFocus();
} else {
passwordConfirmationView.requestFocus();
}
} else if (!username.valid) {
handled = true;
if (username.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_USERNAME_INVALID), Toast.LENGTH_LONG).show();
}
usernameView.requestFocus();
} else if (!usernameEdited) {
handled = true;
usernameEdited = true;
usernameView.requestFocus();
} else if (emailConfirmation.text.equals(email.text) && passwordConfirmation.text.equals(password.text)) {
handled = true;
Toast.makeText(SignUpActivity.this, "succes", Toast.LENGTH_SHORT).show();
}
}
return handled;
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_sign_up);
findViews();
setListeners();
instance = this;
email = new TextValidator.ValidText();
emailConfirmation = new TextValidator.ValidText();
password = new TextValidator.ValidText();
passwordConfirmation = new TextValidator.ValidText();
username = new TextValidator.ValidText();
setupActivity();
}
public void setupActivity() {
emailView = (EditText) findViewById(R.id.sign_up_email_input);
emailConfirmationView = (EditText) findViewById(R.id.sign_up_email_confirmation_input);
passwordView = (EditText) findViewById(R.id.sign_up_password_input);
passwordConfirmationView = (EditText) findViewById(R.id.sign_up_password_confirmation_input);
usernameView = (EditText) findViewById(R.id.sign_up_username_input);
registerButton = (TableRow) findViewById(R.id.sign_up_button);
emailView.setText(email.text);
emailConfirmationView.setText(emailConfirmation.text);
passwordView.setText(password.text);
passwordConfirmationView.setText(passwordConfirmation.text);
usernameView.setText(username.text);
loadingTableRow = (TableRow) findViewById(R.id.loading_table_row);
mainLinearLayout = (RelativeLayout) findViewById(R.id.mainlinearlayout);
mainLinearLayout.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View view, MotionEvent ev) {
return false;
}
});
InputFilter filter = new InputFilter() {
public CharSequence filter(CharSequence source, int start, int end, Spanned dest, int dstart, int dend) {
String filtered = "";
for (int i = start; i < end; i++) {
char character = source.charAt(i);
if (!Character.isWhitespace(character)) {
filtered += character;
}
}
return filtered;
}
};
passwordConfirmationView.setFilters(new InputFilter[]{filter});
passwordView.setFilters(new InputFilter[]{filter});
emailView.setFilters(new InputFilter[]{filter});
emailConfirmationView.setFilters(new InputFilter[]{filter});
//Email
emailView.addTextChangedListener(new TextValidator(emailView, email, Constants.EMAIL_VALIDATOR_PATTERN, Constants.EMAIL_VALIDATOR_MODE, Constants.EMAIL_VALIDATOR_MIN_LEN, Constants.EMAIL_VALIDATOR_MAX_LEN));
emailView.setOnFocusChangeListener(new View.OnFocusChangeListener() {
@Override
public void onFocusChange(View view, boolean b) {
if (!view.isFocused()) {
if (!email.valid && email.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_EMAIL_INVALID), Toast.LENGTH_LONG).show();
}
}
}
});
emailConfirmationView.addTextChangedListener(new TextValidator(emailConfirmationView, emailConfirmation, Constants.EMAIL_VALIDATOR_PATTERN, Constants.EMAIL_VALIDATOR_MODE, Constants.EMAIL_VALIDATOR_MIN_LEN, Constants.EMAIL_VALIDATOR_MAX_LEN));
emailConfirmationView.setOnFocusChangeListener(new View.OnFocusChangeListener() {
@Override
public void onFocusChange(View view, boolean b) {
if (!view.isFocused()) {
if (!emailConfirmation.valid) {
if (emailConfirmation.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_EMAIL_INVALID), Toast.LENGTH_LONG).show();
}
} else if (!emailConfirmation.text.equals(email.text)) {
if (emailConfirmation.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_EMAIL_MATCH), Toast.LENGTH_LONG).show();
}
}
}
}
});
//Password
passwordView.addTextChangedListener(new TextValidator(passwordView, password, Constants.PASSWORD_VALIDATOR_PATTERN, Constants.PASSWORD_VALIDATOR_MODE, Constants.PASSWORD_VALIDATOR_MIN_LEN, Constants.PASSWORD_VALIDATOR_MAX_LEN));
passwordView.setOnFocusChangeListener(new View.OnFocusChangeListener() {
@Override
public void onFocusChange(View view, boolean b) {
if (!view.isFocused()) {
if (!password.valid && password.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_PASSWORD_INVALID), Toast.LENGTH_LONG).show();
}
}
}
});
passwordConfirmationView.addTextChangedListener(new TextValidator(passwordConfirmationView, passwordConfirmation, Constants.PASSWORD_VALIDATOR_PATTERN, Constants.PASSWORD_VALIDATOR_MODE, Constants.PASSWORD_VALIDATOR_MIN_LEN, Constants.PASSWORD_VALIDATOR_MAX_LEN));
passwordConfirmationView.setOnFocusChangeListener(new View.OnFocusChangeListener() {
@Override
public void onFocusChange(View view, boolean b) {
if (!view.isFocused()) {
if (!passwordConfirmation.valid) {
if (passwordConfirmation.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_PASSWORD_INVALID), Toast.LENGTH_LONG).show();
}
} else if (!passwordConfirmation.text.equals(password.text)) {
if (passwordConfirmation.text.length() > 0) {
}
}
}
}
});
//Username
usernameView.addTextChangedListener(new TextValidator(usernameView, username, Constants.USERNAME_VALIDATOR_PATTERN, Constants.USERNAME_VALIDATOR_MODE, Constants.USERNAME_VALIDATOR_MIN_LEN, Constants.USERNAME_VALIDATOR_MAX_LEN));
usernameView.setOnFocusChangeListener(new View.OnFocusChangeListener() {
@Override
public void onFocusChange(View view, boolean b) {
if (!view.isFocused()) {
if (!username.valid && username.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_USERNAME_INVALID), Toast.LENGTH_LONG).show();
}
}
}
});
emailView.setOnEditorActionListener(new SignUpActionListener());
emailConfirmationView.setOnEditorActionListener(new SignUpActionListener());
passwordView.setOnEditorActionListener(new SignUpActionListener());
passwordConfirmationView.setOnEditorActionListener(new SignUpActionListener());
usernameView.setOnEditorActionListener(new SignUpActionListener());
registerButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (NetworkStatus.checkNetworkStatus(SignUpActivity.this)) {
if (!email.valid) {
if (email.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_EMAIL_INVALID), Toast.LENGTH_LONG).show();
}
emailView.requestFocus();
} else if (!emailConfirmation.valid) {
if (emailConfirmation.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_EMAIL_INVALID), Toast.LENGTH_LONG).show();
}
emailConfirmationView.requestFocus();
} else if (!emailConfirmation.text.equals(email.text)) {
if (email.text.length() > 0 && emailConfirmation.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_EMAIL_MATCH), Toast.LENGTH_LONG).show();
}
if (email.text.length() == 0) {
emailView.requestFocus();
} else {
emailConfirmationView.requestFocus();
}
} else if (!password.valid) {
if (password.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_PASSWORD_INVALID), Toast.LENGTH_LONG).show();
}
passwordView.requestFocus();
} else if (!passwordConfirmation.valid) {
if (passwordConfirmation.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_PASSWORD_INVALID), Toast.LENGTH_LONG).show();
}
passwordConfirmationView.requestFocus();
} else if (!passwordConfirmation.text.equals(password.text)) {
if (password.text.length() > 0 && passwordConfirmation.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_PASSWORD_MATCH), Toast.LENGTH_LONG).show();
}
if (password.text.length() == 0) {
passwordView.requestFocus();
} else {
passwordConfirmationView.requestFocus();
}
} else if (!username.valid) {
if (username.text.length() > 0) {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.ERROR_USERNAME_INVALID), Toast.LENGTH_LONG).show();
}
usernameView.requestFocus();
} else if (!usernameEdited) {
usernameEdited = true;
signUP();
} else if (emailConfirmation.text.equals(email.text) && passwordConfirmation.text.equals(password.text)) {
signUP();
}
}else {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.no_network), Toast.LENGTH_LONG).show();
}
}
});
}
public void signUP() {
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
try {
POSTHandler han = new POSTHandler();
JSONObject par = new JSONObject();
try {
par.put("user_password", passwordView.getText().toString());
par.put("user_email", emailView.getText().toString());
par.put("user_name",usernameView.getText().toString());
} catch (JSONException e) {
e.printStackTrace();
}
ob = han.handlePOSTmethod("/user", par, true);
SignUpActivity.this.runOnUiThread(new Runnable() {
@Override
public void run() {
if (ob.has("error")) {
if(ob.has("sub_error")) {
int sub_error = 0;
try {
sub_error = ob.getInt("sub_error");
} catch (JSONException e) {
e.printStackTrace();
}
sub_error = sub_error*-1;
try {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.JUST_ERROR)+" "+ GetStringCode.getErrorResource(ob.getInt("error"), SignUpActivity.this) + getResources().getString(R.string.ADDITIONAL_ERROR_INFO)+" "+ GetStringCode.getErrorResource(sub_error, SignUpActivity.this), Toast.LENGTH_SHORT).show();
} catch (JSONException e) {
e.printStackTrace();
}
} else {
try {
Toast.makeText(SignUpActivity.this, getResources().getString(R.string.JUST_ERROR) + " " + GetStringCode.getErrorResource(ob.getInt("error"), SignUpActivity.this), Toast.LENGTH_SHORT).show();
} catch (JSONException e) {
e.printStackTrace();
}
}
//Toast.makeText(SignUpActivity.this, signInPassword.getText().toString() + " " +signInLogin.getText().toString(), Toast.LENGTH_SHORT).show();
} else {
try {
Toast.makeText(SignUpActivity.this, GetStringCode.getSuccessCode(ob.getInt("success"), SignUpActivity.this), Toast.LENGTH_SHORT).show();
} catch (JSONException e) {
e.printStackTrace();
}
db = new DBHandler(SignUpActivity.this);
db.addRecord(emailView.getText().toString());
Intent i = new Intent(SignUpActivity.this, SignInActivity.class);
i.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK|Intent.FLAG_ACTIVITY_CLEAR_TOP);
db.close();
startActivity(i);
finish();
}
}
});
} catch (Exception e) {
e.printStackTrace();
}
}
});
thread.start();
}
public void findViews(){
goToSignInButton = (TextView) findViewById(R.id.have_an_acc_textview);
}
public void setListeners(){
goToSignInButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
goToSignIn();
}
});
}
public void goToSignIn(){
Intent i = new Intent(this,SignInActivity.class);
i.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK|Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(i);
finish();
}
}
| |
package butterknife;
import butterknife.compiler.ButterKnifeProcessor;
import com.google.common.base.Joiner;
import com.google.testing.compile.JavaFileObjects;
import javax.tools.JavaFileObject;
import org.junit.Test;
import static com.google.common.truth.Truth.assertAbout;
import static com.google.testing.compile.JavaSourceSubjectFactory.javaSource;
/** This augments {@link OnClickTest} with tests that exercise callbacks with parameters. */
public class OnItemClickTest {
  // Zero-parameter @OnItemClick: the generated listener ignores all four callback args.
  @Test public void onItemClickBinding() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import butterknife.OnItemClick;",
        "public class Test extends Activity {",
        "  @OnItemClick(1) void doStuff() {}",
        "}"));
    JavaFileObject expectedSource = JavaFileObjects.forSourceString("test/Test$$ViewBinder",
        Joiner.on('\n').join(
            "package test;",
            "import android.view.View;",
            "import android.widget.AdapterView;",
            "import butterknife.internal.Finder;",
            "import butterknife.internal.ViewBinder;",
            "import java.lang.Object;",
            "import java.lang.Override;",
            "public class Test$$ViewBinder<T extends Test> implements ViewBinder<T> {",
            "  @Override public void bind(final Finder finder, final T target, Object source) {",
            "    View view;",
            "    view = finder.findRequiredView(source, 1, \"method 'doStuff'\");",
            "    ((AdapterView<?>) view).setOnItemClickListener(new AdapterView.OnItemClickListener() {",
            "      @Override public void onItemClick(AdapterView<?> p0, View p1, int p2, long p3) {",
            "        target.doStuff();",
            "      }",
            "    });",
            "  }",
            "}"
        ));
    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(expectedSource);
  }
  // Full parameter list in declared order: all four listener params are forwarded directly.
  @Test public void onItemClickBindingWithParameters() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import android.view.View;",
        "import android.widget.AdapterView;",
        "import butterknife.OnItemClick;",
        "public class Test extends Activity {",
        "  @OnItemClick(1) void doStuff(",
        "      AdapterView<?> parent,",
        "      View view,",
        "      int position,",
        "      long id",
        "  ) {}",
        "}"));
    JavaFileObject expectedSource = JavaFileObjects.forSourceString("test/Test$$ViewBinder",
        Joiner.on('\n').join(
            "package test;",
            "import android.view.View;",
            "import android.widget.AdapterView;",
            "import butterknife.internal.Finder;",
            "import butterknife.internal.ViewBinder;",
            "import java.lang.Object;",
            "import java.lang.Override;",
            "public class Test$$ViewBinder<T extends Test> implements ViewBinder<T> {",
            "  @Override public void bind(final Finder finder, final T target, Object source) {",
            "    View view;",
            "    view = finder.findRequiredView(source, 1, \"method 'doStuff'\");",
            "    ((AdapterView<?>) view).setOnItemClickListener(new AdapterView.OnItemClickListener() {",
            "      @Override public void onItemClick(AdapterView<?> p0, View p1, int p2, long p3) {",
            "        target.doStuff(p0, p1, p2, p3);",
            "      }",
            "    });",
            "  }",
            "}"
        ));
    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(expectedSource);
  }
  // Subset of parameters: unmatched listener params are dropped, and the ListView
  // parameter is narrowed via a generated castParam call.
  @Test public void onItemClickBindingWithParameterSubset() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import android.view.View;",
        "import android.widget.ListView;",
        "import butterknife.OnItemClick;",
        "public class Test extends Activity {",
        "  @OnItemClick(1) void doStuff(",
        "      ListView parent,",
        "      int position",
        "  ) {}",
        "}"));
    JavaFileObject expectedSource = JavaFileObjects.forSourceString("test/Test$$ViewBinder",
        Joiner.on('\n').join(
            "package test;",
            "import android.view.View;",
            "import android.widget.AdapterView;",
            "import android.widget.ListView;",
            "import butterknife.internal.Finder;",
            "import butterknife.internal.ViewBinder;",
            "import java.lang.Object;",
            "import java.lang.Override;",
            "public class Test$$ViewBinder<T extends Test> implements ViewBinder<T> {",
            "  @Override public void bind(final Finder finder, final T target, Object source) {",
            "    View view;",
            "    view = finder.findRequiredView(source, 1, \"method 'doStuff'\");",
            "    ((AdapterView<?>) view).setOnItemClickListener(new AdapterView.OnItemClickListener() {",
            "      @Override public void onItemClick(AdapterView<?> p0, View p1, int p2, long p3) {",
            "        target.doStuff(finder.<ListView>castParam(p0, \"onItemClick\", 0, \"doStuff\", 0), p2);",
            "      }",
            "    });",
            "  }",
            "}"
        ));
    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(expectedSource);
  }
  // Type-variable parameter (T extends ListView): the cast target is the variable's bound.
  @Test public void onItemClickBindingWithParameterSubsetAndGenerics() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import android.view.View;",
        "import android.widget.ListView;",
        "import butterknife.OnItemClick;",
        "public class Test<T extends ListView> extends Activity {",
        "  @OnItemClick(1) void doStuff(",
        "      T parent,",
        "      int position",
        "  ) {}",
        "}"));
    JavaFileObject expectedSource = JavaFileObjects.forSourceString("test/Test$$ViewBinder",
        Joiner.on('\n').join(
            "package test;",
            "import android.view.View;",
            "import android.widget.AdapterView;",
            "import android.widget.ListView;",
            "import butterknife.internal.Finder;",
            "import butterknife.internal.ViewBinder;",
            "import java.lang.Object;",
            "import java.lang.Override;",
            "public class Test$$ViewBinder<T extends Test> implements ViewBinder<T> {",
            "  @Override public void bind(final Finder finder, final T target, Object source) {",
            "    View view;",
            "    view = finder.findRequiredView(source, 1, \"method 'doStuff'\");",
            "    ((AdapterView<?>) view).setOnItemClickListener(new AdapterView.OnItemClickListener() {",
            "      @Override public void onItemClick(AdapterView<?> p0, View p1, int p2, long p3) {",
            "        target.doStuff(finder.<ListView>castParam(p0, \"onItemClick\", 0, \"doStuff\", 0), p2);",
            "      }",
            "    });",
            "  }",
            "}"
        ));
    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(expectedSource);
  }
  // No ID argument: the annotated class itself (a ListView subclass) becomes the bound view.
  @Test public void onClickRootViewBinding() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.content.Context;",
        "import android.widget.ListView;",
        "import butterknife.OnItemClick;",
        "public class Test extends ListView {",
        "  @OnItemClick void doStuff() {}",
        "  public Test(Context context) {",
        "    super(context);",
        "  }",
        "}"));
    JavaFileObject expectedSource = JavaFileObjects.forSourceString("test/Test$$ViewBinder",
        Joiner.on('\n').join(
            "package test;",
            "import android.view.View;",
            "import android.widget.AdapterView;",
            "import butterknife.internal.Finder;",
            "import butterknife.internal.ViewBinder;",
            "import java.lang.Object;",
            "import java.lang.Override;",
            "public class Test$$ViewBinder<T extends Test> implements ViewBinder<T> {",
            "  @Override public void bind(final Finder finder, final T target, Object source) {",
            "    View view;",
            "    view = target;",
            "    ((AdapterView<?>) view).setOnItemClickListener(new AdapterView.OnItemClickListener() {",
            "      @Override public void onItemClick(AdapterView<?> p0, View p1, int p2, long p3) {",
            "        target.doStuff();",
            "      }",
            "    });",
            "  }",
            "}"
        ));
    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(expectedSource);
  }
  // Negative view IDs (other than the no-ID sentinel) are rejected at compile time.
  @Test public void failsWithInvalidId() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.content.Context;",
        "import android.app.Activity;",
        "import butterknife.OnItemClick;",
        "public class Test extends Activity {",
        "  @OnItemClick({1, -1}) void doStuff() {}",
        "}"));
    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .failsToCompile()
        .withErrorContaining("@OnItemClick annotation contains invalid ID -1. (test.Test.doStuff)")
        .in(source).onLine(6);
  }
  // A second View parameter cannot be matched once p1 is consumed; the processor
  // must explain exactly which parameters matched and which did not.
  @Test public void failsWithInvalidParameterConfiguration() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n').join(
        "package test;",
        "import android.app.Activity;",
        "import android.view.View;",
        "import android.widget.AdapterView;",
        "import butterknife.OnItemClick;",
        "public class Test extends Activity {",
        "  @OnItemClick(1) void doStuff(",
        "      AdapterView<?> parent,",
        "      View view,",
        "      View whatIsThis",
        "  ) {}",
        "}"));
    assertAbout(javaSource()).that(source)
        .processedWith(new ButterKnifeProcessor())
        .failsToCompile()
        .withErrorContaining(Joiner.on('\n').join(
            "Unable to match @OnItemClick method arguments. (test.Test.doStuff)",
            "  ",
            "    Parameter #1: android.widget.AdapterView<?>",
            "      matched listener parameter #1: android.widget.AdapterView<?>",
            "  ",
            "    Parameter #2: android.view.View",
            "      matched listener parameter #2: android.view.View",
            "  ",
            "    Parameter #3: android.view.View",
            "      did not match any listener parameters",
            "  ",
            "  Methods may have up to 4 parameter(s):",
            "  ",
            "    android.widget.AdapterView<?>",
            "    android.view.View",
            "    int",
            "    long",
            "  ",
            "  These may be listed in any order but will be searched for from top to bottom."))
        .in(source).onLine(7);
  }
}
| |
/*
* (c) Copyright 2018 Palantir Technologies Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.atlasdb.logging;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.palantir.atlasdb.AtlasDbConstants;
import com.palantir.atlasdb.encoding.PtBytes;
import com.palantir.atlasdb.keyvalue.api.BatchColumnRangeSelection;
import com.palantir.atlasdb.keyvalue.api.ColumnRangeSelection;
import com.palantir.atlasdb.keyvalue.api.RangeRequest;
import com.palantir.atlasdb.keyvalue.api.TableReference;
import com.palantir.atlasdb.keyvalue.impl.AbstractKeyValueService;
import com.palantir.atlasdb.protos.generated.TableMetadataPersistence;
import com.palantir.atlasdb.table.description.TableMetadata;
import com.palantir.logsafe.Arg;
import com.palantir.logsafe.SafeArg;
import com.palantir.logsafe.UnsafeArg;
import java.util.List;
import java.util.Map;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Mockito;
/**
 * Tests for {@code LoggingArgs}, which wraps key-value-service identifiers (table
 * references, rows, columns, ranges) in safe-for-logging or unsafe {@link Arg}s.
 *
 * <p>A mock {@link KeyValueServiceLogArbitrator} is installed for the whole class; it
 * treats any table/row/column name containing the substring "safe" as loggable, and an
 * all-unsafe arbitrator is restored when the class finishes.
 *
 * <p>NOTE(review): these tests mutate static state in {@code LoggingArgs} (the log
 * arbitrator); tests that change it restore the mock afterwards.
 */
public class LoggingArgsTest {
    private static final String ARG_NAME = "argName";

    // "foo.safe" contains "safe" and is therefore deemed loggable by the mock
    // arbitrator configured in setUpMocks(); "foo.bar" is not.
    private static final TableReference SAFE_TABLE_REFERENCE = TableReference.createFromFullyQualifiedName("foo.safe");
    private static final TableReference UNSAFE_TABLE_REFERENCE = TableReference.createFromFullyQualifiedName("foo.bar");
    private static final List<TableReference> LIST_OF_SAFE_AND_UNSAFE_TABLE_REFERENCES =
            Lists.newArrayList(SAFE_TABLE_REFERENCE, UNSAFE_TABLE_REFERENCE);

    // Metadata blobs used to exercise hydrate(): one generic (safe) and one explicitly
    // marked unsafe for logging.
    private static final byte[] SAFE_TABLE_METADATA = AtlasDbConstants.GENERIC_TABLE_METADATA;
    private static final byte[] UNSAFE_TABLE_METADATA = TableMetadata.builder()
            .nameLogSafety(TableMetadataPersistence.LogSafety.UNSAFE)
            .build()
            .persistToBytes();
    private static final Map<TableReference, byte[]> TABLE_REF_TO_METADATA = ImmutableMap.of(
            SAFE_TABLE_REFERENCE, SAFE_TABLE_METADATA,
            UNSAFE_TABLE_REFERENCE, UNSAFE_TABLE_METADATA);

    // Row/column fixtures; safety again hinges on the "safe" substring.
    private static final String SAFE_ROW_NAME = "saferow";
    private static final String UNSAFE_ROW_NAME = "row";
    private static final String SAFE_COLUMN_NAME = "safecolumn";
    private static final String SAFE_COLUMN_NAME_2 = "safecolumn2";
    private static final byte[] SAFE_ROW_NAME_BYTES = PtBytes.toBytes(SAFE_ROW_NAME);
    private static final byte[] UNSAFE_ROW_NAME_BYTES = PtBytes.toBytes(UNSAFE_ROW_NAME);
    private static final byte[] SAFE_COLUMN_NAME_BYTES = PtBytes.toBytes(SAFE_COLUMN_NAME);
    private static final byte[] SAFE_COLUMN_NAME_BYTES_2 = PtBytes.toBytes(SAFE_COLUMN_NAME_2);

    private static final RangeRequest SAFE_RANGE_REQUEST = RangeRequest.builder()
            .retainColumns(ImmutableList.of(SAFE_ROW_NAME_BYTES))
            .build();
    private static final RangeRequest UNSAFE_RANGE_REQUEST = RangeRequest.builder()
            .retainColumns(ImmutableList.of(UNSAFE_ROW_NAME_BYTES))
            .build();
    private static final RangeRequest MIXED_RANGE_REQUEST = RangeRequest.builder()
            .retainColumns(ImmutableList.of(SAFE_ROW_NAME_BYTES, UNSAFE_ROW_NAME_BYTES))
            .build();

    private static final ColumnRangeSelection SAFE_COLUMN_RANGE =
            new ColumnRangeSelection(SAFE_COLUMN_NAME_BYTES, SAFE_COLUMN_NAME_BYTES_2);
    private static final BatchColumnRangeSelection SAFE_BATCH_COLUMN_RANGE =
            BatchColumnRangeSelection.create(SAFE_COLUMN_RANGE, 1);

    public static final boolean ALL_SAFE_FOR_LOGGING = true;
    public static final boolean NOT_ALL_SAFE_FOR_LOGGING = false;

    private static final KeyValueServiceLogArbitrator arbitrator = Mockito.mock(KeyValueServiceLogArbitrator.class);

    /**
     * Configures the mock arbitrator: any name containing "safe" is loggable.
     */
    @BeforeClass
    public static void setUpMocks() {
        when(arbitrator.isTableReferenceSafe(any())).thenAnswer(invocation -> {
            TableReference tableReference = (TableReference) invocation.getArguments()[0];
            return tableReference.getQualifiedName().contains("safe");
        });
        // Technically this may be inconsistent with the above, but this will do for our testing purposes
        when(arbitrator.isInternalTableReferenceSafe(any())).thenAnswer(invocation -> {
            String internalTableReference = (String) invocation.getArguments()[0];
            return internalTableReference.contains("safe");
        });
        when(arbitrator.isRowComponentNameSafe(any(), any(String.class))).thenAnswer(invocation -> {
            String rowName = (String) invocation.getArguments()[1];
            return rowName.contains("safe");
        });
        when(arbitrator.isColumnNameSafe(any(), any(String.class))).thenAnswer(invocation -> {
            String columnName = (String) invocation.getArguments()[1];
            return columnName.contains("safe");
        });
        LoggingArgs.setLogArbitrator(arbitrator);
    }

    /**
     * Restores the conservative all-unsafe default so other test classes are unaffected.
     */
    @AfterClass
    public static void tearDownClass() {
        LoggingArgs.setLogArbitrator(KeyValueServiceLogArbitrator.ALL_UNSAFE);
    }

    @Test
    public void returnsSafeInternalTableNameCorrectly() {
        Arg<String> internalTableNameArg = LoggingArgs.internalTableName(SAFE_TABLE_REFERENCE);
        assertThat(internalTableNameArg.getName()).isEqualTo("tableRef");
        assertThat(internalTableNameArg.getValue())
                .isEqualTo(AbstractKeyValueService.internalTableName(SAFE_TABLE_REFERENCE));
        assertThat(internalTableNameArg).isInstanceOf(SafeArg.class);
    }

    @Test
    public void returnsUnsafeInternalTableNameCorrectly() {
        // Unsafe tables get a distinct arg name ("unsafeTableRef") and an UnsafeArg wrapper.
        Arg<String> internalTableNameArg = LoggingArgs.internalTableName(UNSAFE_TABLE_REFERENCE);
        assertThat(internalTableNameArg.getName()).isEqualTo("unsafeTableRef");
        assertThat(internalTableNameArg.getValue())
                .isEqualTo(AbstractKeyValueService.internalTableName(UNSAFE_TABLE_REFERENCE));
        assertThat(internalTableNameArg).isInstanceOf(UnsafeArg.class);
    }

    @Test
    public void propagatesNameAndTableReferenceIfSafe() {
        Arg<String> tableReferenceArg = LoggingArgs.tableRef(ARG_NAME, SAFE_TABLE_REFERENCE);
        assertThat(tableReferenceArg.getName()).isEqualTo(ARG_NAME);
        assertThat(tableReferenceArg.getValue()).isEqualTo(SAFE_TABLE_REFERENCE.toString());
    }

    @Test
    public void canReturnBothSafeAndUnsafeTableReferences() {
        assertThat(LoggingArgs.tableRef(ARG_NAME, SAFE_TABLE_REFERENCE)).isInstanceOf(SafeArg.class);
        assertThat(LoggingArgs.tableRef(ARG_NAME, UNSAFE_TABLE_REFERENCE)).isInstanceOf(UnsafeArg.class);
    }

    @Test
    @SuppressWarnings("CheckReturnValue") // We test that returnedArgs will contain both a safe and unsafe references.
    public void canReturnListOfSafeTableReferences() {
        // A mixed list is partitioned into a safe arg and an unsafe arg.
        LoggingArgs.SafeAndUnsafeTableReferences returnedArgs =
                LoggingArgs.tableRefs(LIST_OF_SAFE_AND_UNSAFE_TABLE_REFERENCES);
        assertThat(returnedArgs.safeTableRefs().getValue()).contains(SAFE_TABLE_REFERENCE);
        assertThat(returnedArgs.unsafeTableRefs().getValue()).contains(UNSAFE_TABLE_REFERENCE);
    }

    @Test
    public void returnsSafeRangeWhenAllSafe() {
        assertThat(LoggingArgs.range(SAFE_TABLE_REFERENCE, SAFE_RANGE_REQUEST)).isInstanceOf(SafeArg.class);
    }

    @Test
    public void returnsUnsafeRangeWhenAllColumnsUnsafe() {
        assertThat(LoggingArgs.range(SAFE_TABLE_REFERENCE, UNSAFE_RANGE_REQUEST))
                .isInstanceOf(UnsafeArg.class);
    }

    @Test
    public void returnsUnsafeRangeEvenWhenContainsSafeColumns() {
        // A single unsafe column taints the whole range.
        assertThat(LoggingArgs.range(SAFE_TABLE_REFERENCE, MIXED_RANGE_REQUEST)).isInstanceOf(UnsafeArg.class);
    }

    @Test
    public void returnsUnsafeColumnRangeEvenWhenContainsSafeColumns() {
        // Column range selections are always treated as unsafe, even for safe column names.
        assertThat(LoggingArgs.columnRangeSelection(SAFE_COLUMN_RANGE)).isInstanceOf(UnsafeArg.class);
    }

    @Test
    public void returnsUnsafeBatchColumnRangeEvenWhenContainsSafeColumns() {
        assertThat(LoggingArgs.batchColumnRangeSelection(SAFE_BATCH_COLUMN_RANGE))
                .isInstanceOf(UnsafeArg.class);
    }

    @Test
    public void returnsSafeTableWhenTableIsSafe() {
        assertThat(LoggingArgs.safeTableOrPlaceholder(SAFE_TABLE_REFERENCE)).isEqualTo(SAFE_TABLE_REFERENCE);
    }

    @Test
    public void returnsPlaceholderWhenTableIsUnsafe() {
        assertThat(LoggingArgs.safeTableOrPlaceholder(UNSAFE_TABLE_REFERENCE))
                .isEqualTo(LoggingArgs.PLACEHOLDER_TABLE_REFERENCE);
    }

    @Test
    public void returnsTablesAndPlaceholderWhenTablesAreSafeAndUnsafe() {
        // Safe tables pass through; unsafe tables are replaced by the shared placeholder.
        List<TableReference> tables = ImmutableList.of(SAFE_TABLE_REFERENCE, UNSAFE_TABLE_REFERENCE);
        List<TableReference> returnedList = Lists.newArrayList(LoggingArgs.safeTablesOrPlaceholder(tables));
        List<TableReference> expectedList =
                Lists.newArrayList(SAFE_TABLE_REFERENCE, LoggingArgs.PLACEHOLDER_TABLE_REFERENCE);
        assertThat(returnedList).containsOnly(expectedList.toArray(new TableReference[expectedList.size()]));
    }

    @Test
    public void hydrateDoesNotThrowOnInvalidMetadata() {
        LoggingArgs.hydrate(ImmutableMap.of(SAFE_TABLE_REFERENCE, AtlasDbConstants.EMPTY_TABLE_METADATA));
        // Restore the mock arbitrator — hydrate appears to install its own; confirm.
        LoggingArgs.setLogArbitrator(arbitrator);
    }

    @Test
    public void allSafeForLoggingIsOnlyTrueWhenAllKeyValueServicesAreTrue() {
        assertThat(LoggingArgs.isSafe(SAFE_TABLE_REFERENCE)).isTrue();
        assertThat(LoggingArgs.isSafe(UNSAFE_TABLE_REFERENCE)).isFalse();
        // If the initial allSafeForLogging is true, use ALL_SAFE log arbitrator
        LoggingArgs.combineAndSetNewAllSafeForLoggingFlag(ALL_SAFE_FOR_LOGGING);
        assertThat(LoggingArgs.isSafe(SAFE_TABLE_REFERENCE)).isTrue();
        assertThat(LoggingArgs.isSafe(UNSAFE_TABLE_REFERENCE)).isTrue();
        // If a new keyValueService is not all safe for logging, take the safe data info during hydrate
        LoggingArgs.combineAndSetNewAllSafeForLoggingFlag(NOT_ALL_SAFE_FOR_LOGGING);
        LoggingArgs.hydrate(TABLE_REF_TO_METADATA);
        assertThat(LoggingArgs.isSafe(SAFE_TABLE_REFERENCE)).isTrue();
        assertThat(LoggingArgs.isSafe(UNSAFE_TABLE_REFERENCE)).isFalse();
        // Even if a new keyValueService is all safe for logging now, if won't change the safe data info
        LoggingArgs.combineAndSetNewAllSafeForLoggingFlag(ALL_SAFE_FOR_LOGGING);
        assertThat(LoggingArgs.isSafe(SAFE_TABLE_REFERENCE)).isTrue();
        assertThat(LoggingArgs.isSafe(UNSAFE_TABLE_REFERENCE)).isFalse();
        // Restore the mock arbitrator for subsequent tests.
        LoggingArgs.setLogArbitrator(arbitrator);
    }
}
| |
package io.georocket.commands;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.file.Paths;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.Queue;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.SystemUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.tools.ant.DirectoryScanner;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.types.FileSet;
import com.google.common.base.Splitter;
import de.undercouch.underline.InputReader;
import de.undercouch.underline.Option.ArgumentType;
import de.undercouch.underline.OptionDesc;
import de.undercouch.underline.OptionParserException;
import de.undercouch.underline.UnknownAttributes;
import io.georocket.client.GeoRocketClient;
import io.georocket.util.DurationFormat;
import io.vertx.core.AsyncResult;
import io.vertx.core.Future;
import io.vertx.core.Handler;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.file.AsyncFile;
import io.vertx.core.file.OpenOptions;
import io.vertx.core.streams.Pump;
import io.vertx.core.streams.WriteStream;
import io.vertx.rx.java.ObservableFuture;
import io.vertx.rx.java.RxHelper;
import io.vertx.rxjava.core.Vertx;
import io.vertx.rxjava.core.file.FileSystem;
import rx.Observable;
/**
* Import one or more files into GeoRocket
* @author Michel Kraemer
*/
public class ImportCommand extends AbstractGeoRocketCommand {
  // File patterns (possibly containing glob characters) naming the files to import
  protected List<String> patterns;
  // Tags to attach to every imported file, or null if none were given
  protected List<String> tags;
  // key:value properties to attach to every imported file, or null if none were given
  protected List<String> properties;
  // Absolute path of the destination layer, or null for the default layer
  protected String layer;
  // CRS used for indexing when a file does not specify one, or null
  protected String fallbackCRS;

  /**
   * Set the patterns of the files to import
   * @param patterns the file patterns
   */
  @UnknownAttributes("FILE PATTERN")
  public void setPatterns(List<String> patterns) {
    this.patterns = patterns;
  }

  /**
   * Set the tags to attach to the imported file
   * @param tags the tags as a comma-separated string; each entry is trimmed.
   * A null or empty string clears the tags.
   */
  @OptionDesc(longName = "tags", shortName = "t",
      description = "comma-separated list of tags to attach to the file(s)",
      argumentName = "TAGS", argumentType = ArgumentType.STRING)
  public void setTags(String tags) {
    if (tags == null || tags.isEmpty()) {
      this.tags = null;
    } else {
      this.tags = Stream.of(tags.split(","))
          .map(t -> t.trim())
          .collect(Collectors.toList());
    }
  }

  /**
   * Set the properties to attach to the imported file
   * @param properties the properties as a comma-separated string of key:value
   * pairs; each entry is trimmed. A null or empty string clears the properties.
   */
  @OptionDesc(longName = "properties", shortName = "props",
      description = "comma-separated list of properties (key:value) to attach to the file(s)",
      argumentName = "PROPERTIES", argumentType = ArgumentType.STRING)
  public void setProperties(String properties) {
    if (properties == null || properties.isEmpty()) {
      this.properties = null;
    } else {
      this.properties = Splitter.on(",").trimResults().splitToList(properties);
    }
  }

  /**
   * Set the absolute path to the layer to search
   * @param layer the layer
   */
  @OptionDesc(longName = "layer", shortName = "l",
      description = "absolute path to the destination layer",
      argumentName = "PATH", argumentType = ArgumentType.STRING)
  public void setLayer(String layer) {
    this.layer = layer;
  }

  /**
   * Set the fallback crs if the file does not specify a CRS
   * @param fallbackCRS the fallback CRS
   */
  @OptionDesc(longName = "fallbackCRS", shortName = "c",
      description = "the CRS to use for indexing if the file does not specify one",
      argumentName = "CRS", argumentType = ArgumentType.STRING)
  public void setFallbackCRS(String fallbackCRS) {
    this.fallbackCRS = fallbackCRS;
  }

  @Override
  public String getUsageName() {
    return "import";
  }

  @Override
  public String getUsageDescription() {
    return "Import one or more files into GeoRocket";
  }

  @Override
  public boolean checkArguments() {
    // at least one file pattern is mandatory; everything else is optional
    if (patterns == null || patterns.isEmpty()) {
      error("no file pattern given. provide at least one file to import.");
      return false;
    }
    return super.checkArguments();
  }

  /**
   * Check if the given string contains a glob character ('*', '{', '?', or '[')
   * @param s the string
   * @return true if the string contains a glob character, false otherwise
   */
  private boolean hasGlobCharacter(String s) {
    for (int i = 0; i < s.length(); ++i) {
      char c = s.charAt(i);
      if (c == '\\') {
        // skip the escaped character so it is not treated as a glob character
        ++i;
        continue;
      }
      if (c == '*' || c == '{' || c == '?' || c == '[') {
        return true;
      }
    }
    return false;
  }

  /**
   * Resolves the configured file patterns to concrete paths and imports them
   * sequentially, reporting progress and total duration on the console.
   *
   * <p>NOTE(review): progress is written to {@code System.out} rather than the
   * supplied {@code PrintWriter out} — presumably intentional for CLI output;
   * confirm.
   */
  @Override
  public void doRun(String[] remainingArgs, InputReader in, PrintWriter out,
      Handler<Integer> handler) throws OptionParserException, IOException {
    long start = System.currentTimeMillis();

    // resolve file patterns
    Queue<String> queue = new ArrayDeque<>();
    for (String p : patterns) {
      // convert Windows backslashes to slashes (necessary for Files.newDirectoryStream())
      if (SystemUtils.IS_OS_WINDOWS) {
        p = FilenameUtils.separatorsToUnix(p);
      }

      // split the pattern into a literal root path (leading segments without glob
      // characters) and the remaining glob segments
      List<String> roots = new ArrayList<>();
      List<String> globs = new ArrayList<>();
      String[] parts = p.split("/");
      boolean rootParsed = false;
      for (String part : parts) {
        if (!rootParsed) {
          if (hasGlobCharacter(part)) {
            globs.add(part);
            rootParsed = true;
          } else {
            roots.add(part);
          }
        } else {
          globs.add(part);
        }
      }

      if (globs.isEmpty()) {
        // string does not contain a glob pattern at all
        queue.add(p);
      } else {
        // string contains a glob pattern
        if (roots.isEmpty()) {
          // there are no paths in the string. start from the current
          // working directory
          roots.add(".");
        }

        // add all files matching the pattern (scanned with Ant's DirectoryScanner)
        String root = String.join("/", roots);
        String glob = String.join("/", globs);
        Project project = new Project();
        FileSet fs = new FileSet();
        fs.setDir(new File(root));
        fs.setIncludes(glob);
        DirectoryScanner ds = fs.getDirectoryScanner(project);
        Arrays.stream(ds.getIncludedFiles())
          .map(path -> Paths.get(root, path).toString())
          .forEach(queue::add);
      }
    }

    if (queue.isEmpty()) {
      error("given pattern didn't match any files");
      return;
    }

    Vertx vertx = new Vertx(this.vertx);
    GeoRocketClient client = createClient();

    int queueSize = queue.size();
    doImport(queue, client, vertx, exitCode -> {
      client.close();
      if (exitCode == 0) {
        // pluralize "file" for the summary message
        String m = "file";
        if (queueSize > 1) {
          m += "s";
        }
        System.out.println("Successfully imported " + queueSize + " " +
            m + " in " + DurationFormat.formatUntilNow(start));
      }
      handler.handle(exitCode);
    });
  }

  /**
   * Import files using a HTTP client and finally call a handler.
   * Recurses on success; the first failed import aborts the remaining queue
   * and reports exit code 1.
   * @param files the files to import
   * @param client the GeoRocket client
   * @param vertx the Vert.x instance
   * @param handler the handler to call when all files have been imported
   */
  private void doImport(Queue<String> files, GeoRocketClient client,
      Vertx vertx, Handler<Integer> handler) {
    if (files.isEmpty()) {
      // all files imported successfully
      handler.handle(0);
      return;
    }

    // get the first file to import
    String path = files.poll();

    // print file name
    System.out.print("Importing " + Paths.get(path).getFileName() + " ... ");

    // import file
    importFile(path, client, vertx)
      .subscribe(v -> {
        System.out.println("done");
        // import next file in the queue
        doImport(files, client, vertx, handler);
      }, err -> {
        System.out.println("error");
        error(err.getMessage());
        handler.handle(1);
      });
  }

  /**
   * Upload a file to GeoRocket
   * @param path path to file to import
   * @param client the GeoRocket client
   * @param vertx the Vert.x instance
   * @return an observable that will emit when the file has been uploaded
   */
  protected Observable<Void> importFile(String path, GeoRocketClient client, Vertx vertx) {
    // open file
    FileSystem fs = vertx.fileSystem();
    OpenOptions openOptions = new OpenOptions().setCreate(false).setWrite(false);
    return fs.rxOpen(path, openOptions)
      // get file size
      .flatMap(f -> fs.rxProps(path).map(props -> Pair.of(f, props.size())))
      // import file
      .flatMapObservable(f -> {
        ObservableFuture<Void> o = RxHelper.observableFuture();
        // completes (or fails) the observable when the server finishes the import
        Handler<AsyncResult<Void>> handler = o.toHandler();
        AsyncFile file = f.getLeft().getDelegate();

        // start the import on the server; the file's contents are pumped into
        // the returned write stream
        WriteStream<Buffer> out = client.getStore()
            .startImport(layer, tags, properties, Optional.of(f.getRight()),
                fallbackCRS, handler);

        // tracks whether the end handler already closed the file, so the
        // exception handler does not close it a second time
        AtomicBoolean fileClosed = new AtomicBoolean();

        Pump pump = Pump.pump(file, out);
        file.endHandler(v -> {
          file.close();
          out.end();
          fileClosed.set(true);
        });

        Handler<Throwable> exceptionHandler = t -> {
          if (!fileClosed.get()) {
            // detach the end handler before closing so it is not triggered by the close
            file.endHandler(null);
            file.close();
          }
          // fail the observable with the cause
          handler.handle(Future.failedFuture(t));
        };
        file.exceptionHandler(exceptionHandler);
        out.exceptionHandler(exceptionHandler);

        pump.start();
        return o;
      });
  }
}
| |
/*
* Copyright (c) 2016 Uber Technologies, Inc. (hoodie-dev-group@uber.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.uber.hoodie.io;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;

import com.uber.hoodie.HoodieWriteClient;
import com.uber.hoodie.WriteStatus;
import com.uber.hoodie.common.HoodieClientTestUtils;
import com.uber.hoodie.common.HoodieTestDataGenerator;
import com.uber.hoodie.common.model.HoodieRecord;
import com.uber.hoodie.common.model.HoodieTestUtils;
import com.uber.hoodie.common.model.HoodieWriteStat;
import com.uber.hoodie.common.table.HoodieTableMetaClient;
import com.uber.hoodie.common.table.HoodieTimeline;
import com.uber.hoodie.common.table.timeline.HoodieActiveTimeline;
import com.uber.hoodie.common.util.FSUtils;
import com.uber.hoodie.config.HoodieCompactionConfig;
import com.uber.hoodie.config.HoodieIndexConfig;
import com.uber.hoodie.config.HoodieStorageConfig;
import com.uber.hoodie.config.HoodieWriteConfig;
import com.uber.hoodie.index.HoodieIndex;
import com.uber.hoodie.table.HoodieTable;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Stream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
/**
 * Functional tests around {@code HoodieMergeHandle}: verifies that upserts update every
 * copy of a duplicated record key across files, and that {@code HoodieWriteStat} metrics
 * (numWrites / numUpdateWrites / numInserts) are reported correctly for insert, upsert
 * and small-file-handled commits.
 */
@SuppressWarnings("unchecked")
public class TestHoodieMergeHandle {

  protected transient JavaSparkContext jsc = null;
  protected transient SQLContext sqlContext;
  protected transient FileSystem fs;
  // Absolute path of the temporary folder backing the test table
  protected String basePath = null;
  protected transient HoodieTestDataGenerator dataGen = null;

  @Before
  public void init() throws IOException {
    // Initialize a local spark env
    jsc = new JavaSparkContext(HoodieClientTestUtils.getSparkConfForTest("TestHoodieMergeHandle"));
    // SQLContext stuff
    sqlContext = new SQLContext(jsc);
    // Create a temp folder as the base path
    TemporaryFolder folder = new TemporaryFolder();
    folder.create();
    basePath = folder.getRoot().getAbsolutePath();
    fs = FSUtils.getFs(basePath, jsc.hadoopConfiguration());
    HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath);
    dataGen = new HoodieTestDataGenerator();
  }

  @After
  public void clean() {
    // File#delete() only removes files and *empty* directories, so the original
    // single delete() silently leaked all test data. Walk the tree in reverse
    // order (children before parents) to actually remove it. Cleanup is best
    // effort and must never mask a test failure.
    if (basePath != null) {
      try (Stream<Path> paths = Files.walk(Paths.get(basePath))) {
        paths.sorted(Comparator.reverseOrder()).map(Path::toFile).forEach(File::delete);
      } catch (IOException e) {
        // best-effort cleanup of the temp folder; ignore failures
      }
    }
    if (jsc != null) {
      jsc.stop();
    }
  }

  @Test
  public void testUpsertsForMultipleRecordsInSameFile() throws Exception {
    // Create records in a single partition
    String partitionPath = HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[0];
    dataGen = new HoodieTestDataGenerator(new String[]{partitionPath});

    // Build a write config with bulkinsertparallelism set
    HoodieWriteConfig cfg = getConfigBuilder().build();
    HoodieWriteClient client = new HoodieWriteClient(jsc, cfg);

    /*
     * Write 1 (only inserts)
     * This will do a bulk insert of 44 records of which there are 2 records repeated 21 times each.
     * id1 (21 records), id2 (21 records), id3, id4
     */
    String newCommitTime = "001";
    client.startCommitWithTime(newCommitTime);
    List<HoodieRecord> records = dataGen.generateInserts(newCommitTime, 4);
    HoodieRecord record1 = records.get(0);
    HoodieRecord record2 = records.get(1);
    // duplicate record1 and record2 twenty more times each
    for (int i = 0; i < 20; i++) {
      records.add(dataGen.generateUpdateRecord(record1.getKey(), newCommitTime));
    }
    for (int i = 0; i < 20; i++) {
      records.add(dataGen.generateUpdateRecord(record2.getKey(), newCommitTime));
    }
    JavaRDD<HoodieRecord> writeRecords = jsc.parallelize(records, 1);
    List<WriteStatus> statuses = client.bulkInsert(writeRecords, newCommitTime).collect();
    assertNoWriteErrors(statuses);

    // verify that there is a commit
    HoodieTableMetaClient metaClient = new HoodieTableMetaClient(jsc.hadoopConfiguration(), basePath);
    HoodieTimeline timeline = new HoodieActiveTimeline(metaClient).getCommitTimeline();
    assertEquals("Expecting a single commit.", 1, timeline.findInstantsAfter("000", Integer.MAX_VALUE).countInstants());
    assertEquals("Latest commit should be 001", newCommitTime, timeline.lastInstant().get().getTimestamp());
    assertEquals("Must contain 44 records",
        records.size(),
        HoodieClientTestUtils.readCommit(basePath, sqlContext, timeline, newCommitTime).count());

    /*
     * Write 2 (insert)
     * This will do a bulk insert of 1 record with the same row_key as record1 in the previous insert - id1.
     * At this point, we will have 2 files with the row_keys as shown here -
     * File 1 - id1 (21 records), id2 (21 records), id3, id4
     * File 2 - id1
     */
    newCommitTime = "002";
    client.startCommitWithTime(newCommitTime);
    // Do 1 more bulk insert with the same dup record1
    List<HoodieRecord> newRecords = new ArrayList<>();
    HoodieRecord sameAsRecord1 = dataGen.generateUpdateRecord(record1.getKey(), newCommitTime);
    newRecords.add(sameAsRecord1);
    writeRecords = jsc.parallelize(newRecords, 1);
    statuses = client.bulkInsert(writeRecords, newCommitTime).collect();
    assertNoWriteErrors(statuses);

    // verify that there are 2 commits
    metaClient = new HoodieTableMetaClient(jsc.hadoopConfiguration(), basePath);
    timeline = new HoodieActiveTimeline(metaClient).getCommitTimeline();
    assertEquals("Expecting two commits.", 2, timeline.findInstantsAfter("000", Integer.MAX_VALUE)
        .countInstants());
    assertEquals("Latest commit should be 002", newCommitTime, timeline.lastInstant().get().getTimestamp());
    Dataset<Row> dataSet = getRecords();
    assertEquals("Must contain 45 records", 45, dataSet.count());

    /*
     * Write 3 (insert)
     * This will bulk insert 2 new completely new records.
     * At this point, we will have 2 files with the row_keys as shown here -
     * File 1 - id1 (21 records), id2 (21 records), id3, id4
     * File 2 - id1
     * File 3 - id5, id6
     */
    newCommitTime = "003";
    client.startCommitWithTime(newCommitTime);
    newRecords = dataGen.generateInserts(newCommitTime, 2);
    writeRecords = jsc.parallelize(newRecords, 1);
    statuses = client.bulkInsert(writeRecords, newCommitTime).collect();
    assertNoWriteErrors(statuses);

    // verify that there are now 3 commits
    metaClient = new HoodieTableMetaClient(jsc.hadoopConfiguration(), basePath);
    timeline = new HoodieActiveTimeline(metaClient).getCommitTimeline();
    assertEquals("Expecting three commits.", 3, timeline.findInstantsAfter("000", Integer.MAX_VALUE)
        .countInstants());
    assertEquals("Latest commit should be 003", newCommitTime, timeline.lastInstant().get().getTimestamp());
    dataSet = getRecords();
    assertEquals("Must contain 47 records", 47, dataSet.count());

    /*
     * Write 4 (updates)
     * This will generate 2 upsert records with id1 and id2. The rider and driver names in the update records
     * will be rider-004 and driver-004.
     * After the upsert is complete, all the records with id1 in File 1 and File 2 must be updated, all the records
     * with id2 in File 2 must also be updated.
     * Also, none of the other records in File 1, File 2 and File 3 must be updated.
     */
    newCommitTime = "004";
    client.startCommitWithTime(newCommitTime);
    List<HoodieRecord> updateRecords = new ArrayList<>();

    // This exists in 001 and 002 and should be updated in both
    sameAsRecord1 = dataGen.generateUpdateRecord(record1.getKey(), newCommitTime);
    updateRecords.add(sameAsRecord1);

    // This exists in 001 and should be updated
    HoodieRecord sameAsRecord2 = dataGen.generateUpdateRecord(record2.getKey(), newCommitTime);
    updateRecords.add(sameAsRecord2);
    JavaRDD<HoodieRecord> updateRecordsRDD = jsc.parallelize(updateRecords, 1);
    statuses = client.upsert(updateRecordsRDD, newCommitTime).collect();

    // Verify there are no errors
    assertNoWriteErrors(statuses);

    // verify there are now 4 commits
    timeline = new HoodieActiveTimeline(metaClient).getCommitTimeline();
    assertEquals("Expecting four commits.", 4, timeline.findInstantsAfter("000", Integer.MAX_VALUE)
        .countInstants());
    assertEquals("Latest commit should be 004", newCommitTime, timeline.lastInstant().get().getTimestamp());

    // Check the entire dataset has 47 records still
    dataSet = getRecords();
    assertEquals("Must contain 47 records", 47, dataSet.count());
    Row[] rows = (Row[]) dataSet.collect();
    int record1Count = 0;
    int record2Count = 0;
    for (Row row : rows) {
      if (row.getAs("_hoodie_record_key").equals(record1.getKey().getRecordKey())) {
        record1Count++;
        // assert each duplicate record is updated
        assertEquals("rider-004", row.getAs("rider"));
        assertEquals("driver-004", row.getAs("driver"));
      } else if (row.getAs("_hoodie_record_key").equals(record2.getKey().getRecordKey())) {
        record2Count++;
        // assert each duplicate record is updated
        assertEquals("rider-004", row.getAs("rider"));
        assertEquals("driver-004", row.getAs("driver"));
      } else {
        // all other records must be untouched by commit 004
        // (fixed: the original compared the driver field against "rider-004",
        // which made that assertion vacuous)
        assertNotEquals("rider-004", row.getAs("rider"));
        assertNotEquals("driver-004", row.getAs("driver"));
      }
    }
    // Assert that id1 record count which has been updated to rider-004 and driver-004 is 22, which is the total
    // number of records with row_key id1
    assertEquals(22, record1Count);
    // Assert that id2 record count which has been updated to rider-004 and driver-004 is 21, which is the total
    // number of records with row_key id2
    assertEquals(21, record2Count);
  }

  @Test
  public void testHoodieMergeHandleWriteStatMetrics() throws Exception {
    // insert 100 records
    HoodieWriteConfig config = getConfigBuilder().build();
    HoodieWriteClient writeClient = new HoodieWriteClient(jsc, config);
    String newCommitTime = "100";
    writeClient.startCommitWithTime(newCommitTime);
    List<HoodieRecord> records = dataGen.generateInserts(newCommitTime, 100);
    JavaRDD<HoodieRecord> recordsRDD = jsc.parallelize(records, 1);
    List<WriteStatus> statuses = writeClient.insert(recordsRDD, newCommitTime).collect();

    // All records should be inserts into new parquet files, i.e. carry the null
    // prev-commit marker. (Compare with equals(): the statuses travel through Spark
    // serialization, so the original reference comparison against
    // HoodieWriteStat.NULL_COMMIT was vacuous.)
    Assert.assertTrue(statuses.stream()
        .filter(status -> status.getStat().getPrevCommit().equals(HoodieWriteStat.NULL_COMMIT)).count() > 0);
    // Num writes should be equal to the number of records inserted
    Assert.assertEquals(100, statuses.stream()
        .mapToLong(status -> status.getStat().getNumWrites()).sum());
    // Num update writes should be equal to the number of records updated
    Assert.assertEquals(0, statuses.stream()
        .mapToLong(status -> status.getStat().getNumUpdateWrites()).sum());
    // Num inserts should be equal to the number of records inserted
    Assert.assertEquals(100, statuses.stream()
        .mapToLong(status -> status.getStat().getNumInserts()).sum());

    // Update all the 100 records
    newCommitTime = "101";
    writeClient.startCommitWithTime(newCommitTime);
    List<HoodieRecord> updatedRecords = dataGen.generateUpdates(newCommitTime, records);
    JavaRDD<HoodieRecord> updatedRecordsRDD = jsc.parallelize(updatedRecords, 1);
    statuses = writeClient.upsert(updatedRecordsRDD, newCommitTime).collect();

    // All records should be upserts into existing parquet files, i.e. none may carry
    // the null prev-commit marker
    Assert.assertEquals(0, statuses.stream()
        .filter(status -> status.getStat().getPrevCommit().equals(HoodieWriteStat.NULL_COMMIT)).count());
    // Num writes should be equal to the number of records written
    Assert.assertEquals(100, statuses.stream()
        .mapToLong(status -> status.getStat().getNumWrites()).sum());
    // Num update writes should be equal to the number of records updated
    Assert.assertEquals(100, statuses.stream()
        .mapToLong(status -> status.getStat().getNumUpdateWrites()).sum());
    // No insert records were converted to updates in this commit
    Assert.assertEquals(0, statuses.stream()
        .mapToLong(status -> status.getStat().getNumInserts()).sum());

    // Write 100 brand new inserts plus the 100 previous updates; the inserts are
    // expected to be routed into the existing small file
    newCommitTime = "102";
    writeClient.startCommitWithTime(newCommitTime);
    List<HoodieRecord> allRecords = dataGen.generateInserts(newCommitTime, 100);
    allRecords.addAll(updatedRecords);
    JavaRDD<HoodieRecord> allRecordsRDD = jsc.parallelize(allRecords, 1);
    statuses = writeClient.upsert(allRecordsRDD, newCommitTime).collect();

    // All records should be upserts into existing parquet files (with inserts as updates small file handled)
    Assert.assertEquals(0, statuses.stream()
        .filter(status -> status.getStat().getPrevCommit().equals(HoodieWriteStat.NULL_COMMIT)).count());
    // Num writes should be equal to the total number of records written
    Assert.assertEquals(200, statuses.stream()
        .mapToLong(status -> status.getStat().getNumWrites()).sum());
    // Num update writes should be equal to the number of records updated (including inserts converted as updates)
    Assert.assertEquals(100, statuses.stream()
        .mapToLong(status -> status.getStat().getNumUpdateWrites()).sum());
    // Num inserts should be equal to the number of insert records converted to updates as part of small file
    // handling
    Assert.assertEquals(100, statuses.stream()
        .mapToLong(status -> status.getStat().getNumInserts()).sum());
  }

  /**
   * Reads back the whole dataset (one glob per partition) as a Spark {@link Dataset}.
   */
  private Dataset<Row> getRecords() {
    String[] fullPartitionPaths = new String[dataGen.getPartitionPaths().length];
    for (int i = 0; i < fullPartitionPaths.length; i++) {
      fullPartitionPaths[i] = String.format("%s/%s/*", basePath, dataGen.getPartitionPaths()[i]);
    }
    return HoodieClientTestUtils.read(jsc, basePath, sqlContext, fs, fullPartitionPaths);
  }

  /**
   * Assert no failures in writing hoodie files
   *
   * @param statuses List of Write Status
   */
  void assertNoWriteErrors(List<WriteStatus> statuses) {
    // Verify there are no errors
    for (WriteStatus status : statuses) {
      assertFalse("Errors found in write of " + status.getFileId(), status.hasErrors());
    }
  }

  /**
   * Builds the common write config: small-file handling at 1MB, 1MB file size limit,
   * BLOOM index, parallelism 2.
   */
  HoodieWriteConfig.Builder getConfigBuilder() {
    return HoodieWriteConfig.newBuilder().withPath(basePath).withSchema(HoodieTestDataGenerator.TRIP_EXAMPLE_SCHEMA)
        .withParallelism(2, 2)
        .withCompactionConfig(HoodieCompactionConfig.newBuilder().compactionSmallFileSize(1024 * 1024).build())
        .withStorageConfig(HoodieStorageConfig.newBuilder().limitFileSize(1024 * 1024).build())
        .forTable("test-trip-table")
        .withIndexConfig(HoodieIndexConfig.newBuilder().withIndexType(HoodieIndex.IndexType.BLOOM).build())
        .withBulkInsertParallelism(2);
  }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.security.authc.saml;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.core.TimeValue;
import org.opensaml.core.xml.XMLObject;
import org.opensaml.saml.saml2.core.Assertion;
import org.opensaml.saml.saml2.core.Attribute;
import org.opensaml.saml.saml2.core.AttributeStatement;
import org.opensaml.saml.saml2.core.Audience;
import org.opensaml.saml.saml2.core.AudienceRestriction;
import org.opensaml.saml.saml2.core.AuthnStatement;
import org.opensaml.saml.saml2.core.Conditions;
import org.opensaml.saml.saml2.core.EncryptedAssertion;
import org.opensaml.saml.saml2.core.EncryptedAttribute;
import org.opensaml.saml.saml2.core.Response;
import org.opensaml.saml.saml2.core.Subject;
import org.opensaml.saml.saml2.core.SubjectConfirmation;
import org.opensaml.saml.saml2.core.SubjectConfirmationData;
import org.opensaml.xmlsec.encryption.support.DecryptionException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import java.time.Clock;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import static org.elasticsearch.xpack.security.authc.saml.SamlUtils.samlException;
import static org.opensaml.saml.saml2.core.SubjectConfirmation.METHOD_BEARER;
/**
* Processes the IdP's SAML Response for our AuthnRequest, validates it, and extracts the relevant properties.
*/
/**
 * Processes the IdP's SAML Response for our AuthnRequest, validates it, and extracts the relevant properties.
 *
 * <p>Validation covers the response signature (or, failing that, mandatory assertion signatures),
 * status, issuer, destination, conditions (audience + lifetime), subject confirmation and the
 * authentication statement, before the attributes are extracted.
 */
class SamlAuthenticator extends SamlResponseHandler {
    private static final String RESPONSE_TAG_NAME = "Response";

    SamlAuthenticator(Clock clock,
                      IdpConfiguration idp,
                      SpConfiguration sp,
                      TimeValue maxSkew) {
        super(clock, idp, sp, maxSkew);
    }

    /**
     * Processes the provided SAML response within the provided token and, if valid, extracts the relevant attributes from it.
     *
     * @throws org.elasticsearch.ElasticsearchSecurityException If the SAML is invalid for this realm/configuration
     */
    SamlAttributes authenticate(SamlToken token) {
        final Element root = parseSamlMessage(token.getContent());
        if (RESPONSE_TAG_NAME.equals(root.getLocalName()) && SAML_NAMESPACE.equals(root.getNamespaceURI())) {
            try {
                return authenticateResponse(root, token.getAllowedSamlRequestIds());
            } catch (ElasticsearchSecurityException e) {
                // Log a truncated copy of the rejected response so operators can diagnose IdP misconfiguration
                logger.trace("Rejecting SAML response [{}...] because {}", Strings.cleanTruncate(SamlUtils.toString(root), 512),
                    e.getMessage());
                throw e;
            }
        } else {
            throw samlException("SAML content [{}] should have a root element of Namespace=[{}] Tag=[{}]",
                root, SAML_NAMESPACE, RESPONSE_TAG_NAME);
        }
    }

    /**
     * Validates the parsed Response element and extracts the NameID, session index and attributes.
     * If the response itself is not signed, every assertion within it must be signed instead.
     */
    private SamlAttributes authenticateResponse(Element element, Collection<String> allowedSamlRequestIds) {
        final Response response = buildXmlObject(element, Response.class);
        if (response == null) {
            throw samlException("Cannot convert element {} into Response object", element);
        }
        if (logger.isTraceEnabled()) {
            logger.trace(SamlUtils.describeSamlObject(response));
        }
        // An unsigned response is acceptable only when each assertion carries its own signature
        final boolean requireSignedAssertions;
        if (response.isSigned()) {
            validateSignature(response.getSignature());
            requireSignedAssertions = false;
        } else {
            requireSignedAssertions = true;
        }
        checkInResponseTo(response, allowedSamlRequestIds);
        checkStatus(response.getStatus());
        checkIssuer(response.getIssuer(), response);
        checkResponseDestination(response);
        Tuple<Assertion, List<Attribute>> details = extractDetails(response, allowedSamlRequestIds, requireSignedAssertions);
        final Assertion assertion = details.v1();
        final SamlNameId nameId = SamlNameId.forSubject(assertion.getSubject());
        final String session = getSessionIndex(assertion);
        final List<SamlAttributes.SamlAttribute> attributes = details.v2().stream()
            .map(SamlAttributes.SamlAttribute::new)
            .collect(Collectors.toList());
        if (logger.isTraceEnabled()) {
            StringBuilder sb = new StringBuilder();
            sb.append("The SAML Assertion contained the following attributes: \n");
            for (SamlAttributes.SamlAttribute attr : attributes) {
                sb.append(attr).append("\n");
            }
            logger.trace(sb.toString());
        }
        if (attributes.isEmpty() && nameId == null) {
            logger.debug("The Attribute Statements of SAML Response with ID [{}] contained no attributes and the SAML Assertion Subject " +
                "did not contain a SAML NameID. Please verify that the Identity Provider configuration with regards to attribute " +
                "release is correct. ", response.getID());
            throw samlException("Could not process any SAML attributes in {}", response.getElementQName());
        }
        return new SamlAttributes(nameId, session, attributes);
    }

    /** Returns the first non-null SessionIndex from the assertion's AuthnStatements, or {@code null}. */
    private String getSessionIndex(Assertion assertion) {
        return assertion.getAuthnStatements().stream().map(as -> as.getSessionIndex()).filter(Objects::nonNull).findFirst().orElse(null);
    }

    /**
     * Verifies that the response's Destination matches our Assertion Consumer Service URL.
     * A missing Destination is tolerated only for unsigned responses.
     */
    private void checkResponseDestination(Response response) {
        final String asc = getSpConfiguration().getAscUrl();
        if (asc.equals(response.getDestination()) == false) {
            if (response.isSigned() || Strings.hasText(response.getDestination())) {
                throw samlException("SAML response " + response.getID() + " is for destination " + response.getDestination()
                    + " but this realm uses " + asc);
            }
        }
    }

    /**
     * Extracts and processes the single (possibly encrypted) assertion from the response.
     * Exactly one assertion (plain or encrypted) is required.
     */
    private Tuple<Assertion, List<Attribute>> extractDetails(Response response, Collection<String> allowedSamlRequestIds,
                                                             boolean requireSignedAssertions) {
        final int assertionCount = response.getAssertions().size() + response.getEncryptedAssertions().size();
        if (assertionCount > 1) {
            throw samlException("Expecting only 1 assertion, but response contains multiple (" + assertionCount + ")");
        }
        for (Assertion assertion : response.getAssertions()) {
            return new Tuple<>(assertion, processAssertion(assertion, requireSignedAssertions, allowedSamlRequestIds));
        }
        for (EncryptedAssertion encrypted : response.getEncryptedAssertions()) {
            Assertion assertion = decrypt(encrypted);
            // Detach the decrypted assertion into its own document and re-register its ID attribute
            // so that ID-based signature references can be resolved
            moveToNewDocument(assertion);
            assertion.getDOM().setIdAttribute("ID", true);
            return new Tuple<>(assertion, processAssertion(assertion, requireSignedAssertions, allowedSamlRequestIds));
        }
        throw samlException("No assertions found in SAML response");
    }

    /** Moves the object's DOM element into a fresh document of its own. */
    private void moveToNewDocument(XMLObject xmlObject) {
        final Element element = xmlObject.getDOM();
        final Document doc = element.getOwnerDocument().getImplementation().createDocument(null, null, null);
        doc.adoptNode(element);
        doc.appendChild(element);
    }

    /**
     * Decrypts an encrypted assertion.
     *
     * @throws org.elasticsearch.ElasticsearchSecurityException if no decryption key is configured or decryption fails
     */
    private Assertion decrypt(EncryptedAssertion encrypted) {
        if (decrypter == null) {
            throw samlException("SAML assertion [" + text(encrypted, 32) + "] is encrypted, but no decryption key is available");
        }
        try {
            return decrypter.decrypt(encrypted);
        } catch (DecryptionException e) {
            logger.debug(() -> new ParameterizedMessage("Failed to decrypt SAML assertion [{}] with [{}]",
                text(encrypted, 512), describe(getSpConfiguration().getEncryptionCredentials())), e);
            throw samlException("Failed to decrypt SAML assertion " + text(encrypted, 32), e);
        }
    }

    /**
     * Validates a single assertion (signature, conditions, issuer, subject, authn statement) and
     * returns its attributes, decrypting any encrypted attributes along the way.
     */
    private List<Attribute> processAssertion(Assertion assertion, boolean requireSignature, Collection<String> allowedSamlRequestIds) {
        if (logger.isTraceEnabled()) {
            logger.trace("(Possibly decrypted) Assertion: {}", SamlUtils.getXmlContent(assertion, true));
            logger.trace(SamlUtils.describeSamlObject(assertion));
        }
        // Do not further process unsigned Assertions
        if (assertion.isSigned()) {
            validateSignature(assertion.getSignature());
        } else if (requireSignature) {
            throw samlException("Assertion [{}] is not signed, but a signature is required", assertion.getElementQName());
        }
        checkConditions(assertion.getConditions());
        checkIssuer(assertion.getIssuer(), assertion);
        checkSubject(assertion.getSubject(), assertion, allowedSamlRequestIds);
        checkAuthnStatement(assertion.getAuthnStatements());
        List<Attribute> attributes = new ArrayList<>();
        for (AttributeStatement statement : assertion.getAttributeStatements()) {
            logger.trace("SAML AttributeStatement has [{}] attributes and [{}] encrypted attributes",
                statement.getAttributes().size(), statement.getEncryptedAttributes().size());
            attributes.addAll(statement.getAttributes());
            for (EncryptedAttribute enc : statement.getEncryptedAttributes()) {
                final Attribute attribute = decrypt(enc);
                if (attribute != null) {
                    // Fixed: the value was previously concatenated onto the pattern, printing a literal "{}"
                    // and always paying the serialization cost; pass it as a log parameter instead.
                    logger.trace("Successfully decrypted attribute: {}", SamlUtils.getXmlContent(attribute, true));
                    attributes.add(attribute);
                }
            }
        }
        return attributes;
    }

    /**
     * Requires exactly one AuthnStatement, checks its SessionNotOnOrAfter against the (skew-adjusted)
     * clock, and, if the SP requested specific AuthnContext class refs, verifies one of them was used.
     */
    private void checkAuthnStatement(List<AuthnStatement> authnStatements) {
        if (authnStatements.size() != 1) {
            throw samlException("SAML Assertion subject contains [{}] Authn Statements while exactly one was expected.",
                authnStatements.size());
        }
        final AuthnStatement authnStatement = authnStatements.get(0);
        // "past now" that is now - the maximum skew we will tolerate. Essentially "if our clock is 2min fast, what time is it now?"
        final Instant now = now();
        final Instant pastNow = now.minusMillis(maxSkewInMillis());
        if (authnStatement.getSessionNotOnOrAfter() != null &&
            pastNow.isBefore(toInstant(authnStatement.getSessionNotOnOrAfter())) == false) {
            throw samlException("Rejecting SAML assertion's Authentication Statement because [{}] is on/after [{}]", pastNow,
                authnStatement.getSessionNotOnOrAfter());
        }
        List<String> reqAuthnCtxClassRef = this.getSpConfiguration().getReqAuthnCtxClassRef();
        if (reqAuthnCtxClassRef.isEmpty() == false) {
            String authnCtxClassRefValue = null;
            if (authnStatement.getAuthnContext() != null && authnStatement.getAuthnContext().getAuthnContextClassRef() != null) {
                authnCtxClassRefValue = authnStatement.getAuthnContext().getAuthnContextClassRef().getAuthnContextClassRef();
            }
            if (Strings.isNullOrEmpty(authnCtxClassRefValue) || reqAuthnCtxClassRef.contains(authnCtxClassRefValue) == false) {
                throw samlException("Rejecting SAML assertion as the AuthnContextClassRef [{}] is not one of the ({}) that were " +
                    "requested in the corresponding AuthnRequest", authnCtxClassRefValue, reqAuthnCtxClassRef);
            }
        }
    }

    /**
     * Decrypts an encrypted attribute. Unlike assertion decryption, failure is non-fatal:
     * {@code null} is returned and the attribute is skipped.
     */
    private Attribute decrypt(EncryptedAttribute encrypted) {
        if (decrypter == null) {
            logger.info("SAML message has encrypted attribute [" + text(encrypted, 32) + "], but no encryption key has been configured");
            return null;
        }
        try {
            return decrypter.decrypt(encrypted);
        } catch (DecryptionException e) {
            logger.info("Failed to decrypt SAML attribute " + text(encrypted, 32), e);
            return null;
        }
    }

    /** Checks the assertion's Conditions element (audience restrictions and validity window), if present. */
    private void checkConditions(Conditions conditions) {
        if (conditions != null) {
            if (logger.isTraceEnabled()) {
                logger.trace("SAML Assertion was intended for the following Service providers: {}",
                    conditions.getAudienceRestrictions().stream().map(r -> text(r, 32))
                        .collect(Collectors.joining(" | ")));
                logger.trace("SAML Assertion is only valid between: " + conditions.getNotBefore() + " and " + conditions.getNotOnOrAfter());
            }
            checkAudienceRestrictions(conditions.getAudienceRestrictions());
            checkLifetimeRestrictions(conditions);
        }
    }

    /**
     * Requires exactly one bearer SubjectConfirmation and validates its recipient, lifetime and
     * InResponseTo against this SP's configuration and the allowed request IDs.
     */
    private void checkSubject(Subject assertionSubject, XMLObject parent, Collection<String> allowedSamlRequestIds) {
        if (assertionSubject == null) {
            throw samlException("SAML Assertion ({}) has no Subject", text(parent, 16));
        }
        final List<SubjectConfirmationData> confirmationData = assertionSubject.getSubjectConfirmations().stream()
            // Constant-first equals so a SubjectConfirmation without a Method attribute cannot NPE
            .filter(data -> METHOD_BEARER.equals(data.getMethod()))
            .map(SubjectConfirmation::getSubjectConfirmationData).filter(Objects::nonNull).collect(Collectors.toList());
        if (confirmationData.size() != 1) {
            throw samlException("SAML Assertion subject contains [{}] bearer SubjectConfirmation, while exactly one was expected.",
                confirmationData.size());
        }
        if (logger.isTraceEnabled()) {
            logger.trace("SAML Assertion Subject Confirmation intended recipient is: " + confirmationData.get(0).getRecipient());
            logger.trace("SAML Assertion Subject Confirmation is only valid before: " + confirmationData.get(0).getNotOnOrAfter());
            logger.trace("SAML Assertion Subject Confirmation is in response to: " + confirmationData.get(0).getInResponseTo());
        }
        checkRecipient(confirmationData.get(0));
        checkLifetimeRestrictions(confirmationData.get(0));
        checkSubjectInResponseTo(confirmationData.get(0), allowedSamlRequestIds);
    }

    /** Verifies that, when present, InResponseTo matches one of the AuthnRequest IDs we issued. */
    private void checkSubjectInResponseTo(
        SubjectConfirmationData subjectConfirmationData, Collection<String> allowedSamlRequestIds) {
        // Allow for IdP initiated SSO where InResponseTo MUST be missing
        if (Strings.hasText(subjectConfirmationData.getInResponseTo())
            && allowedSamlRequestIds.contains(subjectConfirmationData.getInResponseTo()) == false) {
            throw samlException("SAML Assertion SubjectConfirmationData is in-response-to [{}] but expected one of [{}]",
                subjectConfirmationData.getInResponseTo(), allowedSamlRequestIds);
        }
    }

    /** Verifies that the SubjectConfirmationData Recipient matches our Assertion Consumer Service URL. */
    private void checkRecipient(SubjectConfirmationData subjectConfirmationData) {
        final SpConfiguration sp = getSpConfiguration();
        if (sp.getAscUrl().equals(subjectConfirmationData.getRecipient()) == false) {
            throw samlException("SAML Assertion SubjectConfirmationData Recipient [{}] does not match expected value [{}]",
                subjectConfirmationData.getRecipient(), sp.getAscUrl());
        }
    }

    /** Requires that every AudienceRestriction names this SP's entity ID among its audiences. */
    private void checkAudienceRestrictions(List<AudienceRestriction> restrictions) {
        if (restrictions.stream().allMatch(this::checkAudienceRestriction) == false) {
            throw samlException("Conditions [{}] do not match required audience [{}]",
                restrictions.stream().map(r -> text(r, 56, 8)).collect(Collectors.joining(" | ")), getSpConfiguration().getEntityId());
        }
    }

    /**
     * Returns {@code true} if the restriction contains our entity ID; otherwise logs each audience URI
     * (with the point of divergence when most of the string matches, to highlight near-miss typos).
     */
    private boolean checkAudienceRestriction(AudienceRestriction restriction) {
        final String spEntityId = this.getSpConfiguration().getEntityId();
        if (restriction.getAudiences().stream().map(Audience::getAudienceURI).anyMatch(spEntityId::equals) == false) {
            restriction.getAudiences().stream().map(Audience::getAudienceURI).forEach(uri -> {
                int diffChar;
                for (diffChar = 0; diffChar < uri.length() && diffChar < spEntityId.length(); diffChar++) {
                    if (uri.charAt(diffChar) != spEntityId.charAt(diffChar)) {
                        break;
                    }
                }
                // If the difference is less than half the length of the string, show it in detail
                if (diffChar >= spEntityId.length() / 2) {
                    logger.info("Audience restriction [{}] does not match required audience [{}] " +
                            "(difference starts at character [#{}] [{}] vs [{}])",
                        uri, spEntityId, diffChar, uri.substring(diffChar), spEntityId.substring(diffChar));
                } else {
                    logger.info("Audience restriction [{}] does not match required audience [{}]", uri, spEntityId);
                }
            });
            return false;
        }
        return true;
    }

    /** Checks the Conditions NotBefore / NotOnOrAfter window against the skew-adjusted clock. */
    private void checkLifetimeRestrictions(Conditions conditions) {
        // In order to compensate for clock skew we construct 2 alternate realities
        //  - a "future now" that is now + the maximum skew we will tolerate. Essentially "if our clock is 2min slow, what time is it now?"
        //  - a "past now" that is now - the maximum skew we will tolerate. Essentially "if our clock is 2min fast, what time is it now?"
        final Instant now = now();
        final Instant futureNow = now.plusMillis(maxSkewInMillis());
        final Instant pastNow = now.minusMillis(maxSkewInMillis());
        if (conditions.getNotBefore() != null && futureNow.isBefore(toInstant(conditions.getNotBefore()))) {
            throw samlException("Rejecting SAML assertion because [{}] is before [{}]", futureNow, conditions.getNotBefore());
        }
        if (conditions.getNotOnOrAfter() != null && pastNow.isBefore(toInstant(conditions.getNotOnOrAfter())) == false) {
            throw samlException("Rejecting SAML assertion because [{}] is on/after [{}]", pastNow, conditions.getNotOnOrAfter());
        }
    }

    /** Checks the SubjectConfirmationData NotOnOrAfter deadline. */
    private void checkLifetimeRestrictions(SubjectConfirmationData subjectConfirmationData) {
        validateNotOnOrAfter(subjectConfirmationData.getNotOnOrAfter());
    }
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.corecomponents;
import java.awt.Cursor;
import java.beans.PropertyChangeEvent;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.logging.Level;
import javax.swing.JTabbedPane;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.openide.nodes.Node;
import org.openide.util.Lookup;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.core.UserPreferences;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataContent;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataContentViewer;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
/**
*
*/
/**
 * Panel that hosts all registered {@link DataContentViewer}s in a tabbed pane, keeps them in sync
 * with the currently selected {@link Node}, and lazily refreshes a viewer only when its tab is
 * actually shown (see {@link UpdateWrapper}).
 */
public class DataContentPanel extends javax.swing.JPanel implements DataContent, ChangeListener {

    private static final Logger logger = Logger.getLogger(DataContentPanel.class.getName());
    private final List<UpdateWrapper> viewers = new ArrayList<>();
    private Node currentNode;
    private final boolean isMain;
    private boolean listeningToTabbedPane = false;

    /**
     * Creates new DataContentPanel panel The main data content panel can only
     * be created by the data content top component, thus this constructor is
     * not public.
     *
     * Use the createInstance factory method to create an external viewer data
     * content panel.
     *
     */
    DataContentPanel(boolean isMain) {
        this.isMain = isMain;
        initComponents();

        // add all implementors of DataContentViewer and put them in the tabbed pane
        Collection<? extends DataContentViewer> dcvs = Lookup.getDefault().lookupAll(DataContentViewer.class);
        for (DataContentViewer factory : dcvs) {
            DataContentViewer dcv;
            if (isMain) {
                //use the instance from Lookup for the main viewer
                dcv = factory;
            } else {
                dcv = factory.createInstance();
            }
            viewers.add(new UpdateWrapper(dcv));
            jTabbedPane1.addTab(dcv.getTitle(), null,
                    dcv.getComponent(), dcv.getToolTip());
        }

        // disable the tabs
        int numTabs = jTabbedPane1.getTabCount();
        for (int tab = 0; tab < numTabs; ++tab) {
            jTabbedPane1.setEnabledAt(tab, false);
        }
    }

    /**
     * Factory method to create an external (not main window) data content panel
     * to be used in an external window
     *
     * @return a new instance of a data content panel
     */
    public static DataContentPanel createInstance() {
        return new DataContentPanel(false);
    }

    public JTabbedPane getTabPanels() {
        return jTabbedPane1;
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        jTabbedPane1 = new javax.swing.JTabbedPane();

        setMinimumSize(new java.awt.Dimension(5, 5));

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
        this.setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(jTabbedPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 400, Short.MAX_VALUE)
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(jTabbedPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 300, Short.MAX_VALUE)
        );
    }// </editor-fold>//GEN-END:initComponents
    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JTabbedPane jTabbedPane1;
    // End of variables declaration//GEN-END:variables

    @Override
    public void setNode(Node selectedNode) {
        // change the cursor to "waiting cursor" for this operation
        this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
        try {
            String defaultName = NbBundle.getMessage(DataContentTopComponent.class, "CTL_DataContentTopComponent");
            // set the file path
            if (selectedNode == null) {
                setName(defaultName);
            } else {
                Content content = selectedNode.getLookup().lookup(Content.class);
                if (content != null) {
                    //String path = DataConversion.getformattedPath(ContentUtils.getDisplayPath(selectedNode.getLookup().lookup(Content.class)), 0);
                    String path = defaultName;
                    try {
                        path = content.getUniquePath();
                    } catch (TskCoreException ex) {
                        // Fixed: the exception was previously dropped; include it so the stack trace is logged
                        logger.log(Level.SEVERE, "Exception while calling Content.getUniquePath() for " + content, ex); //NON-NLS
                    }
                    setName(path);
                } else {
                    setName(defaultName);
                }
            }

            currentNode = selectedNode;

            setupTabs(selectedNode);
        } finally {
            this.setCursor(null);
        }
    }

    /**
     * Resets the tabs based on the selected Node. If the selected node is null
     * or not supported, disable that tab as well.
     *
     * @param selectedNode the selected content Node
     */
    public void setupTabs(Node selectedNode) {
        // Deferring becoming a listener to the tabbed pane until this point
        // eliminates handling a superfluous stateChanged event during construction.
        if (listeningToTabbedPane == false) {
            jTabbedPane1.addChangeListener(this);
            listeningToTabbedPane = true;
        }

        int currTabIndex = jTabbedPane1.getSelectedIndex();
        int totalTabs = jTabbedPane1.getTabCount();
        int maxPreferred = 0;
        int preferredViewerIndex = 0;
        for (int i = 0; i < totalTabs; ++i) {
            UpdateWrapper dcv = viewers.get(i);
            dcv.resetComponent();

            // disable an unsupported tab (ex: picture viewer)
            if ((selectedNode == null) || (dcv.isSupported(selectedNode) == false)) {
                jTabbedPane1.setEnabledAt(i, false);
            } else {
                jTabbedPane1.setEnabledAt(i, true);

                // remember the viewer with the highest preference value
                int currentPreferred = dcv.isPreferred(selectedNode);
                if (currentPreferred > maxPreferred) {
                    preferredViewerIndex = i;
                    maxPreferred = currentPreferred;
                }
            }
        }

        // let the user decide if we should stay with the current viewer
        int tabIndex = UserPreferences.keepPreferredContentViewer() ? currTabIndex : preferredViewerIndex;

        UpdateWrapper dcv = viewers.get(tabIndex);
        // this is really only needed if no tabs were enabled
        if (jTabbedPane1.isEnabledAt(tabIndex) == false) {
            dcv.resetComponent();
        } else {
            dcv.setNode(selectedNode);
        }

        // set the tab to the one the user wants, then set that viewer's node.
        jTabbedPane1.setSelectedIndex(tabIndex);
    }

    @Override
    public void propertyChange(PropertyChangeEvent evt) {
        // no-op: node updates are driven through setNode(), not property changes
    }

    @Override
    public void stateChanged(ChangeEvent evt) {
        JTabbedPane pane = (JTabbedPane) evt.getSource();

        // Get and set current selected tab
        int currentTab = pane.getSelectedIndex();
        if (currentTab != -1) {
            UpdateWrapper dcv = viewers.get(currentTab);
            // refresh the viewer lazily, only if it missed an update while hidden
            if (dcv.isOutdated()) {
                // change the cursor to "waiting cursor" for this operation
                this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
                try {
                    dcv.setNode(currentNode);
                } finally {
                    this.setCursor(null);
                }
            }
        }
    }

    /**
     * Wraps a {@link DataContentViewer} and tracks whether it has seen the most recent node,
     * so hidden viewers can skip updates until their tab is shown.
     */
    private static class UpdateWrapper {

        private final DataContentViewer wrapped;
        // true when the wrapped viewer has not yet been given the current node
        private boolean outdated;

        UpdateWrapper(DataContentViewer wrapped) {
            this.wrapped = wrapped;
            this.outdated = true;
        }

        void setNode(Node selectedNode) {
            this.wrapped.setNode(selectedNode);
            this.outdated = false;
        }

        void resetComponent() {
            this.wrapped.resetComponent();
            this.outdated = true;
        }

        boolean isOutdated() {
            return this.outdated;
        }

        boolean isSupported(Node node) {
            return this.wrapped.isSupported(node);
        }

        int isPreferred(Node node) {
            return this.wrapped.isPreferred(node);
        }
    }
}
| |
/*
* Copyright 2016 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.deprecated;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.api.services.compute.model.Image;
import com.google.common.base.Function;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import java.io.Serializable;
import java.math.BigInteger;
import java.util.List;
import java.util.Objects;
/**
* A Google Compute Engine Image. An image contains a boot loader, an operating system and a root
* file system that is necessary for starting an instance. Compute Engine offers publicly-available
* images of certain operating systems that you can use, or you can create a custom image. A custom
* image is an image created from one of your virtual machine instances that contains your specific
* instance configurations. Use {@link DiskImageConfiguration} to create an image from an existing
* disk. Use {@link StorageImageConfiguration} to create an image from a file stored in Google
* Cloud Storage.
*
* @see <a href="https://cloud.google.com/compute/docs/images">Images</a>
*/
/**
 * A Google Compute Engine Image. An image contains a boot loader, an operating system and a root
 * file system that is necessary for starting an instance. Compute Engine offers publicly-available
 * images of certain operating systems that you can use, or you can create a custom image. A custom
 * image is an image created from one of your virtual machine instances that contains your specific
 * instance configurations. Use {@link DiskImageConfiguration} to create an image from an existing
 * disk. Use {@link StorageImageConfiguration} to create an image from a file stored in Google
 * Cloud Storage.
 *
 * @see <a href="https://cloud.google.com/compute/docs/images">Images</a>
 */
public class ImageInfo implements Serializable {

  /** Converts a protocol-buffer {@code Image} into an {@code ImageInfo}. */
  static final Function<Image, ImageInfo> FROM_PB_FUNCTION =
      new Function<Image, ImageInfo>() {
        @Override
        public ImageInfo apply(Image pb) {
          return ImageInfo.fromPb(pb);
        }
      };
  /** Converts an {@code ImageInfo} into its protocol-buffer {@code Image} form. */
  static final Function<ImageInfo, Image> TO_PB_FUNCTION =
      new Function<ImageInfo, Image>() {
        @Override
        public Image apply(ImageInfo image) {
          return image.toPb();
        }
      };

  private static final long serialVersionUID = -1061916352807358977L;
  // Thread-safe ISO-8601 formatter used for the creation timestamp (pb stores it as a string)
  private static final DateTimeFormatter TIMESTAMP_FORMATTER = ISODateTimeFormat.dateTime();

  private final String generatedId;
  private final ImageId imageId;
  private final Long creationTimestamp;
  private final String description;
  private final ImageConfiguration configuration;
  private final Status status;
  private final Long diskSizeGb;
  private final List<LicenseId> licenses;
  private final DeprecationStatus<ImageId> deprecationStatus;

  /**
   * The status of a Google Compute Engine Image. An image can be used to create other disks only
   * after it has been successfully created and its status is set to {@code READY}.
   */
  public enum Status {
    /**
     * Image creation failed. The image can not be used.
     */
    FAILED,

    /**
     * Image creation is pending. The image is not ready to be used yet.
     */
    PENDING,

    /**
     * Image has been created and is ready for use.
     */
    READY
  }

  /**
   * A builder for {@code ImageInfo} objects.
   */
  public abstract static class Builder {

    abstract Builder setGeneratedId(String generatedId);

    /**
     * Misnamed legacy setter for the creation timestamp, kept (and still used internally) for
     * backward compatibility. Prefer {@link #setCreationTimestamp(Long)}.
     */
    abstract Builder getCreationTimestamp(Long creationTimestamp);

    /**
     * Sets the creation timestamp in milliseconds since epoch. Correctly named replacement for
     * {@link #getCreationTimestamp(Long)}; delegates to it so existing subclasses keep working.
     */
    Builder setCreationTimestamp(Long creationTimestamp) {
      return getCreationTimestamp(creationTimestamp);
    }

    /**
     * Sets the image identity.
     */
    public abstract Builder setImageId(ImageId imageId);

    /**
     * Sets an optional textual description of the image.
     */
    public abstract Builder setDescription(String description);

    /**
     * Sets the image configuration. Use {@link DiskImageConfiguration} to create an image from an
     * existing disk. Use {@link StorageImageConfiguration} to create an image from a file stored in
     * Google Cloud Storage.
     */
    public abstract Builder setConfiguration(ImageConfiguration configuration);

    abstract Builder setStatus(Status status);

    abstract Builder setDiskSizeGb(Long diskSizeGb);

    abstract Builder setLicenses(List<LicenseId> licenses);

    abstract Builder setDeprecationStatus(DeprecationStatus<ImageId> deprecationStatus);

    /**
     * Creates a {@code ImageInfo} object.
     */
    public abstract ImageInfo build();
  }

  /** Default {@code Builder} implementation backed by plain fields. */
  static final class BuilderImpl extends Builder {

    private String generatedId;
    private Long creationTimestamp;
    private ImageId imageId;
    private String description;
    private ImageConfiguration configuration;
    private Status status;
    private Long diskSizeGb;
    private List<LicenseId> licenses;
    private DeprecationStatus<ImageId> deprecationStatus;

    BuilderImpl() {}

    /** Copy-constructor used by {@link ImageInfo#toBuilder()}. */
    BuilderImpl(ImageInfo imageInfo) {
      this.generatedId = imageInfo.generatedId;
      this.creationTimestamp = imageInfo.creationTimestamp;
      this.imageId = imageInfo.imageId;
      this.description = imageInfo.description;
      this.configuration = imageInfo.configuration;
      this.status = imageInfo.status;
      this.diskSizeGb = imageInfo.diskSizeGb;
      this.licenses = imageInfo.licenses;
      this.deprecationStatus = imageInfo.deprecationStatus;
    }

    /** Populates the builder from a protocol-buffer {@code Image}; null pb fields stay null. */
    BuilderImpl(Image imagePb) {
      if (imagePb.getId() != null) {
        this.generatedId = imagePb.getId().toString();
      }
      if (imagePb.getCreationTimestamp() != null) {
        this.creationTimestamp = TIMESTAMP_FORMATTER.parseMillis(imagePb.getCreationTimestamp());
      }
      this.imageId = ImageId.fromUrl(imagePb.getSelfLink());
      this.description = imagePb.getDescription();
      this.configuration = ImageConfiguration.fromPb(imagePb);
      if (imagePb.getStatus() != null) {
        this.status = Status.valueOf(imagePb.getStatus());
      }
      this.diskSizeGb = imagePb.getDiskSizeGb();
      if (imagePb.getLicenses() != null) {
        this.licenses = Lists.transform(imagePb.getLicenses(), LicenseId.FROM_URL_FUNCTION);
      }
      if (imagePb.getDeprecated() != null) {
        this.deprecationStatus =
            DeprecationStatus.fromPb(imagePb.getDeprecated(), ImageId.FROM_URL_FUNCTION);
      }
    }

    @Override
    BuilderImpl setGeneratedId(String generatedId) {
      this.generatedId = generatedId;
      return this;
    }

    @Override
    BuilderImpl getCreationTimestamp(Long creationTimestamp) {
      this.creationTimestamp = creationTimestamp;
      return this;
    }

    @Override
    public BuilderImpl setImageId(ImageId imageId) {
      this.imageId = checkNotNull(imageId);
      return this;
    }

    @Override
    public BuilderImpl setDescription(String description) {
      this.description = description;
      return this;
    }

    @Override
    public BuilderImpl setConfiguration(ImageConfiguration configuration) {
      this.configuration = checkNotNull(configuration);
      return this;
    }

    @Override
    BuilderImpl setStatus(Status status) {
      this.status = status;
      return this;
    }

    @Override
    BuilderImpl setDiskSizeGb(Long diskSizeGb) {
      this.diskSizeGb = diskSizeGb;
      return this;
    }

    @Override
    BuilderImpl setLicenses(List<LicenseId> licenses) {
      // Defensive copy: callers cannot mutate the list after handing it to the builder
      this.licenses = licenses != null ? ImmutableList.copyOf(licenses) : null;
      return this;
    }

    @Override
    BuilderImpl setDeprecationStatus(DeprecationStatus<ImageId> deprecationStatus) {
      this.deprecationStatus = deprecationStatus;
      return this;
    }

    @Override
    public ImageInfo build() {
      return new ImageInfo(this);
    }
  }

  ImageInfo(BuilderImpl builder) {
    this.generatedId = builder.generatedId;
    this.creationTimestamp = builder.creationTimestamp;
    // imageId and configuration are the only mandatory properties
    this.imageId = checkNotNull(builder.imageId);
    this.description = builder.description;
    this.configuration = checkNotNull(builder.configuration);
    this.status = builder.status;
    this.diskSizeGb = builder.diskSizeGb;
    this.licenses = builder.licenses;
    this.deprecationStatus = builder.deprecationStatus;
  }

  /**
   * Returns the service-generated unique identifier for the image.
   */
  public String getGeneratedId() {
    return generatedId;
  }

  /**
   * Returns the creation timestamp in milliseconds since epoch.
   */
  public Long getCreationTimestamp() {
    return creationTimestamp;
  }

  /**
   * Returns the image identity.
   */
  public ImageId getImageId() {
    return imageId;
  }

  /**
   * Returns a textual description of the image.
   */
  public String getDescription() {
    return description;
  }

  /**
   * Returns the image configuration. This method returns an instance of
   * {@link DiskImageConfiguration} if the image was created from a Google Compute Engine disk.
   * This method returns an instance of {@link StorageImageConfiguration} if the image was created
   * from a file stored in Google Cloud Storage.
   */
  @SuppressWarnings("unchecked")
  public <T extends ImageConfiguration> T getConfiguration() {
    return (T) configuration;
  }

  /**
   * Returns all applicable publicly visible licenses.
   */
  public List<LicenseId> getLicenses() {
    return licenses;
  }

  /**
   * Returns the status of the image. An image can be used to create other disks only after it has
   * been successfully created and its status is set to {@link Status#READY}.
   */
  public Status getStatus() {
    return status;
  }

  /**
   * Returns the size of the image when restored onto a persistent disk (in GB).
   */
  public Long getDiskSizeGb() {
    return diskSizeGb;
  }

  /**
   * Returns the deprecation status of the image. If {@link DeprecationStatus#getStatus()} is either
   * {@link DeprecationStatus.Status#DELETED} or {@link DeprecationStatus.Status#OBSOLETE} the
   * image must not be used. Returns {@code null} if the image is not deprecated.
   */
  public DeprecationStatus<ImageId> getDeprecationStatus() {
    return deprecationStatus;
  }

  /**
   * Returns a builder for the current image.
   */
  public Builder toBuilder() {
    return new BuilderImpl(this);
  }

  @Override
  public String toString() {
    return MoreObjects.toStringHelper(this)
        .add("generatedId", generatedId)
        .add("creationTimestamp", creationTimestamp)
        .add("imageId", imageId)
        .add("description", description)
        .add("configuration", configuration)
        .add("status", status)
        .add("diskSizeGb", diskSizeGb)
        .add("licenses", licenses)
        // previously omitted, although equals() (via toPb) takes it into account
        .add("deprecationStatus", deprecationStatus)
        .toString();
  }

  @Override
  public int hashCode() {
    // NOTE(review): deprecationStatus is intentionally left out — it is unverified here whether
    // DeprecationStatus overrides hashCode, and omitting a field cannot break the equals contract.
    return Objects.hash(generatedId, creationTimestamp, imageId, description, configuration, status,
        diskSizeGb, licenses);
  }

  @Override
  public boolean equals(Object obj) {
    // Equality is defined on the protocol-buffer form, so it covers every serialized property
    return obj == this
        || obj != null
        && obj.getClass().equals(ImageInfo.class)
        && Objects.equals(toPb(), ((ImageInfo) obj).toPb());
  }

  /** Returns a copy of this {@code ImageInfo} with the given project ID applied to its identities. */
  ImageInfo setProjectId(String projectId) {
    return toBuilder()
        .setImageId(imageId.setProjectId(projectId))
        .setConfiguration(configuration.setProjectId(projectId))
        .build();
  }

  /** Converts this object to its protocol-buffer representation; null fields are left unset. */
  Image toPb() {
    Image imagePb = configuration.toPb();
    if (generatedId != null) {
      imagePb.setId(new BigInteger(generatedId));
    }
    if (creationTimestamp != null) {
      imagePb.setCreationTimestamp(TIMESTAMP_FORMATTER.print(creationTimestamp));
    }
    imagePb.setName(imageId.getImage());
    imagePb.setDescription(description);
    imagePb.setSelfLink(imageId.getSelfLink());
    if (status != null) {
      imagePb.setStatus(status.name());
    }
    imagePb.setDiskSizeGb(diskSizeGb);
    if (licenses != null) {
      imagePb.setLicenses(Lists.transform(licenses, LicenseId.TO_URL_FUNCTION));
    }
    if (deprecationStatus != null) {
      imagePb.setDeprecated(deprecationStatus.toPb());
    }
    return imagePb;
  }

  /**
   * Returns a builder for an {@code ImageInfo} object given the image identity and an image
   * configuration. Use {@link DiskImageConfiguration} to create an image from an existing disk. Use
   * {@link StorageImageConfiguration} to create an image from a file stored in Google Cloud
   * Storage.
   */
  public static Builder newBuilder(ImageId imageId, ImageConfiguration configuration) {
    return new BuilderImpl().setImageId(imageId).setConfiguration(configuration);
  }

  /**
   * Returns an {@code ImageInfo} object given the image identity and an image configuration. Use
   * {@link DiskImageConfiguration} to create an image from an existing disk. Use
   * {@link StorageImageConfiguration} to create an image from a file stored in Google Cloud
   * Storage.
   */
  public static ImageInfo of(ImageId imageId, ImageConfiguration configuration) {
    return newBuilder(imageId, configuration).build();
  }

  static ImageInfo fromPb(Image imagePb) {
    return new BuilderImpl(imagePb).build();
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.model.rest;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import org.apache.camel.model.OptionalIdentifiedDefinition;
import org.apache.camel.model.RouteDefinition;
import org.apache.camel.model.ToDefinition;
import org.apache.camel.model.ToDynamicDefinition;
import org.apache.camel.spi.Metadata;
/**
 * Rest command
 */
@Metadata(label = "rest")
@XmlRootElement(name = "verb")
@XmlAccessorType(XmlAccessType.FIELD)
public class VerbDefinition extends OptionalIdentifiedDefinition<VerbDefinition> {

    @XmlAttribute
    private String method;

    @XmlElementRef
    private List<RestOperationParamDefinition> params = new ArrayList<>();

    @XmlElementRef
    private List<RestOperationResponseMsgDefinition> responseMsgs = new ArrayList<>();

    @XmlAttribute
    private String uri;

    @XmlAttribute
    private String consumes;

    @XmlAttribute
    private String produces;

    @XmlAttribute
    @Metadata(defaultValue = "auto")
    private RestBindingMode bindingMode;

    @XmlAttribute
    private Boolean skipBindingOnErrorCode;

    @XmlAttribute
    private Boolean enableCORS;

    @XmlAttribute
    private String type;

    @XmlAttribute
    private String outType;

    // used by XML DSL to either select a <to>, <toD>, or <route>
    // so we need to use the common type OptionalIdentifiedDefinition
    // must select one of them, and hence why they are all set to required = true, but the XSD is set to only allow one of the element
    @XmlElements({
        @XmlElement(required = true, name = "to", type = ToDefinition.class),
        @XmlElement(required = true, name = "toD", type = ToDynamicDefinition.class),
        @XmlElement(required = true, name = "route", type = RouteDefinition.class)}
    )
    private OptionalIdentifiedDefinition<?> toOrRoute;

    // the Java DSL uses the to or route definition directly
    @XmlTransient
    private ToDefinition to;

    @XmlTransient
    private ToDynamicDefinition toD;

    @XmlTransient
    private RouteDefinition route;

    @XmlTransient
    private RestDefinition rest;

    @XmlAttribute
    private String routeId;

    @XmlAttribute
    private Boolean apiDocs;

    @Override
    public String getLabel() {
        if (method != null) {
            return method;
        } else {
            return "verb";
        }
    }

    public List<RestOperationParamDefinition> getParams() {
        return params;
    }

    /**
     * To specify the REST operation parameters using Swagger.
     */
    public void setParams(List<RestOperationParamDefinition> params) {
        this.params = params;
    }

    public List<RestOperationResponseMsgDefinition> getResponseMsgs() {
        return responseMsgs;
    }

    /**
     * Sets swagger operation response messages
     */
    public void setResponseMsgs(List<RestOperationResponseMsgDefinition> responseMsgs) {
        // bug fix: the previous code assigned the field to itself
        // (this.responseMsgs = responseMsgs while the parameter was named
        // "params"), silently discarding the supplied list
        this.responseMsgs = responseMsgs;
    }

    public String getMethod() {
        return method;
    }

    /**
     * The HTTP verb such as GET or POST
     */
    public void setMethod(String method) {
        this.method = method;
    }

    public String getUri() {
        return uri;
    }

    /**
     * Uri template of this REST service such as /{id}.
     */
    public void setUri(String uri) {
        this.uri = uri;
    }

    public String getConsumes() {
        return consumes;
    }

    /**
     * To define the content type what the REST service consumes (accept as input), such as application/xml or application/json.
     * This option will override what may be configured on a parent level
     */
    public void setConsumes(String consumes) {
        this.consumes = consumes;
    }

    public String getProduces() {
        return produces;
    }

    /**
     * To define the content type what the REST service produces (uses for output), such as application/xml or application/json
     * This option will override what may be configured on a parent level
     */
    public void setProduces(String produces) {
        this.produces = produces;
    }

    public RestBindingMode getBindingMode() {
        return bindingMode;
    }

    /**
     * Sets the binding mode to use.
     * This option will override what may be configured on a parent level
     * <p/>
     * The default value is auto
     */
    public void setBindingMode(RestBindingMode bindingMode) {
        this.bindingMode = bindingMode;
    }

    public Boolean getSkipBindingOnErrorCode() {
        return skipBindingOnErrorCode;
    }

    /**
     * Whether to skip binding on output if there is a custom HTTP error code header.
     * This allows to build custom error messages that do not bind to json / xml etc, as success messages otherwise will do.
     * This option will override what may be configured on a parent level
     */
    public void setSkipBindingOnErrorCode(Boolean skipBindingOnErrorCode) {
        this.skipBindingOnErrorCode = skipBindingOnErrorCode;
    }

    public Boolean getEnableCORS() {
        return enableCORS;
    }

    /**
     * Whether to enable CORS headers in the HTTP response.
     * This option will override what may be configured on a parent level
     * <p/>
     * The default value is false.
     */
    public void setEnableCORS(Boolean enableCORS) {
        this.enableCORS = enableCORS;
    }

    public String getType() {
        return type;
    }

    /**
     * Sets the class name to use for binding from input to POJO for the incoming data
     * This option will override what may be configured on a parent level
     */
    public void setType(String type) {
        this.type = type;
    }

    public String getOutType() {
        return outType;
    }

    /**
     * Sets the class name to use for binding from POJO to output for the outgoing data
     * This option will override what may be configured on a parent level
     */
    public void setOutType(String outType) {
        this.outType = outType;
    }

    public String getRouteId() {
        return routeId;
    }

    /**
     * The route id this rest-dsl is using (read-only)
     */
    public void setRouteId(String routeId) {
        this.routeId = routeId;
    }

    public Boolean getApiDocs() {
        return apiDocs;
    }

    /**
     * Whether to include or exclude the VerbDefinition in API documentation.
     * <p/>
     * The default value is true.
     */
    public void setApiDocs(Boolean apiDocs) {
        this.apiDocs = apiDocs;
    }

    public RestDefinition getRest() {
        return rest;
    }

    public void setRest(RestDefinition rest) {
        this.rest = rest;
    }

    /**
     * Returns the inlined route, favoring the Java DSL field over the
     * XML DSL {@code toOrRoute} element; {@code null} if no route was set.
     */
    public RouteDefinition getRoute() {
        if (route != null) {
            return route;
        } else if (toOrRoute instanceof RouteDefinition) {
            return (RouteDefinition) toOrRoute;
        } else {
            return null;
        }
    }

    public void setRoute(RouteDefinition route) {
        this.route = route;
        this.toOrRoute = route;
    }

    /**
     * Returns the to-endpoint, favoring the Java DSL field over the
     * XML DSL {@code toOrRoute} element; {@code null} if no to was set.
     */
    public ToDefinition getTo() {
        if (to != null) {
            return to;
        } else if (toOrRoute instanceof ToDefinition) {
            return (ToDefinition) toOrRoute;
        } else {
            return null;
        }
    }

    /**
     * Returns the dynamic to-endpoint, favoring the Java DSL field over the
     * XML DSL {@code toOrRoute} element; {@code null} if no toD was set.
     */
    public ToDynamicDefinition getToD() {
        if (toD != null) {
            return toD;
        } else if (toOrRoute instanceof ToDynamicDefinition) {
            return (ToDynamicDefinition) toOrRoute;
        } else {
            return null;
        }
    }

    public void setTo(ToDefinition to) {
        // to and toD are mutually exclusive
        this.to = to;
        this.toD = null;
        this.toOrRoute = to;
    }

    public void setToD(ToDynamicDefinition to) {
        // to and toD are mutually exclusive
        this.to = null;
        this.toD = to;
        this.toOrRoute = to;
    }

    public OptionalIdentifiedDefinition<?> getToOrRoute() {
        return toOrRoute;
    }

    /**
     * To route from this REST service to a Camel endpoint, or an inlined route
     */
    public void setToOrRoute(OptionalIdentifiedDefinition<?> toOrRoute) {
        this.toOrRoute = toOrRoute;
    }

    // Fluent API
    // -------------------------------------------------------------------------

    public RestDefinition get() {
        return rest.get();
    }

    public RestDefinition get(String uri) {
        return rest.get(uri);
    }

    public RestDefinition post() {
        return rest.post();
    }

    public RestDefinition post(String uri) {
        return rest.post(uri);
    }

    public RestDefinition put() {
        return rest.put();
    }

    public RestDefinition put(String uri) {
        return rest.put(uri);
    }

    public RestDefinition delete() {
        return rest.delete();
    }

    public RestDefinition delete(String uri) {
        return rest.delete(uri);
    }

    public RestDefinition head() {
        return rest.head();
    }

    public RestDefinition head(String uri) {
        return rest.head(uri);
    }

    public RestDefinition verb(String verb) {
        return rest.verb(verb);
    }

    public RestDefinition verb(String verb, String uri) {
        return rest.verb(verb, uri);
    }

    /**
     * Returns the HTTP verb this definition represents, derived from the
     * concrete subclass type so the JAXB model does not repeat itself
     * (e.g. outputting {@code <get method="get">}); falls back to the
     * {@code method} attribute for the generic {@code <verb>} element.
     */
    public String asVerb() {
        if (this instanceof GetVerbDefinition) {
            return "get";
        } else if (this instanceof PostVerbDefinition) {
            return "post";
        } else if (this instanceof PutVerbDefinition) {
            return "put";
        } else if (this instanceof PatchVerbDefinition) {
            return "patch";
        } else if (this instanceof DeleteVerbDefinition) {
            return "delete";
        } else if (this instanceof HeadVerbDefinition) {
            return "head";
        } else if (this instanceof OptionsVerbDefinition) {
            return "options";
        } else {
            return method;
        }
    }
}
| |
package tlc2;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import java.lang.reflect.Method;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import org.jline.reader.EndOfFileException;
import org.jline.reader.LineReader;
import org.jline.reader.LineReaderBuilder;
import org.jline.reader.UserInterruptException;
import org.jline.reader.impl.DefaultParser;
import org.jline.reader.impl.history.DefaultHistory;
import org.jline.terminal.Terminal;
import org.jline.terminal.TerminalBuilder;
import tla2sany.semantic.ModuleNode;
import tla2sany.semantic.OpDefNode;
import tlc2.output.EC;
import tlc2.output.MP;
import tlc2.tool.EvalException;
import tlc2.tool.impl.FastTool;
import tlc2.tool.impl.Tool;
import tlc2.value.impl.Value;
import util.Assert;
import util.SimpleFilenameToStream;
import util.TLAConstants;
import util.ToolIO;
/**
* A TLA+ REPL which provides an interactive mode of evaluating expressions and specifications.
*/
public class REPL {

    // Location of the persistent command history file (~/.tlaplus/history.repl).
    private static final String HISTORY_PATH = System.getProperty("user.home", "") + File.separator + ".tlaplus" + File.separator + "history.repl";

    // The spec file to use in the REPL context, if any.
    private File specFile = null;

    // The naming prefix of the temporary directory.
    static final String TEMP_DIR_PREFIX = "tlarepl";

    // The name of the spec used for evaluating expressions.
    final String REPL_SPEC_NAME = "tlarepl";

    // Prompt shown to the user for each input line.
    private static final String prompt = "(tla+) ";

    // Sink for output produced by TLC!Print / TLC!PrintT during evaluation.
    private final Writer replWriter = new PrintWriter(System.out);

    // A temporary directory to place auxiliary files needed for REPL evaluation.
    Path replTempDir;

    /**
     * Creates a REPL whose scratch spec/config files are written to the given temporary directory.
     */
    public REPL(Path tempDir) {
        replTempDir = tempDir;
    }

    public void setSpecFile(final File pSpecFile) {
        specFile = pSpecFile;
    }

    /**
     * Evaluate the given string input as a TLA+ expression.
     *
     * The input is wrapped into a generated one-shot module (with dummy Init/Next predicates and a
     * definition bound to the expression), written to the temp directory, parsed with a fresh
     * {@link FastTool}, and evaluated. Errors are reported on stdout rather than thrown.
     *
     * @return the pretty printed result of the evaluation or an empty string if there was an error.
     */
    public String processInput(String evalExpr) {
        // The modules we will extend in the REPL environment.
        String moduleExtends = "Reals,Sequences,Bags,FiniteSets,TLC,Randomization";

        try {
            // Try loading the "index" class of the Community Modules that define
            // popular modules that should be loaded by default. If the Community Modules
            // are not present, silently fail.
            final Class<?> clazz = Class.forName("tlc2.overrides.CommunityModules");
            final Method m = clazz.getDeclaredMethod("popularModules");
            moduleExtends += String.format(",%s", m.invoke(null));
        } catch (Exception | NoClassDefFoundError ignore) {
        }

        if (specFile != null) {
            // NOTE(review): TLA_EXTENSION is used as a regex here; its "." matches any
            // character, not only a literal dot. Consider Pattern.quote — confirm intent.
            String mainModuleName = specFile.getName().replaceFirst(TLAConstants.Files.TLA_EXTENSION + "$", "");
            moduleExtends += ("," + mainModuleName);
        }

        File tempFile, configFile;

        try {
            // We want to place the spec files used by REPL evaluation into the temporary directory.
            tempFile = new File(replTempDir.toString(), REPL_SPEC_NAME + TLAConstants.Files.TLA_EXTENSION);
            configFile = new File(replTempDir.toString(), REPL_SPEC_NAME + TLAConstants.Files.CONFIG_EXTENSION);

            // Create the config file.
            // NOTE(review): cfgWriter (and writer below) leak if an append throws;
            // consider try-with-resources.
            BufferedWriter cfgWriter = new BufferedWriter(new FileWriter(configFile.getAbsolutePath(), false));
            cfgWriter.append("INIT replinit");
            cfgWriter.newLine();
            cfgWriter.append("NEXT replnext");
            cfgWriter.newLine();
            cfgWriter.close();

            // Create the spec file lines.
            ArrayList<String> lines = new ArrayList<String>();
            String replValueVarName = "replvalue";
            lines.add("---- MODULE tlarepl ----");
            lines.add("EXTENDS " + moduleExtends);
            lines.add("VARIABLE replvar");
            // Dummy Init and Next predicates.
            lines.add("replinit == replvar = 0");
            lines.add("replnext == replvar' = 0");
            // The expression to evaluate.
            lines.add(replValueVarName + " == " + evalExpr);
            lines.add("====");

            // Write out the spec file.
            BufferedWriter writer = new BufferedWriter(new FileWriter(tempFile.getAbsolutePath(), false));
            for (String line : lines) {
                writer.append(line);
                writer.newLine();
            }
            writer.close();

            // Avoid sending log messages to stdout and reset the messages recording.
            ToolIO.setMode(ToolIO.TOOL);
            ToolIO.reset();

            try {
                // We placed the REPL spec files into a temporary directory, so, we add this temp directory
                // path to the filename resolver used by the Tool.
                SimpleFilenameToStream resolver = new SimpleFilenameToStream(replTempDir.toAbsolutePath().toString());
                Tool tool = new FastTool(REPL_SPEC_NAME, REPL_SPEC_NAME, resolver);
                ModuleNode module = tool.getSpecProcessor().getRootModule();
                OpDefNode valueNode = module.getOpDef(replValueVarName);
                // Make output of TLC!Print and TLC!PrintT appear in the REPL. Set here
                // and unset in finally below to suppress output of FastTool instantiation
                // above.
                tlc2.module.TLC.OUTPUT = replWriter;
                final Value exprVal = (Value) tool.eval(valueNode.getBody());
                return exprVal.toString();
            } catch (EvalException exc) {
                // TODO: Improve error messages with more specific detail.
                System.out.printf("Error evaluating expression: '%s'%n%s%n", evalExpr, exc);
            } catch (Assert.TLCRuntimeException exc) {
                if (exc.parameters != null && exc.parameters.length > 0) {
                    // 0..1 \X 0..1 has non-null params of length zero. Actual error message is
                    // "Parsing or semantic analysis failed.".
                    System.out.printf("Error evaluating expression: '%s'%n%s%n", evalExpr,
                            Arrays.toString(exc.parameters));
                } else if (exc.getMessage() != null) {
                    // Examples of what ends up here:
                    // 23 = TRUE
                    // Attempted to evaluate an expression of form P \/ Q when P was an integer.
                    // 23 \/ TRUE
                    // Attempted to check equality of integer 23 with non-integer: TRUE
                    // CHOOSE x \in Nat : x = 4
                    // Attempted to compute the value of an expression of form CHOOSE x \in S: P, but S was not enumerable.
                    String msg = exc.getMessage().trim();
                    // Strip meaningless location from error message.
                    msg = msg.replaceFirst("\\nline [0-9]+, col [0-9]+ to line [0-9]+, col [0-9]+ of module tlarepl$", "");
                    // Replace any newlines with whitespaces.
                    msg = msg.replaceAll("\\n", " ").trim();
                    System.out.printf("Error evaluating expression: '%s'%n%s%n", evalExpr, msg);
                } else {
                    System.out.printf("Error evaluating expression: '%s'%n", evalExpr);
                }
            } finally {
                // Always flush buffered Print/PrintT output and detach the REPL writer so
                // later FastTool instantiations do not write into the REPL.
                replWriter.flush();
                tlc2.module.TLC.OUTPUT = null;
            }
        } catch (IOException pe) {
            pe.printStackTrace();
        }
        return "";
    }

    /**
     * Runs the main REPL loop continuously until there is a fatal error or a user interrupt.
     */
    public void runREPL(final LineReader reader) throws IOException {
        // Run the loop.
        String expr;
        while (true) {
            try {
                expr = reader.readLine(prompt);
                String res = processInput(expr);
                // Empty string signals an evaluation error which was already printed.
                if (res.equals("")) {
                    continue;
                }
                System.out.println(res);
            } catch (UserInterruptException e) {
                // Ctrl-C: leave the loop quietly.
                return;
            } catch (EndOfFileException e) {
                e.printStackTrace();
                return;
            } finally {
                // Persistent file and directory will be created on demand.
                reader.getHistory().save();
            }
        }
    }

    /**
     * Entry point: with one argument, evaluates it once and exits; otherwise starts the
     * interactive jline-based loop.
     */
    public static void main(String[] args) {
        try {
            final Path tempDir = Files.createTempDirectory(TEMP_DIR_PREFIX);
            final REPL repl = new REPL(tempDir);
            // TODO: Allow external spec file to be loaded into REPL context.
            if(args.length == 1) {
                String res = repl.processInput(args[0]);
                if (!res.equals("")) {
                    System.out.println(res);
                }
                //TODO Return actual exit value if parsing/evaluation fails.
                System.exit(0);
            }
            // For TLA+ we don't want to treat backslashes as escape chars e.g. for LaTeX like operators.
            final DefaultParser parser = new DefaultParser();
            parser.setEscapeChars(null);
            final Terminal terminal = TerminalBuilder.builder().build();
            final LineReader reader = LineReaderBuilder.builder().parser(parser).terminal(terminal)
                    .history(new DefaultHistory()).build();
            reader.setVariable(LineReader.HISTORY_FILE, HISTORY_PATH);
            System.out.println("Welcome to the TLA+ REPL!");
            MP.printMessage(EC.TLC_VERSION, TLCGlobals.versionOfTLC);
            System.out.println("Enter a constant-level TLA+ expression.");
            repl.runREPL(reader);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.jini.reliableLog;
import java.io.*;
/**
* This class is a simple implementation of a reliable Log. The
* client of a ReliableLog must provide a set of callbacks (via a
* LogHandler) that enables a ReliableLog to read and write snapshots
* (checkpoints) and log records. This implementation ensures that the
* data stored (via a ReliableLog) is recoverable after a system crash.
* The implementation is unsynchronized; the client must synchronize
* externally. <p>
*
* The secondary storage strategy is to record values in files using a
* representation of the caller's choosing. Two sorts of files are
* kept: snapshots and logs. At any instant, one snapshot is current.
* The log consists of a sequence of updates that have occurred since
* the current snapshot was taken. The current stable state is the
* value of the snapshot, as modified by the sequence of updates in
* the log. From time to time, the client of a ReliableLog instructs
* the package to make a new snapshot and clear the log. A ReliableLog
* arranges disk writes such that updates are stable (as long as the
* changes are force-written to disk) and atomic: no update is lost,
* and each update either is recorded completely in the log or not at
* all. Making a new snapshot is also atomic. <p>
*
* Normal use for maintaining the recoverable store is as follows: The
* client maintains the relevant data structure in virtual memory. As
* updates happen to the structure, the client informs the ReliableLog
* (call it "log") by calling log.update. Periodically, the client
* calls log.snapshot to provide the current complete contents of the
* data. On restart, the client calls log.recover to obtain the
* latest snapshot and the following sequences of updates; the client
* applies the updates to the snapshot to obtain the state that
* existed before the crash. <p>
*
* @author Sun Microsystems, Inc.
*
* @see LogHandler
*
*/
public class ReliableLog {

    private static final String snapshotPrefix = "Snapshot.";
    private static final String logfilePrefix = "Logfile.";
    private static final String versionFile = "Version_Number";
    // Marker written at the start of new-format (padded) log files.
    private static final int MAGIC = 0xf2ecefe7;
    private static final int FORMAT_UNPADDED = 0;
    private static final int FORMAT_PADDED = 1;
    // Size in bytes of a serialized int (length headers, magic, format word).
    private static final long intBytes = 4;

    private final File dir; // base directory
    private int version = 0; // current snapshot and log version
    private int format = FORMAT_UNPADDED;
    private String logName = null;
    private RandomAccessFile log = null;
    private FileDescriptor logFD;
    private long snapshotBytes = 0;
    private long logBytes = 0;
    private final LogHandler handler;
    // Scratch buffer for single-write int output (see writeInt).
    private final byte[] intBuf = new byte[4];
    // All-zero length header written after each update as the next record's placeholder.
    private final byte[] zeroBuf = new byte[4];

    /**
     * Creates a ReliableLog to handle snapshots and logging in a
     * stable storage directory, and sets up to recover any existing data
     * from the stable storage directory. If there is no existing data,
     * snapshot must be called next, otherwise recover must be called next.
     *
     * @param dirPath path to the stable storage directory
     * @param handler the handler for log callbacks
     *
     * @throws LogException if the directory cannot be created or
     * the current version in the directory is corrupted
     * @throws IOException if any other I/O error occurs
     */
    public ReliableLog(String dirPath, LogHandler handler) throws IOException {
        dir = new File(dirPath);
        if (!(dir.exists() ? dir.isDirectory() : dir.mkdir())) {
            throw new LogException("could not create directory for log: " +
                                   dirPath);
        }
        this.handler = handler;
        try {
            DataInputStream in =
                new DataInputStream(new FileInputStream(fName(versionFile)));
            try {
                version = in.readInt();
            } finally {
                in.close();
            }
        } catch (IOException ex) {
            // No readable version file: treat as a fresh store and persist version 0.
            writeVersionFile();
        }
        if (version < 0) {
            throw new LogException("corrupted version file");
        }
    }

    /**
     * Retrieves the contents of the snapshot file by calling the client
     * supplied recover callback and then applies the incremental updates
     * by calling the readUpdate callback for each logged updated.
     *
     * @throws LogException if recovery fails due to serious log corruption,
     * or if an exception is thrown by the recover or readUpdate callbacks
     * @throws IOException if an other I/O error occurs
     */
    public void recover() throws IOException {
        // version == 0 means no snapshot has ever been taken; nothing to recover.
        if (version == 0)
            return;

        String fname = versionName(snapshotPrefix);
        File file = new File(fname);
        InputStream in = new BufferedInputStream(new FileInputStream(file));
        try {
            handler.recover(in);
        } catch (Exception e) {
            throw new LogException("recovery failed", e);
        } finally {
            in.close();
        }
        snapshotBytes = file.length();

        fname = versionName(logfilePrefix);
        file = new File(fname);
        DataInputStream din =
            new DataInputStream(new BufferedInputStream(
                                                new FileInputStream(file)));
        long length = file.length();
        try {
            int updateLen = din.readInt();
            /* have to worry about no MAGIC in original format */
            if (updateLen == MAGIC) {
                format = din.readInt();
                if (format != FORMAT_PADDED) {
                    throw new LogException("corrupted log: bad log format");
                }
                logBytes += (intBytes + intBytes);
                updateLen = din.readInt();
            }
            while (true) {
                if (updateLen == 0) { /* expected termination case */
                    break;
                }
                if (updateLen < 0) { /* serious corruption */
                    throw new LogException("corrupted log: bad update length");
                }
                if (length - logBytes - intBytes < updateLen) {
                    /* partial record at end of log; this should not happen
                     * if forceToDisk is always true, but might happen if
                     * buffered updates are used.
                     */
                    break;
                }
                try {
                    handler.readUpdate(new LogInputStream(din, updateLen));
                } catch (Exception e) {
                    throw new LogException("read update failed", e);
                }
                logBytes += (intBytes + updateLen);
                if (format == FORMAT_PADDED) {
                    // Skip the pad bytes that align each record to a 4-byte boundary.
                    int offset = (int)logBytes & 3;
                    if (offset > 0) {
                        offset = 4 - offset;
                        logBytes += offset;
                        din.skipBytes(offset);
                    }
                }
                updateLen = din.readInt();
            } /* while */
        } catch (EOFException e) {
            // A truncated final header simply ends replay; recovered state is still valid.
        } finally {
            din.close();
        }
        /* reopen log file at end */
        openLogFile();
    }

    /**
     * Records this update in the log file and forces the update to disk.
     * The update is recorded by calling the client's writeUpdate callback.
     * This method must not be called until this log's recover method has
     * been invoked (and completed).
     *
     * @param value the object representing the update
     *
     * @throws LogException if an exception is thrown by the writeUpdate
     * callback, or forcing the update to disk fails
     * @throws IOException if any other I/O error occurs
     */
    public void update(Object value) throws IOException {
        update(value, true);
    }

    /**
     * Records this update in the log file and optionally forces the update
     * to disk. The update is recorded by calling the client's writeUpdate
     * callback. This method must not be called until this log's recover
     * method has been invoked (and completed).
     *
     * @param value the object representing the update
     * @param forceToDisk true if the update should be forced to disk, false
     * if the updates should be buffered
     *
     * @throws LogException if an exception is thrown by the writeUpdate
     * callback, or forcing the update to disk fails
     * @throws IOException if any other I/O error occurs
     */
    public void update(Object value, boolean forceToDisk) throws IOException {
        /* avoid accessing a null log field */
        if (log == null) {
            throw new LogException("log file for persistent state is "
                                   +"inaccessible, it may have been "
                                   +"corrupted or closed");
        }
        /* note: zero length header for this update was written as part
         * of the previous update, or at initial opening of the log file
         */
        try {
            handler.writeUpdate(new LogOutputStream(log), value);
        } catch (Exception e) {
            throw new LogException("write update failed", e);
        }
        if (forceToDisk) {
            /* must force contents to disk before writing real length header */
            try {
                logFD.sync();
            } catch (SyncFailedException sfe) {
                throw new LogException("sync log failed", sfe);
            }
        }
        long entryEnd = log.getFilePointer();
        long updateLen = entryEnd - logBytes - intBytes;
        if (updateLen > Integer.MAX_VALUE) {
            throw new LogException("maximum record length exceeded");
        }
        /* write real length header */
        log.seek(logBytes);
        writeInt(log, (int) updateLen);
        /* pad out update record so length header does not span disk blocks */
        if (format == FORMAT_PADDED) {
            entryEnd = (entryEnd + 3) & ~3L;
        }
        /* write zero length header for next update */
        log.seek(entryEnd);
        log.write(zeroBuf);
        logBytes = entryEnd;
        /* force both length headers to disk */
        if (forceToDisk) {
            try {
                logFD.sync();
            } catch (SyncFailedException sfe) {
                throw new LogException("sync log failed", sfe);
            }
        }
    }

    /**
     * Write an int value in single write operation.
     *
     * @param out output stream
     * @param val int value
     * @throws IOException if any other I/O error occurs
     */
    private void writeInt(DataOutput out, int val) throws IOException {
        // Big-endian encoding, emitted in one write so the header hits disk atomically.
        intBuf[0] = (byte) (val >> 24);
        intBuf[1] = (byte) (val >> 16);
        intBuf[2] = (byte) (val >> 8);
        intBuf[3] = (byte) val;
        out.write(intBuf);
    }

    /**
     * Records the client-defined current snapshot by invoking the client
     * supplied snapshot callback, and then empties the log of incremental
     * updates.
     *
     * @throws LogException if the snapshot callback throws an exception
     * @throws IOException if any other I/O error occurs
     */
    public void snapshot() throws IOException {
        // NOTE(review): the in-memory version is bumped before the snapshot is
        // written; if the callback throws, the on-disk version file is untouched
        // (writeVersionFile is only reached on success) but this instance's
        // version no longer matches it — confirm callers discard the instance
        // on failure.
        int oldVersion = version;
        version++;

        String fname = versionName(snapshotPrefix);
        File snapshotFile = new File(fname);
        FileOutputStream out = new FileOutputStream(snapshotFile);
        try {
            try {
                handler.snapshot(out);
                /* force contents to disk */
                out.getFD().sync();
            } catch (Exception e) {
                throw new LogException("snapshot failed", e);
            }
            snapshotBytes = snapshotFile.length();
        } finally {
            out.close();
        }

        logBytes = 0;
        openLogFile();
        writeVersionFile();
        deleteSnapshot(oldVersion);
        deleteLogFile(oldVersion);
    }

    /**
     * Closes the stable storage directory in an orderly manner.
     *
     * @throws IOException if an I/O error occurs
     */
    public void close() throws IOException {
        if (log == null) return;
        try {
            log.close();
        } finally {
            // Null out even if close() threw, so update() fails fast afterwards.
            log = null;
        }
    }

    /**
     * Closes the incremental update log file, removes all ReliableLog-related
     * files from the stable storage directory, and deletes the directory.
     * Best-effort: each step's failure is ignored so the remaining files are
     * still removed.
     */
    public void deletePersistentStore() {
        try {
            close();
        } catch (IOException e) {
        }
        try {
            deleteLogFile(version);
        } catch (LogException e) {
        }
        try {
            deleteSnapshot(version);
        } catch (LogException e) {
        }
        try {
            deleteFile(fName(versionFile));
        } catch (LogException e) {
        }
        try {
            /* Delete the directory. The following call to the delete method
             * will fail only if the directory is not empty or if the Security
             * Manager's checkDelete() method throws a SecurityException.
             * (The Security Manager will throw such an exception if it
             * determines that the current application is not allowed to
             * delete the directory.) For either case, upon un-successful
             * deletion of the directory, take no further action.
             */
            dir.delete();
        } catch (SecurityException e) {
        }
    }

    /**
     * Returns the size of the current snapshot file in bytes.
     */
    public long snapshotSize() { return snapshotBytes; }

    /**
     * Returns the current size of the incremental update log file in bytes.
     */
    public long logSize() { return logBytes; }

    /**
     * Generates a filename prepended with the stable storage directory path.
     *
     * @param name the name of the file (sans directory path)
     */
    private String fName(String name) {
        return dir.getPath() + File.separator + name;
    }

    /**
     * Generates a version filename prepended with the stable storage
     * directory path with the current version number as a suffix.
     *
     * @param name version filename prefix
     */
    private String versionName(String name) {
        return versionName(name, version);
    }

    /**
     * Generates a version filename prepended with the stable storage
     * directory path with the given version number as a suffix.
     *
     * @param prefix version filename prefix
     * @param ver version number
     */
    private String versionName(String prefix, int ver) {
        return fName(prefix) + String.valueOf(ver);
    }

    /**
     * Deletes a file.
     *
     * @param name the name of the file (complete path)
     * @throws LogException if file cannot be deleted
     */
    private void deleteFile(String name) throws LogException {
        if (!new File(name).delete()) {
            throw new LogException("couldn't delete file: " + name);
        }
    }

    /**
     * Removes the snapshot file.
     *
     * @param ver the version to remove
     *
     * @throws LogException if file cannot be deleted
     */
    private void deleteSnapshot(int ver) throws LogException {
        // Version 0 has no files on disk.
        if (ver != 0) {
            deleteFile(versionName(snapshotPrefix, ver));
        }
    }

    /**
     * Removes the incremental update log file.
     *
     * @param ver the version to remove
     *
     * @throws LogException if file cannot be deleted
     */
    private void deleteLogFile(int ver) throws LogException {
        // Version 0 has no files on disk.
        if (ver != 0) {
            deleteFile(versionName(logfilePrefix, ver));
        }
    }

    /**
     * Opens the incremental update log file in read/write mode. If the
     * file does not exist, it is created.
     *
     * @throws IOException if an I/O error occurs
     */
    private void openLogFile() throws IOException {
        try {
            close();
        } catch (IOException e) { /* assume this is okay */
        }

        logName = versionName(logfilePrefix);
        log = new RandomAccessFile(logName, "rw");
        logFD = log.getFD();

        if (logBytes == 0) {
            // Fresh log: write MAGIC + format so recover() can detect the padded format.
            format = FORMAT_PADDED;
            writeInt(log, MAGIC);
            writeInt(log, format);
            logBytes = (intBytes + intBytes);
        } else {
            // Existing log (post-recover): position at the replayed end.
            log.seek(logBytes);
        }
        // Truncate any partial trailing record.
        log.setLength(logBytes);
        /* always start out with zero length header for the next update */
        log.write(zeroBuf);
        /* force length header to disk */
        logFD.sync();
    }

    /**
     * Writes the current version number to the version file.
     *
     * @throws IOException if an I/O error occurs
     */
    private void writeVersionFile() throws IOException {
        RandomAccessFile out = new RandomAccessFile(fName(versionFile), "rw");
        try {
            /* write should be atomic (four bytes on one disk block) */
            writeInt(out, version);
            /* force version to disk */
            out.getFD().sync();
        } finally {
            out.close();
        }
    }
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.eas.designer.explorer.project;
import com.eas.designer.application.project.ClientType;
import com.eas.designer.application.project.AppServerType;
import com.eas.designer.application.project.PlatypusProjectSettings;
import com.eas.util.StringUtils;
import java.beans.PropertyChangeSupport;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.logging.Level;
import org.openide.ErrorManager;
import org.openide.filesystems.FileObject;
import org.openide.util.EditableProperties;
/**
* The facade class for the settings of a project.
*
* @author vv
*/
public class PlatypusProjectSettingsImpl implements PlatypusProjectSettings {
public static final int DEFAULT_PLATYPUS_SERVER_PORT = 8500;
public static final int CLIENT_APP_DEFAULT_DEBUG_PORT = 8900;
public static final int SERVER_APP_DEFAULT_DEBUG_PORT = 8901;
public static final Level DEFAULT_LOG_LEVEL = Level.INFO;
public static final String PROJECT_SETTINGS_FILE = "project.properties"; //NOI18N
public static final String PROJECT_PRIVATE_SETTINGS_FILE = "private.properties"; //NOI18N
public static final String PROJECT_DISPLAY_NAME_KEY = "projectDisplayName"; //NOI18N
public static final String RUN_ELEMENT_KEY = "runElement"; //NOI18N
public static final String DEFAULT_DATA_SOURCE_ELEMENT_KEY = "defaultDataSource"; //NOI18N
public static final String RUN_USER_KEY = "runUser"; //NOI18N
public static final String RUN_PASSWORD_KEY = "runPassword"; //NOI18N
public static final String RUN_CLIENT_OPTIONS_KEY = "runClientOptions"; //NOI18N
public static final String RUN_CLIENT_VM_OPTIONS_KEY = "runClientVmOptions"; //NOI18N
public static final String RUN_SERVER_OPTIONS_KEY = "runServerOptions"; //NOI18N
public static final String RUN_SERVER_VM_OPTIONS_KEY = "runServerVmOptions"; //NOI18N
public static final String SERVER_PORT_KEY = "serverPort";//NOI18N
public static final String CLIENT_URL_KEY = "clientUrl";//NOI18N
public static final String NOT_START_SERVER_KEY = "notStartServer"; //NOI18N
public static final String DEBUG_CLIENT_PORT_KEY = "debugClientPort"; //NOI18N
public static final String DEBUG_SERVER_PORT_KEY = "debugServerPort"; //NOI18N
public static final String CLIENT_LOG_LEVEL = "clientLogLevel"; //NOI18N
public static final String SERVER_LOG_LEVEL = "serverLogLevel"; //NOI18N
public static final String J2EE_SERVER_ID_KEY = "j2eeServerId"; //NOI18N
public static final String SERVER_CONTEXT_KEY = "context";//NOI18N
public static final String ENABLE_SECURITY_REALM_KEY = "enableSecurityRealm";//NOI18N
public static final String CLIENT_TYPE_KEY = "clientType"; //NOI18N
public static final String SERVER_TYPE_KEY = "serverType"; //NOI18N
protected static final String START_JS_FILE_TEMPLATE = "" //NOI18N
+ "/**\n" //NOI18N
+ " * Do not edit this file manually, it will be overwritten by\n" //NOI18N
+ " * Platypus Application Designer.\n" //NOI18N
+ " */\n" //NOI18N
+ "// this === global\n" //NOI18N
+ "(function () {\n" //NOI18N
+ " function ready() {\n" //NOI18N
+ " P.cacheBust = true;\n"
+ " var startModule = '%s';\n" //NOI18N
+ " P.require([startModule], function(){\n" //NOI18N
+ " %s"//NOI18N
+ " %s"//NOI18N
+ " }, function(e){\n" //NOI18N
+ " P.Logger.severe(e);\n"
+ " var messageParagraph = document.createElement(\"p\");\n"
+ " document.body.appendChild(messageParagraph);\n"
+ " messageParagraph.innerHTML = \"An error occured while require('\" + startModule + \"'). Error: \" + e;\n"
+ " messageParagraph.style.margin = '10px';\n"
+ " messageParagraph.style.fontFamily = 'Arial';\n"
+ " messageParagraph.style.fontSize = '14pt';\n" //NOI18N
+ " });\n"//NOI18N
+ " }\n"//NOI18N
+ " if(!this.P) {\n" //NOI18N
+ " this.P = {};\n" //NOI18N
+ " P.ready = ready;\n" //NOI18N
+ " } else {\n" //NOI18N
+ " ready();\n" //NOI18N
+ " }\n" //NOI18N
+ "})();\n"; //NOI18N
protected final FileObject projectDir;
protected final PropertyChangeSupport changeSupport = new PropertyChangeSupport(this);
protected EditableProperties projectProperties;
protected EditableProperties projectPrivateProperties;
private boolean projectPropertiesIsDirty;
private boolean projectPrivatePropertiesIsDirty;
public PlatypusProjectSettingsImpl(FileObject aProjectDir) throws Exception {
if (aProjectDir == null) {
throw new IllegalArgumentException("Project directory file object is null."); //NOI18N
}
projectDir = aProjectDir;
projectProperties = new EditableProperties(false);
try (InputStream is = getProjectSettingsFileObject().getInputStream()) {
projectProperties.load(is);
}
projectPrivateProperties = new EditableProperties(false);
try (InputStream is = getProjectPrivateSettingsFileObject().getInputStream()) {
projectPrivateProperties.load(is);
}
}
/**
* Gets the project's display name.
*
* @return title for the project
*/
@Override
public String getDisplayName() {
return projectProperties.get(PROJECT_DISPLAY_NAME_KEY);
}
/**
* Sets the project's display name.
*
* @param aValue title for the project
*/
@Override
public void setDisplayName(String aValue) {
if (aValue == null) {
throw new NullPointerException("The Display name parameter cannot be null."); // NOI18N
}
String oldValue = getDisplayName();
projectProperties.setProperty(PROJECT_DISPLAY_NAME_KEY, aValue);
projectPropertiesIsDirty = true;
changeSupport.firePropertyChange(PROJECT_DISPLAY_NAME_KEY, oldValue, aValue);
}
/**
* Gets default application element to run.
*
* @return application element name
*/
@Override
public String getRunElement() {
return projectProperties.get(RUN_ELEMENT_KEY);
}
/**
* Sets default application element to run.
*
* @param aValue application element name
* @throws java.lang.Exception
*/
@Override
public void setRunElement(String aValue) throws Exception {
String oldValue = getRunElement();
if (oldValue == null ? aValue != null : !oldValue.equals(aValue)) {
if (aValue != null && !aValue.isEmpty()) {
projectProperties.setProperty(RUN_ELEMENT_KEY, aValue);
projectPropertiesIsDirty = true;
} else {
projectProperties.remove(RUN_ELEMENT_KEY);
}
changeSupport.firePropertyChange(RUN_ELEMENT_KEY, oldValue, aValue);
}
}
/**
* Get the default data source name
*
* @return string of the default data source name
*/
@Override
public String getDefaultDataSourceName() {
return projectPrivateProperties.get(DEFAULT_DATA_SOURCE_ELEMENT_KEY);
}
/**
* Sets the default data source name for a project
*
* @param aValue a default data source name
*/
@Override
public void setDefaultDatasourceName(String aValue) {
String oldValue = getDefaultDataSourceName();
if (aValue != null) {
projectPrivateProperties.setProperty(DEFAULT_DATA_SOURCE_ELEMENT_KEY, aValue);
} else {
projectPrivateProperties.remove(DEFAULT_DATA_SOURCE_ELEMENT_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(DEFAULT_DATA_SOURCE_ELEMENT_KEY, oldValue, aValue);
}
/**
* Gets username for the Platypus user to login on application run.
*
* @return Platypus user name
*/
@Override
public String getRunUser() {
return projectPrivateProperties.get(RUN_USER_KEY);
}
/**
* Sets username for the Platypus user to login on application run.
*
* @param aValue Platypus user name
*/
@Override
public void setRunUser(String aValue) {
String oldValue = getRunUser();
if (aValue != null) {
projectPrivateProperties.setProperty(RUN_USER_KEY, aValue);
} else {
projectPrivateProperties.remove(RUN_USER_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(RUN_USER_KEY, oldValue, aValue);
}
/**
* Gets password for the Platypus user to login on application run.
*
* @return Platypus user name
*/
@Override
public String getRunPassword() {
return projectPrivateProperties.get(RUN_PASSWORD_KEY);
}
/**
* Sets password for the Platypus user to login on application run.
*
* @param aValue Platypus user name
*/
@Override
public void setRunPassword(String aValue) {
String oldValue = getRunPassword();
if (aValue != null) {
projectPrivateProperties.setProperty(RUN_PASSWORD_KEY, aValue);
} else {
projectPrivateProperties.remove(RUN_PASSWORD_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(RUN_PASSWORD_KEY, oldValue, aValue);
}
/**
* Gets optional parameters provided to Platypus Client.
*
* @return parameters string
*/
@Override
public String getRunClientOptions() {
return projectPrivateProperties.get(RUN_CLIENT_OPTIONS_KEY);
}
/**
* Sets optional parameters provided to Platypus Client.
*
* @param aValue
*/
@Override
public void setClientOptions(String aValue) {
String oldValue = getRunClientOptions();
if (aValue != null) {
projectPrivateProperties.setProperty(RUN_CLIENT_OPTIONS_KEY, aValue);
} else {
projectPrivateProperties.remove(RUN_CLIENT_OPTIONS_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(RUN_CLIENT_OPTIONS_KEY, oldValue, aValue);
}
/**
* Gets JVM options provided to Platypus Client.
*
* @return parameters string
*/
@Override
public String getRunClientVmOptions() {
return projectPrivateProperties.get(RUN_CLIENT_VM_OPTIONS_KEY);
}
/**
* Sets JVM options provided to Platypus Client.
*
* @param aValue
*/
@Override
public void setClientVmOptions(String aValue) {
String oldValue = getRunClientVmOptions();
if (aValue != null) {
projectPrivateProperties.setProperty(RUN_CLIENT_VM_OPTIONS_KEY, aValue);
} else {
projectPrivateProperties.remove(RUN_CLIENT_VM_OPTIONS_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(RUN_CLIENT_VM_OPTIONS_KEY, oldValue, aValue);
}
/**
* Gets optional parameters provided to Platypus Application Server.
*
* @return parameters string
*/
@Override
public String getRunServerOptions() {
return projectPrivateProperties.get(RUN_SERVER_OPTIONS_KEY);
}
/**
* Sets optional parameters provided to Platypus Application Server.
*
* @param aValue
*/
@Override
public void setServerOptions(String aValue) {
String oldValue = getRunServerOptions();
if (aValue != null) {
projectPrivateProperties.setProperty(RUN_SERVER_OPTIONS_KEY, aValue);
} else {
projectPrivateProperties.remove(RUN_SERVER_OPTIONS_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(RUN_SERVER_OPTIONS_KEY, oldValue, aValue);
}
/**
* Gets JVM options provided to Platypus Application Server.
*
* @return parameters string
*/
@Override
public String getRunServerVmOptions() {
return projectPrivateProperties.get(RUN_SERVER_VM_OPTIONS_KEY);
}
/**
* Sets JVM options provided to Platypus Application Server.
*
* @param aValue
*/
@Override
public void setServerVmOptions(String aValue) {
String oldValue = getRunServerVmOptions();
if (aValue != null) {
projectPrivateProperties.setProperty(RUN_SERVER_VM_OPTIONS_KEY, aValue);
} else {
projectPrivateProperties.remove(RUN_SERVER_VM_OPTIONS_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(RUN_SERVER_VM_OPTIONS_KEY, oldValue, aValue);
}
/**
* Gets application server's host.
*
* @return Url string
*/
@Override
public String getClientUrl() {
return projectPrivateProperties.get(CLIENT_URL_KEY);
}
/**
* Sets application's server host.
*
* @param aValue Url string
*/
@Override
public void setClientUrl(String aValue) {
String oldValue = getClientUrl();
if (aValue != null) {
projectPrivateProperties.setProperty(CLIENT_URL_KEY, aValue);
} else {
projectPrivateProperties.remove(CLIENT_URL_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(CLIENT_URL_KEY, oldValue, aValue);
}
/**
* Gets application's server port.
*
* @return server port
*/
@Override
public int getServerPort() {
return StringUtils.parseInt(projectPrivateProperties.get(SERVER_PORT_KEY), DEFAULT_PLATYPUS_SERVER_PORT);
}
/**
* Sets application's server port.
*
* @param aValue server port
*/
@Override
public void setServerPort(int aValue) {
int oldValue = getServerPort();
projectPrivateProperties.setProperty(SERVER_PORT_KEY, String.valueOf(aValue));
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(SERVER_PORT_KEY, oldValue, aValue);
}
/**
* Checks if NOT to start local development application server on
* application run.
*
* @return true not to start server
*/
@Override
public boolean isNotStartServer() {
return Boolean.valueOf(projectPrivateProperties.get(NOT_START_SERVER_KEY));
}
/**
* Sets flag NOT to start local development application server on
* application run.
*
* @param aValue true not to start server
*/
@Override
public void setNotStartServer(boolean aValue) {
boolean oldValue = isNotStartServer();
projectPrivateProperties.setProperty(NOT_START_SERVER_KEY, String.valueOf(aValue));
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(NOT_START_SERVER_KEY, oldValue, aValue);
}
/**
* Gets JMX debugging port for Platypus Client on local computer on
* development if null or empty, use default value.
*
* @return JMX debugging port
*/
@Override
public int getDebugClientPort() {
return StringUtils.parseInt(projectPrivateProperties.get(DEBUG_CLIENT_PORT_KEY), CLIENT_APP_DEFAULT_DEBUG_PORT);
}
/**
* Sets JMX debugging port for Platypus Client on local computer on
* development.
*
* @param aValue JMX debugging port
*/
@Override
public void setDebugClientPort(int aValue) {
int oldValue = getDebugClientPort();
projectPrivateProperties.setProperty(DEBUG_CLIENT_PORT_KEY, String.valueOf(aValue));
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(DEBUG_CLIENT_PORT_KEY, oldValue, aValue);
}
/**
* Gets JMX debugging port for Platypus Application Server on local computer
* on development if null or empty, use default value.
*
* @return JMX debugging port
*/
@Override
public int getDebugServerPort() {
return StringUtils.parseInt(projectPrivateProperties.get(DEBUG_SERVER_PORT_KEY), SERVER_APP_DEFAULT_DEBUG_PORT);
}
/**
* Sets JMX debugging port for Platypus Application Server on local computer
* on development.
*
* @param aValue JMX debugging port
*/
@Override
public void setDebugServerPort(int aValue) {
int oldValue = getDebugServerPort();
projectPrivateProperties.setProperty(DEBUG_SERVER_PORT_KEY, String.valueOf(aValue));
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(DEBUG_SERVER_PORT_KEY, oldValue, aValue);
}
/**
* Gets J2EE server instance ID.
*
* @return J2EE server ID
*/
@Override
public String getJ2eeServerId() {
return projectPrivateProperties.get(J2EE_SERVER_ID_KEY);
}
/**
* Sets J2EE server instance ID.
*
* @param aValue J2EE server ID
*/
@Override
public void setJ2eeServerId(String aValue) {
String oldValue = getJ2eeServerId();
if (aValue != null) {
projectPrivateProperties.setProperty(J2EE_SERVER_ID_KEY, aValue);
} else {
projectPrivateProperties.remove(J2EE_SERVER_ID_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(J2EE_SERVER_ID_KEY, oldValue, aValue);
}
/**
* Gets application's context name.
*
* @return The name of the context string
*/
@Override
public String getServerContext() {
return projectProperties.get(SERVER_CONTEXT_KEY);
}
/**
* Sets application's context name.
*
* @param aValue The name of the context string
*/
@Override
public void setServerContext(String aValue) {
String oldValue = getServerContext();
if (aValue != null) {
projectProperties.setProperty(SERVER_CONTEXT_KEY, aValue);
} else {
projectProperties.remove(SERVER_CONTEXT_KEY);
}
projectPropertiesIsDirty = true;
changeSupport.firePropertyChange(SERVER_CONTEXT_KEY, oldValue, aValue);
}
/**
* Checks if security realm to be configured on J2EE server startup.
*
* @return true to enable configure security realm
*/
@Override
public boolean isSecurityRealmEnabled() {
return Boolean.valueOf(projectPrivateProperties.get(ENABLE_SECURITY_REALM_KEY));
}
/**
* Sets if security realm to be configured on J2EE server startup.
*
* @param aValue true to enable configure security realm
*/
@Override
public void setSecurityRealmEnabled(boolean aValue) {
boolean oldValue = isSecurityRealmEnabled();
projectPrivateProperties.setProperty(ENABLE_SECURITY_REALM_KEY, String.valueOf(aValue));
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(ENABLE_SECURITY_REALM_KEY, oldValue, aValue);
}
/**
* Gets client type to be run.
*
* @return ClientType instance
*/
@Override
public ClientType getRunClientType() {
ClientType val = ClientType.getById(projectPrivateProperties.get(CLIENT_TYPE_KEY));
return val != null ? val : ClientType.PLATYPUS_CLIENT;
}
/**
* Sets client type to be run.
*
* @param aValue ClientType instance
*/
@Override
public void setRunClientType(ClientType aValue) {
ClientType oldValue = getRunClientType();
if (aValue != null) {
projectPrivateProperties.setProperty(CLIENT_TYPE_KEY, aValue.getId());
} else {
projectPrivateProperties.remove(CLIENT_TYPE_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(CLIENT_TYPE_KEY, aValue, oldValue);
}
/**
* Gets application server type to be run.
*
* @return AppServerType instance
*/
@Override
public AppServerType getRunAppServerType() {
AppServerType val = AppServerType.getById(projectPrivateProperties.get(SERVER_TYPE_KEY));
return val != null ? val : AppServerType.NONE;
}
/**
* Sets application server type to be run.
*
* @param aValue AppServerType instance
*/
@Override
public void setRunAppServerType(AppServerType aValue) {
AppServerType oldValue = getRunAppServerType();
if (aValue != null) {
projectPrivateProperties.setProperty(SERVER_TYPE_KEY, aValue.getId());
} else {
projectPrivateProperties.remove(SERVER_TYPE_KEY);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(SERVER_TYPE_KEY, aValue, oldValue);
}
@Override
public void save() throws Exception {
if (projectPropertiesIsDirty) {
try (OutputStream os = getProjectSettingsFileObject().getOutputStream()) {
projectProperties.store(os);
}
projectPropertiesIsDirty = false;
}
if (projectPrivatePropertiesIsDirty) {
try (OutputStream os = getProjectPrivateSettingsFileObject().getOutputStream()) {
projectPrivateProperties.store(os);
}
projectPrivatePropertiesIsDirty = false;
}
}
@Override
public PropertyChangeSupport getChangeSupport() {
return changeSupport;
}
protected final FileObject getProjectSettingsFileObject() {
FileObject fo = projectDir.getFileObject(PROJECT_SETTINGS_FILE);
if (fo == null) {
try {
fo = projectDir.createData(PROJECT_SETTINGS_FILE);
} catch (IOException ex) {
ErrorManager.getDefault().notify(ex);
}
}
return fo;
}
protected final FileObject getProjectPrivateSettingsFileObject() {
FileObject fo = projectDir.getFileObject(PROJECT_PRIVATE_SETTINGS_FILE);
if (fo == null) {
try {
fo = projectDir.createData(PROJECT_PRIVATE_SETTINGS_FILE);
} catch (IOException ex) {
ErrorManager.getDefault().notify(ex);
}
}
return fo;
}
/**
* Gets the log level for Platypus Client.
*
* @return Log level value
*/
@Override
public Level getClientLogLevel() {
String logLevel = projectPrivateProperties.get(CLIENT_LOG_LEVEL);
if (logLevel == null || logLevel.isEmpty()) {
return DEFAULT_LOG_LEVEL;
}
try {
return Level.parse(logLevel);
} catch (IllegalArgumentException ex) {
return DEFAULT_LOG_LEVEL;
}
}
/**
* Sets a log level for Platypus Client.
*
* @param aValue Log level value
*/
@Override
public void setClientLogLevel(Level aValue) {
Level oldValue = getClientLogLevel();
if (aValue != null) {
projectPrivateProperties.setProperty(CLIENT_LOG_LEVEL, aValue.getName());
} else {
projectPrivateProperties.remove(CLIENT_LOG_LEVEL);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(CLIENT_LOG_LEVEL, aValue, oldValue);
}
/**
* Gets the log level for Platypus Server.
*
* @return Log level value
*/
@Override
public Level getServerLogLevel() {
String logLevel = projectPrivateProperties.get(SERVER_LOG_LEVEL);
if (logLevel == null || logLevel.isEmpty()) {
return DEFAULT_LOG_LEVEL;
}
try {
return Level.parse(logLevel);
} catch (IllegalArgumentException ex) {
return DEFAULT_LOG_LEVEL;
}
}
/**
* Sets a log level for Platypus Server.
*
* @param aValue Log level value
*/
@Override
public void setServerLogLevel(Level aValue) {
Level oldValue = getServerLogLevel();
if (aValue != null) {
projectPrivateProperties.setProperty(SERVER_LOG_LEVEL, aValue.getName());
} else {
projectPrivateProperties.remove(SERVER_LOG_LEVEL);
}
projectPrivatePropertiesIsDirty = true;
changeSupport.firePropertyChange(SERVER_LOG_LEVEL, aValue, oldValue);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.hplsql;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.ParseException;
public class Arguments {

    /** Parsed command line; null until {@link #parse(String[])} succeeds. */
    private CommandLine commandLine;
    private Options options = new Options();
    String execString;
    String fileName;
    String main;
    /** Variables collected from -hiveconf, --hivevar and --define. */
    Map<String, String> vars = new HashMap<String, String>();

    @SuppressWarnings("static-access")
    Arguments() {
        // -e 'query'
        options.addOption(OptionBuilder
                .hasArg()
                .withArgName("quoted-query-string")
                .withDescription("HPL/SQL from command line")
                .create('e'));
        // -f <file>
        options.addOption(OptionBuilder
                .hasArg()
                .withArgName("filename")
                .withDescription("HPL/SQL from a file")
                .create('f'));
        // -main entry_point_name
        options.addOption(OptionBuilder
                .hasArg()
                .withArgName("procname")
                .withDescription("Entry point (procedure or function name)")
                .create("main"));
        // -hiveconf x=y
        options.addOption(OptionBuilder
                .withValueSeparator()
                .hasArgs(2)
                .withArgName("property=value")
                .withLongOpt("hiveconf")
                .withDescription("Value for given property")
                .create());
        // Substitution option -d, --define
        options.addOption(OptionBuilder
                .withValueSeparator()
                .hasArgs(2)
                .withArgName("key=value")
                .withLongOpt("define")
                .withDescription("Variable substitution e.g. -d A=B or --define A=B")
                .create('d'));
        // Substitution option --hivevar
        options.addOption(OptionBuilder
                .withValueSeparator()
                .hasArgs(2)
                .withArgName("key=value")
                .withLongOpt("hivevar")
                .withDescription("Variable substitution e.g. --hivevar A=B")
                .create());
        // [-version|--version]
        options.addOption(new Option("version", "version", false, "Print HPL/SQL version"));
        // [-trace|--trace]
        options.addOption(new Option("trace", "trace", false, "Print debug information"));
        // [-offline|--offline]
        options.addOption(new Option("offline", "offline", false, "Offline mode - skip SQL execution"));
        // [-H|--help]
        options.addOption(new Option("H", "help", false, "Print help information"));
    }

    /**
     * Parse the command line arguments.
     *
     * @param args raw command-line arguments
     * @return true on success; false if parsing failed (the error message is
     *         printed to stderr)
     */
    public boolean parse(String[] args) {
        try {
            commandLine = new GnuParser().parse(options, args);
            execString = commandLine.getOptionValue('e');
            fileName = commandLine.getOptionValue('f');
            main = commandLine.getOptionValue("main");
            // Collect substitution variables; later options overwrite earlier
            // ones for the same key (hiveconf, then hivevar, then define).
            collectVars("hiveconf");
            collectVars("hivevar");
            collectVars("define");
        } catch (ParseException e) {
            System.err.println(e.getMessage());
            return false;
        }
        return true;
    }

    /**
     * Copies all key=value pairs of the given option into the variables map.
     *
     * @param option long option name whose properties to copy
     */
    private void collectVars(String option) {
        Properties p = commandLine.getOptionProperties(option);
        for (String key : p.stringPropertyNames()) {
            vars.put(key, p.getProperty(key));
        }
    }

    /**
     * Get the value of execution option -e
     */
    public String getExecString() {
        return execString;
    }

    /**
     * Get the value of file option -f
     */
    public String getFileName() {
        return fileName;
    }

    /**
     * Get the value of -main option
     */
    public String getMain() {
        return main;
    }

    /**
     * Get the variables
     */
    public Map<String, String> getVars() {
        return vars;
    }

    /**
     * Test whether version option is set. Safe to call before parse()
     * (previously this threw NullPointerException).
     */
    public boolean hasVersionOption() {
        return commandLine != null && commandLine.hasOption("version");
    }

    /**
     * Test whether debug option is set. Safe to call before parse().
     */
    public boolean hasTraceOption() {
        return commandLine != null && commandLine.hasOption("trace");
    }

    /**
     * Test whether offline option is set. Safe to call before parse().
     */
    public boolean hasOfflineOption() {
        return commandLine != null && commandLine.hasOption("offline");
    }

    /**
     * Test whether help option is set. Safe to call before parse().
     */
    public boolean hasHelpOption() {
        return commandLine != null && commandLine.hasOption('H');
    }

    /**
     * Print help information
     */
    public void printHelp() {
        new HelpFormatter().printHelp("hplsql", options);
    }
}
| |
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.analytics.model.curve;
import static com.opengamma.engine.value.ValuePropertyNames.CURVE;
import static com.opengamma.engine.value.ValuePropertyNames.CURVE_SENSITIVITY_CURRENCY;
import static com.opengamma.engine.value.ValueRequirementNames.CURVE_INSTRUMENT_CONVERSION_HISTORICAL_TIME_SERIES;
import static com.opengamma.engine.value.ValueRequirementNames.FX_MATRIX;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.threeten.bp.LocalDate;
import org.threeten.bp.ZonedDateTime;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.analytics.financial.curve.interestrate.generator.GeneratorYDCurve;
import com.opengamma.analytics.financial.forex.method.FXMatrix;
import com.opengamma.analytics.financial.instrument.InstrumentDefinition;
import com.opengamma.analytics.financial.interestrate.InstrumentDerivativeVisitor;
import com.opengamma.analytics.financial.legalentity.LegalEntity;
import com.opengamma.analytics.financial.legalentity.LegalEntityFilter;
import com.opengamma.analytics.financial.model.interestrate.curve.DiscountCurve;
import com.opengamma.analytics.financial.model.interestrate.curve.YieldAndDiscountCurve;
import com.opengamma.analytics.financial.model.interestrate.curve.YieldCurve;
import com.opengamma.analytics.financial.model.interestrate.curve.YieldPeriodicCurve;
import com.opengamma.analytics.financial.provider.curve.CurveBuildingBlock;
import com.opengamma.analytics.financial.provider.curve.CurveBuildingBlockBundle;
import com.opengamma.analytics.financial.provider.curve.issuer.IssuerDiscountBuildingRepository;
import com.opengamma.analytics.financial.provider.description.interestrate.IssuerProviderDiscount;
import com.opengamma.analytics.financial.provider.description.interestrate.ParameterIssuerProviderInterface;
import com.opengamma.analytics.financial.provider.sensitivity.multicurve.MulticurveSensitivity;
import com.opengamma.analytics.math.curve.InterpolatedDoublesCurve;
import com.opengamma.analytics.math.interpolation.Interpolator1D;
import com.opengamma.analytics.math.interpolation.factory.NamedInterpolator1dFactory;
import com.opengamma.analytics.math.matrix.DoubleMatrix2D;
import com.opengamma.analytics.util.time.TimeCalculator;
import com.opengamma.core.marketdatasnapshot.SnapshotDataBundle;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.ComputationTargetSpecification;
import com.opengamma.engine.function.CompiledFunctionDefinition;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.FunctionExecutionContext;
import com.opengamma.engine.function.FunctionInputs;
import com.opengamma.engine.value.ComputedValue;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValuePropertyNames;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueRequirementNames;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.financial.analytics.curve.CurveConstructionConfiguration;
import com.opengamma.financial.analytics.curve.CurveDefinition;
import com.opengamma.financial.analytics.curve.CurveGroupConfiguration;
import com.opengamma.financial.analytics.curve.CurveTypeConfiguration;
import com.opengamma.financial.analytics.curve.InterpolatedCurveSpecification;
import com.opengamma.financial.analytics.curve.IssuerCurveTypeConfiguration;
import com.opengamma.financial.analytics.ircurve.strips.ContinuouslyCompoundedRateNode;
import com.opengamma.financial.analytics.ircurve.strips.CurveNode;
import com.opengamma.financial.analytics.ircurve.strips.CurveNodeVisitor;
import com.opengamma.financial.analytics.ircurve.strips.CurveNodeWithIdentifier;
import com.opengamma.financial.analytics.ircurve.strips.DiscountFactorNode;
import com.opengamma.financial.analytics.ircurve.strips.PeriodicallyCompoundedRateNode;
import com.opengamma.financial.analytics.model.InterpolatedDataProperties;
import com.opengamma.financial.analytics.timeseries.HistoricalTimeSeriesBundle;
import com.opengamma.id.ExternalId;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.money.Currency;
import com.opengamma.util.time.Tenor;
import com.opengamma.util.tuple.Pair;
import com.opengamma.util.tuple.Pairs;
public class IssuerProviderInterpolatedFunction extends
MultiCurveFunction<ParameterIssuerProviderInterface, IssuerDiscountBuildingRepository, GeneratorYDCurve, MulticurveSensitivity> {
private static final Logger LOGGER = LoggerFactory.getLogger(IssuerProviderInterpolatedFunction.class);
    /**
     * @param curveConstructionConfigurationName the name of the curve construction
     *        configuration for which this function builds curves
     */
    public IssuerProviderInterpolatedFunction(final String curveConstructionConfigurationName) {
        super(curveConstructionConfigurationName);
    }
    /**
     * Not supported for this function.
     *
     * @throws UnsupportedOperationException always; interpolated curves are not
     *         built with a pricing calculator
     */
    @Override
    protected InstrumentDerivativeVisitor<ParameterIssuerProviderInterface, Double> getCalculator() {
        throw new UnsupportedOperationException("Curves created with the Interpolated method do not use a calculator");
    }
    /**
     * Not supported for this function.
     *
     * @throws UnsupportedOperationException always; interpolated curves are not
     *         built with a sensitivity calculator
     */
    @Override
    protected InstrumentDerivativeVisitor<ParameterIssuerProviderInterface, MulticurveSensitivity> getSensitivityCalculator() {
        throw new UnsupportedOperationException("Curves created with the Interpolated method do not use a sensitivity calculator");
    }
    /**
     * Returns the calculation-method property value that identifies curves
     * produced by this function as interpolated.
     *
     * @return the interpolated calculation method name
     */
    @Override
    protected String getCurveTypeProperty() {
        return InterpolatedDataProperties.CALCULATION_METHOD_NAME;
    }
    /**
     * Creates the compiled form of this function without an explicit set of
     * sensitivity currencies.
     *
     * @param earliestInvocation the earliest time this function is valid, null if unbounded
     * @param latestInvocation the latest time this function is valid, null if unbounded
     * @param curveNames the names of the curves produced, not null
     * @param exogenousRequirements the exogenous requirements, not null
     * @param curveConstructionConfiguration the curve construction configuration, not null
     * @return the compiled function definition
     */
    @Override
    public CompiledFunctionDefinition getCompiledFunction(final ZonedDateTime earliestInvocation, final ZonedDateTime latestInvocation, final String[] curveNames,
            final Set<ValueRequirement> exogenousRequirements,
            final CurveConstructionConfiguration curveConstructionConfiguration) {
        return new IssuerProviderInterpolatedCompiledFunctionDefinition(earliestInvocation, latestInvocation, curveNames, exogenousRequirements,
                curveConstructionConfiguration);
    }
    /**
     * Creates the compiled form of this function with an explicit set of
     * sensitivity currencies.
     *
     * @param earliestInvocation the earliest time this function is valid, null if unbounded
     * @param latestInvocation the latest time this function is valid, null if unbounded
     * @param curveNames the names of the curves produced, not null
     * @param exogenousRequirements the exogenous requirements, not null
     * @param curveConstructionConfiguration the curve construction configuration, not null
     * @param currencies the currencies to which the curves produce sensitivities
     * @return the compiled function definition
     */
    @Override
    public CompiledFunctionDefinition getCompiledFunction(final ZonedDateTime earliestInvocation, final ZonedDateTime latestInvocation, final String[] curveNames,
            final Set<ValueRequirement> exogenousRequirements,
            final CurveConstructionConfiguration curveConstructionConfiguration, final String[] currencies) {
        return new IssuerProviderInterpolatedCompiledFunctionDefinition(earliestInvocation, latestInvocation, curveNames, exogenousRequirements,
                curveConstructionConfiguration, currencies);
    }
private class IssuerProviderInterpolatedCompiledFunctionDefinition extends CurveCompiledFunctionDefinition {
/** The curve construction configuration */
private final CurveConstructionConfiguration _curveConstructionConfiguration;
/**
* @param earliestInvocation
* The earliest time for which this function is valid, null if there is no bound
* @param latestInvocation
* The latest time for which this function is valid, null if there is no bound
* @param curveNames
* The names of the curves produced by this function, not null
* @param exogenousRequirements
* The exogenous requirements, not null
* @param curveConstructionConfiguration
* The curve construction configuration, not null
* @param currencies
* The set of currencies to which the curves produce sensitivities
*/
IssuerProviderInterpolatedCompiledFunctionDefinition(
final ZonedDateTime earliestInvocation,
final ZonedDateTime latestInvocation,
final String[] curveNames,
final Set<ValueRequirement> exogenousRequirements,
final CurveConstructionConfiguration curveConstructionConfiguration,
final String[] currencies) {
super(earliestInvocation, latestInvocation, curveNames, ValueRequirementNames.YIELD_CURVE, exogenousRequirements, currencies);
ArgumentChecker.notNull(curveConstructionConfiguration, "curve construction configuration");
_curveConstructionConfiguration = curveConstructionConfiguration;
}
@Override
public boolean canHandleMissingRequirements() {
return true;
}
@Override
public boolean canHandleMissingInputs() {
return true;
}
/**
* @param earliestInvocation
* The earliest time for which this function is valid, null if there is no bound
* @param latestInvocation
* The latest time for which this function is valid, null if there is no bound
* @param curveNames
* The names of the curves produced by this function, not null
* @param exogenousRequirements
* The exogenous requirements, not null
* @param curveConstructionConfiguration
* The curve construction configuration, not null
*/
IssuerProviderInterpolatedCompiledFunctionDefinition(final ZonedDateTime earliestInvocation,
final ZonedDateTime latestInvocation,
final String[] curveNames,
final Set<ValueRequirement> exogenousRequirements,
final CurveConstructionConfiguration curveConstructionConfiguration) {
super(earliestInvocation, latestInvocation, curveNames, ValueRequirementNames.YIELD_CURVE, exogenousRequirements);
ArgumentChecker.notNull(curveConstructionConfiguration, "curve construction configuration");
_curveConstructionConfiguration = curveConstructionConfiguration;
}
@Override
protected ParameterIssuerProviderInterface getKnownData(final FunctionInputs inputs) {
final FXMatrix fxMatrix = new FXMatrix();
IssuerProviderDiscount knownData;
if (getExogenousRequirements().isEmpty()) {
knownData = new IssuerProviderDiscount(fxMatrix);
} else {
knownData = (IssuerProviderDiscount) inputs.getValue(ValueRequirementNames.CURVE_BUNDLE);
knownData.getMulticurveProvider().setForexMatrix(fxMatrix);
}
return knownData;
}
@Override
public Set<ValueRequirement> getRequirements(final FunctionCompilationContext compilationContext, final ComputationTarget target,
final ValueRequirement desiredValue) {
final Set<ValueRequirement> requirements = super.getRequirements(compilationContext, target, desiredValue);
if (requirements == null) {
return null;
}
final Set<ValueRequirement> trimmed = new HashSet<>();
for (final ValueRequirement requirement : requirements) {
final String requirementName = requirement.getValueName();
if (!(requirementName.equals(CURVE_INSTRUMENT_CONVERSION_HISTORICAL_TIME_SERIES) || requirementName.equals(FX_MATRIX))) {
trimmed.add(requirement);
}
}
return requirements;
}
@Override
protected IssuerDiscountBuildingRepository getBuilder(final double absoluteTolerance, final double relativeTolerance, final int maxIterations) {
// Returns null because builder is not used
return null;
}
@Override
protected GeneratorYDCurve getGenerator(final CurveDefinition definition, final LocalDate valuationDate) {
// Returns null because generator is not used
return null;
}
@Override
protected CurveNodeVisitor<InstrumentDefinition<?>> getCurveNodeConverter(final FunctionExecutionContext context, final SnapshotDataBundle marketData,
final ExternalId dataId,
final HistoricalTimeSeriesBundle historicalData, final ZonedDateTime valuationTime, final FXMatrix fxMatrix) {
// No need to convert to InstrumentDefinition if we are not fitting the curve.
return null;
}
@Override
protected Pair<ParameterIssuerProviderInterface, CurveBuildingBlockBundle> getCurves(final FunctionInputs inputs, final ZonedDateTime now,
final IssuerDiscountBuildingRepository builder,
final ParameterIssuerProviderInterface knownData, final FunctionExecutionContext context, final FXMatrix fx) {
int n = 0;
// These loops are here because the market data snapshot might not contain all of the required information
for (final CurveGroupConfiguration group : _curveConstructionConfiguration.getCurveGroups()) {
for (final Map.Entry<String, List<? extends CurveTypeConfiguration>> entry : group.getTypesForCurves().entrySet()) {
final String curveName = entry.getKey();
final ValueProperties curveProperties = ValueProperties.builder().with(CURVE, curveName).get();
final InterpolatedCurveSpecification specification = (InterpolatedCurveSpecification) inputs
.getValue(new ValueRequirement(ValueRequirementNames.CURVE_SPECIFICATION, ComputationTargetSpecification.NULL, curveProperties));
n += specification.getNodes().size();
}
}
final IssuerProviderDiscount curveBundle = (IssuerProviderDiscount) getKnownData(inputs);
final LinkedHashMap<String, Pair<Integer, Integer>> unitMap = new LinkedHashMap<>();
final LinkedHashMap<String, Pair<CurveBuildingBlock, DoubleMatrix2D>> unitBundles = new LinkedHashMap<>();
int totalNodes = 0;
for (final CurveGroupConfiguration group : _curveConstructionConfiguration.getCurveGroups()) {
for (final Map.Entry<String, List<? extends CurveTypeConfiguration>> entry : group.getTypesForCurves().entrySet()) {
final String curveName = entry.getKey();
final List<? extends CurveTypeConfiguration> types = entry.getValue();
final ValueProperties curveProperties = ValueProperties.builder().with(CURVE, curveName).get();
final Object dataObject = inputs
.getValue(new ValueRequirement(ValueRequirementNames.CURVE_MARKET_DATA, ComputationTargetSpecification.NULL, curveProperties));
if (dataObject == null) {
throw new OpenGammaRuntimeException("Could not get yield curve data");
}
final SnapshotDataBundle marketData = (SnapshotDataBundle) dataObject;
final InterpolatedCurveSpecification specification = (InterpolatedCurveSpecification) inputs
.getValue(new ValueRequirement(ValueRequirementNames.CURVE_SPECIFICATION, ComputationTargetSpecification.NULL, curveProperties));
n = specification.getNodes().size();
final double[] times = new double[n];
final double[] yields = new double[n];
final double[][] jacobian = new double[n][n];
boolean isYield = false;
int i = 0;
int compoundPeriodsPerYear = 0;
final int nNodesForCurve = specification.getNodes().size();
for (final CurveNodeWithIdentifier node : specification.getNodes()) {
final CurveNode curveNode = node.getCurveNode();
if (curveNode instanceof ContinuouslyCompoundedRateNode) {
if (i == 0) {
isYield = true;
} else {
if (!isYield) {
throw new OpenGammaRuntimeException("Was expecting only continuously-compounded rate nodes; have " + curveNode);
}
}
} else if (curveNode instanceof DiscountFactorNode) {
if (i == 0) {
isYield = false;
} else {
if (isYield) {
throw new OpenGammaRuntimeException("Was expecting only discount factor nodes; have " + curveNode);
}
}
} else if (curveNode instanceof PeriodicallyCompoundedRateNode) {
if (i == 0) {
compoundPeriodsPerYear = ((PeriodicallyCompoundedRateNode) curveNode).getCompoundingPeriodsPerYear();
isYield = true;
} else {
if (!isYield) {
throw new OpenGammaRuntimeException("Was expecting only periodically compounded nodes; have " + curveNode);
}
}
} else {
throw new OpenGammaRuntimeException("Can only handle discount factor or continuously-compounded rate nodes; have " + curveNode);
}
final Double marketValue = marketData.getDataPoint(node.getIdentifier());
if (marketValue == null) {
throw new OpenGammaRuntimeException("Could not get market value for " + node);
}
final Tenor maturity = curveNode.getResolvedMaturity();
times[i] = TimeCalculator.getTimeBetween(now, now.plus(maturity.getPeriod()));
yields[i] = marketValue;
jacobian[i][i] = 1;
i++;
}
final String interpolatorName = specification.getInterpolatorName();
final String rightExtrapolatorName = specification.getRightExtrapolatorName();
final String leftExtrapolatorName = specification.getLeftExtrapolatorName();
final Interpolator1D interpolator = NamedInterpolator1dFactory.of(interpolatorName, leftExtrapolatorName, rightExtrapolatorName);
final InterpolatedDoublesCurve rawCurve = InterpolatedDoublesCurve.from(times, yields, interpolator, curveName);
final YieldAndDiscountCurve discountCurve;
if (compoundPeriodsPerYear != 0 && isYield) {
discountCurve = YieldPeriodicCurve.from(compoundPeriodsPerYear, rawCurve);
} else if (isYield) {
discountCurve = new YieldCurve(curveName, rawCurve);
} else {
discountCurve = new DiscountCurve(curveName, rawCurve);
}
for (final CurveTypeConfiguration type : types) {
if (type instanceof IssuerCurveTypeConfiguration) {
final IssuerCurveTypeConfiguration issuer = (IssuerCurveTypeConfiguration) type;
curveBundle.setCurve(Pairs.<Object, LegalEntityFilter<LegalEntity>> of(issuer.getKeys(), issuer.getFilters()), discountCurve);
curveBundle.getMulticurveProvider().setCurve(Currency.of(curveName.substring(0, 3)), discountCurve);
}
}
unitMap.put(curveName, Pairs.of(totalNodes + nNodesForCurve, nNodesForCurve));
unitBundles.put(curveName, Pairs.of(new CurveBuildingBlock(unitMap), new DoubleMatrix2D(jacobian)));
totalNodes += nNodesForCurve;
}
}
return Pairs.of((ParameterIssuerProviderInterface) curveBundle, new CurveBuildingBlockBundle(unitBundles));
}
@Override
protected Set<ComputedValue> getResults(final ValueSpecification bundleSpec, final ValueSpecification jacobianSpec, final ValueProperties bundleProperties,
final Pair<ParameterIssuerProviderInterface, CurveBuildingBlockBundle> pair) {
final Set<ComputedValue> result = new HashSet<>();
final IssuerProviderDiscount provider = (IssuerProviderDiscount) pair.getFirst();
result.add(new ComputedValue(bundleSpec, provider));
result.add(new ComputedValue(jacobianSpec, pair.getSecond()));
for (final String curveName : getCurveNames()) {
final ValueProperties curveProperties = bundleProperties.copy()
.with(CurveCalculationPropertyNamesAndValues.PROPERTY_CURVE_TYPE, getCurveTypeProperty())
.withoutAny(ValuePropertyNames.CURVE)
.withoutAny(CURVE_SENSITIVITY_CURRENCY)
.with(ValuePropertyNames.CURVE, curveName)
.get();
final YieldAndDiscountCurve curve = provider.getIssuerCurve(curveName);
if (curve == null) {
LOGGER.error("Could not get curve called {} from configuration {}", curveName, getCurveConstructionConfigurationName());
} else {
final ValueSpecification curveSpec = new ValueSpecification(ValueRequirementNames.YIELD_CURVE, ComputationTargetSpecification.NULL, curveProperties);
result.add(new ComputedValue(curveSpec, curve));
}
}
return result;
}
}
}
| |
/*
* Powered By wufuwei
*/
package com.osp.biz.model;
import javacommon.base.BaseEntity;
public class TableVersionDownloadLog extends BaseEntity {
//date formats
public static final String FORMAT_DOWNTIME = DATE_TIME_FORMAT;
//columns START
private java.lang.Integer id;
private java.lang.String ip;
private java.sql.Timestamp downtime;
private java.lang.Integer versionId;
private java.lang.String platform;
private java.lang.String version;
private java.lang.String isImportantVersion;
private java.lang.String type;
private java.lang.String province;
private java.lang.String versionUrl;
private java.lang.String versionInfo;
private java.lang.Long fileSize;
private java.lang.String md5;
private java.lang.Boolean isEnforce;
private java.lang.String downloadUrl;
private java.lang.String fileName;
private java.lang.String downTerminal;
private java.lang.String name;
private java.lang.String apppackage;
//columns END
//generateConstructor START
public TableVersionDownloadLog(){}
public TableVersionDownloadLog(
java.lang.Integer id
){
this.id = id;
}
//generateConstructor END
public void setId(java.lang.Integer value) {
this.id = value;
}
public java.lang.Integer getId() {
return this.id;
}
public void setIp(java.lang.String value) {
this.ip = value;
}
public java.lang.String getIp() {
return this.ip;
}
public String getDowntimeString() {
return date2String(getDowntime(), FORMAT_DOWNTIME);
}
public void setDowntimeString(String value) {
setDowntime(string2Date(value, FORMAT_DOWNTIME,java.sql.Timestamp.class));
}
public void setDowntime(java.sql.Timestamp value) {
this.downtime = value;
}
public java.sql.Timestamp getDowntime() {
return this.downtime;
}
public void setVersionId(java.lang.Integer value) {
this.versionId = value;
}
public java.lang.Integer getVersionId() {
return this.versionId;
}
public void setPlatform(java.lang.String value) {
this.platform = value;
}
public java.lang.String getPlatform() {
return this.platform;
}
public void setVersion(java.lang.String value) {
this.version = value;
}
public java.lang.String getVersion() {
return this.version;
}
public void setIsImportantVersion(java.lang.String value) {
this.isImportantVersion = value;
}
public java.lang.String getIsImportantVersion() {
return this.isImportantVersion;
}
public void setType(java.lang.String value) {
this.type = value;
}
public java.lang.String getType() {
return this.type;
}
public void setProvince(java.lang.String value) {
this.province = value;
}
public java.lang.String getProvince() {
return this.province;
}
public void setVersionUrl(java.lang.String value) {
this.versionUrl = value;
}
public java.lang.String getVersionUrl() {
return this.versionUrl;
}
public void setVersionInfo(java.lang.String value) {
this.versionInfo = value;
}
public java.lang.String getVersionInfo() {
return this.versionInfo;
}
public void setFileSize(java.lang.Long value) {
this.fileSize = value;
}
public java.lang.Long getFileSize() {
return this.fileSize;
}
public void setMd5(java.lang.String value) {
this.md5 = value;
}
public java.lang.String getMd5() {
return this.md5;
}
public void setIsEnforce(java.lang.Boolean value) {
this.isEnforce = value;
}
public java.lang.Boolean getIsEnforce() {
return this.isEnforce;
}
public void setDownloadUrl(java.lang.String value) {
this.downloadUrl = value;
}
public java.lang.String getDownloadUrl() {
return this.downloadUrl;
}
public void setFileName(java.lang.String value) {
this.fileName = value;
}
public java.lang.String getFileName() {
return this.fileName;
}
public void setDownTerminal(java.lang.String value) {
this.downTerminal = value;
}
public java.lang.String getDownTerminal() {
return this.downTerminal;
}
public void setName(java.lang.String value) {
this.name = value;
}
public java.lang.String getName() {
return this.name;
}
public void setApppackage(java.lang.String value) {
this.apppackage = value;
}
public java.lang.String getApppackage() {
return this.apppackage;
}
public String toString() {
return new StringBuffer().append("[")
.append("Id").append("=").append(getId()).append(",")
.append("Ip").append("=").append(getIp()).append(",")
.append("Downtime").append("=").append(getDowntime()).append(",")
.append("VersionId").append("=").append(getVersionId()).append(",")
.append("Platform").append("=").append(getPlatform()).append(",")
.append("Version").append("=").append(getVersion()).append(",")
.append("IsImportantVersion").append("=").append(getIsImportantVersion()).append(",")
.append("Type").append("=").append(getType()).append(",")
.append("Province").append("=").append(getProvince()).append(",")
.append("VersionUrl").append("=").append(getVersionUrl()).append(",")
.append("VersionInfo").append("=").append(getVersionInfo()).append(",")
.append("FileSize").append("=").append(getFileSize()).append(",")
.append("Md5").append("=").append(getMd5()).append(",")
.append("IsEnforce").append("=").append(getIsEnforce()).append(",")
.append("DownloadUrl").append("=").append(getDownloadUrl()).append(",")
.append("FileName").append("=").append(getFileName()).append(",")
.append("DownTerminal").append("=").append(getDownTerminal()).append(",")
.append("Name").append("=").append(getName()).append(",")
.append("Apppackage").append("=").append(getApppackage())
.append("]").toString();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.conf;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.DataInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ByteArrayInputStream;
import java.io.DataOutputStream;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Random;
import junit.framework.TestCase;
import org.apache.hadoop.fs.Path;
import org.codehaus.jackson.map.ObjectMapper;
public class TestConfiguration extends TestCase {
// Configuration under test; rebuilt with default resources in setUp().
private Configuration conf;
// Absolute paths of the scratch config files the tests write; deleted in tearDown().
final static String CONFIG = new File("./test-config.xml").getAbsolutePath();
final static String CONFIG2 = new File("./test-config2.xml").getAbsolutePath();
// Shared RNG used by testTrim() to generate random whitespace padding.
final static Random RAN = new Random();
@Override
protected void setUp() throws Exception {
super.setUp();
// Fresh Configuration (with default resources) for every test case.
conf = new Configuration();
}
@Override
protected void tearDown() throws Exception {
super.tearDown();
// Remove the scratch config files so state does not leak between tests.
new File(CONFIG).delete();
new File(CONFIG2).delete();
}
// Writes the XML prolog and opening <configuration> tag to the shared writer 'out'.
private void startConfig() throws IOException{
out.write("<?xml version=\"1.0\"?>\n");
out.write("<configuration>\n");
}
// Closes the <configuration> element and the shared writer 'out'.
private void endConfig() throws IOException{
out.write("</configuration>\n");
out.close();
}
// Emits an XInclude directive pulling in another config file; used by testIncludes().
private void addInclude(String filename) throws IOException{
out.write("<xi:include href=\"" + filename + "\" xmlns:xi=\"http://www.w3.org/2001/XInclude\" />\n ");
}
// Verifies ${var} substitution: get() returns expanded values, getRaw() the
// literal values, and undefined variables are left untouched.
public void testVariableSubstitution() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
declareProperty("my.int", "${intvar}", "42");
declareProperty("intvar", "42", "42");
declareProperty("my.base", "/tmp/${user.name}", UNSPEC);
declareProperty("my.file", "hello", "hello");
declareProperty("my.suffix", ".txt", ".txt");
declareProperty("my.relfile", "${my.file}${my.suffix}", "hello.txt");
declareProperty("my.fullfile", "${my.base}/${my.file}${my.suffix}", UNSPEC);
// check that undefined variables are returned as-is
declareProperty("my.failsexpand", "a${my.undefvar}b", "a${my.undefvar}b");
endConfig();
Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
for (Prop p : props) {
System.out.println("p=" + p.name);
String gotVal = conf.get(p.name);
String gotRawVal = conf.getRaw(p.name);
// getRaw() must always return the literal (unexpanded) value.
assertEq(p.val, gotRawVal);
if (p.expectEval == UNSPEC) {
// expansion is system-dependent (uses System properties)
// can't do exact match so just check that all variables got expanded
assertTrue(gotVal != null && -1 == gotVal.indexOf("${"));
} else {
assertEq(p.expectEval, gotVal);
}
}
// check that expansion also occurs for getInt()
assertTrue(conf.getInt("intvar", -1) == 42);
assertTrue(conf.getInt("my.int", -1) == 42);
}
// Verifies that a property first loaded as final (here with an empty value,
// which reads back as null) cannot be overridden by a later resource.
public void testFinalParam() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
declareProperty("my.var", "", "", true);
endConfig();
Path fileResource = new Path(CONFIG);
Configuration conf1 = new Configuration();
conf1.addResource(fileResource);
assertNull("my var is not null", conf1.get("my.var"));
// Second resource tries to set a non-final value for the same key.
out=new BufferedWriter(new FileWriter(CONFIG2));
startConfig();
declareProperty("my.var", "myval", "myval", false);
endConfig();
fileResource = new Path(CONFIG2);
Configuration conf2 = new Configuration(conf1);
conf2.addResource(fileResource);
// The final declaration from the first resource must win.
assertNull("my var is not final", conf2.get("my.var"));
}
/** Logs both values for easier debugging, then delegates to {@code assertEquals}. */
public static void assertEq(Object expected, Object actual) {
  System.out.println("assertEq: " + expected + ", " + actual);
  assertEquals(expected, actual);
}
// Record of one declared property: its name, raw (unexpanded) value, and the
// value expected after variable expansion (UNSPEC when system-dependent).
static class Prop {
String name;
String val;
String expectEval;
}
// Marker meaning "expected expansion unspecified / system-dependent".
final String UNSPEC = null;
// Every property declared via declareProperty(), checked later by the tests.
ArrayList<Prop> props = new ArrayList<Prop>();
// Declares a non-final property; see the four-argument overload.
void declareProperty(String name, String val, String expectEval)
throws IOException {
declareProperty(name, val, expectEval, false);
}
/**
 * Appends a property element to the config file being written and records it
 * in {@link #props} together with its expected post-expansion value.
 */
void declareProperty(String name, String val, String expectEval,
    boolean isFinal)
    throws IOException {
  appendProperty(name, val, isFinal);
  final Prop prop = new Prop();
  prop.name = name;
  prop.val = val;
  prop.expectEval = expectEval;
  props.add(prop);
}
// Appends a non-final property; see the three-argument overload.
void appendProperty(String name, String val) throws IOException {
appendProperty(name, val, false);
}
/**
 * Writes a single &lt;property&gt; element (name, value, and an optional
 * &lt;final&gt;true&lt;/final&gt; marker) to the shared writer.
 */
void appendProperty(String name, String val, boolean isFinal)
    throws IOException {
  // Assemble the element first so a single write emits the same characters
  // the piecemeal writes used to.
  final StringBuilder xml = new StringBuilder();
  xml.append("<property>")
      .append("<name>").append(name).append("</name>")
      .append("<value>").append(val).append("</value>");
  if (isFinal) {
    xml.append("<final>true</final>");
  }
  xml.append("</property>\n");
  out.write(xml.toString());
}
// Verifies precedence when resources and set() calls are layered: set() values
// override file values, later resources override earlier non-final ones, and
// final properties always win.
public void testOverlay() throws IOException{
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
appendProperty("a","b");
appendProperty("b","c");
appendProperty("d","e");
appendProperty("e","f", true);
endConfig();
out=new BufferedWriter(new FileWriter(CONFIG2));
startConfig();
appendProperty("a","b");
appendProperty("b","d");
appendProperty("e","e");
endConfig();
Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
//set dynamically something
conf.set("c","d");
conf.set("a","d");
Configuration clone=new Configuration(conf);
clone.addResource(new Path(CONFIG2));
// "a" was set() programmatically, so CONFIG2's value does not override it.
assertEquals(clone.get("a"), "d");
// "b" comes from CONFIG2, the later resource.
assertEquals(clone.get("b"), "d");
assertEquals(clone.get("c"), "d");
assertEquals(clone.get("d"), "e");
// "e" was final in CONFIG, so CONFIG2 cannot override it.
assertEquals(clone.get("e"), "f");
}
// XML comments inside a property value should be stripped by the parser.
public void testCommentsInValue() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
appendProperty("my.comment", "this <!--comment here--> contains a comment");
endConfig();
Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
//two spaces one after "this", one before "contains"
assertEquals("this  contains a comment", conf.get("my.comment"));
}
// Property names padded with random whitespace should be trimmed on load.
public void testTrim() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
String[] whitespaces = {"", " ", "\n", "\t"};
String[] name = new String[100];
for(int i = 0; i < name.length; i++) {
name[i] = "foo" + i;
// Build random 3-character whitespace runs before and after the name.
StringBuilder prefix = new StringBuilder();
StringBuilder postfix = new StringBuilder();
for(int j = 0; j < 3; j++) {
prefix.append(whitespaces[RAN.nextInt(whitespaces.length)]);
postfix.append(whitespaces[RAN.nextInt(whitespaces.length)]);
}
appendProperty(prefix + name[i] + postfix, name[i] + ".value");
}
endConfig();
conf.addResource(new Path(CONFIG));
// Each value must be retrievable by the untrimmed-free (clean) name.
for(String n : name) {
assertEquals(n + ".value", conf.get(n));
}
}
// toString() should list the default resources plus every added resource, in order.
public void testToString() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
endConfig();
Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
String expectedOutput =
"Configuration: core-default.xml, core-site.xml, " +
fileResource.toString();
assertEquals(expectedOutput, conf.toString());
}
// writeXml() should emit a well-formed document with the expected header/footer.
public void testWriteXml() throws IOException {
Configuration conf = new Configuration();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
conf.writeXml(baos);
String result = baos.toString();
assertTrue("Result has proper header", result.startsWith(
"<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?><configuration>"));
assertTrue("Result has proper footer", result.endsWith("</configuration>"));
}
// Properties pulled in via XInclude should be visible alongside the including
// file's own properties.
public void testIncludes() throws Exception {
// Start from a clean slate (this test manages its own files).
tearDown();
System.out.println("XXX testIncludes");
out=new BufferedWriter(new FileWriter(CONFIG2));
startConfig();
appendProperty("a","b");
appendProperty("c","d");
endConfig();
// CONFIG includes CONFIG2 and adds two properties of its own.
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
addInclude(CONFIG2);
appendProperty("e","f");
appendProperty("g","h");
endConfig();
// verify that the includes file contains all properties
Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
assertEquals(conf.get("a"), "b");
assertEquals(conf.get("c"), "d");
assertEquals(conf.get("e"), "f");
assertEquals(conf.get("g"), "h");
tearDown();
}
// Writer for the scratch config files; (re)assigned by each test before use.
BufferedWriter out;
// Verifies IntegerRanges parsing: an open lower bound ("-100"), a comma list
// of closed ranges ("4-6,9-10,27") and an open upper bound ("34-").
public void testIntegerRanges() {
Configuration conf = new Configuration();
conf.set("first", "-100");
conf.set("second", "4-6,9-10,27");
conf.set("third", "34-");
Configuration.IntegerRanges range = conf.getRange("first", null);
System.out.println("first = " + range);
assertEquals(true, range.isIncluded(0));
assertEquals(true, range.isIncluded(1));
assertEquals(true, range.isIncluded(100));
assertEquals(false, range.isIncluded(101));
range = conf.getRange("second", null);
System.out.println("second = " + range);
assertEquals(false, range.isIncluded(3));
assertEquals(true, range.isIncluded(4));
assertEquals(true, range.isIncluded(6));
assertEquals(false, range.isIncluded(7));
assertEquals(false, range.isIncluded(8));
assertEquals(true, range.isIncluded(9));
assertEquals(true, range.isIncluded(10));
assertEquals(false, range.isIncluded(11));
assertEquals(false, range.isIncluded(26));
assertEquals(true, range.isIncluded(27));
assertEquals(false, range.isIncluded(28));
range = conf.getRange("third", null);
System.out.println("third = " + range);
assertEquals(false, range.isIncluded(33));
assertEquals(true, range.isIncluded(34));
assertEquals(true, range.isIncluded(100000000));
}
// getInt()/getLong() should parse 0x-prefixed hex values, including negatives.
public void testHexValues() throws IOException{
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
appendProperty("test.hex1", "0x10");
appendProperty("test.hex2", "0xF");
appendProperty("test.hex3", "-0x10");
endConfig();
Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
assertEquals(16, conf.getInt("test.hex1", 0));
assertEquals(16, conf.getLong("test.hex1", 0));
assertEquals(15, conf.getInt("test.hex2", 0));
assertEquals(15, conf.getLong("test.hex2", 0));
assertEquals(-16, conf.getInt("test.hex3", 0));
assertEquals(-16, conf.getLong("test.hex3", 0));
}
// Decimal parsing, including a leading zero ("020" parses as decimal 20, not octal)
// and a negative value.
public void testIntegerValues() throws IOException{
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
appendProperty("test.int1", "20");
appendProperty("test.int2", "020");
appendProperty("test.int3", "-20");
endConfig();
Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
assertEquals(20, conf.getInt("test.int1", 0));
assertEquals(20, conf.getLong("test.int1", 0));
assertEquals(20, conf.getInt("test.int2", 0));
assertEquals(20, conf.getLong("test.int2", 0));
assertEquals(-20, conf.getInt("test.int3", 0));
assertEquals(-20, conf.getLong("test.int3", 0));
}
// reloadConfiguration() should re-read the resource files (picking up changed
// final status and dropped keys) while keys set programmatically survive.
public void testReload() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
appendProperty("test.key1", "final-value1", true);
appendProperty("test.key2", "value2");
endConfig();
Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
out=new BufferedWriter(new FileWriter(CONFIG2));
startConfig();
appendProperty("test.key1", "value1");
appendProperty("test.key3", "value3");
endConfig();
Path fileResource1 = new Path(CONFIG2);
conf.addResource(fileResource1);
// add a few values via set.
conf.set("test.key3", "value4");
conf.set("test.key4", "value5");
// key1 was final in CONFIG, so CONFIG2's value does not override it.
assertEquals("final-value1", conf.get("test.key1"));
assertEquals("value2", conf.get("test.key2"));
assertEquals("value4", conf.get("test.key3"));
assertEquals("value5", conf.get("test.key4"));
// change values in the test file...
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
appendProperty("test.key1", "final-value1");
appendProperty("test.key3", "final-value3", true);
endConfig();
conf.reloadConfiguration();
// key1 is no longer final in CONFIG, so CONFIG2's value now wins.
assertEquals("value1", conf.get("test.key1"));
// overlayed property overrides.
assertEquals("value4", conf.get("test.key3"));
// key2 was removed from the rewritten CONFIG, so it disappears after reload.
assertEquals(null, conf.get("test.key2"));
assertEquals("value5", conf.get("test.key4"));
}
public void testSize() throws IOException {
// Configuration(false): no default resources, so only the two set() keys count.
Configuration conf = new Configuration(false);
conf.set("a", "A");
conf.set("b", "B");
assertEquals(2, conf.size());
}
// clear() should empty the configuration: zero size and an exhausted iterator.
public void testClear() throws IOException {
Configuration conf = new Configuration(false);
conf.set("a", "A");
conf.set("b", "B");
conf.clear();
assertEquals(0, conf.size());
assertFalse(conf.iterator().hasNext());
}
// Allows running this test class directly from the command line.
public static void main(String[] argv) throws Exception {
junit.textui.TestRunner.main(new String[]{
TestConfiguration.class.getName()
});
}
// Jackson binding for the top-level JSON produced by Configuration.dumpConfiguration().
static class JsonConfiguration {
JsonProperty[] properties;
public JsonProperty[] getProperties() {
return properties;
}
public void setProperties(JsonProperty[] properties) {
this.properties = properties;
}
}
/**
 * Jackson binding for one property entry in the dumped JSON: its key, value,
 * final flag, and the resource it was loaded from.
 */
static class JsonProperty {
  String key;
  String value;
  boolean isFinal;
  String resource;

  public String getKey() {
    return key;
  }

  public void setKey(String key) {
    this.key = key;
  }

  public String getValue() {
    return value;
  }

  public void setValue(String value) {
    this.value = value;
  }

  public boolean getIsFinal() {
    return isFinal;
  }

  public void setIsFinal(boolean isFinal) {
    this.isFinal = isFinal;
  }

  public String getResource() {
    return resource;
  }

  public void setResource(String resource) {
    this.resource = resource;
  }
}
// Round-trips Configuration.dumpConfiguration() through Jackson and checks the
// dumped key count, values, final flags and source resources as resources are
// layered and keys are set programmatically.
public void testDumpConfiguration () throws IOException {
StringWriter outWriter = new StringWriter();
Configuration.dumpConfiguration(conf, outWriter);
String jsonStr = outWriter.toString();
ObjectMapper mapper = new ObjectMapper();
JsonConfiguration jconf =
mapper.readValue(jsonStr, JsonConfiguration.class);
// Baseline: how many properties the default resources contribute.
int defaultLength = jconf.getProperties().length;
// add 3 keys to the existing configuration properties
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
appendProperty("test.key1", "value1");
appendProperty("test.key2", "value2",true);
appendProperty("test.key3", "value3");
endConfig();
Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
out.close();
outWriter = new StringWriter();
Configuration.dumpConfiguration(conf, outWriter);
jsonStr = outWriter.toString();
mapper = new ObjectMapper();
jconf = mapper.readValue(jsonStr, JsonConfiguration.class);
int length = jconf.getProperties().length;
// check for consistency in the number of properties parsed in Json format.
assertEquals(length, defaultLength+3);
//change few keys in another resource file
out=new BufferedWriter(new FileWriter(CONFIG2));
startConfig();
appendProperty("test.key1", "newValue1");
appendProperty("test.key2", "newValue2");
endConfig();
Path fileResource1 = new Path(CONFIG2);
conf.addResource(fileResource1);
out.close();
outWriter = new StringWriter();
Configuration.dumpConfiguration(conf, outWriter);
jsonStr = outWriter.toString();
mapper = new ObjectMapper();
jconf = mapper.readValue(jsonStr, JsonConfiguration.class);
// put the keys and their corresponding attributes into a hashmap for their
// efficient retrieval
HashMap<String,JsonProperty> confDump = new HashMap<String,JsonProperty>();
for(JsonProperty prop : jconf.getProperties()) {
confDump.put(prop.getKey(), prop);
}
// check if the value and resource of test.key1 is changed
assertEquals("newValue1", confDump.get("test.key1").getValue());
assertEquals(false, confDump.get("test.key1").getIsFinal());
assertEquals(fileResource1.toString(),
confDump.get("test.key1").getResource());
// check if final parameter test.key2 is not changed, since it is first
// loaded as final parameter
assertEquals("value2", confDump.get("test.key2").getValue());
assertEquals(true, confDump.get("test.key2").getIsFinal());
assertEquals(fileResource.toString(),
confDump.get("test.key2").getResource());
// check for other keys which are not modified later
assertEquals("value3", confDump.get("test.key3").getValue());
assertEquals(false, confDump.get("test.key3").getIsFinal());
assertEquals(fileResource.toString(),
confDump.get("test.key3").getResource());
// check for resource to be "Unknown" for keys which are loaded using 'set'
// and expansion of properties
conf.set("test.key4", "value4");
conf.set("test.key5", "value5");
conf.set("test.key6", "${test.key5}");
outWriter = new StringWriter();
Configuration.dumpConfiguration(conf, outWriter);
jsonStr = outWriter.toString();
mapper = new ObjectMapper();
jconf = mapper.readValue(jsonStr, JsonConfiguration.class);
confDump = new HashMap<String, JsonProperty>();
for(JsonProperty prop : jconf.getProperties()) {
confDump.put(prop.getKey(), prop);
}
// Dumped values are expanded; set() keys carry the SOURCE_CODE resource marker.
assertEquals("value5",confDump.get("test.key6").getValue());
assertEquals(Configuration.SOURCE_CODE, confDump.get("test.key4").getResource());
outWriter.close();
}
// Verifies Configuration.dumpConfiguration() JSON output when default
// resources are NOT loaded: only explicitly added properties appear, with
// correct values, final flags, and source-resource attribution.
// NOTE(review): the method name has a typo ("Configuratio"); renaming would
// change the test's public name, so it is left as-is.
public void testDumpConfiguratioWithoutDefaults() throws IOException {
  // check for case when default resources are not loaded
  Configuration config = new Configuration(false);
  StringWriter outWriter = new StringWriter();
  Configuration.dumpConfiguration(config, outWriter);
  String jsonStr = outWriter.toString();
  ObjectMapper mapper = new ObjectMapper();
  JsonConfiguration jconf =
      mapper.readValue(jsonStr, JsonConfiguration.class);
  //ensure that no properties are loaded.
  assertEquals(0, jconf.getProperties().length);
  // add 2 keys
  out=new BufferedWriter(new FileWriter(CONFIG));
  startConfig();
  appendProperty("test.key1", "value1");
  // third argument marks the property as final
  appendProperty("test.key2", "value2",true);
  endConfig();
  Path fileResource = new Path(CONFIG);
  config.addResource(fileResource);
  // NOTE(review): out is closed only after addResource; presumably
  // endConfig() already flushes/closes the writer so the file is complete
  // before it is read — confirm against the endConfig() helper.
  out.close();
  // Re-dump and parse the configuration now that the resource is added.
  outWriter = new StringWriter();
  Configuration.dumpConfiguration(config, outWriter);
  jsonStr = outWriter.toString();
  mapper = new ObjectMapper();
  jconf = mapper.readValue(jsonStr, JsonConfiguration.class);
  // Index dumped properties by key for direct lookup.
  HashMap<String, JsonProperty>confDump = new HashMap<String, JsonProperty>();
  for (JsonProperty prop : jconf.getProperties()) {
    confDump.put(prop.getKey(), prop);
  }
  //ensure only 2 keys are loaded
  assertEquals(2,jconf.getProperties().length);
  //ensure the values are consistent
  assertEquals(confDump.get("test.key1").getValue(),"value1");
  assertEquals(confDump.get("test.key2").getValue(),"value2");
  //check the final tag
  assertEquals(false, confDump.get("test.key1").getIsFinal());
  assertEquals(true, confDump.get("test.key2").getIsFinal());
  //check the resource for each property
  for (JsonProperty prop : jconf.getProperties()) {
    assertEquals(fileResource.toString(),prop.getResource());
  }
}
// Regression test: registering default resources while other threads are
// constructing and reading Configurations must not throw.
public void testConcurrentDefaultResourceChange() throws Exception {
  try {
    final Exception[] failure = new Exception[1];
    // Thread that registers a large number of default resources.
    Thread loader = new Thread(new Runnable() {
      @Override
      public void run() {
        for (int i = 0; i < 500; i++) {
          Configuration.addDefaultResource("a" + i);
        }
      }
    });
    // Thread that concurrently constructs configurations and reads a key,
    // which forces resource loading.
    Thread creator = new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          for (int i = 0; i < 500; i++) {
            Configuration conf = new Configuration();
            conf.get("test");
          }
        } catch (Exception e) {
          // Record the first failure for the main thread to report.
          failure[0] = e;
        }
      }
    });
    // Run both threads concurrently and wait for completion.
    loader.start();
    creator.start();
    loader.join();
    creator.join();
    // Any recorded exception indicates a race in default-resource handling.
    assertNull("Should not have thrown exception " + failure[0], failure[0]);
  } finally {
    // Undo the process-global default-resource registration.
    for (int i = 0; i < 500; i++) {
      Configuration.removeDefaultResource("a" + i);
    }
  }
}
}
| |
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.kuujo.copycat.raft.protocol;
import net.kuujo.copycat.util.BuilderPool;
import net.kuujo.copycat.io.BufferInput;
import net.kuujo.copycat.io.BufferOutput;
import net.kuujo.copycat.io.serializer.SerializeWith;
import net.kuujo.copycat.io.serializer.Serializer;
import net.kuujo.copycat.io.storage.Entry;
import net.kuujo.copycat.util.ReferenceManager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
/**
 * Protocol append request.
 * <p>
 * Sent by the leader to replicate log entries to a follower and to advertise
 * the leader's commit and global indexes. Instances are pooled and reference
 * counted; {@link #close()} releases the contained entries.
 *
 * @author <a href="http://github.com/kuujo">Jordan Halterman</a>
 */
@SerializeWith(id=256)
public class AppendRequest extends AbstractRequest<AppendRequest> {
  private static final BuilderPool<Builder, AppendRequest> POOL = new BuilderPool<>(Builder::new);

  /**
   * Returns a new append request builder.
   *
   * @return A new append request builder.
   */
  public static Builder builder() {
    return POOL.acquire();
  }

  /**
   * Returns an append request builder for an existing request.
   *
   * @param request The request to build.
   * @return The append request builder.
   */
  public static Builder builder(AppendRequest request) {
    return POOL.acquire(request);
  }

  private long term;
  private int leader;
  private long logIndex;
  private long logTerm;
  // The request always owns this list. Builders copy caller-supplied entries
  // into it rather than aliasing, so reset()/readObject() can clear() it safely.
  private List<Entry> entries = new ArrayList<>(128);
  private long commitIndex;
  private long globalIndex;

  private AppendRequest(ReferenceManager<AppendRequest> referenceManager) {
    super(referenceManager);
  }

  @Override
  public Type type() {
    return Type.APPEND;
  }

  /**
   * Returns the requesting node's current term.
   *
   * @return The requesting node's current term.
   */
  public long term() {
    return term;
  }

  /**
   * Returns the requesting leader address.
   *
   * @return The leader's address.
   */
  public int leader() {
    return leader;
  }

  /**
   * Returns the index of the log entry preceding the new entry.
   *
   * @return The index of the log entry preceding the new entry.
   */
  public long logIndex() {
    return logIndex;
  }

  /**
   * Returns the term of the log entry preceding the new entry.
   *
   * @return The term of the log entry preceding the new entry.
   */
  public long logTerm() {
    return logTerm;
  }

  /**
   * Returns the log entries to append.
   *
   * @return A list of log entries.
   */
  public List<? extends Entry> entries() {
    return entries;
  }

  /**
   * Returns the leader's commit index.
   *
   * @return The leader commit index.
   */
  public long commitIndex() {
    return commitIndex;
  }

  /**
   * Returns the leader's global index.
   *
   * @return The leader global index.
   */
  public long globalIndex() {
    return globalIndex;
  }

  @Override
  public void writeObject(BufferOutput buffer, Serializer serializer) {
    buffer.writeLong(term)
      .writeInt(leader)
      .writeLong(logIndex)
      .writeLong(logTerm)
      .writeLong(commitIndex)
      .writeLong(globalIndex);
    // Entries are length-prefixed. Each entry's index is written explicitly
    // because it is restored via setIndex() on the read side.
    buffer.writeInt(entries.size());
    for (Entry entry : entries) {
      buffer.writeLong(entry.getIndex());
      serializer.writeObject(entry, buffer);
    }
  }

  @Override
  public void readObject(BufferInput buffer, Serializer serializer) {
    term = buffer.readLong();
    leader = buffer.readInt();
    logIndex = buffer.readLong();
    logTerm = buffer.readLong();
    commitIndex = buffer.readLong();
    globalIndex = buffer.readLong();
    int numEntries = buffer.readInt();
    // Reuse the request-owned list across pooled reuses of this instance.
    entries.clear();
    for (int i = 0; i < numEntries; i++) {
      long index = buffer.readLong();
      Entry entry = serializer.readObject(buffer);
      entry.setIndex(index);
      entries.add(entry);
    }
  }

  @Override
  public void close() {
    // Release entry references before returning this request to its pool.
    entries.forEach(Entry::release);
    super.close();
  }

  @Override
  public int hashCode() {
    return Objects.hash(getClass(), term, leader, logIndex, logTerm, entries, commitIndex, globalIndex);
  }

  @Override
  public boolean equals(Object object) {
    if (object instanceof AppendRequest) {
      AppendRequest request = (AppendRequest) object;
      return request.term == term
        && request.leader == leader
        && request.logIndex == logIndex
        && request.logTerm == logTerm
        && request.entries.equals(entries)
        && request.commitIndex == commitIndex
        && request.globalIndex == globalIndex;
    }
    return false;
  }

  @Override
  public String toString() {
    return String.format("%s[term=%d, leader=%s, logIndex=%d, logTerm=%d, entries=[%d], commitIndex=%d, globalIndex=%d]", getClass().getSimpleName(), term, leader, logIndex, logTerm, entries.size(), commitIndex, globalIndex);
  }

  /**
   * Append request builder.
   */
  public static class Builder extends AbstractRequest.Builder<Builder, AppendRequest> {
    protected Builder(BuilderPool<Builder, AppendRequest> pool) {
      super(pool, AppendRequest::new);
    }

    @Override
    public void reset() {
      super.reset();
      request.leader = 0;
      request.term = 0;
      request.logIndex = 0;
      request.logTerm = 0;
      // Safe: request.entries is always the request-owned ArrayList (see
      // withEntries), so clear() cannot throw UnsupportedOperationException.
      request.entries.clear();
      request.commitIndex = 0;
      request.globalIndex = 0;
    }

    /**
     * Sets the request term.
     *
     * @param term The request term.
     * @return The append request builder.
     */
    public Builder withTerm(long term) {
      if (term <= 0)
        throw new IllegalArgumentException("term must be positive");
      request.term = term;
      return this;
    }

    /**
     * Sets the request leader.
     *
     * @param leader The request leader.
     * @return The append request builder.
     */
    public Builder withLeader(int leader) {
      request.leader = leader;
      return this;
    }

    /**
     * Sets the request last log index.
     *
     * @param index The request last log index.
     * @return The append request builder.
     */
    public Builder withLogIndex(long index) {
      if (index < 0)
        throw new IllegalArgumentException("log index must be positive");
      request.logIndex = index;
      return this;
    }

    /**
     * Sets the request last log term.
     *
     * @param term The request last log term.
     * @return The append request builder.
     */
    public Builder withLogTerm(long term) {
      if (term < 0)
        throw new IllegalArgumentException("log term must be positive");
      request.logTerm = term;
      return this;
    }

    /**
     * Sets the request entries.
     *
     * @param entries The request entries.
     * @return The append request builder.
     */
    public Builder withEntries(Entry... entries) {
      return withEntries(Arrays.asList(entries));
    }

    /**
     * Sets the request entries. The given list is copied; later changes to it
     * do not affect the request.
     *
     * @param entries The request entries.
     * @return The append request builder.
     * @throws NullPointerException if {@code entries} is {@code null}
     */
    public Builder withEntries(List<? extends Entry> entries) {
      if (entries == null)
        throw new NullPointerException("entries cannot be null");
      // Copy rather than alias the caller's list. The previous cast-and-store
      // broke pooled reuse: reset() and readObject() call entries.clear(),
      // which throws UnsupportedOperationException for fixed-size lists such
      // as the Arrays.asList view produced by withEntries(Entry...).
      request.entries.clear();
      request.entries.addAll(entries);
      return this;
    }

    /**
     * Sets the request commit index.
     *
     * @param index The request commit index.
     * @return The append request builder.
     */
    public Builder withCommitIndex(long index) {
      if (index < 0)
        throw new IllegalArgumentException("commit index must be positive");
      request.commitIndex = index;
      return this;
    }

    /**
     * Sets the request global index.
     *
     * @param index The global recycle index.
     * @return The append request builder.
     */
    public Builder withGlobalIndex(long index) {
      if (index < 0)
        throw new IllegalArgumentException("global index must be positive");
      request.globalIndex = index;
      return this;
    }

    @Override
    public AppendRequest build() {
      super.build();
      // Validate the assembled request before handing it out.
      if (request.term <= 0)
        throw new IllegalArgumentException("term must be positive");
      if (request.logIndex < 0)
        throw new IllegalArgumentException("log index must be positive");
      if (request.logTerm < 0)
        throw new IllegalArgumentException("log term must be positive");
      if (request.entries == null)
        throw new NullPointerException("entries cannot be null");
      if (request.commitIndex < 0)
        throw new IllegalArgumentException("commit index must be positive");
      if (request.globalIndex < 0)
        throw new IllegalArgumentException("global index must be positive");
      return request;
    }

    @Override
    public int hashCode() {
      return Objects.hash(request);
    }

    @Override
    public boolean equals(Object object) {
      return object instanceof Builder && ((Builder) object).request.equals(request);
    }

    @Override
    public String toString() {
      return String.format("%s[request=%s]", getClass().getCanonicalName(), request);
    }
  }
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cli;
import static com.facebook.buck.jvm.core.JavaLibrary.MAVEN_JAR;
import static com.facebook.buck.jvm.core.JavaLibrary.SRC_JAR;
import static com.facebook.buck.jvm.java.Javadoc.DOC_JAR;
import com.facebook.buck.core.cell.CellPathResolver;
import com.facebook.buck.core.config.BuckConfig;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.SourcePathRuleFinder;
import com.facebook.buck.core.sourcepath.resolver.impl.DefaultSourcePathResolver;
import com.facebook.buck.event.ConsoleEvent;
import com.facebook.buck.jvm.java.MavenPublishable;
import com.facebook.buck.maven.Publisher;
import com.facebook.buck.parser.BuildTargetSpec;
import com.facebook.buck.parser.TargetNodeSpec;
import com.facebook.buck.util.CommandLineException;
import com.facebook.buck.util.ExitCode;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import java.io.IOException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import javax.annotation.Nullable;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.deployment.DeployResult;
import org.eclipse.aether.deployment.DeploymentException;
import org.kohsuke.args4j.Option;
/**
 * {@code buck publish}: builds the requested targets (flavored as Maven jars)
 * and deploys the resulting artifacts to a remote Maven repository, optionally
 * including source and Javadoc jars.
 */
public class PublishCommand extends BuildCommand {
  public static final String REMOTE_REPO_LONG_ARG = "--remote-repo";
  public static final String REMOTE_REPO_SHORT_ARG = "-r";
  public static final String INCLUDE_SOURCE_LONG_ARG = "--include-source";
  public static final String INCLUDE_SOURCE_SHORT_ARG = "-s";
  public static final String INCLUDE_DOCS_LONG_ARG = "--include-docs";
  public static final String INCLUDE_DOCS_SHORT_ARG = "-w";
  public static final String TO_MAVEN_CENTRAL_LONG_ARG = "--to-maven-central";
  public static final String DRY_RUN_LONG_ARG = "--dry-run";

  // Subdirectory under the cell's tmp dir where publish staging files go.
  private static final String PUBLISH_GEN_PATH = "publish";

  @Option(
      name = REMOTE_REPO_LONG_ARG,
      aliases = REMOTE_REPO_SHORT_ARG,
      usage = "A url of the remote repository to publish artifact(s) to")
  @Nullable
  private URL remoteRepo = null;

  @Option(
      name = TO_MAVEN_CENTRAL_LONG_ARG,
      usage = "Same as \"" + REMOTE_REPO_LONG_ARG + " " + Publisher.MAVEN_CENTRAL_URL + "\"")
  private boolean toMavenCentral = false;

  @Option(
      name = INCLUDE_SOURCE_LONG_ARG,
      aliases = INCLUDE_SOURCE_SHORT_ARG,
      usage = "Publish source code as well")
  private boolean includeSource = false;

  @Option(
      name = INCLUDE_DOCS_LONG_ARG,
      aliases = INCLUDE_DOCS_SHORT_ARG,
      usage = "Publish docs as well")
  private boolean includeDocs = false;

  @Option(name = DRY_RUN_LONG_ARG, usage = "Just print the artifacts to be published")
  private boolean dryRun = false;

  @Option(
      name = "--username",
      aliases = "-u",
      usage = "User name to use to authenticate with the server")
  @Nullable
  private String username = null;

  @Option(
      name = "--password",
      aliases = "-p",
      usage = "Password to use to authenticate with the server")
  @Nullable
  private String password = null;

  /**
   * Validates the repository flags, builds the requested targets via the
   * parent {@link BuildCommand}, then publishes the built artifacts.
   *
   * @return {@link ExitCode#SUCCESS} on a fully successful build-and-publish;
   *     the build's exit code if the build failed; {@link ExitCode#RUN_ERROR}
   *     if publishing failed.
   */
  @Override
  public ExitCode runWithoutHelp(CommandRunnerParams params)
      throws IOException, InterruptedException {
    // Input validation: exactly one of --remote-repo / --to-maven-central.
    if (remoteRepo != null && toMavenCentral) {
      throw new CommandLineException(
          "please specify only a single remote repository to publish to.\n"
              + "Use "
              + REMOTE_REPO_LONG_ARG
              + " <URL> or "
              + TO_MAVEN_CENTRAL_LONG_ARG
              + " but not both.");
    }
    if (remoteRepo == null && !toMavenCentral) {
      throw new CommandLineException(
          "please specify a remote repository to publish to.\n"
              + "Use "
              + REMOTE_REPO_LONG_ARG
              + " <URL> or "
              + TO_MAVEN_CENTRAL_LONG_ARG);
    }
    // Build the specified target(s).
    assertArguments(params);
    BuildRunResult buildRunResult;
    try (CommandThreadManager pool =
        new CommandThreadManager("Publish", getConcurrencyLimit(params.getBuckConfig()))) {
      buildRunResult = super.run(params, pool, ImmutableSet.of());
    }
    ExitCode exitCode = buildRunResult.getExitCode();
    if (exitCode != ExitCode.SUCCESS) {
      return exitCode;
    }
    // Publish starting with the given targets.
    return publishTargets(buildRunResult.getBuildTargets(), params)
        ? ExitCode.SUCCESS
        : ExitCode.RUN_ERROR;
  }

  /**
   * Publishes the given (already built) targets.
   *
   * <p>Targets that are not {@link MavenPublishable} or lack Maven coordinates
   * are reported to the event bus and mark the overall result as failed.
   * NOTE(review): publishable targets are still deployed even when some
   * targets failed validation above — presumably intentional best-effort
   * behavior; confirm before changing.
   *
   * @return true iff every target validated and the deployment succeeded.
   */
  private boolean publishTargets(
      ImmutableSet<BuildTarget> buildTargets, CommandRunnerParams params) {
    ImmutableSet.Builder<MavenPublishable> publishables = ImmutableSet.builder();
    boolean success = true;
    for (BuildTarget buildTarget : buildTargets) {
      BuildRule buildRule = getBuild().getGraphBuilder().requireRule(buildTarget);
      Preconditions.checkNotNull(buildRule);
      if (!(buildRule instanceof MavenPublishable)) {
        params
            .getBuckEventBus()
            .post(
                ConsoleEvent.severe(
                    "Cannot publish rule of type %s", buildRule.getClass().getName()));
        success = false;
        continue;
      }
      MavenPublishable publishable = (MavenPublishable) buildRule;
      if (!publishable.getMavenCoords().isPresent()) {
        params
            .getBuckEventBus()
            .post(
                ConsoleEvent.severe(
                    "No maven coordinates specified for %s",
                    buildTarget.getUnflavoredBuildTarget().getFullyQualifiedName()));
        success = false;
        continue;
      }
      publishables.add(publishable);
    }
    // Assume validation passed.
    URL repoUrl = toMavenCentral ? Publisher.MAVEN_CENTRAL : Preconditions.checkNotNull(remoteRepo);
    Publisher publisher =
        new Publisher(
            params.getCell().getFilesystem().getBuckPaths().getTmpDir().resolve(PUBLISH_GEN_PATH),
            repoUrl,
            Optional.ofNullable(username),
            Optional.ofNullable(password),
            dryRun);
    try {
      ImmutableSet<DeployResult> deployResults =
          publisher.publish(
              DefaultSourcePathResolver.from(
                  new SourcePathRuleFinder(getBuild().getGraphBuilder())),
              publishables.build());
      for (DeployResult deployResult : deployResults) {
        printArtifactsInformation(params, deployResult);
      }
    } catch (DeploymentException e) {
      params.getConsole().printBuildFailureWithoutStacktraceDontUnwrap(e);
      return false;
    }
    return success;
  }

  /** Prints the artifacts and the deploy request for one deployment to stdout. */
  private static void printArtifactsInformation(
      CommandRunnerParams params, DeployResult deployResult) {
    params
        .getConsole()
        .getStdOut()
        .println(
            "\nPublished artifacts:\n"
                + Joiner.on('\n')
                    .join(
                        FluentIterable.from(deployResult.getArtifacts())
                            .transform(PublishCommand::artifactToString)));
    params.getConsole().getStdOut().println("\nDeployRequest:\n" + deployResult.getRequest());
  }

  /** Renders an artifact as "coords < file" for console output. */
  private static String artifactToString(Artifact artifact) {
    return artifact + " < " + artifact.getFile();
  }

  /**
   * Expands each explicitly named build target into its Maven-jar flavored
   * form, plus source/doc jar flavors when requested. Duplicates are collapsed
   * by keeping one spec per resulting target.
   *
   * @throws IllegalArgumentException if any argument is not an explicit build
   *     target (patterns/specs cannot be flavored).
   */
  @Override
  public ImmutableList<TargetNodeSpec> parseArgumentsAsTargetNodeSpecs(
      CellPathResolver cellPathResolver, BuckConfig config, Iterable<String> targetsAsArgs) {
    ImmutableList<TargetNodeSpec> specs =
        super.parseArgumentsAsTargetNodeSpecs(cellPathResolver, config, targetsAsArgs);
    Map<BuildTarget, TargetNodeSpec> uniqueSpecs = new HashMap<>();
    for (TargetNodeSpec spec : specs) {
      if (!(spec instanceof BuildTargetSpec)) {
        throw new IllegalArgumentException(
            "Need to specify build targets explicitly when publishing. " + "Cannot modify " + spec);
      }
      BuildTargetSpec targetSpec = (BuildTargetSpec) spec;
      Preconditions.checkNotNull(targetSpec.getBuildTarget());
      BuildTarget mavenTarget = targetSpec.getBuildTarget().withFlavors(MAVEN_JAR);
      uniqueSpecs.put(mavenTarget, targetSpec.withBuildTarget(mavenTarget));
      if (includeSource) {
        BuildTarget sourceTarget = targetSpec.getBuildTarget().withFlavors(MAVEN_JAR, SRC_JAR);
        uniqueSpecs.put(sourceTarget, targetSpec.withBuildTarget(sourceTarget));
      }
      if (includeDocs) {
        BuildTarget docsTarget = targetSpec.getBuildTarget().withFlavors(MAVEN_JAR, DOC_JAR);
        uniqueSpecs.put(docsTarget, targetSpec.withBuildTarget(docsTarget));
      }
    }
    return ImmutableList.copyOf(uniqueSpecs.values());
  }

  @Override
  public String getShortDescription() {
    return "builds and publishes a library to a central repository";
  }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.vfs;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.fail;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.Uninterruptibles;
import com.google.devtools.build.lib.testutil.TestUtils;
import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests {@link UnixGlob}
*/
@RunWith(JUnit4.class)
public class GlobTest {

  // Root of the directory fixture created in initializeFileSystem().
  private Path tmpPath;
  private FileSystem fs;
  // When non-null, readdir() on exactly this path throws
  // FileNotFoundException, letting tests simulate traversal I/O failures.
  private Path throwOnReaddir = null;

  @Before
  public final void initializeFileSystem() throws Exception {
    // In-memory filesystem whose readdir can be made to fail on demand.
    fs = new InMemoryFileSystem() {
      @Override
      public Collection<Dirent> readdir(Path path, boolean followSymlinks) throws IOException {
        if (path.equals(throwOnReaddir)) {
          throw new FileNotFoundException(path.getPathString());
        }
        return super.readdir(path, followSymlinks);
      }
    };
    tmpPath = fs.getPath("/globtmp");
    // Shared fixture: /globtmp/{foo,food,fool}/... with one regular file.
    for (String dir : ImmutableList.of("foo/bar/wiz",
                        "foo/barnacle/wiz",
                        "food/barnacle/wiz",
                        "fool/barnacle/wiz")) {
      FileSystemUtils.createDirectoryAndParents(tmpPath.getRelative(dir));
    }
    FileSystemUtils.createEmptyFile(tmpPath.getRelative("foo/bar/wiz/file"));
  }

  @Test
  public void testQuestionMarkMatch() throws Exception {
    assertGlobMatches("foo?", /* => */"food", "fool");
  }

  @Test
  public void testQuestionMarkNoMatch() throws Exception {
    assertGlobMatches("food/bar?" /* => nothing */);
  }

  @Test
  public void testStartsWithStar() throws Exception {
    assertGlobMatches("*oo", /* => */"foo");
  }

  @Test
  public void testStartsWithStarWithMiddleStar() throws Exception {
    assertGlobMatches("*f*o", /* => */"foo");
  }

  @Test
  public void testEndsWithStar() throws Exception {
    assertGlobMatches("foo*", /* => */"foo", "food", "fool");
  }

  @Test
  public void testEndsWithStarWithMiddleStar() throws Exception {
    assertGlobMatches("f*oo*", /* => */"foo", "food", "fool");
  }

  @Test
  public void testMiddleStar() throws Exception {
    assertGlobMatches("f*o", /* => */"foo");
  }

  @Test
  public void testTwoMiddleStars() throws Exception {
    assertGlobMatches("f*o*o", /* => */"foo");
  }

  @Test
  public void testSingleStarPatternWithNamedChild() throws Exception {
    assertGlobMatches("*/bar", /* => */"foo/bar");
  }

  @Test
  public void testSingleStarPatternWithChildGlob() throws Exception {
    assertGlobMatches("*/bar*", /* => */
        "foo/bar", "foo/barnacle", "food/barnacle", "fool/barnacle");
  }

  @Test
  public void testSingleStarAsChildGlob() throws Exception {
    assertGlobMatches("foo/*/wiz", /* => */"foo/bar/wiz", "foo/barnacle/wiz");
  }

  @Test
  public void testNoAsteriskAndFilesDontExist() throws Exception {
    // Note un-UNIX like semantics:
    assertGlobMatches("ceci/n'est/pas/une/globbe" /* => nothing */);
  }

  @Test
  public void testSingleAsteriskUnderNonexistentDirectory() throws Exception {
    // Note un-UNIX like semantics:
    assertGlobMatches("not-there/*" /* => nothing */);
  }

  @Test
  public void testGlobWithNonExistentBase() throws Exception {
    // Globbing under a nonexistent base yields an empty result, not an error.
    Collection<Path> globResult = UnixGlob.forPath(fs.getPath("/does/not/exist"))
        .addPattern("*.txt")
        .globInterruptible();
    assertThat(globResult).isEmpty();
  }

  @Test
  public void testGlobUnderFile() throws Exception {
    // A glob component under a regular file matches nothing.
    assertGlobMatches("foo/bar/wiz/file/*" /* => nothing */);
  }

  /** Asserts that a single pattern, globbed under tmpPath, yields exactly the expected paths. */
  private void assertGlobMatches(String pattern, String... expecteds)
      throws Exception {
    assertGlobMatches(Collections.singleton(pattern), expecteds);
  }

  /** Asserts that the given patterns, globbed under tmpPath, yield exactly the expected paths. */
  private void assertGlobMatches(Collection<String> pattern,
                                 String... expecteds)
      throws Exception {
    assertThat(
        new UnixGlob.Builder(tmpPath)
            .addPatterns(pattern)
            .globInterruptible())
        .containsExactlyElementsIn(resolvePaths(expecteds));
  }

  /** Resolves tmpPath-relative strings to Paths; "." denotes tmpPath itself. */
  private Set<Path> resolvePaths(String... relativePaths) {
    Set<Path> expectedFiles = new HashSet<>();
    for (String expected : relativePaths) {
      Path file = expected.equals(".")
          ? tmpPath
          : tmpPath.getRelative(expected);
      expectedFiles.add(file);
    }
    return expectedFiles;
  }

  @Test
  public void testIOFailureOnStat() throws Exception {
    // An IOException from stat must propagate out of glob() unchanged.
    UnixGlob.FilesystemCalls syscalls = new UnixGlob.FilesystemCalls() {
      @Override
      public FileStatus statIfFound(Path path, Symlinks symlinks) throws IOException {
        throw new IOException("EIO");
      }

      @Override
      public Collection<Dirent> readdir(Path path, Symlinks symlinks) {
        throw new IllegalStateException();
      }
    };
    try {
      new UnixGlob.Builder(tmpPath)
          .addPattern("foo/bar/wiz/file")
          .setFilesystemCalls(new AtomicReference<>(syscalls))
          .glob();
      fail("Expected failure");
    } catch (IOException e) {
      assertThat(e).hasMessageThat().isEqualTo("EIO");
    }
  }

  @Test
  public void testGlobWithoutWildcardsDoesNotCallReaddir() throws Exception {
    // A literal (wildcard-free) pattern should be resolved by stat alone;
    // readdir throwing IllegalStateException proves it is never invoked.
    UnixGlob.FilesystemCalls syscalls = new UnixGlob.FilesystemCalls() {
      @Override
      public FileStatus statIfFound(Path path, Symlinks symlinks) throws IOException {
        return UnixGlob.DEFAULT_SYSCALLS.statIfFound(path, symlinks);
      }

      @Override
      public Collection<Dirent> readdir(Path path, Symlinks symlinks) {
        throw new IllegalStateException();
      }
    };
    assertThat(
        new UnixGlob.Builder(tmpPath)
            .addPattern("foo/bar/wiz/file")
            .setFilesystemCalls(new AtomicReference<>(syscalls))
            .glob())
        .containsExactly(tmpPath.getRelative("foo/bar/wiz/file"));
  }

  @Test
  public void testIllegalPatterns() throws Exception {
    assertIllegalPattern("foo**bar");
    assertIllegalPattern("");
    assertIllegalPattern(".");
    assertIllegalPattern("/foo");
    assertIllegalPattern("./foo");
    assertIllegalPattern("foo/");
    assertIllegalPattern("foo/./bar");
    assertIllegalPattern("../foo/bar");
    assertIllegalPattern("foo//bar");
  }

  /**
   * Tests that globs can contain Java regular expression special characters
   */
  @Test
  public void testSpecialRegexCharacter() throws Exception {
    Path tmpPath2 = fs.getPath("/globtmp2");
    FileSystemUtils.createDirectoryAndParents(tmpPath2);
    Path aDotB = tmpPath2.getChild("a.b");
    FileSystemUtils.createEmptyFile(aDotB);
    // "aab" must NOT match: the '.' is a literal dot, not a regex wildcard.
    FileSystemUtils.createEmptyFile(tmpPath2.getChild("aab"));
    // Note: this contains two asterisks because otherwise a RE is not built,
    // as an optimization.
    assertThat(UnixGlob.forPath(tmpPath2).addPattern("*a.b*").globInterruptible()).containsExactly(
        aDotB);
  }

  @Test
  public void testMatchesCallWithNoCache() {
    assertThat(UnixGlob.matches("*a*b", "CaCb", null)).isTrue();
  }

  @Test
  public void testMultiplePatterns() throws Exception {
    assertGlobMatches(Lists.newArrayList("foo", "fool"), "foo", "fool");
  }

  @Test
  public void testMatcherMethodRecursiveBelowDir() throws Exception {
    FileSystemUtils.createEmptyFile(tmpPath.getRelative("foo/file"));
    // "foo/**/*" matches anything strictly below foo, but not foo itself.
    String pattern = "foo/**/*";
    assertThat(UnixGlob.matches(pattern, "foo/bar")).isTrue();
    assertThat(UnixGlob.matches(pattern, "foo/bar/baz")).isTrue();
    assertThat(UnixGlob.matches(pattern, "foo")).isFalse();
    assertThat(UnixGlob.matches(pattern, "foob")).isFalse();
    assertThat(UnixGlob.matches("**/foo", "foo")).isTrue();
  }

  @Test
  public void testMultiplePatternsWithOverlap() throws Exception {
    // Overlapping patterns must not produce duplicate results.
    assertGlobMatchesAnyOrder(Lists.newArrayList("food", "foo?"),
                              "food", "fool");
    assertGlobMatchesAnyOrder(Lists.newArrayList("food", "?ood", "f??d"),
                              "food");
    assertThat(resolvePaths("food", "fool", "foo")).containsExactlyElementsIn(
        new UnixGlob.Builder(tmpPath).addPatterns("food", "xxx", "*").glob());
  }

  /** Like assertGlobMatches, but via containsExactlyElementsIn (order-insensitive). */
  private void assertGlobMatchesAnyOrder(ArrayList<String> patterns,
                                         String... paths) throws Exception {
    assertThat(resolvePaths(paths)).containsExactlyElementsIn(
        new UnixGlob.Builder(tmpPath).addPatterns(patterns).globInterruptible());
  }

  /** Asserts that globbing the pattern fails with an IllegalArgumentException mentioning the glob. */
  private void assertIllegalPattern(String pattern) throws Exception {
    try {
      new UnixGlob.Builder(tmpPath)
          .addPattern(pattern)
          .globInterruptible();
      fail();
    } catch (IllegalArgumentException e) {
      assertThat(e).hasMessageThat().containsMatch("in glob pattern");
    }
  }

  @Test
  public void testHiddenFiles() throws Exception {
    for (String dir : ImmutableList.of(".hidden", "..also.hidden", "not.hidden")) {
      FileSystemUtils.createDirectoryAndParents(tmpPath.getRelative(dir));
    }
    // Note that these are not in the result: ".", ".."
    assertGlobMatches("*", "not.hidden", "foo", "fool", "food", ".hidden", "..also.hidden");
    assertGlobMatches("*.hidden", "not.hidden");
  }

  @Test
  public void testIOException() throws Exception {
    // Arm the filesystem override (see field doc) and verify propagation.
    throwOnReaddir = fs.getPath("/throw_on_readdir");
    throwOnReaddir.createDirectory();
    try {
      new UnixGlob.Builder(throwOnReaddir).addPattern("**").glob();
      fail();
    } catch (IOException e) {
      // Expected.
    }
  }

  @Test
  public void testCheckCanBeInterrupted() throws Exception {
    // The directory filter interrupts the main thread mid-glob; the
    // interruptible globAsync must surface this as InterruptedException.
    final Thread mainThread = Thread.currentThread();
    final ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool(10);
    Predicate<Path> interrupterPredicate =
        new Predicate<Path>() {
          @Override
          public boolean apply(Path input) {
            mainThread.interrupt();
            return true;
          }
        };
    Future<?> globResult = null;
    try {
      globResult =
          new UnixGlob.Builder(tmpPath)
              .addPattern("**")
              .setDirectoryFilter(interrupterPredicate)
              .setThreadPool(executor)
              .globAsync(true);
      globResult.get();
      fail(); // Should have received InterruptedException
    } catch (InterruptedException e) {
      // good
    }
    globResult.cancel(true);
    try {
      Uninterruptibles.getUninterruptibly(globResult);
      fail();
    } catch (CancellationException e) {
      // Expected.
    }
    // Clear the interrupt bit so it does not leak into later assertions.
    Thread.interrupted();
    // The glob must not shut down a caller-supplied executor.
    assertThat(executor.isShutdown()).isFalse();
    executor.shutdown();
    assertThat(executor.awaitTermination(TestUtils.WAIT_TIMEOUT_SECONDS, TimeUnit.SECONDS))
        .isTrue();
  }

  @Test
  public void testCheckCannotBeInterrupted() throws Exception {
    // Non-interruptible glob(): the interrupt is recorded (bit set) but the
    // glob still runs to completion and returns the full result set.
    final Thread mainThread = Thread.currentThread();
    final ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool(10);
    final AtomicBoolean sentInterrupt = new AtomicBoolean(false);
    Predicate<Path> interrupterPredicate = new Predicate<Path>() {
      @Override
      public boolean apply(Path input) {
        // Interrupt exactly once.
        if (!sentInterrupt.getAndSet(true)) {
          mainThread.interrupt();
        }
        return true;
      }
    };
    List<Path> result = new UnixGlob.Builder(tmpPath)
        .addPatterns("**", "*")
        .setDirectoryFilter(interrupterPredicate).setThreadPool(executor).glob();
    // In the non-interruptible case, the interrupt bit should be set, but the
    // glob should return the correct set of full results.
    assertThat(Thread.interrupted()).isTrue();
    assertThat(result)
        .containsExactlyElementsIn(
            resolvePaths(
                ".",
                "foo",
                "foo/bar",
                "foo/bar/wiz",
                "foo/bar/wiz/file",
                "foo/barnacle",
                "foo/barnacle/wiz",
                "food",
                "food/barnacle",
                "food/barnacle/wiz",
                "fool",
                "fool/barnacle",
                "fool/barnacle/wiz"));
    // The glob must not shut down a caller-supplied executor.
    assertThat(executor.isShutdown()).isFalse();
    executor.shutdown();
    assertThat(executor.awaitTermination(TestUtils.WAIT_TIMEOUT_SECONDS, TimeUnit.SECONDS))
        .isTrue();
  }
}
| |
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android;
import com.android.sdklib.build.ApkBuilder;
import com.android.sdklib.build.ApkCreationException;
import com.android.sdklib.build.DuplicateFileException;
import com.android.sdklib.build.SealedApkException;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.StepExecutionResult;
import com.facebook.buck.util.HumanReadableException;
import com.google.common.base.Joiner;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.nio.file.Path;
import java.security.Key;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.UnrecoverableKeyException;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.Collection;
import java.util.Map;
/**
 * Merges resources into a final APK. This code is based off of the now deprecated apkbuilder tool:
 * https://android.googlesource.com/platform/sdk/+/fd30096196e3747986bdf8a95cc7713dd6e0b239%5E/sdkmanager/libs/sdklib/src/main/java/com/android/sdklib/build/ApkBuilderMain.java
 */
public class ApkBuilderStep implements Step {

  /**
   * The type of a keystore created via the {@code jarsigner} command in Sun/Oracle Java. See
   * http://docs.oracle.com/javase/7/docs/technotes/guides/security/StandardNames.html#KeyStore.
   */
  private static final String JARSIGNER_KEY_STORE_TYPE = "jks";

  private final ProjectFilesystem filesystem;
  private final Path resourceApk;
  private final Path dexFile;
  private final Path pathToOutputApkFile;
  private final ImmutableSet<Path> assetDirectories;
  private final ImmutableSet<Path> nativeLibraryDirectories;
  private final ImmutableSet<Path> zipFiles;
  private final ImmutableSet<Path> jarFilesThatMayContainResources;
  private final Path pathToKeystore;
  private final Supplier<KeystoreProperties> keystorePropertiesSupplier;
  private final boolean debugMode;
  private final ImmutableList<String> javaRuntimeLauncher;

  /**
   * @param filesystem Project filesystem used to resolve all relative paths below.
   * @param resourceApk Path to the Apk which only contains resources, no dex files.
   * @param pathToOutputApkFile Path to output our APK to.
   * @param dexFile Path to the classes.dex file.
   * @param assetDirectories List of paths to assets to be included in the apk.
   * @param nativeLibraryDirectories List of paths to native directories.
   * @param zipFiles List of paths to zipfiles to be included into the apk.
   * @param jarFilesThatMayContainResources Jars whose non-class resources are merged into the apk.
   * @param pathToKeystore Path to the keystore used to sign the apk.
   * @param keystorePropertiesSupplier Supplies the store/key passwords and alias lazily.
   * @param debugMode Whether or not to run ApkBuilder with debug mode turned on.
   * @param javaRuntimeLauncher Command used to launch a JVM (for {@link #getDescription}).
   */
  public ApkBuilderStep(
      ProjectFilesystem filesystem,
      Path resourceApk,
      Path pathToOutputApkFile,
      Path dexFile,
      ImmutableSet<Path> assetDirectories,
      ImmutableSet<Path> nativeLibraryDirectories,
      ImmutableSet<Path> zipFiles,
      ImmutableSet<Path> jarFilesThatMayContainResources,
      Path pathToKeystore,
      Supplier<KeystoreProperties> keystorePropertiesSupplier,
      boolean debugMode,
      ImmutableList<String> javaRuntimeLauncher) {
    this.filesystem = filesystem;
    this.resourceApk = resourceApk;
    this.pathToOutputApkFile = pathToOutputApkFile;
    this.dexFile = dexFile;
    this.assetDirectories = assetDirectories;
    this.nativeLibraryDirectories = nativeLibraryDirectories;
    this.jarFilesThatMayContainResources = jarFilesThatMayContainResources;
    this.zipFiles = zipFiles;
    this.pathToKeystore = pathToKeystore;
    this.keystorePropertiesSupplier = keystorePropertiesSupplier;
    this.debugMode = debugMode;
    this.javaRuntimeLauncher = javaRuntimeLauncher;
  }

  @Override
  public StepExecutionResult execute(ExecutionContext context)
      throws IOException, InterruptedException {
    // Only surface ApkBuilder's own console output when the user asked for verbosity.
    PrintStream output = null;
    if (context.getVerbosity().shouldUseVerbosityFlagIfAvailable()) {
      output = context.getStdOut();
    }
    try {
      PrivateKeyAndCertificate privateKeyAndCertificate = loadPrivateKeyAndCertificate();
      ApkBuilder builder =
          new ApkBuilder(
              filesystem.getPathForRelativePath(pathToOutputApkFile).toFile(),
              filesystem.getPathForRelativePath(resourceApk).toFile(),
              filesystem.getPathForRelativePath(dexFile).toFile(),
              privateKeyAndCertificate.privateKey,
              privateKeyAndCertificate.certificate,
              output);
      builder.setDebugMode(debugMode);
      for (Path nativeLibraryDirectory : nativeLibraryDirectories) {
        builder.addNativeLibraries(
            filesystem.getPathForRelativePath(nativeLibraryDirectory).toFile());
      }
      for (Path assetDirectory : assetDirectories) {
        builder.addSourceFolder(filesystem.getPathForRelativePath(assetDirectory).toFile());
      }
      for (Path zipFile : zipFiles) {
        // TODO(natthu): Skipping silently is bad. These should really be assertions.
        if (filesystem.exists(zipFile) && filesystem.isFile(zipFile)) {
          builder.addZipFile(filesystem.getPathForRelativePath(zipFile).toFile());
        }
      }
      for (Path jarFileThatMayContainResources : jarFilesThatMayContainResources) {
        Path jarFile = filesystem.getPathForRelativePath(jarFileThatMayContainResources);
        builder.addResourcesFromJar(jarFile.toFile());
      }

      // Build the APK
      builder.sealApk();
    } catch (ApkCreationException
        | KeyStoreException
        | NoSuchAlgorithmException
        | SealedApkException
        | UnrecoverableKeyException e) {
      context.logError(e, "Error when creating APK at: %s.", pathToOutputApkFile);
      return StepExecutionResult.ERROR;
    } catch (DuplicateFileException e) {
      // Surface duplicate entries as a human-readable build failure rather than a stack trace.
      throw new HumanReadableException(
          String.format(
              "Found duplicate file for APK: %1$s\nOrigin 1: %2$s\nOrigin 2: %3$s",
              e.getArchivePath(), e.getFile1(), e.getFile2()));
    }
    return StepExecutionResult.SUCCESS;
  }

  /**
   * Loads the signing key and its certificate from {@link #pathToKeystore}, using the alias and
   * passwords supplied by {@link #keystorePropertiesSupplier}.
   *
   * @throws HumanReadableException if the keystore cannot be parsed or the alias does not identify
   *     a key entry.
   */
  private PrivateKeyAndCertificate loadPrivateKeyAndCertificate()
      throws IOException, KeyStoreException, NoSuchAlgorithmException, UnrecoverableKeyException {
    KeyStore keystore = KeyStore.getInstance(JARSIGNER_KEY_STORE_TYPE);
    KeystoreProperties keystoreProperties = keystorePropertiesSupplier.get();
    char[] keystorePassword = keystoreProperties.getStorepass().toCharArray();
    // KeyStore.load() does not close the stream it is given, so close it here with
    // try-with-resources to avoid leaking a file handle per invocation.
    try (InputStream keystoreStream = filesystem.getInputStreamForRelativePath(pathToKeystore)) {
      keystore.load(keystoreStream, keystorePassword);
    } catch (NoSuchAlgorithmException | CertificateException e) {
      throw new HumanReadableException(e, "%s is an invalid keystore.", pathToKeystore);
    }

    String alias = keystoreProperties.getAlias();
    char[] keyPassword = keystoreProperties.getKeypass().toCharArray();
    Key key = keystore.getKey(alias, keyPassword);
    // key can be null if alias/password is incorrect.
    if (key == null) {
      throw new HumanReadableException(
          "The keystore [%s] key.alias [%s] does not exist or does not identify a key-related "
              + "entry",
          pathToKeystore, alias);
    }

    Certificate certificate = keystore.getCertificate(alias);
    // A jks key entry always carries a private key and an X.509 certificate chain, so these
    // casts hold for any keystore jarsigner/keytool produced.
    return new PrivateKeyAndCertificate((PrivateKey) key, (X509Certificate) certificate);
  }

  @Override
  public String getShortName() {
    return "apk_builder";
  }

  @Override
  public String getDescription(ExecutionContext context) {
    ImmutableList.Builder<String> args = ImmutableList.builder();
    args.addAll(javaRuntimeLauncher);
    args.add(
        "-classpath",
        // TODO(mbolin): Make the directory that corresponds to $ANDROID_HOME a field that is
        // accessible via an AndroidPlatformTarget and insert that here in place of "$ANDROID_HOME".
        "$ANDROID_HOME/tools/lib/sdklib.jar",
        "com.android.sdklib.build.ApkBuilderMain");
    args.add(String.valueOf(pathToOutputApkFile));
    args.add("-v" /* verbose */);
    if (debugMode) {
      args.add("-d");
    }
    // Unfortunately, ApkBuilderMain does not have CLI args to set the keystore,
    // so these member variables are left out of the command:
    // pathToKeystore, pathToKeystorePropertiesFile

    // ImmutableMultimap permits duplicate keys, so both "-z" groups below are preserved.
    Multimap<String, Collection<Path>> groups =
        ImmutableMultimap.<String, Collection<Path>>builder()
            .put("-z", ImmutableList.of(resourceApk))
            .put("-f", ImmutableList.of(dexFile))
            .put("-rf", assetDirectories)
            .put("-nf", nativeLibraryDirectories)
            .put("-z", zipFiles)
            .put("-rj", jarFilesThatMayContainResources)
            .build();
    for (Map.Entry<String, Collection<Path>> group : groups.entries()) {
      String prefix = group.getKey();
      for (Path path : group.getValue()) {
        args.add(prefix, String.valueOf(path));
      }
    }
    return Joiner.on(' ').join(args.build());
  }

  /** Simple immutable pair of the signing key and its certificate. */
  private static class PrivateKeyAndCertificate {
    private final PrivateKey privateKey;
    private final X509Certificate certificate;

    PrivateKeyAndCertificate(PrivateKey privateKey, X509Certificate certificate) {
      this.privateKey = privateKey;
      this.certificate = certificate;
    }
  }
}
| |
package org.jgroups.tests;
import org.jgroups.*;
import org.jgroups.protocols.*;
import org.jgroups.protocols.pbcast.GMS;
import org.jgroups.protocols.pbcast.NAKACK2;
import org.jgroups.protocols.relay.RELAY2;
import org.jgroups.protocols.relay.Route;
import org.jgroups.protocols.relay.SiteMaster;
import org.jgroups.protocols.relay.SiteMasterPicker;
import org.jgroups.protocols.relay.config.RelayConfig;
import org.jgroups.stack.Protocol;
import org.jgroups.util.Util;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.Test;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Various RELAY2-related tests: adding RELAY2 to a running channel, route recovery after
 * partitions/merges, bridge connect/disconnect, site-master failover and ordering with
 * multiple site masters.
 * @author Bela Ban
 * @since 3.2
 */
@Test(groups=Global.FUNCTIONAL,singleThreaded=true)
public class Relay2Test {
    protected JChannel a, b, c;  // members in site "lon"
    protected JChannel x, y, z;  // members in site "sfo"
    protected static final String BRIDGE_CLUSTER = "global";
    protected static final String LON_CLUSTER    = "lon-cluster";
    protected static final String SFO_CLUSTER    = "sfo-cluster";
    protected static final String SFO            = "sfo", LON="lon";
    // Bridge stack binds TCP to loopback so tests don't depend on external interfaces.
    protected static final InetAddress LOOPBACK;

    static {
        LOOPBACK=InetAddress.getLoopbackAddress();
    }

    // Close in reverse creation order; Util.close() tolerates null channels.
    @AfterMethod protected void destroy() {Util.close(z,y,x,c,b,a);}

    /**
     * Test that RELAY2 can be added to an already connected channel.
     */
    public void testAddRelay2ToAnAlreadyConnectedChannel() throws Exception {
        // Create and connect a channel.
        a=new JChannel();
        a.connect(SFO_CLUSTER);
        System.out.println("Channel " + a.getName() + " is connected. View: " + a.getView());

        // Add RELAY2 protocol to the already connected channel. Because the channel is already
        // connected, the protocol has to be initialized manually (local address, stack,
        // configure, current view) — steps that connect() would normally perform.
        RELAY2 relayToInject = createRELAY2(SFO);
        // Util.setField(Util.getField(relayToInject.getClass(), "local_addr"), relayToInject, a.getAddress());
        a.getProtocolStack().insertProtocolAtTop(relayToInject);
        relayToInject.down(new Event(Event.SET_LOCAL_ADDRESS, a.getAddress()));
        relayToInject.setProtocolStack(a.getProtocolStack());
        relayToInject.configure();
        relayToInject.handleView(a.getView());

        // Check for RELAY2 presence
        RELAY2 ar=a.getProtocolStack().findProtocol(RELAY2.class);
        assert ar != null;

        waitUntilRoute(SFO, true, 10000, 500, a);

        // A is the only member, so it must be site master.
        assert !ar.printRoutes().equals("n/a (not site master)") : "This member should be site master";

        Route route=getRoute(a, SFO);
        System.out.println("Route at sfo to sfo: " + route);
        assert route != null;
    }

    /**
     * Tests that routes are correctly registered after a partition and a subsequent merge
     * (https://issues.jboss.org/browse/JGRP-1524)
     */
    public void testMissingRouteAfterMerge() throws Exception {
        a=createNode(LON, "A", LON_CLUSTER, null);
        b=createNode(LON, "B", LON_CLUSTER, null);
        Util.waitUntilAllChannelsHaveSameView(30000, 1000, a, b);

        x=createNode(SFO, "X", SFO_CLUSTER, null);
        assert x.getView().size() == 1;

        RELAY2 ar=a.getProtocolStack().findProtocol(RELAY2.class),
          xr=x.getProtocolStack().findProtocol(RELAY2.class);

        assert ar != null && xr != null;

        // Wait (up to 10s) until both site masters (A and X) see a 2-member bridge view.
        JChannel a_bridge=null, x_bridge=null;
        for(int i=0; i < 20; i++) {
            a_bridge=ar.getBridge(SFO);
            x_bridge=xr.getBridge(LON);
            if(a_bridge != null && x_bridge != null && a_bridge.getView().size() == 2 && x_bridge.getView().size() == 2)
                break;
            Util.sleep(500);
        }

        assert a_bridge != null && x_bridge != null;

        System.out.println("A's bridge channel: " + a_bridge.getView());
        System.out.println("X's bridge channel: " + x_bridge.getView());
        assert a_bridge.getView().size() == 2 : "bridge view is " + a_bridge.getView();
        assert x_bridge.getView().size() == 2 : "bridge view is " + x_bridge.getView();

        Route route=getRoute(x, LON);
        System.out.println("Route at sfo to lon: " + route);
        assert route != null;

        // Now inject a partition into site LON
        System.out.println("Creating partition between A and B:");
        createPartition(a, b);

        System.out.println("A's view: " + a.getView() + "\nB's view: " + b.getView());
        assert a.getView().size() == 1 && b.getView().size() == 1;

        // The route to LON must survive the LON-internal partition.
        route=getRoute(x, LON);
        System.out.println("Route at sfo to lon: " + route);
        assert route != null;

        View bridge_view=xr.getBridgeView(BRIDGE_CLUSTER);
        System.out.println("bridge_view = " + bridge_view);

        // Now make A and B form a cluster again: inject the same MergeView into both GMS
        // instances so the channels agree on the post-merge membership.
        View merge_view=new MergeView(a.getAddress(), 10, Arrays.asList(a.getAddress(), b.getAddress()),
                                      Arrays.asList(View.create(a.getAddress(), 5, a.getAddress()),
                                                    View.create(b.getAddress(), 5, b.getAddress())));
        GMS gms=a.getProtocolStack().findProtocol(GMS.class);
        gms.installView(merge_view, null);
        gms=b.getProtocolStack().findProtocol(GMS.class);
        gms.installView(merge_view, null);

        Util.waitUntilAllChannelsHaveSameView(20000, 500, a, b);
        System.out.println("A's view: " + a.getView() + "\nB's view: " + b.getView());

        // Wait until X's bridge view contains both site masters again.
        for(int i=0; i < 20; i++) {
            bridge_view=xr.getBridgeView(BRIDGE_CLUSTER);
            if(bridge_view != null && bridge_view.size() == 2)
                break;
            Util.sleep(500);
        }

        route=getRoute(x, LON);
        System.out.println("Route at sfo to lon: " + route);
        assert route != null;
    }

    /**
     * Tests whether the bridge channel connects and disconnects ok.
     */
    public void testConnectAndReconnectOfBridgeStack() throws Exception {
        a=new JChannel(createBridgeStack());
        a.setName("A");
        b=new JChannel(createBridgeStack());
        b.setName("B");

        a.connect(BRIDGE_CLUSTER);
        b.connect(BRIDGE_CLUSTER);
        Util.waitUntilAllChannelsHaveSameView(10000, 500, a, b);

        b.disconnect();
        Util.waitUntilAllChannelsHaveSameView(10000, 500, a);

        b.connect(BRIDGE_CLUSTER);
        Util.waitUntilAllChannelsHaveSameView(10000, 500, a, b);
    }

    /**
     * Tests sites LON and SFO, with SFO disconnecting (bridge view on LON should be 1) and reconnecting (bridge view on
     * LON and SFO should be 2)
     */
    public void testDisconnectAndReconnect() throws Exception {
        a=createNode(LON, "A", LON_CLUSTER, null);
        x=createNode(SFO, "X", SFO_CLUSTER, null);

        System.out.println("Started A and X; waiting for bridge view of 2 on A and X");
        waitForBridgeView(2, 20000, 500, a, x);

        System.out.println("Disconnecting X; waiting for a bridge view on 1 on A");
        x.disconnect();
        waitForBridgeView(1, 20000, 500, a);

        System.out.println("Reconnecting X again; waiting for a bridge view of 2 on A and X");
        x.connect(SFO_CLUSTER);
        waitForBridgeView(2, 20000, 500, a, x);
    }

    /**
     * Verifies site-master failover: when the current site master of LON leaves, the next
     * member takes over and rejoins the bridge cluster.
     */
    public void testCoordinatorShutdown() throws Exception {
        a=createNode(LON, "A", LON_CLUSTER, null);
        b=createNode(LON, "B", LON_CLUSTER, null);
        x=createNode(SFO, "X", SFO_CLUSTER, null);
        y=createNode(SFO, "Y", SFO_CLUSTER, null);
        Util.waitUntilAllChannelsHaveSameView(10000, 100, a, b);
        Util.waitUntilAllChannelsHaveSameView(10000, 100, x, y);
        waitForBridgeView(2, 20000, 100, a, x); // A and X are site masters
        long start=System.currentTimeMillis();
        a.close();
        long time=System.currentTimeMillis()-start;
        System.out.println("A took " + time + " ms");
        Util.waitUntilAllChannelsHaveSameView(10000, 100, b);
        waitForBridgeView(2, 20000, 100, b, x); // B and X are now site masters
        long start2=System.currentTimeMillis();
        b.close();
        long time2=System.currentTimeMillis() - start2;
        System.out.println("B took " + time2 + " ms");
        waitForBridgeView(1, 40000, 500, x);
        Util.close(x,y);
    }

    /**
     * Tests the following scenario:
     * <ul>
     *     <li>Nodes A in LON and B in SFO, both are up</li>
     *     <li>B goes down</li>
     *     <li>The status of site SFO in LON is set to UNKNOWN and a task T is started which will set SFO's status
     *         to DOWN in site_down_timeout ms</li>
     *     <li>Before T kicks in, B in SFO is started again</li>
     *     <li>The status of site SFO in LON is now UP</li>
     *     <li>Make sure T is cancelled when transitioning from UNKNOWN to UP, or else it'll set the status
     *         of SFO to DOWN when it triggers</li>
     * </ul>
     */
    public void testUnknownAndUpStateTransitions() throws Exception {
        a=createNode(LON, "A", LON_CLUSTER, null);
        x=createNode(SFO, "X", SFO_CLUSTER, null);
        waitForBridgeView(2, 20000, 500, a, x);

        System.out.println("Disconnecting X");
        x.disconnect();
        System.out.println("A: waiting for site SFO to be UNKNOWN");
        waitUntilRoute(SFO, false, 20000, 500, a);

        System.out.println("Reconnecting X, waiting for 5 seconds to see if the route is marked as DOWN");
        x.connect(SFO_CLUSTER);
        // The 5s sleep gives the (buggy) DOWN task a chance to fire if it wasn't cancelled.
        Util.sleep(5000);
        Route route=getRoute(a, SFO);
        assert route != null : "route is " + route + " (expected to be UP)";
        route=getRoute(x, LON);
        assert route != null : "route is " + route + " (expected to be UP)";
    }

    /**
     * Cluster A,B,C in LON and X,Y,Z in SFO. A, B, X and Y are site masters (max_site_masters: 2).
     * Verifies that messages sent by C in the LON site are received in the correct order by all members of the SFO site
     * despite using multiple site masters. JIRA: https://issues.jboss.org/browse/JGRP-2112
     */
    public void testSenderOrderWithMultipleSiteMasters() throws Exception {
        MyReceiver rx=new MyReceiver(), ry=new MyReceiver(), rz=new MyReceiver();
        final int NUM=512;
        // SiteMasterPickerImpl always picks the first site master / route, so all of C's
        // messages flow through the same path and ordering can be asserted.
        final String sm_picker_impl=SiteMasterPickerImpl.class.getName();
        a=createNode(LON, "A", LON_CLUSTER, 2, sm_picker_impl, null);
        b=createNode(LON, "B", LON_CLUSTER, 2, sm_picker_impl, null);
        c=createNode(LON, "C", LON_CLUSTER, 2, sm_picker_impl, null);
        Util.waitUntilAllChannelsHaveSameView(10000, 1000, a,b,c);

        x=createNode(SFO, "X", SFO_CLUSTER, 2, sm_picker_impl, rx);
        y=createNode(SFO, "Y", SFO_CLUSTER, 2, sm_picker_impl, ry);
        z=createNode(SFO, "Z", SFO_CLUSTER, 2, sm_picker_impl, rz);
        Util.waitUntilAllChannelsHaveSameView(10000, 1000, x,y,z);

        // Bridge cluster consists of the 4 site masters: A, B, X, Y.
        waitForBridgeView(4, 10000, 1000, a,b,x,y);

        // C in LON sends messages to the site master of SFO (via either SM A or B); everyone in SFO (x,y,z)
        // must receive them in correct order
        SiteMaster target_sm=new SiteMaster(SFO);
        System.out.printf("%s: sending %d messages to %s:\n", c.getAddress(), NUM, target_sm);
        for(int i=1; i <= NUM; i++) {
            Message msg=new Message(target_sm, i); // the seqno is in the payload of the message
            c.send(msg);
        }

        // Poll (up to 10s) until one SFO site master has received all messages.
        boolean running=true;
        for(int i=0; running && i < 10; i++) {
            for(MyReceiver r: Arrays.asList(rx,ry,rz)) {
                if(r.getList().size() >= NUM) {
                    running=false;
                    break;
                }
            }
            Util.sleep(1000);
        }

        // Exactly one of X/Y (the picked site master) receives everything; Z is no site
        // master and must receive nothing.
        System.out.printf("X: size=%d\nY: size=%d\nZ: size=%d\n", rx.getList().size(), ry.getList().size(), rz.getList().size());
        assert rx.getList().size() == NUM || ry.getList().size() == NUM;
        assert rz.getList().isEmpty();
    }

    /** Deterministic picker: always routes via the first site master / first route. */
    protected static class SiteMasterPickerImpl implements SiteMasterPicker {

        public SiteMasterPickerImpl() {
        }

        public Address pickSiteMaster(List<Address> site_masters, Address original_sender) {
            return site_masters.get(0);
        }

        public Route pickRoute(String site, List<Route> routes, Address original_sender) {
            return routes.get(0);
        }
    }

    /** Convenience overload: single site master, default picker. */
    protected JChannel createNode(String site_name, String node_name, String cluster_name,
                                  Receiver receiver) throws Exception {
        return createNode(site_name, node_name, cluster_name, 1, null, receiver);
    }

    /**
     * Creates (and, if cluster_name != null, connects) a channel with a SHARED_LOOPBACK
     * stack topped by RELAY2 configured for the given site.
     */
    protected JChannel createNode(String site_name, String node_name, String cluster_name, int num_site_masters,
                                  String sm_picker, Receiver receiver) throws Exception {
        JChannel ch=new JChannel(new SHARED_LOOPBACK(),
                                 new SHARED_LOOPBACK_PING(),
                                 new MERGE3().setValue("max_interval", 3000).setValue("min_interval", 1000),
                                 new NAKACK2(),
                                 new UNICAST3(),
                                 new GMS().setValue("print_local_addr", false),
                                 new FORWARD_TO_COORD(),
                                 createRELAY2(site_name)
                                   .setValue("max_site_masters", num_site_masters)
                                   .setValue("site_master_picker_impl", sm_picker)).name(node_name);
        if(receiver != null)
            ch.setReceiver(receiver);
        if(cluster_name != null)
            ch.connect(cluster_name);
        return ch;
    }

    /** Creates a RELAY2 protocol knowing both sites (LON and SFO) over the same bridge cluster. */
    protected RELAY2 createRELAY2(String site_name) {
        RELAY2 relay=new RELAY2().site(site_name).enableAddressTagging(false).asyncRelayCreation(false);

        RelayConfig.SiteConfig lon_cfg=new RelayConfig.SiteConfig(LON),
          sfo_cfg=new RelayConfig.SiteConfig(SFO);

        lon_cfg.addBridge(new RelayConfig.ProgrammaticBridgeConfig(BRIDGE_CLUSTER, createBridgeStack()));
        sfo_cfg.addBridge(new RelayConfig.ProgrammaticBridgeConfig(BRIDGE_CLUSTER, createBridgeStack()));
        relay.addSite(LON, lon_cfg).addSite(SFO, sfo_cfg);
        return relay;
    }

    /** TCP/MPING stack used by the inter-site bridge channels (bound to loopback). */
    protected static Protocol[] createBridgeStack() {
        return new Protocol[] {
          new TCP().setBindAddress(LOOPBACK),
          new MPING(),
          new MERGE3().setValue("max_interval", 3000).setValue("min_interval", 1000),
          new NAKACK2().setUseMcastXmit(false),
          new UNICAST3(),
          new GMS().setValue("print_local_addr", false)
        };
    }

    /** Creates a singleton view for each channel listed and injects it */
    protected static void createPartition(JChannel ... channels) {
        for(JChannel ch: channels) {
            View view=View.create(ch.getAddress(), 5, ch.getAddress());
            GMS gms=ch.getProtocolStack().findProtocol(GMS.class);
            gms.installView(view);
        }
    }

    /**
     * Polls until every listed channel's bridge view has the expected size, then asserts it.
     * Prints the bridge views before asserting to aid debugging on failure.
     */
    protected void waitForBridgeView(int expected_size, long timeout, long interval, JChannel ... channels) {
        long deadline=System.currentTimeMillis() + timeout;

        while(System.currentTimeMillis() < deadline) {
            boolean views_correct=true;
            for(JChannel ch: channels) {
                RELAY2 relay=ch.getProtocolStack().findProtocol(RELAY2.class);
                View bridge_view=relay.getBridgeView(BRIDGE_CLUSTER);
                if(bridge_view == null || bridge_view.size() != expected_size) {
                    views_correct=false;
                    break;
                }
            }
            if(views_correct)
                break;
            Util.sleep(interval);
        }

        System.out.println("Bridge views:\n");
        for(JChannel ch: channels) {
            RELAY2 relay=ch.getProtocolStack().findProtocol(RELAY2.class);
            View bridge_view=relay.getBridgeView(BRIDGE_CLUSTER);
            System.out.println(ch.getAddress() + ": " + bridge_view);
        }

        for(JChannel ch: channels) {
            RELAY2 relay=ch.getProtocolStack().findProtocol(RELAY2.class);
            View bridge_view=relay.getBridgeView(BRIDGE_CLUSTER);
            assert bridge_view != null && bridge_view.size() == expected_size
              : ch.getAddress() + ": bridge view=" + bridge_view + ", expected=" + expected_size;
        }
    }

    /**
     * Polls until the route to site_name is present (present == true) or absent
     * (present == false) on the given channel, then asserts the final state.
     */
    protected void waitUntilRoute(String site_name, boolean present,
                                  long timeout, long interval, JChannel ch) throws Exception {
        RELAY2 relay=ch.getProtocolStack().findProtocol(RELAY2.class);
        if(relay == null)
            throw new IllegalArgumentException("Protocol RELAY2 not found");

        Route route=null;
        long deadline=System.currentTimeMillis() + timeout;
        while(System.currentTimeMillis() < deadline) {
            route=relay.getRoute(site_name);
            if((route != null && present) || (route == null && !present))
                break;
            Util.sleep(interval);
        }
        assert (route != null && present) || (route == null && !present);
    }

    /** Returns the route to site_name as seen by ch's RELAY2, or null if unknown. */
    protected Route getRoute(JChannel ch, String site_name) {
        RELAY2 relay=ch.getProtocolStack().findProtocol(RELAY2.class);
        return relay.getRoute(site_name);
    }

    /** Records received Integer payloads in arrival order. */
    protected static class MyReceiver extends ReceiverAdapter {
        protected final List<Integer> list=new ArrayList<>(512);

        public List<Integer> getList() {return list;}
        public void          clear()   {list.clear();}

        public void receive(Message msg) {
            list.add(msg.getObject());
            System.out.printf("<-- %s from %s\n", msg.getObject(), msg.src());
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.broker.service;
import static org.apache.pulsar.broker.auth.MockedPulsarServiceBaseTest.retryStrategically;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.fail;
import java.util.LinkedHashSet;
import java.util.concurrent.TimeUnit;
import lombok.Cleanup;
import org.apache.pulsar.broker.BrokerTestUtil;
import org.apache.pulsar.broker.service.persistent.PersistentTopic;
import org.apache.pulsar.client.admin.PulsarAdminException;
import org.apache.pulsar.client.api.Producer;
import org.apache.pulsar.client.api.PulsarClient;
import org.apache.pulsar.client.api.PulsarClientException;
import org.apache.pulsar.common.naming.NamespaceBundle;
import org.apache.pulsar.common.naming.NamespaceBundles;
import org.apache.pulsar.common.naming.NamespaceName;
import org.apache.pulsar.common.policies.data.TopicStats;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import org.testng.collections.Lists;
import com.google.common.collect.Sets;
@Test(groups = "broker")
public class PeerReplicatorTest extends ReplicatorTestBase {

    @Override
    @BeforeClass(timeOut = 300000)
    public void setup() throws Exception {
        super.setup();
    }

    @Override
    @AfterClass(alwaysRun = true, timeOut = 300000)
    public void cleanup() throws Exception {
        super.cleanup();
    }

    @DataProvider(name = "lookupType")
    public Object[][] codecProvider() {
        return new Object[][] { { "http" }, { "binary" } };
    }

    /**
     * It verifies that lookup/admin requests for global-namespace would be redirected to peer-cluster if local cluster
     * doesn't own it and peer-cluster owns it, else request will be failed.
     * <pre>
     * 1. Create global-namespace ns1 for replication cluster-r1
     * 2. Try to create producer using broker in cluster r3
     * 3. Reject lookup: "r3" receives request and doesn't find namespace in local/peer cluster
     * 4. Add "r1" as a peer-cluster into "r3"
     * 5. Try to create producer using broker in cluster r3
     * 6. Success : "r3" finds "r1" in peer cluster which owns n1 and redirects to "r1"
     * 7. call admin-api to "r3" which redirects request to "r1"
     *
     * </pre>
     *
     * @param protocol lookup transport to exercise: "http" or "binary"
     * @throws Exception
     */
    @Test(dataProvider = "lookupType", timeOut = 10000)
    public void testPeerClusterTopicLookup(String protocol) throws Exception {

        // clean up peer-clusters
        admin1.clusters().updatePeerClusterNames("r1", null);
        admin1.clusters().updatePeerClusterNames("r2", null);
        admin1.clusters().updatePeerClusterNames("r3", null);

        final String serviceUrl = protocol.equalsIgnoreCase("http") ? pulsar3.getWebServiceAddress()
                : pulsar3.getBrokerServiceUrl();
        final String namespace1 = "pulsar/global/peer1-" + protocol;
        final String namespace2 = "pulsar/global/peer2-" + protocol;
        admin1.namespaces().createNamespace(namespace1);
        admin1.namespaces().createNamespace(namespace2);
        // add replication cluster
        admin1.namespaces().setNamespaceReplicationClusters(namespace1, Sets.newHashSet("r1"));
        admin1.namespaces().setNamespaceReplicationClusters(namespace2, Sets.newHashSet("r2"));
        admin1.clusters().updatePeerClusterNames("r3", null);

        // disable tls as redirection url is prepared according tls configuration
        pulsar1.getConfiguration().setTlsEnabled(false);
        pulsar2.getConfiguration().setTlsEnabled(false);
        pulsar3.getConfiguration().setTlsEnabled(false);

        final String topic1 = "persistent://" + namespace1 + "/topic1";
        final String topic2 = "persistent://" + namespace2 + "/topic2";

        @Cleanup
        PulsarClient client3 = PulsarClient.builder().serviceUrl(serviceUrl).statsInterval(0, TimeUnit.SECONDS)
                .operationTimeout(1000, TimeUnit.MILLISECONDS).build();
        try {
            // try to create producer for topic1 (part of cluster: r1) by calling cluster: r3
            client3.newProducer().topic(topic1).create();
            fail("should have failed as cluster:r3 doesn't own namespace");
        } catch (PulsarClientException e) {
            // Ok: r3 has no peer clusters yet, so the lookup must be rejected.
        }

        try {
            // try to create producer for topic2 (part of cluster: r2) by calling cluster: r3
            client3.newProducer().topic(topic2).create();
            fail("should have failed as cluster:r3 doesn't own namespace");
        } catch (PulsarClientException e) {
            // Ok: same as above — no peer relationship exists for r2 either.
        }

        // set peer-clusters : r3->r1
        admin1.clusters().updatePeerClusterNames("r3", Sets.newLinkedHashSet(Lists.newArrayList("r1")));
        Producer<byte[]> producer = client3.newProducer().topic(topic1).create();
        PersistentTopic topic = (PersistentTopic) pulsar1.getBrokerService().getOrCreateTopic(topic1).get();
        assertNotNull(topic);
        pulsar1.getBrokerService().updateRates();
        // verify the producer is visible both via the owning cluster's admin (admin1) and via
        // admin3, whose request must be redirected to peer-cluster r1
        TopicStats stats = admin1.topics().getStats(topic1);
        assertNotNull(stats);
        assertEquals(stats.publishers.size(), 1);
        stats = admin3.topics().getStats(topic1);
        assertNotNull(stats);
        assertEquals(stats.publishers.size(), 1);
        producer.close();

        // set peer-clusters : r3->r2
        admin2.clusters().updatePeerClusterNames("r3", Sets.newLinkedHashSet(Lists.newArrayList("r2")));
        producer = client3.newProducer().topic(topic2).create();
        topic = (PersistentTopic) pulsar2.getBrokerService().getOrCreateTopic(topic2).get();
        assertNotNull(topic);
        pulsar2.getBrokerService().updateRates();
        // get stats for topic2 via admin3, which should redirect to peer-cluster r2.
        // NOTE(review): the original queried admin3 twice here where the earlier block queried
        // the owning cluster first — possibly a copy-paste; behavior kept as-is.
        stats = admin3.topics().getStats(topic2);
        assertNotNull(stats);
        assertEquals(stats.publishers.size(), 1);
        stats = admin3.topics().getStats(topic2);
        assertNotNull(stats);
        assertEquals(stats.publishers.size(), 1);
        producer.close();
    }

    /**
     * Verifies that updatePeerClusterNames() round-trips: after setting two peer clusters on r1,
     * getPeerClusterNames() eventually returns exactly that set.
     */
    @Test(timeOut = 10000)
    public void testGetPeerClusters() throws Exception {

        // clean up peer-clusters
        admin1.clusters().updatePeerClusterNames("r1", null);
        admin1.clusters().updatePeerClusterNames("r2", null);
        admin1.clusters().updatePeerClusterNames("r3", null);

        final String mainClusterName = "r1";
        assertNull(admin1.clusters().getPeerClusterNames(mainClusterName));
        LinkedHashSet<String> peerClusters = Sets.newLinkedHashSet(Lists.newArrayList("r2", "r3"));
        admin1.clusters().updatePeerClusterNames(mainClusterName, peerClusters);
        // Wait for the update to propagate. The original predicate checked size() == 1, which
        // can never match the two peers configured above, so the retry loop always ran to
        // exhaustion; wait for the actual expected size instead.
        retryStrategically((test) -> {
            try {
                return admin1.clusters().getPeerClusterNames(mainClusterName).size() == peerClusters.size();
            } catch (PulsarAdminException e) {
                return false;
            }
        }, 5, 100);
        assertEquals(admin1.clusters().getPeerClusterNames(mainClusterName), peerClusters);
    }

    /**
     * Removing local cluster from the replication-cluster should make sure that bundle should not be loaded by the
     * cluster even if owner broker doesn't receive the watch to avoid lookup-conflict between peer-cluster.
     *
     * @throws Exception
     */
    @Test(groups = "broker")
    public void testPeerClusterInReplicationClusterListChange() throws Exception {

        // clean up peer-clusters
        admin1.clusters().updatePeerClusterNames("r1", null);
        admin1.clusters().updatePeerClusterNames("r2", null);
        admin1.clusters().updatePeerClusterNames("r3", null);

        final String serviceUrl = pulsar3.getBrokerServiceUrl();
        final String namespace1 = BrokerTestUtil.newUniqueName("pulsar/global/peer-change-repl-ns");
        admin1.namespaces().createNamespace(namespace1);
        // add replication cluster
        admin1.namespaces().setNamespaceReplicationClusters(namespace1, Sets.newHashSet("r1"));
        admin1.clusters().updatePeerClusterNames("r3", null);

        // disable tls as redirection url is prepared according tls configuration
        pulsar1.getConfiguration().setTlsEnabled(false);
        pulsar2.getConfiguration().setTlsEnabled(false);
        pulsar3.getConfiguration().setTlsEnabled(false);

        final String topic1 = "persistent://" + namespace1 + "/topic1";

        @Cleanup
        PulsarClient client3 = PulsarClient.builder().serviceUrl(serviceUrl).statsInterval(0, TimeUnit.SECONDS).build();

        // set peer-clusters : r3->r1
        admin1.clusters().updatePeerClusterNames("r3", Sets.newLinkedHashSet(Lists.newArrayList("r1")));
        admin1.clusters().updatePeerClusterNames("r1", Sets.newLinkedHashSet(Lists.newArrayList("r3")));
        Producer<byte[]> producer = client3.newProducer().topic(topic1).create();
        PersistentTopic topic = (PersistentTopic) pulsar1.getBrokerService().getOrCreateTopic(topic1).get();
        assertNotNull(topic);
        pulsar1.getBrokerService().updateRates();
        // verify the producer via the owning cluster (admin1) and via the redirected admin3
        TopicStats stats = admin1.topics().getStats(topic1);
        assertNotNull(stats);
        assertEquals(stats.publishers.size(), 1);
        stats = admin3.topics().getStats(topic1);
        assertNotNull(stats);
        assertEquals(stats.publishers.size(), 1);
        producer.close();

        // change the repl cluster to peer-cluster r3 from r1: broker1 must give up ownership
        // of the bundle even without receiving an explicit unload watch.
        admin1.namespaces().setNamespaceReplicationClusters(namespace1, Sets.newHashSet("r3"));

        NamespaceBundles bundles = pulsar1.getNamespaceService().getNamespaceBundleFactory()
                .getBundles(NamespaceName.get(namespace1));
        NamespaceBundle bundle = bundles.getBundles().get(0);
        retryStrategically((test) -> {
            try {
                return !pulsar1.getNamespaceService().isNamespaceBundleOwned(bundle).get();
            } catch (Exception e) {
                return false;
            }
        }, 5, 200);
        assertFalse(pulsar1.getNamespaceService().isNamespaceBundleOwned(bundle).get());
        // topic should be unloaded from broker1
        assertFalse(pulsar1.getBrokerService().getTopics().containsKey(topic1));
    }
}
| |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.kernel.impl.transaction.state;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.neo4j.collection.primitive.PrimitiveLongIterator;
import org.neo4j.helpers.collection.Iterables;
import org.neo4j.helpers.collection.IteratorUtil;
import org.neo4j.helpers.collection.PrefetchingIterator;
import org.neo4j.helpers.collection.Visitor;
import org.neo4j.kernel.api.EntityType;
import org.neo4j.kernel.api.exceptions.EntityNotFoundException;
import org.neo4j.kernel.api.exceptions.PropertyNotFoundException;
import org.neo4j.kernel.api.index.IndexDescriptor;
import org.neo4j.kernel.api.index.NodePropertyUpdate;
import org.neo4j.kernel.api.labelscan.NodeLabelUpdate;
import org.neo4j.kernel.api.properties.Property;
import org.neo4j.kernel.impl.api.CountsAccessor;
import org.neo4j.kernel.impl.api.index.IndexStoreView;
import org.neo4j.kernel.impl.api.index.StoreScan;
import org.neo4j.kernel.impl.locking.Lock;
import org.neo4j.kernel.impl.locking.LockService;
import org.neo4j.kernel.impl.store.NeoStores;
import org.neo4j.kernel.impl.store.NodeStore;
import org.neo4j.kernel.impl.store.PropertyStore;
import org.neo4j.kernel.impl.store.StoreIdIterator;
import org.neo4j.kernel.impl.store.counts.CountsTracker;
import org.neo4j.kernel.impl.store.record.NodeRecord;
import org.neo4j.kernel.impl.store.record.PropertyBlock;
import org.neo4j.kernel.impl.store.record.PropertyRecord;
import org.neo4j.kernel.impl.store.record.Record;
import org.neo4j.register.Register.DoubleLongRegister;
import static org.neo4j.collection.primitive.PrimitiveLongCollections.EMPTY_LONG_ARRAY;
import static org.neo4j.kernel.api.labelscan.NodeLabelUpdate.labelChanges;
import static org.neo4j.kernel.impl.store.NodeLabelsField.parseLabelsField;
/**
 * {@link IndexStoreView} that reads directly from the {@link NeoStores}: node records come
 * from the {@link NodeStore}, property values from the {@link PropertyStore}, and index
 * statistics are delegated to the {@link CountsTracker}. During store scans each node is
 * read under a per-node read lock acquired through the supplied {@link LockService}.
 */
public class NeoStoreIndexStoreView implements IndexStoreView
{
    private final PropertyStore propertyStore;
    private final NodeStore nodeStore;
    private final LockService locks;
    private final CountsTracker counts;

    /**
     * @param locks service used to take per-node read locks while scanning
     * @param neoStores source of the node, property and counts stores
     */
    public NeoStoreIndexStoreView( LockService locks, NeoStores neoStores )
    {
        this.locks = locks;
        this.propertyStore = neoStores.getPropertyStore();
        this.nodeStore = neoStores.getNodeStore();
        this.counts = neoStores.getCounts();
    }

    @Override
    public DoubleLongRegister indexUpdatesAndSize( IndexDescriptor descriptor, DoubleLongRegister output )
    {
        return counts.indexUpdatesAndSize( descriptor.getLabelId(), descriptor.getPropertyKeyId(), output );
    }

    @Override
    public void replaceIndexCounts( IndexDescriptor descriptor,
                                    long uniqueElements, long maxUniqueElements, long indexSize )
    {
        int labelId = descriptor.getLabelId();
        int propertyKeyId = descriptor.getPropertyKeyId();
        try ( CountsAccessor.IndexStatsUpdater updater = counts.updateIndexCounts() )
        {
            updater.replaceIndexSample( labelId, propertyKeyId, uniqueElements, maxUniqueElements );
            // Replacing size also resets the pending-updates counter to zero.
            // (Was `0l` — uppercase L suffix avoids confusion with the digit 1.)
            updater.replaceIndexUpdateAndSize( labelId, propertyKeyId, 0L, indexSize );
        }
    }

    @Override
    public void incrementIndexUpdates( IndexDescriptor descriptor, long updatesDelta )
    {
        try ( CountsAccessor.IndexStatsUpdater updater = counts.updateIndexCounts() )
        {
            updater.incrementIndexUpdates( descriptor.getLabelId(), descriptor.getPropertyKeyId(), updatesDelta );
        }
    }

    @Override
    public DoubleLongRegister indexSample( IndexDescriptor descriptor, DoubleLongRegister output )
    {
        return counts.indexSample( descriptor.getLabelId(), descriptor.getPropertyKeyId(), output );
    }

    /**
     * Scan producing an ADD {@link NodePropertyUpdate} for every in-use node that carries
     * both the label and the property key of the given index descriptor.
     */
    @Override
    public <FAILURE extends Exception> StoreScan<FAILURE> visitNodesWithPropertyAndLabel(
            IndexDescriptor descriptor, final Visitor<NodePropertyUpdate, FAILURE> visitor )
    {
        final int soughtLabelId = descriptor.getLabelId();
        final int soughtPropertyKeyId = descriptor.getPropertyKeyId();
        return new NodeStoreScan<NodePropertyUpdate, FAILURE>()
        {
            @Override
            protected NodePropertyUpdate read( NodeRecord node )
            {
                long[] labels = parseLabelsField( node ).get( nodeStore );
                if ( !containsLabel( soughtLabelId, labels ) )
                {
                    // Node lacks the indexed label: nothing to report.
                    return null;
                }
                for ( PropertyBlock property : properties( node ) )
                {
                    int propertyKeyId = property.getKeyIndexId();
                    if ( soughtPropertyKeyId == propertyKeyId )
                    {
                        return NodePropertyUpdate.add( node.getId(), propertyKeyId, valueOf( property ), labels );
                    }
                }
                // Node has the label but not the indexed property.
                return null;
            }

            @Override
            protected void process( NodePropertyUpdate update ) throws FAILURE
            {
                visitor.visit( update );
            }
        };
    }

    /**
     * Scan producing, for every in-use node, a label update plus — when the node carries any
     * of the sought labels — one ADD property update per matching property key.
     */
    @Override
    public <FAILURE extends Exception> StoreScan<FAILURE> visitNodes(
            final int[] labelIds, final int[] propertyKeyIds,
            final Visitor<NodePropertyUpdate, FAILURE> propertyUpdateVisitor,
            final Visitor<NodeLabelUpdate, FAILURE> labelUpdateVisitor )
    {
        return new NodeStoreScan<Update, FAILURE>()
        {
            @Override
            protected Update read( NodeRecord node )
            {
                long[] labels = parseLabelsField( node ).get( nodeStore );
                Update update = new Update( node.getId(), labels );
                if ( !containsAnyLabel( labelIds, labels ) )
                {
                    // Still emit the label update, but skip property reads entirely.
                    return update;
                }
                // Each property block contributes at most one update; the labelled break
                // moves on to the next block as soon as its key matches.
                properties: for ( PropertyBlock property : properties( node ) )
                {
                    int propertyKeyId = property.getKeyIndexId();
                    for ( int sought : propertyKeyIds )
                    {
                        if ( propertyKeyId == sought )
                        {
                            update.add( NodePropertyUpdate
                                    .add( node.getId(), propertyKeyId, valueOf( property ), labels ) );
                            continue properties;
                        }
                    }
                }
                return update;
            }

            @Override
            protected void process( Update update ) throws FAILURE
            {
                labelUpdateVisitor.visit( update.labels );
                for ( NodePropertyUpdate propertyUpdate : update )
                {
                    propertyUpdateVisitor.visit( propertyUpdate );
                }
            }
        };
    }

    /**
     * Materializes a node's current state as ADD updates — one per stored property —
     * or an empty iterable when the node cannot appear in any index.
     */
    @Override
    public Iterable<NodePropertyUpdate> nodeAsUpdates( long nodeId )
    {
        NodeRecord node = nodeStore.forceGetRecord( nodeId );
        if ( !node.inUse() )
        {
            return Iterables.empty(); // node not in use => no updates
        }
        long firstPropertyId = node.getNextProp();
        if ( firstPropertyId == Record.NO_NEXT_PROPERTY.intValue() )
        {
            return Iterables.empty(); // no properties => no updates (it's not going to be in any index)
        }
        long[] labels = parseLabelsField( node ).get( nodeStore );
        if ( labels.length == 0 )
        {
            return Iterables.empty(); // no labels => no updates (it's not going to be in any index)
        }
        ArrayList<NodePropertyUpdate> updates = new ArrayList<>();
        for ( PropertyRecord propertyRecord : propertyStore.getPropertyRecordChain( firstPropertyId ) )
        {
            for ( PropertyBlock property : propertyRecord )
            {
                Object value = property.getType().getValue( property, propertyStore );
                updates.add( NodePropertyUpdate.add( node.getId(), property.getKeyIndexId(), value, labels ) );
            }
        }
        return updates;
    }

    /**
     * Reads a single property of a node straight from the store.
     *
     * @throws EntityNotFoundException if the node record is not in use
     * @throws PropertyNotFoundException if the node has no property with the given key
     */
    @Override
    public Property getProperty( long nodeId, int propertyKeyId ) throws EntityNotFoundException, PropertyNotFoundException
    {
        NodeRecord node = nodeStore.forceGetRecord( nodeId );
        if ( !node.inUse() )
        {
            throw new EntityNotFoundException( EntityType.NODE, nodeId );
        }
        long firstPropertyId = node.getNextProp();
        if ( firstPropertyId == Record.NO_NEXT_PROPERTY.intValue() )
        {
            throw new PropertyNotFoundException( propertyKeyId, EntityType.NODE, nodeId );
        }
        for ( PropertyRecord propertyRecord : propertyStore.getPropertyRecordChain( firstPropertyId ) )
        {
            PropertyBlock propertyBlock = propertyRecord.getPropertyBlock( propertyKeyId );
            if ( propertyBlock != null )
            {
                return propertyBlock.newPropertyData( propertyStore );
            }
        }
        throw new PropertyNotFoundException( propertyKeyId, EntityType.NODE, nodeId );
    }

    private Object valueOf( PropertyBlock property )
    {
        // Make sure the value is loaded, even if it's of a "heavy" kind.
        propertyStore.ensureHeavy( property );
        return property.getType().getValue( property, propertyStore );
    }

    /** Lazily iterates all property blocks of the given node's property chain. */
    private Iterable<PropertyBlock> properties( final NodeRecord node )
    {
        return new Iterable<PropertyBlock>()
        {
            @Override
            public Iterator<PropertyBlock> iterator()
            {
                return new PropertyBlockIterator( node );
            }
        };
    }

    private static boolean containsLabel( int sought, long[] labels )
    {
        for ( long label : labels )
        {
            if ( label == sought )
            {
                return true;
            }
        }
        return false;
    }

    private static boolean containsAnyLabel( int[] soughtIds, long[] labels )
    {
        for ( int soughtId : soughtIds )
        {
            if ( containsLabel( soughtId, labels ) )
            {
                return true;
            }
        }
        return false;
    }

    /**
     * Result of reading one node in {@link #visitNodes}: the node's label change plus the
     * property updates gathered for it, iterable in insertion order.
     */
    private static class Update implements Iterable<NodePropertyUpdate>
    {
        private final NodeLabelUpdate labels;
        private final List<NodePropertyUpdate> propertyUpdates = new ArrayList<>();

        Update( long nodeId, long[] labels )
        {
            // An initial scan sees every label as newly added, hence the empty "before" array.
            this.labels = labelChanges( nodeId, EMPTY_LONG_ARRAY, labels );
        }

        void add( NodePropertyUpdate update )
        {
            propertyUpdates.add( update );
        }

        @Override
        public Iterator<NodePropertyUpdate> iterator()
        {
            return propertyUpdates.iterator();
        }
    }

    /** Flattens a node's chain of property records into a stream of property blocks. */
    private class PropertyBlockIterator extends PrefetchingIterator<PropertyBlock>
    {
        private final Iterator<PropertyRecord> records;
        private Iterator<PropertyBlock> blocks = IteratorUtil.emptyIterator();

        PropertyBlockIterator( NodeRecord node )
        {
            long firstPropertyId = node.getNextProp();
            if ( firstPropertyId == Record.NO_NEXT_PROPERTY.intValue() )
            {
                records = IteratorUtil.emptyIterator();
            }
            else
            {
                records = propertyStore.getPropertyRecordChain( firstPropertyId ).iterator();
            }
        }

        @Override
        protected PropertyBlock fetchNextOrNull()
        {
            for (; ; )
            {
                if ( blocks.hasNext() )
                {
                    return blocks.next();
                }
                if ( !records.hasNext() )
                {
                    return null;
                }
                // Current record exhausted: move to the next record's blocks.
                blocks = records.next().iterator();
            }
        }
    }

    /**
     * Template for a full node-store scan: visits every in-use node (read under a node
     * read lock), turns it into a RESULT via {@link #read}, and hands non-null results to
     * {@link #process}. {@link #stop()} may be called from another thread; the flag is
     * volatile so the scanning thread observes it promptly.
     */
    private abstract class NodeStoreScan<RESULT, FAILURE extends Exception> implements StoreScan<FAILURE>
    {
        private volatile boolean continueScanning;

        protected abstract RESULT read( NodeRecord node );

        protected abstract void process( RESULT result ) throws FAILURE;

        @Override
        public void run() throws FAILURE
        {
            PrimitiveLongIterator nodeIds = new StoreIdIterator( nodeStore );
            continueScanning = true;
            while ( continueScanning && nodeIds.hasNext() )
            {
                long id = nodeIds.next();
                RESULT result = null;
                // Read the record under the node lock, but process outside it.
                try ( Lock ignored = locks.acquireNodeLock( id, LockService.LockType.READ_LOCK ) )
                {
                    NodeRecord record = nodeStore.forceGetRecord( id );
                    if ( record.inUse() )
                    {
                        result = read( record );
                    }
                }
                if ( result != null )
                {
                    process( result );
                }
            }
        }

        @Override
        public void stop()
        {
            continueScanning = false;
        }
    }
}
| |
/*
* Copyright 2019 University of Hildesheim, Software Systems Engineering
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.ssehub.kernel_haven.util.io.json;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.sameInstance;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import java.util.Iterator;
import java.util.Map.Entry;
import org.junit.Test;
import net.ssehub.kernel_haven.util.FormatException;
/**
 * Tests the structure classes inheriting from {@link JsonElement}.
 *
 * @author Adam
 */
public class JsonElementTest {

    /**
     * Tests the getString() method of {@link JsonObject}.
     *
     * @throws FormatException unwanted.
     */
    @Test
    public void testObjectGetString() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.putElement("a", new JsonString("b"));
        assertThat(obj.getString("a"), is("b"));
    }

    /**
     * Tests the getString() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetStringNoKey() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.getString("a");
    }

    /**
     * Tests the getString() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetStringWrongType() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.putElement("a", new JsonNumber(2));
        obj.getString("a");
    }

    /**
     * Tests the getBoolean() method of {@link JsonObject}.
     *
     * @throws FormatException unwanted.
     */
    @Test
    public void testObjectGetBoolean() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.putElement("a", JsonBoolean.TRUE);
        assertThat(obj.getBoolean("a"), is(true));
    }

    /**
     * Tests the getBoolean() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetBooleanNoKey() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.getBoolean("a");
    }

    /**
     * Tests the getBoolean() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetBooleanWrongType() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.putElement("a", new JsonNumber(2));
        obj.getBoolean("a");
    }

    /**
     * Tests the getInt() method of {@link JsonObject}.
     *
     * @throws FormatException unwanted.
     */
    @Test
    public void testObjectGetInt() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.putElement("a", new JsonNumber(4));
        assertThat(obj.getInt("a"), is(4));
    }

    /**
     * Tests the getInt() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetIntNoKey() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.getInt("a");
    }

    /**
     * Tests the getInt() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetIntWrongType() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.putElement("a", new JsonString("abc"));
        obj.getInt("a");
    }

    /**
     * Tests the getInt() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetIntWrongNumberType() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.putElement("a", new JsonNumber(2.5));
        obj.getInt("a");
    }

    /**
     * Tests the getLong() method of {@link JsonObject}.
     *
     * @throws FormatException unwanted.
     */
    @Test
    public void testObjectGetLong() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.putElement("a", new JsonNumber(4));
        assertThat(obj.getLong("a"), is(4L));
        obj.putElement("b", new JsonNumber(12321321L));
        assertThat(obj.getLong("b"), is(12321321L));
    }

    /**
     * Tests the getLong() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetLongNoKey() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.getLong("a");
    }

    /**
     * Tests the getLong() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetLongWrongType() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.putElement("a", new JsonString("abc"));
        obj.getLong("a");
    }

    /**
     * Tests the getLong() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetLongWrongNumberType() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.putElement("a", new JsonNumber(2.5));
        obj.getLong("a");
    }

    /**
     * Tests the getDouble() method of {@link JsonObject}.
     *
     * @throws FormatException unwanted.
     */
    @Test
    public void testObjectGetDouble() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.putElement("a", new JsonNumber(4.2));
        assertThat(obj.getDouble("a"), is(4.2));
    }

    /**
     * Tests the getDouble() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetDoubleNoKey() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.getDouble("a");
    }

    /**
     * Tests the getDouble() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetDoubleWrongType() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.putElement("a", new JsonString("abc"));
        obj.getDouble("a");
    }

    /**
     * Tests the getDouble() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetDoubleWrongNumberType() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.putElement("a", new JsonNumber(2));
        obj.getDouble("a");
    }

    /**
     * Tests the getList() method of {@link JsonObject}.
     *
     * @throws FormatException unwanted.
     */
    @Test
    public void testObjectGetList() throws FormatException {
        JsonObject obj = new JsonObject();
        JsonList l = new JsonList();
        l.addElement(new JsonNumber(1));
        l.addElement(new JsonNumber(2));
        obj.putElement("a", l);
        assertThat(obj.getList("a"), is(l));
    }

    /**
     * Tests the getList() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetListNoKey() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.getList("a");
    }

    /**
     * Tests the getList() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetListWrongType() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.putElement("a", new JsonNumber(2));
        obj.getList("a");
    }

    /**
     * Tests the getObject() method of {@link JsonObject}.
     *
     * @throws FormatException unwanted.
     */
    @Test
    public void testObjectGetObject() throws FormatException {
        JsonObject obj = new JsonObject();
        JsonObject o1 = new JsonObject();
        o1.putElement("abc", new JsonNumber(1));
        o1.putElement("def", new JsonNumber(2));
        obj.putElement("a", o1);
        assertThat(obj.getObject("a"), is(o1));
    }

    /**
     * Tests the getObject() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetObjectNoKey() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.getObject("a");
    }

    /**
     * Tests the getObject() method of {@link JsonObject}.
     *
     * @throws FormatException wanted.
     */
    @Test(expected = FormatException.class)
    public void testObjectGetObjectWrongType() throws FormatException {
        JsonObject obj = new JsonObject();
        obj.putElement("a", new JsonNumber(2));
        obj.getObject("a");
    }

    /**
     * Tests the basic set operations of {@link JsonObject}.
     * (Renamed from testObjectSetOpreations — typo fix; JUnit discovers tests by
     * annotation, so no caller depends on the old name.)
     */
    @Test
    public void testObjectSetOperations() {
        JsonObject obj = new JsonObject();
        assertThat(obj.getSize(), is(0));
        Iterator<Entry<String, JsonElement>> it = obj.iterator();
        assertThat(it.hasNext(), is(false));
        obj.putElement("a", new JsonNumber(2));
        assertThat(obj.getSize(), is(1));
        assertThat(obj.getElement("a"), is(new JsonNumber(2)));
        it = obj.iterator();
        assertThat(it.next().getKey(), is("a"));
        assertThat(it.hasNext(), is(false));
        obj.removeElement("a");
        assertThat(obj.getSize(), is(0));
        // Re-putting the same key must overwrite, not duplicate.
        obj.putElement("a", new JsonNumber(2));
        obj.putElement("a", new JsonNumber(3));
        obj.putElement("b", new JsonNumber(2));
        assertThat(obj.getSize(), is(2));
        assertThat(obj.getElement("a"), is(new JsonNumber(3)));
        assertThat(obj.getElement("b"), is(new JsonNumber(2)));
        it = obj.iterator();
        assertThat(it.next().getKey(), is("a"));
        assertThat(it.next().getKey(), is("b"));
        assertThat(it.hasNext(), is(false));
    }

    /**
     * Tests the basic list operations of {@link JsonList}.
     */
    // CHECKSTYLE:OFF // method length
    @Test
    // CHECKSTYLE:ON
    public void testListListOperations() {
        JsonList l = new JsonList();
        assertThat(l.getSize(), is(0));
        Iterator<JsonElement> it = l.iterator();
        assertThat(it.hasNext(), is(false));
        l.addElement(new JsonNumber(2));
        assertThat(l.getSize(), is(1));
        assertThat(l.getElement(0), is(new JsonNumber(2)));
        it = l.iterator();
        assertThat(it.next(), is(new JsonNumber(2)));
        assertThat(it.hasNext(), is(false));
        l.addElement(new JsonString("abc"));
        l.addElement(new JsonObject());
        assertThat(l.getSize(), is(3));
        assertThat(l.getElement(0), is(new JsonNumber(2)));
        assertThat(l.getElement(1), is(new JsonString("abc")));
        assertThat(l.getElement(2), is(new JsonObject()));
        it = l.iterator();
        assertThat(it.next(), is(new JsonNumber(2)));
        assertThat(it.next(), is(new JsonString("abc")));
        assertThat(it.next(), is(new JsonObject()));
        assertThat(it.hasNext(), is(false));
        l.setElement(1, new JsonNumber(0));
        assertThat(l.getSize(), is(3));
        assertThat(l.getElement(0), is(new JsonNumber(2)));
        assertThat(l.getElement(1), is(new JsonNumber(0)));
        assertThat(l.getElement(2), is(new JsonObject()));
        it = l.iterator();
        assertThat(it.next(), is(new JsonNumber(2)));
        assertThat(it.next(), is(new JsonNumber(0)));
        assertThat(it.next(), is(new JsonObject()));
        assertThat(it.hasNext(), is(false));
        l.removeElement(0);
        assertThat(l.getSize(), is(2));
        assertThat(l.getElement(0), is(new JsonNumber(0)));
        assertThat(l.getElement(1), is(new JsonObject()));
        it = l.iterator();
        assertThat(it.next(), is(new JsonNumber(0)));
        assertThat(it.next(), is(new JsonObject()));
        assertThat(it.hasNext(), is(false));
        // Out-of-bounds access must fail for get, remove, and set, at both ends.
        try {
            l.getElement(2);
            fail("Expected IndexOutOfBoundsException");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        try {
            l.getElement(-1);
            fail("Expected IndexOutOfBoundsException");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        try {
            l.removeElement(2);
            fail("Expected IndexOutOfBoundsException");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        try {
            l.removeElement(-1);
            fail("Expected IndexOutOfBoundsException");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        try {
            l.setElement(2, new JsonObject());
            fail("Expected IndexOutOfBoundsException");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
        try {
            l.setElement(-1, new JsonObject());
            fail("Expected IndexOutOfBoundsException");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
    }

    /**
     * Tests the trivial methods of {@link JsonNull}.
     */
    @Test
    public void testJsonNull() {
        JsonNull nul = JsonNull.INSTANCE;
        assertThat(nul.getValue(), sameInstance(JsonNull.INSTANCE));
    }

    /**
     * Tests the trivial methods of {@link JsonBoolean}.
     */
    @Test
    public void testJsonBoolean() {
        JsonBoolean bool = JsonBoolean.get(true);
        assertThat(bool.getValue(), is(true));
        bool = JsonBoolean.get(false);
        assertThat(bool.getValue(), is(false));
    }

    /**
     * Tests the equals() and hashCode() methods of {@link JsonBoolean}.
     */
    @Test
    @SuppressWarnings("null")
    public void testEqualsBoolean() {
        assertThat(JsonBoolean.TRUE, is(JsonBoolean.TRUE));
        assertThat(JsonBoolean.FALSE, is(JsonBoolean.FALSE));
        assertThat(JsonBoolean.FALSE, not(is(JsonBoolean.TRUE)));
        assertThat(JsonBoolean.TRUE, not(is(JsonBoolean.FALSE)));
        // Booleans must not compare equal to look-alike strings or numbers.
        assertThat(JsonBoolean.TRUE, not(is(new JsonString("true"))));
        assertThat(JsonBoolean.TRUE, not(is(new JsonNumber(1))));
        assertThat(JsonBoolean.FALSE, not(is(new JsonString("false"))));
        assertThat(JsonBoolean.FALSE, not(is(new JsonNumber(0))));
        assertThat(JsonBoolean.TRUE.hashCode(), is(JsonBoolean.TRUE.hashCode()));
        assertThat(JsonBoolean.FALSE.hashCode(), is(JsonBoolean.FALSE.hashCode()));
        assertThat(JsonBoolean.FALSE.hashCode(), not(is(JsonBoolean.TRUE.hashCode())));
        assertThat(JsonBoolean.TRUE.hashCode(), not(is(JsonBoolean.FALSE.hashCode())));
    }

    /**
     * Tests the equals() and hashCode() methods of {@link JsonNull}.
     */
    @Test
    @SuppressWarnings("null")
    public void testEqualsNull() {
        assertThat(JsonNull.INSTANCE, is(JsonNull.INSTANCE));
        assertThat(JsonNull.INSTANCE, not(is(JsonBoolean.TRUE)));
        assertThat(JsonNull.INSTANCE, not(is(JsonBoolean.FALSE)));
        assertThat(JsonNull.INSTANCE, not(is(new JsonString("null"))));
        assertThat(JsonNull.INSTANCE, not(is(new JsonNumber(0))));
        assertThat(JsonNull.INSTANCE.hashCode(), is(JsonNull.INSTANCE.hashCode()));
        assertThat(JsonNull.INSTANCE.hashCode(), not(is(JsonBoolean.TRUE.hashCode())));
        assertThat(JsonNull.INSTANCE.hashCode(), not(is(JsonBoolean.FALSE.hashCode())));
    }

    /**
     * Tests the equals() and hashCode() methods of {@link JsonNumber}.
     */
    @Test
    @SuppressWarnings("null")
    public void testEqualsNumber() {
        assertThat(new JsonNumber(1), is(new JsonNumber(1)));
        assertThat(new JsonNumber(1L), is(new JsonNumber(1L)));
        assertThat(new JsonNumber(-2.5), is(new JsonNumber(-2.5)));
        assertThat(new JsonNumber(1), not(is(new JsonString("1"))));
        assertThat(new JsonNumber(1), not(is(JsonBoolean.TRUE)));
        assertThat(new JsonNumber(1).hashCode(), is(new JsonNumber(1).hashCode()));
        assertThat(new JsonNumber(25L).hashCode(), is(new JsonNumber(25L).hashCode()));
        assertThat(new JsonNumber(-123.5).hashCode(), is(new JsonNumber(-123.5).hashCode()));
        assertThat(new JsonNumber(1).hashCode(), not(is(new JsonNumber(2).hashCode())));
    }

    /**
     * Tests the equals() and hashCode() methods of {@link JsonString}.
     */
    @Test
    public void testEqualsString() {
        assertThat(new JsonString("abc"), is(new JsonString("abc")));
        assertThat(new JsonString(""), is(new JsonString("")));
        assertThat(new JsonString(""), not(is(new JsonString("a"))));
        assertThat(new JsonString("b"), not(is(new JsonString("a"))));
        assertThat(new JsonString("5"), not(is(new JsonNumber(5))));
        assertThat(new JsonString("abc").hashCode(), is(new JsonString("abc").hashCode()));
        assertThat(new JsonString("").hashCode(), is(new JsonString("").hashCode()));
        assertThat(new JsonString("").hashCode(), not(is(new JsonString("a").hashCode())));
        assertThat(new JsonString("b").hashCode(), not(is(new JsonString("a").hashCode())));
        assertThat(new JsonString("5").hashCode(), not(is(new JsonNumber(5).hashCode())));
    }

    /**
     * Tests the equals() and hashCode() methods of {@link JsonList}.
     */
    @Test
    public void testEqualsList() {
        JsonList l1 = new JsonList();
        JsonList l2 = new JsonList();
        assertThat(l1, is(l2));
        assertThat(l1.hashCode(), is(l2.hashCode()));
        l1.addElement(new JsonNumber(1));
        assertThat(l1, not(is(l2)));
        assertThat(l1.hashCode(), not(is(l2.hashCode())));
        l2.addElement(new JsonNumber(1));
        assertThat(l1, is(l2));
        assertThat(l1.hashCode(), is(l2.hashCode()));
        l1.addElement(new JsonString("a"));
        l2.addElement(new JsonString("b"));
        assertThat(l1, not(is(l2)));
        assertThat(l1.hashCode(), not(is(l2.hashCode())));
        // Same elements in a different order must not be equal.
        l1 = new JsonList();
        l2 = new JsonList();
        l1.addElement(new JsonNumber(1));
        l1.addElement(new JsonNumber(2));
        l2.addElement(new JsonNumber(2));
        l2.addElement(new JsonNumber(1));
        assertThat(l1, not(is(l2)));
        assertThat(l1.hashCode(), not(is(l2.hashCode())));
        assertThat(new JsonList(), not(is(new JsonString("[]"))));
    }

    /**
     * Tests the equals() and hashCode() methods of {@link JsonObject}.
     */
    @Test
    public void testEqualsObject() {
        JsonObject o1 = new JsonObject();
        JsonObject o2 = new JsonObject();
        assertThat(o1, is(o2));
        assertThat(o1.hashCode(), is(o2.hashCode()));
        o1.putElement("a", new JsonNumber(1));
        assertThat(o1, not(is(o2)));
        assertThat(o1.hashCode(), not(is(o2.hashCode())));
        o2.putElement("a", new JsonNumber(1));
        assertThat(o1, is(o2));
        assertThat(o1.hashCode(), is(o2.hashCode()));
        o1.putElement("b", new JsonString("b"));
        o2.putElement("c", new JsonString("b"));
        assertThat(o1, not(is(o2)));
        assertThat(o1.hashCode(), not(is(o2.hashCode())));
        o2.removeElement("c");
        o2.putElement("b", new JsonString("b"));
        assertThat(o1, is(o2));
        assertThat(o1.hashCode(), is(o2.hashCode()));
        assertThat(new JsonObject(), not(is(new JsonString("{}"))));
    }
}
| |
/*
* Copyright (c) 2011-2014 Julien Nicoulaud <julien.nicoulaud@gmail.com>
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package net.nicoulaj.idea.markdown.editor;
import com.intellij.codeHighlighting.BackgroundEditorHighlighter;
import com.intellij.ide.structureView.StructureViewBuilder;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorLocation;
import com.intellij.openapi.fileEditor.FileEditorState;
import com.intellij.openapi.fileEditor.FileEditorStateLevel;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.UserDataHolderBase;
import com.intellij.ui.components.JBScrollPane;
import net.nicoulaj.idea.markdown.MarkdownBundle;
import net.nicoulaj.idea.markdown.settings.MarkdownGlobalSettings;
import net.nicoulaj.idea.markdown.settings.MarkdownGlobalSettingsListener;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.pegdown.PegDownProcessor;
import javax.swing.*;
import javax.swing.text.DefaultCaret;
import javax.swing.text.html.HTMLEditorKit;
import javax.swing.text.html.StyleSheet;
import java.awt.*;
import java.beans.PropertyChangeListener;
/**
* {@link FileEditor} implementation that provides rendering preview for Markdown documents.
* <p/>
* The preview is generated by <a href="https://github.com/sirthias/pegdown">pegdown</a>.
*
* @author Julien Nicoulaud <julien.nicoulaud@gmail.com>
* @author Roger Grantham (https://github.com/grantham)
* @see <a href="https://github.com/sirthias/pegdown">pegdown library</a>
* @see MarkdownPreviewEditorProvider
* @since 0.1
*/
public class MarkdownPreviewEditor extends UserDataHolderBase implements FileEditor {

    /** Logger. */
    private static final Logger LOGGER = Logger.getInstance(MarkdownPreviewEditor.class);

    /** The editor name, displayed as the tab name of the editor. */
    public static final String EDITOR_NAME = MarkdownBundle.message("markdown.editor.preview.tab-name");

    /** The path to the stylesheet used for displaying the HTML preview of the document. */
    @NonNls
    public static final String PREVIEW_STYLESHEET_PATH = "/net/nicoulaj/idea/markdown/preview.css";

    /** The {@link java.awt.Component} used to render the HTML preview. */
    protected final JEditorPane jEditorPane = new JEditorPane();

    /** The {@link JBScrollPane} allowing to browse {@link #jEditorPane}. */
    protected final JBScrollPane scrollPane = new JBScrollPane(jEditorPane);

    /** The {@link Document} previewed in this editor. */
    protected final Document document;

    /**
     * The {@link PegDownProcessor} used for building the document AST.
     * <p/>
     * Thread-local because {@link PegDownProcessor} is not thread-safe. Volatile because it is
     * reassigned from the settings-change listener, which may run on a different thread than the
     * one rendering the preview.
     */
    private volatile ThreadLocal<PegDownProcessor> processor = initProcessor();

    /**
     * Build a fresh thread-local {@link PegDownProcessor} holder configured from the current
     * global settings (enabled extensions and parsing timeout).
     *
     * @return a new {@link ThreadLocal} whose initial value reflects the settings at creation time.
     */
    private static ThreadLocal<PegDownProcessor> initProcessor() {
        return new ThreadLocal<PegDownProcessor>() {
            @Override protected PegDownProcessor initialValue() {
                return new PegDownProcessor(MarkdownGlobalSettings.getInstance().getExtensionsValue(),
                                            MarkdownGlobalSettings.getInstance().getParsingTimeout());
            }
        };
    }

    /** Indicates whether the HTML preview is obsolete and should regenerated from the Markdown {@link #document}. */
    protected boolean previewIsObsolete = true;

    /**
     * Build a new instance of {@link MarkdownPreviewEditor}.
     * <p/>
     * NOTE(review): the document and settings listeners registered here are never removed; they
     * live as long as the document/settings singleton. TODO confirm this matches the platform's
     * expected lifecycle (e.g. unregister in {@link #dispose()}).
     *
     * @param project  the {@link Project} containing the document
     * @param document the {@link com.intellij.openapi.editor.Document} previewed in this editor.
     */
    public MarkdownPreviewEditor(@NotNull Project project, @NotNull Document document) {
        this.document = document;

        // Listen to the document modifications: mark the preview stale so it is regenerated
        // lazily the next time this editor tab is selected (see selectNotify()).
        this.document.addDocumentListener(new DocumentAdapter() {
            @Override
            public void documentChanged(DocumentEvent e) {
                previewIsObsolete = true;
            }
        });

        // Listen to settings changes: rebuild the processor with the new extension flags and
        // timeout, and mark the preview stale.
        MarkdownGlobalSettings.getInstance().addListener(new MarkdownGlobalSettingsListener() {
            public void handleSettingsChanged(@NotNull final MarkdownGlobalSettings newSettings) {
                // FIX: the previous code called initProcessor() and discarded its return value,
                // so settings changes never actually took effect. Reassign the field instead.
                processor = initProcessor();
                previewIsObsolete = true;
            }
        });

        // Setup the editor pane for rendering HTML.
        final HTMLEditorKit kit = new MarkdownEditorKit(document);
        final StyleSheet style = new StyleSheet();
        style.importStyleSheet(MarkdownPreviewEditor.class.getResource(PREVIEW_STYLESHEET_PATH));
        kit.setStyleSheet(style);
        jEditorPane.setEditorKit(kit);
        jEditorPane.setEditable(false);

        // Set the editor pane position to top left, and do not let it reset it when the
        // preview HTML is replaced.
        jEditorPane.getCaret().setMagicCaretPosition(new Point(0, 0));
        ((DefaultCaret) jEditorPane.getCaret()).setUpdatePolicy(DefaultCaret.NEVER_UPDATE);

        // Add a custom link listener which can resolve local link references.
        jEditorPane.addHyperlinkListener(new MarkdownLinkListener(jEditorPane, project, document));
    }

    /**
     * Get the {@link java.awt.Component} to display as this editor's UI.
     *
     * @return a scrollable {@link JEditorPane}.
     */
    @NotNull
    public JComponent getComponent() {
        return scrollPane;
    }

    /**
     * Get the component to be focused when the editor is opened.
     *
     * @return {@link #scrollPane}
     */
    @Nullable
    public JComponent getPreferredFocusedComponent() {
        return scrollPane;
    }

    /**
     * Get the editor displayable name.
     *
     * @return {@link #EDITOR_NAME}
     */
    @NotNull
    @NonNls
    public String getName() {
        return EDITOR_NAME;
    }

    /**
     * Get the state of the editor.
     * <p/>
     * Just returns {@link FileEditorState#INSTANCE} as {@link MarkdownPreviewEditor} is stateless.
     *
     * @param level the level.
     * @return {@link FileEditorState#INSTANCE}
     * @see #setState(com.intellij.openapi.fileEditor.FileEditorState)
     */
    @NotNull
    public FileEditorState getState(@NotNull FileEditorStateLevel level) {
        return FileEditorState.INSTANCE;
    }

    /**
     * Set the state of the editor.
     * <p/>
     * Does not do anything as {@link MarkdownPreviewEditor} is stateless.
     *
     * @param state the new state.
     * @see #getState(com.intellij.openapi.fileEditor.FileEditorStateLevel)
     */
    public void setState(@NotNull FileEditorState state) {
    }

    /**
     * Indicates whether the document content is modified compared to its file.
     *
     * @return {@code false} as {@link MarkdownPreviewEditor} is read-only.
     */
    public boolean isModified() {
        return false;
    }

    /**
     * Indicates whether the editor is valid.
     *
     * @return {@code true} if {@link #document} content is readable.
     */
    public boolean isValid() {
        return document.getText() != null;
    }

    /**
     * Invoked when the editor is selected.
     * <p/>
     * Regenerates the HTML preview from the Markdown document if it is obsolete. Rendering
     * failures (including pegdown parsing timeouts) are logged and leave the stale preview
     * in place for a retry on the next selection.
     */
    public void selectNotify() {
        if (previewIsObsolete) {
            try {
                jEditorPane.setText("<div id=\"markdown-preview\">" +
                                    processor.get().markdownToHtml(document.getText()) +
                                    "</div>");
                previewIsObsolete = false;
            } catch (Exception e) {
                LOGGER.error("Failed processing Markdown document", e);
            }
        }
    }

    /**
     * Invoked when the editor is deselected.
     * <p/>
     * Does nothing.
     */
    public void deselectNotify() {
    }

    /**
     * Add specified listener.
     * <p/>
     * Does nothing.
     *
     * @param listener the listener.
     */
    public void addPropertyChangeListener(@NotNull PropertyChangeListener listener) {
    }

    /**
     * Remove specified listener.
     * <p/>
     * Does nothing.
     *
     * @param listener the listener.
     */
    public void removePropertyChangeListener(@NotNull PropertyChangeListener listener) {
    }

    /**
     * Get the background editor highlighter.
     *
     * @return {@code null} as {@link MarkdownPreviewEditor} does not require highlighting.
     */
    @Nullable
    public BackgroundEditorHighlighter getBackgroundHighlighter() {
        return null;
    }

    /**
     * Get the current location.
     *
     * @return {@code null} as {@link MarkdownPreviewEditor} is not navigable.
     */
    @Nullable
    public FileEditorLocation getCurrentLocation() {
        return null;
    }

    /**
     * Get the structure view builder.
     *
     * @return TODO {@code null} as parsing/PSI is not implemented.
     */
    @Nullable
    public StructureViewBuilder getStructureViewBuilder() {
        return null;
    }

    /** Dispose the editor. */
    public void dispose() {
        Disposer.dispose(this);
    }
}
| |
package com.game.classes;
import com.badlogic.gdx.assets.AssetManager;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.Sprite;
import com.game.classes.Character;
import com.game.classes.Player;
import com.game.classes.Terrain;
import com.game.classes.TerrainProperties;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Unit tests for {@code Character}.
 * <p>
 * JUnit 4 creates a fresh instance of this class (and thus a fresh {@link #c}) for every
 * test method, so the tests do not share mutable state.
 * <p>
 * FIX: many assertions previously passed arguments as {@code assertEquals(actual, expected)};
 * JUnit's contract is {@code assertEquals(expected, actual)}, and the reversed order produces
 * misleading failure messages. Argument order is normalized below, and boolean comparisons
 * use {@code assertTrue}/{@code assertFalse}. Pass/fail behavior is unchanged.
 */
public class CharacterTest {

    // Fixture: name "", maxHealth 10, attack 20, defense 30, movement 40, attack range 1.
    Character c = new Character("", 10, 20, 30, 40, 1, new Sprite(), new Terrain(TerrainProperties.Normal, 1, 1), "test", new Player(""));

    @Test
    public void getName() throws Exception {
        assertEquals("", c.getName());
    }

    @Test
    public void setName() throws Exception {
        c.setName("test2");
        assertEquals("test2", c.getName());
    }

    @Test
    public void getMaxHealthPoints() throws Exception {
        assertEquals(10, c.getMaxHealthPoints());
    }

    @Test
    public void setMaxHealthPoints() throws Exception {
        c.setMaxHealthPoints(15);
        assertEquals(15, c.getMaxHealthPoints());
    }

    @Test
    public void setCurrentHealthPoints() throws Exception {
        c.takeDamage(5);
        // NOTE(review): expected value 35 is kept from the original test; it implies current
        // health does not start at getMaxHealthPoints() (which is 10) — TODO confirm against
        // the Character constructor.
        assertEquals(35, c.getCurrentHealthPoints());
    }

    @Test
    public void getAttackPoints() throws Exception {
        assertEquals(20, c.getAttackPoints());
    }

    @Test
    public void setAttackPoints() throws Exception {
        c.setAttackPoints(25);
        assertEquals(25, c.getAttackPoints());
    }

    @Test
    public void getDefensePoints() throws Exception {
        assertEquals(30, c.getDefensePoints());
    }

    @Test
    public void setDefensePoints() throws Exception {
        c.setDefensePoints(35);
        assertEquals(35, c.getDefensePoints());
    }

    @Test
    public void getMovementPoints() throws Exception {
        assertEquals(40, c.getMovementPoints());
    }

    @Test
    public void setMovementPoints() throws Exception {
        c.setMovementPoints(45);
        assertEquals(45, c.getMovementPoints());
    }

    @Test
    public void setDead() throws Exception {
        c.setDead(true);
        assertTrue(c.isDead());
    }

    @Test
    public void getCurrentTerrain() throws Exception {
        // TODO: no assertions — covered indirectly by forceSetCurrentTerrain().
    }

    @Test
    public void setCurrentTerrain() throws Exception {
        // Terrain too far away: the move is rejected.
        Terrain t = new Terrain(TerrainProperties.Normal, 38, 38);
        assertFalse(c.setCurrentTerrain(t, 0));
        // Adjacent, unoccupied terrain: the move succeeds.
        t = new Terrain(TerrainProperties.Normal, 1, 1);
        t.setCharacter(null);
        assertTrue(c.setCurrentTerrain(t, 0));
    }

    @Test
    public void setPosition() throws Exception {
        int[] i = {2, 2};
        c.setPosition(i);
        // Same-reference comparison is intentional here.
        assertEquals(i, c.getPosition());
    }

    @Test
    public void setPlayer() throws Exception {
        Player p = new Player("testPlayer");
        c.setPlayer(p);
        assertEquals(p, c.getPlayer());
    }

    @Test
    public void getCurrentHealthPoints() throws Exception {
        assertEquals(10, c.getCurrentHealthPoints());
    }

    @Test
    public void isDead() throws Exception {
        assertFalse(c.isDead());
    }

    @Test
    public void getPlayer() throws Exception {
        assertEquals("", c.getPlayer().getName());
    }

    @Test
    public void hasAttacked() throws Exception {
        c.setHasAttacked(true);
        assertTrue(c.hasAttacked());
    }

    @Test
    public void forceSetCurrentTerrain() throws Exception {
        Terrain newTerrain = new Terrain(TerrainProperties.Normal, 1, 1);
        c.forceSetCurrentTerrain(newTerrain);
        assertEquals(1, c.getCurrentTerrain().getX());
    }

    @Test
    public void takeDamage() throws Exception {
        c.takeDamage(5);
        assertEquals(35, c.getCurrentHealthPoints());
        // Overkill damage must flip the dead flag.
        c.takeDamage(100);
        assertTrue(c.isDead());
    }

    @Test
    public void canMove() throws Exception {
        // Out of movement range.
        assertFalse(c.canMove(new Terrain(TerrainProperties.Normal, 38, 38)));
        c.setCurrentMovementPoints(40);
        // Occupied impassable terrain.
        Terrain t = new Terrain(TerrainProperties.Impassable, 1, 1);
        t.setCharacter(c);
        assertFalse(c.canMove(t));
        // Unoccupied but still impassable.
        t.setCharacter(null);
        assertFalse(c.canMove(t));
        // Unoccupied, passable, in range.
        t = new Terrain(TerrainProperties.Normal, 1, 1);
        assertTrue(c.canMove(t));
    }

    @Test
    public void canAttack() throws Exception {
        // Empty terrain: nothing to attack.
        Terrain t = new Terrain(TerrainProperties.Normal, 1, 1);
        assertFalse(c.canAttack(t));
        // Terrain holding an enemy: attackable.
        Terrain enemyterrain = new Terrain(TerrainProperties.Normal, 1, 1);
        Character enemy = new Character("", 10, 20, 30, 40, 1, new Sprite(), new Terrain(TerrainProperties.Normal, 1, 1), "test", new Player(""));
        enemyterrain.setCharacter(enemy);
        assertTrue(c.canAttack(enemyterrain));
        // Only one attack per turn.
        c.setHasAttacked(true);
        assertFalse(c.canAttack(enemyterrain));
    }

    @Test
    public void currentMovementPoints() throws Exception {
        int movementpoints = 40;
        c.setCurrentMovementPoints(movementpoints);
        assertEquals(movementpoints, c.getCurrentMovementPoints());
    }

    @Test
    public void sprite() throws Exception {
        Sprite sprite = new Sprite();
        c.setSprite(sprite);
        assertEquals(sprite, c.getSprite());
        assertEquals("test", c.getSpriteTexture());
    }

    @Test
    public void attackRange() throws Exception {
        assertEquals(1, c.getAttackRange());
    }
}
| |
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.rules;
import com.facebook.buck.android.AndroidBinaryDescription;
import com.facebook.buck.android.AndroidBuildConfigDescription;
import com.facebook.buck.android.AndroidInstrumentationApkDescription;
import com.facebook.buck.android.AndroidLibraryDescription;
import com.facebook.buck.android.AndroidManifestDescription;
import com.facebook.buck.android.AndroidPrebuiltAarDescription;
import com.facebook.buck.android.AndroidResourceDescription;
import com.facebook.buck.android.ApkGenruleDescription;
import com.facebook.buck.android.GenAidlDescription;
import com.facebook.buck.android.NdkLibraryDescription;
import com.facebook.buck.android.PrebuiltNativeLibraryDescription;
import com.facebook.buck.android.RobolectricTestDescription;
import com.facebook.buck.apple.AppleAssetCatalogDescription;
import com.facebook.buck.apple.AppleBundleDescription;
import com.facebook.buck.apple.AppleResourceDescription;
import com.facebook.buck.apple.AppleTestDescription;
import com.facebook.buck.apple.CoreDataModelDescription;
import com.facebook.buck.apple.AppleLibraryDescription;
import com.facebook.buck.apple.AppleBinaryDescription;
import com.facebook.buck.apple.IosPostprocessResourcesDescription;
import com.facebook.buck.apple.XcodeNativeDescription;
import com.facebook.buck.apple.XcodeProjectConfigDescription;
import com.facebook.buck.apple.XcodeWorkspaceConfigDescription;
import com.facebook.buck.cli.BuckConfig;
import com.facebook.buck.cxx.Archives;
import com.facebook.buck.cxx.CxxBinaryDescription;
import com.facebook.buck.cxx.CxxBuckConfig;
import com.facebook.buck.cxx.CxxLibraryDescription;
import com.facebook.buck.extension.BuckExtensionDescription;
import com.facebook.buck.gwt.GwtBinaryDescription;
import com.facebook.buck.java.JavaBinaryDescription;
import com.facebook.buck.java.JavaBuckConfig;
import com.facebook.buck.java.JavaCompilerEnvironment;
import com.facebook.buck.java.JavaLibraryDescription;
import com.facebook.buck.java.JavaTestDescription;
import com.facebook.buck.java.JavacOptions;
import com.facebook.buck.java.KeystoreDescription;
import com.facebook.buck.java.PrebuiltJarDescription;
import com.facebook.buck.parcelable.GenParcelableDescription;
import com.facebook.buck.python.PythonBinaryDescription;
import com.facebook.buck.python.PythonLibraryDescription;
import com.facebook.buck.python.PythonTestDescription;
import com.facebook.buck.shell.ExportFileDescription;
import com.facebook.buck.shell.GenruleDescription;
import com.facebook.buck.shell.ShBinaryDescription;
import com.facebook.buck.shell.ShTestDescription;
import com.facebook.buck.thrift.JavaThriftLibraryDescription;
import com.facebook.buck.util.AndroidDirectoryResolver;
import com.facebook.buck.util.HumanReadableException;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import java.nio.file.Path;
import java.util.Map;
/**
* A registry of all the build rules types understood by Buck.
*/
/**
 * A registry of all the build rules types understood by Buck.
 * <p>
 * Instances are immutable; the process-wide default instance is built lazily via
 * double-checked locking on {@link #createInstance}.
 */
public class KnownBuildRuleTypes {

    /** Immutable mapping from rule type to the description that knows how to build it. */
    private final ImmutableMap<BuildRuleType, Description<?>> descriptions;

    /** Immutable mapping from rule-type name (as written in build files) to rule type. */
    private final ImmutableMap<String, BuildRuleType> types;

    /** Lazily-built default instance; volatile for safe double-checked locking. */
    private static volatile KnownBuildRuleTypes defaultRules = null;

    private KnownBuildRuleTypes(
            Map<BuildRuleType, Description<?>> descriptions,
            Map<String, BuildRuleType> types) {
        this.descriptions = ImmutableMap.copyOf(descriptions);
        this.types = ImmutableMap.copyOf(types);
    }

    /**
     * Look up a build rule type by its name.
     *
     * @param named the rule-type name as it appears in build files.
     * @return the matching {@link BuildRuleType}.
     * @throws HumanReadableException if no rule type with that name is registered.
     */
    public BuildRuleType getBuildRuleType(String named) {
        BuildRuleType type = types.get(named);
        if (type == null) {
            throw new HumanReadableException("Unable to find build rule type: " + named);
        }
        return type;
    }

    /**
     * Look up the description registered for a build rule type.
     *
     * @param buildRuleType the rule type.
     * @return the registered {@link Description}.
     * @throws HumanReadableException if no description is registered for the type.
     */
    public Description<? extends ConstructorArg> getDescription(BuildRuleType buildRuleType) {
        Description<?> description = descriptions.get(buildRuleType);
        if (description == null) {
            throw new HumanReadableException(
                    "Unable to find description for build rule type: " + buildRuleType);
        }
        return description;
    }

    /** @return every registered description. */
    public ImmutableSet<Description<?>> getAllDescriptions() {
        return ImmutableSet.copyOf(descriptions.values());
    }

    /** @return a new, empty {@link Builder}. */
    public static Builder builder() {
        return new Builder();
    }

    @VisibleForTesting
    static void resetDefaultInstance() {
        defaultRules = null;
    }

    @VisibleForTesting
    static KnownBuildRuleTypes replaceDefaultInstance(
            BuckConfig config,
            AndroidDirectoryResolver androidDirectoryResolver,
            JavaCompilerEnvironment javacEnv) {
        resetDefaultInstance();
        return createInstance(config, androidDirectoryResolver, javacEnv);
    }

    /**
     * Get (building on first use) the process-wide default instance.
     * <p>
     * NOTE: once the default instance has been built, subsequent calls return it unchanged and
     * the arguments are ignored; use {@link #replaceDefaultInstance} in tests to rebuild it.
     */
    public static KnownBuildRuleTypes createInstance(
            BuckConfig config,
            AndroidDirectoryResolver androidDirectoryResolver,
            JavaCompilerEnvironment javacEnv) {
        // Fast path: volatile read avoids locking once initialized.
        if (defaultRules == null) {
            // Slow path: classic double-checked locking.
            synchronized (KnownBuildRuleTypes.class) {
                if (defaultRules == null) {
                    defaultRules = createBuilder(config, androidDirectoryResolver, javacEnv).build();
                }
            }
        }
        return defaultRules;
    }

    @VisibleForTesting
    static Builder createBuilder(
            BuckConfig config,
            AndroidDirectoryResolver androidDirectoryResolver,
            JavaCompilerEnvironment javacEnv) {

        Optional<String> ndkVersion = config.getNdkVersion();
        // If a NDK version isn't specified, we've got to reach into the runtime environment to find
        // out which one we will end up using.
        if (!ndkVersion.isPresent()) {
            ndkVersion = androidDirectoryResolver.getNdkVersion();
        }

        // Construct the C/C++ config wrapping the buck config.
        CxxBuckConfig cxxBuckConfig = new CxxBuckConfig(config);

        // Look up the path to the "ar" tool in the buck config, falling back to the default
        // if not found.
        Path archiver = config.getPath("tools", "ar").or(Archives.DEFAULT_ARCHIVE_PATH);

        // Look up the path to the PEX builder script.
        Optional<Path> pythonPathToPex = config.getPath("python", "path_to_pex");

        // Look up the path to the main module we use for python tests.
        Optional<Path> pythonPathToPythonTestMain =
                config.getPath("python", "path_to_python_test_main");

        Builder builder = builder();

        JavacOptions androidBinaryOptions = JavacOptions.builder(JavacOptions.DEFAULTS)
                .setJavaCompilerEnvironment(javacEnv)
                .build();
        builder.register(new AndroidBinaryDescription(
                androidBinaryOptions,
                config.getProguardJarOverride()));
        builder.register(new AndroidBuildConfigDescription());
        builder.register(new AndroidInstrumentationApkDescription());
        builder.register(new AndroidLibraryDescription(javacEnv));
        builder.register(new AndroidManifestDescription());
        builder.register(new AndroidPrebuiltAarDescription());
        builder.register(new AndroidResourceDescription());
        builder.register(new ApkGenruleDescription());
        builder.register(new AppleAssetCatalogDescription());
        builder.register(new AppleBundleDescription());
        builder.register(new AppleTestDescription());
        builder.register(new BuckExtensionDescription());
        builder.register(new CoreDataModelDescription());
        builder.register(new CxxBinaryDescription(cxxBuckConfig));
        builder.register(new CxxLibraryDescription(cxxBuckConfig));
        builder.register(new ExportFileDescription());
        builder.register(new GenruleDescription());
        builder.register(new GenAidlDescription());
        builder.register(new GenParcelableDescription());
        builder.register(new GwtBinaryDescription());
        builder.register(new KeystoreDescription());
        // FIX: JavaBinaryDescription was previously registered a second time further down,
        // redundantly overwriting this entry; the duplicate registration has been removed.
        builder.register(new JavaBinaryDescription());
        builder.register(new JavaLibraryDescription(javacEnv));
        builder.register(new JavaTestDescription(javacEnv));
        builder.register(new AppleLibraryDescription(archiver));
        builder.register(new AppleBinaryDescription());
        builder.register(new IosPostprocessResourcesDescription());
        builder.register(new AppleResourceDescription());
        builder.register(new JavaThriftLibraryDescription(javacEnv, new JavaBuckConfig(config)));
        builder.register(new NdkLibraryDescription(ndkVersion));
        builder.register(new PrebuiltJarDescription());
        builder.register(new PrebuiltNativeLibraryDescription());
        builder.register(new ProjectConfigDescription());
        builder.register(new PythonTestDescription(
                pythonPathToPex.or(PythonBinaryDescription.DEFAULT_PATH_TO_PEX),
                pythonPathToPythonTestMain.or(PythonTestDescription.PYTHON_PATH_TO_PYTHON_TEST_MAIN)));
        builder.register(new PythonBinaryDescription(
                pythonPathToPex.or(PythonBinaryDescription.DEFAULT_PATH_TO_PEX)));
        builder.register(new PythonLibraryDescription());
        builder.register(new RobolectricTestDescription(javacEnv));
        builder.register(new ShBinaryDescription());
        builder.register(new ShTestDescription());
        builder.register(new XcodeNativeDescription());
        builder.register(new XcodeProjectConfigDescription());
        builder.register(new XcodeWorkspaceConfigDescription());

        return builder;
    }

    /** Mutable accumulator of rule descriptions; call {@link #build()} to freeze. */
    public static class Builder {
        private final Map<BuildRuleType, Description<?>> descriptions;
        private final Map<String, BuildRuleType> types;

        protected Builder() {
            this.descriptions = Maps.newConcurrentMap();
            this.types = Maps.newConcurrentMap();
        }

        /**
         * Register a description under its rule type; a later registration for the same
         * type silently replaces an earlier one.
         */
        public void register(Description<?> description) {
            Preconditions.checkNotNull(description);
            BuildRuleType type = description.getBuildRuleType();
            types.put(type.getName(), type);
            descriptions.put(type, description);
        }

        public KnownBuildRuleTypes build() {
            return new KnownBuildRuleTypes(descriptions, types);
        }
    }
}
| |
package org.dosomething.android.fragments;
import android.app.AlertDialog;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.graphics.Typeface;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.AnimationSet;
import android.view.animation.TranslateAnimation;
import android.widget.AbsListView;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.google.inject.Inject;
import com.google.inject.name.Named;
import com.handmark.pulltorefresh.library.PullToRefreshBase;
import com.handmark.pulltorefresh.library.PullToRefreshListView;
import com.nostra13.universalimageloader.core.ImageLoader;
import org.dosomething.android.DSConstants;
import org.dosomething.android.R;
import org.dosomething.android.animations.CardFlipAnimation;
import org.dosomething.android.animations.Rotate3dAnimation;
import org.dosomething.android.cache.Cache;
import org.dosomething.android.cache.DSPreferences;
import org.dosomething.android.context.UserContext;
import org.dosomething.android.tasks.AbstractFetchCampaignsTask;
import org.dosomething.android.tasks.NoInternetException;
import org.dosomething.android.tasks.SurveyCheckTask;
import org.dosomething.android.transfer.Campaign;
import org.dosomething.android.widget.ProgressBarImageLoadingListener;
import java.util.Calendar;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import roboguice.fragment.RoboFragment;
import roboguice.inject.InjectView;
/**
* Fragment to display campaigns in a ListView.
*/
public class CampaignsListFragment extends RoboFragment {
@Inject @Named("ProximaNova-Bold")Typeface typefaceBold;
@Inject @Named("ProximaNova-Reg")Typeface typefaceReg;
@Inject private LayoutInflater inflater;
@Inject private ImageLoader imageLoader;
@Inject private UserContext userContext;
@Inject private Cache cache;
@InjectView(R.id.list) private PullToRefreshListView pullToRefreshView;
@InjectView(R.id.popup) private RelativeLayout popupView;
@InjectView(R.id.popupMsg) private TextView popupMsgView;
@InjectView(R.id.popupClose) private Button popupCloseButton;
@InjectView(R.id.surveyPopup) private RelativeLayout popupSurveyView;
@InjectView(R.id.surveyText) private TextView popupSurveyText;
@InjectView(R.id.surveyButton) private Button popupSurveyButton;
@InjectView(R.id.surveyCloseButton) private Button popupSurveyCloseButton;
private ListView list;
private CampaignsTask mCampaignsTask;
private final AdapterView.OnItemClickListener itemClickListener = new CampaignItemClickListener();
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
super.onCreateView(inflater, container, savedInstanceState);
View rootView = inflater.inflate(R.layout.fragment_campaigns_list, container, false);
return rootView;
}
@Override
public void onResume() {
super.onResume();
fetchCampaigns(false);
checkForSurvey();
}
@Override
public void onPause() {
super.onPause();
// Cancel the task in progress since we're leaving this fragment.
if (mCampaignsTask != null) {
mCampaignsTask.cancel(true);
}
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
Bundle args = getArguments();
list = pullToRefreshView.getRefreshableView();
pullToRefreshView.setOnRefreshListener(new PullToRefreshBase.OnRefreshListener<ListView>() {
@Override
public void onRefresh(PullToRefreshBase<ListView> refreshView) {
fetchCampaigns(true);
}
});
// Setup the popup section to notify when app update is available
popupMsgView.setTypeface(typefaceReg);
// Upgrade notification popup click listeners
popupMsgView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// Open link to the Google Play Store
startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("https://play.google.com/store/apps/details?id=org.dosomething.android")));
}
});
popupCloseButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
popupView.setVisibility(View.GONE);
}
});
// Setup the listener to close the survey popup view
popupSurveyText.setTypeface(typefaceReg);
popupSurveyButton.setTypeface(typefaceBold);
popupSurveyCloseButton.setTypeface(typefaceReg);
popupSurveyCloseButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
popupSurveyView.setVisibility(View.GONE);
}
});
}
private void checkForSurvey() {
SurveyCheckTask surveyTask = new SurveyCheckTask(userContext, popupSurveyView, popupSurveyText, popupSurveyButton);
surveyTask.execute();
}
/**
* Executes task to fetch the list of campaign.
*
* @param forceSearch Set to true if data should be forced to retrieve data
* from server instead of local cache even if the refresh
* time limit hasn't been reached yet.
*/
private void fetchCampaigns(boolean forceSearch) {
// Only allow a single task to be executed at a time
if (mCampaignsTask == null || !mCampaignsTask.getStatus().equals(AsyncTask.Status.RUNNING)) {
mCampaignsTask = new CampaignsTask();
mCampaignsTask.setForceSearch(forceSearch);
mCampaignsTask.execute();
}
}
/**
* Click listener for the list.
*/
private class CampaignItemClickListener implements AdapterView.OnItemClickListener {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
Campaign campaign = (Campaign) list.getAdapter().getItem(position);
View cardBackside = view.findViewById(R.id.frame_backside);
if (cardBackside != null && cardBackside.getVisibility() == View.INVISIBLE) {
TextView title = (TextView)view.findViewById(R.id.preview_title);
title.setText(campaign.getName());
title.setTypeface(typefaceBold);
TextView body = (TextView)view.findViewById(R.id.preview_body);
body.setText(campaign.getTeaser());
body.setTypeface(typefaceReg);
CardFlipAnimation.animate(getActivity(), view, false);
}
else {
startActivity(org.dosomething.android.activities.Campaign.getIntent(getActivity(), campaign));
}
}
};
private class CampaignsTask extends AbstractFetchCampaignsTask {
private boolean currentVersionOutdated = false;
private boolean forceSearch;
public CampaignsTask() {
super(getActivity(), userContext, cache);
forceSearch = false;
}
@Override
protected void onSuccess() {
// Don't display campaigns that require a higher version than what we have
int version = 0;
try {
PackageInfo pInfo = this.context.getPackageManager().getPackageInfo(this.context.getPackageName(), 0);
version = pInfo.versionCode;
}
catch (PackageManager.NameNotFoundException e) {
}
List<Campaign> campaigns = getCampaigns();
Iterator<Campaign> iter = campaigns.iterator();
while (iter.hasNext()) {
Campaign campaign = iter.next();
if (campaign.getMinVersion() > version) {
currentVersionOutdated = true;
iter.remove();
}
else if (campaign.isHidden()) {
iter.remove();
}
}
// Adapter to display the list items
CampaignListAdapter listAdapter = new CampaignListAdapter(getActivity(), campaigns);
list.setAdapter(listAdapter);
// Handle click events
list.setOnItemClickListener(itemClickListener);
}
@Override
protected void onError(Exception e) {
String message;
if(e instanceof NoInternetException) {
message = getString(R.string.campaigns_no_internet);
} else {
message = getString(R.string.campaigns_failed);
}
new AlertDialog.Builder(getActivity())
.setMessage(message)
.setCancelable(false)
.setPositiveButton(getString(R.string.ok_upper), null)
.create()
.show();
}
@Override
protected void doWebOperation() throws Exception {
if (forceSearch) {
cache.clearCampaigns();
}
super.doWebOperation();
}
@Override
protected void onFinish() {
super.onFinish();
pullToRefreshView.onRefreshComplete();
if (currentVersionOutdated) {
popupView.setVisibility(View.VISIBLE);
}
else {
popupView.setVisibility(View.GONE);
}
}
protected void setForceSearch(boolean force) {
this.forceSearch = force;
}
}
private class CampaignListAdapter extends ArrayAdapter<Campaign> {
private int lastItemUpdatePosition;
public CampaignListAdapter(Context context, List<Campaign> objects) {
super(context, android.R.layout.simple_expandable_list_item_1, objects);
this.lastItemUpdatePosition = 0;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
View v = convertView;
if (v == null) {
v = inflater.inflate(R.layout.campaign_row, null);
// Set click listener for the close button
LinearLayout closeButton = (LinearLayout)v.findViewById(R.id.preview_close);
closeButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
// This seems sorta hacky. Must guarantee that this Button is two levels under
// the containing row layout, and that the Layout is a FrameLayout
CardFlipAnimation.animate(getActivity(), (FrameLayout)view.getParent().getParent(), true);
}
});
}
// Ensure front side of the item is visible
View backside = v.findViewById(R.id.frame_backside);
backside.setVisibility(View.INVISIBLE);
View frontside = v.findViewById(R.id.frame);
frontside.setVisibility(View.VISIBLE);
Campaign campaign = getItem(position);
ImageView bgImageView = (ImageView) v.findViewById(R.id.background);
bgImageView.setImageDrawable(null);
bgImageView.setBackgroundColor(Color.parseColor(campaign.getBackgroundColor()));
if (campaign.getBackgroundUrl() != null) {
imageLoader.displayImage(campaign.getBackgroundUrl(), bgImageView);
}
ImageView imageView = (ImageView) v.findViewById(R.id.image);
ProgressBar progressBar = (ProgressBar) v.findViewById(R.id.progressBar);
imageLoader.displayImage(campaign.getLogoUrl(), imageView, new ProgressBarImageLoadingListener(progressBar));
Calendar cal = Calendar.getInstance();
Date todayDate = cal.getTime();
int openHeight = getResources().getDimensionPixelSize(R.dimen.campaign_row_height_open);
if (v.getLayoutParams() != null) {
v.getLayoutParams().height = openHeight;
}
else {
v.setLayoutParams(new AbsListView.LayoutParams(AbsListView.LayoutParams.MATCH_PARENT, openHeight));
}
TextView textView = (TextView) v.findViewById(R.id.callout);
if(campaign.getCallout() != null && campaign.getCallout().length() > 0) {
textView.setText(campaign.getCallout());
textView.setTypeface(typefaceReg);
textView.setVisibility(TextView.VISIBLE);
// Change text color and background color if it's a past campaign
if (todayDate.after(campaign.getEndDate())) {
int bgColor = getResources().getColor(R.color.campaigns_past_campaign_callout_background);
textView.setBackgroundColor(bgColor);
int textColor = getResources().getColor(R.color.campaigns_past_campaign_callout_text);
textView.setTextColor(textColor);
}
else {
int bgColor = getResources().getColor(R.color.campaigns_callout_background);
textView.setBackgroundColor(bgColor);
int textColor = getResources().getColor(R.color.campaigns_callout_text);
textView.setTextColor(textColor);
}
}
else {
// we use GONE instead of INVISIBLE because we dont want it to leave a blank space
textView.setVisibility(TextView.GONE);
}
ImageView imageCause = (ImageView) v.findViewById(R.id.cause_tag);
imageCause.setVisibility(ImageView.GONE); // GONE by default
int[] tags = campaign.getCauseTags();
if(tags != null && tags.length > 0) {
boolean bValidTag = false;
DSPreferences prefs = new DSPreferences(getActivity());
int[] userCauses = prefs.getCauses();
for(int i=0; i<tags.length && !bValidTag; i++) {
for(int j=0; j<userCauses.length && !bValidTag; j++) {
if(tags[i] == userCauses[j] && getCauseDrawable(tags[i]) > 0) {
imageCause.setImageResource(getCauseDrawable(tags[i]));
imageCause.setVisibility(ImageView.VISIBLE);
bValidTag = true;
break;
}
}
}
}
// Apply animation when item comes onto the screen, but only when scrolling down
if (position > lastItemUpdatePosition) {
Animation animRotate = new Rotate3dAnimation(0, 0, -15, 0, 0, 0);
animRotate.setDuration(350);
Animation animTranslate = new TranslateAnimation(0, 0, v.getHeight() / 2, 0);
animTranslate.setDuration(400);
AnimationSet animSet = new AnimationSet(false);
animSet.addAnimation(animRotate);
animSet.addAnimation(animTranslate);
v.setAnimation(animSet);
}
lastItemUpdatePosition = position;
return v;
}
/**
 * Maps a cause tag ID to the drawable resource used as its badge.
 *
 * @param causeId one of the {@code DSConstants.CAUSE_TAG} values
 * @return the matching drawable resource ID, or -1 when the tag is unknown
 *     (callers treat only values &gt; 0 as valid)
 */
private int getCauseDrawable(int causeId) {
    if (causeId == DSConstants.CAUSE_TAG.ANIMALS.getValue()) {
        return R.drawable.cause_animals_tag;
    }
    if (causeId == DSConstants.CAUSE_TAG.BULLYING.getValue()) {
        return R.drawable.cause_bullying_tag;
    }
    if (causeId == DSConstants.CAUSE_TAG.DISASTERS.getValue()) {
        return R.drawable.cause_disasters_tag;
    }
    if (causeId == DSConstants.CAUSE_TAG.DISCRIMINATION.getValue()) {
        return R.drawable.cause_discrimination_tag;
    }
    if (causeId == DSConstants.CAUSE_TAG.EDUCATION.getValue()) {
        return R.drawable.cause_education_tag;
    }
    if (causeId == DSConstants.CAUSE_TAG.ENVIRONMENT.getValue()) {
        return R.drawable.cause_environment_tag;
    }
    if (causeId == DSConstants.CAUSE_TAG.POVERTY.getValue()) {
        return R.drawable.cause_poverty_tag;
    }
    if (causeId == DSConstants.CAUSE_TAG.HUMAN_RIGHTS.getValue()) {
        return R.drawable.cause_human_rights_tag;
    }
    if (causeId == DSConstants.CAUSE_TAG.TROOPS.getValue()) {
        return R.drawable.cause_troops_tag;
    }
    if (causeId == DSConstants.CAUSE_TAG.HEALTH.getValue()) {
        return R.drawable.cause_health_tag;
    }
    if (causeId == DSConstants.CAUSE_TAG.RELATIONSHIPS.getValue()) {
        return R.drawable.cause_relationships_tag;
    }
    // Unknown tag: sentinel that fails the caller's "> 0" validity check.
    return -1;
}
}
}
| |
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package adwords.axis.v201809.shoppingcampaigns;
import static com.google.api.ads.common.lib.utils.Builder.DEFAULT_CONFIGURATION_FILENAME;
import com.beust.jcommander.Parameter;
import com.google.api.ads.adwords.axis.factory.AdWordsServices;
import com.google.api.ads.adwords.axis.utils.v201809.shopping.ProductPartitionTree;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroup;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupAd;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupAdOperation;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupAdReturnValue;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupAdServiceInterface;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupCriterion;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupCriterionOperation;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupCriterionReturnValue;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupCriterionServiceInterface;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupOperation;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupReturnValue;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupServiceInterface;
import com.google.api.ads.adwords.axis.v201809.cm.AdGroupType;
import com.google.api.ads.adwords.axis.v201809.cm.AdvertisingChannelSubType;
import com.google.api.ads.adwords.axis.v201809.cm.AdvertisingChannelType;
import com.google.api.ads.adwords.axis.v201809.cm.ApiError;
import com.google.api.ads.adwords.axis.v201809.cm.ApiException;
import com.google.api.ads.adwords.axis.v201809.cm.BiddingStrategyConfiguration;
import com.google.api.ads.adwords.axis.v201809.cm.BiddingStrategyType;
import com.google.api.ads.adwords.axis.v201809.cm.Budget;
import com.google.api.ads.adwords.axis.v201809.cm.BudgetBudgetDeliveryMethod;
import com.google.api.ads.adwords.axis.v201809.cm.BudgetOperation;
import com.google.api.ads.adwords.axis.v201809.cm.BudgetServiceInterface;
import com.google.api.ads.adwords.axis.v201809.cm.Campaign;
import com.google.api.ads.adwords.axis.v201809.cm.CampaignOperation;
import com.google.api.ads.adwords.axis.v201809.cm.CampaignReturnValue;
import com.google.api.ads.adwords.axis.v201809.cm.CampaignServiceInterface;
import com.google.api.ads.adwords.axis.v201809.cm.CampaignStatus;
import com.google.api.ads.adwords.axis.v201809.cm.GoalOptimizedShoppingAd;
import com.google.api.ads.adwords.axis.v201809.cm.Money;
import com.google.api.ads.adwords.axis.v201809.cm.Operator;
import com.google.api.ads.adwords.axis.v201809.cm.Setting;
import com.google.api.ads.adwords.axis.v201809.cm.ShoppingSetting;
import com.google.api.ads.adwords.lib.client.AdWordsSession;
import com.google.api.ads.adwords.lib.factory.AdWordsServicesInterface;
import com.google.api.ads.adwords.lib.utils.examples.ArgumentNames;
import com.google.api.ads.common.lib.auth.OfflineCredentials;
import com.google.api.ads.common.lib.auth.OfflineCredentials.Api;
import com.google.api.ads.common.lib.conf.ConfigurationLoadException;
import com.google.api.ads.common.lib.exception.OAuthException;
import com.google.api.ads.common.lib.exception.ValidationException;
import com.google.api.ads.common.lib.utils.examples.CodeSampleParams;
import com.google.api.client.auth.oauth2.Credential;
import java.rmi.RemoteException;
import java.util.List;
/**
* This example adds a Smart Shopping campaign with an ad group and ad group ad.
*
* <p>Credentials and properties in {@code fromFile()} are pulled from the "ads.properties" file.
* See README for more info.
*/
public class AddSmartShoppingAd {

  /** Command line parameters accepted by this example. */
  private static class AddSmartShoppingAdParams extends CodeSampleParams {

    // ID of the Merchant Center account whose products the campaign will advertise.
    @Parameter(names = ArgumentNames.MERCHANT_ID, required = true)
    private Long merchantId;

    @Parameter(
        names = ArgumentNames.CREATE_DEFAULT_PARTITION,
        required = true,
        arity = 1,
        description =
            "If set to true, a default partition will be created. If running the"
                + " AddProductPartitionTree.java example right after this example, make sure this"
                + " stays set to false.")
    private boolean createDefaultPartition;
  }

  /**
   * Entry point. Builds an authenticated {@link AdWordsSession} from the ads.properties file,
   * parses command line arguments (or falls back to the placeholder values below), runs the
   * example, and reports API or transport failures to stderr.
   *
   * @param args command line arguments; see {@link AddSmartShoppingAdParams} for the names
   */
  public static void main(String[] args) {
    AdWordsSession session;
    try {
      // Generate a refreshable OAuth2 credential.
      Credential oAuth2Credential =
          new OfflineCredentials.Builder()
              .forApi(Api.ADWORDS)
              .fromFile()
              .build()
              .generateCredential();

      // Construct an AdWordsSession.
      session =
          new AdWordsSession.Builder().fromFile().withOAuth2Credential(oAuth2Credential).build();
    } catch (ConfigurationLoadException cle) {
      System.err.printf(
          "Failed to load configuration from the %s file. Exception: %s%n",
          DEFAULT_CONFIGURATION_FILENAME, cle);
      return;
    } catch (ValidationException ve) {
      System.err.printf(
          "Invalid configuration in the %s file. Exception: %s%n",
          DEFAULT_CONFIGURATION_FILENAME, ve);
      return;
    } catch (OAuthException oe) {
      System.err.printf(
          "Failed to create OAuth credentials. Check OAuth settings in the %s file. "
              + "Exception: %s%n",
          DEFAULT_CONFIGURATION_FILENAME, oe);
      return;
    }

    AdWordsServicesInterface adWordsServices = AdWordsServices.getInstance();

    AddSmartShoppingAdParams params = new AddSmartShoppingAdParams();
    if (!params.parseArguments(args)) {
      // Either pass the required parameters for this example on the command line, or insert them
      // into the code here. See the parameter class definition above for descriptions.
      params.merchantId = Long.parseLong("INSERT_MERCHANT_ID_HERE");
      params.createDefaultPartition = false;
    }

    try {
      runExample(adWordsServices, session, params.merchantId, params.createDefaultPartition);
    } catch (ApiException apiException) {
      // ApiException is the base class for most exceptions thrown by an API request. Instances
      // of this exception have a message and a collection of ApiErrors that indicate the
      // type and underlying cause of the exception. Every exception object in the adwords.axis
      // packages will return a meaningful value from toString.
      //
      // ApiException extends RemoteException, so this catch block must appear before the
      // catch block for RemoteException.
      System.err.println("Request failed due to ApiException. Underlying ApiErrors:");
      if (apiException.getErrors() != null) {
        int i = 0;
        for (ApiError apiError : apiException.getErrors()) {
          System.err.printf(" Error %d: %s%n", i++, apiError);
        }
      }
    } catch (RemoteException re) {
      System.err.printf("Request failed unexpectedly due to RemoteException: %s%n", re);
    }
  }

  /**
   * Runs the example.
   *
   * @param adWordsServices the services factory.
   * @param session the session.
   * @param merchantId the Merchant Center ID for the new campaign.
   * @param createDefaultPartition if true, a default product partition for all products will be
   *     created.
   * @throws ApiException if the API request failed with one or more service errors.
   * @throws RemoteException if the API request failed due to other errors.
   */
  public static void runExample(
      AdWordsServicesInterface adWordsServices,
      AdWordsSession session,
      long merchantId,
      boolean createDefaultPartition)
      throws RemoteException {
    // Each step feeds the server-assigned ID of the previous mutate call into the next:
    // budget -> campaign -> ad group -> ad (-> optional default product partition).
    Budget budget = createBudget(adWordsServices, session);
    Campaign campaign =
        createSmartShoppingCampaign(adWordsServices, session, budget.getBudgetId(), merchantId);
    AdGroup adGroup = createSmartShoppingAdGroup(adWordsServices, session, campaign.getId());
    createSmartShoppingAd(adWordsServices, session, adGroup.getId());
    if (createDefaultPartition) {
      createDefaultPartition(adWordsServices, session, adGroup.getId());
    }
  }

  /**
   * Creates a non-shared budget for a Smart Shopping campaign. Smart Shopping campaigns support
   * only non-shared budgets.
   *
   * @param adWordsServices the services factory.
   * @param session the session.
   * @return the newly added budget, including its server-assigned ID.
   * @throws RemoteException if the API request failed.
   */
  private static Budget createBudget(
      AdWordsServicesInterface adWordsServices, AdWordsSession session) throws RemoteException {
    BudgetServiceInterface budgetService =
        adWordsServices.get(session, BudgetServiceInterface.class);

    // Create a budget.
    Budget budget = new Budget();
    budget.setName("Interplanetary Cruise #" + System.currentTimeMillis());
    Money budgetAmount = new Money();
    // This budget equals 50.00 units of your account's currency, e.g.,
    // 50 USD if your currency is USD.
    budgetAmount.setMicroAmount(50_000_000L);
    budget.setAmount(budgetAmount);
    budget.setDeliveryMethod(BudgetBudgetDeliveryMethod.STANDARD);

    // Non-shared budgets are required for Smart Shopping campaigns.
    budget.setIsExplicitlyShared(false);

    // Create operation.
    BudgetOperation budgetOperation = new BudgetOperation();
    budgetOperation.setOperand(budget);
    budgetOperation.setOperator(Operator.ADD);

    // Add the budget.
    Budget newBudget = budgetService.mutate(new BudgetOperation[] {budgetOperation}).getValue(0);
    System.out.printf(
        "Budget with name '%s' and ID %d was added.%n",
        newBudget.getName(), newBudget.getBudgetId());
    return newBudget;
  }

  /**
   * Creates a Smart Shopping campaign.
   *
   * @param adWordsServices the services factory.
   * @param session the session.
   * @param budgetId ID of the non-shared budget created by {@link #createBudget}.
   * @param merchantId the Merchant Center ID used in the campaign's ShoppingSetting.
   * @return the newly added campaign, including its server-assigned ID.
   * @throws RemoteException if the API request failed.
   */
  private static Campaign createSmartShoppingCampaign(
      AdWordsServicesInterface adWordsServices,
      AdWordsSession session,
      Long budgetId,
      long merchantId)
      throws RemoteException {
    CampaignServiceInterface campaignService =
        adWordsServices.get(session, CampaignServiceInterface.class);

    // Create a campaign with required and optional settings.
    Campaign campaign = new Campaign();
    campaign.setName("Smart Shopping campaign #" + System.currentTimeMillis());

    // The advertisingChannelType is what makes this a Shopping campaign.
    campaign.setAdvertisingChannelType(AdvertisingChannelType.SHOPPING);

    // Sets the advertisingChannelSubType to SHOPPING_GOAL_OPTIMIZED_ADS to
    // make this a Smart Shopping campaign.
    campaign.setAdvertisingChannelSubType(AdvertisingChannelSubType.SHOPPING_GOAL_OPTIMIZED_ADS);

    // Recommendation: Set the campaign to PAUSED when creating it to stop
    // the ads from immediately serving. Set to ENABLED once you've added
    // targeting and the ads are ready to serve.
    campaign.setStatus(CampaignStatus.PAUSED);

    // Set a budget.
    Budget budget = new Budget();
    budget.setBudgetId(budgetId);
    campaign.setBudget(budget);

    // Set bidding strategy. Only MAXIMIZE_CONVERSION_VALUE is supported.
    BiddingStrategyConfiguration biddingStrategyConfiguration = new BiddingStrategyConfiguration();
    biddingStrategyConfiguration.setBiddingStrategyType(
        BiddingStrategyType.MAXIMIZE_CONVERSION_VALUE);
    campaign.setBiddingStrategyConfiguration(biddingStrategyConfiguration);

    // All Shopping campaigns need a ShoppingSetting.
    ShoppingSetting shoppingSetting = new ShoppingSetting();
    shoppingSetting.setSalesCountry("US");
    shoppingSetting.setMerchantId(merchantId);
    campaign.setSettings(new Setting[] {shoppingSetting});

    // Create operation.
    CampaignOperation campaignOperation = new CampaignOperation();
    campaignOperation.setOperand(campaign);
    campaignOperation.setOperator(Operator.ADD);

    // Make the mutate request.
    CampaignReturnValue campaignAddResult =
        campaignService.mutate(new CampaignOperation[] {campaignOperation});

    // Display result.
    campaign = campaignAddResult.getValue(0);
    System.out.printf(
        "Smart Shopping campaign with name '%s' and ID %d was added.%n",
        campaign.getName(), campaign.getId());
    return campaign;
  }

  /**
   * Creates a Smart Shopping ad group by setting the ad group type to SHOPPING_GOAL_OPTIMIZED_ADS.
   *
   * @param adWordsServices the services factory.
   * @param session the session.
   * @param campaignId ID of the Smart Shopping campaign the ad group belongs to.
   * @return the newly added ad group, including its server-assigned ID.
   * @throws RemoteException if the API request failed.
   */
  private static AdGroup createSmartShoppingAdGroup(
      AdWordsServicesInterface adWordsServices, AdWordsSession session, long campaignId)
      throws RemoteException {
    // Get the AdGroupService.
    AdGroupServiceInterface adGroupService =
        adWordsServices.get(session, AdGroupServiceInterface.class);

    // Create ad group.
    AdGroup adGroup = new AdGroup();
    adGroup.setCampaignId(campaignId);
    adGroup.setName("Smart Shopping ad group #" + System.currentTimeMillis());

    // Set the ad group type to SHOPPING_GOAL_OPTIMIZED_ADS.
    adGroup.setAdGroupType(AdGroupType.SHOPPING_GOAL_OPTIMIZED_ADS);

    // Create operation.
    AdGroupOperation adGroupOperation = new AdGroupOperation();
    adGroupOperation.setOperand(adGroup);
    adGroupOperation.setOperator(Operator.ADD);

    // Make the mutate request.
    AdGroupReturnValue adGroupAddResult =
        adGroupService.mutate(new AdGroupOperation[] {adGroupOperation});

    // Display result.
    adGroup = adGroupAddResult.getValue(0);
    System.out.printf(
        "Smart Shopping ad group with name '%s' and ID %d was added.%n",
        adGroup.getName(), adGroup.getId());
    return adGroup;
  }

  /**
   * Creates a Smart Shopping ad.
   *
   * @param adWordsServices the services factory.
   * @param session the session.
   * @param adGroupId ID of the ad group the ad is added to.
   * @throws RemoteException if the API request failed.
   */
  private static void createSmartShoppingAd(
      AdWordsServicesInterface adWordsServices, AdWordsSession session, long adGroupId)
      throws RemoteException {
    AdGroupAdServiceInterface adGroupAdService =
        adWordsServices.get(session, AdGroupAdServiceInterface.class);

    // Create a Smart Shopping ad (Goal-optimized Shopping ad).
    // No further fields are set; the ad content is generated by the API.
    GoalOptimizedShoppingAd smartShoppingAd = new GoalOptimizedShoppingAd();

    // Create ad group ad.
    AdGroupAd adGroupAd = new AdGroupAd();
    adGroupAd.setAdGroupId(adGroupId);
    adGroupAd.setAd(smartShoppingAd);

    // Create operation.
    AdGroupAdOperation adGroupAdOperation = new AdGroupAdOperation();
    adGroupAdOperation.setOperand(adGroupAd);
    adGroupAdOperation.setOperator(Operator.ADD);

    // Make the mutate request.
    AdGroupAdReturnValue adGroupAdAddResult =
        adGroupAdService.mutate(new AdGroupAdOperation[] {adGroupAdOperation});

    // Display result.
    adGroupAd = adGroupAdAddResult.getValue(0);
    System.out.printf("Smart Shopping ad with ID %d was added.%n", adGroupAd.getAd().getId());
  }

  /**
   * Creates a default product partition as an ad group criterion.
   *
   * @param adWordsServices the services factory.
   * @param session the session.
   * @param adGroupId ID of the ad group that receives the 'All products' partition.
   * @throws RemoteException if the API request failed.
   */
  private static void createDefaultPartition(
      AdWordsServicesInterface adWordsServices, AdWordsSession session, long adGroupId)
      throws RemoteException {
    // Create an ad group criterion for 'All products' using the ProductPartitionTree utility.
    ProductPartitionTree productPartitionTree =
        ProductPartitionTree.createAdGroupTree(adWordsServices, session, adGroupId);
    List<AdGroupCriterionOperation> mutateOperations = productPartitionTree.getMutateOperations();

    // Make the mutate request.
    AdGroupCriterionServiceInterface adGroupCriterionService =
        adWordsServices.get(session, AdGroupCriterionServiceInterface.class);
    AdGroupCriterionReturnValue adGroupCriterionResult =
        adGroupCriterionService.mutate(mutateOperations.toArray(new AdGroupCriterionOperation[0]));

    // Display result.
    for (AdGroupCriterion adGroupCriterion : adGroupCriterionResult.getValue()) {
      System.out.printf(
          "Ad group criterion with ID %d in ad group with ID %d was added.%n",
          adGroupCriterion.getCriterion().getId(), adGroupCriterion.getAdGroupId());
    }
  }
}
| |
/*
* This file provided by Facebook is for non-commercial testing and evaluation
* purposes only. Facebook reserves all rights not expressly granted.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* FACEBOOK BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.facebook.fresco.samples.showcase.imagepipeline;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import com.facebook.drawee.backends.pipeline.Fresco;
import com.facebook.drawee.interfaces.DraweeController;
import com.facebook.drawee.view.SimpleDraweeView;
import com.facebook.fresco.samples.showcase.BaseShowcaseFragment;
import com.facebook.fresco.samples.showcase.R;
import com.facebook.fresco.samples.showcase.misc.ImageUriProvider;
import com.facebook.fresco.samples.showcase.postprocessor.BenchmarkPostprocessorForDuplicatedBitmapInPlace;
import com.facebook.fresco.samples.showcase.postprocessor.BenchmarkPostprocessorForManualBitmapHandling;
import com.facebook.fresco.samples.showcase.postprocessor.CachedWatermarkPostprocessor;
import com.facebook.fresco.samples.showcase.postprocessor.FasterGreyScalePostprocessor;
import com.facebook.fresco.samples.showcase.postprocessor.ScalingBlurPostprocessor;
import com.facebook.fresco.samples.showcase.postprocessor.SlowGreyScalePostprocessor;
import com.facebook.fresco.samples.showcase.postprocessor.WatermarkPostprocessor;
import com.facebook.imagepipeline.postprocessors.IterativeBoxBlurPostProcessor;
import com.facebook.imagepipeline.postprocessors.RoundAsCirclePostprocessor;
import com.facebook.imagepipeline.request.ImageRequest;
import com.facebook.imagepipeline.request.ImageRequestBuilder;
import com.facebook.imagepipeline.request.Postprocessor;
import java.util.Locale;
/**
* Fragment that illustrates how to use the image pipeline directly in order to create
* notifications.
*/
public class ImagePipelinePostProcessorFragment extends BaseShowcaseFragment
    implements DurationCallback {

  private static final int WATERMARK_COUNT = 10;
  private static final String WATERMARK_STRING = "WATERMARK";

  // Selectable postprocessors; the first row (null postprocessor) shows the unmodified image.
  private final Entry[] mEntries =
      new Entry[] {
        new Entry(R.string.imagepipeline_postprocessor_show_original, null),
        new Entry(
            R.string.imagepipeline_postprocessor_set_greyscale_slow,
            new BenchmarkPostprocessorForDuplicatedBitmapInPlace(
                this, new SlowGreyScalePostprocessor())),
        new Entry(
            R.string.imagepipeline_postprocessor_set_greyscale,
            new BenchmarkPostprocessorForDuplicatedBitmapInPlace(
                this, new FasterGreyScalePostprocessor())),
        new Entry(
            R.string.imagepipeline_postprocessor_set_watermark,
            new BenchmarkPostprocessorForDuplicatedBitmapInPlace(
                this, new WatermarkPostprocessor(WATERMARK_COUNT, WATERMARK_STRING))),
        new Entry(
            R.string.imagepipeline_postprocessor_set_watermark_cached,
            new BenchmarkPostprocessorForDuplicatedBitmapInPlace(
                this, new CachedWatermarkPostprocessor(WATERMARK_COUNT, WATERMARK_STRING))),
        new Entry(
            R.string.imagepipeline_postprocessor_set_blur,
            new BenchmarkPostprocessorForDuplicatedBitmapInPlace(
                this, new IterativeBoxBlurPostProcessor(25, 3))),
        new Entry(
            R.string.imagepipeline_postprocessor_set_scaling_blur,
            new BenchmarkPostprocessorForManualBitmapHandling(
                this, new ScalingBlurPostprocessor(25, 3, 4))),
        new Entry(
            R.string.imagepipeline_postprocessor_set_round_as_circle,
            new BenchmarkPostprocessorForDuplicatedBitmapInPlace(
                this, new RoundAsCirclePostprocessor())),
      };

  private Button mApplyButton;
  private SimpleDraweeView mImageView;
  private Spinner mPostprocessorSpinner;
  private Uri mImageUri;

  @Nullable
  @Override
  public View onCreateView(
      LayoutInflater inflater,
      @Nullable ViewGroup container,
      @Nullable Bundle savedInstanceState) {
    // Inflate only; all wiring happens in onViewCreated.
    return inflater.inflate(R.layout.fragment_imagepipeline_postprocessor, container, false);
  }

  @Override
  public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
    final ImageUriProvider uriProvider = ImageUriProvider.getInstance(getContext());
    mImageUri = uriProvider.createSampleUri(ImageUriProvider.ImageSize.L);

    mApplyButton = (Button) view.findViewById(R.id.button);
    mImageView = (SimpleDraweeView) view.findViewById(R.id.drawee_view);
    mPostprocessorSpinner = (Spinner) view.findViewById(R.id.spinner);

    mPostprocessorSpinner.setAdapter(new SimplePostprocessorAdapter());
    mPostprocessorSpinner.setOnItemSelectedListener(
        new AdapterView.OnItemSelectedListener() {
          @Override
          public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
            // Re-render the image with the postprocessor the user just picked.
            setPostprocessor(mEntries[position].postprocessor);
          }

          @Override
          public void onNothingSelected(AdapterView<?> parent) {
            // Intentionally empty: keep whatever postprocessor is currently applied.
          }
        });
    mPostprocessorSpinner.setSelection(0);

    // The button re-applies the currently selected postprocessor on demand.
    mApplyButton.setOnClickListener(
        new View.OnClickListener() {
          @Override
          public void onClick(View v) {
            final int selected = mPostprocessorSpinner.getSelectedItemPosition();
            setPostprocessor(mEntries[selected].postprocessor);
          }
        });
  }

  @Override
  public int getTitleId() {
    return R.string.imagepipeline_postprocessor_title;
  }

  @Override
  public void showDuration(long startNs) {
    // NOTE(review): the argument is converted directly to milliseconds, so it is treated as an
    // elapsed duration in nanoseconds despite the name — confirm against DurationCallback.
    final float elapsedMs = startNs / 1e6f;
    final String message = String.format((Locale) null, "Duration: %.1f ms", elapsedMs);
    getActivity()
        .runOnUiThread(
            new Runnable() {
              @Override
              public void run() {
                Toast.makeText(getContext(), message, Toast.LENGTH_SHORT).show();
              }
            });
  }

  /** Builds a new image request with the given postprocessor and swaps it into the drawee. */
  private void setPostprocessor(Postprocessor postprocessor) {
    final ImageRequest request =
        ImageRequestBuilder.newBuilderWithSource(mImageUri)
            .setPostprocessor(postprocessor)
            .build();
    final DraweeController controller =
        Fresco.newDraweeControllerBuilder()
            .setOldController(mImageView.getController())
            .setImageRequest(request)
            .build();
    mImageView.setController(controller);
  }

  /** Adapter exposing the postprocessor entries as simple dropdown rows. */
  private class SimplePostprocessorAdapter extends BaseAdapter {

    @Override
    public int getCount() {
      return mEntries.length;
    }

    @Override
    public Object getItem(int position) {
      return mEntries[position];
    }

    @Override
    public long getItemId(int position) {
      return position;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
      final LayoutInflater inflater = getLayoutInflater(null);
      View row = convertView;
      if (row == null) {
        row = inflater.inflate(android.R.layout.simple_spinner_dropdown_item, parent, false);
      }
      final TextView label = (TextView) row.findViewById(android.R.id.text1);
      label.setText(mEntries[position].descriptionId);
      return row;
    }
  }

  /** A spinner row: a label string resource and the postprocessor it activates (may be null). */
  private static class Entry {
    final int descriptionId;
    final Postprocessor postprocessor;

    Entry(int descriptionId, Postprocessor postprocessor) {
      this.descriptionId = descriptionId;
      this.postprocessor = postprocessor;
    }
  }
}
| |
/**
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met :
*
* . Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* . Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* . The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* $Id: $
*/
package cppncss.counter;
import java.io.StringReader;
import junit.framework.TestCase;
import cppast.AstTranslationUnit;
import cppast.Node;
import cppast.ParseException;
import cppast.Parser;
/**
* @author Mathieu Champlon
*/
public class FunctionNameExtractorTest extends TestCase
{
private String extract( final String data ) throws ParseException
{
final Node node = new Parser( new StringReader( data ) ).translation_unit();
return (String)node.jjtAccept( new FunctionNameExtractor(), null );
}
public void testNotFunctionReturnsNull() throws ParseException
{
assertNull( extract( "" ) );
}
public void testFunctionDefinitionWithoutParameters() throws ParseException
{
assertEquals( "MyFunction()", extract( "void MyFunction() {}" ) );
}
public void testFunctionDefinitionWithIntegerParameter() throws ParseException
{
assertEquals( "MyFunction( int )", extract( "void MyFunction( int p ) {}" ) );
}
public void testFunctionDefinitionWithIntegerParameterWithoutParameterName() throws ParseException
{
assertEquals( "MyFunction( int )", extract( "void MyFunction( int ) {}" ) );
}
public void testFunctionDefinitionWithIntegerPointerParameter() throws ParseException
{
assertEquals( "MyFunction( int* )", extract( "void MyFunction( int* p ) {}" ) );
}
public void testFunctionDefinitionWithIntegerPointerParameterWithoutParameterName() throws ParseException
{
assertEquals( "MyFunction( int* )", extract( "void MyFunction( int* ) {}" ) );
}
public void testFunctionDefinitionWithIntegerReferencePointerConstParameter() throws ParseException
{
assertEquals( "MyFunction( int*& )", extract( "void MyFunction( int*& p ) {}" ) );
}
public void testFunctionDefinitionWithIntegerReferencePointerConstParameterWithoutParameterName()
throws ParseException
{
assertEquals( "MyFunction( int*& )", extract( "void MyFunction( int*& ) {}" ) );
}
public void testFunctionDefinitionWithConstPointerConstParameter() throws ParseException
{
assertEquals( "MyFunction( const int* const )", extract( "void MyFunction( const int* const p ) {}" ) );
}
public void testFunctionDefinitionWithConstPointerConstParameterWithoutParameterName() throws ParseException
{
assertEquals( "MyFunction( const int* const )", extract( "void MyFunction( const int* const ) {}" ) );
}
public void testFunctionDefinitionWithUnsignedIntegerParameter() throws ParseException
{
assertEquals( "MyFunction( unsigned int )", extract( "void MyFunction( unsigned int p ) {}" ) );
}
public void testFunctionDefinitionWithUnsignedIntegerParameterWithoutParameterName() throws ParseException
{
assertEquals( "MyFunction( unsigned int )", extract( "void MyFunction( unsigned int ) {}" ) );
}
public void testFunctionDefinitionWithUnsignedIntegerPointerParameter() throws ParseException
{
assertEquals( "MyFunction( unsigned int* )", extract( "void MyFunction( unsigned int* p ) {}" ) );
}
public void testFunctionDefinitionWithUnsignedIntegerPointerParameterWithoutParameterName() throws ParseException
{
assertEquals( "MyFunction( unsigned int* )", extract( "void MyFunction( unsigned int* ) {}" ) );
}
public void testFunctionDefinitionWithSeveralParameters() throws ParseException
{
assertEquals( "MyFunction( int, float&, const char* )",
extract( "void MyFunction( int p1, float& p2, const char* p3 ) {}" ) );
}
public void testFunctionDefinitionWithSeveralParametersWithoutParameterNames() throws ParseException
{
assertEquals( "MyFunction( int, float&, const char* )",
extract( "void MyFunction( int, float&, const char* ) {}" ) );
}
public void testConstMethodDefinition() throws ParseException
{
assertEquals( "MyClass::MyMethod()", extract( "void MyClass::MyMethod() const {}" ) );
}
public void testMethodDefinitionWithConstReferenceReturnType() throws ParseException
{
assertEquals( "MyClass::MyMethod()", extract( "const MyType& MyClass::MyMethod() {}" ) );
}
public void testEqualityOperatorDefinition() throws ParseException
{
assertEquals( "MyClass::operator ==( const MyClass& )",
extract( "bool MyClass::operator==( const MyClass& rhs ) const {}" ) );
}
public void testConversionOperatorDefinition() throws ParseException
{
assertEquals( "MyClass::operator const unsigned char*()",
extract( "MyClass::operator const unsigned char*() const {}" ) );
}
public void testConstructorDefinition() throws ParseException
{
assertEquals( "MyClass::MyClass()", extract( "MyClass::MyClass() {}" ) );
}
public void testConstructorDefinitionWithParameter() throws ParseException
{
assertEquals( "MyClass::MyClass( int )", extract( "MyClass::MyClass( int p ) {}" ) );
}
public void testDestructorDefinition() throws ParseException
{
assertEquals( "MyClass::~MyClass()", extract( "MyClass::~MyClass() {}" ) );
}
public void testFunctionBodyDoesNotAlterFunctionSignature() throws ParseException
{
assertEquals( "MyFunction()", extract( "void MyFunction() { char *p; }" ) );
}
public void testArrayArgument() throws ParseException
{
assertEquals( "MyFunction( MyType[3] )", extract( "void MyFunction( MyType p[3] ) {}" ) );
}
public void testArrayArgumentWithoutParameterName() throws ParseException
{
assertEquals( "MyFunction( MyType[3] )", extract( "void MyFunction( MyType[3] ) {}" ) );
}
public void testTemplateClassMethod() throws ParseException
{
assertEquals( "MyClass< T, F >::MyMethod()", extract( "void MyClass< T, F >::MyMethod() {}" ) );
}
public void testTemplateParameterFunction() throws ParseException
{
assertEquals( "MyFunction( MyClass< T, F >& )", extract( "void MyFunction( MyClass< T, F >& p ) {}" ) );
}
public void testTemplateParameterFunctionWithoutParameterName() throws ParseException
{
assertEquals( "MyFunction( MyClass< T, F >& )", extract( "void MyFunction( MyClass< T, F >& ) {}" ) );
}
public void testPointerOnMemberParameterFunction() throws ParseException
{
assertEquals( "MyFunction( void (C::*)( char, float ) )",
extract( "void MyFunction( void (C::*M)( char, float ) ) {}" ) );
}
public void testPointerOnFunctionParameterFunction() throws ParseException
{
assertEquals( "MyFunction( void (*)( char, float ) )",
extract( "void MyFunction( void (*F)( char, float ) ) {}" ) );
}
public void testFunctionReturningPointerOnFunction() throws ParseException
{
assertEquals( "MyFunction( int )", extract( "void (*MyFunction( int ))( char, float ) {}" ) );
}
public void testParenthesisOperatorDefinition() throws ParseException
{
assertEquals( "MyClass::operator()( int )", extract( "void MyClass::operator()( int i ) {}" ) );
}
public void testFunctionInAnonymousNamespace() throws ParseException
{
assertEquals( "`anonymous-namespace'::MyFunction()", extract( "namespace { void MyFunction(); }" ) );
}
public void testMethodOfClassDefinedInFunction() throws ParseException
{
final String content = "void MyFunction() { class MyClass{ void MyMethod(); }; }";
final AstTranslationUnit root = new Parser( new StringReader( content ) ).translation_unit();
final Node node = root.jjtGetChild( 0 ).jjtGetChild( 2 ).jjtGetChild( 0 );
final String actual = (String)node.jjtAccept( new FunctionNameExtractor(), null );
// final String actual = (String)new FunctionNameExtractor().visit( node, null );
assertEquals( "MyFunction::MyClass::MyMethod()", actual );
}
public void testMethodOfClassDefinedLocally() throws ParseException
{
final String content = "void MyFunction() { { class MyClass{ void MyMethod(); }; } }";
final AstTranslationUnit root = new Parser( new StringReader( content ) ).translation_unit();
final Node node = root.jjtGetChild( 0 ).jjtGetChild( 2 ).jjtGetChild( 0 );
final String actual = (String)node.jjtAccept( new FunctionNameExtractor(), null );
// final String actual = (String)new FunctionNameExtractor().visit( node, null );
assertEquals( "MyFunction::MyClass::MyMethod()", actual );
}
}
| |
// Code generated by Wire protocol buffer compiler, do not edit.
// Source file: google/protobuf/descriptor.proto at 549:1
package com.google.protobuf;
import com.squareup.wire.FieldEncoding;
import com.squareup.wire.Message;
import com.squareup.wire.ProtoAdapter;
import com.squareup.wire.ProtoReader;
import com.squareup.wire.ProtoWriter;
import com.squareup.wire.protos.custom_options.FooBar;
import java.io.IOException;
import java.lang.Boolean;
import java.lang.Integer;
import java.lang.Object;
import java.lang.Override;
import java.lang.String;
import java.lang.StringBuilder;
import java.util.List;
import okio.ByteString;
/**
 * Options attached to a protobuf enum value, generated by the Wire protocol
 * buffer compiler from {@code google/protobuf/descriptor.proto}. Do not hand
 * edit: changes will be lost on regeneration. Instances are immutable.
 */
public final class EnumValueOptions extends Message<EnumValueOptions, EnumValueOptions.Builder> {
  // Wire adapter responsible for sizing, encoding, decoding and redacting this message.
  public static final ProtoAdapter<EnumValueOptions> ADAPTER = new ProtoAdapter<EnumValueOptions>(FieldEncoding.LENGTH_DELIMITED, EnumValueOptions.class) {
    @Override
    public int encodedSize(EnumValueOptions value) {
      // Absent (null) optional fields contribute nothing; unknown fields are counted verbatim.
      return (value.deprecated != null ? ProtoAdapter.BOOL.encodedSizeWithTag(1, value.deprecated) : 0)
          + UninterpretedOption.ADAPTER.asRepeated().encodedSizeWithTag(999, value.uninterpreted_option)
          + (value.enum_value_option != null ? ProtoAdapter.INT32.encodedSizeWithTag(70000, value.enum_value_option) : 0)
          + (value.complex_enum_value_option != null ? FooBar.More.ADAPTER.encodedSizeWithTag(70001, value.complex_enum_value_option) : 0)
          + (value.foreign_enum_value_option != null ? ProtoAdapter.BOOL.encodedSizeWithTag(70002, value.foreign_enum_value_option) : 0)
          + value.unknownFields().size();
    }
    @Override
    public void encode(ProtoWriter writer, EnumValueOptions value) throws IOException {
      // Write only present fields, then append any unknown fields byte-for-byte.
      if (value.deprecated != null) ProtoAdapter.BOOL.encodeWithTag(writer, 1, value.deprecated);
      if (value.uninterpreted_option != null) UninterpretedOption.ADAPTER.asRepeated().encodeWithTag(writer, 999, value.uninterpreted_option);
      if (value.enum_value_option != null) ProtoAdapter.INT32.encodeWithTag(writer, 70000, value.enum_value_option);
      if (value.complex_enum_value_option != null) FooBar.More.ADAPTER.encodeWithTag(writer, 70001, value.complex_enum_value_option);
      if (value.foreign_enum_value_option != null) ProtoAdapter.BOOL.encodeWithTag(writer, 70002, value.foreign_enum_value_option);
      writer.writeBytes(value.unknownFields());
    }
    @Override
    public EnumValueOptions decode(ProtoReader reader) throws IOException {
      Builder builder = new Builder();
      long token = reader.beginMessage();
      // Dispatch each wire tag to its field; unrecognized tags are preserved as unknown fields.
      for (int tag; (tag = reader.nextTag()) != -1;) {
        switch (tag) {
          case 1: builder.deprecated(ProtoAdapter.BOOL.decode(reader)); break;
          case 999: builder.uninterpreted_option.add(UninterpretedOption.ADAPTER.decode(reader)); break;
          case 70000: builder.enum_value_option(ProtoAdapter.INT32.decode(reader)); break;
          case 70001: builder.complex_enum_value_option(FooBar.More.ADAPTER.decode(reader)); break;
          case 70002: builder.foreign_enum_value_option(ProtoAdapter.BOOL.decode(reader)); break;
          default: {
            // Unknown tag: decode with the raw adapter for its encoding and keep the bytes.
            FieldEncoding fieldEncoding = reader.peekFieldEncoding();
            Object value = fieldEncoding.rawProtoAdapter().decode(reader);
            builder.addUnknownField(tag, fieldEncoding, value);
          }
        }
      }
      reader.endMessage(token);
      return builder.build();
    }
    @Override
    public EnumValueOptions redact(EnumValueOptions value) {
      // Returns a copy with redacted sub-messages scrubbed and all unknown fields dropped.
      Builder builder = value.newBuilder();
      redactElements(builder.uninterpreted_option, UninterpretedOption.ADAPTER);
      if (builder.complex_enum_value_option != null) builder.complex_enum_value_option = FooBar.More.ADAPTER.redact(builder.complex_enum_value_option);
      builder.clearUnknownFields();
      return builder.build();
    }
  };
  private static final long serialVersionUID = 0L;
  // Proto-declared default values for the scalar fields.
  public static final Boolean DEFAULT_DEPRECATED = false;
  public static final Integer DEFAULT_ENUM_VALUE_OPTION = 0;
  public static final Boolean DEFAULT_FOREIGN_ENUM_VALUE_OPTION = false;
  /**
   * Is this enum value deprecated?
   * Depending on the target platform, this can emit Deprecated annotations
   * for the enum value, or it will be completely ignored; in the very least,
   * this is a formalization for deprecating enum values.
   */
  public final Boolean deprecated;
  /**
   * The parser stores options it doesn't recognize here. See above.
   */
  public final List<UninterpretedOption> uninterpreted_option;
  /**
   * Extension source: custom_options.proto at 71:3
   */
  public final Integer enum_value_option;
  /**
   * Extension source: custom_options.proto at 72:3
   */
  public final FooBar.More complex_enum_value_option;
  /**
   * Extension source: foreign.proto at 39:3
   */
  public final Boolean foreign_enum_value_option;
  // Convenience constructor for a message with no unknown fields.
  public EnumValueOptions(Boolean deprecated, List<UninterpretedOption> uninterpreted_option, Integer enum_value_option, FooBar.More complex_enum_value_option, Boolean foreign_enum_value_option) {
    this(deprecated, uninterpreted_option, enum_value_option, complex_enum_value_option, foreign_enum_value_option, ByteString.EMPTY);
  }
  // Full constructor; the repeated field is snapshotted into an immutable copy.
  public EnumValueOptions(Boolean deprecated, List<UninterpretedOption> uninterpreted_option, Integer enum_value_option, FooBar.More complex_enum_value_option, Boolean foreign_enum_value_option, ByteString unknownFields) {
    super(unknownFields);
    this.deprecated = deprecated;
    this.uninterpreted_option = immutableCopyOf("uninterpreted_option", uninterpreted_option);
    this.enum_value_option = enum_value_option;
    this.complex_enum_value_option = complex_enum_value_option;
    this.foreign_enum_value_option = foreign_enum_value_option;
  }
  // Returns a mutable builder pre-populated with this message's field values.
  @Override
  public Builder newBuilder() {
    Builder builder = new Builder();
    builder.deprecated = deprecated;
    builder.uninterpreted_option = copyOf("uninterpreted_option", uninterpreted_option);
    builder.enum_value_option = enum_value_option;
    builder.complex_enum_value_option = complex_enum_value_option;
    builder.foreign_enum_value_option = foreign_enum_value_option;
    builder.addUnknownFields(unknownFields());
    return builder;
  }
  // Field-by-field equality, including unknown fields.
  @Override
  public boolean equals(Object other) {
    if (other == this) return true;
    if (!(other instanceof EnumValueOptions)) return false;
    EnumValueOptions o = (EnumValueOptions) other;
    return equals(unknownFields(), o.unknownFields())
        && equals(deprecated, o.deprecated)
        && equals(uninterpreted_option, o.uninterpreted_option)
        && equals(enum_value_option, o.enum_value_option)
        && equals(complex_enum_value_option, o.complex_enum_value_option)
        && equals(foreign_enum_value_option, o.foreign_enum_value_option);
  }
  // Lazily computed hash, memoized in the superclass field (0 means "not computed yet").
  @Override
  public int hashCode() {
    int result = super.hashCode;
    if (result == 0) {
      result = unknownFields().hashCode();
      result = result * 37 + (deprecated != null ? deprecated.hashCode() : 0);
      result = result * 37 + (uninterpreted_option != null ? uninterpreted_option.hashCode() : 1);
      result = result * 37 + (enum_value_option != null ? enum_value_option.hashCode() : 0);
      result = result * 37 + (complex_enum_value_option != null ? complex_enum_value_option.hashCode() : 0);
      result = result * 37 + (foreign_enum_value_option != null ? foreign_enum_value_option.hashCode() : 0);
      super.hashCode = result;
    }
    return result;
  }
  // Renders present fields only; the leading ", " is replaced by the type name prefix.
  @Override
  public String toString() {
    StringBuilder builder = new StringBuilder();
    if (deprecated != null) builder.append(", deprecated=").append(deprecated);
    if (uninterpreted_option != null) builder.append(", uninterpreted_option=").append(uninterpreted_option);
    if (enum_value_option != null) builder.append(", enum_value_option=").append(enum_value_option);
    if (complex_enum_value_option != null) builder.append(", complex_enum_value_option=").append(complex_enum_value_option);
    if (foreign_enum_value_option != null) builder.append(", foreign_enum_value_option=").append(foreign_enum_value_option);
    return builder.replace(0, 2, "EnumValueOptions{").append('}').toString();
  }
  // Mutable companion used to construct or modify EnumValueOptions instances.
  public static final class Builder extends Message.Builder<EnumValueOptions, Builder> {
    public Boolean deprecated;
    public List<UninterpretedOption> uninterpreted_option;
    public Integer enum_value_option;
    public FooBar.More complex_enum_value_option;
    public Boolean foreign_enum_value_option;
    public Builder() {
      // The repeated field starts as an empty mutable list so decode() can add to it.
      uninterpreted_option = newMutableList();
    }
    /**
     * Is this enum value deprecated?
     * Depending on the target platform, this can emit Deprecated annotations
     * for the enum value, or it will be completely ignored; in the very least,
     * this is a formalization for deprecating enum values.
     */
    public Builder deprecated(Boolean deprecated) {
      this.deprecated = deprecated;
      return this;
    }
    /**
     * The parser stores options it doesn't recognize here. See above.
     */
    public Builder uninterpreted_option(List<UninterpretedOption> uninterpreted_option) {
      checkElementsNotNull(uninterpreted_option);
      this.uninterpreted_option = uninterpreted_option;
      return this;
    }
    public Builder enum_value_option(Integer enum_value_option) {
      this.enum_value_option = enum_value_option;
      return this;
    }
    public Builder complex_enum_value_option(FooBar.More complex_enum_value_option) {
      this.complex_enum_value_option = complex_enum_value_option;
      return this;
    }
    public Builder foreign_enum_value_option(Boolean foreign_enum_value_option) {
      this.foreign_enum_value_option = foreign_enum_value_option;
      return this;
    }
    @Override
    public EnumValueOptions build() {
      return new EnumValueOptions(deprecated, uninterpreted_option, enum_value_option, complex_enum_value_option, foreign_enum_value_option, buildUnknownFields());
    }
  }
}
| |
/* Copyright (C) 2001, 2007 United States Government as represented by
the Administrator of the National Aeronautics and Space Administration.
All Rights Reserved.
*/
package gov.nasa.worldwind.layers.rpf.wizard;
import javax.swing.*;
import javax.swing.border.EmptyBorder;
import java.awt.*;
import java.awt.event.*;
import java.util.Collection;
import java.util.Map;
import java.util.HashMap;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeEvent;
/**
* @author dcollins
* @version $Id: DataChooserPanel.java 4856 2008-03-28 23:41:31Z dcollins $
*/
/**
 * Wizard panel that lets the user pick which {@link FileSet}s to process.
 * Each file set is rendered as a checkbox; the panel keeps checkbox state and
 * {@link FileSet#isSelected()} in sync in both directions via item listeners
 * and property-change events, and offers "Select All" / "Select None" buttons.
 */
public class DataChooserPanel extends JPanel
{
    private JLabel title;
    private JLabel description;
    // Logical data components.
    private Collection<FileSet> fileSetList;
    private PropertyEvents propertyEvents;
    // Maps each file set to the checkbox that displays it, so selection changes
    // on the model side can be pushed back into the UI.
    private Map<FileSet, JToggleButton> selectButtons;
    // Data UI components.
    private JComponent dataPanel;
    private JButton selectAllButton;
    private JButton deselectAllButton;
    private JScrollPane dataScrollPane;
    private JLabel dataDescription;
    public DataChooserPanel()
    {
        this.propertyEvents = new PropertyEvents();
        makeComponents();
        layoutComponents();
    }
    /** @return the header text shown at the top of the panel. */
    public String getTitle()
    {
        return this.title.getText();
    }
    /** Sets the header text shown at the top of the panel. */
    public void setTitle(String title)
    {
        this.title.setText(title);
    }
    /** @return the descriptive text shown under the header. */
    public String getDescription()
    {
        return this.description.getText();
    }
    /** Sets the descriptive text shown under the header. */
    public void setDescription(String description)
    {
        this.description.setText(description);
    }
    /** @return the status line shown under the file-set list. */
    public String getDataDescription()
    {
        return this.dataDescription.getText();
    }
    /** Sets the status line shown under the file-set list. */
    public void setDataDescription(String dataDescription)
    {
        this.dataDescription.setText(dataDescription);
    }
    /**
     * Replaces the displayed file sets. Listeners are moved from the old
     * collection to the new one, one checkbox is built per set (initialized
     * from the set's current selection state), and the data area is hidden
     * entirely when the collection is null or empty.
     */
    public void setFileSetList(Collection<FileSet> fileSetList)
    {
        removeListeners(this.fileSetList);
        this.fileSetList = fileSetList;
        addListeners(this.fileSetList);
        this.selectButtons = new HashMap<FileSet, JToggleButton>();
        if (fileSetList != null && fileSetList.size() > 0)
        {
            Box box = Box.createVerticalBox();
            for (FileSet set : fileSetList)
            {
                JCheckBox checkBox = new JCheckBox();
                // Stash the model object on the component so the item listener
                // can recover it in fileSetClicked().
                checkBox.putClientProperty("fileSet", set);
                checkBox.setSelected(set.isSelected());
                checkBox.setText(makeTitle(set));
                checkBox.setAlignmentX(Component.LEFT_ALIGNMENT);
                checkBox.addItemListener(new ItemListener() {
                    public void itemStateChanged(ItemEvent e) {
                        fileSetClicked(e);
                    }
                });
                box.add(checkBox);
                this.selectButtons.put(set, checkBox);
            }
            this.dataScrollPane.setViewportView(box);
            this.dataPanel.setVisible(true);
        }
        else
        {
            this.dataScrollPane.setViewportView(null);
            this.dataPanel.setVisible(false);
        }
        this.dataPanel.validate();
        // Refresh the enabled state of the Select All / Select None buttons.
        fileSetSelectionChanged(null);
    }
    /**
     * Builds the HTML checkbox label for a file set: title (falling back to
     * identifier, then "Various") plus a small-font file count suffix.
     * Returns null when the set itself is null.
     */
    private String makeTitle(FileSet set)
    {
        String title = null;
        if (set != null)
        {
            StringBuilder sb = new StringBuilder();
            sb.append("<html>");
            if (set.getTitle() != null)
                sb.append(set.getTitle());
            else if (set.getIdentifier() != null)
                sb.append(set.getIdentifier());
            else
                sb.append("Various");
            int fileCount = set.getFileCount();
            if (fileCount > 0)
            {
                sb.append("<font size=\"-2\">");
                // NOTE(review): "<html>" and a title have always been appended by this
                // point, so this length check is always true — presumably it was meant
                // to test whether a title preceded the count; confirm intent.
                if (sb.length() > 0)
                    sb.append(" - ");
                sb.append(String.format("%,d", fileCount)).append(" file").append(fileCount > 1 ? "s" : "");
                sb.append("</font>");
            }
            sb.append("</html>");
            title = sb.toString();
        }
        return title;
    }
    /**
     * Item-listener callback for a file-set checkbox: propagates the checkbox
     * state into the FileSet stored in the component's "fileSet" client property.
     */
    private void fileSetClicked(ItemEvent e)
    {
        if (e != null)
        {
            FileSet set = null;
            if (e.getItem() != null && e.getItem() instanceof JComponent)
            {
                Object property = ((JComponent) e.getItem()).getClientProperty("fileSet");
                if (property != null && property instanceof FileSet)
                    set = (FileSet) property;
            }
            if (set != null)
            {
                if (e.getStateChange() == ItemEvent.SELECTED)
                    set.setSelected(true);
                else if (e.getStateChange() == ItemEvent.DESELECTED)
                    set.setSelected(false);
            }
        }
    }
    /**
     * Sets the selection state of every file set; the UI follows via the
     * FileSet property-change events.
     */
    private void setAllSelected(boolean b)
    {
        if (this.fileSetList != null)
        {
            for (FileSet set : this.fileSetList)
            {
                set.setSelected(b);
            }
        }
    }
    /** Action handler for the "Select All" button. */
    private void selectAllPressed()
    {
        setAllSelected(true);
    }
    /** Action handler for the "Select None" button. */
    private void deselectAllPressed()
    {
        setAllSelected(false);
    }
    /**
     * Called when a FileSet's selection changed (source is the FileSet) or when
     * the list itself changed (source is null). Syncs the matching checkbox and
     * recomputes the enabled state of the bulk-selection buttons.
     */
    private void fileSetSelectionChanged(Object source)
    {
        // Make sure the CheckBox selection reflects the FileSet selection state.
        if (source != null && source instanceof FileSet)
        {
            FileSet set = (FileSet) source;
            JToggleButton button = this.selectButtons.get(set);
            if (button != null)
                button.setSelected(set.isSelected());
        }
        // Enable "Select All" and "Select None" only when necessary.
        boolean allSelected = true;
        boolean anySelected = false;
        if (this.fileSetList != null)
        {
            for (FileSet set : this.fileSetList)
            {
                allSelected &= set.isSelected();
                anySelected |= set.isSelected();
            }
        }
        this.selectAllButton.setEnabled(!allSelected);
        this.deselectAllButton.setEnabled(anySelected);
    }
    /** Subscribes this panel's property listener to every set in the collection. */
    private void addListeners(Collection<FileSet> fileSetList)
    {
        if (fileSetList != null)
        {
            for (FileSet set : fileSetList)
            {
                set.addPropertyChangeListener(this.propertyEvents);
            }
        }
    }
    /** Unsubscribes this panel's property listener from every set in the collection. */
    private void removeListeners(Collection<FileSet> fileSetList)
    {
        if (fileSetList != null)
        {
            for (FileSet set : fileSetList)
            {
                set.removePropertyChangeListener(this.propertyEvents);
            }
        }
    }
    /**
     * Relays FileSet SELECTED property changes into the UI and re-fires them
     * as property changes of this panel so wizard pages can react.
     */
    private class PropertyEvents implements PropertyChangeListener {
        public void propertyChange(PropertyChangeEvent evt) {
            if (evt != null && evt.getPropertyName() != null) {
                String propertyName = evt.getPropertyName();
                if (propertyName.equals(FileSet.SELECTED)) {
                    fileSetSelectionChanged(evt.getSource());
                    firePropertyChange(propertyName, null, evt.getSource());
                }
            }
        }
    }
    /** Instantiates all child components and wires their action listeners. */
    private void makeComponents()
    {
        this.title = new JLabel(" ");
        this.title.setBackground(Color.gray);
        this.title.setOpaque(true);
        this.description = new JLabel();
        this.dataPanel = Box.createVerticalBox();
        this.selectAllButton = new JButton("Select All");
        this.deselectAllButton = new JButton("Select None");
        // Shrink the bulk-selection buttons so they read as auxiliary controls.
        Font font = this.selectAllButton.getFont();
        font = new Font(font.getName(), font.getStyle(), 9);
        Dimension size = new Dimension(35, 20);
        this.selectAllButton.setFont(font);
        this.selectAllButton.setPreferredSize(size);
        this.selectAllButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                selectAllPressed();
            }
        });
        this.deselectAllButton.setFont(font);
        this.deselectAllButton.setPreferredSize(size);
        this.deselectAllButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                deselectAllPressed();
            }
        });
        this.dataScrollPane = new JScrollPane();
        this.dataScrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
        // Let the scroll pane grow vertically as far as the layout allows.
        size = this.dataScrollPane.getPreferredSize();
        this.dataScrollPane.setPreferredSize(new Dimension(size.width, Short.MAX_VALUE));
        this.dataScrollPane.setOpaque(false);
        this.dataScrollPane.getViewport().setOpaque(false);
        this.dataDescription = new JLabel(" ");
    }
    /** Arranges components: gray title bar on top, description + data area in the center. */
    private void layoutComponents()
    {
        setLayout(new BorderLayout());
        JPanel p = new JPanel();
        p.setLayout(new BorderLayout());
        p.setBackground(this.title.getBackground());
        this.title.setBorder(new EmptyBorder(10, 10, 10, 10));
        p.add(this.title, BorderLayout.WEST);
        p.add(new JSeparator(), BorderLayout.SOUTH);
        add(p, BorderLayout.NORTH);
        Box b = Box.createVerticalBox();
        b.setBorder(new EmptyBorder(10, 10, 10, 10));
        this.description.setAlignmentX(Component.LEFT_ALIGNMENT);
        b.add(this.description);
        b.add(Box.createVerticalStrut(10));
        this.dataPanel.setAlignmentX(Component.LEFT_ALIGNMENT);
        // Toolbar row: Select All | separator | Select None, left-aligned.
        Box b2 = Box.createHorizontalBox();
        b2.setAlignmentX(Component.LEFT_ALIGNMENT);
        b2.add(this.selectAllButton);
        b2.add(Box.createHorizontalStrut(2));
        b2.add(new JSeparator(SwingConstants.VERTICAL));
        b2.add(Box.createHorizontalStrut(2));
        b2.add(this.deselectAllButton);
        b2.add(Box.createHorizontalStrut(Short.MAX_VALUE));
        this.dataPanel.add(b2);
        this.dataPanel.add(Box.createVerticalStrut(5));
        this.dataScrollPane.setAlignmentX(Component.LEFT_ALIGNMENT);
        this.dataPanel.add(this.dataScrollPane);
        b.add(this.dataPanel);
        b.add(Box.createVerticalStrut(5));
        // Hidden until setFileSetList() supplies data.
        this.dataPanel.setVisible(false);
        this.dataDescription.setAlignmentX(Component.LEFT_ALIGNMENT);
        b.add(this.dataDescription);
        add(b, BorderLayout.CENTER);
    }
}
| |
package com.compomics.peptizer.util.datatools.implementations.pride;
import com.compomics.peptizer.util.datatools.interfaces.PeptizerFragmentIon;
import com.compomics.peptizer.util.enumerator.IonTypeEnum;
import com.compomics.peptizer.util.enumerator.SearchEngineEnum;
import com.compomics.util.gui.interfaces.SpectrumAnnotation;
import org.apache.log4j.Logger;
import uk.ac.ebi.pride.jaxb.model.CvParam;
import uk.ac.ebi.pride.jaxb.model.FragmentIon;
import java.awt.*;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
/**
* Created by IntelliJ IDEA.
* User: vaudel
* Date: 19.07.2010
* Time: 13:25:49
* To change this template use File | Settings | File Templates.
*/
/**
 * Wraps a PRIDE XML fragment ion as a Peptizer fragment ion. The PRIDE CV
 * params of the element are scanned once in the constructor to extract
 * intensity, m/z, mass error, fragment number and ion type.
 */
public class PrideFragmentIon extends PeptizerFragmentIon implements SpectrumAnnotation, Serializable {
    // Class specific log4j logger for PrideFragmentIon instances.
    private static Logger logger = Logger.getLogger(PrideFragmentIon.class);
    // PRIDE CV accessions of ion-series terms whose cvParam value is the fragment number.
    private static final Map<String, IonTypeEnum> NUMBERED_ION_TYPES = new HashMap<String, IonTypeEnum>();
    // PRIDE CV accessions of ions without a fragment number (precursor and immonium ions).
    private static final Map<String, IonTypeEnum> PLAIN_ION_TYPES = new HashMap<String, IonTypeEnum>();
    static {
        NUMBERED_ION_TYPES.put("PRIDE:0000233", IonTypeEnum.a);
        NUMBERED_ION_TYPES.put("PRIDE:0000234", IonTypeEnum.aH2O);
        NUMBERED_ION_TYPES.put("PRIDE:0000235", IonTypeEnum.aNH3);
        NUMBERED_ION_TYPES.put("PRIDE:0000194", IonTypeEnum.b);
        NUMBERED_ION_TYPES.put("PRIDE:0000196", IonTypeEnum.bH2O);
        NUMBERED_ION_TYPES.put("PRIDE:0000195", IonTypeEnum.bNH3);
        NUMBERED_ION_TYPES.put("PRIDE:0000236", IonTypeEnum.c);
        NUMBERED_ION_TYPES.put("PRIDE:0000227", IonTypeEnum.x);
        NUMBERED_ION_TYPES.put("PRIDE:0000193", IonTypeEnum.y);
        NUMBERED_ION_TYPES.put("PRIDE:0000197", IonTypeEnum.yH2O);
        NUMBERED_ION_TYPES.put("PRIDE:0000198", IonTypeEnum.yNH3);
        NUMBERED_ION_TYPES.put("PRIDE:0000230", IonTypeEnum.z);
        PLAIN_ION_TYPES.put("PRIDE:0000263", IonTypeEnum.MH);
        PLAIN_ION_TYPES.put("PRIDE:0000261", IonTypeEnum.MHH2O);
        PLAIN_ION_TYPES.put("PRIDE:0000262", IonTypeEnum.MHNH3);
        PLAIN_ION_TYPES.put("PRIDE:0000239", IonTypeEnum.immonium);
        PLAIN_ION_TYPES.put("PRIDE:0000240", IonTypeEnum.immoniumA);
        PLAIN_ION_TYPES.put("PRIDE:0000241", IonTypeEnum.immoniumC);
        PLAIN_ION_TYPES.put("PRIDE:0000242", IonTypeEnum.immoniumD);
        PLAIN_ION_TYPES.put("PRIDE:0000243", IonTypeEnum.immoniumE);
        PLAIN_ION_TYPES.put("PRIDE:0000244", IonTypeEnum.immoniumF);
        PLAIN_ION_TYPES.put("PRIDE:0000245", IonTypeEnum.immoniumG);
        PLAIN_ION_TYPES.put("PRIDE:0000246", IonTypeEnum.immoniumH);
        PLAIN_ION_TYPES.put("PRIDE:0000247", IonTypeEnum.immoniumI);
        PLAIN_ION_TYPES.put("PRIDE:0000248", IonTypeEnum.immoniumK);
        PLAIN_ION_TYPES.put("PRIDE:0000249", IonTypeEnum.immoniumL);
        PLAIN_ION_TYPES.put("PRIDE:0000250", IonTypeEnum.immoniumM);
        PLAIN_ION_TYPES.put("PRIDE:0000251", IonTypeEnum.immoniumN);
        PLAIN_ION_TYPES.put("PRIDE:0000252", IonTypeEnum.immoniumP);
        PLAIN_ION_TYPES.put("PRIDE:0000253", IonTypeEnum.immoniumQ);
        PLAIN_ION_TYPES.put("PRIDE:0000254", IonTypeEnum.immoniumR);
        PLAIN_ION_TYPES.put("PRIDE:0000255", IonTypeEnum.immoniumS);
        PLAIN_ION_TYPES.put("PRIDE:0000256", IonTypeEnum.immoniumT);
        PLAIN_ION_TYPES.put("PRIDE:0000257", IonTypeEnum.immoniumV);
        PLAIN_ION_TYPES.put("PRIDE:0000258", IonTypeEnum.immoniumW);
        PLAIN_ION_TYPES.put("PRIDE:0000259", IonTypeEnum.immoniumY);
    }
    private FragmentIon originalFragmentIon; // source JAXB element, retained for reference
    private SearchEngineEnum searchEngine;
    private double intensity;    // PRIDE:0000189
    private double mz;           // PRIDE:0000188
    private int number;          // fragment number, for ion-series terms
    private IonTypeEnum ionType; // stays null if no recognized CV term is present
    private double errorMargin;  // PRIDE:0000190
    /**
     * Scans the fragment ion's CV params for intensity, mass error, m/z and
     * ion type. Ion-series terms also carry the fragment number in their value;
     * any unrecognized accession containing "ion" is mapped to
     * {@link IonTypeEnum#other} with its value parsed as the number.
     *
     * @param fragmentIon  the PRIDE JAXB fragment ion to wrap
     * @param searchEngine the search engine that produced this identification
     */
    public PrideFragmentIon(FragmentIon fragmentIon, SearchEngineEnum searchEngine) {
        this.searchEngine = searchEngine;
        this.originalFragmentIon = fragmentIon;
        for (CvParam cvParam : originalFragmentIon.getCvParam()) {
            String accession = cvParam.getAccession();
            if (accession.equals("PRIDE:0000189")) {
                intensity = Double.parseDouble(cvParam.getValue());
            } else if (accession.equals("PRIDE:0000190")) {
                errorMargin = Double.parseDouble(cvParam.getValue());
            } else if (accession.equals("PRIDE:0000188")) {
                mz = Double.parseDouble(cvParam.getValue());
            } else if (NUMBERED_ION_TYPES.containsKey(accession)) {
                number = Integer.parseInt(cvParam.getValue());
                ionType = NUMBERED_ION_TYPES.get(accession);
            } else if (PLAIN_ION_TYPES.containsKey(accession)) {
                ionType = PLAIN_ION_TYPES.get(accession);
            } else if (accession.contains("ion")) {
                // Unknown ion term: keep the reported number but flag the type as "other".
                number = Integer.parseInt(cvParam.getValue());
                ionType = IonTypeEnum.other;
            }
        }
    }
    @Override
    public SearchEngineEnum getSearchEngineEnum() {
        return searchEngine;
    }
    @Override
    public double getIntensity() {
        return intensity;
    }
    @Override
    public int getNumber() {
        return number;
    }
    @Override
    public IonTypeEnum getType() {
        return ionType;
    }
    @Override
    public double getMZ() {
        return mz;
    }
    @Override
    public double getErrorMargin() {
        return errorMargin;
    }
    /**
     * Returns the display color for this ion's series in spectrum annotations.
     */
    @Override
    public Color getColor() {
        switch (ionType) {
            // A ion
            case a:
            case aH2O:
            case aNH3:
                return new Color(153, 0, 0);
            // B ion
            case b:
            case bH2O:
            case bNH3:
                return new Color(0, 0, 255);
            // C ion
            case c:
                return new Color(188, 0, 255);
            // X ion
            case x:
                return new Color(78, 200, 0);
            // Y ion
            case y:
            case yH2O:
            case yNH3:
                return new Color(0, 0, 0);
            // Z ion
            case z:
                return new Color(255, 140, 0);
            // Parent ion
            case MH:
                return Color.red;
            // Immonium ion
            case immonium:
                return Color.gray;
            // Unknown ion
            default:
                return new Color(150, 150, 150);
        }
    }
    /**
     * Returns the annotation label, e.g. "b2".
     * NOTE(review): if no recognized CV term was found, ionType is null and
     * this throws a NullPointerException — confirm callers guard against that.
     */
    @Override
    public String getLabel() {
        return ionType.getName() + number;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import org.apache.ignite.*;
import org.apache.ignite.cache.affinity.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.spi.discovery.tcp.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*;
import org.apache.ignite.testframework.junits.common.*;
import org.apache.ignite.transactions.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
import static org.apache.ignite.transactions.TransactionConcurrency.*;
import static org.apache.ignite.transactions.TransactionIsolation.*;
/**
* Tests for local transactions.
*/
@SuppressWarnings( {"BusyWait"})
abstract class IgniteTxAbstractTest extends GridCommonAbstractTest {
/** Random number generator. */
private static final Random RAND = new Random();
/** Execution count. */
private static final AtomicInteger cntr = new AtomicInteger();
/** */
private static final TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);
    /**
     * Constructor: grids are not started here; they are started explicitly
     * in {@code beforeTestsStarted()}.
     */
    protected IgniteTxAbstractTest() {
        super(false /*start grid. */);
    }
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
IgniteConfiguration c = super.getConfiguration(gridName);
TcpDiscoverySpi disco = new TcpDiscoverySpi();
disco.setIpFinder(ipFinder);
c.setDiscoverySpi(disco);
return c;
}
    /**
     * @return Number of grid nodes to start for this test class.
     */
    protected abstract int gridCount();
    /**
     * @return Number of random keys generated per transaction iteration.
     */
    protected abstract int keyCount();
    /**
     * @return Maximum key value; keys are drawn from [1, maxKeyValue()].
     */
    protected abstract int maxKeyValue();
    /**
     * @return Number of transaction iterations each check method performs.
     */
    protected abstract int iterations();
    /**
     * @return True if in-test debug logging (via {@code debug(String)}) is enabled.
     */
    protected abstract boolean isTestDebug();
    /**
     * @return {@code True} if memory stats should be printed.
     */
    protected abstract boolean printMemoryStats();
    /**
     * Logs a message via {@code info(String)} only when in-test debugging is
     * enabled. (Note: this is a private helper, not an override.)
     *
     * @param msg Message to log.
     */
    private void debug(String msg) {
        if (isTestDebug())
            info(msg);
    }
/**
* @throws Exception If failed.
*/
@Override protected void beforeTestsStarted() throws Exception {
for (int i = 0; i < gridCount(); i++)
startGrid(i);
}
    /**
     * Stops all grid nodes started in {@code beforeTestsStarted()}.
     *
     * @throws Exception If failed.
     */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids();
    }
/**
* @return Keys.
*/
protected Iterable<Integer> getKeys() {
List<Integer> keys = new ArrayList<>(keyCount());
for (int i = 0; i < keyCount(); i++)
keys.add(RAND.nextInt(maxKeyValue()) + 1);
Collections.sort(keys);
return Collections.unmodifiableList(keys);
}
/**
* @return Random cache operation.
*/
protected OP getOp() {
switch (RAND.nextInt(3)) {
case 0: { return OP.READ; }
case 1: { return OP.WRITE; }
case 2: { return OP.REMOVE; }
// Should never be reached.
default: { assert false; return null; }
}
}
/**
* @param concurrency Concurrency.
* @param isolation Isolation.
* @throws Exception If check failed.
*/
protected void checkCommit(TransactionConcurrency concurrency, TransactionIsolation isolation) throws Exception {
int gridIdx = RAND.nextInt(gridCount());
Ignite ignite = grid(gridIdx);
if (isTestDebug())
debug("Checking commit on grid: " + ignite.cluster().localNode().id());
for (int i = 0; i < iterations(); i++) {
IgniteCache<Integer, String> cache = jcache(gridIdx);
Transaction tx = ignite(gridIdx).transactions().txStart(concurrency, isolation, 0, 0);
try {
int prevKey = -1;
for (Integer key : getKeys()) {
// Make sure we have the same locking order for all concurrent transactions.
assert key >= prevKey : "key: " + key + ", prevKey: " + prevKey;
if (isTestDebug()) {
AffinityFunction aff = cache.getConfiguration(CacheConfiguration.class).getAffinity();
int part = aff.partition(key);
debug("Key affinity [key=" + key + ", partition=" + part + ", affinity=" +
U.toShortString(ignite(gridIdx).affinity(null).mapPartitionToPrimaryAndBackups(part)) + ']');
}
String val = Integer.toString(key);
switch (getOp()) {
case READ: {
if (isTestDebug())
debug("Reading key [key=" + key + ", i=" + i + ']');
val = cache.get(key);
if (isTestDebug())
debug("Read value for key [key=" + key + ", val=" + val + ']');
break;
}
case WRITE: {
if (isTestDebug())
debug("Writing key and value [key=" + key + ", val=" + val + ", i=" + i + ']');
cache.put(key, val);
break;
}
case REMOVE: {
if (isTestDebug())
debug("Removing key [key=" + key + ", i=" + i + ']');
cache.remove(key);
break;
}
default: { assert false; }
}
}
tx.commit();
if (isTestDebug())
debug("Committed transaction [i=" + i + ", tx=" + tx + ']');
}
catch (TransactionOptimisticException e) {
if (concurrency != OPTIMISTIC || isolation != SERIALIZABLE) {
error("Received invalid optimistic failure.", e);
throw e;
}
if (isTestDebug())
info("Optimistic transaction failure (will rollback) [i=" + i + ", msg=" + e.getMessage() +
", tx=" + tx.xid() + ']');
try {
tx.rollback();
}
catch (IgniteException ex) {
error("Failed to rollback optimistic failure: " + tx, ex);
throw ex;
}
}
catch (Exception e) {
error("Transaction failed (will rollback): " + tx, e);
tx.rollback();
throw e;
}
catch (Error e) {
error("Error when executing transaction (will rollback): " + tx, e);
tx.rollback();
throw e;
}
finally {
Transaction t = ignite(gridIdx).transactions().tx();
assert t == null : "Thread should not have transaction upon completion ['t==tx'=" + (t == tx) +
", t=" + t + (t != tx ? "tx=" + tx : "tx=''") + ']';
}
}
if (printMemoryStats()) {
if (cntr.getAndIncrement() % 100 == 0)
// Print transaction memory stats.
((IgniteKernal)grid(gridIdx)).internalCache().context().tm().printMemoryStats();
}
}
    /**
     * Convenience overload of {@code checkRollback(ConcurrentMap, ...)} that
     * starts from an empty tracking map.
     *
     * @param concurrency Concurrency.
     * @param isolation Isolation.
     * @throws Exception If check failed.
     */
    protected void checkRollback(TransactionConcurrency concurrency, TransactionIsolation isolation)
        throws Exception {
        checkRollback(new ConcurrentHashMap<Integer, String>(), concurrency, isolation);
    }
    /**
     * Runs {@code iterations()} transactions of random READ/WRITE/REMOVE
     * operations and rolls each one back, verifying via {@code checkMap} that
     * values observed inside the transactions stay consistent with previously
     * recorded ones.
     *
     * @param map Map to check.
     * @param concurrency Concurrency.
     * @param isolation Isolation.
     * @throws Exception If check failed.
     */
    protected void checkRollback(ConcurrentMap<Integer, String> map, TransactionConcurrency concurrency,
        TransactionIsolation isolation) throws Exception {
        int gridIdx = RAND.nextInt(gridCount());
        Ignite ignite = grid(gridIdx);
        if (isTestDebug())
            debug("Checking commit on grid: " + ignite.cluster().localNode().id());
        for (int i = 0; i < iterations(); i++) {
            IgniteCache<Integer, String> cache = jcache(gridIdx);
            // Zero timeout and zero tx size: unlimited.
            Transaction tx = ignite(gridIdx).transactions().txStart(concurrency, isolation, 0, 0);
            try {
                for (Integer key : getKeys()) {
                    if (isTestDebug()) {
                        AffinityFunction aff = cache.getConfiguration(CacheConfiguration.class).getAffinity();
                        int part = aff.partition(key);
                        debug("Key affinity [key=" + key + ", partition=" + part + ", affinity=" +
                            U.toShortString(ignite(gridIdx).affinity(null).mapPartitionToPrimaryAndBackups(part)) + ']');
                    }
                    String val = Integer.toString(key);
                    switch (getOp()) {
                        case READ: {
                            debug("Reading key: " + key);
                            // Each observed value must agree with what was recorded before.
                            checkMap(map, key, cache.get(key));
                            break;
                        }
                        case WRITE: {
                            debug("Writing key and value [key=" + key + ", val=" + val + ']');
                            checkMap(map, key, cache.getAndPut(key, val));
                            break;
                        }
                        case REMOVE: {
                            debug("Removing key: " + key);
                            checkMap(map, key, cache.getAndRemove(key));
                            break;
                        }
                        default: { assert false; }
                    }
                }
                // Intentionally roll back: none of the writes above should persist.
                tx.rollback();
                debug("Rolled back transaction: " + tx);
            }
            catch (TransactionOptimisticException e) {
                tx.rollback();
                log.warning("Rolled back transaction due to optimistic exception [tx=" + tx + ", e=" + e + ']');
                throw e;
            }
            catch (Exception e) {
                tx.rollback();
                error("Rolled back transaction due to exception [tx=" + tx + ", e=" + e + ']');
                throw e;
            }
            finally {
                // The thread must not hold a transaction once the iteration completes.
                Transaction t1 = ignite(gridIdx).transactions().tx();
                debug("t1=" + t1);
                assert t1 == null : "Thread should not have transaction upon completion ['t==tx'=" + (t1 == tx) +
                    ", t=" + t1 + ']';
            }
        }
    }
/**
 * Validates an observed cache value against the expectation map: records the
 * value if the key has not been seen yet, otherwise asserts it matches the
 * previously recorded value. A {@code null} value is ignored.
 *
 * @param map Map to check against.
 * @param key Key.
 * @param val Value.
 */
private void checkMap(ConcurrentMap<Integer, String> map, Integer key, String val) {
    if (val == null)
        return;

    String prev = map.putIfAbsent(key, val);

    assert prev == null || prev.equals(val);
}
/**
 * Checks integrity of all caches after tests: every grid must observe the same
 * value for every key, and no thread may be left with an open transaction.
 * Each per-key check is retried up to 3 times with a 500 ms pause to let
 * in-flight transactions complete. Finally, all caches are cleared.
 *
 * @throws Exception If check failed.
 */
@SuppressWarnings({"ErrorNotRethrown"})
protected void finalChecks() throws Exception {
    for (int i = 1; i <= maxKeyValue(); i++) {
        for (int k = 0; k < 3; k++) {
            try {
                String v1 = null;

                for (int j = 0; j < gridCount(); j++) {
                    IgniteCache<Integer, String> cache = jcache(j);

                    Transaction tx = ignite(j).transactions().tx();

                    assertNull("Transaction is not completed: " + tx, tx);

                    // Grid 0 establishes the reference value; all others must agree.
                    if (j == 0) {
                        v1 = cache.get(i);
                    }
                    else {
                        String v2 = cache.get(i);

                        // On mismatch, re-read both once before failing — the value
                        // may have changed between the two reads.
                        if (!F.eq(v2, v1)) {
                            v1 = this.<Integer, String>jcache(0).get(i);
                            v2 = cache.get(i);
                        }

                        assert F.eq(v2, v1) :
                            "Invalid cached value [key=" + i + ", v1=" + v1 + ", v2=" + v2 + ", grid=" + j + ']';
                    }
                }

                break;
            }
            catch (AssertionError e) {
                if (k == 2)
                    throw e;
                else
                    // Wait for transactions to complete.
                    Thread.sleep(500);
            }
        }
    }

    // Clean-up pass: clear every cache, with the same 3-attempt retry scheme.
    for (int i = 1; i <= maxKeyValue(); i++) {
        for (int k = 0; k < 3; k++) {
            try {
                for (int j = 0; j < gridCount(); j++) {
                    IgniteCache<Integer, String> cache = jcache(j);

                    cache.removeAll();

                    // assert cache.keySet().isEmpty() : "Cache is not empty: " + cache.entrySet();
                }

                break;
            }
            catch (AssertionError e) {
                if (k == 2)
                    throw e;
                else
                    // Wait for transactions to complete.
                    Thread.sleep(500);
            }
        }
    }
}
/**
 * Cache operation performed for every key within a test transaction.
 */
protected enum OP {
    /** Cache read ({@code get}). */
    READ,

    /** Cache write ({@code getAndPut}). */
    WRITE,

    /** Cache remove ({@code getAndRemove}). */
    REMOVE
}
}
| |
package com.platypii.baseline.views.tracks;
import com.platypii.baseline.Intents;
import com.platypii.baseline.R;
import com.platypii.baseline.Services;
import com.platypii.baseline.cloud.AuthState;
import com.platypii.baseline.databinding.ActivityTrackRemoteBinding;
import com.platypii.baseline.events.SyncEvent;
import com.platypii.baseline.tracks.TrackData;
import com.platypii.baseline.tracks.TrackMetadata;
import com.platypii.baseline.util.ABundle;
import com.platypii.baseline.util.Exceptions;
import com.platypii.baseline.views.charts.ChartStatsFragment;
import com.platypii.baseline.views.charts.ChartsFragment;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import androidx.fragment.app.FragmentManager;
import androidx.fragment.app.FragmentTransaction;
import java.io.File;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;
/**
 * Activity showing a track stored in the cloud: downloads the track file if
 * needed, renders chart fragments, and supports opening/deleting the track.
 */
public class TrackRemoteActivity extends TrackDataActivity implements DialogInterface.OnClickListener {
    private static final String TAG = "TrackRemoteActivity";

    private ActivityTrackRemoteBinding binding;

    @Nullable
    private AlertDialog deleteConfirmation;

    // Track metadata loaded from intent extras in onCreate(); remains null if loading failed.
    private TrackMetadata track;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        binding = ActivityTrackRemoteBinding.inflate(getLayoutInflater());
        setContentView(binding.getRoot());

        // Load track from extras
        try {
            loadTrack();
            // Setup button listeners (only reached if loadTrack() succeeded, so track != null here)
            binding.buttons.open.setOnClickListener(this::clickOpen);
            binding.buttons.earth.setOnClickListener(this::clickKml);
            binding.buttons.delete.setOnClickListener(this::clickDelete);
            setupMenu();
        } catch (IllegalStateException e) {
            Exceptions.report(e);
            finish();
        }
    }

    /**
     * Check if track file exists, and download or load charts
     */
    private void loadTrack() {
        track = TrackLoader.loadCloudData(getIntent().getExtras());
        final File trackFile = track.abbrvFile(this);
        if (trackFile.exists()) {
            loadCharts(trackFile);
        } else {
            // File not downloaded to device, start TrackDownloadFragment
            final TrackDownloadFragment downloadFrag = new TrackDownloadFragment();
            downloadFrag.setArguments(TrackLoader.trackBundle(track));
            downloadFrag.trackFile.thenAccept(this::loadCharts);
            // downloadFrag.trackFile.exceptionally(error -> {
            //     // TODO: Show download error
            // });
            getSupportFragmentManager()
                    .beginTransaction()
                    .replace(R.id.charts, downloadFrag)
                    .setTransition(FragmentTransaction.TRANSIT_FRAGMENT_OPEN)
                    .commit();
        }
    }

    /**
     * Load chart fragments
     */
    private void loadCharts(@NonNull File trackFile) {
        // Load track data async
        new Thread(() -> trackData.complete(new TrackData(trackFile))).start();
        // Load fragments
        final FragmentManager fm = getSupportFragmentManager();
        final Fragment charts = new ChartsFragment();
        charts.setArguments(TrackLoader.trackBundle(trackFile));
        fm.beginTransaction()
                .replace(R.id.charts, charts)
                .setTransition(FragmentTransaction.TRANSIT_FRAGMENT_OPEN)
                .commit();
        final Fragment stats = new ChartStatsFragment();
        stats.setArguments(TrackLoader.trackBundle(trackFile));
        fm.beginTransaction()
                .replace(R.id.chartStats, stats)
                .setTransition(FragmentTransaction.TRANSIT_FRAGMENT_OPEN)
                .commit();
    }

    /**
     * Update header
     */
    private void updateViews() {
        if (track != null) {
            binding.trackLocation.setText(track.location());
        }
    }

    private void clickOpen(View v) {
        // Analytics
        firebaseAnalytics.logEvent("click_track_open", ABundle.of("track_id", track.track_id));
        // Open web app
        if (track.trackUrl != null) {
            Intents.openTrackUrl(this, track.trackUrl);
        }
    }

    private void clickKml(View v) {
        // FIX: check track for null BEFORE dereferencing it for analytics.
        // Previously track.track_id was read before the null guard, so the
        // guard could never prevent the NPE it was written to catch.
        if (track != null) {
            // Analytics
            firebaseAnalytics.logEvent("click_track_kml", ABundle.of("track_id", track.track_id));
            // Open google earth
            Intents.openTrackKml(this, track.trackKml);
        } else {
            Exceptions.report(new NullPointerException("Track should not be null"));
        }
    }

    private void clickDelete(View v) {
        Log.i(TAG, "User clicked delete track " + track.track_id);
        // Analytics
        firebaseAnalytics.logEvent("click_track_delete_remote_1", ABundle.of("track_id", track.track_id));
        // Prompt user for confirmation
        deleteConfirmation = new AlertDialog.Builder(this)
                .setIcon(android.R.drawable.ic_dialog_alert)
                .setTitle("Delete this track?")
                .setMessage(R.string.delete_remote)
                .setPositiveButton(R.string.action_delete, this)
                .setNegativeButton(android.R.string.cancel, null)
                .show();
    }

    /**
     * User clicked "ok" on delete track
     */
    @Override
    public void onClick(DialogInterface dialog, int which) {
        if (which == DialogInterface.BUTTON_POSITIVE) {
            Log.i(TAG, "User confirmed delete track " + track.track_id);
            // Analytics
            firebaseAnalytics.logEvent("click_track_delete_remote_2", ABundle.of("track_id", track.track_id));
            // Disable delete button
            binding.buttons.delete.setEnabled(false);
            // Delete track
            deleteRemote();
        }
    }

    private void deleteRemote() {
        // Delete on baseline server
        Services.tracks.deleteTrack(this, track);
    }

    // Listen for deletion of this track
    @Subscribe(threadMode = ThreadMode.MAIN)
    public void onDeleteSuccess(@NonNull SyncEvent.DeleteSuccess event) {
        if (event.track_id.equals(track.track_id)) {
            // Exit activity
            finish();
        }
    }

    @Subscribe(threadMode = ThreadMode.MAIN)
    public void onDeleteFailure(@NonNull SyncEvent.DeleteFailure event) {
        if (event.track_id.equals(track.track_id)) {
            // Re-enable delete so the user can retry
            binding.buttons.delete.setEnabled(true);
            // Notify user
            Toast.makeText(getApplicationContext(), "Track delete failed", Toast.LENGTH_SHORT).show();
        }
    }

    @Subscribe(threadMode = ThreadMode.MAIN)
    public void onSignOut(@NonNull AuthState.SignedOut event) {
        // If user gets signed out, close the track activity
        Log.i(TAG, "User signed out, closing cloud track");
        finish();
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Listen for sync and auth updates
        EventBus.getDefault().register(this);
        updateViews();
    }

    @Override
    protected void onPause() {
        super.onPause();
        EventBus.getDefault().unregister(this);
    }

    @Override
    protected void onStop() {
        super.onStop();
        // Dismiss alert to prevent context leak
        if (deleteConfirmation != null) {
            deleteConfirmation.dismiss();
            deleteConfirmation = null;
        }
    }
}
| |
package org.apache.lucene.index.memory;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.CannedTokenStream;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenFilter;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.CompositeReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.ByteBlockPool;
import org.apache.lucene.util.ByteBlockPool.Allocator;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LineFileDocs;
import org.apache.lucene.util.RecyclingByteBlockAllocator;
import org.apache.lucene.util.TestUtil;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;
import static org.hamcrest.CoreMatchers.equalTo;
/**
 * Verifies that Lucene MemoryIndex and RAMDirectory have the same behaviour,
 * returning the same results for queries on some randomish indexes.
 */
public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
    private Set<String> queries = new HashSet<>();
    public static final int ITERATIONS = 100 * RANDOM_MULTIPLIER;

    @Override
    public void setUp() throws Exception {
        super.setUp();
        queries.addAll(readQueries("testqueries.txt"));
        queries.addAll(readQueries("testqueries2.txt"));
    }

    /**
     * read a set of queries from a resource file, skipping blank lines and
     * lines starting with '#' or '//'
     */
    private Set<String> readQueries(String resource) throws IOException {
        Set<String> queries = new HashSet<>();
        InputStream stream = getClass().getResourceAsStream(resource);
        // FIX: close the reader (and underlying stream) via try-with-resources
        // instead of leaking it.
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) {
            String line = null;
            while ((line = reader.readLine()) != null) {
                line = line.trim();
                if (line.length() > 0 && !line.startsWith("#") && !line.startsWith("//")) {
                    queries.add(line);
                }
            }
        }
        return queries;
    }

    /**
     * runs random tests, up to ITERATIONS times.
     */
    public void testRandomQueries() throws Exception {
        MemoryIndex index = randomMemoryIndex();
        for (int i = 0; i < ITERATIONS; i++) {
            assertAgainstRAMDirectory(index);
        }
    }

    /**
     * Build a randomish document for both RAMDirectory and MemoryIndex,
     * and run all the queries against it.
     */
    public void assertAgainstRAMDirectory(MemoryIndex memory) throws Exception {
        memory.reset();
        StringBuilder fooField = new StringBuilder();
        StringBuilder termField = new StringBuilder();

        // add up to 250 terms to field "foo"
        final int numFooTerms = random().nextInt(250 * RANDOM_MULTIPLIER);
        for (int i = 0; i < numFooTerms; i++) {
            fooField.append(" ");
            fooField.append(randomTerm());
        }

        // add up to 250 terms to field "term"
        final int numTermTerms = random().nextInt(250 * RANDOM_MULTIPLIER);
        for (int i = 0; i < numTermTerms; i++) {
            termField.append(" ");
            termField.append(randomTerm());
        }

        Directory ramdir = new RAMDirectory();
        Analyzer analyzer = randomAnalyzer();
        IndexWriter writer = new IndexWriter(ramdir,
            new IndexWriterConfig(analyzer).setCodec(
                TestUtil.alwaysPostingsFormat(TestUtil.getDefaultPostingsFormat())));
        Document doc = new Document();
        Field field1 = newTextField("foo", fooField.toString(), Field.Store.NO);
        Field field2 = newTextField("term", termField.toString(), Field.Store.NO);
        doc.add(field1);
        doc.add(field2);
        writer.addDocument(doc);
        writer.close();

        memory.addField("foo", fooField.toString(), analyzer);
        memory.addField("term", termField.toString(), analyzer);

        LeafReader reader = (LeafReader) memory.createSearcher().getIndexReader();
        TestUtil.checkReader(reader);
        DirectoryReader competitor = DirectoryReader.open(ramdir);
        duellReaders(competitor, reader);
        IOUtils.close(reader, competitor);
        assertAllQueries(memory, ramdir, analyzer);
        ramdir.close();
    }

    /**
     * Compares every field, term, posting, position, offset and payload of the
     * two readers; they must be identical.
     */
    private void duellReaders(CompositeReader other, LeafReader memIndexReader)
        throws IOException {
        LeafReader competitor = SlowCompositeReaderWrapper.wrap(other);
        Fields memFields = memIndexReader.fields();
        for (String field : competitor.fields()) {
            Terms memTerms = memFields.terms(field);
            Terms iwTerms = memIndexReader.terms(field);
            if (iwTerms == null) {
                assertNull(memTerms);
            } else {
                NumericDocValues normValues = competitor.getNormValues(field);
                NumericDocValues memNormValues = memIndexReader.getNormValues(field);
                if (normValues != null) {
                    // mem idx always computes norms on the fly
                    assertNotNull(memNormValues);
                    assertEquals(normValues.get(0), memNormValues.get(0));
                }

                assertNotNull(memTerms);
                assertEquals(iwTerms.getDocCount(), memTerms.getDocCount());
                assertEquals(iwTerms.getSumDocFreq(), memTerms.getSumDocFreq());
                assertEquals(iwTerms.getSumTotalTermFreq(), memTerms.getSumTotalTermFreq());
                TermsEnum iwTermsIter = iwTerms.iterator();
                TermsEnum memTermsIter = memTerms.iterator();
                if (iwTerms.hasPositions()) {
                    final boolean offsets = iwTerms.hasOffsets() && memTerms.hasOffsets();

                    while (iwTermsIter.next() != null) {
                        assertNotNull(memTermsIter.next());
                        assertEquals(iwTermsIter.term(), memTermsIter.term());
                        PostingsEnum iwDocsAndPos = iwTermsIter.postings(null, PostingsEnum.ALL);
                        PostingsEnum memDocsAndPos = memTermsIter.postings(null, PostingsEnum.ALL);
                        while (iwDocsAndPos.nextDoc() != PostingsEnum.NO_MORE_DOCS) {
                            assertEquals(iwDocsAndPos.docID(), memDocsAndPos.nextDoc());
                            assertEquals(iwDocsAndPos.freq(), memDocsAndPos.freq());
                            for (int i = 0; i < iwDocsAndPos.freq(); i++) {
                                assertEquals("term: " + iwTermsIter.term().utf8ToString(), iwDocsAndPos.nextPosition(), memDocsAndPos.nextPosition());
                                if (offsets) {
                                    assertEquals(iwDocsAndPos.startOffset(), memDocsAndPos.startOffset());
                                    assertEquals(iwDocsAndPos.endOffset(), memDocsAndPos.endOffset());
                                }

                                if (iwTerms.hasPayloads()) {
                                    assertEquals(iwDocsAndPos.getPayload(), memDocsAndPos.getPayload());
                                }
                            }
                        }
                    }
                } else {
                    while (iwTermsIter.next() != null) {
                        assertEquals(iwTermsIter.term(), memTermsIter.term());
                        PostingsEnum iwDocsAndPos = iwTermsIter.postings(null);
                        PostingsEnum memDocsAndPos = memTermsIter.postings(null);
                        while (iwDocsAndPos.nextDoc() != PostingsEnum.NO_MORE_DOCS) {
                            assertEquals(iwDocsAndPos.docID(), memDocsAndPos.nextDoc());
                            assertEquals(iwDocsAndPos.freq(), memDocsAndPos.freq());
                        }
                    }
                }
            }
        }
    }

    /**
     * Run all queries against both the RAMDirectory and MemoryIndex, ensuring they are the same.
     */
    public void assertAllQueries(MemoryIndex memory, Directory ramdir, Analyzer analyzer) throws Exception {
        IndexReader reader = DirectoryReader.open(ramdir);
        IndexSearcher ram = newSearcher(reader);
        IndexSearcher mem = memory.createSearcher();
        QueryParser qp = new QueryParser("foo", analyzer);
        for (String query : queries) {
            TopDocs ramDocs = ram.search(qp.parse(query), 1);
            TopDocs memDocs = mem.search(qp.parse(query), 1);
            assertEquals(query, ramDocs.totalHits, memDocs.totalHits);
        }
        reader.close();
    }

    /**
     * Return a random analyzer (Simple, Stop, Standard) to analyze the terms.
     */
    private Analyzer randomAnalyzer() {
        switch (random().nextInt(4)) {
            case 0: return new MockAnalyzer(random(), MockTokenizer.SIMPLE, true);
            case 1: return new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
            case 2: return new Analyzer() {
                @Override
                protected TokenStreamComponents createComponents(String fieldName) {
                    Tokenizer tokenizer = new MockTokenizer();
                    return new TokenStreamComponents(tokenizer, new CrazyTokenFilter(tokenizer));
                }
            };
            default: return new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
        }
    }

    // a tokenfilter that makes all terms starting with 't' empty strings
    static final class CrazyTokenFilter extends TokenFilter {
        final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);

        CrazyTokenFilter(TokenStream input) {
            super(input);
        }

        @Override
        public boolean incrementToken() throws IOException {
            if (input.incrementToken()) {
                if (termAtt.length() > 0 && termAtt.buffer()[0] == 't') {
                    termAtt.setLength(0);
                }
                return true;
            } else {
                return false;
            }
        }
    };

    /**
     * Some terms to be indexed, in addition to random words.
     * These terms are commonly used in the queries.
     */
    private static final String[] TEST_TERMS = {"term", "Term", "tErm", "TERM",
        "telm", "stop", "drop", "roll", "phrase", "a", "c", "bar", "blar",
        "gack", "weltbank", "worlbank", "hello", "on", "the", "apache", "Apache",
        "copyright", "Copyright"};

    /**
     * half of the time, returns a random term from TEST_TERMS.
     * the other half of the time, returns a random unicode string.
     */
    private String randomTerm() {
        if (random().nextBoolean()) {
            // return a random TEST_TERM
            return TEST_TERMS[random().nextInt(TEST_TERMS.length)];
        } else {
            // return a random unicode term
            return TestUtil.randomUnicodeString(random());
        }
    }

    public void testDocsEnumStart() throws Exception {
        Analyzer analyzer = new MockAnalyzer(random());
        MemoryIndex memory = new MemoryIndex(random().nextBoolean(), false, random().nextInt(50) * 1024 * 1024);
        memory.addField("foo", "bar", analyzer);
        LeafReader reader = (LeafReader) memory.createSearcher().getIndexReader();
        TestUtil.checkReader(reader);
        PostingsEnum disi = TestUtil.docs(random(), reader, "foo", new BytesRef("bar"), null, PostingsEnum.NONE);
        int docid = disi.docID();
        assertEquals(-1, docid);
        assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);

        // now reuse and check again
        TermsEnum te = reader.terms("foo").iterator();
        assertTrue(te.seekExact(new BytesRef("bar")));
        disi = te.postings(disi, PostingsEnum.NONE);
        docid = disi.docID();
        assertEquals(-1, docid);
        assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
        reader.close();
    }

    private Allocator randomByteBlockAllocator() {
        if (random().nextBoolean()) {
            return new RecyclingByteBlockAllocator();
        } else {
            return new ByteBlockPool.DirectAllocator();
        }
    }

    private MemoryIndex randomMemoryIndex() {
        return new MemoryIndex(random().nextBoolean(), random().nextBoolean(), random().nextInt(50) * 1024 * 1024);
    }

    public void testDocsAndPositionsEnumStart() throws Exception {
        Analyzer analyzer = new MockAnalyzer(random());
        int numIters = atLeast(3);
        MemoryIndex memory = new MemoryIndex(true, false, random().nextInt(50) * 1024 * 1024);
        for (int i = 0; i < numIters; i++) { // check reuse
            memory.addField("foo", "bar", analyzer);
            LeafReader reader = (LeafReader) memory.createSearcher().getIndexReader();
            TestUtil.checkReader(reader);
            assertEquals(1, reader.terms("foo").getSumTotalTermFreq());
            PostingsEnum disi = reader.postings(new Term("foo", "bar"), PostingsEnum.ALL);
            int docid = disi.docID();
            assertEquals(-1, docid);
            assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
            assertEquals(0, disi.nextPosition());
            assertEquals(0, disi.startOffset());
            assertEquals(3, disi.endOffset());

            // now reuse and check again
            TermsEnum te = reader.terms("foo").iterator();
            assertTrue(te.seekExact(new BytesRef("bar")));
            disi = te.postings(disi);
            docid = disi.docID();
            assertEquals(-1, docid);
            assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
            reader.close();
            memory.reset();
        }
    }

    // LUCENE-3831
    public void testNullPointerException() throws IOException {
        RegexpQuery regex = new RegexpQuery(new Term("field", "worl."));
        SpanQuery wrappedquery = new SpanMultiTermQueryWrapper<>(regex);

        MemoryIndex mindex = randomMemoryIndex();
        mindex.addField("field", new MockAnalyzer(random()).tokenStream("field", "hello there"));

        // This throws an NPE
        assertEquals(0, mindex.search(wrappedquery), 0.00001f);
        TestUtil.checkReader(mindex.createSearcher().getIndexReader());
    }

    // LUCENE-3831
    public void testPassesIfWrapped() throws IOException {
        RegexpQuery regex = new RegexpQuery(new Term("field", "worl."));
        SpanQuery wrappedquery = new SpanOrQuery(new SpanMultiTermQueryWrapper<>(regex));

        MemoryIndex mindex = randomMemoryIndex();
        mindex.addField("field", new MockAnalyzer(random()).tokenStream("field", "hello there"));

        // This passes though
        assertEquals(0, mindex.search(wrappedquery), 0.00001f);
        TestUtil.checkReader(mindex.createSearcher().getIndexReader());
    }

    public void testSameFieldAddedMultipleTimes() throws IOException {
        MemoryIndex mindex = randomMemoryIndex();
        MockAnalyzer mockAnalyzer = new MockAnalyzer(random());
        mindex.addField("field", "the quick brown fox", mockAnalyzer);
        mindex.addField("field", "jumps over the", mockAnalyzer);
        LeafReader reader = (LeafReader) mindex.createSearcher().getIndexReader();
        TestUtil.checkReader(reader);
        assertEquals(7, reader.terms("field").getSumTotalTermFreq());
        PhraseQuery query = new PhraseQuery("field", "fox", "jumps");
        assertTrue(mindex.search(query) > 0.1);
        mindex.reset();
        mockAnalyzer.setPositionIncrementGap(1 + random().nextInt(10));
        mindex.addField("field", "the quick brown fox", mockAnalyzer);
        mindex.addField("field", "jumps over the", mockAnalyzer);
        assertEquals(0, mindex.search(query), 0.00001f);
        query = new PhraseQuery(10, "field", "fox", "jumps");
        assertTrue("posGap" + mockAnalyzer.getPositionIncrementGap("field"), mindex.search(query) > 0.0001);
        TestUtil.checkReader(mindex.createSearcher().getIndexReader());
    }

    public void testNonExistentField() throws IOException {
        MemoryIndex mindex = randomMemoryIndex();
        MockAnalyzer mockAnalyzer = new MockAnalyzer(random());
        mindex.addField("field", "the quick brown fox", mockAnalyzer);
        LeafReader reader = (LeafReader) mindex.createSearcher().getIndexReader();
        TestUtil.checkReader(reader);
        assertNull(reader.getNumericDocValues("not-in-index"));
        assertNull(reader.getNormValues("not-in-index"));
        assertNull(reader.postings(new Term("not-in-index", "foo")));
        assertNull(reader.postings(new Term("not-in-index", "foo"), PostingsEnum.ALL));
        assertNull(reader.terms("not-in-index"));
    }

    public void testDuellMemIndex() throws IOException {
        LineFileDocs lineFileDocs = new LineFileDocs(random());
        int numDocs = atLeast(10);
        MemoryIndex memory = randomMemoryIndex();
        for (int i = 0; i < numDocs; i++) {
            Directory dir = newDirectory();
            MockAnalyzer mockAnalyzer = new MockAnalyzer(random());
            mockAnalyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
            IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random(), mockAnalyzer));
            Document nextDoc = lineFileDocs.nextDoc();
            Document doc = new Document();
            for (IndexableField field : nextDoc.getFields()) {
                if (field.fieldType().indexOptions() != IndexOptions.NONE) {
                    doc.add(field);
                    if (random().nextInt(3) == 0) {
                        doc.add(field); // randomly add the same field twice
                    }
                }
            }

            writer.addDocument(doc);
            writer.close();
            for (IndexableField field : doc.getFields()) {
                memory.addField(field.name(), ((Field) field).stringValue(), mockAnalyzer);
            }
            DirectoryReader competitor = DirectoryReader.open(dir);
            LeafReader memIndexReader = (LeafReader) memory.createSearcher().getIndexReader();
            TestUtil.checkReader(memIndexReader);
            duellReaders(competitor, memIndexReader);
            IOUtils.close(competitor, memIndexReader);
            memory.reset();
            dir.close();
        }
        lineFileDocs.close();
    }

    // LUCENE-4880
    public void testEmptyString() throws IOException {
        MemoryIndex memory = new MemoryIndex();
        memory.addField("foo", new CannedTokenStream(new Token("", 0, 5)));
        IndexSearcher searcher = memory.createSearcher();
        TopDocs docs = searcher.search(new TermQuery(new Term("foo", "")), 10);
        assertEquals(1, docs.totalHits);
        TestUtil.checkReader(searcher.getIndexReader());
    }

    public void testDuelMemoryIndexCoreDirectoryWithArrayField() throws Exception {
        final String field_name = "text";
        MockAnalyzer mockAnalyzer = new MockAnalyzer(random());
        if (random().nextBoolean()) {
            mockAnalyzer.setOffsetGap(random().nextInt(100));
        }
        //index into a random directory
        FieldType type = new FieldType(TextField.TYPE_STORED);
        type.setStoreTermVectorOffsets(true);
        type.setStoreTermVectorPayloads(false);
        type.setStoreTermVectorPositions(true);
        type.setStoreTermVectors(true);
        type.freeze();

        Document doc = new Document();
        doc.add(new Field(field_name, "la la", type));
        doc.add(new Field(field_name, "foo bar foo bar foo", type));

        Directory dir = newDirectory();
        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random(), mockAnalyzer));
        writer.updateDocument(new Term("id", "1"), doc);
        writer.commit();
        writer.close();
        DirectoryReader reader = DirectoryReader.open(dir);

        //Index document in Memory index
        MemoryIndex memIndex = new MemoryIndex(true);
        memIndex.addField(field_name, "la la", mockAnalyzer);
        memIndex.addField(field_name, "foo bar foo bar foo", mockAnalyzer);

        //compare term vectors
        Terms ramTv = reader.getTermVector(0, field_name);
        IndexReader memIndexReader = memIndex.createSearcher().getIndexReader();
        TestUtil.checkReader(memIndexReader);
        Terms memTv = memIndexReader.getTermVector(0, field_name);

        compareTermVectors(ramTv, memTv, field_name);
        memIndexReader.close();
        reader.close();
        dir.close();
    }

    protected void compareTermVectors(Terms terms, Terms memTerms, String field_name) throws IOException {
        TermsEnum termEnum = terms.iterator();
        TermsEnum memTermEnum = memTerms.iterator();

        while (termEnum.next() != null) {
            assertNotNull(memTermEnum.next());
            assertThat(termEnum.totalTermFreq(), equalTo(memTermEnum.totalTermFreq()));

            PostingsEnum docsPosEnum = termEnum.postings(null, PostingsEnum.POSITIONS);
            PostingsEnum memDocsPosEnum = memTermEnum.postings(null, PostingsEnum.POSITIONS);
            String currentTerm = termEnum.term().utf8ToString();

            assertThat("Token mismatch for field: " + field_name, currentTerm, equalTo(memTermEnum.term().utf8ToString()));

            docsPosEnum.nextDoc();
            memDocsPosEnum.nextDoc();

            int freq = docsPosEnum.freq();
            assertThat(freq, equalTo(memDocsPosEnum.freq()));
            for (int i = 0; i < freq; i++) {
                String failDesc = " (field:" + field_name + " term:" + currentTerm + ")";
                int memPos = memDocsPosEnum.nextPosition();
                int pos = docsPosEnum.nextPosition();
                assertThat("Position test failed" + failDesc, memPos, equalTo(pos));
                assertThat("Start offset test failed" + failDesc, memDocsPosEnum.startOffset(), equalTo(docsPosEnum.startOffset()));
                assertThat("End offset test failed" + failDesc, memDocsPosEnum.endOffset(), equalTo(docsPosEnum.endOffset()));
                // FIX: the original compared docsPosEnum.getPayload() with itself,
                // which is a tautology and never actually checked the MemoryIndex
                // payload. Compare the mem-index payload against the directory one.
                assertThat("Missing payload test failed" + failDesc, memDocsPosEnum.getPayload(), equalTo(docsPosEnum.getPayload()));
            }
        }
        assertNull("Still some tokens not processed", memTermEnum.next());
    }
}
| |
/*
* Copyright (C) Tony Green, LitePal Framework Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.litepal.util;
import android.content.res.AssetManager;
import android.text.TextUtils;
import org.litepal.LitePalApplication;
import org.litepal.exceptions.DataSupportException;
import org.litepal.parser.LitePalAttr;
import java.io.IOException;
import java.util.Collection;
import java.util.Locale;
/**
* A utility class to help LitePal with some base actions that might through any
* components. These actions can help classes just do the jobs they care, and
* help them out of the trivial work.
*
* @author Tony Green
* @since 1.0
*/
public class BaseUtility {
/**
 * Private constructor: BaseUtility is a static utility class and must never be
 * instantiated.
 */
private BaseUtility() {
    // Intentionally empty.
}
/**
 * It will change the case of the passing parameter into the case defined in
 * litepal.xml file: "keep" leaves the string untouched, "upper" upper-cases
 * it, and any other setting lower-cases it (both using {@link Locale#US}).
 *
 * @param string
 *            The string want to change case.
 * @return The string after changing case. If the name is null, then simply
 *         return null.
 */
public static String changeCase(String string) {
    if (string == null) {
        return null;
    }
    String cases = LitePalAttr.getInstance().getCases();
    if (Const.Config.CASES_KEEP.equals(cases)) {
        return string;
    }
    if (Const.Config.CASES_UPPER.equals(cases)) {
        return string.toUpperCase(Locale.US);
    }
    return string.toLowerCase(Locale.US);
}
/**
 * This helper method makes up the shortage of contains method in Collection
 * to support the function of case insensitive contains. It only supports
 * the String generic type of collection, cause other types have no cases
 * concept.
 *
 * @param collection
 *            The collection contains string data.
 * @param string
 *            The string want to look for in the collection.
 * @return If the string is in the collection without case concern return
 *         true, otherwise return false. If the collection is null, return
 *         false.
 */
public static boolean containsIgnoreCases(Collection<String> collection, String string) {
    if (collection == null) {
        return false;
    }
    if (string == null) {
        // Only an exact null element can match a null query.
        return collection.contains(null);
    }
    for (String candidate : collection) {
        if (string.equalsIgnoreCase(candidate)) {
            return true;
        }
    }
    return false;
}
/**
* Capitalize make the first letter of the word be upper case.
*
* @param string
* The word to capitalize.
* @return The word after capitalize.
*/
public static String capitalize(String string) {
if (!TextUtils.isEmpty(string)) {
return string.substring(0, 1).toUpperCase(Locale.US) + string.substring(1);
}
return string == null ? null : "";
}
/**
* Count how many marks existed in string.
*
* @param string
* The source sentence.
* @param mark
* The specific substring to count.
* @return The number of marks existed in string.
*/
public static int count(String string, String mark) {
if (!TextUtils.isEmpty(string) && !TextUtils.isEmpty(mark)) {
int count = 0;
int index = string.indexOf(mark);
while (index != -1) {
count++;
string = string.substring(index + mark.length());
index = string.indexOf(mark);
}
return count;
}
return 0;
}
/**
* Check the number of question mark existed in conditions[0] equals the
* number of rest conditions elements or not. If not equals, throws
* DataSupportException.
*
* @param conditions
* A string array representing the WHERE part of an SQL
* statement.
* @throws org.litepal.exceptions.DataSupportException
*/
public static void checkConditionsCorrect(String... conditions) {
if (conditions != null) {
int conditionsSize = conditions.length;
if (conditionsSize > 0) {
String whereClause = conditions[0];
int placeHolderSize = BaseUtility.count(whereClause, "?");
if (conditionsSize != placeHolderSize + 1) {
throw new DataSupportException(DataSupportException.UPDATE_CONDITIONS_EXCEPTION);
}
}
}
}
/**
* Judge a field type is supported or not. Currently only basic data types
* and String are supported.
*
* @param fieldType
* Type of the field.
* @return Supported return true, not supported return false.
*/
public static boolean isFieldTypeSupported(String fieldType) {
if ("boolean".equals(fieldType) || "java.lang.Boolean".equals(fieldType)) {
return true;
}
if ("float".equals(fieldType) || "java.lang.Float".equals(fieldType)) {
return true;
}
if ("double".equals(fieldType) || "java.lang.Double".equals(fieldType)) {
return true;
}
if ("int".equals(fieldType) || "java.lang.Integer".equals(fieldType)) {
return true;
}
if ("long".equals(fieldType) || "java.lang.Long".equals(fieldType)) {
return true;
}
if ("short".equals(fieldType) || "java.lang.Short".equals(fieldType)) {
return true;
}
if ("char".equals(fieldType) || "java.lang.Character".equals(fieldType)) {
return true;
}
if ("[B".equals(fieldType) || "[Ljava.lang.Byte;".equals(fieldType)) {
return true;
}
if ("java.lang.String".equals(fieldType) || "java.util.Date".equals(fieldType)) {
return true;
}
return false;
}
/**
* Judge a generic type is supported or not. Currently only basic data types
* and String are supported.
*
* @param genericType
* Type of the generic field.
* @return Supported return true, not supported return false.
*/
public static boolean isGenericTypeSupported(String genericType) {
if ("java.lang.String".equals(genericType)) {
return true;
} else if ("java.lang.Integer".equals(genericType)) {
return true;
} else if ("java.lang.Float".equals(genericType)) {
return true;
} else if ("java.lang.Double".equals(genericType)) {
return true;
} else if ("java.lang.Long".equals(genericType)) {
return true;
} else if ("java.lang.Short".equals(genericType)) {
return true;
} else if ("java.lang.Boolean".equals(genericType)) {
return true;
} else if ("java.lang.Character".equals(genericType)) {
return true;
}
return false;
}
/**
* If the litepal.xml configuration file exists.
* @return True if exists, false otherwise.
*/
public static boolean isLitePalXMLExists() {
try {
AssetManager assetManager = LitePalApplication.getContext().getAssets();
String[] fileNames = assetManager.list("");
if (fileNames != null && fileNames.length > 0) {
for (String fileName : fileNames) {
if (Const.Config.CONFIGURATION_FILE_NAME.equalsIgnoreCase(fileName)) {
return true;
}
}
}
} catch (IOException e) {
}
return false;
}
}
| |
/*
* (c) Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP
* [See end of file]
*/
package com.hp.hpl.jena.n3;
import java.io.*;
import java.math.BigDecimal;
import java.net.URI;
import java.net.URISyntaxException;
import java.text.CharacterIterator;
import java.text.StringCharacterIterator;
import java.util.*;
import java.util.Map.Entry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.hp.hpl.jena.rdf.model.*;
import com.hp.hpl.jena.util.iterator.ClosableIterator;
import com.hp.hpl.jena.util.iterator.WrappedIterator;
import com.hp.hpl.jena.JenaRuntime;
import com.hp.hpl.jena.shared.JenaException;
import com.hp.hpl.jena.vocabulary.OWL;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.XSD;
/** Common framework for implementing N3 writers.
*
* @author Andy Seaborne
* @version $Id: N3JenaWriterCommon.java,v 1.2 2009/08/05 16:08:51 andy_seaborne Exp $
*/
public class N3JenaWriterCommon implements RDFWriter
{
    static Logger logger = LoggerFactory.getLogger(N3JenaWriterCommon.class) ;

    // N3 writing proceeds in 2 stages.
    // First, it analyses the model to be written to extract information
    // that is going to be specially formatted (RDF lists, one ref anon nodes)
    // Second do the writing walk.
    // The simple N3 writer does nothing during preparation.

    // Properties set via setProperty(); lazily created on first use.
    protected Map<String, Object> writerPropertyMap = null ;

    // BaseURI - <#>
    // final boolean doAbbreviatedBaseURIref = getBooleanValue("abbrevBaseURI", true) ;
    protected boolean alwaysAllocateBNodeLabel = false ;

    // Common variables
    protected RDFErrorHandler errorHandler = null;

    protected Map<String, String> prefixMap = new HashMap<String, String>() ;   // Prefixes to actually use
    protected Map<String, String> reversePrefixMap = new HashMap<String, String>() ;  // URI->prefix
    protected Map<Resource, String> bNodesMap = null ;       // BNodes seen; maps bNode -> allocated "_:bN" label.
    protected int bNodeCounter = 0 ;

    // Specific properties that have a short form.
    // Not Turtle.
    protected static final String NS_W3_log = "http://www.w3.org/2000/10/swap/log#" ;

    protected static Map<String, String> wellKnownPropsMapN3 = new HashMap<String, String>() ;
    static {
        wellKnownPropsMapN3.put(NS_W3_log+"implies", "=>" ) ;
        wellKnownPropsMapN3.put(OWL.sameAs.getURI(), "=" ) ;
        wellKnownPropsMapN3.put(RDF.type.getURI(), "a" ) ;
    }

    protected static Map<String, String> wellKnownPropsMapTurtle = new HashMap<String, String>() ;
    static {
        //wellKnownPropsMapTurtle.put(OWL.sameAs.getURI(), "=" ) ;
        wellKnownPropsMapTurtle.put(RDF.type.getURI(), "a" ) ;
    }

    protected Map<String, String> wellKnownPropsMap = wellKnownPropsMapN3 ;

    // Work variables controlling the output
    protected N3IndentedWriter out = null ;

    //Removed base URI specials - look for "// BaseURI - <#>" & doAbbreviatedBaseURIref
    //String baseURIref = null ;
    //String baseURIrefHash = null ;

    // Min spacing of items
    protected int minGap = getIntValue("minGap", 1) ;
    protected String minGapStr = pad(minGap) ;

    // Gap from subject to property
    protected int indentProperty = getIntValue("indentProperty", 6) ;

    // Width of property before wrapping.
    // This is not necessarily a control of total width
    // e.g. the pretty writer may be writing properties inside indented one ref bNodes
    protected int widePropertyLen = getIntValue("widePropertyLen", 20) ;

    // Column for property when an object follows a property on the same line
    protected int propertyCol = getIntValue("propertyColumn", 8) ;

    // Minimum gap from property to object when object on a new line.
    protected int indentObject = propertyCol ;

    // If a subject is shorter than this, the first property may go on same line.
    protected int subjectColumn = getIntValue("subjectColumn", indentProperty) ;
    // Require shortSubject < subjectCol (strict less than)
    protected int shortSubject = subjectColumn-minGap;

    protected boolean useWellKnownPropertySymbols = getBooleanValue("usePropertySymbols", true) ;
    protected boolean allowTripleQuotedStrings = getBooleanValue("useTripleQuotedStrings", true) ;
    protected boolean allowDoubles = getBooleanValue("useDoubles", true) ;
    protected boolean allowDecimals = getBooleanValue("useDecimals", true) ;

    // ----------------------------------------------------
    // Jena RDFWriter interface

    /** Registers a new error handler and returns the previously installed one. */
    public RDFErrorHandler setErrorHandler(RDFErrorHandler errHandler)
    {
        RDFErrorHandler old = errorHandler;
        errorHandler = errHandler;
        return old;
    }

    /**
     * Sets a writer property (stored under its absolute property name).
     * Non-string values are coerced to strings with a warning.
     * Returns the previous value for that property, or null.
     */
    public Object setProperty(String propName, Object propValue)
    {
        if ( ! ( propValue instanceof String ) )
        {
            logger.warn("N3.setProperty: Property for '"+propName+"' is not a string") ;
            propValue = propValue.toString() ;
        }

        // Store absolute name of property
        propName = absolutePropName(propName) ;
        if ( writerPropertyMap == null )
            writerPropertyMap = new HashMap<String, Object>() ;
        Object oldValue = writerPropertyMap.get(propName);
        writerPropertyMap.put(propName, propValue);
        return oldValue;
    }

    /** Write the model out in N3. The writer should be one suitable for UTF-8 which
     * excludes a PrintWriter or a FileWriter which use default character set.
     *
     * Examples:
     * <pre>
     * try {
     *      Writer w =  new BufferedWriter(new OutputStreamWriter(output, "UTF-8")) ;
     *      model.write(w, base) ;
     *      try { w.flush() ; } catch (IOException ioEx) {}
     *  } catch (java.io.UnsupportedEncodingException ex) {} //UTF-8 is required so can't happen
     * </pre>
     * or
     * <pre>
     * try {
     *     OutputStream out = new FileOutputStream(file) ;
     *     Writer w =  new BufferedWriter(new OutputStreamWriter(out, "UTF-8")) ;
     *     model.write(w, base) ;
     * }
     * catch (java.io.UnsupportedEncodingException ex) {}
     * catch (java.io.FileNotFoundException noFileEx) { ... }
     * </pre>
     * @see #write(Model,Writer,String)
     */
    public void write(Model baseModel, Writer _out, String base)
    {
        // Ensure buffered output; the writer is kept in the 'out' field for
        // the duration of processModel().
        if (!(_out instanceof BufferedWriter))
            _out = new BufferedWriter(_out);
        out = new N3IndentedWriter(_out);

        // BaseURI - <#>
        // if ( base != null )
        // {
        //     baseURIref = base ;
        //     if ( !base.endsWith("#") &&! isOpaque(base) )
        //         baseURIrefHash = baseURIref+"#" ;
        // }

        processModel(baseModel) ;
    }

    /** Write the model out in N3, encoded in UTF-8
     * @see #write(Model,Writer,String)
     */
    public synchronized void write(Model model, OutputStream output, String base)
    {
        try {
            Writer w = new BufferedWriter(new OutputStreamWriter(output, "UTF-8")) ;
            write(model, w, base) ;
            try { w.flush() ; } catch (IOException ioEx) {}
        } catch (java.io.UnsupportedEncodingException ex)
        {
            // UTF-8 is a mandatory charset, so this should be unreachable.
            System.err.println("Failed to create UTF-8 writer") ;
        }
    }

    // ----------------------------------------------------
    // The assumed processing model is:
    // Writing N3 involves ordering the graph into:
    // -- Subjects
    // -- Property lists within subjects
    // -- Object lists with in properties
    // A derived class may choose to intercept and implement at any of these levels.

    // Standard layout is:
    // subject
    //    property1 value1 ;
    //    property2 value2 ;
    //    property3 value3 .

    // Normal hook points for subclasses.

    protected void startWriting() {}
    protected void finishWriting() {}
    protected void prepare(Model model) {}

    /**
     * Drives the whole writing process: builds prefix maps, calls the
     * subclass hooks (startWriting/prepare), writes header, prefixes and the
     * model body, then releases per-run state so the writer can be reused.
     */
    protected void processModel(Model baseModel)
    {
        prefixMap = baseModel.getNsPrefixMap() ;
        Model model = ModelFactory.withHiddenStatements( baseModel );
        bNodesMap = new HashMap<Resource, String>() ;

        // PrefixMapping (to Jena 2.5.7 at least)
        // is specialized to XML-isms and Turtle prefixed names aren't quite qnames.
        // Build temporary maps of acceptable prefixes and URIs.

        // If no base defined for the model, but one given to writer,
        // then use this.
        // NOTE(review): base2 is only consumed by the commented-out base-URI
        // handling below; it is currently an unused local.
        String base2 = prefixMap.get("") ;

        // BaseURI - <#>
        // if ( base2 == null && baseURIrefHash != null )
        //     prefixMap.put("", baseURIrefHash) ;

        // Drop prefixes that are not legal Turtle/N3 prefix names and build
        // the reverse (URI -> prefix) map used by formatURI().
        for ( Iterator<Entry<String, String>> iter = prefixMap.entrySet().iterator() ; iter.hasNext() ; )
        {
            Entry<String, String> e = iter.next() ;
            String prefix = e.getKey() ;
            String uri = e.getValue();

            // XML namespaces name can include '.'
            // Turtle prefixed names can't.
            if ( ! checkPrefixPart(prefix) )
                iter.remove() ;
            else
                // Build acceptable reverse mapping
                reversePrefixMap.put(uri, prefix) ;
        }

        startWriting() ;
        prepare(model) ;

        writeHeader(model) ;
        writePrefixes(model) ;

        if (prefixMap.size() != 0)
            out.println();

        // Do the output.
        writeModel(model) ;

        // Release intermediate memory - allows reuse of a writer
        finishWriting() ;
        bNodesMap = null ;
    }

    /**
     * Writes every subject of the model that skipThisSubject() does not
     * filter out, separating subject frames with a blank line.
     */
    protected void writeModel(Model model)
    {
        // Needed only for no prefixes, no blank first line.
        boolean doingFirst = true;
        ResIterator rIter = listSubjects(model);
        for (; rIter.hasNext();)
        {
            // Subject:
            // First - it is something we will write out as a structure in an object field?
            // That is, a RDF list or the object of exactly one statement.
            Resource subject = rIter.nextResource();
            if ( skipThisSubject(subject) )
            {
                if (N3JenaWriter.DEBUG)
                    out.println("# Skipping: " + formatResource(subject));
                continue;
            }

            // We really are going to print something via writeTriples
            if (doingFirst)
                doingFirst = false;
            else
                out.println();

            writeOneGraphNode(subject) ;
        }
        rIter.close();
    }

    /** Hook: which subjects to enumerate (default: all subjects of the model). */
    protected ResIterator listSubjects(Model model) { return model.listSubjects(); }

    /** Writes a complete "subject property value ; ... ." frame. */
    protected void writeOneGraphNode(Resource subject)
    {
        // New top level item.
        // Does not take effect until newline.
        out.incIndent(indentProperty) ;
        writeSubject(subject);
        ClosableIterator<Property> iter = preparePropertiesForSubject(subject);
        writePropertiesForSubject(subject, iter) ;
        out.decIndent(indentProperty) ;
        out.println(" .");
    }

    /** Writes all property/object-list groups of a subject, ';'-separated. */
    protected void writePropertiesForSubject(Resource subj, ClosableIterator<Property> iter)
    {
        // For each property.
        for (; iter.hasNext();)
        {
            Property property = iter.next();

            // Object list
            writeObjectList(subj, property);

            if (iter.hasNext())
                out.println(" ;");
        }
        iter.close();
    }

    // Hook called on every resource.
    // Since there is spacing between resource frames, need to know
    // whether an item will cause any output.
    protected boolean skipThisSubject(Resource r) { return false ; }

    // This is the hook called within writeModel.
    // NB May not be at the top level (indent = 0)

    /**
     * Writes the subject; short subjects are padded so the first property can
     * share the line, longer ones force the properties onto a new line.
     */
    protected void writeSubject(Resource subject)
    {
        String subjStr = formatResource(subject);
        out.print(subjStr);
        // May be very short : if so, stay on this line.

        // Currently at end of subject.
        // NB shortSubject is (subjectColumn-minGap) so there is a gap.
        if (subjStr.length() < shortSubject )
        {
            out.print(pad(subjectColumn - subjStr.length()) );
        }
        else
            // Does not fit this line.
            out.println();
    }

    protected void writeHeader(Model model)
    {
        // BaseURI - <#>
        // if (baseURIref != null && !baseURIref.equals("") )
        //     out.println("# Base: " + baseURIref);
    }

    protected N3IndentedWriter getOutput() { return out ; }
    protected Map<String, String> getPrefixes() { return prefixMap ; }

    /** Writes one "@prefix p: <uri> ." line per usable prefix. */
    protected void writePrefixes(Model model)
    {
        for (Iterator<String> pIter = prefixMap.keySet().iterator(); pIter.hasNext();)
        {
            String p = pIter.next();
            String u = prefixMap.get(p);

            // BaseURI - <#>
            // // Special cases: N3 handling of base names.
            // if (doAbbreviatedBaseURIref && p.equals(""))
            // {
            //     if (baseURIrefHash != null && u.equals(baseURIrefHash))
            //         u = "#";
            //     if (baseURIref != null && u.equals(baseURIref))
            //         u = "";
            // }

            String tmp = "@prefix " + p + ": ";
            out.print(tmp);
            out.print(pad(16 - tmp.length()));
            // NB Starts with a space to ensure a gap.
            out.println(" <" + u + "> .");
        }
    }

    /**
     * Writes all objects for (subject, property), one statement per line,
     * aligning short property names into the property column.
     */
    protected void writeObjectList(Resource subject, Property property)
    {
        String propStr = formatProperty(property) ;

        //        if (wellKnownPropsMap.containsKey(property.getURI()))
        //            propStr = (String) wellKnownPropsMap.get(property.getURI());
        //        else
        //            propStr = formatResource(property);

        // Write with object lists as clusters of statements with the same property
        // Looks more like a machine did it but fewer bad cases.

        StmtIterator sIter = subject.listProperties(property);
        for (; sIter.hasNext();)
        {
            Statement stmt = sIter.nextStatement() ;
            String objStr = formatNode(stmt.getObject()) ;

            out.print(propStr);
            out.incIndent(indentObject);

            if ( (propStr.length()+minGap) <= widePropertyLen )
            {
                // Property col allows for min gap but widePropertyLen > propertyCol
                // (which loses alignment - this is intentional.
                // Ensure there is at least min gap.

                int padding = calcPropertyPadding(propStr) ;
                out.print(pad(padding)) ;

                //                if ( propStr.length() < propertyWidth )
                //                    out.print( pad(propertyCol-minGap-propStr.length()) ) ;
                //                out.print(minGapStr) ;
            }
            else
                // Does not fit this line.
                out.println();

            // Write one object - simple writing.

            out.print(objStr) ;
            out.decIndent(indentObject);

            if ( sIter.hasNext() )
            {
                out.println(" ;") ;
            }
        }
        sIter.close() ;
    }

    /** Formats any node: literals via formatLiteral, everything else as a resource. */
    protected String formatNode(RDFNode node)
    {
        if (node instanceof Literal)
            return formatLiteral((Literal) node);
        else
            return formatResource((Resource)node) ;
    }

    protected void writeObject(RDFNode node)
    {
        if (node instanceof Literal)
        {
            writeLiteral((Literal) node);
            return;
        }

        Resource rObj = (Resource) node;

        out.print(formatResource(rObj));
    }

    protected void writeLiteral(Literal literal)
    {
        out.print(formatLiteral(literal)) ;
    }

    /**
     * Hook: returns the distinct properties of a subject. Note the backing
     * HashSet makes the iteration order unspecified.
     */
    protected ClosableIterator<Property> preparePropertiesForSubject(Resource r)
    {
        // Properties to do.
        Set<Property> properties = new HashSet<Property>() ;
        StmtIterator sIter = r.listProperties();
        for ( ; sIter.hasNext() ; )
            properties.add(sIter.nextStatement().getPredicate()) ;
        sIter.close() ;
        return WrappedIterator.create(properties.iterator()) ;
    }

    // Utility operations

    /**
     * Formats a resource: unreferenced bNodes become "[]", other bNodes get a
     * stable "_:bN" label (allocated here), rdf:nil becomes "()", and URI
     * resources go through formatURI().
     */
    protected String formatResource(Resource r)
    {
        if ( r.isAnon() )
        {
            if ( ! alwaysAllocateBNodeLabel )
            {
                // Does anything point to it?
                StmtIterator sIter = r.getModel().listStatements(null, null, r) ;

                if ( ! sIter.hasNext() )
                {
                    sIter.close() ;
                    // This bNode is not referenced so don't need the bNode Id.
                    // Must be a subject - indent better be zero!
                    // This only happens for subjects because object bNodes
                    // referred to once (the other case for [] syntax)
                    // are handled elsewhere (by oneRef set)

                    // Later: use [ prop value ] for this.
                    return "[]" ;
                }
                sIter.close() ;
            }
            if ( ! bNodesMap.containsKey(r) )
                bNodesMap.put(r, "_:b"+(++bNodeCounter)) ;
            return bNodesMap.get(r) ;

        }

        // It has a URI.

        if ( r.equals(RDF.nil) )
            return "()" ;

        return formatURI(r.getURI()) ;
    }

    /**
     * Formats a literal. Well-formed xsd:integer/xsd:decimal/xsd:double
     * lexical forms are emitted as bare numbers (subject to the allowDoubles
     * flag); everything else is quoted (triple-quoted if multi-line and
     * allowed), with language tag and/or datatype appended.
     */
    protected String formatLiteral(Literal literal)
    {
        String datatype = literal.getDatatypeURI() ;
        String lang = literal.getLanguage() ;
        String s = literal.getLexicalForm() ;

        if ( datatype != null )
        {
            // Special form we know how to handle?
            // Assume valid text
            if ( datatype.equals(XSD.integer.getURI()) )
            {
                try {
                    // Validation only - parse failure falls through to quoting.
                    new java.math.BigInteger(s) ;
                    return s ;
                } catch (NumberFormatException nfe) {}
                // No luck.  Continue.
                // Continuing is always safe.
            }

            if ( datatype.equals(XSD.decimal.getURI()) )
            {
                // Must have ., can't have e or E
                if ( s.indexOf('.') >= 0 &&
                     s.indexOf('e') == -1 && s.indexOf('E') == -1 )
                {
                    // See if parsable.
                    try {
                        // Validation only; the BigDecimal value is not used.
                        BigDecimal d = new BigDecimal(s) ;
                        return s ;
                    } catch (NumberFormatException nfe) {}
                }
            }

            if ( this.allowDoubles && datatype.equals(XSD.xdouble.getURI()) )
            {
                // Must have 'e' or 'E' (N3 and Turtle now read 2.3 as a decimal).
                if ( s.indexOf('e') >= 0 ||
                     s.indexOf('E') >= 0 )
                {
                    try {
                        // Validate it.
                        Double.parseDouble(s) ;
                        return s ;
                    } catch (NumberFormatException nfe) {}
                    // No luck.  Continue.
                }
            }
        }

        // Format the text - with escaping.
        StringBuffer sbuff = new StringBuffer() ;
        boolean singleQuoteLiteral = true ;

        String quoteMarks = "\"" ;

        // Things that force the use of """ strings
        if ( this.allowTripleQuotedStrings &&
             ( s.indexOf("\n") != -1 ||
               s.indexOf("\r") != -1 ||
               s.indexOf("\f") != -1 ) )
        {
            quoteMarks = "\"\"\"" ;
            singleQuoteLiteral = false ;
        }

        sbuff.append(quoteMarks);
        string(sbuff, s, singleQuoteLiteral) ;
        sbuff.append(quoteMarks);

        // Format the language tag
        if ( lang != null && lang.length()>0)
        {
            sbuff.append("@") ;
            sbuff.append(lang) ;
        }

        // Format the datatype
        if ( datatype != null )
        {
            sbuff.append("^^") ;
            sbuff.append(formatURI(datatype)) ;
        }
        return sbuff.toString() ;
    }

    /** Formats a property, using its short symbol ("a", "=", "=>") when enabled. */
    protected String formatProperty(Property p)
    {
        String prop = p.getURI() ;
        if ( this.useWellKnownPropertySymbols && wellKnownPropsMap.containsKey(prop) )
            return wellKnownPropsMap.get(prop);

        return formatURI(prop) ;
    }

    /**
     * Formats a URI as a prefixed name when a registered prefix matches the
     * namespace part (split at the last '#' or '/'), otherwise as <uri>.
     */
    protected String formatURI(String uriStr)
    {
        // NOTE(review): matchURI/matchPrefix are only used by the commented-out
        // longest-prefix search below; currently unused locals.
        String matchURI = "" ;
        String matchPrefix = null ;

        // BaseURI - <#>
        //        if ( doAbbreviatedBaseURIref && uriStr.equals(baseURIref) )
        //            return "<>" ;

        // Try for a prefix and write as prefixed name.
        // 1/ Try splitting as a prefixed name
        // 2/ Search for possibilities

        // Stage 1.
        int idx = splitIdx(uriStr) ;
        // Depends on legal URIs.
        if ( idx >= 0 )
        {
            // Include the # itself.
            String x = uriStr.substring(0,idx+1) ;
            String prefix = reversePrefixMap.get(x) ;
            if ( prefix != null )
            {
                String localPart = uriStr.substring(idx+1) ;
                if ( checkLocalPart(localPart) )
                    return prefix+':'+localPart ;
            }
        }

        // Unsplit. Could just return here.
        //        // Find the longest if several.
        //        // Possible optimization: split URI and have URI=> ns: map.
        //        // Ordering prefixes by length, then first hit is better.
        //        //
        //        // Also: could just assume that the split is on / or #
        //        // Means we need to find a prefix just once.
        //        for ( Iterator<String> pIter = prefixMap.keySet().iterator() ; pIter.hasNext() ; )
        //        {
        //            String p = pIter.next() ;
        //            String u = prefixMap.get(p) ;
        //            if ( uriStr.startsWith(u) )
        //                if ( matchURI.length() < u.length() )
        //                {
        //                    matchPrefix = p ;
        //                    matchURI = u ;
        //                }
        //        }
        //        if ( matchPrefix != null )
        //        {
        //            String localname = uriStr.substring(matchURI.length()) ;
        //
        //            if ( checkPrefixedName(matchPrefix, localname) )
        //                return matchPrefix+":"+localname ;
        //
        //            // Continue and return quoted URIref
        //        }

        // Not as a prefixed name - write as a quoted URIref
        // Should we unicode escape here?
        // It should be right - the writer should be UTF-8 on output.
        return "<"+uriStr+">" ;
    }

    /** Index of the last '#' in the URI, else the last '/', else -1. */
    protected static int splitIdx(String uriStr)
    {
        int idx = uriStr.lastIndexOf('#') ;
        if ( idx >= 0 )
            return idx ;
        // No # - try for /
        idx = uriStr.lastIndexOf('/') ;
        return idx ;
    }

    // Prefixed names in N3 and Turtle aren't really qnames
    //   No dots in prefix part; digit can be first in local part
    // These tests must agree, or be more restrictive, than the parser.
    protected static boolean checkPrefixedName(String ns, String local)
    {
        return checkPrefixPart(ns) && checkLocalPart(local) ;
    }

    protected static boolean checkPrefixPart(String s)
    {
        return checkNamePart(s) ;
    }

    protected static boolean checkLocalPart(String s)
    {
        // This is too restrictive (but safe)
        // local parts with dots are legal.
        return checkNamePart(s) ;
    }

    /** True iff every character is a letter, digit, '_' or '-'. */
    protected static boolean checkNamePart(String s)
    {
        CharacterIterator cIter = new StringCharacterIterator(s) ;
        for ( char ch = cIter.first() ;
              ch != java.text.CharacterIterator.DONE ;
              ch = cIter.next() )
        {
            if ( Character.isLetterOrDigit(ch) )
                continue ;
            switch (ch)
            {
                case '_': case '-':
                    continue ;
            }
            // Not an acceptable character
            return false ;
        }
        return true ;
    }

    protected final static String WS = "\n\r\t" ;

    /**
     * Appends s to sbuff, escaping backslash and double quote; in
     * single-quoted mode, newline / carriage return / form feed are escaped
     * as \n, \r, \f.
     */
    protected static void string(StringBuffer sbuff, String s, boolean singleQuoteLiteral)
    {
        for (int i = 0; i < s.length(); i++) {
            char c = s.charAt(i);

            // Escape escapes and quotes
            if (c == '\\' || c == '"' )
            {
                sbuff.append('\\') ;
                sbuff.append(c) ;
                continue ;
            }

            // Characters to literally output.
            // This would generate 7-bit safe files
            //            if (c >= 32 && c < 127)
            //            {
            //                sbuff.append(c) ;
            //                continue;
            //            }

            // Whitespace
            // NOTE(review): the guard does not include '\t', so the tab case
            // below is unreachable and tabs are emitted literally - confirm
            // this is intended.
            if ( singleQuoteLiteral && ( c == '\n' || c == '\r' || c == '\f' ) )
            {
                if (c == '\n') sbuff.append("\\n");
                if (c == '\t') sbuff.append("\\t");
                if (c == '\r') sbuff.append("\\r");
                if (c == '\f') sbuff.append("\\f");
                continue ;
            }

            // Output as is (subject to UTF-8 encoding on output that is)
            sbuff.append(c) ;

            //            // Unicode escapes
            //            // c < 32, c >= 127, not whitespace or other specials
            //            String hexstr = Integer.toHexString(c).toUpperCase();
            //            int pad = 4 - hexstr.length();
            //            sbuff.append("\\u");
            //            for (; pad > 0; pad--)
            //                sbuff.append("0");
            //            sbuff.append(hexstr);
        }
    }

    /** Padding needed after a property so the object lands at propertyCol (at least minGap). */
    protected int calcPropertyPadding(String propStr)
    {
        int padding = propertyCol - propStr.length();
        if (padding < minGap)
            padding = minGap;
        return padding ;
    }

    /** Returns a string of 'cols' spaces (empty for cols <= 0). */
    protected static String pad(int cols)
    {
        StringBuffer sb = new StringBuffer() ;
        for ( int i = 0 ; i < cols ; i++ )
            sb.append(' ') ;
        return sb.toString() ;
    }

    // Utilities

    /** Number of statements with r as subject. */
    protected int countProperties(Resource r)
    {
        int numProp = 0 ;
        StmtIterator sIter = r.listProperties() ;
        for ( ; sIter.hasNext() ; )
        {
            sIter.nextStatement() ;
            numProp++ ;
        }
        sIter.close() ;
        return numProp ;
    }

    /** Number of statements with r as subject and p as predicate. */
    protected int countProperties(Resource r, Property p)
    {
        int numProp = 0 ;
        StmtIterator sIter = r.listProperties(p) ;
        for ( ; sIter.hasNext() ; )
        {
            sIter.nextStatement() ;
            numProp++ ;
        }
        sIter.close() ;
        return numProp ;
    }

    /** Number of statements with resource as object (any predicate). */
    protected int countArcsTo(Resource resource)
    {
        return countArcsTo(null, resource) ;
    }

    /** Number of statements with resource as object and the given predicate (null = any). */
    protected int countArcsTo(Property prop, Resource resource)
    {
        int numArcs = 0 ;
        StmtIterator sIter = resource.getModel().listStatements(null, prop, resource) ;
        for ( ; sIter.hasNext() ; )
        {
            sIter.nextStatement() ;
            numArcs++ ;
        }
        sIter.close() ;
        return numArcs ;
    }

    /**
     * Materializes an RDF collection into a list of its members, walking
     * rdf:first/rdf:rest until rdf:nil. Throws JenaException on malformed
     * lists (multi-valued items or multiple tails).
     */
    protected Iterator<RDFNode> rdfListIterator(Resource r)
    {
        List<RDFNode> list = new ArrayList<RDFNode>() ;

        for ( ; ! r.equals(RDF.nil); )
        {
            StmtIterator sIter = r.getModel().listStatements(r, RDF.first, (RDFNode)null) ;
            list.add(sIter.nextStatement().getObject()) ;
            if ( sIter.hasNext() )
                // @@ need to cope with this (unusual) case
                throw new JenaException("N3: Multi valued list item") ;
            sIter = r.getModel().listStatements(r, RDF.rest, (RDFNode)null) ;
            r = (Resource)sIter.nextStatement().getObject() ;
            if ( sIter.hasNext() )
                throw new JenaException("N3: List has two tails") ;
        }
        return list.iterator() ;
    }

    // Convenience operations for accessing system properties.

    /** Looks up a writer/system property, returning defaultValue when absent. */
    protected String getStringValue(String prop, String defaultValue)
    {
        String p = getPropValue(prop) ;

        if ( p == null )
            return defaultValue ;
        return p ;
    }

    /** Boolean property: "true" (case-insensitive) or "1" mean true; absent means defaultValue. */
    protected boolean getBooleanValue(String prop, boolean defaultValue)
    {
        String p = getPropValue(prop) ;

        if ( p == null )
            return defaultValue ;

        if ( p.equalsIgnoreCase("true") )
            return true ;

        if ( p.equals("1") )
            return true ;

        return false ;
    }

    /** Integer property; falls back to defaultValue (with a warning) on parse failure. */
    protected int getIntValue(String prop, int defaultValue)
    {
        String p = getPropValue(prop) ;
        if ( p == null )
            return defaultValue ;
        try {
            return Integer.parseInt(p) ;
        } catch (NumberFormatException ex)
        {
            logger.warn("Format error for property: "+prop) ;
            return defaultValue ;
        }
    }

    // May be the absolute or local form of the property name

    /**
     * Resolves a property value: first the per-writer property map (under the
     * absolute name), then the system property (absolute, then local name).
     */
    protected String getPropValue(String prop)
    {
        prop = absolutePropName(prop) ;
        if ( writerPropertyMap != null && writerPropertyMap.containsKey(prop) )
        {
            Object obj = writerPropertyMap.get(prop) ;
            if ( ! ( obj instanceof String ) )
                logger.warn("getPropValue: N3 Property for '"+prop+"' is not a string") ;
            return (String)obj ;
        }
        String s = JenaRuntime.getSystemProperty(prop) ;
        if ( s == null )
            s = JenaRuntime.getSystemProperty(localPropName(prop)) ;
        return s ;
    }

    /** Qualifies a bare property name with N3JenaWriter.propBase; leaves qualified names alone. */
    protected String absolutePropName(String propName)
    {
        if ( propName.indexOf(':') == -1 )
            return N3JenaWriter.propBase + propName ;
        return propName ;
    }

    /** Strips the N3JenaWriter.propBase prefix, if present. */
    protected String localPropName(String propName)
    {
        if ( propName.startsWith(N3JenaWriter.propBase) )
            propName = propName.substring(N3JenaWriter.propBase.length()) ;
        return propName ;
    }

    /** True if the URI is opaque (e.g. "mailto:..."), or unparsable. */
    private boolean isOpaque(String uri)
    {
        try {
            return new URI(uri).isOpaque() ;
        } catch (URISyntaxException ex) { return true ; }
    }
}
/*
* (c) Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
| |
/*
* Copyright 2002-2016 The Jamocha Team
*
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.jamocha.org/
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the License.
*/
package org.jamocha.languages.common;
import com.google.common.collect.Sets;
import lombok.*;
import org.jamocha.dn.memory.SlotAddress;
import org.jamocha.dn.memory.SlotType;
import org.jamocha.dn.memory.Template;
import org.jamocha.dn.memory.Template.Slot;
import org.jamocha.filter.ECCollector;
import org.jamocha.filter.Path;
import org.jamocha.function.fwa.*;
import org.jamocha.languages.common.ScopeStack.Scope;
import org.jamocha.languages.common.ScopeStack.VariableSymbol;
import org.jamocha.languages.common.SingleFactVariable.SingleSlotVariable;
import java.util.*;
import static java.util.stream.Collectors.toSet;
/**
* @author Fabian Ohler <fabian.ohler1@rwth-aachen.de>
* @author Christoph Terwelp <christoph.terwelp@rwth-aachen.de>
*/
@Getter
@RequiredArgsConstructor
public class RuleCondition {
@Getter
@RequiredArgsConstructor
public static class MatchingConfiguration {
    // Fact variable whose slots participate in this matching.
    final SingleFactVariable factVariable;
    // Addresses of the matched slots within the fact variable's template.
    final List<SlotAddress> matchingAddresses = new ArrayList<>();
}
private final Scope scope;
private final List<MatchingConfiguration> matchings = new ArrayList<>();
private final List<ConditionalElement<SymbolLeaf>> conditionalElements = new ArrayList<>();
private final Set<VariableSymbol> variableSymbols = new HashSet<>();
/**
 * Registers a variable symbol as belonging to this rule condition.
 */
public void addSymbol(final VariableSymbol symbol) {
    this.variableSymbols.add(symbol);
}
/**
 * Appends a matching configuration to this rule condition's matchings list.
 */
public void addMatchingConfiguration(final MatchingConfiguration matchingConfiguration) {
    this.matchings.add(matchingConfiguration);
}
/**
* Equivalence class whose elements are equal to each other.
*
* @author Fabian Ohler <fabian.ohler1@rwth-aachen.de>
*/
@Getter
@AllArgsConstructor(access = AccessLevel.PRIVATE)
// do not implement hashCode (or at least don't include anything that might change)
public static class EquivalenceClass {
final LinkedList<SingleFactVariable> factVariables;
final LinkedList<SingleSlotVariable> slotVariables;
final LinkedList<FunctionWithArguments<ECLeaf>> constantExpressions;
final LinkedList<FunctionWithArguments<ECLeaf>> functionalExpressions;
final Set<EquivalenceClass> equalParentEquivalenceClasses = new HashSet<>();
final Set<SingleFactVariable> merged = new HashSet<>();
protected Scope maximalScope;
@Setter
SlotType type;
@Override
public String toString() {
    // Render the slot type, then every non-empty member collection inside
    // braces, then the identity hash so distinct ECs are distinguishable.
    final StringBuilder builder = new StringBuilder();
    builder.append(Objects.toString(this.type)).append("-EC: {");
    if (!this.factVariables.isEmpty()) {
        builder.append(Objects.toString(this.factVariables));
    }
    if (!this.slotVariables.isEmpty()) {
        builder.append(Objects.toString(this.slotVariables));
    }
    if (!this.constantExpressions.isEmpty()) {
        builder.append(Objects.toString(this.constantExpressions));
    }
    if (!this.functionalExpressions.isEmpty()) {
        builder.append(Objects.toString(this.functionalExpressions));
    }
    if (!this.equalParentEquivalenceClasses.isEmpty()) {
        builder.append(Objects.toString(this.equalParentEquivalenceClasses));
    }
    builder.append("}@").append(Integer.toHexString(System.identityHashCode(this)));
    return builder.toString();
}
public static EquivalenceClass newPlainEC(final Scope maximalScope) {
return new EquivalenceClass(new LinkedList<>(), new LinkedList<>(), new LinkedList<>(), new LinkedList<>(),
maximalScope, null);
}
public static EquivalenceClass newECFromType(final Scope maximalScope, final SlotType type) {
return new EquivalenceClass(new LinkedList<>(), new LinkedList<>(), new LinkedList<>(), new LinkedList<>(),
maximalScope, type);
}
public static EquivalenceClass newECFromFactVariable(final Scope maximalScope, final SingleFactVariable fv) {
return new EquivalenceClass(new LinkedList<>(Collections.singleton(fv)), new LinkedList<>(),
new LinkedList<>(), new LinkedList<>(), maximalScope, SlotType.FACTADDRESS);
}
public static EquivalenceClass newECFromSlotVariable(final Scope maximalScope, final SingleSlotVariable sv) {
return new EquivalenceClass(new LinkedList<>(), new LinkedList<>(Collections.singleton(sv)),
new LinkedList<>(), new LinkedList<>(), maximalScope, sv.getType());
}
public static EquivalenceClass newECFromConstantExpression(final Scope maximalScope,
final FunctionWithArguments<ECLeaf> constantExpression) {
return new EquivalenceClass(new LinkedList<>(), new LinkedList<>(),
new LinkedList<>(Collections.singleton(constantExpression)), new LinkedList<>(), maximalScope,
constantExpression.getReturnType());
}
public static EquivalenceClass newECFromFunctionalExpression(final Scope maximalScope,
final FunctionWithArguments<ECLeaf> functionalExpression) {
return new EquivalenceClass(new LinkedList<>(), new LinkedList<>(), new LinkedList<>(),
new LinkedList<>(Collections.singleton(functionalExpression)), maximalScope,
functionalExpression.getReturnType());
}
public EquivalenceClass(final EquivalenceClass copy) {
this(new LinkedList<>(copy.factVariables), new LinkedList<>(copy.slotVariables),
new LinkedList<>(copy.constantExpressions), new LinkedList<>(copy.functionalExpressions),
copy.maximalScope, copy.type);
}
public Set<SingleFactVariable> getDirectlyDependentFactVariables() {
return Sets.union(Sets.newHashSet(this.getFactVariables()),
this.slotVariables.stream().map(SingleSlotVariable::getFactVariable).collect(toSet()));
}
public Set<SingleFactVariable> getDependentFactVariables() {
return Sets.union(Sets.newHashSet(this.getFactVariables()),
Sets.union(this.slotVariables.stream().map(SingleSlotVariable::getFactVariable).collect(toSet()),
this.functionalExpressions.stream().flatMap(fwa -> ECCollector.collect(fwa).stream())
.distinct().flatMap(ec -> ec.getFactVariables().stream()).collect(toSet())));
}
public void merge(final EquivalenceClass other) {
if (this.maximalScope != other.maximalScope) {
throw new IllegalArgumentException("Only equivalence classes of the same scope can be merged!");
}
if (this == other) return;
other.factVariables.forEach(this::add);
other.slotVariables.forEach(this::add);
other.constantExpressions.forEach(this::add);
if (null == this.type) this.type = other.type;
else if (null != other.type && other.type != this.type) {
throw new IllegalArgumentException("Only equivalence classes of equal types can be merged!");
}
other.factVariables.forEach(fv -> fv.setEqual(this));
other.slotVariables.forEach(sv -> {
assert 1 == sv.getEqualSet().size();
sv.getEqualSet().clear();
sv.getEqualSet().add(this);
});
}
/**
* Merges the equivalence classes contained in the slots of the equal fact variables. Does nothing if there are
* less than two fact variables. Does not necessarily completely merge all equivalence classes if there are fact
* variables of different templates in the equivalence class.
*/
public void mergeEquivalenceClassesOfFactVariables() {
if (this.factVariables.isEmpty()) {
return;
}
assert SlotType.FACTADDRESS == this.type;
while (true) {
final Optional<SingleFactVariable> optFactVariable =
this.factVariables.stream().filter(fv -> !this.merged.contains(fv)).findAny();
if (!optFactVariable.isPresent()) break;
final SingleFactVariable thisFV = optFactVariable.get();
final SingleFactVariable mergeFV = this.merged.stream().findAny()
.orElseGet(() -> this.factVariables.stream().filter(fv -> thisFV != fv).findAny().orElse(null));
if (null == mergeFV) break;
this.merged.add(thisFV);
this.merged.add(mergeFV);
this.factVariables.remove(thisFV);
this.factVariables.remove(mergeFV);
final Template template = thisFV.template;
assert thisFV.template == mergeFV.template;
for (final Slot slot : template.getSlots()) {
final SlotAddress slotAddress = template.getSlotAddress(slot.getName());
final SingleSlotVariable thisSV = thisFV.getSlots().get(slotAddress);
final SingleSlotVariable mergeSV = mergeFV.getSlots().get(slotAddress);
if (null != thisSV && null != mergeSV) {
final EquivalenceClass thisEC = thisSV.getEqual();
final EquivalenceClass mergeEC = mergeSV.getEqual();
thisEC.merge(mergeEC);
thisEC.mergeEquivalenceClassesOfFactVariables();
}
}
}
if (this.factVariables.isEmpty()) {
this.factVariables.add(this.merged.iterator().next());
}
}
public void add(final SingleFactVariable fv) {
if (null == this.type) this.type = SlotType.FACTADDRESS;
if (SlotType.FACTADDRESS != this.type) throw new IllegalArgumentException(
"Tried to add a SingleFactVariable to an EquivalenceClass of type " + this.type
+ " instead of FACTADDRESS!");
if (!this.factVariables.isEmpty() && fv.getTemplate() != this.factVariables.iterator().next()
.getTemplate()) {
throw new IllegalArgumentException(
"All fact variables of an equivalence class need to have the same template!");
}
this.factVariables.add(fv);
}
public void add(final SingleSlotVariable sv) {
if (null == this.type) this.type = sv.getType();
if (sv.getType() != this.type) {
throw new IllegalArgumentException(
"Tried to add a SingleSlotVariable of type " + sv.getType() + " to an EquivalenceClass of type "
+ this.type + "!");
}
if (this.slotVariables.contains(sv)) {
throw new IllegalArgumentException(
"Tried to add a SingleSlotVariable to an EquivalenceClass that already contained it!");
}
this.slotVariables.add(sv);
}
public void add(final FunctionWithArguments<ECLeaf> fwa) {
if (null == this.type) this.type = fwa.getReturnType();
if (fwa.getReturnType() != this.type) {
throw new IllegalArgumentException("Tried to add a FunctionWithArguments of type " + fwa.getReturnType()
+ " to an EquivalenceClass of type " + this.type + "!");
}
checkContainmentAndAdd(FunctionalExpressionIdentifier.isConstant(fwa) ? this.constantExpressions
: this.functionalExpressions, fwa);
}
public void add(final long value) {
if (null == this.type) this.type = SlotType.LONG;
if (SlotType.LONG != this.type) throw new IllegalArgumentException(
"Tried to add a LONG to an EquivalenceClass of type " + this.type + "!");
checkContainmentAndAdd(this.constantExpressions, new ConstantLeaf<>(value, SlotType.LONG));
}
public void add(final double value) {
if (null == this.type) this.type = SlotType.DOUBLE;
if (SlotType.DOUBLE != this.type) throw new IllegalArgumentException(
"Tried to add a DOUBLE to an EquivalenceClass of type " + this.type + "!");
checkContainmentAndAdd(this.constantExpressions, new ConstantLeaf<>(value, SlotType.DOUBLE));
}
public void add(final String value) {
if (null == this.type) this.type = SlotType.STRING;
if (SlotType.STRING != this.type) throw new IllegalArgumentException(
"Tried to add a STRING to an EquivalenceClass of type " + this.type + "!");
checkContainmentAndAdd(this.constantExpressions, new ConstantLeaf<>(value, SlotType.STRING));
}
private static void checkContainmentAndAdd(final LinkedList<FunctionWithArguments<ECLeaf>> target,
final FunctionWithArguments<ECLeaf> fwa) {
if (target.contains(fwa)) {
throw new IllegalArgumentException(
"Tried to add a FunctionWithArguments to an EquivalenceClass that already contained it!");
}
target.add(fwa);
}
public static void addEqualParentEquivalenceClassRelation(final EquivalenceClass a, final EquivalenceClass b) {
if (a.maximalScope.isParentOf(b.maximalScope)) {
b.addEqualParentEquivalenceClass(a);
} else if (b.maximalScope.isParentOf(a.maximalScope)) {
a.addEqualParentEquivalenceClass(b);
} else {
throw new IllegalArgumentException(
"The given equivalence classes are not in any child-parent relationship!");
}
}
public void addEqualParentEquivalenceClass(final EquivalenceClass parent) {
if (!parent.maximalScope.isParentOf(this.maximalScope)) {
throw new IllegalArgumentException(
"Given equivalence class is not part of a parenting scope w.r.t. this equivalence class!");
}
this.equalParentEquivalenceClasses.add(parent);
}
public PathLeaf getPathLeaf(final Map<EquivalenceClass, Path> ec2Path, final SingleSlotVariable sv) {
if (!this.factVariables.isEmpty()) {
return Optional.ofNullable(ec2Path.get(this.factVariables.getFirst().getEqual()))
.map(path -> new PathLeaf(path, (SlotAddress) null)).orElse(null);
}
return Optional.ofNullable(sv).map(var -> var.getPathLeaf(ec2Path)).orElse(null);
}
public PathLeaf getPathLeaf(final Map<EquivalenceClass, Path> ec2Path) {
return getPathLeaf(ec2Path, this.slotVariables.peekFirst());
}
public boolean hasMoreThanOneElementOrAParent() {
return getElementCount() + this.equalParentEquivalenceClasses.size() > 1;
}
public boolean hasMoreThanOneElement() {
return getElementCount() > 1;
}
public boolean containsAnyBinding() {
return !(this.factVariables.isEmpty() && this.slotVariables.isEmpty() && this.constantExpressions.isEmpty()
&& this.functionalExpressions.isEmpty());
}
public int getElementCount() {
return (this.factVariables.isEmpty() ? 0 : 1) + this.slotVariables.size() + this.constantExpressions.size()
+ this.functionalExpressions.size();
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.core.security.authentication.token;
import java.io.UnsupportedEncodingException;
import java.security.NoSuchAlgorithmException;
import java.security.Principal;
import java.security.SecureRandom;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.Map;
import javax.jcr.Node;
import javax.jcr.Property;
import javax.jcr.PropertyIterator;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.SimpleCredentials;
import org.apache.jackrabbit.api.JackrabbitSession;
import org.apache.jackrabbit.api.security.authentication.token.TokenCredentials;
import org.apache.jackrabbit.api.security.principal.ItemBasedPrincipal;
import org.apache.jackrabbit.api.security.user.Authorizable;
import org.apache.jackrabbit.api.security.user.User;
import org.apache.jackrabbit.api.security.user.UserManager;
import org.apache.jackrabbit.core.NodeImpl;
import org.apache.jackrabbit.core.SessionImpl;
import org.apache.jackrabbit.core.id.NodeId;
import org.apache.jackrabbit.core.id.NodeIdFactory;
import org.apache.jackrabbit.core.security.SecurityConstants;
import org.apache.jackrabbit.core.security.user.UserImpl;
import org.apache.jackrabbit.spi.Name;
import org.apache.jackrabbit.util.ISO8601;
import org.apache.jackrabbit.util.Text;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Backport of the TokenProvider implementation present with OAK adjusted to
* match some subtle differences in jackrabbit token login.
*/
class CompatTokenProvider {

    private static final Logger log = LoggerFactory.getLogger(CompatTokenProvider.class);

    private static final String TOKEN_ATTRIBUTE = ".token";
    private static final String TOKEN_ATTRIBUTE_EXPIRY = TOKEN_ATTRIBUTE + ".exp";
    private static final String TOKEN_ATTRIBUTE_KEY = TOKEN_ATTRIBUTE + ".key";
    private static final String TOKENS_NODE_NAME = ".tokens";
    private static final String TOKENS_NT_NAME = "nt:unstructured"; // TODO: configurable
    // separates the token node identifier from the random key part of the login token
    private static final char DELIM = '_';

    private final SessionImpl session;
    private final UserManager userManager;
    private final long tokenExpiration;

    CompatTokenProvider(SessionImpl session, long tokenExpiration) throws RepositoryException {
        this.session = session;
        this.userManager = session.getUserManager();
        this.tokenExpiration = tokenExpiration;
    }

    /**
     * Create a separate token node underneath a dedicated token store within
     * the user home node. That token node contains the hashed token, the
     * expiration time and additional mandatory attributes that will be verified
     * during login.
     *
     * @param user The user for which to create the token.
     * @param sc The current simple credentials.
     * @return A new {@code TokenInfo} or {@code null} if the token could not
     * be created.
     * @throws RepositoryException if the user has no corresponding node in this
     * workspace or the token node cannot be persisted.
     */
    public TokenInfo createToken(User user, SimpleCredentials sc) throws RepositoryException {
        String userPath = null;
        Principal pr = user.getPrincipal();
        if (pr instanceof ItemBasedPrincipal) {
            userPath = ((ItemBasedPrincipal) pr).getPath();
        }
        TokenCredentials tokenCredentials;
        if (userPath != null && session.nodeExists(userPath)) {
            Node userNode = session.getNode(userPath);
            Node tokenParent;
            if (!userNode.hasNode(TOKENS_NODE_NAME)) {
                userNode.addNode(TOKENS_NODE_NAME, TOKENS_NT_NAME);
                try {
                    session.save();
                } catch (RepositoryException e) {
                    // may happen when .tokens node is created concurrently
                    session.refresh(false);
                }
            }
            tokenParent = userNode.getNode(TOKENS_NODE_NAME);

            long creationTime = new Date().getTime();
            long expirationTime = creationTime + tokenExpiration;
            Calendar cal = GregorianCalendar.getInstance();
            cal.setTimeInMillis(creationTime);

            // generate key part of the login token
            String key = generateKey(8);

            // create the token node; node name derives from the creation time
            String tokenName = Text.replace(ISO8601.format(cal), ":", ".");
            Node tokenNode;
            // avoid usage of sequential nodeIDs
            if (System.getProperty(NodeIdFactory.SEQUENTIAL_NODE_ID) == null) {
                tokenNode = tokenParent.addNode(tokenName);
            } else {
                tokenNode = ((NodeImpl) tokenParent).addNodeWithUuid(tokenName, NodeId.randomId().toString());
            }

            // login token = <token node identifier> DELIM <random key>
            StringBuilder sb = new StringBuilder(tokenNode.getIdentifier());
            sb.append(DELIM).append(key);

            String token = sb.toString();
            tokenCredentials = new TokenCredentials(token);
            sc.setAttribute(TOKEN_ATTRIBUTE, token);

            // add key property (only the digested key is persisted)
            tokenNode.setProperty(TOKEN_ATTRIBUTE_KEY, getDigestedKey(key));

            // add expiration time property
            cal.setTimeInMillis(expirationTime);
            tokenNode.setProperty(TOKEN_ATTRIBUTE_EXPIRY, session.getValueFactory().createValue(cal));

            // add additional attributes passed in by the credentials.
            for (String name : sc.getAttributeNames()) {
                if (!TOKEN_ATTRIBUTE.equals(name)) {
                    String value = sc.getAttribute(name).toString();
                    tokenNode.setProperty(name, value);
                    tokenCredentials.setAttribute(name, value);
                }
            }
            session.save();
            return new CompatModeInfo(token, tokenNode);
        } else {
            throw new RepositoryException("Cannot create login token: No corresponding node for User " + user.getID() +" in workspace '" + session.getWorkspace().getName() + "'.");
        }
    }

    /**
     * Retrieves the token information associated with the specified login
     * token. If no accessible {@code Tree} exists for the given token or if
     * the token is not associated with a valid user this method returns {@code null}.
     *
     * @param token A valid login token.
     * @return The {@code TokenInfo} associated with the specified token or
     * {@code null} of the corresponding information does not exist or is not
     * associated with a valid user.
     */
    public TokenInfo getTokenInfo(String token) throws RepositoryException {
        if (token == null) {
            return null;
        }
        NodeImpl tokenNode = (NodeImpl) getTokenNode(token, session);
        String userId = getUserId(tokenNode, userManager);
        if (userId == null || !isValidTokenTree(tokenNode)) {
            return null;
        } else {
            return new CompatModeInfo(token);
        }
    }

    /**
     * Resolves the node referenced by the identifier part of the token
     * (everything before the first {@link #DELIM}).
     */
    static Node getTokenNode(String token, Session session) throws RepositoryException {
        int pos = token.indexOf(DELIM);
        String id = (pos == -1) ? token : token.substring(0, pos);
        return session.getNodeByIdentifier(id);
    }

    /**
     * Determines the user id owning the token node of the given credentials.
     *
     * @throws RepositoryException if the session is not a {@code JackrabbitSession}
     * or the user cannot be derived from the token node.
     */
    public static String getUserId(TokenCredentials tokenCredentials, Session session) throws RepositoryException {
        if (!(session instanceof JackrabbitSession)) {
            throw new RepositoryException("JackrabbitSession expected");
        }
        NodeImpl n = (NodeImpl) getTokenNode(tokenCredentials.getToken(), session);
        return getUserId(n, ((JackrabbitSession) session).getUserManager());
    }

    /**
     * Derives the user id from the grand-parent of the token node (token nodes
     * live at {@code <user>/.tokens/<token>}). Returns {@code null} for a
     * missing token node or a missing/group/disabled authorizable.
     */
    private static String getUserId(NodeImpl tokenNode, UserManager userManager) throws RepositoryException {
        if (tokenNode != null) {
            final NodeImpl userNode = (NodeImpl) tokenNode.getParent().getParent();
            final String principalName = userNode.getProperty(UserImpl.P_PRINCIPAL_NAME).getString();
            if (userNode.isNodeType(UserImpl.NT_REP_USER)) {
                Authorizable a = userManager.getAuthorizable(new ItemBasedPrincipal() {
                    public String getPath() throws RepositoryException {
                        return userNode.getPath();
                    }
                    public String getName() {
                        return principalName;
                    }
                });
                if (a != null && !a.isGroup() && !((User)a).isDisabled()) {
                    return a.getID();
                }
            } else {
                throw new RepositoryException("Failed to calculate userId from token credentials");
            }
        }
        return null;
    }

    /**
     * Returns {@code true} if the specified {@code attributeName}
     * starts with or equals {@link #TOKEN_ATTRIBUTE}.
     *
     * @param attributeName The attribute name.
     * @return {@code true} if the specified {@code attributeName}
     * starts with or equals {@link #TOKEN_ATTRIBUTE}.
     */
    static boolean isMandatoryAttribute(String attributeName) {
        return attributeName != null && attributeName.startsWith(TOKEN_ATTRIBUTE);
    }

    /**
     * Returns <code>false</code> if the specified attribute name doesn't have
     * a 'jcr' or 'rep' namespace prefix; <code>true</code> otherwise. This is
     * a lazy evaluation in order to avoid testing the defining node type of
     * the associated jcr property.
     *
     * @param propertyName The property name to test.
     * @return <code>true</code> if the specified property name doesn't seem
     * to represent repository internal information.
     */
    private static boolean isInfoAttribute(String propertyName) {
        String prefix = Text.getNamespacePrefix(propertyName);
        return !Name.NS_JCR_PREFIX.equals(prefix) && !Name.NS_REP_PREFIX.equals(prefix);
    }

    /**
     * A token node is considered valid iff its parent is the dedicated
     * {@code .tokens} store.
     */
    private static boolean isValidTokenTree(NodeImpl tokenNode) throws RepositoryException {
        if (tokenNode == null) {
            return false;
        } else {
            return TOKENS_NODE_NAME.equals(tokenNode.getParent().getName());
        }
    }

    /**
     * Generates a random key of {@code size} bytes, hex-encoded (result has
     * {@code 2 * size} characters). Uses {@link SecureRandom} as the tokens
     * are security relevant.
     */
    private static String generateKey(int size) {
        SecureRandom random = new SecureRandom();
        byte[] key = new byte[size];
        random.nextBytes(key);

        StringBuilder res = new StringBuilder(key.length * 2);
        for (byte b : key) {
            res.append(Text.hexTable[(b >> 4) & 15]);
            res.append(Text.hexTable[b & 15]);
        }
        return res.toString();
    }

    /**
     * Extracts the key part (after {@link #DELIM}) from the credentials' token
     * and digests it; returns {@code null} if the token contains no key part.
     */
    private static String getDigestedKey(TokenCredentials tc) throws RepositoryException {
        String tk = tc.getToken();
        int pos = tk.indexOf(DELIM);
        if (pos > -1) {
            return getDigestedKey(tk.substring(pos+1));
        }
        return null;
    }

    /**
     * Digests the given key with the default digest algorithm, prefixed with
     * the algorithm name in curly braces (e.g. {@code {sha1}...}).
     */
    private static String getDigestedKey(String key) throws RepositoryException {
        try {
            StringBuilder sb = new StringBuilder();
            sb.append("{").append(SecurityConstants.DEFAULT_DIGEST).append("}");
            sb.append(Text.digest(SecurityConstants.DEFAULT_DIGEST, key, "UTF-8"));
            return sb.toString();
        } catch (NoSuchAlgorithmException e) {
            // preserve the cause so the root failure remains diagnosable
            throw new RepositoryException("Failed to generate login token.", e);
        } catch (UnsupportedEncodingException e) {
            throw new RepositoryException("Failed to generate login token.", e);
        }
    }

    /**
     * TokenInfo implementation reading key, expiry, and mandatory/informative
     * attributes from the token node at construction time.
     */
    private final class CompatModeInfo implements TokenInfo {

        private final String token;
        private final Map<String, String> attributes;
        private final Map<String, String> info;
        private final long expiry;
        private final String key;

        private CompatModeInfo(String token) throws RepositoryException {
            this(token, getTokenNode(token, session));
        }

        private CompatModeInfo(String token, Node n) throws RepositoryException {
            this.token = token;
            long expTime = Long.MAX_VALUE;
            String keyV = null;
            if (token != null) {
                attributes = new HashMap<String, String>();
                info = new HashMap<String, String>();
                // partition the node's properties into expiry, key, mandatory
                // attributes (checked on login) and informative attributes
                PropertyIterator it = n.getProperties();
                while (it.hasNext()) {
                    Property p = it.nextProperty();
                    String name = p.getName();
                    if (TOKEN_ATTRIBUTE_EXPIRY.equals(name)) {
                        expTime = p.getLong();
                    } else if (TOKEN_ATTRIBUTE_KEY.equals(name)) {
                        keyV = p.getString();
                    } else if (isMandatoryAttribute(name)) {
                        attributes.put(name, p.getString());
                    } else if (isInfoAttribute(name)) {
                        info.put(name, p.getString());
                    } // else: jcr property -> ignore
                }
            } else {
                attributes = Collections.emptyMap();
                info = Collections.emptyMap();
            }
            expiry = expTime;
            key = keyV;
        }

        public String getToken() {
            return token;
        }

        public boolean isExpired(long loginTime) {
            return expiry < loginTime;
        }

        /**
         * Removes the token node using a fresh session so the removal is
         * isolated from pending changes on the login session.
         */
        public boolean remove() {
            Session s = null;
            try {
                s = ((SessionImpl) session).createSession(session.getWorkspace().getName());
                Node tokenNode = getTokenNode(token, s);

                tokenNode.remove();
                s.save();
                return true;
            } catch (RepositoryException e) {
                log.warn("Internal error while removing token node.", e);
            } finally {
                if (s != null) {
                    s.logout();
                }
            }
            return false;
        }

        /**
         * Matches the credentials against the stored digested key and the
         * mandatory attributes; on success copies missing informative
         * attributes onto the credentials.
         */
        public boolean matches(TokenCredentials tokenCredentials) throws RepositoryException {
            // test for matching key
            if (key != null && !key.equals(getDigestedKey(tokenCredentials))) {
                return false;
            }

            // check if all other required attributes match
            for (String name : attributes.keySet()) {
                if (!attributes.get(name).equals(tokenCredentials.getAttribute(name))) {
                    // no match -> login fails.
                    return false;
                }
            }

            // update set of informative attributes on the credentials
            // based on the properties present on the token node.
            Collection<String> attrNames = Arrays.asList(tokenCredentials.getAttributeNames());
            for (String infoName : info.keySet()) {
                if (!attrNames.contains(infoName)) {
                    tokenCredentials.setAttribute(infoName, info.get(infoName));
                }
            }
            return true;
        }

        /**
         * Extends the token lifetime once less than half of the configured
         * expiration interval remains; uses a fresh session for the update.
         */
        public boolean resetExpiration(long loginTime) throws RepositoryException {
            Node tokenNode;
            Session s = null;
            try {
                // expiry...
                if (expiry - loginTime <= tokenExpiration/2) {
                    long expirationTime = loginTime + tokenExpiration;
                    Calendar cal = GregorianCalendar.getInstance();
                    cal.setTimeInMillis(expirationTime);

                    s = ((SessionImpl) session).createSession(session.getWorkspace().getName());
                    tokenNode = getTokenNode(token, s);
                    tokenNode.setProperty(TOKEN_ATTRIBUTE_EXPIRY, s.getValueFactory().createValue(cal));
                    s.save();
                    return true;
                }
            } catch (RepositoryException e) {
                log.warn("Failed to update expiry or informative attributes of token node.", e);
            } finally {
                if (s != null) {
                    s.logout();
                }
            }
            return false;
        }

        /**
         * Rebuilds {@link TokenCredentials} carrying both the mandatory and the
         * informative attributes.
         */
        public TokenCredentials getCredentials() {
            TokenCredentials tc = new TokenCredentials(token);
            for (String name : attributes.keySet()) {
                tc.setAttribute(name, attributes.get(name));
            }
            for (String name : info.keySet()) {
                tc.setAttribute(name, info.get(name));
            }
            return tc;
        }
    }
}
| |
package org.semanticweb.yars.nx.cli;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.semanticweb.yars.nx.BNode;
import org.semanticweb.yars.nx.Literal;
import org.semanticweb.yars.nx.Node;
import org.semanticweb.yars.nx.NodeComparator;
import org.semanticweb.yars.nx.Resource;
import org.semanticweb.yars.nx.namespace.RDF;
import org.semanticweb.yars.nx.parser.NxParser;
public class CreateRDFXML {
/**
* Create RDF/XML from NTriples files (sorted by subject)
*/
public static void main(String[] args) throws IOException, org.semanticweb.yars.nx.parser.ParseException {
Option inputO = new Option("i", "name of file to read, - for stdin");
inputO.setArgs(1);
Option outputO = new Option("o", "name of file to write, - for stdout");
outputO.setArgs(1);
Option helpO = new Option("h", "print help");
Options options = new Options();
options.addOption(inputO);
options.addOption(outputO);
options.addOption(helpO);
CommandLineParser parser = new BasicParser();
CommandLine cmd = null;
try {
cmd = parser.parse(options, args);
} catch (ParseException e) {
System.err.println("***ERROR: " + e.getClass() + ": " + e.getMessage());
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("parameters:", options );
return;
}
if (cmd.hasOption("h")) {
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("parameters:", options );
return;
}
InputStream in = System.in;
PrintStream out = System.out;
int elements = -1;
if (cmd.hasOption("i")) {
if (cmd.getOptionValue("i").equals("-")) {
in = System.in;
} else {
in = new FileInputStream(cmd.getOptionValue("i"));
}
}
if (cmd.hasOption("o")) {
if (cmd.getOptionValue("o").equals("-")) {
out = System.out;
} else {
out = new PrintStream(new FileOutputStream(cmd.getOptionValue("o")));
}
}
printHeaderRDFXML(out);
Node oldsubj = null;
Node subj = null;
List<Node[]> list = new ArrayList<Node[]>();
NxParser nxp = new NxParser(in);
while (nxp.hasNext()) {
Node[] nx = nxp.next();
subj = nx[0];
// new subject encountered
if (oldsubj != null && !subj.equals(oldsubj)) {
printRDFXML(list, out);
list = new ArrayList<Node[]>();
}
list.add(nx);
oldsubj = subj;
}
printRDFXML(list, out);
printFooterRDFXML(out);
in.close();
out.close();
}
static void printHeaderRDFXML(PrintStream out) {
out.println("<?xml version='1.0'?>");
out.println("<rdf:RDF xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns#'>");
}
static void printFooterRDFXML(PrintStream out) {
out.println("</rdf:RDF>");
}
static void printRDFXML(List<Node[]> list, PrintStream out) {
if (list.isEmpty()) {
return;
}
for (Node[] nx : list) {
if (nx[2].toString().equals("0.0")) {
return;
}
}
Node subj = list.get(0)[0];
out.print("<rdf:Description");
if (subj instanceof Resource) {
out.println(" rdf:about='" + escape(subj.toString()) + "'>");
} else if (subj instanceof BNode) {
out.println(" rdf:nodeID='" + subj.toString() + "'>");
}
for (Node[] ns: list) {
String r = ns[1].toString();
String namespace = null, localname = null;
int i = r.indexOf('#');
if (i > 0) {
namespace = r.substring(0, i+1);
localname = r.substring(i+1, r.length());
} else {
i = r.lastIndexOf('/');
if (i > 0) {
namespace = r.substring(0, i+1);
localname = r.substring(i+1, r.length());
}
}
if (namespace == null && localname == null) {
System.err.println("couldn't separate namespace and localname");
break;
}
out.print("\t<" + localname + " xmlns='" + namespace + "'");
if (ns[2] instanceof BNode) {
out.println(" rdf:nodeID='" + ns[2].toString() + "'/>");
} else if (ns[2] instanceof Resource) {
out.println(" rdf:resource='" + escape(ns[2].toString()) + "'/>");
} else if (ns[2] instanceof Literal) {
Literal l = (Literal)ns[2];
if (l.getLanguageTag() != null) {
out.print(" xml:lang='" + l.getLanguageTag() + "'");
} else if (l.getDatatype() != null) {
out.print(" rdf:datatype='" + l.getDatatype().toString() + "'");
}
out.println(">" + escape(ns[2].toString()) + "</" + localname + ">");
}
}
out.println("</rdf:Description>");
}
private static String escape(String s){
String e;
e = s.replaceAll("&", "&");
e = e.replaceAll("<", "<");
e = e.replaceAll(">", ">");
e = e.replaceAll("\"",""");
e = e.replaceAll("'","'");
return e;
}
}
| |
package org.tools.hqlbuilder.client;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.collections4.CollectionUtils;
import org.jhaws.common.lang.StringUtils;
import org.jhaws.common.web.resteasy.RestResource;
import org.slf4j.LoggerFactory;
import org.swingeasy.ObjectWrapper;
import org.tools.hqlbuilder.common.CommonUtilsAdd;
import org.tools.hqlbuilder.common.DelegatingHqlService;
import org.tools.hqlbuilder.common.HqlService;
/**
* @author Jurgen
*/
public class HqlServiceClientImpl extends DelegatingHqlService implements HqlServiceClient {
    private static final org.slf4j.Logger logger = LoggerFactory.getLogger(HqlServiceClientImpl.class);

    public static final String NEWLINE = "\n";

    // the wrapped service all HqlService calls are delegated to
    private HqlService hqlService;

    // remote endpoint URL of the service
    private String serviceUrl;

    // keywords/tokens used to split generated SQL/HQL into formatted lines
    private String[] keywordGroups = {
            "cross join",
            "right outer join",
            "left outer join",
            "inner join",
            "from",
            "where",
            "having",
            "and",
            "or",
            "group by",
            "order by",
            "select",
            "," };

    /** when cleaning up HQL: replace key by value */
    private Map<String, String> hqlReplacers = new HashMap<>();
public HqlService getHqlService() {
return hqlService;
}
    /**
     * Configures the backing {@link HqlService} this client delegates to.
     *
     * @param hqlService the service to wrap
     */
    public void setHqlService(HqlService hqlService) {
        this.hqlService = hqlService;
    }
/**
* @see org.tools.hqlbuilder.client.HqlServiceClient#getServiceUrl()
*/
@Override
public String getServiceUrl() {
return this.serviceUrl;
}
/**
* @see org.tools.hqlbuilder.client.HqlServiceClient#setServiceUrl(java.lang.String)
*/
    /**
     * Stores the remote service endpoint URL.
     *
     * @see org.tools.hqlbuilder.client.HqlServiceClient#setServiceUrl(java.lang.String)
     */
    @Override
    public void setServiceUrl(String serviceUrl) {
        this.serviceUrl = serviceUrl;
    }
/**
* @see org.tools.hqlbuilder.common.DelegatingHqlService#getDelegate()
*/
@Override
public HqlService getDelegate() {
return hqlService;
}
/**
* @see org.tools.hqlbuilder.client.HqlServiceClient#cleanupSql(java.lang.String, java.lang.String[], java.lang.String[][], boolean, boolean,
* boolean)
*/
@Override
public String cleanupSql(String sqlString, String[] queryReturnAliases, String[][] scalarColumnNames, boolean replaceProperties,
boolean formatLines, boolean removeReplacers) {
if (sqlString == null) {
return "";
}
        // column alias (only possible once the query has already been converted, i.e. the second time this method is called)
if (queryReturnAliases != null) {
for (int i = 0; i < queryReturnAliases.length; i++) {
String queryReturnAlias = queryReturnAliases[i];
if (queryReturnAlias != null) {
try {
String scalarColumnName = scalarColumnNames[i][0];
if (queryReturnAlias != null) {
try {
                            // numbers are replaced by 'column_<number>' because a bare number is not accepted as an alias
Long.parseLong(queryReturnAlias);
String newAlias = queryReturnAlias.replace('.', ' ').replace('(', ' ').replace(')', ' ').trim().replace(' ', '_');
logger.trace(": " + scalarColumnName + " >> " + queryReturnAlias + " >> " + newAlias);
sqlString = sqlString.replace(scalarColumnName, newAlias);
} catch (NumberFormatException ex) {
logger.trace(": " + scalarColumnName + " >> " + queryReturnAlias);
sqlString = sqlString.replace(scalarColumnName, queryReturnAlias);
}
}
} catch (ArrayIndexOutOfBoundsException ex) {
//
}
}
}
}
// maakt replacers aan
HashMap<String, String> replacers = new HashMap<>();
// vervang tabel_?_?_ door tabelnamen in "from ..." en "... join ..."
// vervang replacers
{
String prefix = "((from)|(join)|(,))";
String joinfromgroup = "( ([a-zA-Z0-9_]+) ([a-zA-Z0-9_]+))";
Matcher matcher = Pattern.compile(prefix + joinfromgroup, Pattern.CASE_INSENSITIVE).matcher(sqlString);
int startpos = 0;
while (matcher.find(startpos)) {
String replacing = matcher.group(7);
if ("when".equals(replacing)) {
startpos++;
continue;
}
String replaceBy = matcher.group(6);
for (Map.Entry<String, String> hqlReplacer : hqlReplacers.entrySet()) {
if (replaceBy.contains(hqlReplacer.getKey())) {
logger.trace("-> " + replaceBy + " >> " + replaceBy.replace(hqlReplacer.getKey(), hqlReplacer.getValue()));
replaceBy = replaceBy.replace(hqlReplacer.getKey(), hqlReplacer.getValue());
}
}
@SuppressWarnings("deprecation")
int existing = CollectionUtils.cardinality(replaceBy, replacers.values());
if (existing > 0) {
logger.trace("-> " + replaceBy + " >> " + replaceBy + (existing + 1));
replaceBy = replaceBy + (existing + 1);
}
logger.trace("- " + replacing + " >> " + replaceBy);
replacers.put(replacing, replaceBy);
startpos = matcher.end();
}
}
// vervang (1) door (2) om geen dubbels te hebben
// (1) tabel_?_?_=tabelnaamY EN tabel_?_=tabelnaamX
// (2) tabel_?_?_=tabelnaamX_tabelnaamY EN tabel_?_=tabelnaamX
List<String> hqlReplacerMap = new ArrayList<>();
for (Map.Entry<String, String> replacer : replacers.entrySet()) {
for (Map.Entry<String, String> replacerOther : replacers.entrySet()) {
if (!replacer.getKey().equals(replacerOther.getKey()) && replacer.getKey().startsWith(replacerOther.getKey())) {
String newvalue = replacerOther.getValue() + "_" + replacer.getValue();
// oracle heeft 30 len limiet
if (newvalue.length() > 30) {
newvalue = newvalue.substring(0, 30);
}
logger.trace("* " + replacer + " EN " + replacerOther + " >> " + replacer.getValue() + "=" + newvalue);
replacer.setValue(newvalue);
hqlReplacerMap.add(newvalue);
}
}
}
// sorteer replacers op langste eerst
List<String> keys = new ArrayList<>(replacers.keySet());
Collections.sort(keys, (o1, o2) -> {
if (o1.length() < o2.length()) {
return 1;
} else if (o1.length() > o2.length()) {
return -1;
} else {
return 0;
}
});
// vervang nu replacers
for (String key : keys) {
String value = replacers.get(key);
logger.trace("+ " + key + " > " + value);
sqlString = sqlString.replace(key, value);
}
// vervang kolomnamen
if (replaceProperties) {
Matcher matcher = Pattern.compile("(( )([^ ]+)( as )([a-zA-Z0-9_]+))", Pattern.CASE_INSENSITIVE).matcher(sqlString);
while (matcher.find()) {
String newvalue = matcher.group(3).replace('.', ' ').replace('(', ' ').replace(')', ' ').trim().replace(' ', '_');
// oracle heeft 30 len limiet
if (newvalue.length() > 30) {
newvalue = newvalue.substring(0, 30);
}
newvalue = " " + matcher.group(3) + " as " + newvalue;
String group = matcher.group();
try {
logger.trace("/ " + group + " > " + newvalue);
sqlString = sqlString.replaceAll("\\Q" + group + "\\E", newvalue);
} catch (Exception ex) {
logger.warn("ERROR: " + ex);
}
}
}
logger.debug(sqlString);
if (formatLines) {
sqlString = makeMultiline(sqlString);
}
sqlString = removeBlanks(sqlString);
@SuppressWarnings("unused")
String[] sqlStringParts = sqlString.split(getNewline());
String[] lines = sqlString.split(getNewline());
if (removeReplacers) {
for (int i = 0; i < lines.length; i++) {
String line = lines[i];
boolean keep = true;
for (String hqlReplacer : hqlReplacers.values()) {
if (line.contains(hqlReplacer)) {
keep &= keep(hqlReplacer, hqlReplacerMap, lines, i, line);
}
}
// zal verwijderd worden
if (!keep) {
lines[i] = null;
}
}
}
StringBuilder anew = new StringBuilder();
for (String line : lines) {
if (line != null) {
anew.append(line).append(getNewline());
}
}
sqlString = anew.toString();
// logger.debug(sqlString);
try {
sqlString = CommonUtilsAdd.call(
Class.forName("org.hibernate.jdbc.util.BasicFormatterImpl").getDeclaredConstructor().newInstance(),
"format", String.class,
sqlString);
} catch (Throwable ex) {
if (!warn1) {
warn1 = true;
logger.warn("{}", String.valueOf(ex));
}
}
try {
Object formatter = Class.forName("org.hibernate.engine.jdbc.internal.BasicFormatterImpl")
.getDeclaredConstructor().newInstance();
try {
@SuppressWarnings("unchecked")
Set<String> BEGIN_CLAUSES = (Set<String>) new ObjectWrapper(formatter).get("BEGIN_CLAUSES");
if (!BEGIN_CLAUSES.contains("cross")) {
BEGIN_CLAUSES.add("cross");
}
} catch (Exception ex) {
//
}
sqlString = CommonUtilsAdd.call(formatter, "format", String.class, sqlString);
} catch (Throwable ex) {
if (!warn2) {
warn2 = true;
logger.warn("{}", String.valueOf(ex));
}
}
logger.info(sqlString);
return sqlString;
}
private boolean warn1 = false;
private boolean warn2 = false;
/**
 * Returns the newline token used when formatting SQL over multiple lines.
 *
 * @see org.tools.hqlbuilder.client.HqlServiceClient#getNewline()
 */
@Override
public String getNewline() {
    return NEWLINE;
}
/**
 * Breaks a single-line SQL string into multiple lines: every configured keyword
 * group starts a new line, and order-by items ending in ASC/DESC each get their
 * own line.
 *
 * @see org.tools.hqlbuilder.client.HqlServiceClient#makeMultiline(java.lang.String)
 */
@Override
public String makeMultiline(String string) {
    String result = string;
    for (String keywordGroup : keywordGroups) {
        result = lineformat1replace(result, keywordGroup);
    }
    // (?i) case-insensitive; [ ]*+ possessive zero-or-more spaces; $1 keeps the keyword.
    result = result.replaceAll("(?i) (ASC)[ ]*+,[ ]*+", " $1," + getNewline());
    return result.replaceAll("(?i) (DESC)[ ]*+,[ ]*+", " $1," + getNewline());
}
/**
 * Collapses unnecessary whitespace in the given string.
 *
 * @see org.tools.hqlbuilder.client.HqlServiceClient#removeBlanks(java.lang.String)
 */
@Override
public String removeBlanks(String string) {
    return StringUtils.removeUnnecessaryWhiteSpaces(string);
}
// Puts the given splitter keyword at the start of a new line: " splitter " becomes
// " <newline>splitter ". A placeholder token is inserted first so the replacement
// can restore the canonical spelling of a case-insensitively matched keyword.
private String lineformat1replace(String string, String splitter) {
    String placeholder = "AAAAAAAAAAA";
    Pattern pattern = Pattern.compile(" " + splitter + " ", Pattern.CASE_INSENSITIVE);
    return pattern.matcher(string)
            .replaceAll(" " + getNewline() + placeholder + " ")
            .replaceAll(placeholder, splitter);
}
// Decides whether a line mentioning a generated replacer alias should be kept.
// A line is kept when the alias value does not actually occur in it, or when a
// combined alias it contains also appears on another still-present line.
private boolean keep(String hqlReplacerValue, List<String> hqlReplacerValueX, String[] lines, int i, String line) {
    if (!line.contains(hqlReplacerValue)) {
        return true;
    }
    for (String combined : hqlReplacerValueX) {
        if (!line.contains(combined)) {
            continue;
        }
        for (int j = 0; j < lines.length; j++) {
            String other = lines[j];
            if (j != i && other != null && other.contains(combined)) {
                // Shared alias is still referenced elsewhere: this line may not be removed.
                return true;
            }
        }
    }
    return false;
}
/** @return the alias replacement map applied by {@code cleanupSql} */
public Map<String, String> getHqlReplacers() {
    return this.hqlReplacers;
}

/** @param hqlReplacers the alias replacement map applied by {@code cleanupSql} */
public void setHqlReplacers(Map<String, String> hqlReplacers) {
    this.hqlReplacers = hqlReplacers;
}
@Override
public String getHibernateHelpURL() {
    return stripShortcutPrefix(this.hqlService.getHibernateHelpURL());
}

@Override
public String getHqlHelpURL() {
    return stripShortcutPrefix(this.hqlService.getHqlHelpURL());
}

@Override
public String getLuceneHelpURL() {
    return stripShortcutPrefix(this.hqlService.getLuceneHelpURL());
}

// Removes the internet-shortcut URL prefix so only the raw help URL remains.
private String stripShortcutPrefix(String url) {
    return url.replace(RestResource.INTERNET_SHORTCUT_URL, "");
}
}
| |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ximplementation.spring;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Scope;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.stereotype.Component;
import org.ximplementation.Implement;
import org.ximplementation.Implementor;
import org.ximplementation.Validity;
/**
* {@linkplain ImplementeeBeanCreationPostProcessor} unit tests.
*
* @author earthangry@gmail.com
* @date 2016-8-16
*
*/
public class ImplementeeBeanCreationPostProcessorTest
{
    private ApplicationContext applicationContext;

    @Before
    public void setUp() throws Exception
    {
        applicationContext = new ClassPathXmlApplicationContext(
                "classpath:applicationContext.xml");
    }

    @After
    public void tearDown() throws Exception
    {
        // Close the context so singletons are destroyed and nothing leaks between tests.
        ((ClassPathXmlApplicationContext) applicationContext).close();
    }

    @Test
    public void testInjectionAndAop_byAnnotation()
    {
        Controller controller = applicationContext.getBean(Controller.class);

        // Each argument type routes to a different Service implementor; the aspect
        // prefixes every result.
        String re = controller.handle(Byte.valueOf((byte) 5));
        assertEquals(MyAspect.PREFIX + ServiceImpl0.MY_RE, re);

        re = controller.handle(ServiceImpl1.MY_NUMBER);
        assertEquals(MyAspect.PREFIX + ServiceImpl1.MY_RE, re);

        re = controller.handle(12345);
        assertEquals(MyAspect.PREFIX + ServiceImpl2.MY_RE, re);

        re = controller.handle(Float.valueOf(5.0F));
        assertEquals(MyAspect.PREFIX + ServiceImpl3.MY_RE, re);
    }

    @Test
    public void testInjectionAndAop_byXMLConfig()
    {
        {
            ControllerForXMLConfig controller = (ControllerForXMLConfig) applicationContext
                    .getBean("controller0-xml-config");

            String re = controller.handle(Byte.valueOf((byte) 5));
            assertEquals(MyAspect.PREFIX + ServiceImpl0.MY_RE, re);

            re = controller.handle(ServiceImpl1.MY_NUMBER);
            assertEquals(MyAspect.PREFIX + ServiceImpl1.MY_RE, re);

            re = controller.handle(12345);
            assertEquals(MyAspect.PREFIX + ServiceImpl2.MY_RE, re);

            re = controller.handle(Float.valueOf(5.0F));
            assertEquals(MyAspect.PREFIX + ServiceImpl3.MY_RE, re);
        }

        {
            ControllerForXMLConfig controller = (ControllerForXMLConfig) applicationContext
                    .getBean("controller1-xml-config");

            String re = controller.handle(Byte.valueOf((byte) 5));
            assertEquals(MyAspect.PREFIX + ServiceImpl0.MY_RE, re);

            re = controller.handle(ServiceImpl1.MY_NUMBER);
            assertEquals(MyAspect.PREFIX + ServiceImpl1.MY_RE, re);

            re = controller.handle(12345);
            assertEquals(MyAspect.PREFIX + ServiceImpl2.MY_RE, re);

            re = controller.handle(Float.valueOf(5.0F));
            assertEquals(MyAspect.PREFIX + ServiceImpl3.MY_RE, re);
        }
    }

    @Test
    public void testOnlyOneDependentImplementeeBeanCreated()
    {
        Controller controller = applicationContext.getBean(Controller.class);
        Controller1 controller1 = applicationContext.getBean(Controller1.class);

        // Identity comparison on purpose: both controllers must share the same
        // implementee bean instance.
        assertTrue(controller1.getService() == controller.getService());
    }

    @Test
    public void testNotInterfaceImplementeeBean()
    {
        TestNotInterfaceImplementeeBean.TNIController controller = applicationContext
                .getBean(TestNotInterfaceImplementeeBean.TNIController.class);

        assertTrue(controller.getService() instanceof CglibImplementee);
    }

    public static class TestNotInterfaceImplementeeBean
    {
        @Component
        public static class TNIController
        {
            private TNIService service;

            public TNIService getService()
            {
                return service;
            }

            @Autowired
            public void setService(TNIService service)
            {
                this.service = service;
            }
        }

        @Component
        public static class TNIService
        {
        }

        @Component
        public static class TNIServiceAnother extends TNIService
        {
        }
    }

    @Test
    public void testSetterMethodAutowired()
    {
        TestSetterMethodAutowired bean = applicationContext
                .getBean(TestSetterMethodAutowired.class);

        assertNotNull(bean.getService());
    }

    @Component
    public static class TestSetterMethodAutowired
    {
        private Service service;

        public Service getService()
        {
            return service;
        }

        @Autowired
        public void setService(Service service)
        {
            this.service = service;
        }
    }

    @Test
    public void testFieldAutowired()
    {
        TestFieldAutowired bean = applicationContext
                .getBean(TestFieldAutowired.class);

        assertNotNull(bean.getService());
    }

    @Component
    public static class TestFieldAutowired
    {
        @Autowired
        private Service service;

        public Service getService()
        {
            return service;
        }
    }

    @Test
    public void testNoXimplementationWhenOnlyOneInstance()
    {
        TestNoXimplementationWhenOnlyOneInstance.TNOController controller = applicationContext
                .getBean(
                        TestNoXimplementationWhenOnlyOneInstance.TNOController.class);

        // With a single candidate, the plain bean must be injected, not a proxy.
        assertEquals(TestNoXimplementationWhenOnlyOneInstance.TNOService.class,
                controller.getService().getClass());

        assertEquals(
                TestNoXimplementationWhenOnlyOneInstance.TNOService1Impl.class,
                controller.getService1().getClass());
    }

    public static class TestNoXimplementationWhenOnlyOneInstance
    {
        @Component
        public static class TNOController
        {
            private TNOService service;

            private TNOService1 service1;

            public TNOService getService()
            {
                return service;
            }

            @Autowired
            public void setService(TNOService service)
            {
                this.service = service;
            }

            public TNOService1 getService1()
            {
                return service1;
            }

            @Autowired
            public void setService1(TNOService1 service1)
            {
                this.service1 = service1;
            }
        }

        @Component
        public static class TNOService
        {
        }

        public interface TNOService1
        {
        }

        @Component
        public static class TNOService1Impl implements TNOService1
        {
        }
    }

    @Test
    public void testPrototypeImplementorBean()
    {
        TestPrototypeImplementorBean.TPIController controller = applicationContext
                .getBean(TestPrototypeImplementorBean.TPIController.class);

        // singleton: the constructor runs once, so the count never moves
        assertEquals(0, controller
                .getCount(TestPrototypeImplementorBean.TPIService1.TYPE));
        assertEquals(0, controller
                .getCount(TestPrototypeImplementorBean.TPIService1.TYPE));
        assertEquals(0, controller
                .getCount(TestPrototypeImplementorBean.TPIService1.TYPE));

        // prototype: a fresh instance (and constructor) per invocation
        TestPrototypeImplementorBean.TPIService2.count = 0;

        assertEquals(1, controller
                .getCount(TestPrototypeImplementorBean.TPIService2.TYPE));
        assertEquals(2, controller
                .getCount(TestPrototypeImplementorBean.TPIService2.TYPE));
        assertEquals(3, controller
                .getCount(TestPrototypeImplementorBean.TPIService2.TYPE));
    }

    public static class TestPrototypeImplementorBean
    {
        @Component
        public static class TPIController
        {
            @Autowired
            private TPIService service;

            public TPIService getService()
            {
                return service;
            }

            public void setService(TPIService service)
            {
                this.service = service;
            }

            public int getCount(String type)
            {
                return this.service.getCount(type);
            }
        }

        public static interface TPIService
        {
            public int getCount(String type);
        }

        @Component
        public static class TPIService1 implements TPIService
        {
            public static final String TYPE = TPIService1.class.getName();

            private static int count = 0;

            public TPIService1()
            {
                super();
            }

            @Validity("isValid")
            @Override
            public int getCount(String type)
            {
                return count;
            }

            boolean isValid(String type)
            {
                return TYPE.equals(type);
            }
        }

        @Component
        @Scope("prototype")
        public static class TPIService2 implements TPIService
        {
            public static final String TYPE = TPIService2.class.getName();

            private static int count = 0;

            public TPIService2()
            {
                super();
                count += 1;
            }

            @Validity("isValid")
            @Override
            public int getCount(String type)
            {
                return count;
            }

            boolean isValid(String type)
            {
                return TYPE.equals(type);
            }
        }
    }

    @Test
    public void testPrototypeImplementorBean_AopOfJdkProxy()
    {
        // singleton
        TestPrototypeImplementorBean_AopOfJdkProxy.TPIController controller = applicationContext
                .getBean(
                        TestPrototypeImplementorBean_AopOfJdkProxy.TPIController.class);

        // singleton
        assertEquals(0, controller
                .getCount(
                        TestPrototypeImplementorBean_AopOfJdkProxy.TPIService1.TYPE));
        assertEquals(0, controller
                .getCount(
                        TestPrototypeImplementorBean_AopOfJdkProxy.TPIService1.TYPE));
        assertEquals(0, controller
                .getCount(
                        TestPrototypeImplementorBean_AopOfJdkProxy.TPIService1.TYPE));

        // AOP JDK proxy around a prototype bean
        TestPrototypeImplementorBean_AopOfJdkProxy.TPIService2.count = 0;

        assertEquals(1, controller.getCount(
                TestPrototypeImplementorBean_AopOfJdkProxy.TPIService2.TYPE));
        assertEquals(2, controller.getCount(
                TestPrototypeImplementorBean_AopOfJdkProxy.TPIService2.TYPE));
        assertEquals(3, controller.getCount(
                TestPrototypeImplementorBean_AopOfJdkProxy.TPIService2.TYPE));
    }

    public static class TestPrototypeImplementorBean_AopOfJdkProxy
    {
        @Component
        public static class TPIController
        {
            @Autowired
            private TPIService service;

            public TPIService getService()
            {
                return service;
            }

            public void setService(TPIService service)
            {
                this.service = service;
            }

            public int getCount(String type)
            {
                return this.service.getCount(type);
            }
        }

        public static interface TPIService
        {
            public int getCount(String type);
        }

        @Component
        public static class TPIService1 implements TPIService
        {
            public static final String TYPE = TPIService1.class.getName();

            private static int count = 0;

            public TPIService1()
            {
                super();
            }

            @Validity("isValid")
            @Override
            public int getCount(String type)
            {
                return count;
            }

            boolean isValid(String type)
            {
                return TYPE.equals(type);
            }
        }

        @Component
        @Scope("prototype")
        public static class TPIService2 implements TPIService
        {
            public static final String TYPE = TPIService2.class.getName();

            private static int count = 0;

            public TPIService2()
            {
                super();
                count += 1;
            }

            @Validity("isValid")
            @Override
            public int getCount(String type)
            {
                return count;
            }

            boolean isValid(String type)
            {
                return TYPE.equals(type);
            }
        }

        @Component
        @Aspect
        public static class TPIService3Aspect
        {
            @Pointcut("execution(* org.ximplementation.spring.ImplementeeBeanCreationPostProcessorTest$TestPrototypeImplementorBean_AopOfJdkProxy$TPIService.getCount(..))")
            private void testPointcut()
            {
            }

            @org.aspectj.lang.annotation.Before("testPointcut()")
            public void beforeAspect(JoinPoint jp) throws Throwable
            {
            }
        }
    }

    @Component
    public static class Controller
    {
        @Autowired
        private Service service;

        public Controller()
        {
            super();
        }

        public Controller(Service tservice)
        {
            super();
            this.service = tservice;
        }

        public Service getService()
        {
            return service;
        }

        public void setService(Service service)
        {
            this.service = service;
        }

        public String handle(Number number)
        {
            return this.service.handle(number);
        }
    }

    @Component
    public static class Controller1
    {
        @Autowired
        private Service service;

        public Controller1()
        {
            super();
        }

        public Service getService()
        {
            return service;
        }

        public void setService(Service service)
        {
            this.service = service;
        }
    }

    public static class ControllerForXMLConfig
    {
        private Service service;

        public ControllerForXMLConfig()
        {
            super();
        }

        public Service getService()
        {
            return service;
        }

        public void setService(Service service)
        {
            this.service = service;
        }

        public String handle(Number number)
        {
            return this.service.handle(number);
        }
    }

    @Component
    public static interface Service
    {
        String handle(Number number);
    }

    @Component
    public static class ServiceImpl0 implements Service
    {
        public static final String MY_RE = ServiceImpl0.class.getName();

        @Override
        public String handle(Number number)
        {
            return MY_RE;
        }
    }

    @Component
    public static class ServiceImpl1 implements Service
    {
        public static final String MY_RE = ServiceImpl1.class.getName();

        public static final Number MY_NUMBER = Double.valueOf(1.0D);

        @Validity("isValid")
        @Override
        public String handle(Number number)
        {
            return MY_RE;
        }

        public boolean isValid(Number number)
        {
            return MY_NUMBER.equals(number);
        }
    }

    @Component
    @Implementor(Service.class)
    public static class ServiceImpl2
    {
        public static final String MY_RE = ServiceImpl2.class.getName();

        @Implement("handle")
        public String handle(Integer number)
        {
            return MY_RE;
        }
    }

    @Component
    @Implementor(Service.class)
    public static class ServiceImpl3
    {
        public static final String MY_RE = ServiceImpl3.class.getName();

        @Implement("handle")
        public String handle(Float number)
        {
            return MY_RE;
        }
    }

    @Component
    @Aspect
    public static class MyAspect
    {
        public static final String PREFIX = MyAspect.class.getSimpleName();

        @Pointcut("execution(* org.ximplementation.spring.ImplementeeBeanCreationPostProcessorTest$Service.handle(..))")
        private void testPointcut()
        {
        }

        @org.aspectj.lang.annotation.Before("testPointcut()")
        public void beforeAspect(JoinPoint jp) throws Throwable
        {
            System.out.println("Before aspect execute");
        }

        @org.aspectj.lang.annotation.Around("testPointcut()")
        public Object aroundAspect(ProceedingJoinPoint jp) throws Throwable
        {
            String re = (String) jp.proceed();

            return PREFIX + re;
        }
    }
}
| |
/*
* Copyright 2011 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.javascript.rhino.Node;
/**
 * Unit tests for {@link OptimizeCalls}.
 */
public final class OptimizeCallsTest extends CompilerTestCase {
public OptimizeCallsTest() {
super(
lines(
DEFAULT_EXTERNS,
"var window;",
"var goog = {};",
"goog.reflect = {};",
"goog.reflect.object = function(a, b) {};",
"function goog$inherits(a, b) {}",
"var alert;",
"function use(x) {}"));
}
@Override
protected void setUp() throws Exception {
super.setUp();
enableNormalize();
enableGatherExternProperties();
}
@Override
protected int getNumRepetitions() {
return 1;
}
@Override
protected CompilerPass getProcessor(final Compiler compiler) {
return new CompilerPass() {
@Override
public void process(Node externs, Node root) {
NameBasedDefinitionProvider defFinder = new NameBasedDefinitionProvider(compiler, true);
defFinder.process(externs, root);
new PureFunctionIdentifier(compiler, defFinder).process(externs, root);
new RemoveUnusedCode.Builder(compiler)
.removeLocalVars(true)
.removeGlobals(true)
.build()
.process(externs, root);
final OptimizeCalls passes = new OptimizeCalls(compiler);
passes.addPass(new OptimizeReturns(compiler));
passes.addPass(new OptimizeParameters(compiler));
passes.process(externs, root);
}
};
}
public void testSimpleRemoval() {
// unused parameter value
test("var foo = (p1)=>{}; foo(1); foo(2)",
"var foo = ( )=>{}; foo( ); foo( )");
test("let foo = (p1)=>{}; foo(1); foo(2)",
"let foo = ( )=>{}; foo( ); foo( )");
test("const foo = (p1)=>{}; foo(1); foo(2)",
"const foo = ( )=>{}; foo( ); foo( )");
}
public void testRemovingReturnCallToFunctionWithUnusedParams() {
test(
"function foo() {var x; return x = bar(1)} foo(); function bar(x) {}",
"function foo() {bar();return} foo(); function bar() {1;}");
test(
"function foo() {return} foo(); function bar() {1;}",
"function foo(){return}foo()");
}
public void testNestingFunctionCallWithUnsedParams() {
test(
lines(
"function f1(x) { }",
"function f2(x) { }",
"function f3(x) { }",
"function f4(x) { }",
"f3(f1(f2()));"),
lines(
"function f1(){f2()}",
"function f2(){}",
"function f3(){f1()}f3()"));
}
public void testUnusedAssignOnFunctionWithUnusedParams() {
test("var foo = function(a){ }; function bar(){var x;x = foo} bar(); foo(1)",
"var foo = function( ){1;}; function bar(){ } bar(); foo()");
}
public void testCallSiteInteraction() {
testSame("var b=function(){return};b()");
test(
"var b=function(c){ return c}; b(1)",
"var b=function( ){var c = 1; c; return }; b( )");
test(
"var b=function(c){};b.call(null, 1); b(2);", // preserve alignment
"var b=function( ){};b.call(null ); b( );");
test(
"var b=function(c){};b.apply(null, []);", // preserve alignment
"var b=function( ){};b.apply(null, []);");
test(
"var b=function(c){return};b(1);b(2)",
"var b=function(){return};b();b();");
test(
"var b=function(c){return};b(1,2);b();",
"var b=function(){return};b();b();");
test(
"var b=function(c){return};b(1,2);b(3,4)",
"var b=function(){return};b();b()");
// Here there is a unknown reference to the function so we can't
// change the signature.
// TODO(johnlenz): replace unused parameter values, even
// if we don't know all the uses.
testSame(
"var b=function(c,d){return d};b(1,2);b(3,4);b.f()");
test(
"var b=function(c){return};b(1,2);b(3,new use())",
"var b=function( ){return};b( );b( new use())");
test(
"var b=function(c){return};b(1,2);b(new use(),4)",
"var b=function( ){return};b( );b(new use() )");
test(
"var b=function(c,d){return d};b(1,2);use(b(new use(),4))",
"var b=function(c,d){return d};b(0,2);use(b(new use(),4))");
test(
"var b=function(c,d,e){return d};b(1,2,3);use(b(new use(),4,new use()))",
"var b=function(c,d ){return d};b(0,2 );use(b(new use(),4,new use()))");
// Recursive calls are OK.
test(
"var b=function(c,d){b(1,2);return d};b(3,4);use(b(5,6))",
"var b=function(d){b(2);return d};b(4);use(b(6))");
testSame("var b=function(c){return arguments};b(1,2);use(b(3,4))");
// remove all function arguments
test(
"var b=function(c,d){return};b();b(1);b(1,2);b(1,2,3)",
"var b=function(){return};b();b();b();b();");
// remove no function arguments
testSame("var b=function(c,d){use(c+d)};b(2,3);b(1,2)");
// remove some function arguments
test(
"var b=function(e,f,c,d){use(c+d)};b(1,2,3,4);b(3,4,5,6)",
"var b=function(c,d){use(c+d)};b(3,4);b(5,6)");
test(
"var b=function(c,d,e,f){use(c+d)};b(1,2);b(3,4)",
"var b=function(c,d){use(c+d)};b(1,2);b(3,4)");
test(
"var b=function(e,c,f,d,g){use(c+d)};b(1,2);b(3,4)",
"var b=function(c){var f;var d;use(c+d)};b(2);b(4)");
test(
"var b=function(c,d){};var b=function(e,f){};b(1,2)",
"var b=function(){};var b=function(){};b()");
}
public void testComplexDefinition1() {
testSame("var x; var b = x ? function(c) { use(c) } : function(c) { use(c) }; b(1)");
test(
"var x; var b = (x, function(c) { use(c) }); b(1)",
"var x; var b = (x, function( ) { var c = 1; use(c) }); b()");
testSame("var x; var b; b = x ? function(c) { use(c) } : function(c) { use(c) }; b(1)");
test(
"var x; var b; b = (x, function(c) { use(c) }); b(1)",
"var x; var b; b = (x, function( ) { var c = 1; use(c) }); b( )");
}
public void testComplexDefinition2() {
testSame("var x; var b = x ? function(c) { use(c) } : function(c) { use(c) }; b(1); b(2);");
testSame("var x;var b = (x, function(c) { use(c) }); b(1); b(2);");
testSame("var x; var b; b = x ? function(c) { use(c) } : function(c) { use(c) }; b(1); b(2);");
testSame("var x; var b; b = (x, function(c) { use(c) }); b(1); b(2);");
}
public void testCallSiteInteraction_constructors0() {
// Unused parmeters to constructors invoked with .call
// can be removed.
test(
lines(
"var Ctor1=function(a,b){return a};", // preserve newlines
"Ctor1.call(this, 1, 2);",
"Ctor1(3, 4)"),
lines(
"var Ctor1=function(a ){a; return};", // preserve newlines
"Ctor1.call(this,1);",
"Ctor1(3)"));
}
public void testCallSiteInteraction_constructors1() {
// NOTE: Ctor1 used trailing parameter is removed by
// RemoveUnusedCode
// For now, goog.inherits prevents optimizations
test(
lines(
"var Ctor1=function(a,b){use(a)};",
"var Ctor2=function(x,y){};",
"goog$inherits(Ctor2, Ctor1);",
"new Ctor2(1,2);new Ctor2(3,4);"),
lines(
"var Ctor1=function(a){use(a)};",
"var Ctor2=function(){};",
"goog$inherits(Ctor2, Ctor1);",
"new Ctor2(1,2);new Ctor2(3,4);"));
}
public void testCallSiteInteraction_constructors2() {
// For now, goog.inherits prevents call site optimizations
String code = lines(
"var Ctor1=function(a,b){return a};",
"var Ctor2=function(x,y){Ctor1.call(this,x,y)};",
"goog$inherits(Ctor2, Ctor1);",
"new Ctor2(1,2);new Ctor2(3,4)");
String expected = lines(
"var Ctor1=function(a){return a};",
"var Ctor2=function(x,y){Ctor1.call(this,x,y)};",
"goog$inherits(Ctor2, Ctor1);",
"new Ctor2(1,2);new Ctor2(3,4)");
test(code, expected);
}
public void testFunctionArgRemovalCausingInconsistency() {
// Test the case where an unused argument is removed and the argument
// contains a call site in its subtree (will cause the call site's parent
// pointer to be null).
test(
lines(
"var a=function(x,y){};",
"var b=function(z){};",
"a(new b, b)"),
lines(
"var a=function(){new b;b};",
"var b=function(){};",
"a()"));
}
public void testRemoveUnusedVarsPossibleNpeCase() {
test(
lines(
"var a = [];",
"var register = function(callback) {a[0] = callback};",
"register(function(transformer) {});",
"register(function(transformer) {});"),
lines(
"var register=function(){};register();register()"));
}
public void testDoNotOptimizeJSCompiler_renameProperty() {
// Only the function definition can be modified, none of the call sites.
test(
lines(
"function JSCompiler_renameProperty(a) {};",
"JSCompiler_renameProperty('a');"),
lines(
"function JSCompiler_renameProperty() {};",
"JSCompiler_renameProperty('a');"));
}
public void testDoNotOptimizeJSCompiler_ObjectPropertyString() {
test(
lines(
"function JSCompiler_ObjectPropertyString(a, b) {};",
"JSCompiler_ObjectPropertyString(window,'b');"),
lines("function JSCompiler_ObjectPropertyString() {};",
"JSCompiler_ObjectPropertyString(window,'b');"));
}
public void testFunctionArgRemovalFromCallSites() {
// remove all function arguments
test(
"var b=function(c,d){return};b(1,2);b(3,4)",
"var b=function(){return};b();b()");
// remove no function arguments
testSame("var b=function(c,d){return c+d};b(1,2);use(b(3,4))");
test(
"var b=function(e,f,c,d){return c+d};b(1,2,3,4);use(b(4,3,2,1))",
"var b=function(c,d){return c+d};b(3,4);use(b(2,1))");
// remove some function arguments
test(
"var b=function(c,d,e,f){use(c+d)};b(1,2);b();",
"var b=function(c,d){use(c+d)};b(1,2);b();");
test(
"var b=function(e,c,f,d,g){use(c+d)};b(1,2);b(3,4,5,6)",
"var b=function(c,d){use(c+d)};b(2);b(4,6)");
}
public void testFunctionArgRemovalFromCallSitesSpread1() {
test(
"function f(a,b,c,d){};f(...[1,2,3,4]);f(4,3,2,1)",
"function f(){};f();f()");
test(
"function f(a,b,c,d){};f(...[1,2,3,4], alert());f(4,3,2,1)",
"function f(){};f(alert());f()");
test(
"function f(a,b,c,d){use(c+d)};f(...[1,2,3,4]);f(4,3,2,1)",
"function f(a,b,c,d){use(c+d)};f(...[1,2,3,4]);f(0,0,2,1)");
test(
"function f(a,b,c,d){use(c+d)};f(1,...[2,3,4,5]);f(4,3,2,1)",
"function f( b,c,d){use(c+d)};f( ...[2,3,4,5]);f( 0,2,1)");
test(
"function f(a,b,c,d){use(c+d)};f(1,2,...[3,4,5]);f(4,3,2,1)",
"function f( c,d){use(c+d)};f( ...[3,4,5]);f( 2,1)");
test(
"function f(a,b,c,d){use(c+d)}; f(...[],2,3);f(4,3,2,1)",
"function f(a,b,c,d){use(c+d)}; f(...[],2,3);f(0,0,2,1)");
}
public void testFunctionArgRemovalFromCallSitesSpread2() {
test(
"function f(a,b,c,d){};f(...[alert()]);f(4,3,2,1)",
"function f(){};f(...[alert()]);f()");
test(
"function f(a,b,c,d){};f(...[alert()], alert());f(4,3,2,1)",
"function f(){};f(...[alert()], alert());f()");
test(
"function f(a,b,c,d){use(c+d)};f(...[alert()]);f(4,3,2,1)",
"function f(a,b,c,d){use(c+d)};f(...[alert()]);f(0,0,2,1)");
test(
"function f(a,b,c,d){use(c+d)};f(1,...[alert()]);f(4,3,2,1)",
"function f( b,c,d){use(c+d)};f( ...[alert()]);f( 0,2,1)");
test(
"function f(a,b,c,d){use(c+d)};f(1,2,...[alert()]);f(4,3,2,1)",
"function f( c,d){use(c+d)};f( ...[alert()]);f( 2,1)");
test(
"function f(a,b,c,d){use(c+d)}; f(...[alert()],2,3);f(4,3,2,1)",
"function f(a,b,c,d){use(c+d)}; f(...[alert()],2,3);f(0,0,2,1)");
}
public void testFunctionArgRemovalFromCallSitesSpread3() {
test(
"function f(a,b,c,d){};f(...alert());f(4,3,2,1)",
"function f(){};f(...alert());f()");
test(
"function f(a,b,c,d){};f(...alert(), 1);f(4,3,2,1)",
"function f(){};f(...alert());f()");
test(
"function f(a,b,c,d){use(c+d)};f(...alert());f(4,3,2,1)",
"function f(a,b,c,d){use(c+d)};f(...alert());f(0,0,2,1)");
test(
"function f(a,b,c,d){use(c+d)};f(1,...alert());f(4,3,2,1)",
"function f( b,c,d){use(c+d)};f( ...alert());f( 0,2,1)");
test(
"function f(a,b,c,d){use(c+d)};f(1,2,...alert());f(4,3,2,1)",
"function f( c,d){use(c+d)};f( ...alert());f( 2,1)");
test(
"function f(a,b,c,d){use(c+d)}; f(...[alert()],2,3);f(4,3,2,1)",
"function f(a,b,c,d){use(c+d)}; f(...[alert()],2,3);f(0,0,2,1)");
}
public void testFunctionArgRemovalFromCallSitesRest() {
// remove all function arguments
test(
"var b=function(c,...d){return};b(1,2,3);b(4,5,6)",
"var b=function( ){return};b( );b( )");
// remove no function arguments
testSame("var b=function(c,...d){return c+d};b(1,2,3);use(b(4,5,6))");
// remove some function arguments
test(
"var b=function(e,f,...c){return c};b(1,2,3,4);use(b(4,3,2,1))",
"var b=function( ...c){return c};b( 3,4);use(b( 2,1))");
}
  /**
   * Checks unused-argument removal for parameters with default values. A
   * side-effect-free default is removable; a side-effecting default can only
   * be inlined into the body when no call site supplies a value.
   */
  public void testFunctionArgRemovalFromCallSitesDefaultValue() {
    // remove all function arguments
    test(
        "function f(c = 1, d = 2){};f(1,2,3);f(4,5,6)",
        "function f( ){};f( );f( )");
    // A call site passes a value (even undefined counts as passing), so the
    // side-effecting default cannot be removed or inlined.
    testSame(
        "function f(c = alert()){};f(undefined);f(4)");
    // No call site passes a value: the default initializer moves into the
    // body and the parameter disappears.
    test(
        "function f(c = alert()){};f();f()",
        "function f(){var c = alert();};f();f()");
    // TODO(johnlenz): handle this like the "no value" case above and
    // allow the default value to inlined into the body.
    testSame(
        "function f(c = alert()){};f(undefined);f(undefined)");
  }
  /**
   * Checks unused-argument removal for destructuring parameters. Patterns with
   * side-effect-free defaults are removable; patterns whose defaults (or whose
   * element defaults) call alert() must be preserved, though the surrounding
   * unused plain parameters can still be dropped.
   */
  public void testFunctionArgRemovalFromCallSitesDestructuring() {
    // remove all function arguments: both patterns and their pure defaults go
    test(
        "function f([a] = [1], [b] = [2]){} f(1, 2, 3); f(4, 5, 6)",
        "function f( ){} f( ); f( )");
    // side-effecting pattern defaults survive (with the bound names removed);
    // the unused plain parameters a and d are removed around them
    test(
        "function f(a, [b] = alert(), [c] = alert(), d){} f(1, 2, 3, 4); f(4, 5, 6, 7)",
        "function f( [ ] = alert(), [ ] = alert() ){} f( 2, 3 ); f( 5, 6 )");
    // side-effecting element defaults keep the whole pattern intact
    test(
        "function f(a, [b = alert()] = [], [c = alert()] = [], d){} f(1, 2, 3, 4);f(4, 5, 6, 7)",
        "function f( [b = alert()] = [], [c = alert()] = [] ){} f( 2, 3 );f( 5, 6 )");
    // same, without an outer default on the pattern
    test(
        "function f(a, [b = alert()], [c = alert()], d){} f(1, 2, 3, 4); f(4, 5, 6, 7);",
        "function f( [b = alert()], [c = alert()] ){} f( 2, 3 ); f( 5, 6 );");
  }
  /**
   * A parameter whose only use is being overwritten from a global (b = a) is
   * effectively unused: the parameter, the assignment, and the then-unused
   * global declaration are all removed.
   */
  public void testLocalVarReferencesGlobalVar() {
    test(
        "var a=3;function f(b, c){b=a; alert(c);} f(1,2);f()",
        "function f(c) { alert(c); } f(2);f();");
  }
  /**
   * Methods referenced through goog.reflect.object (and reachable via the
   * window['Foo'] export) may be called reflectively, so their parameters
   * must not be renamed or removed even though no direct call passes y.
   */
  public void testReflectedMethods() {
    testSame(
        lines(
            "/** @constructor */",
            "function Foo() {}",
            "Foo.prototype.handle = function(x, y) { alert(y); };",
            "var x = goog.reflect.object(Foo, {handle: 1});",
            "for (var i in x) { x[i].call(x); }",
            "window['Foo'] = Foo;"));
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.utils;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

import org.apache.ambari.server.bootstrap.BootStrapImpl;
import org.apache.commons.lang.StringUtils;
/**
* Provides various utility functions to be used for version handling.
* The compatibility matrix between server, agent, store can also be maintained
* in this class. Currently, exact match is required between all the three.
*/
public class VersionUtils {

  /**
   * Extracts up to four numeric segments from a version string via the
   * replacement "$1.$2.$3.$4". Compiled once since it is applied on every
   * comparison. The dots are escaped literals: the previous inline pattern
   * ("([0-9]+).([0-9]+).([0-9]+).([0-9]+)?.*") used '.' as "any character",
   * which could fuse digit runs separated by arbitrary characters (e.g.
   * "1a2b3c4" became "1.2.3.4") instead of treating such segments as
   * non-numeric.
   */
  private static final Pattern VERSION_PATTERN =
      Pattern.compile("([0-9]+)\\.([0-9]+)\\.([0-9]+)\\.([0-9]+)?.*");

  /**
   * Compares two versions strings of the form N.N.N.N or even N.N.N.N-###
   * (which should ignore everything after the dash). If the user has a custom
   * stack, e.g., 2.3.MYNAME or MYNAME.2.3, then any segment that contains
   * letters is treated as 0.
   *
   * @param version1
   *          the first operand. If set to {@value BootStrapImpl#DEV_VERSION}
   *          then this will always return {@code 0}
   * @param version2
   *          the second operand.
   * @param maxLengthToCompare
   *          The maximum number of segments to compare - 2 means only Major
   *          and Minor, 0 compares the whole version strings
   * @return 0 if both are equal up to the length compared, -1 if first one is
   *         lower, +1 otherwise
   * @throws IllegalArgumentException
   *           if either version is null or empty, or maxLengthToCompare is
   *           negative
   */
  public static int compareVersions(String version1, String version2, int maxLengthToCompare)
    throws IllegalArgumentException {
    if (version1 == null){
      throw new IllegalArgumentException("version1 cannot be null");
    }
    if (version2 == null){
      throw new IllegalArgumentException("version2 cannot be null");
    }
    version1 = StringUtils.trim(version1);
    version2 = StringUtils.trim(version2);

    // Ignore any build/qualifier suffix after the first dash, e.g. "2.1.0-129".
    if (version1.indexOf('-') >= 0) {
      version1 = version1.substring(0, version1.indexOf('-'));
    }
    if (version2.indexOf('-') >= 0) {
      version2 = version2.substring(0, version2.indexOf('-'));
    }
    if (version1.isEmpty()) {
      throw new IllegalArgumentException("version1 cannot be empty");
    }
    if (version2.isEmpty()) {
      throw new IllegalArgumentException("version2 cannot be empty");
    }
    if (maxLengthToCompare < 0) {
      throw new IllegalArgumentException("maxLengthToCompare cannot be less than 0");
    }

    // A development version always compares equal to anything. version1 was
    // already trimmed above, so no second trim is needed.
    if (BootStrapImpl.DEV_VERSION.equals(version1)) {
      return 0;
    }

    // Normalize "N.N.N.N<anything>" to "N.N.N.N". Strings that do not match
    // the pattern pass through replaceAll() unchanged; their segments are then
    // parsed (and defaulted to 0 on failure) individually below.
    String[] version1Parts = VERSION_PATTERN.matcher(version1).replaceAll("$1.$2.$3.$4").split("\\.");
    String[] version2Parts = VERSION_PATTERN.matcher(version2).replaceAll("$1.$2.$3.$4").split("\\.");

    int length = Math.max(version1Parts.length, version2Parts.length);
    length = maxLengthToCompare == 0 || maxLengthToCompare > length ? length : maxLengthToCompare;

    List<Integer> stack1Parts = new ArrayList<>();
    List<Integer> stack2Parts = new ArrayList<>();

    for (int i = 0; i < length; i++) {
      // Robust enough to handle strings in the version: a missing or
      // non-numeric segment counts as 0.
      try {
        int stack1Part = i < version1Parts.length ?
          Integer.parseInt(version1Parts[i]) : 0;
        stack1Parts.add(stack1Part);
      } catch (NumberFormatException e) {
        stack1Parts.add(0);
      }

      try {
        int stack2Part = i < version2Parts.length ?
          Integer.parseInt(version2Parts[i]) : 0;
        stack2Parts.add(stack2Part);
      } catch (NumberFormatException e) {
        stack2Parts.add(0);
      }
    }

    // Compare segment by segment; the first difference decides the order.
    length = Math.max(stack1Parts.size(), stack2Parts.size());
    for (int i = 0; i < length; i++) {
      Integer stack1Part = stack1Parts.get(i);
      Integer stack2Part = stack2Parts.get(i);

      if (stack1Part < stack2Part) {
        return -1;
      }

      if (stack1Part > stack2Part) {
        return 1;
      }
    }

    return 0;
  }

  /**
   * Compares two versions strings of the form N.N.N.N.
   *
   * @param version1
   *          the first operand. If set to {@value BootStrapImpl#DEV_VERSION}
   *          then this will always return {@code 0}
   * @param version2
   *          the second operand.
   * @param allowEmptyVersions
   *          Allow one or both version values to be null or empty string;
   *          when allowed, a null/empty version sorts below a non-empty one
   * @return 0 if both are equal up to the length compared, -1 if first one is
   *         lower, +1 otherwise
   */
  public static int compareVersions(String version1, String version2, boolean allowEmptyVersions) {
    if (allowEmptyVersions) {
      // NOTE(review): only version1 is checked against DEV_VERSION here,
      // mirroring the one-sided check in the 3-arg overload - confirm the
      // asymmetry is intentional.
      if (version1 != null && version1.equals(BootStrapImpl.DEV_VERSION)) {
        return 0;
      }
      if (version1 == null && version2 == null) {
        return 0;
      } else {
        if (version1 == null) {
          return -1;
        }
        if (version2 == null) {
          return 1;
        }
      }

      if (version1.isEmpty() && version2.isEmpty()) {
        return 0;
      } else {
        if (version1.isEmpty()) {
          return -1;
        }
        if (version2.isEmpty()) {
          return 1;
        }
      }
    }

    // Non-empty (or empty-disallowed) operands: full-length comparison, which
    // itself rejects null/empty input.
    return compareVersions(version1, version2, 0);
  }

  /**
   * Compares two versions strings of the form N.N.N.N, over their full length.
   *
   * @param version1
   *          the first operand. If set to {@value BootStrapImpl#DEV_VERSION}
   *          then this will always return {@code 0}
   * @param version2
   *          the second operand.
   * @return 0 if both are equal, -1 if first one is lower, +1 otherwise
   */
  public static int compareVersions(String version1, String version2) {
    return compareVersions(version1, version2, 0);
  }

  /**
   * Compares two versions for equality, allows empty versions.
   *
   * @param version1
   *          the first operand. If set to {@value BootStrapImpl#DEV_VERSION}
   *          then this will always return {@code true}
   * @param version2
   *          the second operand.
   * @param allowEmptyVersions
   *          Allow one or both version values to be null or empty string
   * @return true if versions are equal; false otherwise
   */
  public static boolean areVersionsEqual(String version1, String version2, boolean allowEmptyVersions) {
    return 0 == compareVersions(version1, version2, allowEmptyVersions);
  }

  /**
   * Return N.N.N from N.N.N.xyz
   *
   * @param version a version string with at least three dot-separated segments
   * @return the first three segments joined by dots
   * @throws IllegalArgumentException if fewer than three segments are present
   */
  public static String getVersionSubstring(String version) {
    String[] versionParts = version.split("\\.");
    if (versionParts.length < 3) {
      throw new IllegalArgumentException("Invalid version number");
    }
    return versionParts[0] + "." + versionParts[1] + "." + versionParts[2];
  }
}
| |
/*
* Copyright 2002-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.scheduling.config;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.scheduling.TaskScheduler;
import org.springframework.scheduling.Trigger;
import org.springframework.scheduling.concurrent.ConcurrentTaskScheduler;
import org.springframework.scheduling.support.CronTrigger;
import org.springframework.util.Assert;
/**
* Helper bean for registering tasks with a {@link TaskScheduler}, typically using cron
* expressions.
*
 * <p>As of Spring 3.1, {@code ScheduledTaskRegistrar} has a more prominent user-facing
 * role when used in conjunction with the @{@link
 * org.springframework.scheduling.annotation.EnableScheduling EnableScheduling} annotation and its
 * {@link org.springframework.scheduling.annotation.SchedulingConfigurer
 * SchedulingConfigurer} callback interface.
*
* @author Juergen Hoeller
* @author Chris Beams
* @since 3.0
 * @see org.springframework.scheduling.annotation.EnableScheduling
* @see org.springframework.scheduling.annotation.SchedulingConfigurer
*/
public class ScheduledTaskRegistrar implements InitializingBean, DisposableBean {

	private TaskScheduler taskScheduler;

	// Fallback executor created lazily in scheduleTasks() when no TaskScheduler
	// was configured; shut down again in destroy().
	private ScheduledExecutorService localExecutor;

	private List<TriggerTask> triggerTasks;

	private List<CronTask> cronTasks;

	private List<IntervalTask> fixedRateTasks;

	private List<IntervalTask> fixedDelayTasks;

	// Futures of all scheduled tasks, kept so destroy() can cancel them.
	private final Set<ScheduledFuture<?>> scheduledFutures = new LinkedHashSet<ScheduledFuture<?>>();


	/**
	 * Set the {@link TaskScheduler} to register scheduled tasks with.
	 */
	public void setTaskScheduler(TaskScheduler taskScheduler) {
		Assert.notNull(taskScheduler, "TaskScheduler must not be null");
		this.taskScheduler = taskScheduler;
	}

	/**
	 * Set the {@link TaskScheduler} to register scheduled tasks with, or a
	 * {@link java.util.concurrent.ScheduledExecutorService} to be wrapped as a
	 * {@code TaskScheduler}.
	 * @throws IllegalArgumentException if the given object is of neither type
	 */
	public void setScheduler(Object scheduler) {
		Assert.notNull(scheduler, "Scheduler object must not be null");
		if (scheduler instanceof TaskScheduler) {
			this.taskScheduler = (TaskScheduler) scheduler;
		}
		else if (scheduler instanceof ScheduledExecutorService) {
			this.taskScheduler = new ConcurrentTaskScheduler(((ScheduledExecutorService) scheduler));
		}
		else {
			throw new IllegalArgumentException("Unsupported scheduler type: " + scheduler.getClass());
		}
	}

	/**
	 * Return the {@link TaskScheduler} instance for this registrar (may be {@code null}).
	 */
	public TaskScheduler getScheduler() {
		return this.taskScheduler;
	}


	/**
	 * Specify triggered tasks as a Map of Runnables (the tasks) and Trigger objects
	 * (typically custom implementations of the {@link Trigger} interface).
	 * <p>Replaces any previously registered trigger tasks.
	 */
	public void setTriggerTasks(Map<Runnable, Trigger> triggerTasks) {
		this.triggerTasks = new ArrayList<TriggerTask>();
		for (Map.Entry<Runnable, Trigger> task : triggerTasks.entrySet()) {
			this.triggerTasks.add(new TriggerTask(task.getKey(), task.getValue()));
		}
	}

	/**
	 * Specify triggered tasks as a list of {@link TriggerTask} objects. Primarily used
	 * by {@code <task:*>} namespace parsing.
	 * @since 3.2
	 * @see ScheduledTasksBeanDefinitionParser
	 */
	public void setTriggerTasksList(List<TriggerTask> triggerTasks) {
		this.triggerTasks = triggerTasks;
	}

	/**
	 * Specify triggered tasks as a Map of Runnables (the tasks) and cron expressions.
	 * <p>Replaces any previously registered cron tasks.
	 * @see CronTrigger
	 */
	public void setCronTasks(Map<Runnable, String> cronTasks) {
		this.cronTasks = new ArrayList<CronTask>();
		for (Map.Entry<Runnable, String> task : cronTasks.entrySet()) {
			this.addCronTask(task.getKey(), task.getValue());
		}
	}

	/**
	 * Specify triggered tasks as a list of {@link CronTask} objects. Primarily used by
	 * {@code <task:*>} namespace parsing.
	 * @since 3.2
	 * @see ScheduledTasksBeanDefinitionParser
	 */
	public void setCronTasksList(List<CronTask> cronTasks) {
		this.cronTasks = cronTasks;
	}

	/**
	 * Specify triggered tasks as a Map of Runnables (the tasks) and fixed-rate values.
	 * <p>Replaces any previously registered fixed-rate tasks.
	 * @see TaskScheduler#scheduleAtFixedRate(Runnable, long)
	 */
	public void setFixedRateTasks(Map<Runnable, Long> fixedRateTasks) {
		this.fixedRateTasks = new ArrayList<IntervalTask>();
		for (Map.Entry<Runnable, Long> task : fixedRateTasks.entrySet()) {
			this.addFixedRateTask(task.getKey(), task.getValue());
		}
	}

	/**
	 * Specify fixed-rate tasks as a list of {@link IntervalTask} objects. Primarily used
	 * by {@code <task:*>} namespace parsing.
	 * @since 3.2
	 * @see ScheduledTasksBeanDefinitionParser
	 */
	public void setFixedRateTasksList(List<IntervalTask> fixedRateTasks) {
		this.fixedRateTasks = fixedRateTasks;
	}

	/**
	 * Specify triggered tasks as a Map of Runnables (the tasks) and fixed-delay values.
	 * <p>Replaces any previously registered fixed-delay tasks.
	 * @see TaskScheduler#scheduleWithFixedDelay(Runnable, long)
	 */
	public void setFixedDelayTasks(Map<Runnable, Long> fixedDelayTasks) {
		this.fixedDelayTasks = new ArrayList<IntervalTask>();
		for (Map.Entry<Runnable, Long> task : fixedDelayTasks.entrySet()) {
			this.addFixedDelayTask(task.getKey(), task.getValue());
		}
	}

	/**
	 * Specify fixed-delay tasks as a list of {@link IntervalTask} objects. Primarily used
	 * by {@code <task:*>} namespace parsing.
	 * @since 3.2
	 * @see ScheduledTasksBeanDefinitionParser
	 */
	public void setFixedDelayTasksList(List<IntervalTask> fixedDelayTasks) {
		this.fixedDelayTasks = fixedDelayTasks;
	}


	/**
	 * Add a Runnable task to be triggered per the given {@link Trigger}.
	 * @see TaskScheduler#schedule(Runnable, Trigger)
	 */
	public void addTriggerTask(Runnable task, Trigger trigger) {
		this.addTriggerTask(new TriggerTask(task, trigger));
	}

	/**
	 * Add a {@code TriggerTask}.
	 * @since 3.2
	 * @see TaskScheduler#schedule(Runnable, Trigger)
	 */
	public void addTriggerTask(TriggerTask task) {
		if (this.triggerTasks == null) {
			this.triggerTasks = new ArrayList<TriggerTask>();
		}
		this.triggerTasks.add(task);
	}

	/**
	 * Add a Runnable task to be triggered per the given cron expression.
	 */
	public void addCronTask(Runnable task, String expression) {
		this.addCronTask(new CronTask(task, expression));
	}

	/**
	 * Add a {@link CronTask}.
	 * @since 3.2
	 */
	public void addCronTask(CronTask task) {
		if (this.cronTasks == null) {
			this.cronTasks = new ArrayList<CronTask>();
		}
		this.cronTasks.add(task);
	}

	/**
	 * Add a Runnable task to be triggered at the given fixed-rate period.
	 * @see TaskScheduler#scheduleAtFixedRate(Runnable, long)
	 */
	public void addFixedRateTask(Runnable task, long period) {
		this.addFixedRateTask(new IntervalTask(task, period, 0));
	}

	/**
	 * Add a fixed-rate {@link IntervalTask}.
	 * @since 3.2
	 * @see TaskScheduler#scheduleAtFixedRate(Runnable, long)
	 */
	public void addFixedRateTask(IntervalTask task) {
		if (this.fixedRateTasks == null) {
			this.fixedRateTasks = new ArrayList<IntervalTask>();
		}
		this.fixedRateTasks.add(task);
	}

	/**
	 * Add a Runnable task to be triggered with the given fixed delay.
	 * @see TaskScheduler#scheduleWithFixedDelay(Runnable, long)
	 */
	public void addFixedDelayTask(Runnable task, long delay) {
		this.addFixedDelayTask(new IntervalTask(task, delay, 0));
	}

	/**
	 * Add a fixed-delay {@link IntervalTask}.
	 * @since 3.2
	 * @see TaskScheduler#scheduleWithFixedDelay(Runnable, long)
	 */
	public void addFixedDelayTask(IntervalTask task) {
		if (this.fixedDelayTasks == null) {
			this.fixedDelayTasks = new ArrayList<IntervalTask>();
		}
		this.fixedDelayTasks.add(task);
	}


	/**
	 * Return whether this {@code ScheduledTaskRegistrar} has any tasks registered.
	 * @since 3.2
	 */
	public boolean hasTasks() {
		return (this.fixedRateTasks != null && !this.fixedRateTasks.isEmpty()) ||
				(this.fixedDelayTasks != null && !this.fixedDelayTasks.isEmpty()) ||
				(this.cronTasks != null && !this.cronTasks.isEmpty()) ||
				(this.triggerTasks != null && !this.triggerTasks.isEmpty());
	}


	/**
	 * Calls {@link #scheduleTasks()} at bean construction time.
	 */
	@Override
	public void afterPropertiesSet() {
		scheduleTasks();
	}

	/**
	 * Schedule all registered tasks against the underlying {@linkplain
	 * #setTaskScheduler(TaskScheduler) task scheduler}, falling back to a
	 * locally managed single-thread executor if none was set.
	 */
	protected void scheduleTasks() {
		long now = System.currentTimeMillis();

		if (this.taskScheduler == null) {
			this.localExecutor = Executors.newSingleThreadScheduledExecutor();
			this.taskScheduler = new ConcurrentTaskScheduler(this.localExecutor);
		}
		if (this.triggerTasks != null) {
			for (TriggerTask task : this.triggerTasks) {
				this.scheduledFutures.add(this.taskScheduler.schedule(
						task.getRunnable(), task.getTrigger()));
			}
		}
		if (this.cronTasks != null) {
			for (CronTask task : this.cronTasks) {
				this.scheduledFutures.add(this.taskScheduler.schedule(
						task.getRunnable(), task.getTrigger()));
			}
		}
		if (this.fixedRateTasks != null) {
			for (IntervalTask task : this.fixedRateTasks) {
				this.scheduledFutures.add(scheduleIntervalTask(task, now, true));
			}
		}
		if (this.fixedDelayTasks != null) {
			for (IntervalTask task : this.fixedDelayTasks) {
				this.scheduledFutures.add(scheduleIntervalTask(task, now, false));
			}
		}
	}

	/**
	 * Schedule a single interval task, honoring its optional initial delay
	 * (the fixed-rate and fixed-delay paths only differ in the scheduler
	 * method invoked).
	 * @param task the task to schedule
	 * @param now the timestamp the initial delay is relative to
	 * @param fixedRate {@code true} for fixed-rate execution,
	 * {@code false} for fixed-delay execution
	 * @return the future for the scheduled task
	 */
	private ScheduledFuture<?> scheduleIntervalTask(IntervalTask task, long now, boolean fixedRate) {
		if (task.getInitialDelay() > 0) {
			Date startTime = new Date(now + task.getInitialDelay());
			return (fixedRate ?
					this.taskScheduler.scheduleAtFixedRate(task.getRunnable(), startTime, task.getInterval()) :
					this.taskScheduler.scheduleWithFixedDelay(task.getRunnable(), startTime, task.getInterval()));
		}
		return (fixedRate ?
				this.taskScheduler.scheduleAtFixedRate(task.getRunnable(), task.getInterval()) :
				this.taskScheduler.scheduleWithFixedDelay(task.getRunnable(), task.getInterval()));
	}


	/**
	 * Cancel all scheduled tasks and shut down the locally managed executor,
	 * if any.
	 */
	@Override
	public void destroy() {
		for (ScheduledFuture<?> future : this.scheduledFutures) {
			future.cancel(true);
		}
		if (this.localExecutor != null) {
			this.localExecutor.shutdownNow();
		}
	}
}
| |
/*
* Copyright (c) 2008-2015 Citrix Systems, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.citrix.netscaler.nitro.resource.config.vpn;
import com.citrix.netscaler.nitro.resource.base.*;
import com.citrix.netscaler.nitro.service.nitro_service;
import com.citrix.netscaler.nitro.service.options;
import com.citrix.netscaler.nitro.util.*;
import com.citrix.netscaler.nitro.exception.nitro_exception;
// Envelope type the NITRO payload formatter deserializes GET responses into.
// The field name deliberately mirrors the resource class name: it must match
// the JSON key in the NetScaler response verbatim, so do not rename it.
class vpnvserver_authenticationldappolicy_binding_response extends base_response
{
	public vpnvserver_authenticationldappolicy_binding[] vpnvserver_authenticationldappolicy_binding;
}
/**
* Binding class showing the authenticationldappolicy that can be bound to vpnvserver.
*/
public class vpnvserver_authenticationldappolicy_binding extends base_resource
{
private String policy;
private Long priority;
private Long acttype;
private Boolean secondary;
private Boolean groupextraction;
private String name;
private String gotopriorityexpression;
private String bindpoint;
private Long __count;
/**
* <pre>
* The priority, if any, of the vpn vserver policy.
* </pre>
*/
public void set_priority(long priority) throws Exception {
this.priority = new Long(priority);
}
/**
* <pre>
* The priority, if any, of the vpn vserver policy.
* </pre>
*/
public void set_priority(Long priority) throws Exception{
this.priority = priority;
}
	/**
	 * <pre>
	 * The priority, if any, of the vpn vserver policy.
	 * </pre>
	 * @return the bound policy's priority, or null if never set
	 */
	public Long get_priority() throws Exception {
		return this.priority;
	}
/**
* <pre>
* Expression or other value specifying the next policy to evaluate if the current policy evaluates to TRUE. Specify one of the following values:
* NEXT - Evaluate the policy with the next higher priority number.
* END - End policy evaluation.
* USE_INVOCATION_RESULT - Applicable if this policy invokes another policy label. If the final goto in the invoked policy label has a value of END, the evaluation stops. If the final goto is anything other than END, the current policy label performs a NEXT.
* A default syntax or classic expression that evaluates to a number.
If you specify an expression, the number to which it evaluates determines the next policy to evaluate, as follows:
* If the expression evaluates to a higher numbered priority, the policy with that priority is evaluated next.
* If the expression evaluates to the priority of the current policy, the policy with the next higher numbered priority is evaluated next.
* If the expression evaluates to a number that is larger than the largest numbered priority, policy evaluation ends.
An UNDEF event is triggered if:
* The expression is invalid.
* The expression evaluates to a priority number that is numerically lower than the current policy's priority.
* The expression evaluates to a priority number that is between the current policy's priority number (say, 30) and the highest priority number (say, 100), but does not match any configured priority number (for example, the expression evaluates to the number 85). This example assumes that the priority number increments by 10 for every successive policy, and therefore a priority number of 85 does not exist in the policy label.
* </pre>
*/
public void set_gotopriorityexpression(String gotopriorityexpression) throws Exception{
this.gotopriorityexpression = gotopriorityexpression;
}
	/**
	 * <pre>
	 * Expression or other value selecting the next policy to evaluate when the
	 * current policy evaluates to TRUE: NEXT, END, USE_INVOCATION_RESULT, or a
	 * default-syntax/classic expression evaluating to a priority number (see
	 * set_gotopriorityexpression for the full evaluation and UNDEF rules).
	 * </pre>
	 * @return the configured goto expression, or null if never set
	 */
	public String get_gotopriorityexpression() throws Exception {
		return this.gotopriorityexpression;
	}
/**
* <pre>
* The name of the policy, if any, bound to the vpn vserver.
* </pre>
*/
public void set_policy(String policy) throws Exception{
this.policy = policy;
}
	/**
	 * <pre>
	 * The name of the policy, if any, bound to the vpn vserver.
	 * </pre>
	 * @return the bound policy name, or null if never set
	 */
	public String get_policy() throws Exception {
		return this.policy;
	}
/**
* <pre>
* Bind the Authentication policy to a tertiary chain which will be used only for group extraction. The user will not authenticate against this server, and this will only be called if primary and/or secondary authentication has succeeded.
* </pre>
*/
public void set_groupextraction(boolean groupextraction) throws Exception {
this.groupextraction = new Boolean(groupextraction);
}
/**
* <pre>
* Bind the Authentication policy to a tertiary chain which will be used only for group extraction. The user will not authenticate against this server, and this will only be called if primary and/or secondary authentication has succeeded.
* </pre>
*/
public void set_groupextraction(Boolean groupextraction) throws Exception{
this.groupextraction = groupextraction;
}
	/**
	 * <pre>
	 * Bind the Authentication policy to a tertiary chain which will be used only for group extraction. The user will not authenticate against this server, and this will only be called if primary and/or secondary authentication has succeeded.
	 * </pre>
	 * @return the group-extraction flag, or null if never set
	 */
	public Boolean get_groupextraction() throws Exception {
		return this.groupextraction;
	}
/**
* <pre>
* Name of the virtual server.<br> Minimum length = 1
* </pre>
*/
public void set_name(String name) throws Exception{
this.name = name;
}
	/**
	 * <pre>
	 * Name of the virtual server.<br> Minimum length = 1
	 * </pre>
	 * @return the vserver name this binding belongs to, or null if never set
	 */
	public String get_name() throws Exception {
		return this.name;
	}
/**
* <pre>
* Bind the authentication policy as the secondary policy to use in a two-factor configuration. A user must then authenticate not only via a primary authentication method but also via a secondary authentication method. User groups are aggregated across both. The user name must be exactly the same for both authentication methods, but they can require different passwords.
* </pre>
*/
public void set_secondary(boolean secondary) throws Exception {
this.secondary = new Boolean(secondary);
}
/**
* <pre>
* Bind the authentication policy as the secondary policy to use in a two-factor configuration. A user must then authenticate not only via a primary authentication method but also via a secondary authentication method. User groups are aggregated across both. The user name must be exactly the same for both authentication methods, but they can require different passwords.
* </pre>
*/
public void set_secondary(Boolean secondary) throws Exception{
this.secondary = secondary;
}
	/**
	 * <pre>
	 * Bind the authentication policy as the secondary policy to use in a two-factor configuration. A user must then authenticate not only via a primary authentication method but also via a secondary authentication method. User groups are aggregated across both. The user name must be exactly the same for both authentication methods, but they can require different passwords.
	 * </pre>
	 * @return the secondary-chain flag, or null if never set
	 */
	public Boolean get_secondary() throws Exception {
		return this.secondary;
	}
/**
* <pre>
* Bind point to which to bind the policy. Applies only to rewrite and cache policies. If you do not set this parameter, the policy is bound to REQ_DEFAULT or RES_DEFAULT, depending on whether the policy rule is a response-time or a request-time expression.<br> Possible values = REQUEST, RESPONSE, ICA_REQUEST, OTHERTCP_REQUEST
* </pre>
*/
public void set_bindpoint(String bindpoint) throws Exception{
this.bindpoint = bindpoint;
}
	/**
	 * <pre>
	 * Bind point to which to bind the policy. Applies only to rewrite and cache policies. If you do not set this parameter, the policy is bound to REQ_DEFAULT or RES_DEFAULT, depending on whether the policy rule is a response-time or a request-time expression.<br> Possible values = REQUEST, RESPONSE, ICA_REQUEST, OTHERTCP_REQUEST
	 * </pre>
	 * @return the configured bind point, or null if never set
	 */
	public String get_bindpoint() throws Exception {
		return this.bindpoint;
	}
	/**
	 * <pre>
	 * Read-only action-type value reported by NetScaler for this binding.
	 * NOTE(review): the NITRO schema provides no description for this field -
	 * its semantics are undocumented here; confirm against the NITRO reference.
	 * </pre>
	 */
	public Long get_acttype() throws Exception {
		return this.acttype;
	}
	/**
	 * <pre>
	 * converts nitro response into object and returns the object array in case of get request.
	 * </pre>
	 */
	protected base_resource[] get_nitro_response(nitro_service service, String response) throws Exception{
		vpnvserver_authenticationldappolicy_binding_response result = (vpnvserver_authenticationldappolicy_binding_response) service.get_payload_formatter().string_to_resource(vpnvserver_authenticationldappolicy_binding_response.class, response);
		if(result.errorcode != 0) {
			// Error code 444 invalidates the cached session so the next call
			// re-authenticates (presumably session expiry - confirm against the
			// NITRO error-code reference).
			if (result.errorcode == 444) {
				service.clear_session();
			}
			if(result.severity != null)
			{
				// Only severity ERROR is fatal; other severities (warnings)
				// fall through and the payload is still returned.
				if (result.severity.equals("ERROR"))
					throw new nitro_exception(result.message,result.errorcode);
			}
			else
			{
				// No severity reported: treat any non-zero error code as fatal.
				throw new nitro_exception(result.message,result.errorcode);
			}
		}
		return result.vpnvserver_authenticationldappolicy_binding;
	}
	/**
	 * <pre>
	 * Returns the value of object identifier argument
	 * </pre>
	 */
	protected String get_object_name() {
		// The vserver name identifies this binding resource in request URLs.
		return this.name;
	}
public static base_response add(nitro_service client, vpnvserver_authenticationldappolicy_binding resource) throws Exception {
vpnvserver_authenticationldappolicy_binding updateresource = new vpnvserver_authenticationldappolicy_binding();
updateresource.name = resource.name;
updateresource.policy = resource.policy;
updateresource.priority = resource.priority;
updateresource.secondary = resource.secondary;
updateresource.groupextraction = resource.groupextraction;
updateresource.gotopriorityexpression = resource.gotopriorityexpression;
updateresource.bindpoint = resource.bindpoint;
return updateresource.update_resource(client);
}
public static base_responses add(nitro_service client, vpnvserver_authenticationldappolicy_binding resources[]) throws Exception {
base_responses result = null;
if (resources != null && resources.length > 0) {
vpnvserver_authenticationldappolicy_binding updateresources[] = new vpnvserver_authenticationldappolicy_binding[resources.length];
for (int i=0;i<resources.length;i++){
updateresources[i] = new vpnvserver_authenticationldappolicy_binding();
updateresources[i].name = resources[i].name;
updateresources[i].policy = resources[i].policy;
updateresources[i].priority = resources[i].priority;
updateresources[i].secondary = resources[i].secondary;
updateresources[i].groupextraction = resources[i].groupextraction;
updateresources[i].gotopriorityexpression = resources[i].gotopriorityexpression;
updateresources[i].bindpoint = resources[i].bindpoint;
}
result = update_bulk_request(client, updateresources);
}
return result;
}
public static base_response delete(nitro_service client, vpnvserver_authenticationldappolicy_binding resource) throws Exception {
vpnvserver_authenticationldappolicy_binding deleteresource = new vpnvserver_authenticationldappolicy_binding();
deleteresource.name = resource.name;
deleteresource.policy = resource.policy;
deleteresource.secondary = resource.secondary;
deleteresource.groupextraction = resource.groupextraction;
deleteresource.bindpoint = resource.bindpoint;
return deleteresource.delete_resource(client);
}
public static base_responses delete(nitro_service client, vpnvserver_authenticationldappolicy_binding resources[]) throws Exception {
base_responses result = null;
if (resources != null && resources.length > 0) {
vpnvserver_authenticationldappolicy_binding deleteresources[] = new vpnvserver_authenticationldappolicy_binding[resources.length];
for (int i=0;i<resources.length;i++){
deleteresources[i] = new vpnvserver_authenticationldappolicy_binding();
deleteresources[i].name = resources[i].name;
deleteresources[i].policy = resources[i].policy;
deleteresources[i].secondary = resources[i].secondary;
deleteresources[i].groupextraction = resources[i].groupextraction;
deleteresources[i].bindpoint = resources[i].bindpoint;
}
result = delete_bulk_request(client, deleteresources);
}
return result;
}
/**
 * Use this API to fetch vpnvserver_authenticationldappolicy_binding resources of a given name.
 */
public static vpnvserver_authenticationldappolicy_binding[] get(nitro_service service, String name) throws Exception{
	vpnvserver_authenticationldappolicy_binding fetcher = new vpnvserver_authenticationldappolicy_binding();
	fetcher.set_name(name);
	return (vpnvserver_authenticationldappolicy_binding[]) fetcher.get_resources(service);
}
/**
 * Use this API to fetch a filtered set of vpnvserver_authenticationldappolicy_binding resources.
 * The filter string should be in JSON format, e.g. "port:80,servicetype:HTTP".
 */
public static vpnvserver_authenticationldappolicy_binding[] get_filtered(nitro_service service, String name, String filter) throws Exception{
	vpnvserver_authenticationldappolicy_binding fetcher = new vpnvserver_authenticationldappolicy_binding();
	fetcher.set_name(name);
	options opts = new options();
	opts.set_filter(filter);
	return (vpnvserver_authenticationldappolicy_binding[]) fetcher.getfiltered(service, opts);
}
/**
 * Use this API to fetch a filtered set of vpnvserver_authenticationldappolicy_binding resources.
 * Set the filter parameter values in the filtervalue objects.
 */
public static vpnvserver_authenticationldappolicy_binding[] get_filtered(nitro_service service, String name, filtervalue[] filter) throws Exception{
	vpnvserver_authenticationldappolicy_binding fetcher = new vpnvserver_authenticationldappolicy_binding();
	fetcher.set_name(name);
	options opts = new options();
	opts.set_filter(filter);
	return (vpnvserver_authenticationldappolicy_binding[]) fetcher.getfiltered(service, opts);
}
/**
 * Use this API to count vpnvserver_authenticationldappolicy_binding resources configured on NetScaler.
 */
public static long count(nitro_service service, String name) throws Exception{
	vpnvserver_authenticationldappolicy_binding obj = new vpnvserver_authenticationldappolicy_binding();
	obj.set_name(name);
	options option = new options();
	option.set_count(true);
	vpnvserver_authenticationldappolicy_binding response[] = (vpnvserver_authenticationldappolicy_binding[]) obj.get_resources(service,option);
	// Guard against an empty result array in addition to null: previously a
	// non-null empty response threw ArrayIndexOutOfBoundsException on response[0].
	if (response != null && response.length > 0) {
		return response[0].__count;
	}
	return 0;
}
/**
 * Use this API to count the filtered set of vpnvserver_authenticationldappolicy_binding resources.
 * filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
 */
public static long count_filtered(nitro_service service, String name, String filter) throws Exception{
	vpnvserver_authenticationldappolicy_binding obj = new vpnvserver_authenticationldappolicy_binding();
	obj.set_name(name);
	options option = new options();
	option.set_count(true);
	option.set_filter(filter);
	vpnvserver_authenticationldappolicy_binding[] response = (vpnvserver_authenticationldappolicy_binding[]) obj.getfiltered(service, option);
	// Guard against an empty result array in addition to null: previously a
	// non-null empty response threw ArrayIndexOutOfBoundsException on response[0].
	if (response != null && response.length > 0) {
		return response[0].__count;
	}
	return 0;
}
/**
 * Use this API to count the filtered set of vpnvserver_authenticationldappolicy_binding resources.
 * set the filter parameter values in filtervalue object.
 */
public static long count_filtered(nitro_service service, String name, filtervalue[] filter) throws Exception{
	vpnvserver_authenticationldappolicy_binding obj = new vpnvserver_authenticationldappolicy_binding();
	obj.set_name(name);
	options option = new options();
	option.set_count(true);
	option.set_filter(filter);
	vpnvserver_authenticationldappolicy_binding[] response = (vpnvserver_authenticationldappolicy_binding[]) obj.getfiltered(service, option);
	// Guard against an empty result array in addition to null: previously a
	// non-null empty response threw ArrayIndexOutOfBoundsException on response[0].
	if (response != null && response.length > 0) {
		return response[0].__count;
	}
	return 0;
}
// Valid values for the bindpoint field of this binding (where on the virtual
// server the policy is evaluated).
public static class bindpointEnum {
public static final String REQUEST = "REQUEST";
public static final String RESPONSE = "RESPONSE";
public static final String ICA_REQUEST = "ICA_REQUEST";
public static final String OTHERTCP_REQUEST = "OTHERTCP_REQUEST";
}
}
| |
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.components.minidump_uploader;
import android.support.test.filters.SmallTest;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.chromium.base.ApiCompatibilityUtils;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.chromium.base.test.util.Feature;
import org.chromium.components.minidump_uploader.util.HttpURLConnectionFactory;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
/**
 * Unittests for {@link MinidumpUploader}.
 */
@RunWith(BaseJUnit4ClassRunner.class)
public class MinidumpUploaderTest {
    /* package */ static final String BOUNDARY = "TESTBOUNDARY";
    /* package */ static final String UPLOAD_CRASH_ID = "IMACRASHID";

    @Rule
    public CrashTestRule mTestRule = new CrashTestRule();

    // Minidump file created fresh for each test in setUp() and removed in tearDown().
    private File mUploadTestFile;

    /**
     * A HttpURLConnection that performs some basic checks to ensure we are uploading
     * minidumps correctly.
     */
    /* package */ static class TestHttpURLConnection extends HttpURLConnection {
        static final String DEFAULT_EXPECTED_CONTENT_TYPE =
                String.format(MinidumpUploader.CONTENT_TYPE_TMPL, BOUNDARY);

        private final String mExpectedContentType;

        /**
         * The value of the "Content-Type" property if the property has been set.
         */
        private String mContentTypePropertyValue = "";

        public TestHttpURLConnection(URL url) {
            this(url, DEFAULT_EXPECTED_CONTENT_TYPE);
        }

        public TestHttpURLConnection(URL url, String contentType) {
            super(url);
            mExpectedContentType = contentType;
            Assert.assertEquals(MinidumpUploader.CRASH_URL_STRING, url.toString());
        }

        @Override
        public void disconnect() {
            // Check that the "Content-Type" property has been set and the property's value.
            Assert.assertEquals(mExpectedContentType, mContentTypePropertyValue);
        }

        @Override
        public InputStream getInputStream() {
            return new ByteArrayInputStream(ApiCompatibilityUtils.getBytesUtf8(UPLOAD_CRASH_ID));
        }

        @Override
        public OutputStream getOutputStream() {
            return new ByteArrayOutputStream();
        }

        @Override
        public int getResponseCode() {
            return 200;
        }

        @Override
        public String getResponseMessage() {
            return null;
        }

        @Override
        public boolean usingProxy() {
            return false;
        }

        @Override
        public void connect() {}

        @Override
        public void setRequestProperty(String key, String value) {
            if (key.equals("Content-Type")) {
                mContentTypePropertyValue = value;
            }
        }
    }

    /**
     * A HttpURLConnectionFactory that performs some basic checks to ensure we are uploading
     * minidumps correctly.
     */
    /* package */ static class TestHttpURLConnectionFactory implements HttpURLConnectionFactory {
        String mContentType;

        public TestHttpURLConnectionFactory() {
            mContentType = TestHttpURLConnection.DEFAULT_EXPECTED_CONTENT_TYPE;
        }

        @Override
        public HttpURLConnection createHttpURLConnection(String url) {
            try {
                return new TestHttpURLConnection(new URL(url), mContentType);
            } catch (IOException e) {
                return null;
            }
        }
    }

    /** A factory whose connections report a fixed HTTP error code. */
    /* package */ static class ErrorCodeHttpURLConnectionFactory
            implements HttpURLConnectionFactory {
        private final int mErrorCode;

        ErrorCodeHttpURLConnectionFactory(int errorCode) {
            mErrorCode = errorCode;
        }

        @Override
        public HttpURLConnection createHttpURLConnection(String url) {
            try {
                return new TestHttpURLConnection(new URL(url)) {
                    @Override
                    public int getResponseCode() {
                        return mErrorCode;
                    }
                };
            } catch (IOException e) {
                return null;
            }
        }
    }

    /** A factory that fails the test if any connection is attempted. */
    /* package */ static class FailHttpURLConnectionFactory implements HttpURLConnectionFactory {
        @Override
        public HttpURLConnection createHttpURLConnection(String url) {
            Assert.fail();
            return null;
        }
    }

    @Before
    public void setUp() throws IOException {
        mUploadTestFile = new File(mTestRule.getCrashDir(), "crashFile");
        CrashTestRule.setUpMinidumpFile(mUploadTestFile, MinidumpUploaderTest.BOUNDARY);
    }

    @After
    public void tearDown() throws IOException {
        mUploadTestFile.delete();
    }

    // This is a regression test for http://crbug.com/712420
    @Test
    @SmallTest
    @Feature({"Android-AppBase"})
    public void testCallWithInvalidMinidumpBoundary() throws Exception {
        // Include an invalid character, '[', in the test string.
        final String boundary = "--InvalidBoundaryWithSpecialCharacter--[";
        CrashTestRule.setUpMinidumpFile(mUploadTestFile, boundary);

        // Expect no Content-Type to be set, since the upload should be rejected
        // before any data is written to the connection.
        HttpURLConnectionFactory httpURLConnectionFactory = new TestHttpURLConnectionFactory() {
            { mContentType = ""; }
        };
        MinidumpUploader minidumpUploader = new MinidumpUploader(httpURLConnectionFactory);
        MinidumpUploader.Result result = minidumpUploader.upload(mUploadTestFile);
        Assert.assertTrue(result.isFailure());
    }

    @Test
    @SmallTest
    @Feature({"Android-AppBase"})
    public void testCallWithValidMinidumpBoundary() throws Exception {
        // Include all valid characters in the test string.
        final String boundary = "--0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
        final String expectedContentType =
                String.format(MinidumpUploader.CONTENT_TYPE_TMPL, boundary);
        CrashTestRule.setUpMinidumpFile(mUploadTestFile, boundary);

        HttpURLConnectionFactory httpURLConnectionFactory = new TestHttpURLConnectionFactory() {
            { mContentType = expectedContentType; }
        };
        MinidumpUploader minidumpUploader = new MinidumpUploader(httpURLConnectionFactory);
        MinidumpUploader.Result result = minidumpUploader.upload(mUploadTestFile);
        Assert.assertTrue(result.isSuccess());
    }

    @Test
    @SmallTest
    @Feature({"Android-AppBase"})
    public void testReceivingErrorCodes() {
        final int[] errorCodes = {400, 401, 403, 404, 500};
        for (int errorCode : errorCodes) {
            HttpURLConnectionFactory httpURLConnectionFactory =
                    new ErrorCodeHttpURLConnectionFactory(errorCode);
            MinidumpUploader minidumpUploader = new MinidumpUploader(httpURLConnectionFactory);
            MinidumpUploader.Result result = minidumpUploader.upload(mUploadTestFile);
            Assert.assertTrue(result.isUploadError());
            // JUnit convention: expected value first, actual second (was reversed,
            // which produced misleading failure messages).
            Assert.assertEquals(errorCode, result.errorCode());
        }
    }
}
| |
/*PLEASE DO NOT EDIT THIS CODE*/
/*This code was generated using the UMPLE 1.21.0.4666 modeling language!*/
import java.util.*;
// line 44 "ElevatorSystemB.ump"
// line 87 "ElevatorSystemB.ump"
// A floor call button that requests upward travel (Umple-generated code).
// Maintains a bidirectional many-to-many association with Elevator; the inverse
// side is Elevator's upCallButton collection. Every mutator below keeps both
// sides of the association consistent and rolls back its local change on failure.
public class UpCallButton extends Button
{
//------------------------
// MEMBER VARIABLES
//------------------------
//UpCallButton Associations
// Elevators served by this button; minimum multiplicity is 1 (see
// minimumNumberOfElevators), so the list is never empty after construction.
private List<Elevator> elevators;
//------------------------
// CONSTRUCTOR
//------------------------
// Creates the button and associates it with the given elevators. Throws
// RuntimeException when setElevators rejects the input (duplicates, or
// fewer than the minimum of 1).
public UpCallButton(boolean aLightOn, Elevator... allElevators)
{
super(aLightOn);
elevators = new ArrayList<Elevator>();
boolean didAddElevators = setElevators(allElevators);
if (!didAddElevators)
{
throw new RuntimeException("Unable to create UpCallButton, must have at least 1 elevators");
}
}
//------------------------
// INTERFACE
//------------------------
// Returns the elevator at the given index (IndexOutOfBoundsException if invalid).
public Elevator getElevator(int index)
{
Elevator aElevator = elevators.get(index);
return aElevator;
}
// Returns an unmodifiable view of the associated elevators.
public List<Elevator> getElevators()
{
List<Elevator> newElevators = Collections.unmodifiableList(elevators);
return newElevators;
}
// Number of associated elevators.
public int numberOfElevators()
{
int number = elevators.size();
return number;
}
// True when at least one elevator is associated.
public boolean hasElevators()
{
boolean has = elevators.size() > 0;
return has;
}
// Index of the given elevator in the association, or -1 when not associated.
public int indexOfElevator(Elevator aElevator)
{
int index = elevators.indexOf(aElevator);
return index;
}
// Checks the association's multiplicity constraint (>= 1).
public boolean isNumberOfElevatorsValid()
{
boolean isValid = numberOfElevators() >= minimumNumberOfElevators();
return isValid;
}
// Lower bound of the association's multiplicity.
public static int minimumNumberOfElevators()
{
return 1;
}
// Adds an elevator and updates the inverse side. Returns false when the
// elevator is already present or when the inverse-side add fails (in which
// case the local add is rolled back).
public boolean addElevator(Elevator aElevator)
{
boolean wasAdded = false;
if (elevators.contains(aElevator)) { return false; }
elevators.add(aElevator);
if (aElevator.indexOfUpCallButton(this) != -1)
{
// Inverse side already links back to this button; nothing more to do.
wasAdded = true;
}
else
{
wasAdded = aElevator.addUpCallButton(this);
if (!wasAdded)
{
// Inverse-side add failed: undo the local add to stay consistent.
elevators.remove(aElevator);
}
}
return wasAdded;
}
// Removes an elevator unless that would violate the minimum multiplicity.
// Restores the elevator at its old index if the inverse-side removal fails.
public boolean removeElevator(Elevator aElevator)
{
boolean wasRemoved = false;
if (!elevators.contains(aElevator))
{
return wasRemoved;
}
if (numberOfElevators() <= minimumNumberOfElevators())
{
// Removing would drop below the minimum of 1; refuse.
return wasRemoved;
}
int oldIndex = elevators.indexOf(aElevator);
elevators.remove(oldIndex);
if (aElevator.indexOfUpCallButton(this) == -1)
{
wasRemoved = true;
}
else
{
wasRemoved = aElevator.removeUpCallButton(this);
if (!wasRemoved)
{
// Inverse-side removal failed: reinsert at the original position.
elevators.add(oldIndex,aElevator);
}
}
return wasRemoved;
}
// Replaces the whole elevator set. Rejects duplicates and inputs below the
// minimum multiplicity; diffs against the old set so the inverse side is
// only touched for elevators that actually enter or leave the association.
public boolean setElevators(Elevator... newElevators)
{
boolean wasSet = false;
ArrayList<Elevator> verifiedElevators = new ArrayList<Elevator>();
for (Elevator aElevator : newElevators)
{
if (verifiedElevators.contains(aElevator))
{
continue;
}
verifiedElevators.add(aElevator);
}
if (verifiedElevators.size() != newElevators.length || verifiedElevators.size() < minimumNumberOfElevators())
{
// Duplicates were supplied, or too few distinct elevators remain.
return wasSet;
}
ArrayList<Elevator> oldElevators = new ArrayList<Elevator>(elevators);
elevators.clear();
for (Elevator aNewElevator : verifiedElevators)
{
elevators.add(aNewElevator);
if (oldElevators.contains(aNewElevator))
{
// Already associated: keep it and drop it from the "to unlink" set.
oldElevators.remove(aNewElevator);
}
else
{
aNewElevator.addUpCallButton(this);
}
}
// Whatever is left in oldElevators is no longer associated; unlink it.
for (Elevator anOldElevator : oldElevators)
{
anOldElevator.removeUpCallButton(this);
}
wasSet = true;
return wasSet;
}
// Adds an elevator at a specific position; out-of-range indices are clamped.
public boolean addElevatorAt(Elevator aElevator, int index)
{
boolean wasAdded = false;
if(addElevator(aElevator))
{
if(index < 0 ) { index = 0; }
if(index > numberOfElevators()) { index = numberOfElevators() - 1; }
elevators.remove(aElevator);
elevators.add(index, aElevator);
wasAdded = true;
}
return wasAdded;
}
// Moves an already-associated elevator to the given position, or adds it
// there when it is not yet associated.
public boolean addOrMoveElevatorAt(Elevator aElevator, int index)
{
boolean wasAdded = false;
if(elevators.contains(aElevator))
{
if(index < 0 ) { index = 0; }
if(index > numberOfElevators()) { index = numberOfElevators() - 1; }
elevators.remove(aElevator);
elevators.add(index, aElevator);
wasAdded = true;
}
else
{
wasAdded = addElevatorAt(aElevator, index);
}
return wasAdded;
}
// Detaches this button from all elevators before delegating deletion upward.
public void delete()
{
ArrayList<Elevator> copyOfElevators = new ArrayList<Elevator>(elevators);
elevators.clear();
for(Elevator aElevator : copyOfElevators)
{
aElevator.removeUpCallButton(this);
}
super.delete();
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/bigtable/admin/v2/bigtable_instance_admin.proto
package com.google.bigtable.admin.v2;
/**
*
*
* <pre>
* Request message for BigtableInstanceAdmin.CreateCluster.
* </pre>
*
* Protobuf type {@code google.bigtable.admin.v2.CreateClusterRequest}
*/
public final class CreateClusterRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.bigtable.admin.v2.CreateClusterRequest)
CreateClusterRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateClusterRequest.newBuilder() to construct.
private CreateClusterRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-instance constructor: string fields start at their proto3 default ("").
private CreateClusterRequest() {
parent_ = "";
clusterId_ = "";
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
// Fields not recognized by this message's schema, preserved for reserialization.
return this.unknownFields;
}
// Parses a serialized CreateClusterRequest from the given stream. Unrecognized
// fields are preserved in unknownFields rather than discarded.
private CreateClusterRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 signals end of input.
done = true;
break;
case 10:
{
// Field 1 (parent), wire type 2: length-delimited UTF-8 string.
java.lang.String s = input.readStringRequireUtf8();
parent_ = s;
break;
}
case 18:
{
// Field 2 (cluster_id), wire type 2.
java.lang.String s = input.readStringRequireUtf8();
clusterId_ = s;
break;
}
case 26:
{
// Field 3 (cluster), wire type 2: embedded message. A repeated
// occurrence on the wire is merged into the previously parsed value.
com.google.bigtable.admin.v2.Cluster.Builder subBuilder = null;
if (cluster_ != null) {
subBuilder = cluster_.toBuilder();
}
cluster_ =
input.readMessage(
com.google.bigtable.admin.v2.Cluster.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(cluster_);
cluster_ = subBuilder.buildPartial();
}
break;
}
default:
{
// Unknown field: preserve it unless it terminates the message.
if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
// Always freeze whatever was parsed, even on failure.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor for this message type (field/wire metadata generated from the .proto file).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.bigtable.admin.v2.BigtableInstanceAdminProto
.internal_static_google_bigtable_admin_v2_CreateClusterRequest_descriptor;
}
// Maps descriptor fields to the generated accessors for reflective access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.bigtable.admin.v2.BigtableInstanceAdminProto
.internal_static_google_bigtable_admin_v2_CreateClusterRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.bigtable.admin.v2.CreateClusterRequest.class,
com.google.bigtable.admin.v2.CreateClusterRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
// Holds either a java.lang.String or a ByteString; decoded lazily on first
// String access (see getParent) and cached thereafter.
private volatile java.lang.Object parent_;
/**
 *
 *
 * <pre>
 * The unique name of the instance in which to create the new cluster.
 * Values are of the form
 * `projects/&lt;project&gt;/instances/&lt;instance&gt;`.
 * </pre>
 *
 * <code>string parent = 1;</code>
 */
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access: decode the ByteString once and cache the result.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * The unique name of the instance in which to create the new cluster.
 * Values are of the form
 * `projects/&lt;project&gt;/instances/&lt;instance&gt;`.
 * </pre>
 *
 * <code>string parent = 1;</code>
 */
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
// Cache the UTF-8 encoding so repeated byte access avoids re-encoding.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CLUSTER_ID_FIELD_NUMBER = 2;
// Holds either a java.lang.String or a ByteString; decoded lazily (same
// caching scheme as parent_).
private volatile java.lang.Object clusterId_;
/**
 *
 *
 * <pre>
 * The ID to be used when referring to the new cluster within its instance,
 * e.g., just `mycluster` rather than
 * `projects/myproject/instances/myinstance/clusters/mycluster`.
 * </pre>
 *
 * <code>string cluster_id = 2;</code>
 */
public java.lang.String getClusterId() {
java.lang.Object ref = clusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access: decode the ByteString once and cache the result.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
clusterId_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * The ID to be used when referring to the new cluster within its instance,
 * e.g., just `mycluster` rather than
 * `projects/myproject/instances/myinstance/clusters/mycluster`.
 * </pre>
 *
 * <code>string cluster_id = 2;</code>
 */
public com.google.protobuf.ByteString getClusterIdBytes() {
java.lang.Object ref = clusterId_;
if (ref instanceof java.lang.String) {
// Cache the UTF-8 encoding so repeated byte access avoids re-encoding.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
clusterId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CLUSTER_FIELD_NUMBER = 3;
// Message-typed field; null means "not set" (see hasCluster).
private com.google.bigtable.admin.v2.Cluster cluster_;
/**
 *
 *
 * <pre>
 * The cluster to be created.
 * Fields marked `OutputOnly` must be left blank.
 * </pre>
 *
 * <code>.google.bigtable.admin.v2.Cluster cluster = 3;</code>
 */
public boolean hasCluster() {
return cluster_ != null;
}
/**
 *
 *
 * <pre>
 * The cluster to be created.
 * Fields marked `OutputOnly` must be left blank.
 * </pre>
 *
 * <code>.google.bigtable.admin.v2.Cluster cluster = 3;</code>
 */
public com.google.bigtable.admin.v2.Cluster getCluster() {
// Never returns null: substitutes the default instance when unset.
return cluster_ == null ? com.google.bigtable.admin.v2.Cluster.getDefaultInstance() : cluster_;
}
/**
 *
 *
 * <pre>
 * The cluster to be created.
 * Fields marked `OutputOnly` must be left blank.
 * </pre>
 *
 * <code>.google.bigtable.admin.v2.Cluster cluster = 3;</code>
 */
public com.google.bigtable.admin.v2.ClusterOrBuilder getClusterOrBuilder() {
return getCluster();
}
// Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// This message type is always considered initialized; cache that result.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Proto3 serialization: fields at their default value are omitted from the wire.
if (!getParentBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!getClusterIdBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, clusterId_);
}
if (cluster_ != null) {
output.writeMessage(3, getCluster());
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Cached after the first computation (memoizedSize == -1 means "unknown").
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getParentBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!getClusterIdBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, clusterId_);
}
if (cluster_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getCluster());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
// Structural, field-by-field equality, including unknown fields.
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.bigtable.admin.v2.CreateClusterRequest)) {
return super.equals(obj);
}
com.google.bigtable.admin.v2.CreateClusterRequest other =
(com.google.bigtable.admin.v2.CreateClusterRequest) obj;
boolean result = true;
result = result && getParent().equals(other.getParent());
result = result && getClusterId().equals(other.getClusterId());
result = result && (hasCluster() == other.hasCluster());
if (hasCluster()) {
result = result && getCluster().equals(other.getCluster());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
// Memoized; recomputed only while memoizedHashCode is still 0.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER;
hash = (53 * hash) + getClusterId().hashCode();
if (hasCluster()) {
hash = (37 * hash) + CLUSTER_FIELD_NUMBER;
hash = (53 * hash) + getCluster().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom/parseDelimitedFrom overloads, all delegating to
// PARSER (byte sources) or to the GeneratedMessageV3 stream helpers.
public static com.google.bigtable.admin.v2.CreateClusterRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.bigtable.admin.v2.CreateClusterRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.bigtable.admin.v2.CreateClusterRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.bigtable.admin.v2.CreateClusterRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.bigtable.admin.v2.CreateClusterRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.bigtable.admin.v2.CreateClusterRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.bigtable.admin.v2.CreateClusterRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.bigtable.admin.v2.CreateClusterRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.bigtable.admin.v2.CreateClusterRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.bigtable.admin.v2.CreateClusterRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.bigtable.admin.v2.CreateClusterRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.bigtable.admin.v2.CreateClusterRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated from an existing message.
public static Builder newBuilder(com.google.bigtable.admin.v2.CreateClusterRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for BigtableInstanceAdmin.CreateCluster.
* </pre>
*
* Protobuf type {@code google.bigtable.admin.v2.CreateClusterRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.bigtable.admin.v2.CreateClusterRequest)
com.google.bigtable.admin.v2.CreateClusterRequestOrBuilder {
// Descriptor and reflection support for the Builder (mirrors the outer class).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.bigtable.admin.v2.BigtableInstanceAdminProto
.internal_static_google_bigtable_admin_v2_CreateClusterRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.bigtable.admin.v2.BigtableInstanceAdminProto
.internal_static_google_bigtable_admin_v2_CreateClusterRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.bigtable.admin.v2.CreateClusterRequest.class,
com.google.bigtable.admin.v2.CreateClusterRequest.Builder.class);
}
// Construct using com.google.bigtable.admin.v2.CreateClusterRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// No nested field builders require eager initialization for this message.
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
// Resets all fields to their proto3 defaults.
super.clear();
parent_ = "";
clusterId_ = "";
if (clusterBuilder_ == null) {
cluster_ = null;
} else {
cluster_ = null;
clusterBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.bigtable.admin.v2.BigtableInstanceAdminProto
.internal_static_google_bigtable_admin_v2_CreateClusterRequest_descriptor;
}
@java.lang.Override
public com.google.bigtable.admin.v2.CreateClusterRequest getDefaultInstanceForType() {
return com.google.bigtable.admin.v2.CreateClusterRequest.getDefaultInstance();
}
// Builds the message, throwing if it reports itself uninitialized.
@java.lang.Override
public com.google.bigtable.admin.v2.CreateClusterRequest build() {
com.google.bigtable.admin.v2.CreateClusterRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds the message without the initialization check.
@java.lang.Override
public com.google.bigtable.admin.v2.CreateClusterRequest buildPartial() {
com.google.bigtable.admin.v2.CreateClusterRequest result =
new com.google.bigtable.admin.v2.CreateClusterRequest(this);
result.parent_ = parent_;
result.clusterId_ = clusterId_;
if (clusterBuilder_ == null) {
result.cluster_ = cluster_;
} else {
result.cluster_ = clusterBuilder_.build();
}
onBuilt();
return result;
}
// Boilerplate overrides that narrow the return type to this Builder.
@java.lang.Override
public Builder clone() {
return (Builder) super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
// Dispatch to the typed merge when possible; otherwise fall back to
// reflection-based merging.
if (other instanceof com.google.bigtable.admin.v2.CreateClusterRequest) {
return mergeFrom((com.google.bigtable.admin.v2.CreateClusterRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.bigtable.admin.v2.CreateClusterRequest other) {
if (other == com.google.bigtable.admin.v2.CreateClusterRequest.getDefaultInstance())
return this;
// Proto3 merge semantics: only non-default fields of `other` overwrite.
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
onChanged();
}
if (!other.getClusterId().isEmpty()) {
clusterId_ = other.clusterId_;
onChanged();
}
if (other.hasCluster()) {
mergeCluster(other.getCluster());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.bigtable.admin.v2.CreateClusterRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.bigtable.admin.v2.CreateClusterRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
// Merge whatever was parsed before a failure, matching protobuf semantics.
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Holds either a java.lang.String or a ByteString; lazily converted and cached
// in whichever form was last requested.
private java.lang.Object parent_ = "";
/**
 *
 *
 * <pre>
 * The unique name of the instance in which to create the new cluster.
 * Values are of the form
 * `projects/&lt;project&gt;/instances/&lt;instance&gt;`.
 * </pre>
 *
 * <code>string parent = 1;</code>
 */
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
// Decode the ByteString once and cache the String form.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * The unique name of the instance in which to create the new cluster.
 * Values are of the form
 * `projects/&lt;project&gt;/instances/&lt;instance&gt;`.
 * </pre>
 *
 * <code>string parent = 1;</code>
 */
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
// Cache the UTF-8 encoding so repeated byte access avoids re-encoding.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * The unique name of the instance in which to create the new cluster.
 * Values are of the form
 * `projects/&lt;project&gt;/instances/&lt;instance&gt;`.
 * </pre>
 *
 * <code>string parent = 1;</code>
 */
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * The unique name of the instance in which to create the new cluster.
 * Values are of the form
 * `projects/&lt;project&gt;/instances/&lt;instance&gt;`.
 * </pre>
 *
 * <code>string parent = 1;</code>
 */
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * The unique name of the instance in which to create the new cluster.
 * Values are of the form
 * `projects/&lt;project&gt;/instances/&lt;instance&gt;`.
 * </pre>
 *
 * <code>string parent = 1;</code>
 */
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Rejects bytes that are not valid UTF-8, per the proto3 string contract.
checkByteStringIsUtf8(value);
parent_ = value;
onChanged();
return this;
}
// Stored as Object so the value may be either a String or a ByteString;
// conversions are performed lazily and the result is cached back into the field.
private java.lang.Object clusterId_ = "";
/**
 *
 *
 * <pre>
 * The ID to be used when referring to the new cluster within its instance,
 * e.g., just `mycluster` rather than
 * `projects/myproject/instances/myinstance/clusters/mycluster`.
 * </pre>
 *
 * <code>string cluster_id = 2;</code>
 */
public java.lang.String getClusterId() {
  java.lang.Object ref = clusterId_;
  if (!(ref instanceof java.lang.String)) {
    // Decode the cached ByteString once and memoize the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    clusterId_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 *
 *
 * <pre>
 * The ID to be used when referring to the new cluster within its instance,
 * e.g., just `mycluster` rather than
 * `projects/myproject/instances/myinstance/clusters/mycluster`.
 * </pre>
 *
 * <code>string cluster_id = 2;</code>
 */
public com.google.protobuf.ByteString getClusterIdBytes() {
  java.lang.Object ref = clusterId_;
  if (ref instanceof String) {
    // Encode the cached String once and memoize the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    clusterId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 *
 *
 * <pre>
 * The ID to be used when referring to the new cluster within its instance,
 * e.g., just `mycluster` rather than
 * `projects/myproject/instances/myinstance/clusters/mycluster`.
 * </pre>
 *
 * <code>string cluster_id = 2;</code>
 */
public Builder setClusterId(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  clusterId_ = value;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * The ID to be used when referring to the new cluster within its instance,
 * e.g., just `mycluster` rather than
 * `projects/myproject/instances/myinstance/clusters/mycluster`.
 * </pre>
 *
 * <code>string cluster_id = 2;</code>
 */
public Builder clearClusterId() {
  // Reset to the default instance's value (the empty string for proto3).
  clusterId_ = getDefaultInstance().getClusterId();
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * The ID to be used when referring to the new cluster within its instance,
 * e.g., just `mycluster` rather than
 * `projects/myproject/instances/myinstance/clusters/mycluster`.
 * </pre>
 *
 * <code>string cluster_id = 2;</code>
 */
public Builder setClusterIdBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Proto3 strings must be valid UTF-8; reject malformed bytes up front.
  checkByteStringIsUtf8(value);
  clusterId_ = value;
  onChanged();
  return this;
}
// Message field storage. Exactly one of cluster_ / clusterBuilder_ is authoritative:
// cluster_ holds a plain message until getClusterFieldBuilder() is first called,
// after which the SingleFieldBuilderV3 owns the value and cluster_ is nulled.
private com.google.bigtable.admin.v2.Cluster cluster_ = null;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.bigtable.admin.v2.Cluster,
        com.google.bigtable.admin.v2.Cluster.Builder,
        com.google.bigtable.admin.v2.ClusterOrBuilder>
    clusterBuilder_;
/**
 *
 *
 * <pre>
 * The cluster to be created.
 * Fields marked `OutputOnly` must be left blank.
 * </pre>
 *
 * <code>.google.bigtable.admin.v2.Cluster cluster = 3;</code>
 */
public boolean hasCluster() {
  // Present if either representation has been populated.
  return clusterBuilder_ != null || cluster_ != null;
}
/**
 *
 *
 * <pre>
 * The cluster to be created.
 * Fields marked `OutputOnly` must be left blank.
 * </pre>
 *
 * <code>.google.bigtable.admin.v2.Cluster cluster = 3;</code>
 */
public com.google.bigtable.admin.v2.Cluster getCluster() {
  if (clusterBuilder_ == null) {
    // Never return null: substitute the default instance when unset.
    return cluster_ == null
        ? com.google.bigtable.admin.v2.Cluster.getDefaultInstance()
        : cluster_;
  } else {
    return clusterBuilder_.getMessage();
  }
}
/**
 *
 *
 * <pre>
 * The cluster to be created.
 * Fields marked `OutputOnly` must be left blank.
 * </pre>
 *
 * <code>.google.bigtable.admin.v2.Cluster cluster = 3;</code>
 */
public Builder setCluster(com.google.bigtable.admin.v2.Cluster value) {
  if (clusterBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    cluster_ = value;
    onChanged();
  } else {
    clusterBuilder_.setMessage(value);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The cluster to be created.
 * Fields marked `OutputOnly` must be left blank.
 * </pre>
 *
 * <code>.google.bigtable.admin.v2.Cluster cluster = 3;</code>
 */
public Builder setCluster(com.google.bigtable.admin.v2.Cluster.Builder builderForValue) {
  if (clusterBuilder_ == null) {
    cluster_ = builderForValue.build();
    onChanged();
  } else {
    clusterBuilder_.setMessage(builderForValue.build());
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The cluster to be created.
 * Fields marked `OutputOnly` must be left blank.
 * </pre>
 *
 * <code>.google.bigtable.admin.v2.Cluster cluster = 3;</code>
 */
public Builder mergeCluster(com.google.bigtable.admin.v2.Cluster value) {
  if (clusterBuilder_ == null) {
    if (cluster_ != null) {
      // Merge into the existing message rather than overwriting it.
      cluster_ =
          com.google.bigtable.admin.v2.Cluster.newBuilder(cluster_)
              .mergeFrom(value)
              .buildPartial();
    } else {
      cluster_ = value;
    }
    onChanged();
  } else {
    clusterBuilder_.mergeFrom(value);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The cluster to be created.
 * Fields marked `OutputOnly` must be left blank.
 * </pre>
 *
 * <code>.google.bigtable.admin.v2.Cluster cluster = 3;</code>
 */
public Builder clearCluster() {
  if (clusterBuilder_ == null) {
    cluster_ = null;
    onChanged();
  } else {
    // Dropping the nested builder detaches it; onChanged() is propagated by it.
    cluster_ = null;
    clusterBuilder_ = null;
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The cluster to be created.
 * Fields marked `OutputOnly` must be left blank.
 * </pre>
 *
 * <code>.google.bigtable.admin.v2.Cluster cluster = 3;</code>
 */
public com.google.bigtable.admin.v2.Cluster.Builder getClusterBuilder() {
  // Handing out a mutable builder counts as a modification.
  onChanged();
  return getClusterFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * The cluster to be created.
 * Fields marked `OutputOnly` must be left blank.
 * </pre>
 *
 * <code>.google.bigtable.admin.v2.Cluster cluster = 3;</code>
 */
public com.google.bigtable.admin.v2.ClusterOrBuilder getClusterOrBuilder() {
  if (clusterBuilder_ != null) {
    return clusterBuilder_.getMessageOrBuilder();
  } else {
    return cluster_ == null
        ? com.google.bigtable.admin.v2.Cluster.getDefaultInstance()
        : cluster_;
  }
}
/**
 *
 *
 * <pre>
 * The cluster to be created.
 * Fields marked `OutputOnly` must be left blank.
 * </pre>
 *
 * <code>.google.bigtable.admin.v2.Cluster cluster = 3;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.bigtable.admin.v2.Cluster,
        com.google.bigtable.admin.v2.Cluster.Builder,
        com.google.bigtable.admin.v2.ClusterOrBuilder>
    getClusterFieldBuilder() {
  if (clusterBuilder_ == null) {
    // Lazily switch from plain-message storage to builder-backed storage,
    // seeding the nested builder with the current value.
    clusterBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.bigtable.admin.v2.Cluster,
            com.google.bigtable.admin.v2.Cluster.Builder,
            com.google.bigtable.admin.v2.ClusterOrBuilder>(
            getCluster(), getParentForChildren(), isClean());
    cluster_ = null;
  }
  return clusterBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Proto3 variant: replaces the builder's unknown-field set wholesale.
  return super.setUnknownFieldsProto3(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.bigtable.admin.v2.CreateClusterRequest)
}
// @@protoc_insertion_point(class_scope:google.bigtable.admin.v2.CreateClusterRequest)
// Shared immutable default (all-fields-unset) instance, created eagerly at class load.
private static final com.google.bigtable.admin.v2.CreateClusterRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.bigtable.admin.v2.CreateClusterRequest();
}
public static com.google.bigtable.admin.v2.CreateClusterRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Singleton wire-format parser; parsePartialFrom may return a message even when
// parsing did not reach the end of a well-formed payload.
private static final com.google.protobuf.Parser<CreateClusterRequest> PARSER =
    new com.google.protobuf.AbstractParser<CreateClusterRequest>() {
      @java.lang.Override
      public CreateClusterRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CreateClusterRequest(input, extensionRegistry);
      }
    };
public static com.google.protobuf.Parser<CreateClusterRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateClusterRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.bigtable.admin.v2.CreateClusterRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.util.Random;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
import org.apache.hadoop.hdfs.server.namenode.FSImage;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.PathUtils;
import org.junit.Test;
import org.slf4j.event.Level;
/**
 * A JUnit test for checking if restarting DFS preserves the
 * blocks that are part of an unclosed file.
 */
public class TestPersistBlocks {
  static {
    // Trace-level logging on the image and namesystem makes restart failures debuggable.
    GenericTestUtils.setLogLevel(FSImage.LOG, Level.TRACE);
    GenericTestUtils.setLogLevel(FSNamesystem.LOG, Level.TRACE);
  }
  private static final int BLOCK_SIZE = 4096;
  private static final int NUM_BLOCKS = 5;
  private static final String FILE_NAME = "/data";
  private static final Path FILE_PATH = new Path(FILE_NAME);
  // Distinct random payloads written before and after the NameNode restart so
  // the verification can tell the two writes apart.
  static final byte[] DATA_BEFORE_RESTART = new byte[BLOCK_SIZE * NUM_BLOCKS];
  static final byte[] DATA_AFTER_RESTART = new byte[BLOCK_SIZE * NUM_BLOCKS];
  private static final String HADOOP_1_0_MULTIBLOCK_TGZ =
      "hadoop-1.0-multiblock-file.tgz";
  static {
    // Unseeded Random: contents differ per run, but the test only compares
    // read-back bytes against these same arrays.
    Random rand = new Random();
    rand.nextBytes(DATA_BEFORE_RESTART);
    rand.nextBytes(DATA_AFTER_RESTART);
  }
  /** check if DFS remains in proper condition after a restart
   **/
  @Test
  public void testRestartDfsWithFlush() throws Exception {
    testRestartDfs(true);
  }
  /** check if DFS remains in proper condition after a restart
   **/
  @Test
  public void testRestartDfsWithSync() throws Exception {
    testRestartDfs(false);
  }
  /** check if DFS remains in proper condition after a restart
   * @param useFlush - if true then flush is used instead of sync (ie hflush)
   */
  void testRestartDfs(boolean useFlush) throws Exception {
    final Configuration conf = new HdfsConfiguration();
    // Turn off persistent IPC, so that the DFSClient can survive NN restart
    conf.setInt(
        CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY,
        0);
    conf.setBoolean(HdfsClientConfigKeys.Retry.POLICY_ENABLED_KEY, true);
    MiniDFSCluster cluster = null;
    long len = 0;
    FSDataOutputStream stream;
    try {
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
      cluster.waitActive();
      FileSystem fs = cluster.getFileSystem();
      // Creating a file with 4096 blockSize to write multiple blocks
      stream = fs.create(FILE_PATH, true, BLOCK_SIZE, (short) 1, BLOCK_SIZE);
      stream.write(DATA_BEFORE_RESTART);
      if (useFlush)
        stream.flush();
      else
        stream.hflush();
      // Wait for at least a few blocks to get through
      while (len <= BLOCK_SIZE) {
        FileStatus status = fs.getFileStatus(FILE_PATH);
        len = status.getLen();
        Thread.sleep(100);
      }
      // explicitly do NOT close the file.
      cluster.restartNameNode();
      // Check that the file has no less bytes than before the restart
      // This would mean that blocks were successfully persisted to the log
      FileStatus status = fs.getFileStatus(FILE_PATH);
      assertTrue("Length too short: " + status.getLen(),
          status.getLen() >= len);
      // And keep writing (ensures that leases are also persisted correctly)
      stream.write(DATA_AFTER_RESTART);
      stream.close();
      // Verify that the data showed up, both from before and after the restart.
      FSDataInputStream readStream = fs.open(FILE_PATH);
      try {
        byte[] verifyBuf = new byte[DATA_BEFORE_RESTART.length];
        IOUtils.readFully(readStream, verifyBuf, 0, verifyBuf.length);
        assertArrayEquals(DATA_BEFORE_RESTART, verifyBuf);
        IOUtils.readFully(readStream, verifyBuf, 0, verifyBuf.length);
        assertArrayEquals(DATA_AFTER_RESTART, verifyBuf);
      } finally {
        IOUtils.closeStream(readStream);
      }
    } finally {
      if (cluster != null) { cluster.shutdown(); }
    }
  }
  @Test
  public void testRestartDfsWithAbandonedBlock() throws Exception {
    final Configuration conf = new HdfsConfiguration();
    // Turn off persistent IPC, so that the DFSClient can survive NN restart
    conf.setInt(
        CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY,
        0);
    MiniDFSCluster cluster = null;
    long len = 0;
    FSDataOutputStream stream;
    try {
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
      FileSystem fs = cluster.getFileSystem();
      // Creating a file with 4096 blockSize to write multiple blocks
      stream = fs.create(FILE_PATH, true, BLOCK_SIZE, (short) 1, BLOCK_SIZE);
      stream.write(DATA_BEFORE_RESTART);
      stream.hflush();
      // Wait for all of the blocks to get through
      while (len < BLOCK_SIZE * (NUM_BLOCKS - 1)) {
        FileStatus status = fs.getFileStatus(FILE_PATH);
        len = status.getLen();
        Thread.sleep(100);
      }
      // Abandon the last block
      DFSClient dfsclient = DFSClientAdapter.getDFSClient((DistributedFileSystem)fs);
      HdfsFileStatus fileStatus = dfsclient.getNamenode().getFileInfo(FILE_NAME);
      LocatedBlocks blocks = dfsclient.getNamenode().getBlockLocations(
          FILE_NAME, 0, BLOCK_SIZE * NUM_BLOCKS);
      assertEquals(NUM_BLOCKS, blocks.getLocatedBlocks().size());
      LocatedBlock b = blocks.getLastLocatedBlock();
      dfsclient.getNamenode().abandonBlock(b.getBlock(), fileStatus.getFileId(),
          FILE_NAME, dfsclient.clientName);
      // explicitly do NOT close the file.
      cluster.restartNameNode();
      // Check that the file has no less bytes than before the restart
      // This would mean that blocks were successfully persisted to the log
      FileStatus status = fs.getFileStatus(FILE_PATH);
      assertTrue("Length incorrect: " + status.getLen(),
          status.getLen() == len - BLOCK_SIZE);
      // Verify the data showed up from before restart, sans abandoned block.
      FSDataInputStream readStream = fs.open(FILE_PATH);
      try {
        byte[] verifyBuf = new byte[DATA_BEFORE_RESTART.length - BLOCK_SIZE];
        IOUtils.readFully(readStream, verifyBuf, 0, verifyBuf.length);
        byte[] expectedBuf = new byte[DATA_BEFORE_RESTART.length - BLOCK_SIZE];
        System.arraycopy(DATA_BEFORE_RESTART, 0,
            expectedBuf, 0, expectedBuf.length);
        assertArrayEquals(expectedBuf, verifyBuf);
      } finally {
        IOUtils.closeStream(readStream);
      }
    } finally {
      if (cluster != null) { cluster.shutdown(); }
    }
  }
  @Test
  public void testRestartWithPartialBlockHflushed() throws IOException {
    final Configuration conf = new HdfsConfiguration();
    // Turn off persistent IPC, so that the DFSClient can survive NN restart
    conf.setInt(
        CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY,
        0);
    MiniDFSCluster cluster = null;
    FSDataOutputStream stream;
    try {
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
      FileSystem fs = cluster.getFileSystem();
      // NOTE(review): return value unused — presumably kept to force NN address
      // resolution; confirm whether this call is still needed.
      DFSUtilClient.getNNAddress(conf).getPort();
      // Creating a file with 4096 blockSize to write multiple blocks
      stream = fs.create(FILE_PATH, true, BLOCK_SIZE, (short) 1, BLOCK_SIZE);
      stream.write(DATA_BEFORE_RESTART);
      stream.write((byte)1);
      stream.hflush();
      // explicitly do NOT close the file before restarting the NN.
      cluster.restartNameNode();
      // this will fail if the final block of the file is prematurely COMPLETEd
      stream.write((byte)2);
      stream.hflush();
      stream.close();
      assertEquals(DATA_BEFORE_RESTART.length + 2,
          fs.getFileStatus(FILE_PATH).getLen());
      FSDataInputStream readStream = fs.open(FILE_PATH);
      try {
        byte[] verifyBuf = new byte[DATA_BEFORE_RESTART.length + 2];
        IOUtils.readFully(readStream, verifyBuf, 0, verifyBuf.length);
        byte[] expectedBuf = new byte[DATA_BEFORE_RESTART.length + 2];
        System.arraycopy(DATA_BEFORE_RESTART, 0, expectedBuf, 0,
            DATA_BEFORE_RESTART.length);
        System.arraycopy(new byte[]{1, 2}, 0, expectedBuf,
            DATA_BEFORE_RESTART.length, 2);
        assertArrayEquals(expectedBuf, verifyBuf);
      } finally {
        IOUtils.closeStream(readStream);
      }
    } finally {
      if (cluster != null) { cluster.shutdown(); }
    }
  }
  @Test
  public void testRestartWithAppend() throws IOException {
    final Configuration conf = new HdfsConfiguration();
    // Turn off persistent IPC, so that the DFSClient can survive NN restart
    conf.setInt(
        CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY,
        0);
    MiniDFSCluster cluster = null;
    FSDataOutputStream stream;
    try {
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
      FileSystem fs = cluster.getFileSystem();
      // NOTE(review): return value unused — see testRestartWithPartialBlockHflushed.
      DFSUtilClient.getNNAddress(conf).getPort();
      // Creating a file with 4096 blockSize to write multiple blocks
      stream = fs.create(FILE_PATH, true, BLOCK_SIZE, (short) 1, BLOCK_SIZE);
      // Write the first half, close, then append the second half so the
      // restart must recover an appended (reopened) file correctly.
      stream.write(DATA_BEFORE_RESTART, 0, DATA_BEFORE_RESTART.length / 2);
      stream.close();
      stream = fs.append(FILE_PATH, BLOCK_SIZE);
      stream.write(DATA_BEFORE_RESTART, DATA_BEFORE_RESTART.length / 2,
          DATA_BEFORE_RESTART.length / 2);
      stream.close();
      assertEquals(DATA_BEFORE_RESTART.length,
          fs.getFileStatus(FILE_PATH).getLen());
      cluster.restartNameNode();
      assertEquals(DATA_BEFORE_RESTART.length,
          fs.getFileStatus(FILE_PATH).getLen());
      FSDataInputStream readStream = fs.open(FILE_PATH);
      try {
        byte[] verifyBuf = new byte[DATA_BEFORE_RESTART.length];
        IOUtils.readFully(readStream, verifyBuf, 0, verifyBuf.length);
        assertArrayEquals(DATA_BEFORE_RESTART, verifyBuf);
      } finally {
        IOUtils.closeStream(readStream);
      }
    } finally {
      if (cluster != null) { cluster.shutdown(); }
    }
  }
  /**
   * Earlier versions of HDFS didn't persist block allocation to the edit log.
   * This makes sure that we can still load an edit log when the OP_CLOSE
   * is the opcode which adds all of the blocks. This is a regression
   * test for HDFS-2773.
   * This test uses a tarred pseudo-distributed cluster from Hadoop 1.0
   * which has a multi-block file. This is similar to the tests in
   * {@link TestDFSUpgradeFromImage} but none of those images include
   * a multi-block file.
   */
  @Test
  public void testEarlierVersionEditLog() throws Exception {
    final Configuration conf = new HdfsConfiguration();
    String tarFile = System.getProperty("test.cache.data", "build/test/cache")
      + "/" + HADOOP_1_0_MULTIBLOCK_TGZ;
    String testDir = PathUtils.getTestDirName(getClass());
    File dfsDir = new File(testDir, "image-1.0");
    if (dfsDir.exists() && !FileUtil.fullyDelete(dfsDir)) {
      throw new IOException("Could not delete dfs directory '" + dfsDir + "'");
    }
    FileUtil.unTar(new File(tarFile), new File(testDir));
    File nameDir = new File(dfsDir, "name");
    GenericTestUtils.assertExists(nameDir);
    File dataDir = new File(dfsDir, "data");
    GenericTestUtils.assertExists(dataDir);
    conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY, nameDir.getAbsolutePath());
    conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, dataDir.getAbsolutePath());
    // NOTE(review): numDataNodes is set twice; the later numDataNodes(1) wins,
    // so the initial numDataNodes(0) is redundant.
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0)
      .format(false)
      .manageDataDfsDirs(false)
      .manageNameDfsDirs(false)
      .numDataNodes(1)
      .startupOption(StartupOption.UPGRADE)
      .build();
    try {
      FileSystem fs = cluster.getFileSystem();
      Path testPath = new Path("/user/todd/4blocks");
      // Read it without caring about the actual data within - we just need
      // to make sure that the block states and locations are OK.
      DFSTestUtil.readFile(fs, testPath);
      // Ensure that we can append to it - if the blocks were in some funny
      // state we'd get some kind of issue here.
      FSDataOutputStream stm = fs.append(testPath);
      try {
        stm.write(1);
      } finally {
        IOUtils.closeStream(stm);
      }
    } finally {
      cluster.shutdown();
    }
  }
}
| |
/*
* Copyright 2014-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onlab.packet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.commons.lang.ArrayUtils;
import org.slf4j.Logger;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.HashMap;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import static org.onlab.packet.LLDPOrganizationalTLV.OUI_LENGTH;
import static org.onlab.packet.LLDPOrganizationalTLV.SUBTYPE_LENGTH;
import static org.slf4j.LoggerFactory.getLogger;
/**
* ONOS LLDP containing organizational TLV for ONOS device discovery.
*/
public class ONOSLLDP extends LLDP {
private static final Logger log = getLogger(ONOSLLDP.class);
public static final String DEFAULT_DEVICE = "INVALID";
public static final String DEFAULT_NAME = "ONOS Discovery";
protected static final byte NAME_SUBTYPE = 1;
protected static final byte DEVICE_SUBTYPE = 2;
protected static final byte DOMAIN_SUBTYPE = 3;
protected static final byte TIMESTAMP_SUBTYPE = 4;
protected static final byte SIG_SUBTYPE = 5;
private static final short NAME_LENGTH = OUI_LENGTH + SUBTYPE_LENGTH;
private static final short DEVICE_LENGTH = OUI_LENGTH + SUBTYPE_LENGTH;
private static final short DOMAIN_LENGTH = OUI_LENGTH + SUBTYPE_LENGTH;
private static final short TIMESTAMP_LENGTH = OUI_LENGTH + SUBTYPE_LENGTH;
private static final short SIG_LENGTH = OUI_LENGTH + SUBTYPE_LENGTH;
private final HashMap<Byte, LLDPOrganizationalTLV> opttlvs = Maps.newHashMap();
// TLV constants: type, size and subtype
// Organizationally specific TLV also have packet offset and contents of TLV
// header
private static final byte CHASSIS_TLV_TYPE = 1;
private static final byte CHASSIS_TLV_SIZE = 7;
private static final byte CHASSIS_TLV_SUBTYPE = 4;
private static final byte TTL_TLV_TYPE = 3;
private static final byte PORT_DESC_TLV_TYPE = 4;
private final byte[] ttlValue = new byte[] {0, 0x78};
// Only needs to be accessed from LinkProbeFactory.
// Only needs to be accessed from LinkProbeFactory.
public ONOSLLDP(byte... subtype) {
    super();
    // Create one organizational TLV per requested subtype.
    for (byte st : subtype) {
        opttlvs.put(st, new LLDPOrganizationalTLV());
    }
    // guarantee the following (name and device) TLVs exist
    opttlvs.putIfAbsent(NAME_SUBTYPE, new LLDPOrganizationalTLV());
    opttlvs.putIfAbsent(DEVICE_SUBTYPE, new LLDPOrganizationalTLV());
    setName(DEFAULT_NAME);
    setDevice(DEFAULT_DEVICE);
    // Publish the TLV map into the underlying LLDP frame and set the fixed
    // two-byte TTL (0x0078 = 120).
    setOptionalTLVList(Lists.newArrayList(opttlvs.values()));
    setTtl(new LLDPTLV().setType(TTL_TLV_TYPE)
                   .setLength((short) ttlValue.length)
                   .setValue(ttlValue));
}
// Wraps a parsed LLDP frame. Note this is a shallow copy of the TLV references
// and the opttlvs map is intentionally left empty: accessors on a wrapped
// instance go through getOptionalTLVList(), not the map.
private ONOSLLDP(LLDP lldp) {
    this.portId = lldp.getPortId();
    this.chassisId = lldp.getChassisId();
    this.ttl = lldp.getTtl();
    this.optionalTLVList = lldp.getOptionalTLVList();
}
// Populates the pre-created name TLV (present for every locally-built probe).
// NOTE(review): length is computed from String.length() (UTF-16 code units),
// which matches the encoded byte count only for ASCII names — confirm inputs.
public void setName(String name) {
    LLDPOrganizationalTLV nametlv = opttlvs.get(NAME_SUBTYPE);
    nametlv.setLength((short) (name.length() + NAME_LENGTH));
    nametlv.setInfoString(name);
    nametlv.setSubType(NAME_SUBTYPE);
    nametlv.setOUI(MacAddress.ONOS.oui());
}
// Populates the pre-created device TLV; same String.length() caveat as setName.
public void setDevice(String device) {
    LLDPOrganizationalTLV devicetlv = opttlvs.get(DEVICE_SUBTYPE);
    devicetlv.setInfoString(device);
    devicetlv.setLength((short) (device.length() + DEVICE_LENGTH));
    devicetlv.setSubType(DEVICE_SUBTYPE);
    devicetlv.setOUI(MacAddress.ONOS.oui());
}
// Populates the domain TLV if one was requested at construction time;
// silently ignored otherwise.
public void setDomainInfo(String domainId) {
    LLDPOrganizationalTLV domaintlv = opttlvs.get(DOMAIN_SUBTYPE);
    if (domaintlv == null) {
        // maybe warn people not to set this if remote probes aren't.
        return;
    }
    domaintlv.setInfoString(domainId);
    domaintlv.setLength((short) (domainId.length() + DOMAIN_LENGTH));
    domaintlv.setSubType(DOMAIN_SUBTYPE);
    domaintlv.setOUI(MacAddress.ONOS.oui());
}
// Encodes the chassis ID as subtype byte 4 (MAC address) followed by the
// 6-byte MAC derived from the chassis value (CHASSIS_TLV_SIZE = 1 + 6 = 7).
public void setChassisId(final ChassisId chassisId) {
    MacAddress chassisMac = MacAddress.valueOf(chassisId.value());
    byte[] chassis = ArrayUtils.addAll(new byte[] {CHASSIS_TLV_SUBTYPE},
                                       chassisMac.toBytes());
    LLDPTLV chassisTLV = new LLDPTLV();
    chassisTLV.setLength(CHASSIS_TLV_SIZE);
    chassisTLV.setType(CHASSIS_TLV_TYPE);
    chassisTLV.setValue(chassis);
    this.setChassisId(chassisTLV);
}
// Encodes the port as a "port component" subtype followed by the decimal
// port number rendered as UTF-8 text (read back by getPort()).
public void setPortId(final int portNumber) {
    byte[] port = ArrayUtils.addAll(new byte[] {PORT_TLV_COMPONENT_SUBTYPE},
                                    String.valueOf(portNumber).getBytes(StandardCharsets.UTF_8));
    LLDPTLV portTLV = new LLDPTLV();
    portTLV.setLength((short) port.length);
    portTLV.setType(PORT_TLV_TYPE);
    portTLV.setValue(port);
    this.setPortId(portTLV);
}
// Encodes the port as an "interface name" subtype followed by the UTF-8 name
// (read back by getPortNameString()).
public void setPortName(final String portName) {
    byte[] port = ArrayUtils.addAll(new byte[] {PORT_TLV_INTERFACE_NAME_SUBTYPE},
                                    portName.getBytes(StandardCharsets.UTF_8));
    LLDPTLV portTLV = new LLDPTLV();
    portTLV.setLength((short) port.length);
    portTLV.setType(PORT_TLV_TYPE);
    portTLV.setValue(port);
    this.setPortId(portTLV);
}
// Stores the timestamp as 8 big-endian bytes in the timestamp TLV; silently
// ignored unless that TLV was requested at construction time.
public void setTimestamp(long timestamp) {
    LLDPOrganizationalTLV tmtlv = opttlvs.get(TIMESTAMP_SUBTYPE);
    if (tmtlv == null) {
        return;
    }
    tmtlv.setInfoString(ByteBuffer.allocate(8).putLong(timestamp).array());
    tmtlv.setLength((short) (8 + TIMESTAMP_LENGTH));
    tmtlv.setSubType(TIMESTAMP_SUBTYPE);
    tmtlv.setOUI(MacAddress.ONOS.oui());
}
// Stores the signature bytes in the signature TLV; silently ignored unless
// that TLV was requested at construction time.
public void setSig(byte[] sig) {
    LLDPOrganizationalTLV sigtlv = opttlvs.get(SIG_SUBTYPE);
    if (sigtlv == null) {
        return;
    }
    sigtlv.setInfoString(sig);
    sigtlv.setLength((short) (sig.length + SIG_LENGTH));
    sigtlv.setSubType(SIG_SUBTYPE);
    sigtlv.setOUI(MacAddress.ONOS.oui());
}
// Scans the optional TLV list for the first organizational TLV with the name
// subtype; returns null if absent.
public LLDPOrganizationalTLV getNameTLV() {
    for (LLDPTLV tlv : this.getOptionalTLVList()) {
        if (tlv.getType() == LLDPOrganizationalTLV.ORGANIZATIONAL_TLV_TYPE) {
            LLDPOrganizationalTLV orgTLV = (LLDPOrganizationalTLV) tlv;
            if (orgTLV.getSubType() == NAME_SUBTYPE) {
                return orgTLV;
            }
        }
    }
    return null;
}
// As getNameTLV(), but for the device subtype.
public LLDPOrganizationalTLV getDeviceTLV() {
    for (LLDPTLV tlv : this.getOptionalTLVList()) {
        if (tlv.getType() == LLDPOrganizationalTLV.ORGANIZATIONAL_TLV_TYPE) {
            LLDPOrganizationalTLV orgTLV = (LLDPOrganizationalTLV) tlv;
            if (orgTLV.getSubType() == DEVICE_SUBTYPE) {
                return orgTLV;
            }
        }
    }
    return null;
}
// As getNameTLV(), but for the timestamp subtype.
public LLDPOrganizationalTLV getTimestampTLV() {
    for (LLDPTLV tlv : this.getOptionalTLVList()) {
        if (tlv.getType() == LLDPOrganizationalTLV.ORGANIZATIONAL_TLV_TYPE) {
            LLDPOrganizationalTLV orgTLV = (LLDPOrganizationalTLV) tlv;
            if (orgTLV.getSubType() == TIMESTAMP_SUBTYPE) {
                return orgTLV;
            }
        }
    }
    return null;
}
// As getNameTLV(), but for the signature subtype.
public LLDPOrganizationalTLV getSigTLV() {
    for (LLDPTLV tlv : this.getOptionalTLVList()) {
        if (tlv.getType() == LLDPOrganizationalTLV.ORGANIZATIONAL_TLV_TYPE) {
            LLDPOrganizationalTLV orgTLV = (LLDPOrganizationalTLV) tlv;
            if (orgTLV.getSubType() == SIG_SUBTYPE) {
                return orgTLV;
            }
        }
    }
    return null;
}
/**
 * Gets the TLV associated with remote probing. This TLV will be null if
 * remote probing is disabled.
 *
 * @return A TLV containing domain ID, or null.
 */
public LLDPOrganizationalTLV getDomainTLV() {
    for (LLDPTLV tlv : this.getOptionalTLVList()) {
        if (tlv.getType() == LLDPOrganizationalTLV.ORGANIZATIONAL_TLV_TYPE) {
            LLDPOrganizationalTLV orgTLV = (LLDPOrganizationalTLV) tlv;
            if (orgTLV.getSubType() == DOMAIN_SUBTYPE) {
                return orgTLV;
            }
        }
    }
    return null;
}
/**
 * Gets the first port-description TLV, logging an error and returning
 * null when none is present.
 *
 * @return the port description TLV, or null.
 */
public LLDPTLV getPortDescTLV() {
    for (LLDPTLV tlv : this.getOptionalTLVList()) {
        if (tlv.getType() == PORT_DESC_TLV_TYPE) {
            return tlv;
        }
    }
    log.error("Cannot find the port description tlv type.");
    return null;
}
// Decodes the name TLV payload as UTF-8 text, or null when the TLV is absent.
public String getNameString() {
    final LLDPOrganizationalTLV nameTlv = getNameTLV();
    return nameTlv == null ? null : new String(nameTlv.getInfoString(), StandardCharsets.UTF_8);
}
// Decodes the device TLV payload as UTF-8 text, or null when the TLV is absent.
public String getDeviceString() {
    final LLDPOrganizationalTLV deviceTlv = getDeviceTLV();
    return deviceTlv == null ? null : new String(deviceTlv.getInfoString(), StandardCharsets.UTF_8);
}
// Decodes the domain TLV payload as UTF-8 text, or null when the TLV is absent.
public String getDomainString() {
    final LLDPOrganizationalTLV domainTlv = getDomainTLV();
    return domainTlv == null ? null : new String(domainTlv.getInfoString(), StandardCharsets.UTF_8);
}
// Decodes the port-description TLV value as UTF-8 text, or null when absent.
public String getPortDescString() {
    final LLDPTLV portDescTlv = getPortDescTLV();
    return portDescTlv == null ? null : new String(portDescTlv.getValue(), StandardCharsets.UTF_8);
}
// Parses the decimal port number from a "port component" port-ID TLV;
// returns -1 when the port ID uses a different subtype.
public Integer getPort() {
    final ByteBuffer buffer = ByteBuffer.wrap(this.getPortId().getValue());
    final byte subtype = buffer.get();
    if (subtype != PORT_TLV_COMPONENT_SUBTYPE) {
        return -1;
    }
    final String digits =
            new String(buffer.array(), buffer.position(), buffer.remaining(), StandardCharsets.UTF_8);
    return Integer.parseInt(digits);
}
// Extracts the interface name from an "interface name" port-ID TLV;
// returns null when the port ID uses a different subtype.
public String getPortNameString() {
    final ByteBuffer buffer = ByteBuffer.wrap(this.getPortId().getValue());
    final byte subtype = buffer.get();
    if (subtype != PORT_TLV_INTERFACE_NAME_SUBTYPE) {
        return null;
    }
    return new String(buffer.array(), buffer.position(), buffer.remaining(), StandardCharsets.UTF_8);
}
/**
 * Decodes the chassis ID TLV as a MAC address.
 *
 * <p>Fix: the original allocated the result array with {@code remaining()}
 * but always copied {@code MAC_ADDRESS_LENGTH} bytes, so a chassis value that
 * was not exactly one MAC long either threw or produced a wrongly sized array.
 * The array is now sized to a MAC address and short values are rejected.
 *
 * @return the chassis MAC, or {@link MacAddress#NONE} when the TLV does not
 *         carry a well-formed MAC-address chassis ID.
 */
public MacAddress getChassisIdByMac() {
    final ByteBuffer chassisBB = ByteBuffer.wrap(this.getChassisId().getValue());
    final byte type = chassisBB.get();
    if (type != CHASSIS_TLV_SUBTYPE) {
        return MacAddress.NONE;
    }
    // Guard against truncated TLVs before copying a fixed-length MAC.
    if (chassisBB.remaining() < MacAddress.MAC_ADDRESS_LENGTH) {
        return MacAddress.NONE;
    }
    final byte[] mac = new byte[MacAddress.MAC_ADDRESS_LENGTH];
    System.arraycopy(chassisBB.array(), chassisBB.position(), mac, 0,
                     MacAddress.MAC_ADDRESS_LENGTH);
    return new MacAddress(mac);
}
// Reads the TTL TLV value as a big-endian 16-bit seconds count.
public short getTtlBySeconds() {
    return ByteBuffer.wrap(this.getTtl().getValue()).getShort();
}
/**
 * Reads the timestamp TLV as a big-endian 64-bit value.
 *
 * @return the timestamp, or 0 when the packet carries no timestamp TLV
 */
public long getTimestamp() {
    LLDPOrganizationalTLV tsTlv = getTimestampTLV();
    if (tsTlv == null) {
        return 0;
    }
    ByteBuffer buf = ByteBuffer.allocate(8);
    buf.put(tsTlv.getInfoString());
    buf.flip();
    return buf.getLong();
}
/**
 * Returns the raw signature bytes carried in the signature TLV.
 *
 * @return the signature bytes, or null when the TLV is absent
 */
public byte[] getSig() {
    LLDPOrganizationalTLV sigTlv = getSigTLV();
    return (sigTlv == null) ? null : sigTlv.getInfoString();
}
/**
 * Given an ethernet packet, determines if this is an LLDP from
 * ONOS and returns the device the LLDP came from.
 *
 * @param eth an ethernet packet
 * @return the LLDP packet, or null if it is not an ONOS-originated LLDP
 */
public static ONOSLLDP parseONOSLLDP(Ethernet eth) {
    // Only LLDP and BSN ethertypes can carry an ONOS probe.
    if (eth.getEtherType() != Ethernet.TYPE_LLDP
            && eth.getEtherType() != Ethernet.TYPE_BSN) {
        return null;
    }
    ONOSLLDP onosLldp = new ONOSLLDP((LLDP) eth.getPayload());
    // Only accept probes whose name TLV identifies ONOS as the sender.
    return ONOSLLDP.DEFAULT_NAME.equals(onosLldp.getNameString())
            ? onosLldp : null;
}
/**
 * Given an ethernet packet, returns the device the LLDP came from.
 *
 * @param eth an ethernet packet
 * @return the LLDP packet, or null (with an error logged) if the frame is
 *         neither an LLDP nor a BSN packet
 */
public static ONOSLLDP parseLLDP(Ethernet eth) {
    boolean isLldpFrame = eth.getEtherType() == Ethernet.TYPE_LLDP
            || eth.getEtherType() == Ethernet.TYPE_BSN;
    if (isLldpFrame) {
        return new ONOSLLDP((LLDP) eth.getPayload());
    }
    log.error("Packet is not the LLDP or BSN.");
    return null;
}
/**
 * Creates a link probe for link discovery/verification.
 * @deprecated since 1.15. Insecure, do not use.
 *
 * @param deviceId The device ID as a String
 * @param chassisId The chassis ID of the device
 * @param portNum Port number of port to send probe out of
 * @return ONOSLLDP probe message
 */
@Deprecated
public static ONOSLLDP onosLLDP(String deviceId, ChassisId chassisId, int portNum) {
    ONOSLLDP lldpProbe = new ONOSLLDP(NAME_SUBTYPE, DEVICE_SUBTYPE);
    lldpProbe.setPortId(portNum);
    lldpProbe.setDevice(deviceId);
    lldpProbe.setChassisId(chassisId);
    return lldpProbe;
}
/**
 * Creates a link probe for link discovery/verification.
 *
 * @param deviceId The device ID as a String
 * @param chassisId The chassis ID of the device
 * @param portNum Port number of port to send probe out of
 * @param secret LLDP secret; when null an unsigned probe is built
 * @return ONOSLLDP probe message, or null if the signature cannot be computed
 */
public static ONOSLLDP onosSecureLLDP(String deviceId, ChassisId chassisId, int portNum, String secret) {
    boolean secureMode = secret != null;
    // Secure probes carry two extra TLVs: a timestamp and a signature.
    ONOSLLDP probe = secureMode
            ? new ONOSLLDP(NAME_SUBTYPE, DEVICE_SUBTYPE, TIMESTAMP_SUBTYPE, SIG_SUBTYPE)
            : new ONOSLLDP(NAME_SUBTYPE, DEVICE_SUBTYPE);
    probe.setPortId(portNum);
    probe.setDevice(deviceId);
    probe.setChassisId(chassisId);
    if (secureMode) {
        /* Secure Mode */
        long ts = System.currentTimeMillis();
        probe.setTimestamp(ts);
        byte[] sig = createSig(deviceId, portNum, ts, secret);
        if (sig == null) {
            // HMAC machinery unavailable or key invalid; no probe is produced.
            return null;
        }
        probe.setSig(sig);
    }
    return probe;
}
/**
 * Creates a link probe for link discovery/verification.
 * @deprecated since 1.15. Insecure, do not use.
 *
 * @param deviceId The device ID as a String
 * @param chassisId The chassis ID of the device
 * @param portNum Port number of port to send probe out of
 * @param portDesc Port description of port to send probe out of
 * @return ONOSLLDP probe message
 */
@Deprecated
public static ONOSLLDP onosLLDP(String deviceId, ChassisId chassisId, int portNum, String portDesc) {
    // Build the base probe, then attach the optional port-description TLV.
    ONOSLLDP lldpProbe = onosLLDP(deviceId, chassisId, portNum);
    addPortDesc(lldpProbe, portDesc);
    return lldpProbe;
}
/**
 * Creates a link probe for link discovery/verification.
 *
 * @param deviceId The device ID as a String
 * @param chassisId The chassis ID of the device
 * @param portNum Port number of port to send probe out of
 * @param portDesc Port description of port to send probe out of
 * @param secret LLDP secret
 * @return ONOSLLDP probe message
 */
public static ONOSLLDP onosSecureLLDP(String deviceId, ChassisId chassisId, int portNum, String portDesc,
                                      String secret) {
    // Build the (possibly signed) base probe, then attach the description TLV.
    ONOSLLDP secureProbe = onosSecureLLDP(deviceId, chassisId, portNum, secret);
    addPortDesc(secureProbe, portDesc);
    return secureProbe;
}
/**
 * Attaches a port-description TLV to the probe, truncating the UTF-8
 * encoded description to the maximum TLV length if necessary.
 *
 * @param probe the probe to extend
 * @param portDesc the port description; ignored when null or empty
 */
private static void addPortDesc(ONOSLLDP probe, String portDesc) {
    if (portDesc == null || portDesc.isEmpty()) {
        return;
    }
    byte[] descBytes = portDesc.getBytes(StandardCharsets.UTF_8);
    if (descBytes.length > LLDPTLV.MAX_LENGTH) {
        // TLV length field cannot describe more than MAX_LENGTH bytes.
        descBytes = Arrays.copyOf(descBytes, LLDPTLV.MAX_LENGTH);
    }
    LLDPTLV portDescTlv = new LLDPTLV();
    portDescTlv.setType(PORT_DESC_TLV_TYPE);
    portDescTlv.setLength((short) descBytes.length);
    portDescTlv.setValue(descBytes);
    probe.addOptionalTLV(portDescTlv);
}
/**
 * Computes the HMAC-SHA256 signature over (deviceId, portNum, timestamp)
 * keyed by the shared secret.
 *
 * @param deviceId the device ID whose bytes are signed
 * @param portNum the port number, encoded as a big-endian 64-bit value
 * @param timestamp the probe timestamp, encoded as a big-endian 64-bit value
 * @param secret the shared secret used as the HMAC key
 * @return the 32-byte signature, or null if HMAC-SHA256 is unavailable or
 *         the key is invalid
 */
private static byte[] createSig(String deviceId, int portNum, long timestamp, String secret) {
    byte[] pnb = ByteBuffer.allocate(8).putLong(portNum).array();
    byte[] tmb = ByteBuffer.allocate(8).putLong(timestamp).array();
    try {
        SecretKeySpec signingKey = new SecretKeySpec(secret.getBytes(StandardCharsets.UTF_8), "HmacSHA256");
        Mac mac = Mac.getInstance("HmacSHA256");
        mac.init(signingKey);
        // Fix: use UTF-8 explicitly. The original used the platform default
        // charset here while the key above used UTF-8, so signatures could
        // differ across JVMs for non-ASCII device IDs.
        mac.update(deviceId.getBytes(StandardCharsets.UTF_8));
        mac.update(pnb);
        mac.update(tmb);
        return mac.doFinal();
    } catch (NoSuchAlgorithmException | InvalidKeyException e) {
        return null;
    }
}
/**
 * Recomputes the signature for the given fields and compares it against the
 * received one. The comparison examines every byte (no early exit), keeping
 * the check constant-time like the original loop.
 *
 * @param sig the received signature
 * @param deviceId the device ID the probe claims
 * @param portNum the port number the probe claims
 * @param timestamp the timestamp carried by the probe
 * @param secret the shared secret
 * @return true if the signature matches
 */
private static boolean verifySig(byte[] sig, String deviceId, int portNum, long timestamp, String secret) {
    byte[] expected = createSig(deviceId, portNum, timestamp, secret);
    if (expected == null || !ArrayUtils.isSameLength(expected, sig)) {
        return false;
    }
    int diff = 0;
    for (int i = 0; i < expected.length; i++) {
        diff |= sig[i] ^ expected[i];
    }
    return diff == 0;
}
/**
 * Verifies a received probe: checks its timestamp freshness and its
 * HMAC signature. A null secret disables verification entirely.
 *
 * @param probe the received probe
 * @param secret the shared secret, or null to accept any probe
 * @param maxDelay maximum accepted probe age in milliseconds
 * @return true if the probe is accepted
 */
public static boolean verify(ONOSLLDP probe, String secret, long maxDelay) {
    if (secret == null) {
        return true;
    }
    String deviceId = probe.getDeviceString();
    int portNum = probe.getPort();
    long timestamp = probe.getTimestamp();
    byte[] sig = probe.getSig();
    if (deviceId == null || sig == null) {
        return false;
    }
    // Fix: sample the clock once so both freshness bounds are checked
    // against the same instant (the original called currentTimeMillis()
    // twice, so the two comparisons could disagree).
    long now = System.currentTimeMillis();
    if (timestamp + maxDelay <= now || timestamp > now) {
        return false;
    }
    return verifySig(sig, deviceId, portNum, timestamp, secret);
}
}
| |
/*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.javascript.rhino.Node;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.logging.Logger;
/**
* Optimizes the order of compiler passes.
* @author nicksantos@google.com (Nick Santos)
*/
/**
 * Optimizes the order of compiler passes.
 * @author nicksantos@google.com (Nick Santos)
 */
class PhaseOptimizer implements CompilerPass {

  // This ordering is computed offline by running with compute_phase_ordering.
  @VisibleForTesting
  static final List<String> OPTIMAL_ORDER = ImmutableList.of(
      "removeUnreachableCode",
      "removeUnusedVars",
      "foldConstants",
      "deadAssignmentsElimination",
      "inlineVariables",
      "inlineFunctions",
      "removeUnusedPrototypeProperties",
      "minimizeExitPoints");

  // Upper bound on iterations of a fixed-point loop; exceeding it aborts
  // the compile with OPTIMIZE_LOOP_ERROR.
  static final int MAX_LOOPS = 100;
  static final String OPTIMIZE_LOOP_ERROR =
      "Fixed point loop exceeded the maximum number of iterations.";

  private static final Logger logger =
      Logger.getLogger(PhaseOptimizer.class.getName());

  // The ordered compile sequence. A fixed-point Loop counts as one entry.
  private List<CompilerPass> passes = Lists.newArrayList();

  private final AbstractCompiler compiler;
  private final PerformanceTracker tracker;

  // Records whether the AST changed recently; reset at the start of each
  // fixed-point round and consulted to decide whether to iterate again.
  private final CodeChangeHandler.RecentChange recentChange =
      new CodeChangeHandler.RecentChange();

  // Guards against running a fixed-point loop inside another one
  // (see LoopInternal.process).
  private boolean loopMutex = false;

  // Tracing state for the pass currently running; both are null between
  // passes (enforced by startPass/endPass preconditions).
  private Tracer currentTracer = null;
  private String currentPassName = null;

  // Optional development-time check run after every pass (see setSanityCheck).
  private PassFactory sanityCheck = null;

  // The following static properties are only used for computing optimal
  // phase orderings. They should not be touched by normal compiler runs.
  private static boolean randomizeLoops = false;
  private static List<List<String>> loopsRun = Lists.newArrayList();

  /**
   * @param compiler the compiler whose passes are being sequenced
   * @param tracker performance tracker for pass timing; may be null
   */
  PhaseOptimizer(AbstractCompiler compiler, PerformanceTracker tracker) {
    this.compiler = compiler;
    this.tracker = tracker;
    compiler.addChangeHandler(recentChange);
  }

  /**
   * Randomizes loops. This should only be used when computing optimal phase
   * orderings.
   */
  static void randomizeLoops() {
    randomizeLoops = true;
  }

  /**
   * Get the phase ordering of loops during this run.
   * Returns an empty list when the loops are not randomized.
   */
  static List<List<String>> getLoopsRun() {
    return loopsRun;
  }

  /**
   * Clears the phase ordering of loops during this run.
   */
  static void clearLoopsRun() {
    loopsRun.clear();
  }

  /**
   * Add the passes generated by the given factories to the compile sequence.
   *
   * Automatically pulls multi-run passes into fixed point loops. If there
   * are 2 or more multi-run passes in a row, they will run together in
   * the same fixed point loop. If A and B are in the same fixed point loop,
   * the loop will continue to run both A and B until both are finished
   * making changes.
   *
   * Other than that, the PhaseOptimizer is free to tweak the order and
   * frequency of multi-run passes in a fixed-point loop.
   */
  void consume(List<PassFactory> factories) {
    Loop currentLoop = new LoopInternal();
    boolean isCurrentLoopPopulated = false;
    for (PassFactory factory : factories) {
      if (factory.isOneTimePass()) {
        // A one-time pass ends the current run of multi-run passes: flush
        // the loop being accumulated (if any) before adding the pass.
        if (isCurrentLoopPopulated) {
          passes.add(currentLoop);
          currentLoop = new LoopInternal();
          isCurrentLoopPopulated = false;
        }
        addOneTimePass(factory);
      } else {
        // Consecutive multi-run passes accumulate into the same loop.
        currentLoop.addLoopedPass(factory);
        isCurrentLoopPopulated = true;
      }
    }
    // Flush a trailing loop that was never terminated by a one-time pass.
    if (isCurrentLoopPopulated) {
      passes.add(currentLoop);
    }
  }

  /**
   * Add the pass generated by the given factory to the compile sequence.
   * This pass will be run once.
   */
  void addOneTimePass(PassFactory factory) {
    passes.add(new PassFactoryDelegate(compiler, factory));
  }

  /**
   * Add a loop to the compile sequence. This loop will continue running
   * until the AST stops changing.
   * @return The loop structure. Pass suppliers should be added to the loop.
   */
  Loop addFixedPointLoop() {
    Loop loop = new LoopInternal();
    passes.add(loop);
    return loop;
  }

  /**
   * Adds a sanity checker to be run after every pass. Intended for development.
   */
  void setSanityCheck(PassFactory sanityCheck) {
    this.sanityCheck = sanityCheck;
  }

  /**
   * Run all the passes in the optimizer.
   */
  @Override
  public void process(Node externs, Node root) {
    for (CompilerPass pass : passes) {
      pass.process(externs, root);
      // Stop the whole sequence as soon as any pass produces halting errors.
      if (hasHaltingErrors()) {
        return;
      }
    }
  }

  /**
   * Marks the beginning of a pass.
   */
  private void startPass(String passName) {
    // Passes must not nest: the previous pass has to have ended.
    Preconditions.checkState(currentTracer == null && currentPassName == null);
    currentPassName = passName;
    currentTracer = newTracer(passName);
  }

  /**
   * Marks the end of a pass.
   */
  private void endPass(Node externs, Node root) {
    Preconditions.checkState(currentTracer != null && currentPassName != null);
    // Capture the name before clearing it, so the error message below can
    // still identify which pass failed the sanity check.
    String passToCheck = currentPassName;
    try {
      stopTracer(currentTracer, currentPassName);
      currentPassName = null;
      currentTracer = null;
      maybeSanityCheck(externs, root);
    } catch (Exception e) {
      // TODO(johnlenz): Remove this once the normalization checks report
      // errors instead of exceptions.
      throw new RuntimeException("Sanity check failed for " + passToCheck, e);
    }
  }

  /**
   * Runs the sanity check if it is available.
   */
  void maybeSanityCheck(Node externs, Node root) {
    if (sanityCheck != null) {
      sanityCheck.create(compiler).process(externs, root);
    }
  }

  private boolean hasHaltingErrors() {
    return compiler.hasHaltingErrors();
  }

  /**
   * Returns a new tracer for the given pass name.
   */
  private Tracer newTracer(String passName) {
    String comment = passName +
        (recentChange.hasCodeChanged() ? " on recently changed AST" : "");
    if (tracker != null) {
      tracker.recordPassStart(passName);
    }
    return new Tracer("JSCompiler", comment);
  }

  private void stopTracer(Tracer t, String passName) {
    long result = t.stop();
    if (tracker != null) {
      tracker.recordPassStop(passName, result);
    }
  }

  /**
   * A single compiler pass. Wraps the pass body with start/end bookkeeping
   * (tracing, and the optional sanity check).
   */
  private abstract class NamedPass implements CompilerPass {
    private final String name;

    NamedPass(String name) {
      this.name = name;
    }

    @Override
    public void process(Node externs, Node root) {
      logger.info(name);
      startPass(name);
      processInternal(externs, root);
      endPass(externs, root);
    }

    // Subclasses supply the actual pass body here.
    abstract void processInternal(Node externs, Node root);
  }

  /**
   * Delegates to a PassFactory for processing.
   */
  private class PassFactoryDelegate extends NamedPass {
    private final AbstractCompiler myCompiler;
    private final PassFactory factory;

    private PassFactoryDelegate(
        AbstractCompiler myCompiler, PassFactory factory) {
      super(factory.getName());
      this.myCompiler = myCompiler;
      this.factory = factory;
    }

    @Override
    void processInternal(Node externs, Node root) {
      // A fresh pass instance is created for every run.
      factory.create(myCompiler).process(externs, root);
    }
  }

  /**
   * Runs a set of compiler passes until they reach a fixed point.
   */
  static abstract class Loop implements CompilerPass {
    abstract void addLoopedPass(PassFactory factory);
  }

  /**
   * Runs a set of compiler passes until they reach a fixed point.
   *
   * Notice that this is a non-static class, because it includes the closure
   * of PhaseOptimizer.
   */
  private class LoopInternal extends Loop {
    private final List<NamedPass> myPasses = Lists.newArrayList();
    // Names already in this loop; used to reject duplicate registrations.
    private final Set<String> myNames = Sets.newHashSet();

    @Override
    void addLoopedPass(PassFactory factory) {
      String name = factory.getName();
      Preconditions.checkArgument(
          !myNames.contains(name),
          "Already a pass with name '" + name + "' in this loop");
      myNames.add(factory.getName());
      myPasses.add(new PassFactoryDelegate(compiler, factory));
    }

    /**
     * Gets the pass names, in order.
     */
    private List<String> getPassOrder() {
      List<String> order = Lists.newArrayList();
      for (NamedPass pass : myPasses) {
        order.add(pass.name);
      }
      return order;
    }

    @Override
    public void process(Node externs, Node root) {
      // loopMutex forbids a fixed-point loop from running while another
      // one is already in progress on this PhaseOptimizer.
      Preconditions.checkState(!loopMutex, "Nested loops are forbidden");
      loopMutex = true;
      if (randomizeLoops) {
        randomizePasses();
      } else {
        optimizePasses();
      }
      try {
        // TODO(nicksantos): Use a smarter algorithm that dynamically adjusts
        // the order that passes are run in.
        int count = 0;
        out: do {
          if (count++ > MAX_LOOPS) {
            compiler.throwInternalError(OPTIMIZE_LOOP_ERROR, null);
          }
          recentChange.reset(); // reset before this round of optimizations
          for (CompilerPass pass : myPasses) {
            pass.process(externs, root);
            if (hasHaltingErrors()) {
              break out;
            }
          }
          // Iterate until a full round makes no AST changes (fixed point).
        } while (recentChange.hasCodeChanged() && !hasHaltingErrors());
        if (randomizeLoops) {
          loopsRun.add(getPassOrder());
        }
      } finally {
        loopMutex = false;
      }
    }

    /** Re-arrange the passes in a random order. */
    private void randomizePasses() {
      List<NamedPass> mixedupPasses = Lists.newArrayList();
      Random random = new Random();
      while (myPasses.size() > 0) {
        mixedupPasses.add(
            myPasses.remove(random.nextInt(myPasses.size())));
      }
      myPasses.addAll(mixedupPasses);
    }

    /** Re-arrange the passes in an optimal order. */
    private void optimizePasses() {
      // It's important that this ordering is deterministic, so that
      // multiple compiles with the same input produce exactly the same
      // results.
      //
      // To do this, grab any passes we recognize, and move them to the end
      // in an "optimal" order.
      List<NamedPass> optimalPasses = Lists.newArrayList();
      for (String passName : OPTIMAL_ORDER) {
        for (NamedPass pass : myPasses) {
          if (pass.name.equals(passName)) {
            optimalPasses.add(pass);
            break;
          }
        }
      }
      myPasses.removeAll(optimalPasses);
      myPasses.addAll(optimalPasses);
    }
  }
}
| |
/**
*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*
* @author Wei Zhang, Language Technology Institute, School of Computer Science, Carnegie-Mellon University.
* email: wei.zhang@cs.cmu.edu
*
*/
package edu.cmu.geolocator.parser.spanish;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import com.csvreader.CsvReader;
import com.csvreader.CsvWriter;

import edu.cmu.geolocator.common.StringUtil;
import edu.cmu.geolocator.model.LocEntityAnnotation;
import edu.cmu.geolocator.model.Sentence;
import edu.cmu.geolocator.model.Token;
import edu.cmu.geolocator.model.Tweet;
import edu.cmu.geolocator.nlp.MisspellParser;
import edu.cmu.geolocator.nlp.NLPFactory;
import edu.cmu.geolocator.nlp.ner.FeatureExtractor.FeatureGenerator;
import edu.cmu.geolocator.nlp.tokenizer.EuroLangTwokenizer;
import edu.cmu.geolocator.parser.ParserFactory;
import edu.cmu.geolocator.parser.TPParser;
import edu.cmu.geolocator.parser.english.EnglishRuleToponymParser;
import edu.cmu.geolocator.parser.utils.ParserUtils;
import edu.cmu.geolocator.resource.ResourceFactory;
import edu.cmu.geolocator.resource.gazindexing.CollaborativeIndex.CollaborativeIndex;
/**
 * Rule-based toponym (place-name) parser for Spanish tweets. Tokenizes and
 * POS-tags the tweet, then finds location entities two ways: country names
 * matched directly over n-grams, and gazetteer entries matched inside
 * POS-pattern chunks.
 */
public class SpanishRuleToponymParser implements TPParser {

    static final int GRAM = 5;
    // NOTE(review): the four fields below are never read or written in this
    // class; they look like leftovers from a sibling parser. Kept for
    // interface compatibility.
    static String[] ngram;
    static String[] posngram;
    static boolean allstopwords;
    static ArrayList<String> results;

    // POS-tag patterns (one char per token's POS initial) that delimit
    // candidate toponym chunks, e.g. determiner+noun, adjective/verb/noun.
    static String p1 = "d[an]";
    static String p2 = "[avn]";
    static String p3 = "[an][an]";
    static String p4 = "[an][an][an]";
    static String[] patterns = { p1, p2, p3, p4 };
    static Pattern gazpattern;

    /*
     * Gaz Matching. The parser only lookup the token array. This parser does not tokenize the input
     * string for efficiency reasons.
     */
    public SpanishRuleToponymParser() {
    }

    // Accumulates the location entities found by the current parse() call.
    List<LocEntityAnnotation> les;

    /**
     * Parses a tweet and returns the location entities found in it.
     *
     * @param tweet the tweet to parse; its sentence is tokenized and tagged in place
     * @return the reduced list of location entities, or null for an empty tweet
     */
    public List<LocEntityAnnotation> parse(Tweet tweet) {
        les = new ArrayList<LocEntityAnnotation>();
        if (tweet == null || tweet.getSentence() == null
                || tweet.getSentence().getSentenceString() == null
                || tweet.getSentence().getSentenceString().length() == 0)
            return null;
        Sentence tweetSent = tweet.getSentence();
        EuroLangTwokenizer.tokenize(tweetSent);
        NLPFactory.getEsPosTagger().tag(tweetSent);
        Token[] tokens = tweetSent.getTokens();
        // Fix: tokens.toString() printed the array's identity hash, not its
        // contents; Arrays.toString renders each token.
        System.out.println("Tokenization : \n" + Arrays.toString(tokens));
        // Strip the leading '#' from hashtags so they can match gazetteer entries.
        for (Token t : tokens) {
            if (t.getToken().startsWith("#"))
                t.setToken(t.getToken().substring(1));
        }
        // Build the POS string: one character (the POS initial) per token.
        String posstr = "";
        for (int i = 0; i < tweetSent.tokenLength(); i++)
            posstr += tweetSent.getTokens()[i].getPOS().charAt(0);
        System.out.println("POS : \n" + posstr);
        // convert Tokens to Strings
        String[] toks = new String[tokens.length];
        for (int i = 0; i < toks.length; i++)
            toks[i] = tokens[i].getToken();
        Token[] countryToks, topoToks;// store the token array variable inside.
        // match countries without considering the part of speech.
        for (int i = 1; i < 5; i++) {
            String[] igrams = StringUtil.constructgrams(toks, i, true);
            if (igrams == null)
                continue;
            // current ngram starting position is j.
            // length is i, cause it's i-gram.
            for (int j = 0; j < igrams.length; j++) {
                if (ParserUtils.isCountry(igrams[j])) {
                    String[] str = igrams[j].split(" ");
                    int min = i;// minimal dimension for the igram
                    if (str.length != i) {
                        System.out.println("dimension not agree when unwrapping ngram in enTopoParser.");
                        System.out.println("Proceed anyway. Discard the rest part in ngram.");
                        min = Math.min(i, str.length);// if demension not agree, choose smaller one.
                    }
                    countryToks = new Token[min];
                    for (int k = 0; k < min; k++) {
                        countryToks[k] = new Token(str[k], tweet.getId(), j);
                    }
                    les.add(new LocEntityAnnotation(j, j + min - 1, "tp", countryToks));
                }
            }
        }
        // Match gazetteer entries inside chunks delimited by the POS patterns.
        for (int p = 0; p < patterns.length; p++) {
            gazpattern = Pattern.compile(patterns[p]);
            Matcher gazmatcher = gazpattern.matcher(posstr);
            while (gazmatcher.find()) {
                int n = gazmatcher.end() - gazmatcher.start();
                String[] subtoks = new String[n];
                int _offset = gazmatcher.start();
                for (int i = gazmatcher.start(); i < gazmatcher.end(); i++) {
                    subtoks[i - gazmatcher.start()] = tokens[i].getToken();
                }
                for (int i = 1; i < n + 1; i++) {
                    String[] igrams = StringUtil.constructgrams(subtoks, i, true);
                    for (int j = 0; j < igrams.length; j++) {
                        // match gaz entry in the chunk.
                        if (ResourceFactory.getClbIndex().inIndex(igrams[j].trim())) {
                            // Skip Spanish stopwords/filter words that happen
                            // to collide with gazetteer entries.
                            if (ParserUtils.isEsFilterword(igrams[j].trim())
                                    || igrams[j].trim().equalsIgnoreCase("el")
                                    || igrams[j].trim().equalsIgnoreCase("ha")) {
                                continue;
                            }
                            String[] str = igrams[j].split(" ");
                            int min = i;
                            if (str.length != i) {
                                System.out.println("dimension not agree when unwrapping ngram in enTopoParser.");
                                System.out.println("Proceed anyway. Discard the rest part in ngram.");
                                // Fix: the original computed Math.min(i, str.length)
                                // and discarded the result, so `min` kept the
                                // (possibly too large) value i and the Token loop
                                // below could throw ArrayIndexOutOfBoundsException.
                                min = Math.min(i, str.length);
                            }
                            topoToks = new Token[min];
                            for (int k = 0; k < min; k++) {
                                topoToks[k] = new Token(str[k], tweet.getId(), _offset + j);
                            }
                            // Use min (not i) for the span end, consistent with
                            // the country loop above; identical when lengths agree.
                            les.add(new LocEntityAnnotation(_offset + j, _offset + j + min - 1, "tp", topoToks));
                        }
                    }
                }
            }
        }
        return ParserUtils.ResultReduce(les, true);
    }

    private static SpanishRuleToponymParser etpparser;

    /**
     * Returns the shared parser instance, creating it on first use.
     * NOTE(review): not thread-safe — two threads may each build an instance.
     */
    public static SpanishRuleToponymParser getInstance() {
        if (etpparser == null)
            try {
                etpparser = new SpanishRuleToponymParser();
            } catch (Exception e) {
                e.printStackTrace();
            }
        return etpparser;
    }

    /**
     * Interactive driver: reads lines from stdin and prints the parsed
     * location entities with the elapsed time.
     */
    public static void main(String argv[]) throws IOException {
        Tweet t = new Tweet();
        BufferedReader s = new BufferedReader(new InputStreamReader(System.in, "utf-8"));
        System.out.println(">");
        while (true) {
            String ss = s.readLine();
            // Fix: readLine() returns null on EOF; the original then threw
            // NullPointerException on ss.length(). Exit cleanly instead.
            if (ss == null)
                break;
            if (ss.length() == 0)
                continue;
            t.setSentence(ss);
            double stime = System.currentTimeMillis();
            List<LocEntityAnnotation> matches = ParserFactory.getEsToponymParser().parse(t);
            double etime = System.currentTimeMillis();
            System.out.println(matches);
            System.out.println(etime - stime + "\n>");
        }
    }
}
| |
package se.sitic.megatron.core;
import java.util.Calendar;
import java.util.Date;
import org.joda.time.DateTime;
import org.joda.time.DateTimeConstants;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import se.sitic.megatron.util.DateUtil;
import se.sitic.megatron.util.StringUtil;
/**
* Time period with a start- and end-time.
*/
/**
 * Time period with a start- and end-time.
 */
public class TimePeriod {
    // Format selectors for getFormattedPeriodString / get*Weekday.
    public static final int SHORT_FORMAT = 1;
    public static final int LONG_FORMAT = 2;
    public static final int DATE_FORMAT = 4;

    // The raw period specification, e.g. "w32", "2008-32", or
    // "2008-08-20--2008-08-25" (week prefix stripped after parsing).
    private String periodString;
    // True when the period was specified as an ISO week ("w..." form).
    private boolean weekFormat;
    private Date startDate;
    private Date endDate;

    /**
     * Constructs a time period by parsing specified string.
     */
    public TimePeriod(String periodString) throws MegatronException {
        this.periodString = periodString;
        parsePeriod();
    }

    /**
     * Constructs a time period from specified dates. Result will not be in
     * week format.
     */
    public TimePeriod(Date startDate, Date endDate) throws MegatronException {
        if ((startDate == null) || (endDate == null)) {
            throw new MegatronException("Start date or end date is null.");
        }
        this.startDate = startDate;
        this.endDate = endDate;
        // NOTE(review): calls the overridable toString() from a constructor;
        // a subclass overriding toString() would see a partially built object.
        this.periodString = toString();
        this.weekFormat = false;
    }

    /**
     * Creates a time period before specified period. The duration for the
     * returned time period will be same as the specified.
     */
    public static TimePeriod createPreviousPeriod(TimePeriod currentPeriod) throws MegatronException {
        Date orgStartDate = currentPeriod.getStartDate();
        Date orgEndDate = currentPeriod.getEndDate();
        long periodDiff = orgEndDate.getTime() - orgStartDate.getTime();
        DateTime startDateTime = new DateTime(orgStartDate);
        DateTime endDateTime = new DateTime(orgEndDate);
        // Shift both endpoints back by the period's own length, then snap
        // each to midnight of its day.
        startDateTime = startDateTime.minus(periodDiff);
        endDateTime = endDateTime.minus(periodDiff);
        Date startDate = startDateTime.toDateMidnight().toDate();
        Date endDate = endDateTime.toDateMidnight().toDate();
        // set end time to 23:59:59
        endDate.setTime(endDate.getTime() - 1000L);
        return new TimePeriod(startDate, endDate);
    }

    public String getPeriodString() {
        return periodString;
    }

    /**
     * Formats the period according to {@code format} (SHORT_FORMAT,
     * LONG_FORMAT, or DATE_FORMAT). Week periods render as "w.YYYY-WW",
     * optionally followed by the date range in LONG_FORMAT.
     */
    public String getFormattedPeriodString(int format) {
        StringBuilder result = new StringBuilder(128);
        String startDateStr = DateUtil.formatDateTime(DateUtil.DATE_FORMAT, getStartDate());
        String endDateStr = DateUtil.formatDateTime(DateUtil.DATE_FORMAT, getEndDate());
        if (format == DATE_FORMAT) {
            result.append(startDateStr).append(" - ").append(endDateStr);
        } else {
            if (weekFormat) {
                result.append("w.").append(periodString);
            }
            if (!weekFormat || (format == LONG_FORMAT)) {
                if (weekFormat && (format == LONG_FORMAT)) {
                    result.append(" (").append(startDateStr).append(" - ").append(endDateStr).append(")");
                } else {
                    result.append(startDateStr).append(" - ").append(endDateStr);
                }
            }
        }
        return result.toString();
    }

    public boolean isWeekFormat() {
        return weekFormat;
    }

    public Date getStartDate() {
        return startDate;
    }

    public Date getEndDate() {
        return endDate;
    }

    // Weekday name of the start date; SHORT_FORMAT gives the abbreviation.
    public String getStartWeekday(int format) {
        String formatStr = (format == SHORT_FORMAT) ? "EEE" : "EEEE";
        return DateUtil.formatDateTime(formatStr, startDate);
    }

    // Weekday name of the end date; SHORT_FORMAT gives the abbreviation.
    public String getEndWeekday(int format) {
        String formatStr = (format == SHORT_FORMAT) ? "EEE" : "EEEE";
        return DateUtil.formatDateTime(formatStr, endDate);
    }

    @Override
    public String toString() {
        StringBuilder result = new StringBuilder(32);
        result.append(DateUtil.formatDateTime(DateUtil.DATE_TIME_FORMAT_WITH_SECONDS, startDate));
        result.append("--");
        result.append(DateUtil.formatDateTime(DateUtil.DATE_TIME_FORMAT_WITH_SECONDS, endDate));
        return result.toString();
    }

    /**
     * Parses {@code periodString} into startDate/endDate. Accepts an ISO
     * week ("w32" or "w2008-32") or an explicit date range
     * ("2008-08-20--2008-08-25").
     *
     * @throws MegatronException if the string is null, empty, malformed,
     *         or describes a period whose start is after its end
     */
    private void parsePeriod() throws MegatronException {
        // The JDK cannot parse "year of week", but Joda can.
        // For example, "2008-01" does not work but "2008-02" does.
        // More info:
        // http://bugs.sun.com/bugdatabase/view_bug.do;jsessionid=bff79f21455177ffffffffc19116dffe41396?bug_id=4267450
        // http://joda-time.sourceforge.net/
        if (StringUtil.isNullOrEmpty(periodString)) {
            throw new MegatronException("Invalid period; null or empty: " + periodString);
        }
        DateTime startDateTime = null;
        DateTime endDateTime = null;
        // week format?
        String periodInUpperCase = periodString.toUpperCase();
        if (periodInUpperCase.startsWith("W")) {
            this.weekFormat = true;
            this.periodString = StringUtil.removePrefix(periodInUpperCase, "W");
            // Short format (w32)?
            if (!periodString.contains("-")) {
                // expand to full format (2008-32)
                String yearStr = Calendar.getInstance().get(Calendar.YEAR) + "-";
                periodString = yearStr + periodString;
            }
            try {
                // Turns out that Joda cannot parse "year of week" either, e.g.
                // "2009-01" works but not "2010-01". Use parseIsoWeek instead.
                // Joda 1.5.2 and 1.6 contains this bug.
                // DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-ww");
                // // Joda will ensure that week always start on a monday
                // startDateTime = fmt.parseDateTime(periodString);
                startDateTime = parseIsoWeek(periodString);
                endDateTime = startDateTime.plusDays(6);
            } catch (Exception e) {
                // UnsupportedOperationException, IllegalArgumentException
                String msg = "Cannot parse period in week-format: " + periodString;
                throw new MegatronException(msg, e);
            }
        } else {
            // period format (2008-08-20--2008-08-25)
            String[] headTail = StringUtil.splitHeadTail(periodString, "--", false);
            if (headTail == null || StringUtil.isNullOrEmpty(headTail[0]) || StringUtil.isNullOrEmpty(headTail[1])) {
                throw new MegatronException("Invalid period format: " + periodString);
            }
            try {
                DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-MM-dd");
                startDateTime = fmt.parseDateTime(headTail[0]);
                endDateTime = fmt.parseDateTime(headTail[1]);
            } catch (Exception e) {
                // UnsupportedOperationException, IllegalArgumentException
                String msg = "Cannot parse period: " + periodString;
                throw new MegatronException(msg, e);
            }
        }
        // Adjust for DST. Add 23:59:59 to end date.
        this.startDate = startDateTime.toDateMidnight().toDate();
        this.endDate = endDateTime.plusDays(1).toDateMidnight().toDate();
        this.endDate.setTime(this.endDate.getTime() - 1000L);
        if (startDate.after(endDate)) {
            throw new MegatronException("Invalid period; start-date is after end-date: " + periodString);
        }
    }

    /**
     * Returns date for monday in specified week.
     *
     * @param weekStr full week string, e.g. "2010-01" (which will return 2010-01-04).
     */
    private DateTime parseIsoWeek(String weekStr) throws Exception {
        DateTime result = null;
        // Split year and week
        String[] headTail = StringUtil.splitHeadTail(weekStr, "-", false);
        if ((headTail == null) || StringUtil.isNullOrEmpty(headTail[0]) || StringUtil.isNullOrEmpty(headTail[1]) || (headTail[0].length() != 4)) {
            throw new Exception("Invalid week string: " + weekStr);
        }
        // Get monday of week 1.
        // The first week of a year is the one that includes the first Thursday of the year.
        // http://www.fourmilab.ch/documents/calendar/
        // http://joda-time.sourceforge.net/cal_iso.html
        String day1InYear = headTail[0] + "-01-01";
        DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-MM-dd");
        result = fmt.parseDateTime(day1InYear);
        if (result.getDayOfWeek() <= DateTimeConstants.THURSDAY) {
            // Jan 1 falls in week 1: walk back to that week's Monday.
            while (result.getDayOfWeek() != DateTimeConstants.MONDAY) {
                result = result.minusDays(1);
            }
        } else {
            // Jan 1 falls in the previous year's last week: walk forward
            // to the Monday of week 1.
            while (result.getDayOfWeek() != DateTimeConstants.MONDAY) {
                result = result.plusDays(1);
            }
        }
        // Add week number
        int week = Integer.parseInt(headTail[1]);
        if ((week < 1) || (week > 53)) {
            throw new Exception("Invalid week string: " + weekStr);
        }
        result = result.plusDays(7*(week-1));
        return result;
    }
}
| |
// ========================================================================
// Copyright (c) 2000-2009 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
package org.eclipse.jetty.server.ssl;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.security.KeyStore;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.net.ssl.HandshakeCompletedEvent;
import javax.net.ssl.HandshakeCompletedListener;
import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLServerSocket;
import javax.net.ssl.SSLServerSocketFactory;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import org.eclipse.jetty.http.HttpSchemes;
import org.eclipse.jetty.http.security.Password;
import org.eclipse.jetty.io.EndPoint;
import org.eclipse.jetty.io.bio.SocketEndPoint;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.bio.SocketConnector;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.resource.Resource;
/* ------------------------------------------------------------ */
/**
* SSL Socket Connector.
*
* This specialization of SocketConnector is an abstract listener that can be used as the basis for a
* specific JSSE listener.
*
* The original of this class was heavily based on the work from Court Demas, which in turn is
* based on the work from Forge Research. Since JSSE, this class has evolved significantly from
* that early work.
*
* @org.apache.xbean.XBean element="sslSocketConnector" description="Creates an ssl socket connector"
*
*
*/
public class SslSocketConnector extends SocketConnector implements SslConnector
{
    /** Cipher suites removed from the enabled set ({@code null} = exclude none). */
    private String _excludeCipherSuites[] = null;
    /** Cipher suites force-enabled when supported ({@code null} = include none). */
    private String _includeCipherSuites[]=null;
    /** Resource path of the keystore. */
    private String _keystorePath=DEFAULT_KEYSTORE ;
    private String _keystoreType = "JKS"; // type of the key store
    /** Set to true if we require client certificate authentication. */
    private boolean _needClientAuth = false;
    private transient Password _password;
    private transient Password _keyPassword;
    private transient Password _trustPassword;
    private String _protocol= "TLS";
    private String _provider;
    private String _secureRandomAlgorithm; // cert algorithm
    private String _sslKeyManagerFactoryAlgorithm = DEFAULT_KEYSTORE_ALGORITHM;
    private String _sslTrustManagerFactoryAlgorithm = DEFAULT_TRUSTSTORE_ALGORITHM;
    private String _truststorePath;
    private String _truststoreType = "JKS"; // type of the trust store
    /** Set to true if we would like client certificate authentication. */
    private boolean _wantClientAuth = false;
    private int _handshakeTimeout = 0; //0 means use maxIdleTime
    private SSLContext _context;
    private boolean _allowRenegotiate =false;

    /* ------------------------------------------------------------ */
    /**
     * Constructor.
     */
    public SslSocketConnector()
    {
        super();
    }

    /* ------------------------------------------------------------ */
    /**
     * @return True if SSL re-negotiation is allowed (default false)
     */
    public boolean isAllowRenegotiate()
    {
        return _allowRenegotiate;
    }

    /* ------------------------------------------------------------ */
    /**
     * Set if SSL re-negotiation is allowed. CVE-2009-3555 discovered
     * a vulnerability in SSL/TLS with re-negotiation.  If your JVM
     * does not have CVE-2009-3555 fixed, then re-negotiation should
     * not be allowed.
     * @param allowRenegotiate true if re-negotiation is allowed (default false)
     */
    public void setAllowRenegotiate(boolean allowRenegotiate)
    {
        _allowRenegotiate = allowRenegotiate;
    }

    /* ------------------------------------------------------------ */
    /**
     * Accepts the next connection and dispatches it wrapped in an SSL-aware
     * endpoint that performs the handshake before normal request handling.
     */
    @Override
    public void accept(int acceptorID)
        throws IOException, InterruptedException
    {
        Socket socket = _serverSocket.accept();
        configure(socket);
        ConnectorEndPoint connection=new SslConnectorEndPoint(socket);
        connection.dispatch();
    }

    /* ------------------------------------------------------------ */
    @Override
    protected void configure(Socket socket)
        throws IOException
    {
        super.configure(socket);
    }

    /* ------------------------------------------------------------ */
    /**
     * Builds an {@link SSLContext} from the configured protocol, provider,
     * key managers, trust managers and secure-random algorithm.
     */
    protected SSLContext createSSLContext() throws Exception
    {
        KeyManager[] keyManagers = getKeyManagers();
        TrustManager[] trustManagers = getTrustManagers();
        SecureRandom secureRandom = _secureRandomAlgorithm==null?null:SecureRandom.getInstance(_secureRandomAlgorithm);
        SSLContext context = _provider==null?SSLContext.getInstance(_protocol):SSLContext.getInstance(_protocol, _provider);
        context.init(keyManagers, trustManagers, secureRandom);
        return context;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return the server socket factory of the (lazily created) SSL context.
     */
    protected SSLServerSocketFactory createFactory()
        throws Exception
    {
        if (_context==null)
            _context=createSSLContext();
        return _context.getServerSocketFactory();
    }

    /* ------------------------------------------------------------ */
    /**
     * Loads the keystore and initialises the key manager factory with the
     * key password (falling back to the keystore password if no key password
     * was configured).
     */
    protected KeyManager[] getKeyManagers() throws Exception
    {
        KeyStore keyStore = getKeyStore(_keystorePath, _keystoreType, _password==null?null:_password.toString());
        KeyManagerFactory keyManagerFactory=KeyManagerFactory.getInstance(_sslKeyManagerFactoryAlgorithm);
        keyManagerFactory.init(keyStore,_keyPassword==null?(_password==null?null:_password.toString().toCharArray()):_keyPassword.toString().toCharArray());
        return keyManagerFactory.getKeyManagers();
    }

    /**
     * Loads the truststore and initialises the trust manager factory.
     * If no truststore was configured, the keystore settings are reused.
     */
    protected TrustManager[] getTrustManagers() throws Exception
    {
        if (_truststorePath==null)
        {
            _truststorePath=_keystorePath;
            _truststoreType=_keystoreType;
            //TODO is this right? it wasn't in the code before refactoring
            _trustPassword = _password;
            _sslTrustManagerFactoryAlgorithm = _sslKeyManagerFactoryAlgorithm;
        }
        KeyStore trustStore = getKeyStore(_truststorePath, _truststoreType, _trustPassword==null?null:_trustPassword.toString());
        TrustManagerFactory trustManagerFactory=TrustManagerFactory.getInstance(_sslTrustManagerFactoryAlgorithm);
        trustManagerFactory.init(trustStore);
        return trustManagerFactory.getTrustManagers();
    }

    /**
     * Loads a keystore from a resource path.
     *
     * @param keystorePath resource path, or null for a keystore with no backing stream
     * @param keystoreType e.g. "JKS"
     * @param keystorePassword password, or null if the store is not password protected
     */
    protected KeyStore getKeyStore(String keystorePath, String keystoreType, String keystorePassword) throws Exception
    {
        KeyStore keystore;
        InputStream keystoreInputStream = null;
        try
        {
            if (keystorePath!=null)
                keystoreInputStream = Resource.newResource(keystorePath).getInputStream();
            keystore=KeyStore.getInstance(keystoreType);
            keystore.load(keystoreInputStream,keystorePassword==null?null:keystorePassword.toCharArray());
            return keystore;
        }
        finally
        {
            if (keystoreInputStream != null)
                keystoreInputStream.close();
        }
    }

    /* ------------------------------------------------------------ */
    /**
     * Allow the Listener a chance to customise the request. before the server does its stuff. <br>
     * This allows the required attributes to be set for SSL requests. <br>
     * The requirements of the Servlet specs are:
     * <ul>
     * <li> an attribute named "javax.servlet.request.ssl_id" of type String (since Spec 3.0).</li>
     * <li> an attribute named "javax.servlet.request.cipher_suite" of type String.</li>
     * <li> an attribute named "javax.servlet.request.key_size" of type Integer.</li>
     * <li> an attribute named "javax.servlet.request.X509Certificate" of type
     * java.security.cert.X509Certificate[]. This is an array of objects of type X509Certificate,
     * the order of this array is defined as being in ascending order of trust. The first
     * certificate in the chain is the one set by the client, the next is the one used to
     * authenticate the first, and so on. </li>
     * </ul>
     *
     * @param endpoint The Socket the request arrived on.
     *        This should be a {@link SocketEndPoint} wrapping a {@link SSLSocket}.
     * @param request HttpRequest to be customised.
     */
    @Override
    public void customize(EndPoint endpoint, Request request)
        throws IOException
    {
        super.customize(endpoint, request);
        request.setScheme(HttpSchemes.HTTPS);
        SocketEndPoint socket_end_point = (SocketEndPoint)endpoint;
        SSLSocket sslSocket = (SSLSocket)socket_end_point.getTransport();
        SSLSession sslSession = sslSocket.getSession();
        SslCertificates.customize(sslSession,endpoint,request);
    }

    /* ------------------------------------------------------------ */
    public String[] getExcludeCipherSuites() {
        return _excludeCipherSuites;
    }

    /* ------------------------------------------------------------ */
    public String[] getIncludeCipherSuites()
    {
        return _includeCipherSuites;
    }

    /* ------------------------------------------------------------ */
    public String getKeystore()
    {
        return _keystorePath;
    }

    /* ------------------------------------------------------------ */
    public String getKeystoreType()
    {
        return (_keystoreType);
    }

    /* ------------------------------------------------------------ */
    public boolean getNeedClientAuth()
    {
        return _needClientAuth;
    }

    /* ------------------------------------------------------------ */
    public String getProtocol()
    {
        return _protocol;
    }

    /* ------------------------------------------------------------ */
    public String getProvider() {
        return _provider;
    }

    /* ------------------------------------------------------------ */
    public String getSecureRandomAlgorithm()
    {
        return (this._secureRandomAlgorithm);
    }

    /* ------------------------------------------------------------ */
    public String getSslKeyManagerFactoryAlgorithm()
    {
        return (this._sslKeyManagerFactoryAlgorithm);
    }

    /* ------------------------------------------------------------ */
    public String getSslTrustManagerFactoryAlgorithm()
    {
        return (this._sslTrustManagerFactoryAlgorithm);
    }

    /* ------------------------------------------------------------ */
    public String getTruststore()
    {
        return _truststorePath;
    }

    /* ------------------------------------------------------------ */
    public String getTruststoreType()
    {
        return _truststoreType;
    }

    /* ------------------------------------------------------------ */
    public boolean getWantClientAuth()
    {
        return _wantClientAuth;
    }

    /* ------------------------------------------------------------ */
    /**
     * By default, we're confidential, given we speak SSL. But, if we've been told about an
     * confidential port, and said port is not our port, then we're not. This allows separation of
     * listeners providing INTEGRAL versus CONFIDENTIAL constraints, such as one SSL listener
     * configured to require client certs providing CONFIDENTIAL, whereas another SSL listener not
     * requiring client certs providing mere INTEGRAL constraints.
     */
    @Override
    public boolean isConfidential(Request request)
    {
        final int confidentialPort = getConfidentialPort();
        return confidentialPort == 0 || confidentialPort == request.getServerPort();
    }

    /* ------------------------------------------------------------ */
    /**
     * By default, we're integral, given we speak SSL. But, if we've been told about an integral
     * port, and said port is not our port, then we're not. This allows separation of listeners
     * providing INTEGRAL versus CONFIDENTIAL constraints, such as one SSL listener configured to
     * require client certs providing CONFIDENTIAL, whereas another SSL listener not requiring
     * client certs providing mere INTEGRAL constraints.
     */
    @Override
    public boolean isIntegral(Request request)
    {
        final int integralPort = getIntegralPort();
        return integralPort == 0 || integralPort == request.getServerPort();
    }

    /* ------------------------------------------------------------ */
    /**
     * @param host The host name that this server should listen on
     * @param port the port that this server should listen on
     * @param backlog See {@link ServerSocket#bind(java.net.SocketAddress, int)}
     * @return A new {@link ServerSocket socket object} bound to the supplied address with all other
     *         settings as per the current configuration of this connector.
     * @see #setWantClientAuth(boolean)
     * @see #setNeedClientAuth(boolean)
     * @exception IOException
     */
    /* ------------------------------------------------------------ */
    @Override
    protected ServerSocket newServerSocket(String host, int port,int backlog) throws IOException
    {
        SSLServerSocketFactory factory = null;
        SSLServerSocket socket = null;
        try
        {
            factory = createFactory();
            socket = (SSLServerSocket) (host==null?
                            factory.createServerSocket(port,backlog):
                            factory.createServerSocket(port,backlog,InetAddress.getByName(host)));
            if (_wantClientAuth)
                socket.setWantClientAuth(_wantClientAuth);
            if (_needClientAuth)
                socket.setNeedClientAuth(_needClientAuth);

            // Apply the include/exclude cipher suite configuration: start from the
            // JSSE-enabled suites, add included suites that are supported, then
            // remove any excluded suites.
            if ((_excludeCipherSuites!=null&&_excludeCipherSuites.length>0)
                || (_includeCipherSuites!=null&&_includeCipherSuites.length>0))
            {
                List<String> includedCSList;
                if (_includeCipherSuites!=null)
                {
                    includedCSList = Arrays.asList(_includeCipherSuites);
                } else {
                    includedCSList = new ArrayList<String>();
                }
                List<String> excludedCSList;
                if (_excludeCipherSuites!=null)
                {
                    excludedCSList = Arrays.asList(_excludeCipherSuites);
                } else {
                    excludedCSList = new ArrayList<String>();
                }
                String[] enabledCipherSuites = socket.getEnabledCipherSuites();
                List<String> enabledCSList = new ArrayList<String>(Arrays.asList(enabledCipherSuites));
                String[] supportedCipherSuites = socket.getSupportedCipherSuites();
                List<String> supportedCSList = Arrays.asList(supportedCipherSuites);
                for (String cipherName : includedCSList)
                {
                    if ((!enabledCSList.contains(cipherName))
                        && supportedCSList.contains(cipherName))
                    {
                        enabledCSList.add(cipherName);
                    }
                }
                for (String cipherName : excludedCSList)
                {
                    if (enabledCSList.contains(cipherName))
                    {
                        enabledCSList.remove(cipherName);
                    }
                }
                enabledCipherSuites = enabledCSList.toArray(new String[enabledCSList.size()]);
                socket.setEnabledCipherSuites(enabledCipherSuites);
            }
        }
        catch (IOException e)
        {
            // Propagate I/O failures unwrapped.
            throw e;
        }
        catch (Exception e)
        {
            Log.warn(e.toString());
            Log.debug(e);
            // Wrap configuration failures, preserving the original exception as
            // the cause (initCause keeps this Java 5 compatible).
            IOException ioe = new IOException("!JsseListener: " + e);
            ioe.initCause(e);
            throw ioe;
        }
        return socket;
    }

    /* ------------------------------------------------------------ */
    /**
     * @param cipherSuites cipher suite names to remove from the enabled set.
     */
    public void setExcludeCipherSuites(String[] cipherSuites) {
        this._excludeCipherSuites = cipherSuites;
    }

    /* ------------------------------------------------------------ */
    /**
     * @param cipherSuites cipher suite names to enable (if supported by JSSE).
     */
    public void setIncludeCipherSuites(String[] cipherSuites)
    {
        this._includeCipherSuites=cipherSuites;
    }

    /* ------------------------------------------------------------ */
    public void setKeyPassword(String password)
    {
        _keyPassword = Password.getPassword(KEYPASSWORD_PROPERTY,password,null);
    }

    /* ------------------------------------------------------------ */
    /**
     * @param keystore The resource path to the keystore, or null for built in keystores.
     */
    public void setKeystore(String keystore)
    {
        _keystorePath = keystore;
    }

    /* ------------------------------------------------------------ */
    public void setKeystoreType(String keystoreType)
    {
        _keystoreType = keystoreType;
    }

    /* ------------------------------------------------------------ */
    /**
     * Set the value of the needClientAuth property
     *
     * @param needClientAuth true iff we require client certificate authentication.
     */
    public void setNeedClientAuth(boolean needClientAuth)
    {
        _needClientAuth = needClientAuth;
    }

    /* ------------------------------------------------------------ */
    public void setPassword(String password)
    {
        _password = Password.getPassword(PASSWORD_PROPERTY,password,null);
    }

    /* ------------------------------------------------------------ */
    // NOTE(review): this looks up the generic PASSWORD_PROPERTY rather than a
    // dedicated trust-password property - confirm this is intentional.
    public void setTrustPassword(String password)
    {
        _trustPassword = Password.getPassword(PASSWORD_PROPERTY,password,null);
    }

    /* ------------------------------------------------------------ */
    public void setProtocol(String protocol)
    {
        _protocol = protocol;
    }

    /* ------------------------------------------------------------ */
    public void setProvider(String provider) {
        this._provider = provider;
    }

    /* ------------------------------------------------------------ */
    public void setSecureRandomAlgorithm(String algorithm)
    {
        this._secureRandomAlgorithm = algorithm;
    }

    /* ------------------------------------------------------------ */
    public void setSslKeyManagerFactoryAlgorithm(String algorithm)
    {
        this._sslKeyManagerFactoryAlgorithm = algorithm;
    }

    /* ------------------------------------------------------------ */
    public void setSslTrustManagerFactoryAlgorithm(String algorithm)
    {
        this._sslTrustManagerFactoryAlgorithm = algorithm;
    }

    public void setTruststore(String truststore)
    {
        _truststorePath = truststore;
    }

    public void setTruststoreType(String truststoreType)
    {
        _truststoreType = truststoreType;
    }

    public void setSslContext(SSLContext sslContext)
    {
        _context = sslContext;
    }

    /* ------------------------------------------------------------ */
    /**
     * @return the configured {@link SSLContext}, lazily created from the
     *         keystore/truststore settings if one has not been set explicitly.
     * @throws RuntimeException wrapping any failure to create the context.
     */
    public SSLContext getSslContext()
    {
        try
        {
            if (_context == null)
                _context=createSSLContext();
        }
        catch(Exception e)
        {
            throw new RuntimeException(e);
        }
        return _context;
    }

    /* ------------------------------------------------------------ */
    /**
     * Set the value of the _wantClientAuth property. This property is used
     * internally when opening server sockets.
     *
     * @param wantClientAuth true if we want client certificate authentication.
     * @see SSLServerSocket#setWantClientAuth
     */
    public void setWantClientAuth(boolean wantClientAuth)
    {
        _wantClientAuth = wantClientAuth;
    }

    /* ------------------------------------------------------------ */
    /**
     * Set the time in milliseconds for so_timeout during ssl handshaking
     * @param msec a non-zero value will be used to set so_timeout during
     * ssl handshakes. A zero value means the maxIdleTime is used instead.
     */
    public void setHandshakeTimeout (int msec)
    {
        _handshakeTimeout = msec;
    }

    /* ------------------------------------------------------------ */
    public int getHandshakeTimeout ()
    {
        return _handshakeTimeout;
    }

    /* ------------------------------------------------------------ */
    /**
     * Endpoint that performs the SSL handshake (with optional handshake
     * timeout) before delegating to the normal connection handling, and
     * optionally denies SSL renegotiation (CVE-2009-3555).
     */
    public class SslConnectorEndPoint extends ConnectorEndPoint
    {
        public SslConnectorEndPoint(Socket socket) throws IOException
        {
            super(socket);
        }

        @Override
        public void shutdownOutput() throws IOException
        {
            // Half-close is not supported on SSL sockets
            // (SSLSocket.shutdownOutput throws UnsupportedOperationException),
            // so this is deliberately a no-op.
        }

        @Override
        public void run()
        {
            try
            {
                // Use the (shorter) handshake timeout while handshaking, then
                // restore the connector's normal so_timeout.
                int handshakeTimeout = getHandshakeTimeout();
                int oldTimeout = _socket.getSoTimeout();
                if (handshakeTimeout > 0)
                    _socket.setSoTimeout(handshakeTimeout);

                final SSLSocket ssl=(SSLSocket)_socket;
                ssl.addHandshakeCompletedListener(new HandshakeCompletedListener()
                {
                    boolean handshook=false;
                    public void handshakeCompleted(HandshakeCompletedEvent event)
                    {
                        if (handshook)
                        {
                            // A second completed handshake is a renegotiation.
                            if (!_allowRenegotiate)
                            {
                                Log.warn("SSL renegotiate denied: "+ssl);
                                try{ssl.close();}catch(IOException e){Log.warn(e);}
                            }
                        }
                        else
                            handshook=true;
                    }
                });
                ssl.startHandshake();

                if (handshakeTimeout>0)
                    _socket.setSoTimeout(oldTimeout);

                super.run();
            }
            catch (SSLException e)
            {
                Log.debug(e);
                try{close();}
                catch(IOException e2){Log.ignore(e2);}
            }
            catch (IOException e)
            {
                Log.debug(e);
                try{close();}
                catch(IOException e2){Log.ignore(e2);}
            }
        }
    }

    /* ------------------------------------------------------------ */
    /**
     * Unsupported.
     *
     * TODO: we should remove this as it is no longer an overridden method from SslConnector (like it was in the past)
     */
    public String getAlgorithm()
    {
        throw new UnsupportedOperationException();
    }

    /* ------------------------------------------------------------ */
    /**
     * Unsupported.
     *
     * TODO: we should remove this as it is no longer an overridden method from SslConnector (like it was in the past)
     */
    public void setAlgorithm(String algorithm)
    {
        throw new UnsupportedOperationException();
    }
}
| |
package com.softhaxi.insofth.utils;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.math.BigInteger;
import java.security.InvalidKeyException;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.RSAPrivateKeySpec;
import java.security.spec.RSAPublicKeySpec;
import java.util.Arrays;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
/**
*
* @author Hutasoit
*/
public class RSAUtil {
private static final String PUBLIC_KEY_FILE = "Public.key";
private static final String PRIVATE_KEY_FILE = "Private.key";
private static StringBuilder strBuilder;
public static String generateKey() throws IOException {
try {
strBuilder = new StringBuilder();
strBuilder.append("-------GENRATE PUBLIC and PRIVATE KEY-------------")
.append("\n");
KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA");
keyPairGenerator.initialize(2048); //1024 used for normal securities
KeyPair keyPair = keyPairGenerator.generateKeyPair();
PublicKey publicKey = keyPair.getPublic();
PrivateKey privateKey = keyPair.getPrivate();
strBuilder.append("Public Key - ")
.append(publicKey)
.append("\n")
.append("Private Key -")
.append(privateKey)
.append("\n")
.append("\n------- PULLING OUT PARAMETERS WHICH MAKES KEYPAIR----------\n");
//Pullingout parameters which makes up Key
KeyFactory keyFactory = KeyFactory.getInstance("RSA");
RSAPublicKeySpec rsaPubKeySpec = keyFactory.getKeySpec(publicKey, RSAPublicKeySpec.class);
RSAPrivateKeySpec rsaPrivKeySpec = keyFactory.getKeySpec(privateKey, RSAPrivateKeySpec.class);
System.out.println("PubKey Modulus : " + rsaPubKeySpec.getModulus());
System.out.println("PubKey Exponent : " + rsaPubKeySpec.getPublicExponent());
System.out.println("PrivKey Modulus : " + rsaPrivKeySpec.getModulus());
System.out.println("PrivKey Exponent : " + rsaPrivKeySpec.getPrivateExponent());
//Share public key with other so they can encrypt data and decrypt thoses using private key(Don't share with Other)
System.out.println("\n--------SAVING PUBLIC KEY AND PRIVATE KEY TO FILES-------\n");
saveKeys(PUBLIC_KEY_FILE, rsaPubKeySpec.getModulus(), rsaPubKeySpec.getPublicExponent());
saveKeys(PRIVATE_KEY_FILE, rsaPrivKeySpec.getModulus(), rsaPrivKeySpec.getPrivateExponent());
//Encrypt Data using Public Key
byte[] encryptedData = encryptData("Anuj Patel - Classified Information !");
//Descypt Data using Private Key
decryptData(encryptedData);
return strBuilder.toString();
} catch (NoSuchAlgorithmException | InvalidKeySpecException e) {
}
return null;
}
/**
* Save Files
*
* @param fileName
* @param mod
* @param exp
* @throws IOException
*/
private static void saveKeys(String fileName, BigInteger mod, BigInteger exp) throws IOException {
FileOutputStream fos = null;
ObjectOutputStream oos = null;
try {
System.out.println("Generating " + fileName + "...");
fos = new FileOutputStream(fileName);
oos = new ObjectOutputStream(new BufferedOutputStream(fos));
oos.writeObject(mod);
oos.writeObject(exp);
System.out.println(fileName + " generated successfully");
} catch (Exception e) {
} finally {
if (oos != null) {
oos.close();
if (fos != null) {
fos.close();
}
}
}
}
/**
* Encrypt Data
*
* @param data
* @throws IOException
*/
private static byte[] encryptData(String data) throws IOException {
System.out.println("\n----------------ENCRYPTION STARTED------------");
System.out.println("Data Before Encryption :" + data);
byte[] dataToEncrypt = data.getBytes();
byte[] encryptedData = null;
try {
PublicKey pubKey = readPublicKeyFromFile(PUBLIC_KEY_FILE);
Cipher cipher = Cipher.getInstance("RSA");
cipher.init(Cipher.ENCRYPT_MODE, pubKey);
encryptedData = cipher.doFinal(dataToEncrypt);
System.out.println("Encryted Data: " + Arrays.toString(encryptedData));
} catch (IOException | NoSuchAlgorithmException | NoSuchPaddingException | InvalidKeyException | IllegalBlockSizeException | BadPaddingException e) {
}
System.out.println("----------------ENCRYPTION COMPLETED------------");
return encryptedData;
}
/**
* Encrypt Data
*
* @param data
* @throws IOException
*/
private static void decryptData(byte[] data) throws IOException {
System.out.println("\n----------------DECRYPTION STARTED------------");
byte[] descryptedData = null;
try {
PrivateKey privateKey = readPrivateKeyFromFile(PRIVATE_KEY_FILE);
Cipher cipher = Cipher.getInstance("RSA");
cipher.init(Cipher.DECRYPT_MODE, privateKey);
descryptedData = cipher.doFinal(data);
System.out.println("Decrypted Data: " + new String(descryptedData));
} catch (IOException | NoSuchAlgorithmException | NoSuchPaddingException | InvalidKeyException | IllegalBlockSizeException | BadPaddingException e) {
}
System.out.println("----------------DECRYPTION COMPLETED------------");
}
/**
* read Public Key From File
*
* @param fileName
* @return PublicKey
* @throws IOException
*/
public static PublicKey readPublicKeyFromFile(String fileName) throws IOException {
FileInputStream fis = null;
ObjectInputStream ois = null;
try {
fis = new FileInputStream(new File(fileName));
ois = new ObjectInputStream(fis);
BigInteger modulus = (BigInteger) ois.readObject();
BigInteger exponent = (BigInteger) ois.readObject();
//Get Public Key
RSAPublicKeySpec rsaPublicKeySpec = new RSAPublicKeySpec(modulus, exponent);
KeyFactory fact = KeyFactory.getInstance("RSA");
PublicKey publicKey = fact.generatePublic(rsaPublicKeySpec);
return publicKey;
} catch (IOException | ClassNotFoundException | NoSuchAlgorithmException | InvalidKeySpecException e) {
} finally {
if (ois != null) {
ois.close();
if (fis != null) {
fis.close();
}
}
}
return null;
}
/**
* read Public Key From File
*
* @param fileName
* @return
* @throws IOException
*/
public static PrivateKey readPrivateKeyFromFile(String fileName) throws IOException {
FileInputStream fis = null;
ObjectInputStream ois = null;
try {
fis = new FileInputStream(new File(fileName));
ois = new ObjectInputStream(fis);
BigInteger modulus = (BigInteger) ois.readObject();
BigInteger exponent = (BigInteger) ois.readObject();
//Get Private Key
RSAPrivateKeySpec rsaPrivateKeySpec = new RSAPrivateKeySpec(modulus, exponent);
KeyFactory fact = KeyFactory.getInstance("RSA");
PrivateKey privateKey = fact.generatePrivate(rsaPrivateKeySpec);
return privateKey;
} catch (IOException | ClassNotFoundException | NoSuchAlgorithmException | InvalidKeySpecException e) {
e.printStackTrace();
} finally {
if (ois != null) {
ois.close();
if (fis != null) {
fis.close();
}
}
}
return null;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.Lock;
import javax.cache.CacheException;
import javax.cache.CacheManager;
import javax.cache.configuration.CacheEntryListenerConfiguration;
import javax.cache.configuration.Configuration;
import javax.cache.expiry.ExpiryPolicy;
import javax.cache.integration.CompletionListener;
import javax.cache.processor.EntryProcessor;
import javax.cache.processor.EntryProcessorResult;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cache.CacheEntry;
import org.apache.ignite.cache.CacheEntryProcessor;
import org.apache.ignite.cache.CacheMetrics;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.query.FieldsQueryCursor;
import org.apache.ignite.cache.query.Query;
import org.apache.ignite.cache.query.QueryCursor;
import org.apache.ignite.cache.query.QueryDetailMetrics;
import org.apache.ignite.cache.query.QueryMetrics;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.cluster.ClusterGroup;
import org.apache.ignite.internal.AsyncSupportAdapter;
import org.apache.ignite.internal.GridKernalState;
import org.apache.ignite.internal.util.future.GridFutureAdapter;
import org.apache.ignite.internal.util.future.IgniteFutureImpl;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.lang.IgniteBiPredicate;
import org.apache.ignite.lang.IgniteClosure;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.mxbean.CacheMetricsMXBean;
import org.apache.ignite.transactions.TransactionException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* Cache proxy wrapper with gateway lock provided operations and possibility to change cache operation context.
*/
public class GatewayProtectedCacheProxy<K, V> extends AsyncSupportAdapter<IgniteCache<K, V>>
implements IgniteCacheProxy<K, V> {
/** */
private static final long serialVersionUID = 0L;
/** Cache proxy delegate. */
private IgniteCacheProxy<K, V> delegate;
/** If {@code false} does not acquire read lock on gateway enter. */
@GridToStringExclude private boolean lock;
/** Cache operation context. */
private CacheOperationContext opCtx;
    /**
     * Empty constructor required for {@link Externalizable}.
     */
    public GatewayProtectedCacheProxy() {
        // No-op: fields are populated during deserialization.
    }
/**
*
* @param delegate Cache proxy delegate.
* @param opCtx Cache operation context.
* @param lock True if cache proxy should be protected with gateway lock, false in other case.
*/
public GatewayProtectedCacheProxy(
@NotNull IgniteCacheProxy<K, V> delegate,
@NotNull CacheOperationContext opCtx,
boolean lock
) {
this.delegate = delegate;
this.opCtx = opCtx;
this.lock = lock;
}
/**
* Sets CacheManager to delegate.
*
* @param cacheMgr Cache Manager.
*/
public void setCacheManager(org.apache.ignite.cache.CacheManager cacheMgr) {
if (delegate instanceof IgniteCacheProxyImpl)
((IgniteCacheProxyImpl) delegate).setCacheManager(cacheMgr);
}
/** {@inheritDoc} */
@Override public GridCacheContext<K, V> context() {
return delegate.context();
}
/** {@inheritDoc} */
@Override public <C extends Configuration<K, V>> C getConfiguration(Class<C> clazz) {
return delegate.getConfiguration(clazz);
}
/** {@inheritDoc} */
@Override public String getName() {
return delegate.getName();
}
/** {@inheritDoc} */
@Override public CacheManager getCacheManager() {
return delegate.getCacheManager();
}
/** {@inheritDoc} */
@Override public GridCacheProxyImpl<K, V> internalProxy() {
return delegate.internalProxy();
}
/** {@inheritDoc} */
@Override public GatewayProtectedCacheProxy<K, V> cacheNoGate() {
return new GatewayProtectedCacheProxy<>(delegate, opCtx, false);
}
/** {@inheritDoc} */
@Override public GatewayProtectedCacheProxy<K, V> withExpiryPolicy(ExpiryPolicy plc) {
CacheOperationGate opGate = onEnter();
try {
return new GatewayProtectedCacheProxy<>(delegate, opCtx.withExpiryPolicy(plc), lock);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public GatewayProtectedCacheProxy<K, V> withSkipStore() {
return skipStore();
}
/** {@inheritDoc} */
@Override public GatewayProtectedCacheProxy<K, V> skipStore() {
CacheOperationGate opGate = onEnter();
try {
boolean skip = opCtx.skipStore();
if (skip)
return this;
return new GatewayProtectedCacheProxy<>(delegate, opCtx.setSkipStore(true), lock);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public GatewayProtectedCacheProxy<K, V> withNoRetries() {
CacheOperationGate opGate = onEnter();
try {
boolean noRetries = opCtx.noRetries();
if (noRetries)
return this;
return new GatewayProtectedCacheProxy<>(delegate, opCtx.setNoRetries(true), lock);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public GatewayProtectedCacheProxy<K, V> withPartitionRecover() {
CacheOperationGate opGate = onEnter();
try {
boolean recovery = opCtx.recovery();
if (recovery)
return this;
return new GatewayProtectedCacheProxy<>(delegate, opCtx.setRecovery(true), lock);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public <K1, V1> GatewayProtectedCacheProxy<K1, V1> withKeepBinary() {
return keepBinary();
}
/** {@inheritDoc} */
@Override public <K1, V1> GatewayProtectedCacheProxy<K1, V1> keepBinary() {
CacheOperationGate opGate = onEnter();
try {
return new GatewayProtectedCacheProxy<>((IgniteCacheProxy<K1, V1>) delegate, opCtx.keepBinary(), lock);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public GatewayProtectedCacheProxy<K, V> withDataCenterId(byte dataCenterId) {
CacheOperationGate opGate = onEnter();
try {
Byte prevDataCenterId = opCtx.dataCenterId();
if (prevDataCenterId != null && dataCenterId == prevDataCenterId)
return this;
return new GatewayProtectedCacheProxy<>(delegate, opCtx.setDataCenterId(dataCenterId), lock);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void loadCache(@Nullable IgniteBiPredicate<K, V> p, @Nullable Object... args) throws CacheException {
    // Gateway-guarded pass-through (the pattern used by most methods of this proxy):
    // validate the proxy and enter the gate, delegate the call, and always restore
    // the previous operation context on exit — even if the delegate throws.
    CacheOperationGate opGate = onEnter();

    try {
        delegate.loadCache(p, args);
    }
    finally {
        onLeave(opGate);
    }
}
/** {@inheritDoc} */
@Override public IgniteFuture<Void> loadCacheAsync(@Nullable IgniteBiPredicate<K, V> p, @Nullable Object... args) throws CacheException {
CacheOperationGate opGate = onEnter();
try {
return delegate.loadCacheAsync(p, args);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void localLoadCache(@Nullable IgniteBiPredicate<K, V> p, @Nullable Object... args) throws CacheException {
CacheOperationGate opGate = onEnter();
try {
delegate.localLoadCache(p, args);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Void> localLoadCacheAsync(@Nullable IgniteBiPredicate<K, V> p, @Nullable Object... args) throws CacheException {
CacheOperationGate opGate = onEnter();
try {
return delegate.localLoadCacheAsync(p, args);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public V getAndPutIfAbsent(K key, V val) throws CacheException, TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.getAndPutIfAbsent(key, val);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<V> getAndPutIfAbsentAsync(K key, V val) throws CacheException, TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.getAndPutIfAbsentAsync(key, val);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public Lock lock(K key) {
return delegate.lock(key);
}
/** {@inheritDoc} */
@Override public Lock lockAll(Collection<? extends K> keys) {
return delegate.lockAll(keys);
}
/** {@inheritDoc} */
@Override public boolean isLocalLocked(K key, boolean byCurrThread) {
CacheOperationGate opGate = onEnter();
try {
return delegate.isLocalLocked(key, byCurrThread);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public <R> QueryCursor<R> query(Query<R> qry) {
CacheOperationGate opGate = onEnter();
try {
return delegate.query(qry);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public FieldsQueryCursor<List<?>> query(SqlFieldsQuery qry) {
CacheOperationGate opGate = onEnter();
try {
return delegate.query(qry);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public List<FieldsQueryCursor<List<?>>> queryMultipleStatements(SqlFieldsQuery qry) {
CacheOperationGate opGate = onEnter();
try {
return delegate.queryMultipleStatements(qry);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public <T, R> QueryCursor<R> query(Query<T> qry, IgniteClosure<T, R> transformer) {
CacheOperationGate opGate = onEnter();
try {
return delegate.query(qry, transformer);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public Iterable<Entry<K, V>> localEntries(CachePeekMode... peekModes) throws CacheException {
CacheOperationGate opGate = onEnter();
try {
return delegate.localEntries(peekModes);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public QueryMetrics queryMetrics() {
CacheOperationGate opGate = onEnter();
try {
return delegate.queryMetrics();
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void resetQueryMetrics() {
CacheOperationGate opGate = onEnter();
try {
delegate.resetQueryMetrics();
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public Collection<? extends QueryDetailMetrics> queryDetailMetrics() {
CacheOperationGate opGate = onEnter();
try {
return delegate.queryDetailMetrics();
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void resetQueryDetailMetrics() {
CacheOperationGate opGate = onEnter();
try {
delegate.resetQueryDetailMetrics();
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void localEvict(Collection<? extends K> keys) {
CacheOperationGate opGate = onEnter();
try {
delegate.localEvict(keys);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public V localPeek(K key, CachePeekMode... peekModes) {
CacheOperationGate opGate = onEnter();
try {
return delegate.localPeek(key, peekModes);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public int size(CachePeekMode... peekModes) throws CacheException {
CacheOperationGate opGate = onEnter();
try {
return delegate.size(peekModes);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Integer> sizeAsync(CachePeekMode... peekModes) throws CacheException {
CacheOperationGate opGate = onEnter();
try {
return delegate.sizeAsync(peekModes);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public long sizeLong(CachePeekMode... peekModes) throws CacheException {
CacheOperationGate opGate = onEnter();
try {
return delegate.sizeLong(peekModes);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Long> sizeLongAsync(CachePeekMode... peekModes) throws CacheException {
CacheOperationGate opGate = onEnter();
try {
return delegate.sizeLongAsync(peekModes);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public long sizeLong(int partition, CachePeekMode... peekModes) throws CacheException {
CacheOperationGate opGate = onEnter();
try {
return delegate.sizeLong(partition, peekModes);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Long> sizeLongAsync(int partition, CachePeekMode... peekModes) throws CacheException {
CacheOperationGate opGate = onEnter();
try {
return delegate.sizeLongAsync(partition, peekModes);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public int localSize(CachePeekMode... peekModes) {
CacheOperationGate opGate = onEnter();
try {
return delegate.localSize(peekModes);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public long localSizeLong(CachePeekMode... peekModes) {
CacheOperationGate opGate = onEnter();
try {
return delegate.localSizeLong(peekModes);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public long localSizeLong(int partition, CachePeekMode... peekModes) {
CacheOperationGate opGate = onEnter();
try {
return delegate.localSizeLong(partition, peekModes);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public <T> Map<K, EntryProcessorResult<T>> invokeAll(Map<? extends K, ? extends EntryProcessor<K, V, T>> map, Object... args) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.invokeAll(map, args);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public <T> IgniteFuture<Map<K, EntryProcessorResult<T>>> invokeAllAsync(Map<? extends K, ? extends EntryProcessor<K, V, T>> map, Object... args) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.invokeAllAsync(map, args);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public V get(K key) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.get(key);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<V> getAsync(K key) {
CacheOperationGate opGate = onEnter();
try {
return delegate.getAsync(key);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public CacheEntry<K, V> getEntry(K key) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.getEntry(key);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<CacheEntry<K, V>> getEntryAsync(K key) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.getEntryAsync(key);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public Map<K, V> getAll(Set<? extends K> keys) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.getAll(keys);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Map<K, V>> getAllAsync(Set<? extends K> keys) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.getAllAsync(keys);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public Collection<CacheEntry<K, V>> getEntries(Set<? extends K> keys) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.getEntries(keys);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Collection<CacheEntry<K, V>>> getEntriesAsync(Set<? extends K> keys) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.getEntriesAsync(keys);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public Map<K, V> getAllOutTx(Set<? extends K> keys) {
CacheOperationGate opGate = onEnter();
try {
return delegate.getAllOutTx(keys);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Map<K, V>> getAllOutTxAsync(Set<? extends K> keys) {
CacheOperationGate opGate = onEnter();
try {
return delegate.getAllOutTxAsync(keys);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public boolean containsKey(K key) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.containsKey(key);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void loadAll(Set<? extends K> keys, boolean replaceExisting, CompletionListener completionListener) {
CacheOperationGate opGate = onEnter();
try {
delegate.loadAll(keys, replaceExisting, completionListener);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Boolean> containsKeyAsync(K key) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.containsKeyAsync(key);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public boolean containsKeys(Set<? extends K> keys) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.containsKeys(keys);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Boolean> containsKeysAsync(Set<? extends K> keys) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.containsKeysAsync(keys);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void put(K key, V val) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
delegate.put(key, val);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Void> putAsync(K key, V val) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.putAsync(key, val);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public V getAndPut(K key, V val) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.getAndPut(key, val);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<V> getAndPutAsync(K key, V val) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.getAndPutAsync(key, val);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void putAll(Map<? extends K, ? extends V> map) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
delegate.putAll(map);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Void> putAllAsync(Map<? extends K, ? extends V> map) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.putAllAsync(map);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public boolean putIfAbsent(K key, V val) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.putIfAbsent(key, val);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Boolean> putIfAbsentAsync(K key, V val) {
CacheOperationGate opGate = onEnter();
try {
return delegate.putIfAbsentAsync(key, val);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public boolean remove(K key) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.remove(key);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Boolean> removeAsync(K key) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.removeAsync(key);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public boolean remove(K key, V oldVal) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.remove(key, oldVal);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Boolean> removeAsync(K key, V oldVal) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.removeAsync(key, oldVal);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public V getAndRemove(K key) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.getAndRemove(key);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<V> getAndRemoveAsync(K key) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.getAndRemoveAsync(key);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public boolean replace(K key, V oldVal, V newVal) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.replace(key, oldVal, newVal);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Boolean> replaceAsync(K key, V oldVal, V newVal) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.replaceAsync(key, oldVal, newVal);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public boolean replace(K key, V val) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.replace(key, val);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Boolean> replaceAsync(K key, V val) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.replaceAsync(key, val);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public V getAndReplace(K key, V val) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.getAndReplace(key, val);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<V> getAndReplaceAsync(K key, V val) {
CacheOperationGate opGate = onEnter();
try {
return delegate.getAndReplaceAsync(key, val);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void removeAll(Set<? extends K> keys) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
delegate.removeAll(keys);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Void> removeAllAsync(Set<? extends K> keys) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.removeAllAsync(keys);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void removeAll() {
CacheOperationGate opGate = onEnter();
try {
delegate.removeAll();
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Void> removeAllAsync() {
CacheOperationGate opGate = onEnter();
try {
return delegate.removeAllAsync();
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void clear() {
CacheOperationGate opGate = onEnter();
try {
delegate.clear();
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Void> clearAsync() {
CacheOperationGate opGate = onEnter();
try {
return delegate.clearAsync();
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void clear(K key) {
CacheOperationGate opGate = onEnter();
try {
delegate.clear(key);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Void> clearAsync(K key) {
CacheOperationGate opGate = onEnter();
try {
return delegate.clearAsync(key);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void clearAll(Set<? extends K> keys) {
CacheOperationGate opGate = onEnter();
try {
delegate.clearAll(keys);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public IgniteFuture<Void> clearAllAsync(Set<? extends K> keys) {
CacheOperationGate opGate = onEnter();
try {
return delegate.clearAllAsync(keys);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void localClear(K key) {
CacheOperationGate opGate = onEnter();
try {
delegate.localClear(key);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void localClearAll(Set<? extends K> keys) {
CacheOperationGate opGate = onEnter();
try {
delegate.localClearAll(keys);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public <T> T invoke(K key, EntryProcessor<K, V, T> entryProcessor, Object... arguments) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.invoke(key, entryProcessor, arguments);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public <T> IgniteFuture<T> invokeAsync(K key, EntryProcessor<K, V, T> entryProcessor, Object... arguments) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.invokeAsync(key, entryProcessor, arguments);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public <T> T invoke(K key, CacheEntryProcessor<K, V, T> entryProcessor, Object... arguments) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.invoke(key, entryProcessor, arguments);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public <T> IgniteFuture<T> invokeAsync(K key, CacheEntryProcessor<K, V, T> entryProcessor, Object... arguments) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.invokeAsync(key, entryProcessor, arguments);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public <T> Map<K, EntryProcessorResult<T>> invokeAll(Set<? extends K> keys, EntryProcessor<K, V, T> entryProcessor, Object... args) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.invokeAll(keys, entryProcessor, args);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public <T> IgniteFuture<Map<K, EntryProcessorResult<T>>> invokeAllAsync(Set<? extends K> keys, EntryProcessor<K, V, T> entryProcessor, Object... args) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.invokeAllAsync(keys, entryProcessor, args);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public <T> Map<K, EntryProcessorResult<T>> invokeAll(Set<? extends K> keys, CacheEntryProcessor<K, V, T> entryProcessor, Object... args) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.invokeAll(keys, entryProcessor, args);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public <T> IgniteFuture<Map<K, EntryProcessorResult<T>>> invokeAllAsync(Set<? extends K> keys, CacheEntryProcessor<K, V, T> entryProcessor, Object... args) throws TransactionException {
CacheOperationGate opGate = onEnter();
try {
return delegate.invokeAllAsync(keys, entryProcessor, args);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public <T> T unwrap(Class<T> clazz) {
return delegate.unwrap(clazz);
}
/** {@inheritDoc} */
@Override public void registerCacheEntryListener(CacheEntryListenerConfiguration<K, V> cacheEntryListenerConfiguration) {
CacheOperationGate opGate = onEnter();
try {
delegate.registerCacheEntryListener(cacheEntryListenerConfiguration);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void deregisterCacheEntryListener(CacheEntryListenerConfiguration<K, V> cacheEntryListenerConfiguration) {
CacheOperationGate opGate = onEnter();
try {
delegate.deregisterCacheEntryListener(cacheEntryListenerConfiguration);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public Iterator<Entry<K, V>> iterator() {
    CacheOperationGate opGate = onEnter();

    try {
        // NOTE(review): the gate is released before the returned iterator is consumed;
        // iteration itself is guarded by the delegate, not by this proxy.
        return delegate.iterator();
    }
    finally {
        onLeave(opGate);
    }
}
/** {@inheritDoc} */
@Override public void destroy() {
    GridCacheGateway<K, V> gate = gate();

    // Best-effort: silently bail out if the proxy is invalid or the node is stopping.
    if (!onEnterIfNoStop(gate))
        return;

    IgniteFuture<?> destroyFuture;

    try {
        destroyFuture = delegate.destroyAsync();
    }
    finally {
        onLeave(gate);
    }

    // Wait for completion outside the gate so it is not held for the
    // (potentially long) duration of the destroy operation.
    if (destroyFuture != null)
        destroyFuture.get();
}

/** {@inheritDoc} */
@Override public IgniteFuture<?> destroyAsync() {
    return delegate.destroyAsync();
}
/** {@inheritDoc} */
@Override public void close() {
    GridCacheGateway<K, V> gate = gate();

    // Best-effort: silently bail out if the proxy is invalid or the node is stopping.
    if (!onEnterIfNoStop(gate))
        return;

    IgniteFuture<?> closeFuture;

    try {
        closeFuture = closeAsync();
    }
    finally {
        onLeave(gate);
    }

    // Block outside the gate, mirroring destroy(): the gate must not be held
    // while waiting for close to complete.
    if (closeFuture != null)
        closeFuture.get();
}

/** {@inheritDoc} */
@Override public IgniteFuture<?> closeAsync() {
    return delegate.closeAsync();
}
/** {@inheritDoc} */
@Override public boolean isClosed() {
return delegate.isClosed();
}
/** {@inheritDoc} */
@Override public IgniteFuture<Boolean> rebalance() {
return delegate.rebalance();
}
/** {@inheritDoc} */
@Override public IgniteFuture<?> indexReadyFuture() {
return delegate.indexReadyFuture();
}
/** {@inheritDoc} */
@Override public CacheMetrics metrics() {
CacheOperationGate opGate = onEnter();
try {
return delegate.metrics();
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public CacheMetrics metrics(ClusterGroup grp) {
CacheOperationGate opGate = onEnter();
try {
return delegate.metrics(grp);
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public CacheMetrics localMetrics() {
CacheOperationGate opGate = onEnter();
try {
return delegate.localMetrics();
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public CacheMetricsMXBean mxBean() {
CacheOperationGate opGate = onEnter();
try {
return delegate.mxBean();
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public CacheMetricsMXBean localMxBean() {
CacheOperationGate opGate = onEnter();
try {
return delegate.localMxBean();
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public Collection<Integer> lostPartitions() {
CacheOperationGate opGate = onEnter();
try {
return delegate.lostPartitions();
}
finally {
onLeave(opGate);
}
}
/** {@inheritDoc} */
@Override public void enableStatistics(boolean enabled) {
CacheOperationGate opGate = onEnter();
try {
delegate.enableStatistics(enabled);
}
finally {
onLeave(opGate);
}
}
/**
 * Safely get CacheGateway.
 *
 * @return Cache Gateway, or {@code null} if the cache context is no longer available.
 */
@Nullable private GridCacheGateway<K, V> gate() {
    GridCacheContext<K, V> ctx = delegate.context();

    if (ctx == null)
        return null;

    return ctx.gate();
}
/**
 * Checks that proxy is in valid state (not closed, restarted or destroyed).
 * Throws IllegalStateException or CacheRestartingException if proxy is in invalid state.
 *
 * @param gate Cache gateway.
 * @param tryRestart Whether an opportunistic restart of a stopped cache should be
 *      attempted before giving up.
 * @return Valid gateway (possibly re-resolved after an opportunistic restart).
 */
private GridCacheGateway<K, V> checkProxyIsValid(@Nullable GridCacheGateway<K, V> gate, boolean tryRestart) {
    if (isProxyClosed())
        throw new IllegalStateException("Cache has been closed: " + context().name());

    boolean isCacheProxy = delegate instanceof IgniteCacheProxyImpl;

    // Lets IgniteCacheProxyImpl signal an in-progress restart to the caller.
    if (isCacheProxy)
        ((IgniteCacheProxyImpl) delegate).checkRestart();

    if (gate == null)
        throw new IllegalStateException("Gateway is unavailable. Probably cache has been destroyed, but proxy is not closed.");

    // Opportunistic restart: the gate is stopped but the kernal is fully started,
    // so the underlying cache may be re-attachable without failing the operation.
    if (isCacheProxy && tryRestart && gate.isStopped() &&
        context().kernalContext().gateway().getState() == GridKernalState.STARTED) {
        IgniteCacheProxyImpl proxyImpl = (IgniteCacheProxyImpl) delegate;

        try {
            IgniteInternalCache<K, V> cache = context().kernalContext().cache().<K, V>publicJCache(context().name()).internalProxy();

            GridFutureAdapter<Void> fut = proxyImpl.opportunisticRestart();

            // A null future means no restart is in flight: re-attach immediately;
            // otherwise wait for the in-flight restart to finish.
            if (fut == null)
                proxyImpl.onRestarted(cache.context(), cache.context().cache());
            else
                new IgniteFutureImpl<>(fut).get();

            // Re-resolve the gateway: the restart replaced the underlying cache context.
            return gate();
        } catch (IgniteCheckedException ice) {
            // Opportunity didn't work out.
        }
    }

    return gate;
}
/**
 * Validates the proxy and enters the cache gateway for the current operation.
 *
 * @return Previous projection set on this thread.
 */
private CacheOperationGate onEnter() {
    // Validate first (allowing an opportunistic restart), then enter the gate
    // with or without the read lock depending on how this proxy was constructed.
    GridCacheGateway<K, V> gate = checkProxyIsValid(gate(), true);

    return new CacheOperationGate(gate,
        lock ? gate.enter(opCtx) : gate.enterNoLock(opCtx));
}
/**
 * @param gate Cache gateway.
 * @return {@code True} if enter successful.
 */
private boolean onEnterIfNoStop(@Nullable GridCacheGateway<K, V> gate) {
    try {
        // No opportunistic restart here: the callers (close/destroy) are tearing down anyway.
        checkProxyIsValid(gate, false);
    }
    catch (Exception e) {
        // Deliberate best-effort swallow: an invalid proxy simply means "do not enter".
        return false;
    }

    return lock ? gate.enterIfNotStopped() : gate.enterIfNotStoppedNoLock();
}
/**
 * Releases the gate entered by {@link #onEnter()} and restores the previous
 * operation context captured in the holder.
 *
 * @param opGate Operation context to guard.
 */
private void onLeave(CacheOperationGate opGate) {
    GridCacheGateway<K, V> enteredGate = opGate.gate;

    if (lock)
        enteredGate.leave(opGate.prev);
    else
        enteredGate.leaveNoLock(opGate.prev);
}

/**
 * Releases a gate entered without an operation context (see {@link #onEnterIfNoStop}).
 *
 * @param gate Cache gateway.
 */
private void onLeave(GridCacheGateway<K, V> gate) {
    if (lock)
        gate.leave();
    else
        gate.leaveNoLock();
}
/** {@inheritDoc} */
@Override public boolean isProxyClosed() {
return delegate.isProxyClosed();
}
/** {@inheritDoc} */
@Override public void closeProxy() {
delegate.closeProxy();
}
/** {@inheritDoc} */
@Override public IgniteCache<K, V> withAsync() {
return delegate.withAsync();
}
/** {@inheritDoc} */
@Override public boolean isAsync() {
return delegate.isAsync();
}
/** {@inheritDoc} */
@Override public <R> IgniteFuture<R> future() {
return delegate.future();
}
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
    // NOTE: field order must stay in sync with readExternal().
    out.writeObject(delegate);
    out.writeBoolean(lock);
    out.writeObject(opCtx);
}

/** {@inheritDoc} */
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
    // Read order mirrors writeExternal(): delegate, lock, opCtx.
    delegate = (IgniteCacheProxy<K, V>) in.readObject();
    lock = in.readBoolean();
    opCtx = (CacheOperationContext) in.readObject();
}
/** {@inheritDoc} */
@Override public boolean equals(Object another) {
    // Reflexive fast path.
    if (this == another)
        return true;

    // Object#equals contract: return false for null and foreign types instead of
    // failing with ClassCastException/NullPointerException as the old code did.
    if (!(another instanceof GatewayProtectedCacheProxy))
        return false;

    GatewayProtectedCacheProxy<?, ?> other = (GatewayProtectedCacheProxy<?, ?>) another;

    // Two proxies are equal when they wrap the same underlying cache delegate.
    return delegate.equals(other.delegate);
}
/** {@inheritDoc} */
@Override public int hashCode() {
    // Consistent with equals(): both are defined solely by the wrapped delegate.
    return delegate.hashCode();
}
/**
 * Holder for gate being entered and operation context to restore.
 * Immutable pair passed from {@link #onEnter()} to {@link #onLeave(CacheOperationGate)}
 * so that leave always targets the exact gate that was entered, even if the
 * proxy's gateway is re-resolved concurrently.
 */
private class CacheOperationGate {
    /**
     * Gate being entered in this operation.
     */
    public final GridCacheGateway<K, V> gate;

    /**
     * Operation context to restore after current operation completes.
     */
    public final CacheOperationContext prev;

    /**
     * @param gate Gate being entered in this operation.
     * @param prev Operation context to restore after current operation completes.
     */
    public CacheOperationGate(GridCacheGateway<K, V> gate, CacheOperationContext prev) {
        this.gate = gate;
        this.prev = prev;
    }
}
}
| |
/*
* Copyright (c) 2007, 2016, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* Copyright 1999-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: Expression.java,v 1.2.4.2 2005/09/14 19:50:20 jeffsuttor Exp $
*/
package com.sun.org.apache.xpath.internal;
import javax.xml.transform.ErrorListener;
import javax.xml.transform.TransformerException;
import com.sun.org.apache.xalan.internal.res.XSLMessages;
import com.sun.org.apache.xml.internal.dtm.DTM;
import com.sun.org.apache.xml.internal.dtm.DTMIterator;
import com.sun.org.apache.xml.internal.utils.XMLString;
import com.sun.org.apache.xpath.internal.objects.XNodeSet;
import com.sun.org.apache.xpath.internal.objects.XObject;
import com.sun.org.apache.xpath.internal.res.XPATHErrorResources;
import org.xml.sax.ContentHandler;
/**
* This abstract class serves as the base for all expression objects. An
* Expression can be executed to return a {@link XObject},
* normally has a location within a document or DOM, can send error and warning
 * events, and normally does not hold state and is meant to be immutable once
 * construction has completed. An exception to the immutability rule is iterators
* and walkers, which must be cloned in order to be used -- the original must
* still be immutable.
*/
public abstract class Expression implements java.io.Serializable, ExpressionNode, XPathVisitable
{
static final long serialVersionUID = 565665869777906902L;
/**
* The location where this expression was built from. Need for diagnostic
* messages. May be null.
* @serial
*/
private ExpressionNode m_parent;
/**
* Tell if this expression or it's subexpressions can traverse outside
* the current subtree.
*
* @return true if traversal outside the context node's subtree can occur.
*/
public boolean canTraverseOutsideSubtree()
{
return false;
}
// /**
// * Set the location where this expression was built from.
// *
// *
// * @param locator the location where this expression was built from, may be
// * null.
// */
// public void setSourceLocator(SourceLocator locator)
// {
// m_slocator = locator;
// }
/**
* Execute an expression in the XPath runtime context, and return the
* result of the expression.
*
*
* @param xctxt The XPath runtime context.
* @param currentNode The currentNode.
*
* @return The result of the expression in the form of a <code>XObject</code>.
*
* @throws TransformerException if a runtime exception
* occurs.
*/
public XObject execute(XPathContext xctxt, int currentNode)
throws TransformerException
{
// For now, the current node is already pushed.
return execute(xctxt);
}
/**
* Execute an expression in the XPath runtime context, and return the
* result of the expression.
*
*
* @param xctxt The XPath runtime context.
* @param currentNode The currentNode.
* @param dtm The DTM of the current node.
* @param expType The expanded type ID of the current node.
*
* @return The result of the expression in the form of a <code>XObject</code>.
*
* @throws TransformerException if a runtime exception
* occurs.
*/
public XObject execute(
XPathContext xctxt, int currentNode, DTM dtm, int expType)
throws TransformerException
{
// For now, the current node is already pushed.
return execute(xctxt);
}
/**
* Execute an expression in the XPath runtime context, and return the
* result of the expression.
*
*
* @param xctxt The XPath runtime context.
*
* @return The result of the expression in the form of a <code>XObject</code>.
*
* @throws TransformerException if a runtime exception
* occurs.
*/
public abstract XObject execute(XPathContext xctxt)
throws TransformerException;
/**
* Execute an expression in the XPath runtime context, and return the
* result of the expression, but tell that a "safe" object doesn't have
* to be returned. The default implementation just calls execute(xctxt).
*
*
* @param xctxt The XPath runtime context.
* @param destructiveOK true if a "safe" object doesn't need to be returned.
*
* @return The result of the expression in the form of a <code>XObject</code>.
*
* @throws TransformerException if a runtime exception
* occurs.
*/
public XObject execute(XPathContext xctxt, boolean destructiveOK)
throws TransformerException
{
return execute(xctxt);
}
/**
 * Evaluate expression to a number.
 *
 *
 * @param xctxt The XPath runtime context.
 * @return The expression evaluated as a double.
 *
 * @throws TransformerException
 */
public double num(XPathContext xctxt)
        throws TransformerException
{
  // Execute the expression, then coerce the resulting XObject to a double.
  XObject result = execute(xctxt);

  return result.num();
}
/**
 * Evaluate expression to a boolean.
 *
 *
 * @param xctxt The XPath runtime context.
 * @return The expression evaluated as a boolean.
 *
 * @throws TransformerException
 */
public boolean bool(XPathContext xctxt)
        throws TransformerException
{
  // Execute the expression, then coerce the resulting XObject to a boolean.
  XObject result = execute(xctxt);

  return result.bool();
}
/**
 * Evaluate this expression and cast the result object to a string.
 *
 * @param xctxt The XPath runtime context.
 * @return The string the result wraps, or the empty string if null.
 *
 * @throws TransformerException if evaluation fails.
 */
public XMLString xstr(XPathContext xctxt)
        throws TransformerException
{
  XObject result = execute(xctxt);

  return result.xstr();
}
/**
 * Tell if the expression is a nodeset expression. In other words, tell
 * if you can execute {@link #asNode(XPathContext) asNode} without an exception.
 *
 * @return true if the expression can be represented as a nodeset; the base
 *         implementation is conservative and always answers false,
 *         subclasses that produce node-sets override this.
 */
public boolean isNodesetExpr()
{
  return false;
}
/**
 * Return the first node out of the nodeset, if this expression is
 * a nodeset expression.
 *
 * @param xctxt The XPath runtime context.
 * @return the first node out of the nodeset, or DTM.NULL if the set is empty.
 *
 * @throws TransformerException if evaluation fails.
 */
public int asNode(XPathContext xctxt)
        throws TransformerException
{
  // Evaluate to a node-set and hand back its first node.
  return execute(xctxt).iter().nextNode();
}
/**
 * Given a select expression and a context, evaluate the XPath
 * and return the resulting iterator.
 *
 * <p>The context node is pushed onto the XPathContext for the duration of
 * the evaluation and popped again afterwards, even when evaluation throws.</p>
 *
 * @param xctxt The execution context.
 * @param contextNode The node that "." expresses.
 *
 * @return A valid DTMIterator.
 * @throws TransformerException thrown if the active ProblemListener decides
 *         the error condition is severe enough to halt processing.
 * @xsl.usage experimental
 */
public DTMIterator asIterator(XPathContext xctxt, int contextNode)
        throws TransformerException
{
  try
  {
    // Make contextNode both the current node and the current expression node.
    xctxt.pushCurrentNodeAndExpression(contextNode, contextNode);

    XObject result = execute(xctxt);

    return result.iter();
  }
  finally
  {
    xctxt.popCurrentNodeAndExpression();
  }
}
/**
 * Given a select expression and a context, evaluate the XPath
 * and return the resulting iterator, but do not clone.
 *
 * <p>The context node is pushed onto the XPathContext for the duration of
 * the evaluation and popped again afterwards, even when evaluation throws.
 * The expression must evaluate to an XNodeSet.</p>
 *
 * @param xctxt The execution context.
 * @param contextNode The node that "." expresses.
 *
 * @return A valid DTMIterator (the node-set's raw, unclonned iterator).
 * @throws TransformerException thrown if the active ProblemListener decides
 *         the error condition is severe enough to halt processing.
 * @xsl.usage experimental
 */
public DTMIterator asIteratorRaw(XPathContext xctxt, int contextNode)
        throws TransformerException
{
  try
  {
    // Make contextNode both the current node and the current expression node.
    xctxt.pushCurrentNodeAndExpression(contextNode, contextNode);

    XNodeSet nodeset = (XNodeSet) execute(xctxt);

    return nodeset.iterRaw();
  }
  finally
  {
    xctxt.popCurrentNodeAndExpression();
  }
}
/**
 * Execute an expression in the XPath runtime context and send the string
 * value of the result to the given SAX ContentHandler as character events.
 * The result object is detached afterwards so its resources can be reused.
 *
 * @param xctxt The XPath runtime context.
 * @param handler Receives the result's characters() events.
 *
 * @throws TransformerException if a runtime exception occurs.
 * @throws org.xml.sax.SAXException if the handler throws while receiving
 *         character events.
 */
public void executeCharsToContentHandler(
        XPathContext xctxt, ContentHandler handler)
          throws TransformerException,
                 org.xml.sax.SAXException
{
  XObject obj = execute(xctxt);

  obj.dispatchCharactersEvents(handler);
  obj.detach();
}
/**
 * Tell if this expression returns a stable number that will not change during
 * iterations within the expression. This is used to determine if a proximity
 * position predicate can indicate that no more searching has to occur.
 *
 * @return true if the expression represents a stable number; the base
 *         implementation is conservative and always answers false.
 */
public boolean isStableNumber()
{
  return false;
}
/**
 * This function is used to fixup variables from QNames to stack frame
 * indexes at stylesheet build time.
 *
 * @param vars List of QNames that correspond to variables. This list
 *             should be searched backwards for the first qualified name that
 *             corresponds to the variable reference qname. The position of the
 *             QName in the vector from the start of the vector will be its position
 *             in the stack frame (but variables above the globalsTop value will need
 *             to be offset to the current stack frame).
 * @param globalsSize presumably the size of the global variable area, used to
 *                    offset local frame positions — TODO confirm against callers.
 */
public abstract void fixupVariables(java.util.Vector vars, int globalsSize);
/**
 * Compare this object with another object and see
 * if they are equal, including the sub hierarchy.
 *
 * @param expr Another expression object.
 * @return true if this object's class and the expr
 *         object's class are the same, and the data contained
 *         within both objects are considered equal.
 */
public abstract boolean deepEquals(Expression expr);
/**
 * Utility used by {@link #deepEquals} implementations: report whether the
 * passed-in expression is of exactly the same class as this one.
 *
 * <p>Bottlenecked here because, per the original author's note, comparing
 * Class objects may not be the best possible identity test.</p>
 *
 * @param expr the expression to compare against; may be null.
 * @return true if expr is non-null and of the exact same class as this.
 */
protected final boolean isSameClass(Expression expr)
{
  return (expr != null) && (getClass() == expr.getClass());
}
/**
 * Warn the user of a problem.
 *
 * @param xctxt The XPath runtime context; when null the warning is
 *              formatted but not delivered anywhere.
 * @param msg An error msgkey that corresponds to one of the constants found
 *            in {@link XPATHErrorResources}, which is a key for a format string.
 * @param args An array of arguments for the format string, which may be null.
 *
 * @throws TransformerException if the current ErrorListener determines to
 *         throw an exception.
 */
public void warn(XPathContext xctxt, String msg, Object[] args)
        throws TransformerException
{
  String formatted = XSLMessages.createXPATHWarning(msg, args);

  if (xctxt != null)
  {
    ErrorListener listener = xctxt.getErrorListener();

    // TO DO: Need to get stylesheet Locator from here.
    listener.warning(new TransformerException(formatted, xctxt.getSAXLocator()));
  }
}
/**
 * Tell the user of an assertion error, and probably throw an
 * exception.
 *
 * @param b If false, a runtime exception will be thrown.
 * @param msg The assertion message, which should be informative.
 *
 * @throws RuntimeException if the b argument is false.
 */
public void assertion(boolean b, String msg)
{
  if (b)
  {
    return;
  }

  String formatted = XSLMessages.createXPATHMessage(
      XPATHErrorResources.ER_INCORRECT_PROGRAMMER_ASSERTION,
      new Object[]{ msg });

  throw new RuntimeException(formatted);
}
/**
 * Tell the user of an error, and probably throw an
 * exception.
 *
 * @param xctxt The XPath runtime context; when null the message is
 *              formatted but not delivered anywhere.
 * @param msg An error msgkey that corresponds to one of the constants found
 *            in {@link XPATHErrorResources}, which is a key for a format string.
 * @param args An array of arguments for the format string, which may be null.
 *
 * @throws TransformerException if the current ErrorListener determines to
 *         throw an exception.
 */
public void error(XPathContext xctxt, String msg, Object[] args)
        throws TransformerException
{
  String formatted = XSLMessages.createXPATHMessage(msg, args);

  if (xctxt != null)
  {
    ErrorListener listener = xctxt.getErrorListener();

    listener.fatalError(new TransformerException(formatted, this));
  }
}
/**
 * Get the first non-Expression parent of this node.
 *
 * @return null or the first ancestor that is not an Expression.
 */
public ExpressionNode getExpressionOwner()
{
  ExpressionNode owner = exprGetParent();

  // Climb past every Expression ancestor; instanceof is false for null,
  // so the loop also terminates when the chain runs out.
  while (owner instanceof Expression)
  {
    owner = owner.exprGetParent();
  }

  return owner;
}
//=============== ExpressionNode methods ================
/**
 * Inform this node of its parent in the expression tree.
 * Pairs with {@link #exprGetParent}.
 *
 * @param n the new parent node; must not be this node itself.
 * @throws RuntimeException if n is this node (self-parenting assertion).
 */
public void exprSetParent(ExpressionNode n)
{
  assertion(n != this, "Can not parent an expression to itself!");

  m_parent = n;
}
/** @return the parent last set via {@link #exprSetParent}, or null if none. */
public ExpressionNode exprGetParent()
{
  return m_parent;
}
/**
 * This method tells the node to add its argument to the node's
 * list of children.  The base class keeps no child list and simply fails
 * an assertion; subclasses that own sub-expressions must override it.
 */
public void exprAddChild(ExpressionNode n, int i)
{
  assertion(false, "exprAddChild method not implemented!");
}
/**
 * This method returns a child node. The children are numbered
 * from zero, left to right.  The base class keeps no child list, so this
 * always returns null; subclasses override as needed.
 */
public ExpressionNode exprGetChild(int i)
{
  return null;
}
/**
 * Return the number of children the node has.  Always 0 for the base
 * class, which keeps no child list.
 */
public int exprGetNumChildren()
{
  return 0;
}
//=============== SourceLocator methods ================
/**
 * Return the public identifier for the current document event.
 *
 * <p>The return value is the public identifier of the document
 * entity or of the external parsed entity in which the markup that
 * triggered the event appears.  The lookup is delegated to the parent
 * chain, since this node holds no locator data of its own.</p>
 *
 * @return A string containing the public identifier, or
 *         null if none is available.
 * @see #getSystemId
 */
public String getPublicId()
{
  ExpressionNode parent = m_parent;

  return (parent == null) ? null : parent.getPublicId();
}
/**
 * Return the system identifier for the current document event.
 *
 * <p>The return value is the system identifier of the document
 * entity or of the external parsed entity in which the markup that
 * triggered the event appears.  The lookup is delegated to the parent
 * chain, since this node holds no locator data of its own.</p>
 *
 * <p>If the system identifier is a URL, the parser must resolve it
 * fully before passing it to the application.</p>
 *
 * @return A string containing the system identifier, or null
 *         if none is available.
 * @see #getPublicId
 */
public String getSystemId()
{
  ExpressionNode parent = m_parent;

  return (parent == null) ? null : parent.getSystemId();
}
/**
 * Return the line number where the current document event ends.
 *
 * <p><strong>Warning:</strong> The return value from the method
 * is intended only as an approximation for the sake of error
 * reporting; it is not intended to provide sufficient information
 * to edit the character content of the original XML document.</p>
 *
 * <p>The return value is an approximation of the line number
 * in the document entity or external parsed entity where the
 * markup that triggered the event appears.</p>
 *
 * @return The line number, or 0 if this node has no parent to delegate
 *         to.  (NOTE(review): SourceLocator convention is -1 for
 *         "unavailable", but this implementation returns 0 — confirm
 *         before changing, callers may depend on it.)
 * @see #getColumnNumber
 */
public int getLineNumber()
{
  if(null == m_parent)
    return 0;

  return m_parent.getLineNumber();
}
/**
 * Return the character position where the current document event ends.
 *
 * <p><strong>Warning:</strong> The return value from the method
 * is intended only as an approximation for the sake of error
 * reporting; it is not intended to provide sufficient information
 * to edit the character content of the original XML document.</p>
 *
 * <p>The return value is an approximation of the column number
 * in the document entity or external parsed entity where the
 * markup that triggered the event appears.</p>
 *
 * @return The column number, or 0 if this node has no parent to delegate
 *         to.  (NOTE(review): SourceLocator convention is -1 for
 *         "unavailable", but this implementation returns 0 — confirm
 *         before changing, callers may depend on it.)
 * @see #getLineNumber
 */
public int getColumnNumber()
{
  if(null == m_parent)
    return 0;

  return m_parent.getColumnNumber();
}
}
| |
package com.keeps.security.utils;
/**
 * Minimal Base64 codec (RFC 4648 alphabet, '=' padding).
 *
 * <p>{@code encode} never fails; {@code decode} is strict and returns
 * {@code null} for any malformed input (wrong length after whitespace
 * removal, characters outside the alphabet, or non-zero discarded bits
 * next to the padding).  Whitespace (space, CR, LF, tab) is tolerated and
 * stripped before decoding.</p>
 *
 * <p>Review notes on the original: the fields {@code fDebug} and
 * {@code SIGN} were never read and have been removed; the declared
 * constants ({@code EIGHTBIT}, {@code FOURBYTE}, {@code PAD}, ...) were
 * declared but bypassed with magic numbers — they are now actually used;
 * the sign-extension ternaries ({@code (b & 0xFFFFFF80) == 0 ? b >> 2 :
 * b >> 2 ^ 0xC0}) are replaced by the equivalent unsigned mask
 * {@code (b & 0xFF) >> 2}.</p>
 */
public final class Base64 {

    /** Size of the ASCII-indexed decode table. */
    private static final int BASELENGTH = 128;
    /** Number of characters in the Base64 alphabet. */
    private static final int LOOKUPLENGTH = 64;
    /** Bits per encoded group (3 bytes -> 4 characters). */
    private static final int TWENTYFOURBITGROUP = 24;
    private static final int EIGHTBIT = 8;
    private static final int SIXTEENBIT = 16;
    /** Encoded characters per group. */
    private static final int FOURBYTE = 4;
    /** Padding character. */
    private static final char PAD = '=';

    /** Maps an ASCII code to its 6-bit value, or -1 for non-alphabet chars. */
    private static final byte[] base64Alphabet = new byte[BASELENGTH];
    /** Maps a 6-bit value to its Base64 character. */
    private static final char[] lookUpBase64Alphabet = new char[LOOKUPLENGTH];

    static {
        for (int i = 0; i < BASELENGTH; ++i) {
            base64Alphabet[i] = -1;
        }
        for (int i = 'Z'; i >= 'A'; --i) {
            base64Alphabet[i] = (byte) (i - 'A');
        }
        for (int i = 'z'; i >= 'a'; --i) {
            base64Alphabet[i] = (byte) (i - 'a' + 26);
        }
        for (int i = '9'; i >= '0'; --i) {
            base64Alphabet[i] = (byte) (i - '0' + 52);
        }
        base64Alphabet['+'] = 62;
        base64Alphabet['/'] = 63;

        for (int i = 0; i <= 25; ++i) {
            lookUpBase64Alphabet[i] = (char) ('A' + i);
        }
        for (int i = 26, j = 0; i <= 51; ++i, ++j) {
            lookUpBase64Alphabet[i] = (char) ('a' + j);
        }
        for (int i = 52, j = 0; i <= 61; ++i, ++j) {
            lookUpBase64Alphabet[i] = (char) ('0' + j);
        }
        lookUpBase64Alphabet[62] = '+';
        lookUpBase64Alphabet[63] = '/';
    }

    private Base64() {
        // Utility class; not instantiable.
    }

    /** @return true for the whitespace characters tolerated by decode. */
    private static boolean isWhiteSpace(char octect) {
        return ((octect == ' ') || (octect == '\r') || (octect == '\n') || (octect == '\t'));
    }

    /** @return true iff the character is the padding character '='. */
    private static boolean isPad(char octect) {
        return (octect == PAD);
    }

    /** @return true iff the character belongs to the Base64 alphabet. */
    private static boolean isData(char octect) {
        return ((octect < BASELENGTH) && (base64Alphabet[octect] != -1));
    }

    /**
     * Encode binary data as a Base64 string.
     *
     * @param binaryData the bytes to encode; may be null.
     * @return the encoded string, "" for an empty array, or null for null input.
     */
    public static String encode(byte[] binaryData) {
        if (binaryData == null) {
            return null;
        }

        int lengthDataBits = binaryData.length * EIGHTBIT;
        if (lengthDataBits == 0) {
            return "";
        }

        int fewerThan24bits = lengthDataBits % TWENTYFOURBITGROUP;
        int numberTriplets = lengthDataBits / TWENTYFOURBITGROUP;
        int numberQuartet = (fewerThan24bits != 0) ? numberTriplets + 1 : numberTriplets;
        char[] encodedData = new char[numberQuartet * FOURBYTE];

        int encodedIndex = 0;
        int dataIndex = 0;

        // Full 3-byte groups -> 4 characters each.
        for (int i = 0; i < numberTriplets; ++i) {
            int b1 = binaryData[dataIndex++] & 0xFF;
            int b2 = binaryData[dataIndex++] & 0xFF;
            int b3 = binaryData[dataIndex++] & 0xFF;

            encodedData[encodedIndex++] = lookUpBase64Alphabet[b1 >> 2];
            encodedData[encodedIndex++] = lookUpBase64Alphabet[((b1 & 0x3) << 4) | (b2 >> 4)];
            encodedData[encodedIndex++] = lookUpBase64Alphabet[((b2 & 0xF) << 2) | (b3 >> 6)];
            encodedData[encodedIndex++] = lookUpBase64Alphabet[b3 & 0x3F];
        }

        // Trailing partial group, padded with '='.
        if (fewerThan24bits == EIGHTBIT) {
            int b1 = binaryData[dataIndex] & 0xFF;
            encodedData[encodedIndex++] = lookUpBase64Alphabet[b1 >> 2];
            encodedData[encodedIndex++] = lookUpBase64Alphabet[(b1 & 0x3) << 4];
            encodedData[encodedIndex++] = PAD;
            encodedData[encodedIndex++] = PAD;
        } else if (fewerThan24bits == SIXTEENBIT) {
            int b1 = binaryData[dataIndex] & 0xFF;
            int b2 = binaryData[dataIndex + 1] & 0xFF;
            encodedData[encodedIndex++] = lookUpBase64Alphabet[b1 >> 2];
            encodedData[encodedIndex++] = lookUpBase64Alphabet[((b1 & 0x3) << 4) | (b2 >> 4)];
            encodedData[encodedIndex++] = lookUpBase64Alphabet[(b2 & 0xF) << 2];
            encodedData[encodedIndex++] = PAD;
        }

        return new String(encodedData);
    }

    /**
     * Decode a Base64 string into bytes.
     *
     * @param encoded the Base64 text; whitespace is ignored; may be null.
     * @return the decoded bytes, an empty array for an (effectively) empty
     *         string, or null for null/malformed input.
     */
    public static byte[] decode(String encoded) {
        if (encoded == null) {
            return null;
        }

        char[] base64Data = encoded.toCharArray();
        int len = removeWhiteSpace(base64Data);

        if (len % FOURBYTE != 0) {
            return null; // length must be a multiple of four
        }

        int numberQuadruple = len / FOURBYTE;
        if (numberQuadruple == 0) {
            return new byte[0];
        }

        byte[] decodedData = new byte[numberQuadruple * 3];
        int encodedIndex = 0;
        int dataIndex = 0;
        char d1, d2, d3, d4;
        byte b1, b2, b3, b4;

        // All quadruples except the last one must be pure data.
        int i = 0;
        for (; i < numberQuadruple - 1; ++i) {
            if (!isData(d1 = base64Data[dataIndex++]) || !isData(d2 = base64Data[dataIndex++])
                    || !isData(d3 = base64Data[dataIndex++]) || !isData(d4 = base64Data[dataIndex++])) {
                return null; // non-Base64 character encountered
            }

            b1 = base64Alphabet[d1];
            b2 = base64Alphabet[d2];
            b3 = base64Alphabet[d3];
            b4 = base64Alphabet[d4];

            decodedData[encodedIndex++] = (byte) (b1 << 2 | b2 >> 4);
            decodedData[encodedIndex++] = (byte) ((b2 & 0xF) << 4 | (b3 >> 2) & 0xF);
            decodedData[encodedIndex++] = (byte) (b3 << 6 | b4);
        }

        // Last quadruple: the first two characters must always be data.
        if (!isData(d1 = base64Data[dataIndex++]) || !isData(d2 = base64Data[dataIndex++])) {
            return null;
        }

        b1 = base64Alphabet[d1];
        b2 = base64Alphabet[d2];
        d3 = base64Data[dataIndex++];
        d4 = base64Data[dataIndex++];

        if (!isData(d3) || !isData(d4)) { // padding (or garbage) present
            if (isPad(d3) && isPad(d4)) {
                // "xx==": one decoded byte; the low nibble of b2 is discarded
                // and must be zero for canonical input.
                if ((b2 & 0xF) != 0) {
                    return null;
                }
                byte[] tmp = new byte[i * 3 + 1];
                System.arraycopy(decodedData, 0, tmp, 0, i * 3);
                tmp[encodedIndex] = (byte) (b1 << 2 | b2 >> 4);
                return tmp;
            }
            if (!isPad(d3) && isPad(d4)) {
                // "xxx=": two decoded bytes; the low two bits of b3 are
                // discarded and must be zero for canonical input.
                b3 = base64Alphabet[d3];
                if ((b3 & 0x3) != 0) {
                    return null;
                }
                byte[] tmp = new byte[i * 3 + 2];
                System.arraycopy(decodedData, 0, tmp, 0, i * 3);
                tmp[encodedIndex++] = (byte) (b1 << 2 | b2 >> 4);
                tmp[encodedIndex] = (byte) ((b2 & 0xF) << 4 | (b3 >> 2) & 0xF);
                return tmp;
            }
            return null; // e.g. "=x": padding in the wrong position
        }

        // No padding: a full final group of three bytes.
        b3 = base64Alphabet[d3];
        b4 = base64Alphabet[d4];
        decodedData[encodedIndex++] = (byte) (b1 << 2 | b2 >> 4);
        decodedData[encodedIndex++] = (byte) ((b2 & 0xF) << 4 | (b3 >> 2) & 0xF);
        decodedData[encodedIndex++] = (byte) (b3 << 6 | b4);
        return decodedData;
    }

    /**
     * Compact the array by removing whitespace characters in place.
     *
     * @param data character buffer to compact; may be null.
     * @return the number of meaningful characters now at the front of data.
     */
    private static int removeWhiteSpace(char[] data) {
        if (data == null) {
            return 0;
        }

        int newSize = 0;
        int len = data.length;
        for (int i = 0; i < len; ++i) {
            if (!isWhiteSpace(data[i])) {
                data[newSize++] = data[i];
            }
        }
        return newSize;
    }
}
| |
package tv.ustream.yolo.module;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import tv.ustream.yolo.config.ConfigException;
import tv.ustream.yolo.config.ConfigMap;
import tv.ustream.yolo.config.ConfigPattern;
import tv.ustream.yolo.handler.ILineHandler;
import tv.ustream.yolo.module.parser.IParser;
import tv.ustream.yolo.module.processor.ICompositeProcessor;
import tv.ustream.yolo.module.processor.IProcessor;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Wires configured parsers to processors and routes incoming log lines
 * through them.  Configuration updates are staged by {@link #updateConfig}
 * and applied lazily (or immediately when requested).
 *
 * @author bandesz
 */
public class ModuleChain implements ILineHandler
{

    private static final Logger LOG = LoggerFactory.getLogger(ModuleChain.class);

    private final ModuleFactory moduleFactory;

    private final Map<String, IParser> parsers = new HashMap<String, IParser>();

    private final Map<String, IProcessor> processors = new HashMap<String, IProcessor>();

    // parser name -> processor name -> processor parameters
    private final Map<String, Map<String, Map<String, Object>>> transitions =
            new HashMap<String, Map<String, Map<String, Object>>>();

    // Pending configuration; null when no update is outstanding.
    private Map<String, Object> config = null;

    public ModuleChain(final ModuleFactory moduleFactory)
    {
        this.moduleFactory = moduleFactory;
    }

    /** Describes the required top-level configuration shape. */
    private ConfigMap getMainConfig()
    {
        ConfigMap mainConfig = new ConfigMap();
        mainConfig.addConfigValue("processors", Map.class);
        mainConfig.addConfigValue("parsers", Map.class);
        return mainConfig;
    }

    /**
     * Stage a new configuration, validating its top-level shape.
     *
     * @param config  the new configuration map.
     * @param instant when true the configuration is applied immediately,
     *                otherwise on the next {@link #handle(String)} call.
     * @throws ConfigException if the configuration is invalid.
     */
    public void updateConfig(final Map<String, Object> config, final boolean instant) throws ConfigException
    {
        this.config = config;
        getMainConfig().parse("[root]", config);
        if (instant)
        {
            update();
        }
    }

    /**
     * Apply the staged configuration, if any: stop and drop the current
     * modules, then rebuild processors and parsers from scratch.
     */
    @SuppressWarnings("unchecked")
    private void update() throws ConfigException
    {
        if (config == null)
        {
            return;
        }

        stop();
        reset();

        Map<String, Object> processorsEntry = (Map<String, Object>) config.get("processors");
        for (Map.Entry<String, Object> processor : processorsEntry.entrySet())
        {
            addProcessor(processor.getKey(), (Map<String, Object>) processor.getValue());
        }
        // Composite processors may reference sibling processors, so wire them
        // only after every processor has been created.
        for (Map.Entry<String, Object> processor : processorsEntry.entrySet())
        {
            setupCompositeProcessor(processors.get(processor.getKey()), (Map<String, Object>) processor.getValue());
        }

        Map<String, Object> parsersEntry = (Map<String, Object>) config.get("parsers");
        for (Map.Entry<String, Object> parser : parsersEntry.entrySet())
        {
            addParser(parser.getKey(), (Map<String, Object>) parser.getValue());
        }

        config = null;
    }

    /** Create a processor from its configuration and register it by name. */
    private void addProcessor(String name, Map<String, Object> config) throws ConfigException
    {
        LOG.info("Adding {} processor {}", name, config);

        IProcessor processor = moduleFactory.createProcessor(name, config);
        if (processor == null)
        {
            return;
        }
        processors.put(name, processor);
    }

    /**
     * Attach the configured sub-processors to a composite processor.
     * No-op for non-composite processors.
     *
     * @throws ConfigException if a referenced sub-processor does not exist.
     */
    @SuppressWarnings("unchecked")
    private void setupCompositeProcessor(IProcessor processor, Map<String, Object> config) throws ConfigException
    {
        if (!(processor instanceof ICompositeProcessor))
        {
            return;
        }

        for (String subProcessor : (List<String>) config.get("processors"))
        {
            if (!processors.containsKey(subProcessor))
            {
                throw new ConfigException(subProcessor + " processor does not exist!");
            }
            ((ICompositeProcessor) processor).addProcessor(processors.get(subProcessor));
        }
    }

    /**
     * Create a parser from its configuration, register it, and record its
     * transitions to the processors it feeds.
     *
     * @throws ConfigException if a referenced processor does not exist.
     */
    @SuppressWarnings("unchecked")
    private void addParser(String name, Map<String, Object> config) throws ConfigException
    {
        LOG.info("Adding {} parser {}", name, config);

        IParser parser = moduleFactory.createParser(name, config);
        if (parser == null)
        {
            return;
        }
        parsers.put(name, parser);

        Map<String, Object> parserProcessors = (Map<String, Object>) config.get("processors");
        transitions.put(name, new HashMap<String, Map<String, Object>>());

        for (Map.Entry<String, Object> parserProcessor : parserProcessors.entrySet())
        {
            if (!processors.containsKey(parserProcessor.getKey()))
            {
                throw new ConfigException(parserProcessor.getKey() + " processor does not exist");
            }
            addTransition(name, parserProcessor.getKey(), parserProcessor.getValue());
        }
    }

    /** Validate and store the parser -> processor parameter mapping. */
    @SuppressWarnings("unchecked")
    private void addTransition(String parserName, String processorName, Object params) throws ConfigException
    {
        ConfigMap processParamsConfig = processors.get(processorName).getProcessParamsConfig();
        if (processParamsConfig != null)
        {
            processParamsConfig.parse(parserName + ".processors." + processorName, params);
        }
        transitions.get(parserName).put(
                processorName,
                (Map<String, Object>) ConfigPattern.replacePatterns(params, parsers.get(parserName).getOutputKeys())
        );
    }

    /**
     * Route a single input line: apply any staged configuration, then offer
     * the line to each parser (run-always parsers still run after a match)
     * and forward parser output to the mapped processors.
     */
    public void handle(String line)
    {
        try
        {
            update();
        }
        catch (ConfigException e)
        {
            // Preserve the cause so the underlying configuration error
            // is not lost (the original dropped it).
            throw new RuntimeException("Updating module chain failed: " + e.getMessage(), e);
        }

        boolean match = false;
        for (Map.Entry<String, IParser> entry : parsers.entrySet())
        {
            IParser parser = entry.getValue();
            if (!match || parser.runAlways())
            {
                Map<String, Object> parserOutput = parser.parse(line);
                if (parserOutput != null)
                {
                    match = true;
                    for (Map.Entry<String, Map<String, Object>> processor : transitions.get(entry.getKey()).entrySet())
                    {
                        processors.get(processor.getKey()).process(parserOutput, processor.getValue());
                    }
                }
            }
        }
    }

    /** Stop every registered processor. */
    public void stop()
    {
        for (IProcessor processor : processors.values())
        {
            processor.stop();
        }
    }

    /** Drop all registered modules and transitions. */
    private void reset()
    {
        parsers.clear();
        transitions.clear();
        processors.clear();
    }
}
| |
/**
* Copyright (C) 2013 Oldterns
*
* This file may be modified and distributed under the terms
* of the MIT license. See the LICENSE file for details.
*/
package com.oldterns.vilebot.db;
import java.util.Set;
import redis.clients.jedis.Jedis;
/**
 * Redis-backed store of per-noun karma, kept in a single sorted set whose
 * score is the karma value.  All methods borrow a connection from the
 * shared pool and return it in a finally block.
 */
public class KarmaDB
    extends RedisDB
{
    /** Redis key of the sorted set mapping noun -> karma score. */
    private static final String keyOfKarmaSortedSet = "noun-karma";

    /**
     * Change the karma of a noun by an integer.
     *
     * @param noun The noun to change the karma of
     * @param mod The amount to change the karma by, may be negative.
     */
    public static void modNounKarma( String noun, int mod )
    {
        Jedis jedis = pool.getResource();
        try
        {
            jedis.zincrby( keyOfKarmaSortedSet, mod, noun );
        }
        finally
        {
            pool.returnResource( jedis );
        }
    }

    /**
     * Get the karma of a noun.
     *
     * @param noun The noun to query the karma of
     * @return Integer iff the noun has a defined value, else null
     */
    public static Integer getNounKarma( String noun )
    {
        Double karma;
        Jedis jedis = pool.getResource();
        try
        {
            karma = jedis.zscore( keyOfKarmaSortedSet, noun );
        }
        finally
        {
            pool.returnResource( jedis );
        }

        if ( karma == null )
        {
            return null;
        }

        // Redis stores scores as doubles; round back into the int domain.
        return Integer.valueOf( (int) Math.round( karma.doubleValue() ) );
    }

    /**
     * Get the rank of a noun based on its karma (rank 1 = highest karma).
     *
     * @param noun The noun to query the rank of
     * @return Integer iff the noun has a defined value, else null
     */
    public static Integer getNounRank( String noun )
    {
        Long rank;
        Jedis jedis = pool.getResource();
        try
        {
            rank = jedis.zrevrank( keyOfKarmaSortedSet, noun );
        }
        finally
        {
            pool.returnResource( jedis );
        }

        if ( rank == null )
        {
            return null;
        }

        // Redis ranks are 0-based; expose 1-based ranks.
        return Integer.valueOf( rank.intValue() + 1 );
    }

    /**
     * Get the rank of a noun based on its karma, starting at most negative karma.
     *
     * @param noun The noun to query the reverse rank of
     * @return Integer iff the noun has a defined value, else null
     */
    public static Integer getNounRevRank( String noun )
    {
        Long rank;
        Jedis jedis = pool.getResource();
        try
        {
            rank = jedis.zrank( keyOfKarmaSortedSet, noun );
        }
        finally
        {
            pool.returnResource( jedis );
        }

        if ( rank == null )
        {
            return null;
        }

        // Redis ranks are 0-based; expose 1-based ranks.
        return Integer.valueOf( rank.intValue() + 1 );
    }

    /**
     * Get noun from a karma rank (Rank 1 is the member with the highest karma).
     *
     * @param rank The rank to get the noun of.
     * @return String The noun iff the rank exists, else null.
     */
    public static String getRankNoun( long rank )
    {
        // Fetches up to two members (the range is inclusive) and keeps the first.
        Set<String> nouns = getRankNouns( rank - 1, rank );
        if ( nouns != null && nouns.iterator().hasNext() )
        {
            return nouns.iterator().next();
        }
        return null;
    }

    /**
     * Get nouns from karma ranks, highest karma first.
     *
     * @param lower The lower rank (0-based, inclusive) to get the nouns of.
     * @param upper The upper rank (0-based, inclusive) to get the nouns of.
     * @return the nouns in that rank range, or null if the range is empty.
     */
    public static Set<String> getRankNouns( long lower, long upper )
    {
        Set<String> nouns;
        Jedis jedis = pool.getResource();
        try
        {
            nouns = jedis.zrevrange( keyOfKarmaSortedSet, lower, upper );
        }
        finally
        {
            pool.returnResource( jedis );
        }

        if ( nouns == null || nouns.size() == 0 )
        {
            return null;
        }
        return nouns;
    }

    /**
     * Get noun from a karma rank, starting with the lowest ranks (Rank 1 would
     * be the member with the least karma).
     *
     * @param rank The reversed rank to get the noun of.
     * @return String The noun iff the rank exists, else null.
     */
    public static String getRevRankNoun( long rank )
    {
        // Fetches up to two members (the range is inclusive) and keeps the first.
        Set<String> nouns = getRevRankNouns( rank - 1, rank );
        if ( nouns != null && nouns.iterator().hasNext() )
        {
            return nouns.iterator().next();
        }
        return null;
    }

    /**
     * Get nouns from a karma rank range, lowest karma first.
     *
     * @param lower The lower rank (0-based, inclusive) to get the nouns of.
     * @param upper The upper rank (0-based, inclusive) to get the nouns of.
     * @return the nouns in that rank range, or null if the range is empty.
     */
    public static Set<String> getRevRankNouns( long lower, long upper )
    {
        Set<String> nouns;
        Jedis jedis = pool.getResource();
        try
        {
            nouns = jedis.zrange( keyOfKarmaSortedSet, lower, upper );
        }
        finally
        {
            pool.returnResource( jedis );
        }

        if ( nouns == null || nouns.size() == 0 )
        {
            return null;
        }
        return nouns;
    }

    /**
     * Remove noun from the karma/rank set.
     *
     * @param noun The noun to remove, if it exists.
     * @return true iff the noun existed before removing it.
     */
    public static boolean remNoun( String noun )
    {
        Long existed;
        Jedis jedis = pool.getResource();
        try
        {
            existed = jedis.zrem( keyOfKarmaSortedSet, noun );
        }
        finally
        {
            pool.returnResource( jedis );
        }

        // zrem reports the number of members actually removed.
        return existed != null && existed.longValue() == 1L;
    }

    /**
     * Sum the karma of every noun in the set.
     *
     * @return the total karma across all members (0 for an empty set).
     */
    public static long getTotalKarma() {
        long totalKarma;
        Jedis jedis = pool.getResource();
        try
        {
            Set<String> members = jedis.zrange(keyOfKarmaSortedSet, 0, -1);
            totalKarma = sum(members, jedis);
        }
        finally
        {
            pool.returnResource( jedis );
        }
        return totalKarma;
    }

    /**
     * Sum the scores of the given members.
     *
     * <p>NOTE(review): this issues one zscore round trip per member, O(n)
     * network calls; a race with a concurrent removal could make zscore
     * return null and NPE on unboxing — consider fetching scores with the
     * range query instead.</p>
     */
    private static long sum(Set<String> members, Jedis jedis) {
        long sum = 0;
        for (String member : members) {
            sum += jedis.zscore(keyOfKarmaSortedSet, member);
        }
        return sum;
    }
}
| |
/************************************************************************
* This is free software - without ANY guarantee!
*
*
* Copyright 2013, Dr. Gernot Starke, arc42.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*********************************************************************** */
package org.arc42.pdfutil;
import com.itextpdf.text.*;
import com.itextpdf.text.pdf.*;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
public class PdfConcatenizer {
private java.util.List<String> sourceFiles;
private String targetFileName;
/*********
evenify: add blank pages to files with odd pagecount,
so that chapters always start on odd pages
********/
private Boolean evenify;
/*********
Stuff needed to add blank pages (e.g. evenify)
********/
private String textToPrintOnBlankPage;
private PdfReader blankReader;
private static final Logger LOGGER = Logger.getLogger(PdfUtilRunner.class.getName());
/**
* constructor with PdfProcessingOptions instance
* @param processingOptions
*/
public PdfConcatenizer( PdfProcessingOptions processingOptions ) {
super();
sourceFiles = processingOptions.getSourceFiles();
targetFileName = processingOptions.getTargetFile();
evenify = processingOptions.isEvenify();
textToPrintOnBlankPage = processingOptions.getBlankPageText();
this.initBlankPagePdf();
}
public PdfConcatenizer(List<String> pSourceFiles,
String pTargetFileName,
Boolean pEvenify,
String pTextToPrintOnBlankPages) {
sourceFiles = pSourceFiles;
evenify = pEvenify;
targetFileName = pTargetFileName;
textToPrintOnBlankPage = pTextToPrintOnBlankPages;
this.initBlankPagePdf();
}
/**
* creates a Pdf file with just a single page
* which will eventually be inserted when we need an EMPTY (blank) page!
*/
public final void initBlankPagePdf() {
Document blankDocument = new Document();
int fontsize = 40;
try {
PdfWriter.getInstance(blankDocument, new FileOutputStream("./blankpage_tmp.pdf"));
blankDocument.open();
Paragraph paragraph = new Paragraph();
addBlankLines(paragraph, 5);
Font font = new Font(Font.FontFamily.HELVETICA,
fontsize,
Font.BOLDITALIC,
BaseColor.LIGHT_GRAY);
// write the blankPageText in big letters
paragraph.setAlignment(Element.ALIGN_CENTER);
if (textToPrintOnBlankPage == null) {
textToPrintOnBlankPage = "intentionally left blank" ;
}
paragraph.add(
new Pa new Paragraph(textToPrintOnBlankPage,
font));ragraph(textToPrintOnBlankPage,
font));
blankDocument.add(paragraph);
blankDocument.close();
blankReader = new PdfReader("./blankpage_tmp.pdf");
} catch (Exception e) {
LOGGER.log(Level.SEVERE, "Exception: empty page pdf (needed to evenify) could not be generated: ", e);
}
}
/**
* concats all files given in sourceFile
*/
public void concatFiles() {
Document document = new Document();
try {
PdfCopy copy = new PdfCopy(document, new FileOutputStream(targetFileName));
document.open();
PdfReader reader;
int nrOfPagesInCurrentFile;
for (int i = 0; i < sourceFiles.size(); i++) {
reader = new PdfReader(sourceFiles.get(i));
nrOfPagesInCurrentFile = reader.getNumberOfPages();
for (int page = 0; page < nrOfPagesInCurrentFile; ) {
copy.addPage(copy.getImportedPage(reader, ++page));
}
if (evenify && (nrOfPagesInCurrentFile % 2 == 1)) {
addBlankPage(copy);
}
}
document.close();
} catch (IOException e) {
LOGGER.log(Level.SEVERE, "Exception: wrong file you gave me, Yoda muttered...", e);
} catch (BadPdfFormatException e) {
LOGGER.log(Level.SEVERE, "Exception: Encountered bad pdf format", e);
} catch (DocumentException e) {
LOGGER.log(Level.SEVERE, "Exception: Something bad happened to the output document.", e);
}
}
/**
* addBlankPage adds an empty page (e.g. "deliberately left blank") to current PdfCopy instance
* (usually at the end of odd-paged files to achieve "evenification" (even number of
* pages in every processed file)
*
* @param copy where the blank page is added to
*/
private void addBlankPage(PdfCopy copy) throws DocumentException, IOException {
copy.addPage(copy.getImportedPage(blankReader, 1));
// the following lines would add an "EMPTY" page, with NO text on it
// copy.newPage();
// copy.addPage(PageSize.A4, 0);
}
// thanx to Lars Vogel for the idea to the following method
private static void addBlankLines(Paragraph paragraph, int nrOfLines) {
for (int i = 0; i < nrOfLines; i++) {
paragraph.add(new Paragraph(" "));
}
}
/*
boring getter and setter stuff
*/
public Boolean getEvenify() {
return evenify;
}
public void setEvenify(Boolean evenify) {
this.evenify = evenify;
}
public List<String> getSourceFiles() {
return sourceFiles;
}
public void setSourceFiles( List<String> sourceFiles) {
this.sourceFiles = sourceFiles;
}
public String getTargetFileName() {
return targetFileName;
}
public void setTargetFileName(String targetFileName) {
this.targetFileName = targetFileName;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.s3a.commit;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.assertj.core.api.Assertions;
import org.junit.AfterClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.contract.ContractTestUtils;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobStatus;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MapFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.task.JobContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.apache.hadoop.util.DurationInfo;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.concurrent.HadoopExecutors;
import static org.apache.hadoop.fs.contract.ContractTestUtils.*;
import static org.apache.hadoop.fs.s3a.S3AUtils.*;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.*;
import static org.apache.hadoop.fs.s3a.commit.CommitConstants.*;
import static org.apache.hadoop.test.LambdaTestUtils.*;
/**
* Test the job/task commit actions of an S3A Committer, including trying to
* simulate some failure and retry conditions.
* Derived from
* {@code org.apache.hadoop.mapreduce.lib.output.TestFileOutputCommitter}.
*
* This is a complex test suite as it tries to explore the full lifecycle
* of committers, and is designed for subclassing.
*/
@SuppressWarnings({"unchecked", "ThrowableNotThrown", "unused"})
public abstract class AbstractITCommitProtocol extends AbstractCommitITest {
/** Destination directory of the job under test; set per test case in setup(). */
private Path outDir;
private static final Logger LOG =
LoggerFactory.getLogger(AbstractITCommitProtocol.class);
private static final String SUB_DIR = "SUB_DIR";
/** Prefix of the part file a single map task writes. */
protected static final String PART_00000 = "part-m-00000";
/**
* Random job ID regenerated in setup() so that even in parallel test runs,
* no job has the same ID.
*/
private String jobId;
// A random task attempt id for testing.
private String attempt0;
private TaskAttemptID taskAttempt0;
// a second attempt ID, for concurrent-commit tests
private String attempt1;
private TaskAttemptID taskAttempt1;
// fixed key/value records used by writeOutput() and validateContent()
private static final Text KEY_1 = new Text("key1");
private static final Text KEY_2 = new Text("key2");
private static final Text VAL_1 = new Text("val1");
private static final Text VAL_2 = new Text("val2");
/** A job to abort in test case teardown. */
private List<JobData> abortInTeardown = new ArrayList<>(1);
/** Factory producing committers via the class's abstract createCommitter(). */
private final StandardCommitterFactory
standardCommitterFactory = new StandardCommitterFactory();
/**
 * Delete the destination directory of the current test, if present.
 * @throws IOException failure to delete
 */
private void cleanupDestDir() throws IOException {
  rmdir(outDir, getConfiguration());
}
/**
* This must return the name of a suite which is unique to the non-abstract
* test; it is used to isolate the output paths of parallel suites.
* @return a string which must be unique and a valid path.
*/
protected abstract String suitename();
/**
* Get the log; can be overridden by subclasses for a test-case-specific log.
* @return a log.
*/
public Logger log() {
return LOG;
}
/**
 * Overridden method returns the suitename as well as the method name,
 * so if more than one committer test is run in parallel, paths are
 * isolated.
 * @return a name for a method, unique across the suites and test cases.
 */
@Override
protected String getMethodName() {
  // prefix with the suite name so parallel suites never share paths
  return String.join("-", suitename(), super.getMethodName());
}
@Override
public void setup() throws Exception {
super.setup();
// fresh job ID per test case, so parallel runs never collide
jobId = randomJobId();
attempt0 = "attempt_" + jobId + "_m_000000_0";
taskAttempt0 = TaskAttemptID.forName(attempt0);
attempt1 = "attempt_" + jobId + "_m_000001_0";
taskAttempt1 = TaskAttemptID.forName(attempt1);
// method-name-based dest dir, then purge any leftover uploads/files
outDir = path(getMethodName());
abortMultipartUploadsUnderPath(outDir);
cleanupDestDir();
}
@Override
public void teardown() throws Exception {
describe("teardown");
// abort all jobs registered via abortInTeardown(JobData)
abortInTeardown.forEach(this::abortJobQuietly);
if (outDir != null) {
try {
// best-effort cleanup of pending uploads and output files
abortMultipartUploadsUnderPath(outDir);
cleanupDestDir();
} catch (IOException e) {
log().info("Exception during cleanup", e);
}
}
S3AFileSystem fileSystem = getFileSystem();
if (fileSystem != null) {
// dump FS statistics for post-mortem analysis of the test run
log().info("Statistics for {}:\n{}", fileSystem.getUri(),
fileSystem.getInstrumentation().dump(" ", " = ", "\n", true));
}
super.teardown();
}
/**
 * This only looks for leakage of committer thread pools,
 * and not any other leaked threads, such as those from S3A FS instances.
 */
@AfterClass
public static void checkForThreadLeakage() {
  // collect all live threads whose names mark them as committer pool threads
  List<String> leaked = new ArrayList<>();
  for (String name : getCurrentThreadNames()) {
    if (name.startsWith(AbstractS3ACommitter.THREAD_PREFIX)) {
      leaked.add(name);
    }
  }
  Assertions.assertThat(leaked)
      .describedAs("Outstanding committer threads")
      .isEmpty();
}
/**
* Add the specified job to the current list of jobs to abort in teardown.
* @param jobData job data.
*/
protected void abortInTeardown(JobData jobData) {
abortInTeardown.add(jobData);
}
/**
* Create the test configuration: FS caching disabled so each test gets
* its own instance, and the committer under test bound in.
* @return the new configuration
*/
@Override
protected Configuration createConfiguration() {
Configuration conf = super.createConfiguration();
disableFilesystemCaching(conf);
bindCommitter(conf);
return conf;
}
/**
* Bind to the committer from the methods of
* {@link #getCommitterFactoryName()} and {@link #getCommitterName()}.
* @param conf configuration to set up
*/
protected void bindCommitter(Configuration conf) {
super.bindCommitter(conf, getCommitterFactoryName(), getCommitterName());
}
/**
* Create a committer for a task, targeting the standard output directory.
* @param context task context
* @return new committer
* @throws IOException failure
*/
protected AbstractS3ACommitter createCommitter(
TaskAttemptContext context) throws IOException {
return createCommitter(getOutDir(), context);
}
/**
* Create a committer for a task and a given output path;
* implemented by each concrete committer test suite.
* @param outputPath path
* @param context task context
* @return new committer
* @throws IOException failure
*/
protected abstract AbstractS3ACommitter createCommitter(
Path outputPath,
TaskAttemptContext context) throws IOException;
/** @return the factory class name to bind in the configuration. */
protected String getCommitterFactoryName() {
return CommitConstants.S3A_COMMITTER_FACTORY;
}
/** @return the name of the committer under test, for configuration binding. */
protected abstract String getCommitterName();
/** @return the destination directory of the current test. */
protected Path getOutDir() {
return outDir;
}
/** @return the unique job ID generated in setup(). */
protected String getJobId() {
return jobId;
}
/** @return the string form of the first task attempt ID. */
protected String getAttempt0() {
return attempt0;
}
/** @return the first task attempt ID. */
protected TaskAttemptID getTaskAttempt0() {
return taskAttempt0;
}
/** @return the string form of the second task attempt ID. */
protected String getAttempt1() {
return attempt1;
}
/** @return the second task attempt ID. */
protected TaskAttemptID getTaskAttempt1() {
return taskAttempt1;
}
/**
* Functional interface for creating committers, designed to allow
* different factories to be used to create different failure modes
* (e.g. fault-injecting committers).
*/
@FunctionalInterface
public interface CommitterFactory {
/**
* Create a committer for a task.
* @param context task context
* @return new committer
* @throws IOException failure
*/
AbstractS3ACommitter createCommitter(
TaskAttemptContext context) throws IOException;
}
/**
* The normal committer creation factory: simply delegates to the
* abstract {@code createCommitter} methods of the enclosing class.
*/
public class StandardCommitterFactory implements CommitterFactory {
@Override
public AbstractS3ACommitter createCommitter(TaskAttemptContext context)
throws IOException {
return AbstractITCommitProtocol.this.createCommitter(context);
}
}
/**
* Write some text out via {@code LoggingTextOutputFormat},
* timing the operation.
* @param context task
* @throws IOException IO failure
* @throws InterruptedException write interrupted
*/
protected void writeTextOutput(TaskAttemptContext context)
throws IOException, InterruptedException {
describe("write output");
try (DurationInfo d = new DurationInfo(LOG,
"Writing Text output for task %s", context.getTaskAttemptID())) {
writeOutput(new LoggingTextOutputFormat().getRecordWriter(context),
context);
}
}
/**
* Write the standard output: a fixed record sequence including null
* keys and values, to exercise the output format's null handling.
* The record order here must match the expected output built in
* {@code validateContent()}.
* @param writer record writer
* @param context task context
* @throws IOException IO failure
* @throws InterruptedException write interrupted
*/
private void writeOutput(RecordWriter writer,
TaskAttemptContext context) throws IOException, InterruptedException {
NullWritable nullWritable = NullWritable.get();
// CloseWriter guarantees the writer is closed even if a write fails
try(CloseWriter cw = new CloseWriter(writer, context)) {
writer.write(KEY_1, VAL_1);
writer.write(null, nullWritable);
writer.write(null, VAL_1);
writer.write(nullWritable, VAL_2);
writer.write(KEY_2, nullWritable);
writer.write(KEY_1, null);
writer.write(null, null);
writer.write(KEY_2, VAL_2);
writer.close(context);
}
}
/**
 * Write the output of a map: ten records with LongWritable keys 0..9,
 * alternating VAL_2 (even keys) and VAL_1 (odd keys).
 * @param writer record writer
 * @param context task context
 * @throws IOException IO failure
 * @throws InterruptedException write interrupted
 */
private void writeMapFileOutput(RecordWriter writer,
    TaskAttemptContext context) throws IOException, InterruptedException {
  describe("\nWrite map output");
  try (DurationInfo d = new DurationInfo(LOG,
      "Writing Text output for task %s", context.getTaskAttemptID());
      CloseWriter cw = new CloseWriter(writer, context)) {
    for (int row = 0; row < 10; row++) {
      if ((row & 1) == 1) {
        writer.write(new LongWritable(row), VAL_1);
      } else {
        writer.write(new LongWritable(row), VAL_2);
      }
    }
    writer.close(context);
  }
}
/**
* Details on a job for use in {@code startJob} and elsewhere:
* bundles the job, its job/task contexts, the committer under test
* and the job's configuration.
*/
public static class JobData {
private final Job job;
private final JobContext jContext;
private final TaskAttemptContext tContext;
private final AbstractS3ACommitter committer;
// convenience reference to job.getConfiguration()
private final Configuration conf;
public JobData(Job job,
JobContext jContext,
TaskAttemptContext tContext,
AbstractS3ACommitter committer) {
this.job = job;
this.jContext = jContext;
this.tContext = tContext;
this.committer = committer;
conf = job.getConfiguration();
}
}
/**
* Create a new job against the standard output dir and first task attempt.
* Sets the task attempt ID, and output dir; asks for a success marker.
* @return the new job
* @throws IOException failure
*/
public Job newJob() throws IOException {
return newJob(outDir, getConfiguration(), attempt0);
}
/**
 * Create a new job. Sets the task attempt ID,
 * and output dir; asks for a success marker.
 * @param dir dest dir
 * @param configuration config to get the job from
 * @param taskAttemptId task attempt
 * @return the new job
 * @throws IOException failure
 */
private Job newJob(Path dir, Configuration configuration,
    String taskAttemptId) throws IOException {
  Job created = Job.getInstance(configuration);
  Configuration jobConf = created.getConfiguration();
  jobConf.set(MRJobConfig.TASK_ATTEMPT_ID, taskAttemptId);
  // request the _SUCCESS marker so tests can probe for it
  jobConf.setBoolean(CREATE_SUCCESSFUL_JOB_OUTPUT_DIR_MARKER, true);
  FileOutputFormat.setOutputPath(created, dir);
  return created;
}
/**
* Start a job with the standard committer factory; optionally write the
* test data. Always register the job to be aborted (quietly) in teardown.
* This is, from an "OO-purity perspective" the wrong kind of method to
* do: it's setting things up, mixing functionality, registering for teardown.
* Its aim is simple though: a common body of code for starting work
* in test cases.
* @param writeText should the text be written?
* @return the job data 4-tuple
* @throws IOException IO problems
* @throws InterruptedException interruption during write
*/
protected JobData startJob(boolean writeText)
throws IOException, InterruptedException {
return startJob(standardCommitterFactory, writeText);
}
/**
* Start a job with a committer; optionally write the test data.
* Always register the job to be aborted (quietly) in teardown.
* This is, from an "OO-purity perspective" the wrong kind of method to
* do: it's setting things up, mixing functionality, registering for teardown.
* Its aim is simple though: a common body of code for starting work
* in test cases.
* @param factory the committer factory to use
* @param writeText should the text be written?
* @return the job data 4-tuple
* @throws IOException IO problems
* @throws InterruptedException interruption during write
*/
protected JobData startJob(CommitterFactory factory, boolean writeText)
throws IOException, InterruptedException {
Job job = newJob();
Configuration conf = job.getConfiguration();
// pin attempt 0 as the active task attempt, application attempt 1
conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt0);
conf.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID, 1);
JobContext jContext = new JobContextImpl(conf, taskAttempt0.getJobID());
TaskAttemptContext tContext = new TaskAttemptContextImpl(conf,
taskAttempt0);
AbstractS3ACommitter committer = factory.createCommitter(tContext);
// setup
JobData jobData = new JobData(job, jContext, tContext, committer);
setup(jobData);
// guarantee cleanup even if the test fails before its own abort/commit
abortInTeardown(jobData);
if (writeText) {
// write output
writeTextOutput(tContext);
}
return jobData;
}
/**
* Set up the job and then the task, timing each phase.
* @param jobData job data
* @throws IOException problems
*/
protected void setup(JobData jobData) throws IOException {
AbstractS3ACommitter committer = jobData.committer;
JobContext jContext = jobData.jContext;
TaskAttemptContext tContext = jobData.tContext;
describe("\nsetup job");
try (DurationInfo d = new DurationInfo(LOG,
"setup job %s", jContext.getJobID())) {
committer.setupJob(jContext);
}
try (DurationInfo d = new DurationInfo(LOG,
"setup task %s", tContext.getTaskAttemptID())) {
committer.setupTask(tContext);
}
describe("setup complete\n");
}
/**
* Abort a job quietly, swallowing (but logging) any IOException.
* @param jobData job info
*/
protected void abortJobQuietly(JobData jobData) {
abortJobQuietly(jobData.committer, jobData.jContext, jobData.tContext);
}
/**
 * Abort a job quietly: first the task, then the job.
 * IOExceptions are logged at warn level and swallowed, so this is safe
 * to call in teardown even for jobs which already committed or aborted.
 * @param committer committer
 * @param jContext job context
 * @param tContext task context
 */
protected void abortJobQuietly(AbstractS3ACommitter committer,
    JobContext jContext,
    TaskAttemptContext tContext) {
  describe("\naborting task");
  try {
    committer.abortTask(tContext);
  } catch (IOException e) {
    // consistent message form with the job-abort case below
    log().warn("Exception aborting task", e);
  }
  describe("\naborting job");
  try {
    committer.abortJob(jContext, JobStatus.State.KILLED);
  } catch (IOException e) {
    log().warn("Exception aborting job", e);
  }
}
/**
* Commit the task and then the job, verifying afterwards that the
* committer's thread pools were shut down.
* @param committer committer
* @param jContext job context
* @param tContext task context
* @throws IOException problems
*/
protected void commit(AbstractS3ACommitter committer,
JobContext jContext,
TaskAttemptContext tContext) throws IOException {
try (DurationInfo d = new DurationInfo(LOG,
"committing work", jContext.getJobID())) {
describe("\ncommitting task");
committer.commitTask(tContext);
describe("\ncommitting job");
committer.commitJob(jContext);
describe("commit complete\n");
verifyCommitterHasNoThreads(committer);
}
}
/**
* Execute work as part of a test, after creating the job
* (without writing any text output).
* After the execution, {@link #abortJobQuietly(JobData)} is
* called for abort/cleanup.
* @param name name of work (for logging)
* @param action action to execute
* @throws Exception failure
*/
protected void executeWork(String name, ActionToTest action)
throws Exception {
executeWork(name, startJob(false), action);
}
/**
* Execute work as part of a test, against the created job.
* After the execution, {@link #abortJobQuietly(JobData)} is
* always called for abort/cleanup, even if the action throws.
* @param name name of work (for logging)
* @param jobData job info
* @param action action to execute
* @throws Exception failure
*/
public void executeWork(String name,
JobData jobData,
ActionToTest action) throws Exception {
try (DurationInfo d = new DurationInfo(LOG, "Executing %s", name)) {
action.exec(jobData.job,
jobData.jContext,
jobData.tContext,
jobData.committer);
} finally {
abortJobQuietly(jobData);
}
}
/**
* Verify that task recovery doesn't work for these committers:
* a second application attempt must fail to recover the committed
* task, and its abort operations must clean up pending uploads.
*/
@Test
@SuppressWarnings("deprecation")
public void testRecoveryAndCleanup() throws Exception {
describe("Test (unsupported) task recovery.");
JobData jobData = startJob(true);
TaskAttemptContext tContext = jobData.tContext;
AbstractS3ACommitter committer = jobData.committer;
assertNotNull("null workPath in committer " + committer,
committer.getWorkPath());
assertNotNull("null outputPath in committer " + committer,
committer.getOutputPath());
// Commit the task. This will promote data and metadata to where
// job commits will pick it up on commit or abort.
commitTask(committer, tContext);
assertTaskAttemptPathDoesNotExist(committer, tContext);
// simulate a second application attempt of the same job
Configuration conf2 = jobData.job.getConfiguration();
conf2.set(MRJobConfig.TASK_ATTEMPT_ID, attempt0);
conf2.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID, 2);
JobContext jContext2 = new JobContextImpl(conf2, taskAttempt0.getJobID());
TaskAttemptContext tContext2 = new TaskAttemptContextImpl(conf2,
taskAttempt0);
AbstractS3ACommitter committer2 = createCommitter(tContext2);
// NOTE(review): setupJob is passed the task context rather than jContext2;
// presumably valid as a TaskAttemptContext is also a JobContext — confirm.
committer2.setupJob(tContext2);
assertFalse("recoverySupported in " + committer2,
committer2.isRecoverySupported());
intercept(PathCommitException.class, "recover",
() -> committer2.recoverTask(tContext2));
// at this point, task attempt 0 has failed to recover
// it should be abortable though. This will be a no-op as it already
// committed
describe("aborting task attempt 2; expect nothing to clean up");
committer2.abortTask(tContext2);
describe("Aborting job 2; expect pending commits to be aborted");
committer2.abortJob(jContext2, JobStatus.State.KILLED);
// now, state of system may still have pending data
assertNoMultipartUploadsPending(outDir);
verifyCommitterHasNoThreads(committer2);
}
/**
 * Assert that the committer's task attempt path is absent.
 * @param committer committer under test
 * @param context task attempt context
 * @throws IOException IO failure probing the path
 */
protected void assertTaskAttemptPathDoesNotExist(
    AbstractS3ACommitter committer, TaskAttemptContext context)
    throws IOException {
  Path taskAttemptPath = committer.getTaskAttemptPath(context);
  FileSystem pathFS =
      taskAttemptPath.getFileSystem(context.getConfiguration());
  ContractTestUtils.assertPathDoesNotExist(pathFS,
      "task attempt dir",
      taskAttemptPath);
}
/**
 * Assert that the committer's job attempt path is absent.
 * @param committer committer under test
 * @param context job context
 * @throws IOException IO failure probing the path
 */
protected void assertJobAttemptPathDoesNotExist(
    AbstractS3ACommitter committer, JobContext context)
    throws IOException {
  Path jobAttemptPath = committer.getJobAttemptPath(context);
  FileSystem pathFS =
      jobAttemptPath.getFileSystem(context.getConfiguration());
  ContractTestUtils.assertPathDoesNotExist(pathFS,
      "job attempt dir",
      jobAttemptPath);
}
/**
 * Verify the output of the directory.
 * That includes the {@code part-m-00000-*}
 * file existence and contents, as well as optionally, the success marker.
 * The expected text mirrors the record sequence written by
 * {@code writeOutput()} (null keys/values collapse per TextOutputFormat).
 * @param dir directory to scan.
 * @param expectSuccessMarker check the success marker?
 * @throws Exception failure.
 */
private void validateContent(Path dir, boolean expectSuccessMarker)
    throws Exception {
  if (expectSuccessMarker) {
    verifySuccessMarker(dir);
  }
  Path expectedFile = getPart0000(dir);
  log().debug("Validating content in {}", expectedFile);
  // single-threaded use: StringBuilder, not the synchronized StringBuffer
  StringBuilder expectedOutput = new StringBuilder();
  expectedOutput.append(KEY_1).append('\t').append(VAL_1).append("\n");
  expectedOutput.append(VAL_1).append("\n");
  expectedOutput.append(VAL_2).append("\n");
  expectedOutput.append(KEY_2).append("\n");
  expectedOutput.append(KEY_1).append("\n");
  expectedOutput.append(KEY_2).append('\t').append(VAL_2).append("\n");
  String output = readFile(expectedFile);
  assertEquals("Content of " + expectedFile,
      expectedOutput.toString(), output);
}
/**
* Identify any path under the directory which begins with the
* {@code "part-m-00000"} sequence. There's some compensation for
* eventual consistency here: the probe is retried until the
* consistency wait elapses.
* @param dir directory to scan
* @return the full path
* @throws FileNotFoundException the path is missing.
* @throws Exception failure.
*/
protected Path getPart0000(final Path dir) throws Exception {
final FileSystem fs = dir.getFileSystem(getConfiguration());
return eventually(CONSISTENCY_WAIT, CONSISTENCY_PROBE_INTERVAL,
() -> getPart0000Immediately(fs, dir));
}
/**
 * Identify the single path under the directory which begins with the
 * {@code "part-m-00000"} sequence.
 * Fails with a {@link FileNotFoundException} (including a directory
 * listing) when there is no match, and with an {@link IOException}
 * when the match is ambiguous, rather than silently returning an
 * arbitrary entry.
 * @param fs FS to probe
 * @param dir directory to scan
 * @return the full path
 * @throws FileNotFoundException the path is missing.
 * @throws IOException failure, including multiple matching files.
 */
private Path getPart0000Immediately(FileSystem fs, Path dir)
    throws IOException {
  FileStatus[] statuses = fs.listStatus(dir,
      path -> path.getName().startsWith(PART_00000));
  if (statuses.length == 0) {
    // fail with an FNFE which includes a listing of the parent dir
    ContractTestUtils.assertPathExists(fs, "Output file",
        new Path(dir, PART_00000));
  }
  if (statuses.length > 1) {
    // ambiguous result: fail loudly instead of picking the first match
    throw new IOException("Found " + statuses.length
        + " entries matching " + PART_00000 + " under " + dir
        + ": " + Arrays.toString(statuses));
  }
  return statuses[0].getPath();
}
/**
* Look for the partFile subdir of the output dir and verify it is a
* MapFile directory containing the index and data files.
* @param fs filesystem
* @param dir output dir
* @throws Exception failure.
*/
private void validateMapFileOutputContent(
FileSystem fs, Path dir) throws Exception {
// map output is a directory with index and data files
assertPathExists("Map output", dir);
Path expectedMapDir = getPart0000(dir);
assertPathExists("Map output", expectedMapDir);
assertIsDirectory(expectedMapDir);
FileStatus[] files = fs.listStatus(expectedMapDir);
assertTrue("No files found in " + expectedMapDir, files.length > 0);
assertPathExists("index file in " + expectedMapDir,
new Path(expectedMapDir, MapFile.INDEX_FILE_NAME));
assertPathExists("data file in " + expectedMapDir,
new Path(expectedMapDir, MapFile.DATA_FILE_NAME));
}
/**
* Dump all MPUs in the filesystem: delegates to
* {@code countMultipartUploads} with an empty prefix, i.e. every upload.
* @throws IOException IO failure
*/
protected void dumpMultipartUploads() throws IOException {
countMultipartUploads("");
}
/**
* Full test of the expected lifecycle: start job, task, write, commit task,
* commit job; verifying invisibility of uncommitted output and
* absence of pending multipart uploads afterwards.
* @throws Exception on a failure
*/
@Test
public void testCommitLifecycle() throws Exception {
describe("Full test of the expected lifecycle:\n" +
" start job, task, write, commit task, commit job.\n" +
"Verify:\n" +
"* no files are visible after task commit\n" +
"* the expected file is visible after job commit\n" +
"* no outstanding MPUs after job commit");
JobData jobData = startJob(false);
JobContext jContext = jobData.jContext;
TaskAttemptContext tContext = jobData.tContext;
AbstractS3ACommitter committer = jobData.committer;
validateTaskAttemptWorkingDirectory(committer, tContext);
// write output
describe("1. Writing output");
writeTextOutput(tContext);
dumpMultipartUploads();
describe("2. Committing task");
assertTrue("No files to commit were found by " + committer,
committer.needsTaskCommit(tContext));
commitTask(committer, tContext);
// this is only task commit; there MUST be no part- files in the dest dir
waitForConsistency();
try {
applyLocatedFiles(getFileSystem().listFiles(outDir, false),
(status) ->
assertFalse("task committed file to dest :" + status,
status.getPath().toString().contains("part")));
} catch (FileNotFoundException ignored) {
// acceptable: the dest dir may not exist until job commit
log().info("Outdir {} is not created by task commit phase ",
outDir);
}
describe("3. Committing job");
// task commit must have left the uploads pending for job commit
assertMultipartUploadsPending(outDir);
commitJob(committer, jContext);
// validate output
describe("4. Validating content");
validateContent(outDir, shouldExpectSuccessMarker());
assertNoMultipartUploadsPending(outDir);
}
/**
* Commit a task then the job; a second task commit must fail with
* a FileNotFoundException as the work has already been promoted.
*/
@Test
public void testCommitterWithDuplicatedCommit() throws Exception {
describe("Call a task then job commit twice;" +
"expect the second task commit to fail.");
JobData jobData = startJob(true);
JobContext jContext = jobData.jContext;
TaskAttemptContext tContext = jobData.tContext;
AbstractS3ACommitter committer = jobData.committer;
// do commit
commit(committer, jContext, tContext);
// validate output
validateContent(outDir, shouldExpectSuccessMarker());
assertNoMultipartUploadsPending(outDir);
// commit task to fail on retry
expectFNFEonTaskCommit(committer, tContext);
}
/**
* Override point: should a _SUCCESS marker be expected after job commit?
* @return true by default.
*/
protected boolean shouldExpectSuccessMarker() {
return true;
}
/**
* Simulate a failure on the first job commit; expect the
* second to succeed (the data having been promoted before the
* injected fault), and a third to fail as the attempt dir is gone.
*/
@Test
public void testCommitterWithFailure() throws Exception {
describe("Fail the first job commit then retry");
// fault-injecting committer: first commitJob call will fail
JobData jobData = startJob(new FailingCommitterFactory(), true);
JobContext jContext = jobData.jContext;
TaskAttemptContext tContext = jobData.tContext;
AbstractS3ACommitter committer = jobData.committer;
// do commit
committer.commitTask(tContext);
// now fail job
expectSimulatedFailureOnJobCommit(jContext, committer);
commitJob(committer, jContext);
// but the data got there, due to the order of operations.
validateContent(outDir, shouldExpectSuccessMarker());
expectJobCommitToFail(jContext, committer);
}
/**
* Override point: the failure expected on the attempt to commit a failed
* job. The base expectation is a {@link FileNotFoundException}.
* @param jContext job context
* @param committer committer
* @throws Exception any unexpected failure.
*/
protected void expectJobCommitToFail(JobContext jContext,
AbstractS3ACommitter committer) throws Exception {
// next attempt will fail as there is no longer a directory to commit
expectJobCommitFailure(jContext, committer,
FileNotFoundException.class);
}
/**
* Expect a job commit operation to fail with a specific exception.
* @param jContext job context
* @param committer committer
* @param clazz class of exception
* @param <E> type of exception expected
* @return the caught exception
* @throws Exception any unexpected failure.
*/
protected static <E extends IOException> E expectJobCommitFailure(
JobContext jContext,
AbstractS3ACommitter committer,
Class<E> clazz)
throws Exception {
return intercept(clazz,
() -> {
committer.commitJob(jContext);
// only reached if commitJob did NOT throw: fails the intercept
return committer.toString();
});
}
/**
* Expect a task commit to fail with a {@link FileNotFoundException}.
* @param committer committer
* @param tContext task context
* @throws Exception any unexpected failure
*/
protected static void expectFNFEonTaskCommit(
AbstractS3ACommitter committer,
TaskAttemptContext tContext) throws Exception {
intercept(FileNotFoundException.class,
() -> {
committer.commitTask(tContext);
// only reached if commitTask did NOT throw: fails the intercept
return committer.toString();
});
}
/**
* Commit a task which wrote no output: the commit must succeed and
* the task attempt path must be cleaned up.
* (Uses the failing factory, but no job commit is attempted here.)
*/
@Test
public void testCommitterWithNoOutputs() throws Exception {
describe("Have a task and job with no outputs: expect success");
JobData jobData = startJob(new FailingCommitterFactory(), false);
TaskAttemptContext tContext = jobData.tContext;
AbstractS3ACommitter committer = jobData.committer;
// do commit
committer.commitTask(tContext);
assertTaskAttemptPathDoesNotExist(committer, tContext);
}
/**
* Arm the committer's fault injection for commitJob, then expect
* the next job commit to fail with the injected failure.
* @param jContext job context
* @param committer committer (must implement CommitterFaultInjection)
* @throws Exception any unexpected failure
*/
protected static void expectSimulatedFailureOnJobCommit(JobContext jContext,
AbstractS3ACommitter committer) throws Exception {
((CommitterFaultInjection) committer).setFaults(
CommitterFaultInjection.Faults.commitJob);
expectJobCommitFailure(jContext, committer,
CommitterFaultInjectionImpl.Failure.class);
}
/**
* Commit a MapFileOutputFormat job and verify the committed directory
* structure, then check that MapFileOutputFormat.getReaders and the
* local getReaders helper find exactly one reader, ignoring hidden files.
*/
@Test
public void testMapFileOutputCommitter() throws Exception {
describe("Test that the committer generates map output into a directory\n" +
"starting with the prefix part-");
JobData jobData = startJob(false);
JobContext jContext = jobData.jContext;
TaskAttemptContext tContext = jobData.tContext;
AbstractS3ACommitter committer = jobData.committer;
Configuration conf = jobData.conf;
// write output
writeMapFileOutput(new MapFileOutputFormat().getRecordWriter(tContext),
tContext);
// do commit
commit(committer, jContext, tContext);
S3AFileSystem fs = getFileSystem();
waitForConsistency();
// recursive listing, for diagnostics on failure
lsR(fs, outDir, true);
String ls = ls(outDir);
describe("\nvalidating");
// validate output
verifySuccessMarker(outDir);
describe("validate output of %s", outDir);
validateMapFileOutputContent(fs, outDir);
// Ensure getReaders call works and also ignores
// hidden filenames (_ or . prefixes)
describe("listing");
FileStatus[] filtered = fs.listStatus(outDir, HIDDEN_FILE_FILTER);
assertEquals("listed children under " + ls,
1, filtered.length);
FileStatus fileStatus = filtered[0];
assertTrue("Not the part file: " + fileStatus,
fileStatus.getPath().getName().startsWith(PART_00000));
describe("getReaders()");
assertEquals("Number of MapFile.Reader entries with shared FS "
+ outDir + " : " + ls,
1, getReaders(fs, outDir, conf).length);
describe("getReaders(new FS)");
FileSystem fs2 = FileSystem.get(outDir.toUri(), conf);
assertEquals("Number of MapFile.Reader entries with shared FS2 "
+ outDir + " : " + ls,
1, getReaders(fs2, outDir, conf).length);
describe("MapFileOutputFormat.getReaders");
assertEquals("Number of MapFile.Reader entries with new FS in "
+ outDir + " : " + ls,
1, MapFileOutputFormat.getReaders(outDir, conf).length);
}
/**
 * Open the output generated by this format: one MapFile.Reader per
 * non-hidden child of the directory, in sorted order so that hash
 * partitioning works.
 * If opening any reader fails, all readers opened so far are closed
 * (best-effort) before the failure is rethrown, so none leak.
 * @param fs filesystem of the directory
 * @param dir directory of MapFile subdirectories
 * @param conf configuration for the readers
 * @return the open readers; callers are responsible for closing them
 * @throws IOException on a listing or open failure
 */
private static MapFile.Reader[] getReaders(FileSystem fs,
    Path dir,
    Configuration conf) throws IOException {
  Path[] names = FileUtil.stat2Paths(fs.listStatus(dir, HIDDEN_FILE_FILTER));
  // sort names, so that hash partitioning works
  Arrays.sort(names);
  MapFile.Reader[] parts = new MapFile.Reader[names.length];
  for (int i = 0; i < names.length; i++) {
    try {
      parts[i] = new MapFile.Reader(names[i], conf);
    } catch (IOException e) {
      // close whatever was already opened; propagate the original failure
      for (int j = 0; j < i; j++) {
        try {
          parts[j].close();
        } catch (IOException ignored) {
          // best-effort cleanup; the original exception is what matters
        }
      }
      throw e;
    }
  }
  return parts;
}
/**
* A functional interface which an action to test must implement;
* invoked by {@code executeWork} with the full job 4-tuple.
*/
@FunctionalInterface
public interface ActionToTest {
void exec(Job job, JobContext jContext, TaskAttemptContext tContext,
AbstractS3ACommitter committer) throws Exception;
}
/** Aborting a task which did no work must succeed. */
@Test
public void testAbortTaskNoWorkDone() throws Exception {
executeWork("abort task no work",
(job, jContext, tContext, committer) ->
committer.abortTask(tContext));
}
/** Aborting a job which did no work must succeed. */
@Test
public void testAbortJobNoWorkDone() throws Exception {
  // work name corrected: this test aborts the job, not the task
  executeWork("abort job no work",
      (job, jContext, tContext, committer) ->
          committer.abortJob(jContext, JobStatus.State.RUNNING));
}
/**
* Commit the job while a task's work is still pending; expect the
* task's writes to be cancelled and no output to become visible.
*/
@Test
public void testCommitJobButNotTask() throws Exception {
executeWork("commit a job while a task's work is pending, " +
"expect task writes to be cancelled.",
(job, jContext, tContext, committer) -> {
// step 1: write the text
writeTextOutput(tContext);
// step 2: commit the job
// NOTE(review): commitJob is given the task context — presumably
// valid because TaskAttemptContext is also a JobContext; confirm.
createCommitter(tContext).commitJob(tContext);
// verify that no output can be observed
assertPart0000DoesNotExist(outDir);
// that includes, no pending MPUs; commitJob is expected to
// cancel any.
assertNoMultipartUploadsPending(outDir);
}
);
}
/**
* Abort the task and then the job; no part file may remain in the
* work path and the abort must clean everything up.
*/
@Test
public void testAbortTaskThenJob() throws Exception {
JobData jobData = startJob(true);
AbstractS3ACommitter committer = jobData.committer;
// do abort
committer.abortTask(jobData.tContext);
// the task's work must be gone from its working directory
intercept(FileNotFoundException.class, "",
() -> getPart0000(committer.getWorkPath()));
committer.abortJob(jobData.jContext, JobStatus.State.FAILED);
assertJobAbortCleanedUp(jobData);
verifyCommitterHasNoThreads(committer);
}
/**
* Extension point: assert that the job was all cleaned up after an abort.
* Base assertions
* <ul>
* <li>Output dir is absent or, if present, empty</li>
* <li>No pending MPUs to/under the output dir</li>
* </ul>
* @param jobData job data
* @throws Exception failure
*/
public void assertJobAbortCleanedUp(JobData jobData) throws Exception {
// special handling of magic directory; harmless in staging
S3AFileSystem fs = getFileSystem();
try {
FileStatus[] children = listChildren(fs, outDir);
if (children.length != 0) {
// dump the listing before failing the assert below
lsR(fs, outDir, true);
}
assertArrayEquals("Output directory not empty " + ls(outDir),
new FileStatus[0], children);
} catch (FileNotFoundException e) {
// this is a valid failure mode; it means the dest dir doesn't exist yet.
}
assertNoMultipartUploadsPending(outDir);
}
/**
* Abort the task, then the job (failed), then abort the job again:
* abort must be idempotent and leave no pending uploads or attempt dirs.
*/
@Test
public void testFailAbort() throws Exception {
describe("Abort the task, then job (failed), abort the job again");
JobData jobData = startJob(true);
JobContext jContext = jobData.jContext;
TaskAttemptContext tContext = jobData.tContext;
AbstractS3ACommitter committer = jobData.committer;
// do abort
committer.abortTask(tContext);
// path probes only: results unused, verifies the calls don't fail
committer.getJobAttemptPath(jContext);
committer.getTaskAttemptPath(tContext);
assertPart0000DoesNotExist(outDir);
assertSuccessMarkerDoesNotExist(outDir);
describe("Aborting job into %s", outDir);
committer.abortJob(jContext, JobStatus.State.FAILED);
assertTaskAttemptPathDoesNotExist(committer, tContext);
assertJobAttemptPathDoesNotExist(committer, jContext);
// try again; expect abort to be idempotent.
committer.abortJob(jContext, JobStatus.State.FAILED);
assertNoMultipartUploadsPending(outDir);
verifyCommitterHasNoThreads(committer);
}
/**
 * Assert that no part-0000 output file exists under {@code dir}:
 * both {@code getPart0000()} must raise {@link FileNotFoundException}
 * and a direct probe of the expected path must fail.
 * @param dir destination directory to probe
 * @throws Exception any other failure
 */
public void assertPart0000DoesNotExist(Path dir) throws Exception {
intercept(FileNotFoundException.class,
() -> getPart0000(dir));
assertPathDoesNotExist("expected output file", new Path(dir, PART_00000));
}
/**
 * Abort a job whose task wrote output but never committed:
 * the job-level abort must clean up attempt paths and any
 * pending multipart uploads on its own.
 * @throws Exception failure
 */
@Test
public void testAbortJobNotTask() throws Exception {
executeWork("abort task no work",
(job, jContext, tContext, committer) -> {
// write output
writeTextOutput(tContext);
// Abort the whole job while the task output is still uncommitted.
committer.abortJob(jContext, JobStatus.State.RUNNING);
assertTaskAttemptPathDoesNotExist(
committer, tContext);
assertJobAttemptPathDoesNotExist(
committer, jContext);
assertNoMultipartUploadsPending(outDir);
});
}
/**
 * This looks at what happens with concurrent commits.
 * However, the failure condition it looks for (subdir under subdir)
 * is the kind of failure you see on a rename-based commit.
 *
 * What it will not detect is the fact that both tasks will each commit
 * to the destination directory. That is: whichever commits last wins.
 *
 * There's no way to stop this. Instead it is a requirement that the task
 * commit operation is only executed when the committer is happy to
 * commit only those tasks which it knows have succeeded, and abort those
 * which have not.
 * @throws Exception failure
 */
@Test
public void testConcurrentCommitTaskWithSubDir() throws Exception {
Job job = newJob();
FileOutputFormat.setOutputPath(job, outDir);
final Configuration conf = job.getConfiguration();
final JobContext jContext =
new JobContextImpl(conf, taskAttempt0.getJobID());
AbstractS3ACommitter amCommitter = createCommitter(
new TaskAttemptContextImpl(conf, taskAttempt0));
amCommitter.setupJob(jContext);
// Two task attempts, each with an output format that redirects its
// work file into a SUB_DIR child of the committer's work path.
final TaskAttemptContext[] taCtx = new TaskAttemptContextImpl[2];
taCtx[0] = new TaskAttemptContextImpl(conf, taskAttempt0);
taCtx[1] = new TaskAttemptContextImpl(conf, taskAttempt1);
final TextOutputFormat[] tof = new LoggingTextOutputFormat[2];
for (int i = 0; i < tof.length; i++) {
tof[i] = new LoggingTextOutputFormat() {
@Override
public Path getDefaultWorkFile(
TaskAttemptContext context,
String extension) throws IOException {
final AbstractS3ACommitter foc = (AbstractS3ACommitter)
getOutputCommitter(context);
return new Path(new Path(foc.getWorkPath(), SUB_DIR),
getUniqueFile(context, getOutputName(context), extension));
}
};
}
final ExecutorService executor = HadoopExecutors.newFixedThreadPool(2);
try {
for (int i = 0; i < taCtx.length; i++) {
final int taskIdx = i;
// NOTE(review): the Futures returned by submit() are never checked,
// so an exception inside a task would be silently dropped and only
// surface via the later assertions — confirm this is acceptable.
executor.submit(() -> {
final OutputCommitter outputCommitter =
tof[taskIdx].getOutputCommitter(taCtx[taskIdx]);
outputCommitter.setupTask(taCtx[taskIdx]);
final RecordWriter rw =
tof[taskIdx].getRecordWriter(taCtx[taskIdx]);
writeOutput(rw, taCtx[taskIdx]);
describe("Committing Task %d", taskIdx);
outputCommitter.commitTask(taCtx[taskIdx]);
return null;
});
}
} finally {
// Wait for both tasks to finish before committing the job.
executor.shutdown();
while (!executor.awaitTermination(1, TimeUnit.SECONDS)) {
log().info("Awaiting thread termination!");
}
}
// if we commit here then all tasks will be committed, so there will
// be contention for that final directory: both parts will go in.
describe("\nCommitting Job");
amCommitter.commitJob(jContext);
assertPathExists("base output directory", outDir);
// Output must land in SUB_DIR, not at the top level.
assertPart0000DoesNotExist(outDir);
Path outSubDir = new Path(outDir, SUB_DIR);
// The rename-based failure mode would produce sub_dir/sub_dir.
assertPathDoesNotExist("Must not end up with sub_dir/sub_dir",
new Path(outSubDir, SUB_DIR));
// validate output
// There's no success marker in the subdirectory
validateContent(outSubDir, false);
}
/**
 * Create a committer which fails; the class
 * {@link CommitterFaultInjectionImpl} implements the logic.
 * Implemented by committer-specific subclasses of this test suite.
 * @param tContext task context
 * @return committer instance
 * @throws IOException failure to instantiate
 */
protected abstract AbstractS3ACommitter createFailingCommitter(
TaskAttemptContext tContext) throws IOException;
/**
 * Factory for failing committers.
 * Delegates to {@link #createFailingCommitter(TaskAttemptContext)} so each
 * subclass supplies its own fault-injecting implementation.
 */
public class FailingCommitterFactory implements CommitterFactory {
@Override
public AbstractS3ACommitter createCommitter(TaskAttemptContext context)
throws IOException {
return createFailingCommitter(context);
}
}
/**
 * Full integration through the output format API: instantiate the
 * committer via the output format, write a record, then commit the
 * task and job and verify the success marker.
 * @throws Throwable failure
 */
@Test
public void testOutputFormatIntegration() throws Throwable {
Configuration conf = getConfiguration();
Job job = newJob();
job.setOutputFormatClass(LoggingTextOutputFormat.class);
conf = job.getConfiguration();
conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt0);
conf.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID, 1);
JobContext jContext = new JobContextImpl(conf, taskAttempt0.getJobID());
TaskAttemptContext tContext = new TaskAttemptContextImpl(conf,
taskAttempt0);
// Instantiate the output format the way the MR framework would.
LoggingTextOutputFormat outputFormat = (LoggingTextOutputFormat)
ReflectionUtils.newInstance(tContext.getOutputFormatClass(), conf);
AbstractS3ACommitter committer = (AbstractS3ACommitter)
outputFormat.getOutputCommitter(tContext);
// setup
JobData jobData = new JobData(job, jContext, tContext, committer);
setup(jobData);
abortInTeardown(jobData);
LoggingTextOutputFormat.LoggingLineRecordWriter recordWriter
= outputFormat.getRecordWriter(tContext);
IntWritable iw = new IntWritable(1);
recordWriter.write(iw, iw);
Path dest = recordWriter.getDest();
// Committer-specific checks on where the file lives mid-write.
validateTaskAttemptPathDuringWrite(dest);
recordWriter.close(tContext);
// at this point
validateTaskAttemptPathAfterWrite(dest);
assertTrue("Committer does not have data to commit " + committer,
committer.needsTaskCommit(tContext));
commitTask(committer, tContext);
commitJob(committer, jContext);
// validate output
verifySuccessMarker(outDir);
}
/**
 * Create a committer through reflection then use it to abort
 * a task. This mimics the action of an AM when a container fails and
 * the AM wants to abort the task attempt.
 */
@Test
public void testAMWorkflow() throws Throwable {
describe("Create a committer with a null output path & use as an AM");
JobData jobData = startJob(true);
JobContext jContext = jobData.jContext;
TaskAttemptContext tContext = jobData.tContext;
// Build a fresh attempt context the way an AM would, from the job config.
TaskAttemptContext newAttempt = taskAttemptForJob(
MRBuilderUtils.newJobId(1, 1, 1), jContext);
Configuration conf = jContext.getConfiguration();
// bind
LoggingTextOutputFormat.bind(conf);
OutputFormat<?, ?> outputFormat
= ReflectionUtils.newInstance(newAttempt
.getOutputFormatClass(), conf);
// NOTE(review): the describe() text mentions a "null output path" but the
// assertion below requires a non-null one — the path is expected to come
// from the configuration rather than the constructor; confirm wording.
Path outputPath = FileOutputFormat.getOutputPath(newAttempt);
assertNotNull("null output path in new task attempt", outputPath);
AbstractS3ACommitter committer2 = (AbstractS3ACommitter)
outputFormat.getOutputCommitter(newAttempt);
// Abort the ORIGINAL task attempt through the new committer instance.
committer2.abortTask(tContext);
verifyCommitterHasNoThreads(committer2);
assertNoMultipartUploadsPending(getOutDir());
}
/**
 * Run two jobs writing to adjacent destination paths in parallel;
 * commit their tasks then jobs in interleaved order and verify that
 * each job's output and pending uploads are isolated from the other's.
 * @throws Throwable failure
 */
@Test
public void testParallelJobsToAdjacentPaths() throws Throwable {
describe("Run two jobs in parallel, assert they both complete");
JobData jobData = startJob(true);
Job job1 = jobData.job;
AbstractS3ACommitter committer1 = jobData.committer;
JobContext jContext1 = jobData.jContext;
TaskAttemptContext tContext1 = jobData.tContext;
// now build up a second job
String jobId2 = randomJobId();
String attempt20 = "attempt_" + jobId2 + "_m_000000_0";
TaskAttemptID taskAttempt20 = TaskAttemptID.forName(attempt20);
String attempt21 = "attempt_" + jobId2 + "_m_000001_0";
TaskAttemptID taskAttempt21 = TaskAttemptID.forName(attempt21);
Path job1Dest = outDir;
// Sibling of the first destination: adjacent, never nested.
Path job2Dest = new Path(getOutDir().getParent(),
getMethodName() + "job2Dest");
// little safety check
assertNotEquals(job1Dest, job2Dest);
// create the second job
Job job2 = newJob(job2Dest, new JobConf(getConfiguration()), attempt20);
Configuration conf2 = job2.getConfiguration();
conf2.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID, 1);
try {
JobContext jContext2 = new JobContextImpl(conf2,
taskAttempt20.getJobID());
TaskAttemptContext tContext2 =
new TaskAttemptContextImpl(conf2, taskAttempt20);
AbstractS3ACommitter committer2 = createCommitter(job2Dest, tContext2);
JobData jobData2 = new JobData(job2, jContext2, tContext2, committer2);
setup(jobData2);
abortInTeardown(jobData2);
// make sure the directories are different
assertEquals(job2Dest, committer2.getOutputPath());
// job2 setup, write some data there
writeTextOutput(tContext2);
// at this point, job1 and job2 both have uncommitted tasks
// commit tasks in order task 2, task 1.
commitTask(committer2, tContext2);
commitTask(committer1, tContext1);
// Task commits leave the uploads pending until job commit.
assertMultipartUploadsPending(job1Dest);
assertMultipartUploadsPending(job2Dest);
// commit jobs in order job 1, job 2
commitJob(committer1, jContext1);
assertNoMultipartUploadsPending(job1Dest);
getPart0000(job1Dest);
// Committing job 1 must not touch job 2's pending uploads.
assertMultipartUploadsPending(job2Dest);
commitJob(committer2, jContext2);
getPart0000(job2Dest);
assertNoMultipartUploadsPending(job2Dest);
} finally {
// uncommitted files to this path need to be deleted in tests which fail
abortMultipartUploadsUnderPath(job2Dest);
}
}
/**
 * Verify that {@link S3ACommitterFactory} is bound to this suite's
 * committer: the factory must produce a committer of the same class
 * as {@code createCommitter(outDir, tContext)}.
 * @throws Throwable failure
 */
@Test
public void testS3ACommitterFactoryBinding() throws Throwable {
describe("Verify that the committer factory returns this "
+ "committer when configured to do so");
Job job = newJob();
FileOutputFormat.setOutputPath(job, outDir);
Configuration conf = job.getConfiguration();
conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt0);
conf.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID, 1);
TaskAttemptContext tContext = new TaskAttemptContextImpl(conf,
taskAttempt0);
S3ACommitterFactory factory = new S3ACommitterFactory();
// Compare classes, not instances: only the binding matters here.
assertEquals("Wrong committer from factory",
createCommitter(outDir, tContext).getClass(),
factory.createOutputCommitter(outDir, tContext).getClass());
}
/**
 * Validate the path of a file being written to during the write
 * itself. No-op by default; committer-specific subclasses override
 * this to check where their in-flight data lives.
 * @param p path
 * @throws IOException IO failure
 */
protected void validateTaskAttemptPathDuringWrite(Path p) throws IOException {
}
/**
 * Validate the path of a file being written to after the write
 * operation has completed. No-op by default; committer-specific
 * subclasses override this.
 * @param p path
 * @throws IOException IO failure
 */
protected void validateTaskAttemptPathAfterWrite(Path p) throws IOException {
}
/**
 * Perform any actions needed to validate the working directory of
 * a committer.
 * For example: filesystem, path attributes
 * No-op by default; committer-specific subclasses override this.
 * @param committer committer instance
 * @param context task attempt context
 * @throws IOException IO failure
 */
protected void validateTaskAttemptWorkingDirectory(
AbstractS3ACommitter committer,
TaskAttemptContext context) throws IOException {
}
/**
 * Commit a task then validate the state of the committer afterwards:
 * any thread pool it spun up must have been shut down.
 * @param committer committer
 * @param tContext task context
 * @throws IOException IO failure
 */
protected void commitTask(final AbstractS3ACommitter committer,
final TaskAttemptContext tContext) throws IOException {
committer.commitTask(tContext);
verifyCommitterHasNoThreads(committer);
}
/**
 * Commit a job then validate the state of the committer afterwards:
 * any thread pool it spun up must have been shut down.
 * @param committer committer
 * @param jContext job context
 * @throws IOException IO failure
 */
protected void commitJob(final AbstractS3ACommitter committer,
final JobContext jContext) throws IOException {
committer.commitJob(jContext);
verifyCommitterHasNoThreads(committer);
}
/**
 * Verify that the committer does not have a thread pool.
 * Called after task/job commit and abort to catch leaked executors.
 * @param committer committer to validate.
 */
protected void verifyCommitterHasNoThreads(AbstractS3ACommitter committer) {
assertFalse("Committer has an active thread pool",
committer.hasThreadPool());
}
}
| |
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.remote.services.rest;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import javax.enterprise.context.RequestScoped;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import org.jbpm.process.audit.NodeInstanceLog;
import org.jbpm.process.audit.ProcessInstanceLog;
import org.jbpm.process.audit.VariableInstanceLog;
import org.kie.api.runtime.process.ProcessInstance;
import org.kie.remote.services.rest.exception.KieRemoteRestOperationException;
import org.kie.services.client.serialization.jaxb.impl.audit.JaxbHistoryLogList;
import org.kie.services.client.serialization.jaxb.impl.audit.JaxbProcessInstanceLog;
import org.kie.services.client.serialization.jaxb.rest.JaxbGenericResponse;
/**
 * This resource is responsible for direct, simple access to the history information.
 *
 * For complex queries, see the {@link QueryResourceImpl}
 */
@Path("/history")
@RequestScoped
@SuppressWarnings("unchecked")
public class HistoryResourceImpl extends ResourceBase {

    /* REST information */
    @Context
    private HttpHeaders headers;

    // Rest methods --------------------------------------------------------------------------------------------------------------

    /**
     * Lists process instance logs, optionally restricted to active instances
     * via the {@code activeProcesses} query parameter.
     * @return a paginated, deterministically sorted list of process instance logs
     */
    @GET
    @Path("/instances")
    public Response getProcessInstanceLogs() {
        String oper = getRelativePath();
        Map<String, String []> params = getRequestParams();
        String activeProcessesStr = getStringParam("activeProcesses", false, params, oper);
        boolean activeProcesses = Boolean.parseBoolean(activeProcessesStr);
        List<ProcessInstanceLog> procInstLogResults;
        if( activeProcesses ) {
            procInstLogResults = getAuditLogService().findActiveProcessInstances();
        } else {
            procInstLogResults = getAuditLogService().findProcessInstances();
        }
        // Deterministic ordering (external id, process id, instance id) before pagination.
        sortProcessInstanceLogs(procInstLogResults);
        List<Object> results = new ArrayList<Object>(procInstLogResults);
        JaxbHistoryLogList resultList = paginateAndCreateResult(params, oper, results, new JaxbHistoryLogList());
        return createCorrectVariant(resultList, headers);
    }

    /**
     * Returns the log of a single process instance.
     * @param procInstId the process instance id
     * @return the instance log wrapped for JAXB serialization
     */
    @GET
    @Path("/instance/{procInstId: [0-9]+}")
    public Response getProcessInstanceLog(@PathParam("procInstId") long procInstId ) {
        // NOTE(review): findProcessInstance may return null for an unknown id;
        // the null is currently passed straight into the JAXB wrapper — confirm
        // that downstream handling (or the wrapper itself) tolerates this.
        ProcessInstanceLog procInstLog = getAuditLogService().findProcessInstance(procInstId);
        JaxbProcessInstanceLog jaxbProcLog = new JaxbProcessInstanceLog(procInstLog);
        return createCorrectVariant(jaxbProcLog, headers);
    }

    /**
     * Lists child-process, node, or variable logs of a process instance,
     * depending on the {@code type} path segment ("child", "node" or "variable").
     * @param instId the process instance id
     * @param logType which log family to return
     * @return paginated, sorted logs of the requested type
     * @throws KieRemoteRestOperationException if the type is unsupported
     */
    @GET
    @Path("/instance/{procInstId: [0-9]+}/{type: [a-zA-Z]+}")
    public Response getInstanceLogsByProcInstId( @PathParam("procInstId") long instId, @PathParam("type") String logType) {
        Map<String, String []> params = getRequestParams();
        String oper = getRelativePath();
        List<? extends Object> varInstLogList;
        if ("child".equalsIgnoreCase(logType)) {
            Object result = getAuditLogService().findSubProcessInstances(instId);
            varInstLogList = (List<Object>) result;
            sortProcessInstanceLogs((List<ProcessInstanceLog>) varInstLogList);
        } else if ("node".equalsIgnoreCase(logType)) {
            Object result = getAuditLogService().findNodeInstances(instId);
            varInstLogList = (List<Object>) result;
            sortNodeInstanceLogs((List<NodeInstanceLog>) varInstLogList);
        } else if ("variable".equalsIgnoreCase(logType)) {
            Object result = getAuditLogService().findVariableInstances(instId);
            varInstLogList = (List<Object>) result;
            sortVariableInstanceLogs((List<VariableInstanceLog>) varInstLogList);
        } else {
            throw KieRemoteRestOperationException.badRequest("Unsupported operation: " + oper );
        }
        JaxbHistoryLogList resultList = paginateAndCreateResult(params, oper, (List<Object>) varInstLogList, new JaxbHistoryLogList());
        return createCorrectVariant(resultList, headers);
    }

    /**
     * Lists node or variable logs of a process instance filtered by a
     * specific (node or variable) log id.
     * @param procInstId the process instance id
     * @param operation "node" or "variable"
     * @param logId the node id or variable id to filter on
     * @return paginated, sorted logs of the requested type
     * @throws KieRemoteRestOperationException if the type is unsupported
     */
    @GET
    @Path("/instance/{procInstId: [0-9]+}/{type: [a-zA-Z]+}/{logId: [a-zA-Z0-9-:\\._]+}")
    public Response getInstanceLogsByProcInstIdByLogId(@PathParam("procInstId") long procInstId, @PathParam("type") String operation, @PathParam("logId") String logId) {
        Map<String, String []> params = getRequestParams();
        String oper = getRelativePath();
        List<? extends Object> varInstLogList;
        if ("node".equalsIgnoreCase(operation)) {
            Object result = getAuditLogService().findNodeInstances(procInstId, logId);
            varInstLogList = (List<Object>) result;
            sortNodeInstanceLogs((List<NodeInstanceLog>) varInstLogList);
        } else if ("variable".equalsIgnoreCase(operation)) {
            Object result = getAuditLogService().findVariableInstances(procInstId, logId);
            varInstLogList = (List<Object>) result;
            sortVariableInstanceLogs((List<VariableInstanceLog>) varInstLogList);
        } else {
            throw KieRemoteRestOperationException.badRequest("Unsupported operation: " + oper );
        }
        JaxbHistoryLogList resultList = paginateAndCreateResult(params, oper, (List<Object>) varInstLogList, new JaxbHistoryLogList());
        return createCorrectVariant(resultList, headers);
    }

    /**
     * Lists process instance logs for a process definition id, optionally
     * filtered by the {@code status} query parameter.
     * @param processId the process definition id
     * @return paginated, sorted process instance logs
     */
    @GET
    @Path("/process/{processDefId: [a-zA-Z0-9-:\\._]+}")
    public Response getProcessInstanceLogsByProcessId(@PathParam("processDefId") String processId) {
        Map<String, String []> params = getRequestParams();
        Number statusParam = getNumberParam("status", false, params, getRelativePath(), false);
        String oper = getRelativePath();
        int[] pageInfo = getPageNumAndPageSize(params, oper);
        Object result;
        if (statusParam != null) {
            if (statusParam.intValue() == ProcessInstance.STATE_ACTIVE) {
                result = getAuditLogService().findActiveProcessInstances(processId);
            } else {
                result = getAuditLogService().findProcessInstances(processId);
            }
        } else {
            result = getAuditLogService().findProcessInstances(processId);
        }
        List<ProcessInstanceLog> procInstLogList = (List<ProcessInstanceLog>) result;
        List<ProcessInstanceLog> filteredProcLogList = procInstLogList;
        // BUG FIX: statusParam is a Number whose concrete type depends on the
        // parser; Number.equals(Integer) is false across boxed types (e.g.
        // Long(1) != Integer(1)), so the old `!statusParam.equals(...)` check
        // could run this filter even for the ACTIVE status, capping results
        // unexpectedly. Compare primitive values instead.
        if (statusParam != null && statusParam.intValue() != ProcessInstance.STATE_ACTIVE) {
            // Keep only instances with the requested status, stopping once a
            // full page of results has been gathered.
            filteredProcLogList = new ArrayList<ProcessInstanceLog>();
            for (int i = 0;
                    i < procInstLogList.size() && filteredProcLogList.size() < getMaxNumResultsNeeded(pageInfo);
                    ++i) {
                ProcessInstanceLog procLog = procInstLogList.get(i);
                if (procLog.getStatus().equals(statusParam.intValue())) {
                    filteredProcLogList.add(procLog);
                }
            }
        }
        sortProcessInstanceLogs(filteredProcLogList);
        List<Object> results = new ArrayList<Object>(filteredProcLogList);
        JaxbHistoryLogList resultList = paginateAndCreateResult(pageInfo, results, new JaxbHistoryLogList());
        return createCorrectVariant(resultList, headers);
    }

    /**
     * Lists variable instance logs for a variable id.
     * @param variableId the variable id
     * @return paginated, sorted variable instance logs
     */
    @GET
    @Path("/variable/{varId: [a-zA-Z0-9-:\\._]+}")
    public Response getVariableInstanceLogsByVariableId(@PathParam("varId") String variableId) {
        Map<String, String []> params = getRequestParams();
        String oper = getRelativePath();
        List<VariableInstanceLog> varInstLogList = internalGetVariableInstancesByVarAndValue(variableId, null, params, oper);
        sortVariableInstanceLogs(varInstLogList);
        List<Object> results = new ArrayList<Object>(varInstLogList);
        JaxbHistoryLogList resultList = paginateAndCreateResult(params, oper, results, new JaxbHistoryLogList());
        return createCorrectVariant(resultList, headers);
    }

    /**
     * Lists variable instance logs for a variable id restricted to a value.
     * @param variableId the variable id
     * @param value the variable value to match
     * @return paginated, sorted variable instance logs
     */
    @GET
    @Path("/variable/{varId: [a-zA-Z0-9-:\\._]+}/value/{value: [a-zA-Z0-9-:\\._]+}")
    public Response getVariableInstanceLogsByVariableIdByVariableValue(@PathParam("varId") String variableId, @PathParam("value") String value) {
        Map<String, String []> params = getRequestParams();
        String oper = getRelativePath();
        List<VariableInstanceLog> varInstLogList = internalGetVariableInstancesByVarAndValue(variableId, value, params, oper);
        sortVariableInstanceLogs(varInstLogList);
        List<Object> results = new ArrayList<Object>(varInstLogList);
        JaxbHistoryLogList resultList = paginateAndCreateResult(params, oper, results, new JaxbHistoryLogList());
        return createCorrectVariant(resultList, headers);
    }

    /**
     * Lists the process instance logs of all instances owning a variable
     * with the given id.
     * @param variableId the variable id
     * @return paginated process instance logs
     */
    @GET
    @Path("/variable/{varId: [a-zA-Z0-9-:\\._]+}/instances")
    public Response getProcessInstanceLogsByVariableId(@PathParam("varId") String variableId) {
        Map<String, String[]> params = getRequestParams();
        String oper = getRelativePath();
        // get variables
        List<VariableInstanceLog> varLogList = internalGetVariableInstancesByVarAndValue(variableId, null, params, oper);
        // get process instance logs
        int [] pageInfo = getPageNumAndPageSize(params, oper);
        int maxNumResults = getMaxNumResultsNeeded(pageInfo);
        List<ProcessInstanceLog> procInstLogList = getProcessInstanceLogsByVariable(varLogList, maxNumResults);
        // paginate
        List<Object> results = new ArrayList<Object>(procInstLogList);
        JaxbHistoryLogList resultList = paginateAndCreateResult(pageInfo, results, new JaxbHistoryLogList());
        return createCorrectVariant(resultList, headers);
    }

    /**
     * Lists the process instance logs of all instances owning a variable
     * with the given id and value.
     * @param variableId the variable id
     * @param value the variable value to match
     * @return paginated process instance logs
     */
    @GET
    @Path("/variable/{varId: [a-zA-Z0-9-:\\.]+}/value/{value: [a-zA-Z0-9-:\\._]+}/instances")
    public Response getProcessInstanceLogsByVariableIdByVariableValue(@PathParam("varId") String variableId, @PathParam("value") String value) {
        Map<String, String[]> params = getRequestParams();
        String oper = getRelativePath();
        // get variables
        List<VariableInstanceLog> varLogList = internalGetVariableInstancesByVarAndValue(variableId, value, params, oper);
        // get process instance logs
        int [] pageInfo = getPageNumAndPageSize(params, oper);
        int maxNumResults = getMaxNumResultsNeeded(pageInfo);
        List<ProcessInstanceLog> procInstLogList = getProcessInstanceLogsByVariable(varLogList, maxNumResults);
        List<Object> results = new ArrayList<Object>(procInstLogList);
        JaxbHistoryLogList resultList = paginateAndCreateResult(pageInfo, results, new JaxbHistoryLogList());
        return createCorrectVariant(resultList, headers);
    }

    /**
     * Clears ALL audit history. Destructive; exposed as a POST endpoint.
     * @return a generic confirmation response
     */
    @POST
    @Path("/clear")
    public Response clear() {
        getAuditLogService().clear();
        return createCorrectVariant(new JaxbGenericResponse(getRequestUri()), headers);
    }

    // Helper methods --------------------------------------------------------------------------------------------------------------

    /**
     * Fetches variable instance logs by variable id and (optionally) value,
     * honouring the {@code activeProcesses} query parameter.
     * @param varId variable id
     * @param value variable value, or null to match any value
     * @param params request query parameters
     * @param oper relative path, used for error reporting by the param helpers
     * @return the matching variable instance logs
     */
    private List<VariableInstanceLog> internalGetVariableInstancesByVarAndValue(String varId, String value,
            Map<String, String[]> params, String oper) {
        // active processes parameter
        String activeProcsParam = getStringParam("activeProcesses", false, params, oper);
        boolean onlyActiveProcesses = false;
        if( activeProcsParam != null ) {
            onlyActiveProcesses = Boolean.parseBoolean(activeProcsParam);
        }
        Object result;
        if( value == null ) {
            result = getAuditLogService().findVariableInstancesByName(varId, onlyActiveProcesses);
        } else {
            result = getAuditLogService().findVariableInstancesByNameAndValue(varId, value, onlyActiveProcesses);
        }
        return (List<VariableInstanceLog>) result;
    }

    /**
     * Resolves the process instance log for each variable log, up to
     * {@code maxNumResults} resolved instances; unknown instances are skipped.
     * @param varLogList variable logs to resolve
     * @param maxNumResults cap on the number of returned instance logs
     * @return the resolved process instance logs
     */
    private List<ProcessInstanceLog> getProcessInstanceLogsByVariable(List<VariableInstanceLog> varLogList, int maxNumResults) {
        int numVarLogs = varLogList.size();
        int numProcInsts = 0;
        List<ProcessInstanceLog> resultList = new ArrayList<ProcessInstanceLog>();
        for( int i = 0; i < numVarLogs && numProcInsts < maxNumResults; ++i ) {
            long procInstId = varLogList.get(i).getProcessInstanceId();
            ProcessInstanceLog procInstlog = getAuditLogService().findProcessInstance(procInstId);
            if( procInstlog != null ) {
                resultList.add(procInstlog);
                ++numProcInsts;
            }
        }
        return resultList;
    }

    /**
     * Sorts process instance logs by (external id, process id, instance id)
     * for a deterministic response order.
     */
    private void sortProcessInstanceLogs(List<ProcessInstanceLog> procInstLogList) {
        Collections.sort(procInstLogList, new Comparator<ProcessInstanceLog>() {
            @Override
            public int compare( ProcessInstanceLog o1, ProcessInstanceLog o2 ) {
                if( ! o1.getExternalId().equals(o2.getExternalId()) ) {
                    return o1.getExternalId().compareTo(o2.getExternalId());
                }
                if( ! o1.getProcessId().equals(o2.getProcessId()) ) {
                    return o1.getProcessId().compareTo(o2.getProcessId());
                }
                return o1.getProcessInstanceId().compareTo(o2.getProcessInstanceId());
            }
        });
    }

    /**
     * Sorts node instance logs by (external id, process id, instance id,
     * node id, node instance id) for a deterministic response order.
     */
    private void sortNodeInstanceLogs(List<NodeInstanceLog> procInstLogList) {
        Collections.sort(procInstLogList, new Comparator<NodeInstanceLog>() {
            @Override
            public int compare( NodeInstanceLog o1, NodeInstanceLog o2 ) {
                if( ! o1.getExternalId().equals(o2.getExternalId()) ) {
                    return o1.getExternalId().compareTo(o2.getExternalId());
                }
                if( ! o1.getProcessId().equals(o2.getProcessId()) ) {
                    return o1.getProcessId().compareTo(o2.getProcessId());
                }
                if( ! o1.getProcessInstanceId().equals(o2.getProcessInstanceId()) ) {
                    return o1.getProcessInstanceId().compareTo(o2.getProcessInstanceId());
                }
                if( ! o1.getNodeId().equals(o2.getNodeId()) ) {
                    return o1.getNodeId().compareTo(o2.getNodeId());
                }
                return o1.getNodeInstanceId().compareTo(o2.getNodeInstanceId());
            }
        });
    }

    /**
     * Sorts variable instance logs by (external id, process id, instance id,
     * variable id, variable instance id) for a deterministic response order.
     */
    private void sortVariableInstanceLogs(List<VariableInstanceLog> varInstLogList ) {
        Collections.sort(varInstLogList, new Comparator<VariableInstanceLog>() {
            @Override
            public int compare( VariableInstanceLog o1, VariableInstanceLog o2 ) {
                if( ! o1.getExternalId().equals(o2.getExternalId()) ) {
                    return o1.getExternalId().compareTo(o2.getExternalId());
                }
                if( ! o1.getProcessId().equals(o2.getProcessId()) ) {
                    return o1.getProcessId().compareTo(o2.getProcessId());
                }
                if( ! o1.getProcessInstanceId().equals(o2.getProcessInstanceId()) ) {
                    return o1.getProcessInstanceId().compareTo(o2.getProcessInstanceId());
                }
                if( ! o1.getVariableId().equals(o2.getVariableId()) ) {
                    return o1.getVariableId().compareTo(o2.getVariableId());
                }
                return o1.getVariableInstanceId().compareTo(o2.getVariableInstanceId());
            }
        });
    }
}
| |
/*
* Copyright 2013 Alexey Andreev.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teavm.model.util;
import java.util.List;
import org.teavm.model.BasicBlock;
import org.teavm.model.InvokeDynamicInstruction;
import org.teavm.model.instructions.*;
/**
 * Visitor that records, for the instruction it last visited, which basic
 * blocks control may transfer to. After visiting a terminator instruction
 * {@link #getTargets()} yields its successor blocks (possibly an empty
 * array for exit/raise); after any non-terminator it yields {@code null}.
 *
 * @author Alexey Andreev
 */
public class InstructionTransitionExtractor implements InstructionVisitor {
    private BasicBlock[] targets;

    /**
     * Successors of the most recently visited instruction: {@code null} for
     * non-terminators, an empty array for exit/raise, otherwise the branch
     * targets in order.
     */
    public BasicBlock[] getTargets() {
        return targets;
    }

    /** Records that the visited instruction does not transfer control. */
    private void fallThrough() {
        targets = null;
    }

    @Override
    public void visit(EmptyInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(ClassConstantInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(NullConstantInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(IntegerConstantInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(LongConstantInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(FloatConstantInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(DoubleConstantInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(StringConstantInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(BinaryInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(NegateInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(AssignInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(BranchingInstruction insn) {
        // Conditional branch: taken edge first, fall-back edge second.
        targets = new BasicBlock[] { insn.getConsequent(), insn.getAlternative() };
    }

    @Override
    public void visit(BinaryBranchingInstruction insn) {
        targets = new BasicBlock[] { insn.getConsequent(), insn.getAlternative() };
    }

    @Override
    public void visit(JumpInstruction insn) {
        targets = new BasicBlock[] { insn.getTarget() };
    }

    @Override
    public void visit(SwitchInstruction insn) {
        // Table entries in order, with the default target appended last.
        List<SwitchTableEntry> entries = insn.getEntries();
        BasicBlock[] successors = new BasicBlock[entries.size() + 1];
        int index = 0;
        for (SwitchTableEntry entry : entries) {
            successors[index++] = entry.getTarget();
        }
        successors[index] = insn.getDefaultTarget();
        targets = successors;
    }

    @Override
    public void visit(ExitInstruction insn) {
        // Terminates the method: no successor blocks.
        targets = new BasicBlock[0];
    }

    @Override
    public void visit(RaiseInstruction insn) {
        // Throws: no normal successor blocks.
        targets = new BasicBlock[0];
    }

    @Override
    public void visit(ConstructArrayInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(ConstructInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(ConstructMultiArrayInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(GetFieldInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(PutFieldInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(GetElementInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(PutElementInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(InvokeInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(InvokeDynamicInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(IsInstanceInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(CastInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(CastNumberInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(CastIntegerInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(ArrayLengthInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(UnwrapArrayInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(CloneArrayInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(InitClassInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(NullCheckInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(MonitorEnterInstruction insn) {
        fallThrough();
    }

    @Override
    public void visit(MonitorExitInstruction insn) {
        fallThrough();
    }
}
| |
package hu.unideb.inf.Core;
import hu.unideb.inf.Dao.HighScore;
import hu.unideb.inf.Dao.HighScoreDAOImp;
import hu.unideb.inf.View.Effects;
import hu.unideb.inf.View.MediaController;
import hu.unideb.inf.View.ViewController;
import javafx.animation.*;
import javafx.application.Application;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.image.Image;
import javafx.scene.input.KeyCode;
import javafx.scene.layout.*;
import javafx.scene.paint.ImagePattern;
import javafx.stage.Stage;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Random;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.lang.Math.random;
/**
* This class is the Main class of the application.
* @author MJ
*/
public class Main extends Application {
/** {@link Logger} for logging.*/
private static Logger logger = LoggerFactory.getLogger( Main.class );
/** The player's ship, moved by {@code update()}. */
private Ship ship = new Ship();
/** Owns the labels and other UI nodes added to the scene graph. */
private ViewController viewController = new ViewController();
/** Supplies background/media assets. */
private MediaController mediaController = new MediaController();
/** Visual effect helpers (scaling, colors). */
private Effects effects = new Effects();
/** Width of the window. */
public static final int winWidth = 800;
/** Height of the window. */
public static final int winHeight= 600;
/** Score of the player. */
public static int score = 0;
/** Top-level pane holding the game pane plus all UI controls. */
private static Pane appRoot = new Pane();
/** Pane holding the moving game objects (ship, walls). */
private static Pane gameRoot = new Pane();
/** List of the pipes. */
public static final ArrayList<Wall> walls = new ArrayList<>();
/** List of the upper pipes only (see createContent) — presumably tracked
 * separately for scoring; TODO confirm against the scoring logic. */
public static final ArrayList<Wall> walls2 = new ArrayList<>();
/** The game is running or not. */
public static boolean running = false;
/** This ship is hit a pipe or not. */
public static boolean failGame = false;
/** Is it the options menu? */
public static boolean isOptions = false;
/** Is it the highscore menu? */
public static boolean isHighScore = false;
/** If true can play effects. */
public static boolean effectPlaying = false;
/** Ship sprite used below score 10. */
private Image img_g = new Image(getClass().getResourceAsStream("spaceShip.png"));
/** Ship sprite used from score 10 upward. */
private Image img_r = new Image(getClass().getResourceAsStream("spaceShip_r.png"));
/**
 * Add a highscore to the HighScores.xml.
 * @param player Name of the player.
 * @param score The reached score.
 */
public static void initData(String player, int score) {
    // Stamp the entry with the current local time, e.g. "2020-01-31 17:45".
    DateTimeFormatter stampFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm");
    String timestamp = LocalDateTime.now().format(stampFormat);
    // Build the record and hand it to the DAO layer for persistence.
    HighScore entry = new HighScore(player, String.valueOf(score), timestamp);
    new HighScoreDAOImp().addHighScore(entry);
}
/**
 * Builds the scene graph: sizes the game pane, generates 100 wall pairs
 * with a random vertical gap, and assembles the game pane plus all UI
 * controls under the application root.
 * @return the application root pane
 */
private Parent createContent(){
    gameRoot.setPrefSize(winWidth, winHeight);
    logger.debug("Creating walls...");
    // FIX: a single RNG for the whole layout. The original constructed a
    // fresh java.util.Random on every loop iteration and mixed in
    // Math.random() for the gap; one generator is cheaper and equivalent.
    Random rnd = new Random();
    for (int i = 0; i < 100; i++) {
        // Vertical gap between upper and lower wall: 100..199 px.
        int enter = rnd.nextInt(100) + 100;
        // Height of the upper wall; the lower wall gets the remainder
        // of the window height minus the gap.
        int height = rnd.nextInt(winHeight - enter);
        Wall wall = new Wall(height, 1);
        wall.setTranslateX(i * 350 + winHeight);
        wall.setTranslateY(0);
        walls.add(wall);
        // NOTE(review): only the upper wall is added to walls2 — presumably
        // so each wall pair is counted once (e.g. for scoring); confirm
        // before changing.
        walls2.add(wall);
        Wall wall2 = new Wall(winHeight - enter - height, 0);
        wall2.setTranslateX(i * 350 + winHeight);
        wall2.setTranslateY(height + enter);
        walls.add(wall2);
        gameRoot.getChildren().addAll(wall, wall2);
    }
    logger.debug("Walls are created.");
    appRoot.setBackground(mediaController.bg);
    gameRoot.getChildren().add(ship);
    appRoot.getChildren().addAll(gameRoot, viewController.descLabel, viewController.scoreLabel, viewController.failLabel,
        viewController.newGameLabel, viewController.highScoreLabel, viewController.optionsLabel,
        viewController.exitLabel, viewController.backLabel, viewController.onButton, viewController.soundText,
        viewController.leadBoardLabel, viewController.doneLabel, viewController.playerName,
        viewController.playerNameLabel, viewController.resumeLabel, viewController.tableView
    );
    return appRoot;
}
/**
 * Per-frame game-state update, driven by the {@code AnimationTimer} in
 * {@code start()}. While the game runs it applies gravity, moves the ship,
 * scrolls the camera and refreshes the score display; otherwise it shows
 * either the main menu or the game-over menu.
 */
private void update() {
    if (running) {
        viewController.scoreLabel.setVisible(true);
        // Gravity: accelerate downwards, capped at a terminal velocity of 5.
        if (ship.velocity.getY() < 5) {
            ship.velocity = ship.velocity.add(0, 1);
        }
        ship.moveX((int) ship.velocity.getX());
        ship.moveY((int) ship.velocity.getY());
        viewController.scoreLabel.setText("Score: " + score);
        // BUG FIX: a ChangeListener used to be re-registered on
        // ship.translateXProperty() on EVERY frame, leaking listeners.
        // Compute the camera scroll directly instead (same effect).
        int offset = (int) ship.getTranslateX();
        if (offset > 200) {
            gameRoot.setLayoutX(-(offset - 200));
        }
        // Ship skin depends on the score: red skin from 10 points up.
        if (score < 10) {
            ship.rect.setFill(new ImagePattern(img_g, 0, 0, 1, 1, true));
        } else {
            ship.rect.setFill(new ImagePattern(img_r, 0, 0, 1, 1, true));
        }
        // Celebrate every 10th point.
        if (score % 10 == 0 && score != 0) {
            // consistency fix: use the instance reference like the rest of
            // this method (was the static-style ViewController.scoreLabel)
            effects.scaleEffect(viewController.scoreLabel);
            ship.rect.setEffect(Effects.randomColor());
        }
        effectPlaying = true;
    } else if (!failGame) {
        // Main menu (not running, not failed).
        viewController.descLabel.setVisible(!viewController.backLabel.isVisible());
        viewController.scoreLabel.setVisible(false);
        wireMenuHandlers();
    } else {
        // Game-over menu.
        if (!viewController.backLabel.isVisible()) {
            effects.scaleEffect(viewController.failLabel);
            viewController.failLabel.setVisible(true);
            viewController.failLabel.setTextFill(Effects.randomTextColor());
        }
        wireMenuHandlers();
    }
}

/** Wires up the menu items shared by the main menu and the game-over screen. */
private void wireMenuHandlers() {
    viewController.newGame(ship);
    viewController.highScore();
    viewController.optionsMenu();
    mediaController.onSoundButton();
    viewController.addToLeadBoardMenu();
    viewController.exit();
}
/**
 * JavaFX entry point: starts the music, builds the scene, wires the mouse
 * and keyboard handlers, and starts the per-frame {@code AnimationTimer}
 * that drives {@code update()}.
 *
 * @param primaryStage the primary stage supplied by the JavaFX runtime
 */
@Override
public void start(Stage primaryStage) throws Exception {
mediaController.playMusic();
Scene scene = new Scene(createContent());
// NOTE(review): this fires on ANY mouse click, including while a menu is
// shown — confirm that jumping outside a running game is intended.
scene.setOnMouseClicked(event -> {
mediaController.playJumpMusic();
ship.jump();
});
effectPlaying = true;
scene.setOnKeyPressed(event -> {
// SPACE: jump (like a mouse click) and suppress effects.
if (event.getCode() == KeyCode.SPACE){
ship.jump();
mediaController.playJumpMusic();
effectPlaying = false;
}
// ESCAPE while running: pause the game and bring up the menu
// (including the resume entry).
if (running && event.getCode() == KeyCode.ESCAPE){
logger.debug("Game paused.");
running = false;
failGame = false;
viewController.newGame(ship);
viewController.highScore();
viewController.optionsMenu();
mediaController.onSoundButton();
viewController.addToLeadBoardMenu();
viewController.resumeMenu();
viewController.descLabel.setVisible(true);
}
});
primaryStage.setScene(scene);
primaryStage.show();
// Game loop: update() runs once per rendered frame.
AnimationTimer timer = new AnimationTimer() {
@Override
public void handle(long now) {
update();
}
};
timer.start();
}
/**
 * Main method: logs application startup and hands control to the JavaFX
 * runtime, which will in turn call {@code start(Stage)}.
 * @param args Arguments (passed through to {@code launch}).
 */
public static void main(String[] args) {
logger.info("Application started!");
launch(args);
}
}
| |
package org.jgroups.protocols.relay.config;
import org.jgroups.JChannel;
import org.jgroups.stack.Protocol;
import org.w3c.dom.*;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.*;
/**
* Parses and maintains the RELAY2 configuration (in memory)
* @author Bela Ban
* @since 3.2
*/
public class RelayConfig {
    /** Expected element names of a RELAY2 configuration document. */
    protected static final String RELAY_CONFIG = "RelayConfiguration";
    protected static final String SITES = "sites";
    protected static final String SITE = "site";
    protected static final String BRIDGES = "bridges";
    protected static final String BRIDGE = "bridge";
    protected static final String FORWARDS = "forwards";
    protected static final String FORWARD = "forward";

    /** Parses site names and their configuration (e.g. "nyc" --> SiteConfig) into the map passed as argument */
    public static void parse(InputStream input, final Map<String,SiteConfig> map) throws Exception {
        DocumentBuilderFactory factory=DocumentBuilderFactory.newInstance();
        factory.setValidating(false); // for now
        // NOTE(review): DTDs / external entities are not disabled here; if the
        // config file can come from an untrusted source, harden against XXE.
        DocumentBuilder builder=factory.newDocumentBuilder();
        Document document=builder.parse(input);
        Element root=document.getDocumentElement();
        match(RELAY_CONFIG, root.getNodeName(), true);
        NodeList children=root.getChildNodes();
        if(children == null || children.getLength() == 0)
            return;
        for(int i=0; i < children.getLength(); i++) {
            Node node=children.item(i);
            if(node.getNodeType() != Node.ELEMENT_NODE)
                continue;
            String element_name=node.getNodeName();
            if(SITES.equals(element_name))
                parseSites(map, node);
            else
                throw new Exception("expected <" + SITES + ">, but got " + "<" + element_name + ">");
        }
    }

    /** Parses the &lt;site&gt; children of &lt;sites&gt; into the map (name --> SiteConfig). */
    protected static void parseSites(final Map<String,SiteConfig> map, Node root) throws Exception {
        NodeList children=root.getChildNodes();
        if(children == null || children.getLength() == 0)
            return;
        for(int i=0; i < children.getLength(); i++) {
            Node node=children.item(i);
            if(node.getNodeType() != Node.ELEMENT_NODE)
                continue;
            match(SITE, node.getNodeName(), true);
            NamedNodeMap attrs=node.getAttributes();
            if(attrs == null || attrs.getLength() == 0)
                continue;
            Attr name_attr=(Attr)attrs.getNamedItem("name");
            if(name_attr == null) // was an NPE before
                throw new Exception("<" + SITE + "> is missing the \"name\" attribute");
            String name=name_attr.getValue();
            if(map.containsKey(name))
                throw new Exception("Site \"" + name + "\" already defined");
            SiteConfig site_config=new SiteConfig(name);
            map.put(name, site_config);
            parseBridgesAndForwards(site_config, node);
        }
    }

    /** Dispatches the children of a &lt;site&gt; element to the bridge / forward parsers. */
    protected static void parseBridgesAndForwards(SiteConfig site_config, Node root) throws Exception {
        NodeList children=root.getChildNodes();
        if(children == null || children.getLength() == 0)
            return;
        for(int i=0; i < children.getLength(); i++) {
            Node node=children.item(i);
            if(node.getNodeType() != Node.ELEMENT_NODE)
                continue;
            String node_name=node.getNodeName();
            if(BRIDGES.equals(node_name))
                parseBridges(site_config, node);
            else if(FORWARDS.equals(node_name))
                parseForwards(site_config, node);
            else
                throw new Exception("expected \"" + BRIDGES + "\" or \"" + FORWARDS + "\" keywords");
        }
    }

    /** Parses &lt;bridge name="..." config="..."/&gt; elements into the site config. */
    protected static void parseBridges(SiteConfig site_config, Node root) throws Exception {
        NodeList children=root.getChildNodes();
        if(children == null || children.getLength() == 0)
            return;
        for(int i=0; i < children.getLength(); i++) {
            Node node=children.item(i);
            if(node.getNodeType() != Node.ELEMENT_NODE)
                continue;
            String node_name=node.getNodeName();
            match(BRIDGE, node_name, true);
            NamedNodeMap attrs=node.getAttributes();
            if(attrs == null || attrs.getLength() == 0)
                continue;
            Attr name_attr=(Attr)attrs.getNamedItem("name");
            Attr config_attr=(Attr)attrs.getNamedItem("config");
            String name=name_attr != null? name_attr.getValue() : null; // name is optional
            if(config_attr == null) // was an NPE before
                throw new Exception("<" + BRIDGE + "> is missing the \"config\" attribute");
            String config=config_attr.getValue();
            BridgeConfig bridge_config=new PropertiesBridgeConfig(name, config);
            site_config.addBridge(bridge_config);
        }
    }

    /** Parses &lt;forward to="..." gateway="..."/&gt; elements into the site config. */
    protected static void parseForwards(SiteConfig site_config, Node root) throws Exception {
        NodeList children=root.getChildNodes();
        if(children == null || children.getLength() == 0)
            return;
        for(int i=0; i < children.getLength(); i++) {
            Node node=children.item(i);
            if(node.getNodeType() != Node.ELEMENT_NODE)
                continue;
            String node_name=node.getNodeName();
            match(FORWARD, node_name, true);
            NamedNodeMap attrs=node.getAttributes();
            if(attrs == null || attrs.getLength() == 0)
                continue;
            Attr to_attr=(Attr)attrs.getNamedItem("to");
            Attr gw_attr=(Attr)attrs.getNamedItem("gateway");
            if(to_attr == null || gw_attr == null) // was an NPE before
                throw new Exception("<" + FORWARD + "> requires both \"to\" and \"gateway\" attributes");
            String to=to_attr.getValue();
            String gateway=gw_attr.getValue();
            ForwardConfig forward_config=new ForwardConfig(to, gateway);
            site_config.addForward(forward_config);
        }
    }

    /** Throws if {@code name} differs from {@code expected_name}; {@code is_element} only affects the message. */
    protected static void match(String expected_name, String name, boolean is_element) throws Exception {
        if(!expected_name.equals(name))
            throw new Exception((is_element? "Element " : "Attribute ") + "\"" + name + "\" didn't match \"" + expected_name + "\"");
    }

    /** Configuration of a single site: its name plus its bridges and forwards. */
    public static class SiteConfig {
        protected final String name;
        protected final List<BridgeConfig> bridges=new ArrayList<BridgeConfig>();
        protected final List<ForwardConfig> forwards=new ArrayList<ForwardConfig>();

        public SiteConfig(String name) {
            this.name=name;
        }
        public String getName() {return name;}
        public List<BridgeConfig> getBridges() {return bridges;}
        public List<ForwardConfig> getForwards() {return forwards;}
        public SiteConfig addBridge(BridgeConfig bridge_config) {bridges.add(bridge_config); return this;}
        public SiteConfig addForward(ForwardConfig forward_config) {forwards.add(forward_config); return this;}
        public String toString() {
            StringBuilder sb=new StringBuilder("name=" + name + "\n");
            if(!bridges.isEmpty())
                for(BridgeConfig bridge_config: bridges)
                    sb.append(bridge_config).append("\n");
            if(!forwards.isEmpty())
                for(ForwardConfig forward_config: forwards)
                    sb.append(forward_config).append("\n");
            return sb.toString();
        }
    }

    /** A bridge to another cluster; concrete subclasses know how to create the channel. */
    public abstract static class BridgeConfig {
        protected final String cluster_name;

        protected BridgeConfig(String cluster_name) {this.cluster_name=cluster_name;}
        public String getClusterName() {return cluster_name;}
        public abstract JChannel createChannel() throws Exception;
        public String toString() {return "cluster=" + cluster_name;}
    }

    /** Bridge configured from a properties/XML config string. */
    public static class PropertiesBridgeConfig extends BridgeConfig {
        protected final String config;

        public PropertiesBridgeConfig(String cluster_name, String config) {
            super(cluster_name);
            this.config=config;
        }
        public JChannel createChannel() throws Exception {return new JChannel(config);}
        public String toString() {return "config=" + config + super.toString();}
    }

    /** Bridge configured programmatically from a protocol stack. */
    public static class ProgrammaticBridgeConfig extends BridgeConfig {
        protected Protocol[] protocols;

        public ProgrammaticBridgeConfig(String cluster_name, Protocol[] prots) {
            super(cluster_name);
            this.protocols=prots;
        }
        public JChannel createChannel() throws Exception {
            return new JChannel(protocols);
        }
        public String toString() {
            return super.toString() + ", protocols=" + printProtocols(protocols);
        }
        protected static String printProtocols(Protocol[] protocols) {
            StringBuilder sb=new StringBuilder("[");
            boolean first=true;
            for(Protocol prot: protocols) {
                if(first)
                    first=false;
                else
                    sb.append(", ");
                sb.append(prot.getName());
            }
            sb.append("]");
            return sb.toString();
        }
    }

    /** Forwarding rule: messages for {@code to} are routed via {@code gateway}. */
    public static class ForwardConfig {
        protected final String to;
        protected final String gateway;

        public ForwardConfig(String to, String gateway) {
            this.to=to;
            this.gateway=gateway;
        }
        public String getGateway() {return gateway;}
        public String getTo() {return to;}
        public String toString() {
            return "forward to=" + to + " gateway=" + gateway;
        }
    }

    /** Quick manual test: parses the file given as args[0] (default kept for compatibility) and dumps it. */
    public static void main(String[] args) throws Exception {
        String path=args.length > 0? args[0] : "/home/bela/relay2.xml";
        Map<String,SiteConfig> sites=new HashMap<String,SiteConfig>();
        InputStream input=new FileInputStream(path);
        try {
            RelayConfig.parse(input, sites);
        }
        finally {
            input.close(); // the stream was previously leaked
        }
        System.out.println("sites:");
        for(Map.Entry<String,SiteConfig> entry: sites.entrySet())
            System.out.println(entry.getKey() + ":\n" + entry.getValue() + "\n");
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.olingo.commons.api.format;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.List;
import org.junit.Test;
/**
 * Unit tests for {@link AcceptType}: parsing of Accept-header strings,
 * wildcard and q-parameter handling, matching against {@link ContentType},
 * and rejection of malformed input.
 */
public class AcceptTypeTest {

    /** A full wildcard accepts every content type. */
    @Test
    public void wildcard() {
        List<AcceptType> atl = AcceptType.create("*/*");
        assertEquals(1, atl.size());
        assertEquals("*/*", atl.get(0).toString());
        assertTrue(atl.get(0).matches(ContentType.create("a/a")));
        assertTrue(atl.get(0).matches(ContentType.create("b/b")));
    }

    /** A subtype wildcard only matches within its type. */
    @Test
    public void wildcardSubtype() {
        List<AcceptType> atl = AcceptType.create("a/*");
        assertEquals(1, atl.size());
        assertEquals("a/*", atl.get(0).toString());
        assertTrue(atl.get(0).matches(ContentType.create("a/a")));
        assertFalse(atl.get(0).matches(ContentType.create("b/b")));
    }

    /** Single accept types: q does not affect matching, other parameters do. */
    @Test
    public void singleAcceptType() {
        assertTrue(AcceptType.create("a/a").get(0).matches(ContentType.create("a/a")));
        assertTrue(AcceptType.create("a/a;q=0.2").get(0).matches(ContentType.create("a/a")));
        assertFalse(AcceptType.create("a/a;x=y;q=0.2").get(0).matches(ContentType.create("a/a")));
        assertTrue(AcceptType.create("a/a;x=y;q=0.2").get(0).matches(ContentType.create("a/a;x=y")));
        assertTrue(AcceptType.create("a/a; q=0.2").get(0).matches(ContentType.create("a/a")));
        assertEquals("a/a;q=0.2;x=y", AcceptType.create("a/a;x=y;q=0.2").get(0).toString());
    }

    /** Lists of accept types are ordered by specificity and q value. */
    @Test
    public void acceptTypes() {
        List<AcceptType> atl;
        atl = AcceptType.create("b/b,*/*,a/a,c/*");
        assertNotNull(atl);
        assertTrue(atl.get(0).matches(ContentType.create("b/b")));
        assertTrue(atl.get(1).matches(ContentType.create("a/a")));
        assertEquals("c", atl.get(2).getType());
        assertEquals(TypeUtil.MEDIA_TYPE_WILDCARD, atl.get(2).getSubtype());
        assertEquals(TypeUtil.MEDIA_TYPE_WILDCARD, atl.get(3).getType());
        assertEquals(TypeUtil.MEDIA_TYPE_WILDCARD, atl.get(3).getSubtype());
        atl = AcceptType.create("a/a;q=0.3,*/*;q=0.1,b/b;q=0.2");
        assertNotNull(atl);
        assertTrue(atl.get(0).matches(ContentType.create("a/a")));
        assertTrue(atl.get(1).matches(ContentType.create("b/b")));
        assertEquals(TypeUtil.MEDIA_TYPE_WILDCARD, atl.get(2).getType());
        assertEquals(TypeUtil.MEDIA_TYPE_WILDCARD, atl.get(2).getSubtype());
        atl = AcceptType.create("a/a;q=0.3,*/*;q=0.3");
        assertNotNull(atl);
        assertTrue(atl.get(0).matches(ContentType.create("a/a")));
        assertEquals(TypeUtil.MEDIA_TYPE_WILDCARD, atl.get(1).getType());
        assertEquals(TypeUtil.MEDIA_TYPE_WILDCARD, atl.get(1).getSubtype());
        atl = AcceptType.create("a/a;x=y;q=0.1,b/b;x=y;q=0.3");
        assertNotNull(atl);
        assertTrue(atl.get(0).matches(ContentType.create("b/b;x=y")));
        assertFalse(atl.get(0).matches(ContentType.create("b/b;x=z")));
        assertTrue(atl.get(1).matches(ContentType.create("a/a;x=y")));
        assertFalse(atl.get(1).matches(ContentType.create("a/a;x=z")));
        atl = AcceptType.create("a/a; q=0.3, */*; q=0.1, b/b; q=0.2");
        assertNotNull(atl);
    }

    /** The q parameter is parsed, exposed and re-serialized. */
    @Test
    public void withQParameter() {
        List<AcceptType> acceptTypes = AcceptType.create("application/json;q=0.2");
        assertEquals(1, acceptTypes.size());
        final AcceptType acceptType = acceptTypes.get(0);
        assertEquals("application", acceptType.getType());
        assertEquals("json", acceptType.getSubtype());
        assertEquals("0.2", acceptType.getParameters().get(TypeUtil.PARAMETER_Q));
        assertEquals("0.2", acceptType.getParameter(TypeUtil.PARAMETER_Q));
        assertEquals(Float.valueOf(0.2F), acceptType.getQuality());
        assertEquals("application/json;q=0.2", acceptType.toString());
    }

    /** Malformed type/subtype combinations are rejected. */
    @Test
    public void formatErrors() {
        expectCreateError("/");
        expectCreateError("//");
        expectCreateError("///");
        expectCreateError("a/b/c");
        expectCreateError("a//b");
    }

    @Test
    public void abbreviationsNotAllowed() {
        expectCreateError("application");
    }

    /** A wildcard type with a concrete subtype is invalid. */
    @Test
    public void wildcardError() {
        expectCreateError("*/json");
    }

    /** q must be a number between 0 and 1 with at most three decimals. */
    @Test
    public void wrongQParameter() {
        expectCreateError(" a/a;q=z ");
        expectCreateError("a/a;q=42");
        expectCreateError("a/a;q=0.0001");
        expectCreateError("a/a;q='");
        expectCreateError("a/a;q=0.8,abc");
    }

    /** Parameters need a well-formed name=value shape. */
    @Test
    public void parameterErrors() {
        expectCreateError("a/b;parameter");
        expectCreateError("a/b;parameter=");
        expectCreateError("a/b;name= value");
        expectCreateError("a/b;=value");
        expectCreateError("a/b;the name=value");
    }

    @Test
    public void trailingSemicolon() {
        expectCreateError("a/b;");
    }

    /** An AcceptType can be derived from an existing ContentType. */
    @Test
    public void fromContentType() {
        final List<AcceptType> acceptType = AcceptType.fromContentType(ContentType.APPLICATION_JSON);
        assertNotNull(acceptType);
        assertEquals(1, acceptType.size());
        assertEquals(ContentType.APPLICATION_JSON.toContentTypeString(), acceptType.get(0).toString());
    }

    /** Asserts that {@code AcceptType.create(value)} throws IllegalArgumentException. */
    private void expectCreateError(final String value) {
        try {
            AcceptType.create(value);
            fail("Expected exception not thrown.");
        } catch (final IllegalArgumentException e) {
            assertNotNull(e);
        }
    }

    // renamed from "multipleTypeswithQParameter" (camelCase typo)
    @Test
    public void multipleTypesWithQParameter() {
        List<AcceptType> acceptTypes = AcceptType.create("application/json;q=0.2,application/json;q=0.2");
        assertEquals(2, acceptTypes.size());
        final AcceptType acceptType = acceptTypes.get(0);
        assertEquals("application", acceptType.getType());
        assertEquals("json", acceptType.getSubtype());
        assertEquals("0.2", acceptType.getParameters().get(TypeUtil.PARAMETER_Q));
        assertEquals("0.2", acceptType.getParameter(TypeUtil.PARAMETER_Q));
        assertEquals(Float.valueOf(0.2F), acceptType.getQuality());
        assertEquals("application/json;q=0.2", acceptType.toString());
    }

    // renamed from "multipleTypeswithIllegalTypes" (camelCase typo)
    @Test
    public void multipleTypesWithIllegalTypes() {
        List<AcceptType> acceptTypes = AcceptType.create("application/json;q=0.2,abc/xyz");
        assertEquals(2, acceptTypes.size());
        // abc/xyz has the implicit q=1 and therefore sorts before q=0.2
        final AcceptType acceptType = acceptTypes.get(1);
        assertEquals("application", acceptType.getType());
        assertEquals("json", acceptType.getSubtype());
        assertEquals("0.2", acceptType.getParameters().get(TypeUtil.PARAMETER_Q));
        assertEquals("0.2", acceptType.getParameter(TypeUtil.PARAMETER_Q));
        assertEquals(Float.valueOf(0.2F), acceptType.getQuality());
        assertEquals("application/json;q=0.2", acceptType.toString());
    }

    @Test
    public void multipleFormatErrors() {
        expectCreateError("/,abc,a/a;parameter=");
    }

    @Test
    public void nullAcceptType() {
        expectCreateError(null);
    }

    @Test
    public void emptyAcceptType() {
        expectCreateError("");
    }

    @Test
    public void noTypeAcceptType() {
        expectCreateError("/json");
    }

    /** charset participates in matching like any other non-q parameter. */
    @Test
    public void withCharset() {
        List<AcceptType> acceptTypes = AcceptType.create("application/json;charset=utf-8");
        assertEquals(1, acceptTypes.size());
        final AcceptType acceptType = acceptTypes.get(0);
        assertEquals("application", acceptType.getType());
        assertEquals("json", acceptType.getSubtype());
        assertEquals("utf-8", acceptType.getParameter(ContentType.PARAMETER_CHARSET));
        assertTrue(acceptType.matches(ContentType.create("application/json;"
            + "odata.metadata=minimal;charset=utf-8")));
        assertFalse(acceptType.matches(ContentType.create("application/atom+xml;"
            + "odata.metadata=minimal;charset=utf-8")));
        assertFalse(acceptType.matches(ContentType.create("application/json;"
            + "odata.metadata=minimal")));
    }

    /** Concrete subtype sorts before the subtype wildcard, regardless of input order. */
    @Test
    public void withSubtypeStar1() {
        List<AcceptType> acceptTypes = AcceptType.create("application/json,application/*");
        assertEquals(2, acceptTypes.size());
        final AcceptType acceptType1 = acceptTypes.get(0);
        assertEquals("application", acceptType1.getType());
        assertEquals("json", acceptType1.getSubtype());
        final AcceptType acceptType2 = acceptTypes.get(1);
        assertEquals("application", acceptType2.getType());
        assertEquals("*", acceptType2.getSubtype());
    }

    /** Same as withSubtypeStar1 but with the wildcard listed first. */
    @Test
    public void withSubtypeStar2() {
        List<AcceptType> acceptTypes = AcceptType.create("application/*,application/json");
        assertEquals(2, acceptTypes.size());
        final AcceptType acceptType1 = acceptTypes.get(0);
        assertEquals("application", acceptType1.getType());
        assertEquals("json", acceptType1.getSubtype());
        final AcceptType acceptType2 = acceptTypes.get(1);
        assertEquals("application", acceptType2.getType());
        assertEquals("*", acceptType2.getSubtype());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tika.parser.ocr;
import static java.nio.charset.StandardCharsets.UTF_8;
import javax.imageio.ImageIO;
import javax.xml.parsers.SAXParser;
import java.awt.Image;
import java.awt.image.BufferedImage;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.PumpStreamHandler;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.tika.exception.TikaException;
import org.apache.tika.io.TemporaryResources;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.mime.MediaType;
import org.apache.tika.mime.MediaTypeRegistry;
import org.apache.tika.parser.AbstractParser;
import org.apache.tika.parser.CompositeParser;
import org.apache.tika.parser.ParseContext;
import org.apache.tika.parser.Parser;
import org.apache.tika.parser.external.ExternalParser;
import org.apache.tika.parser.image.ImageParser;
import org.apache.tika.parser.image.TiffParser;
import org.apache.tika.parser.jpeg.JpegParser;
import org.apache.tika.sax.OfflineContentHandler;
import org.apache.tika.sax.XHTMLContentHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
/**
* TesseractOCRParser powered by tesseract-ocr engine. To enable this parser,
* create a {@link TesseractOCRConfig} object and pass it through a
* ParseContext. Tesseract-ocr must be installed and on system path or the path
* to its root folder must be provided:
* <p>
* TesseractOCRConfig config = new TesseractOCRConfig();<br>
* //Needed if tesseract is not on system path<br>
* config.setTesseractPath(tesseractFolder);<br>
* parseContext.set(TesseractOCRConfig.class, config);<br>
* </p>
*
*
*/
public class TesseractOCRParser extends AbstractParser {
// SLF4J logger for this parser.
private static final Logger LOG = LoggerFactory.getLogger(TesseractOCRParser.class);
private static final long serialVersionUID = -8167538283213097265L;
// Fallback configuration used when the ParseContext does not supply one.
private static final TesseractOCRConfig DEFAULT_CONFIG = new TesseractOCRConfig();
// Image types we can OCR; only advertised when Tesseract is installed
// (see getSupportedTypes).
private static final Set<MediaType> SUPPORTED_TYPES = Collections.unmodifiableSet(
new HashSet<>(Arrays.asList(new MediaType[]{
MediaType.image("png"), MediaType.image("jpeg"), MediaType.image("tiff"),
MediaType.image("bmp"), MediaType.image("gif"), MediaType.image("jp2"),
MediaType.image("jpx"), MediaType.image("x-portable-pixmap")
})));
// Cache of "program path -> exists and runs" probes; shared by the Tesseract
// and ImageMagick checks. NOTE(review): plain HashMap mutated from parse
// calls — confirm single-threaded use or switch to ConcurrentHashMap.
private static Map<String,Boolean> TESSERACT_PRESENT = new HashMap<>();
// One-shot flag so the "OCR is enabled" notice is logged only once.
private static volatile boolean HAS_ALERTED = false;
/**
 * Advertises the OCR-able image types only when a working Tesseract binary
 * is available; otherwise returns nothing so that the plain image parsers
 * get selected instead.
 */
@Override
public Set<MediaType> getSupportedTypes(ParseContext context) {
    TesseractOCRConfig config = context.get(TesseractOCRConfig.class, DEFAULT_CONFIG);
    if (!hasTesseract(config)) {
        // No Tesseract: advertise nothing, other image parsers take over.
        return Collections.emptySet();
    }
    if (!HAS_ALERTED) {
        // Log the slowdown warning a single time per JVM.
        LOG.info("Tesseract OCR is installed and will be automatically applied to image files.\n"+
                 "This may dramatically slow down content extraction (TIKA-2359).\n"+
                 "As of Tika 1.15 (and prior versions), Tesseract is automatically called.\n"+
                 "In future versions of Tika, users may need to turn the TesseractOCRParser on via TikaConfig."
        );
        HAS_ALERTED = true;
    }
    return SUPPORTED_TYPES;
}
/**
 * Points Tesseract at its language data by exporting TESSDATA_PREFIX into
 * the child process environment: the explicit tessdata path wins, falling
 * back to the tesseract installation directory, else nothing is set.
 */
private void setEnv(TesseractOCRConfig config, ProcessBuilder pb) {
    String dataPath = config.getTessdataPath();
    if (dataPath.isEmpty()) {
        dataPath = config.getTesseractPath();
    }
    if (!dataPath.isEmpty()) {
        pb.environment().put("TESSDATA_PREFIX", dataPath);
    }
}
/**
 * Checks whether the Tesseract executable configured in {@code config}
 * exists and runs. Each distinct program path is probed only once; the
 * result is cached in {@code TESSERACT_PRESENT}.
 *
 * @param config configuration holding the Tesseract installation path
 * @return true if the executable could be run
 */
public boolean hasTesseract(TesseractOCRConfig config) {
    // Full path (or bare program name, if on the system path) to probe.
    String tesseract = config.getTesseractPath() + getTesseractProg();
    // Single map lookup instead of the previous containsKey + get pair.
    Boolean cached = TESSERACT_PRESENT.get(tesseract);
    if (cached != null) {
        return cached;
    }
    // Try running Tesseract from there, and see if it exists + works.
    boolean hasTesseract = ExternalParser.check(new String[] { tesseract });
    TESSERACT_PRESENT.put(tesseract, hasTesseract);
    return hasTesseract;
}
/**
 * Checks whether the ImageMagick program configured in {@code config}
 * exists and runs. Results are cached (in the same map as the Tesseract
 * probes, keyed by the full program path).
 *
 * @param config configuration holding the ImageMagick installation path
 * @return true if the executable could be run
 */
private boolean hasImageMagick(TesseractOCRConfig config) {
    // renamed from "ImageMagick" — locals are lowerCamelCase
    String imageMagick = config.getImageMagickPath() + getImageMagickProg();
    // Single map lookup instead of the previous containsKey + get pair.
    Boolean cached = TESSERACT_PRESENT.get(imageMagick);
    if (cached != null) {
        return cached;
    }
    // Try running the ImageMagick program, and see if it exists + works.
    boolean hasImageMagick = ExternalParser.check(new String[] { imageMagick });
    TESSERACT_PRESENT.put(imageMagick, hasImageMagick);
    return hasImageMagick;
}
/**
 * Probes for a python interpreter by running {@code python -h} and checking
 * that it produces any output at all.
 *
 * @return true if python appears to be installed and runnable
 */
private static boolean hasPython() {
    try {
        Process proc = Runtime.getRuntime().exec("python -h");
        // try-with-resources: the reader (and the process stream) used to leak
        try (BufferedReader stdInput = new BufferedReader(
                new InputStreamReader(proc.getInputStream(), UTF_8))) {
            return stdInput.read() != -1;
        }
    } catch (IOException e) {
        // Best effort: python simply is not available on this system.
        return false;
    }
}
/**
 * OCRs an in-memory {@link Image}: renders it to a temporary PNG file and
 * delegates to {@link #parse(InputStream, ContentHandler, Metadata, ParseContext)}.
 *
 * @param image the image to OCR
 * @param handler SAX handler receiving the recognised text
 * @param metadata document metadata (updated by the delegate parse)
 * @param context parse context (may carry a TesseractOCRConfig)
 */
public void parse(Image image, ContentHandler handler, Metadata metadata, ParseContext context) throws IOException,
        SAXException, TikaException {
    TemporaryResources tmp = new TemporaryResources();
    try {
        int w = image.getWidth(null);
        int h = image.getHeight(null);
        BufferedImage bImage = new BufferedImage(w, h, BufferedImage.TYPE_INT_RGB);
        // BUG FIX: the incoming image was never drawn into bImage, so a blank
        // (all-black) PNG was handed to the OCR pipeline. Render it first.
        // (fully qualified to avoid touching the import block)
        java.awt.Graphics2D g2d = bImage.createGraphics();
        try {
            g2d.drawImage(image, 0, 0, null);
        } finally {
            g2d.dispose();
        }
        File file = tmp.createTemporaryFile();
        try (FileOutputStream fos = new FileOutputStream(file)) {
            ImageIO.write(bImage, "png", fos);
        }
        try (TikaInputStream tis = TikaInputStream.get(file)) {
            parse(tis, handler, metadata, context);
        }
    } finally {
        tmp.dispose();
    }
}
/**
 * OCRs the given image stream: spools it to a temporary file, runs the
 * regular image metadata parser, then OCRs the file and writes the
 * recognised text as XHTML to {@code handler}. Silently returns if
 * Tesseract is not available under the effective configuration.
 */
@Override
public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext parseContext)
throws IOException, SAXException, TikaException {
TesseractOCRConfig config = parseContext.get(TesseractOCRConfig.class, DEFAULT_CONFIG);
// If Tesseract is not on the path with the current config, do not try to run OCR
// getSupportedTypes shouldn't have listed us as handling it, so this should only
// occur if someone directly calls this parser, not via DefaultParser or similar
if (! hasTesseract(config))
return;
TemporaryResources tmp = new TemporaryResources();
try {
TikaInputStream tikaStream = TikaInputStream.get(stream, tmp);
//trigger the spooling to a tmp file if the stream wasn't
//already a TikaInputStream that contained a file
tikaStream.getPath();
//this is the text output file name specified on the tesseract
//commandline. The actual output file name will have a suffix added.
File tmpOCROutputFile = tmp.createTemporaryFile();
// Temporary workaround for TIKA-1445 - until we can specify
// composite parsers with strategies (eg Composite, Try In Turn),
// always send the image onwards to the regular parser to have
// the metadata for them extracted as well
// (_TMP_IMAGE_METADATA_PARSER is declared elsewhere in this class)
_TMP_IMAGE_METADATA_PARSER.parse(tikaStream, new DefaultHandler(), metadata, parseContext);
XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata);
xhtml.startDocument();
parse(tikaStream, tmpOCROutputFile, parseContext, xhtml, config);
xhtml.endDocument();
} finally {
tmp.dispose();
}
}
/**
 * Use this to parse content without starting a new document.
 * This appends SAX events to xhtml without re-adding the metadata, body start, etc.
 * Delegates with a fresh, empty {@link ParseContext}.
 *
 * @param stream inputstream
 * @param xhtml handler
 * @param config TesseractOCRConfig to use for this parse
 * @throws IOException
 * @throws SAXException
 * @throws TikaException
 *
 * @deprecated use {@link #parseInline(InputStream, XHTMLContentHandler, ParseContext, TesseractOCRConfig)}
 */
public void parseInline(InputStream stream, XHTMLContentHandler xhtml, TesseractOCRConfig config)
throws IOException, SAXException, TikaException {
parseInline(stream, xhtml, new ParseContext(), config);
}
/**
 * Use this to parse content without starting a new document: OCR output is
 * appended to {@code xhtml} without re-adding metadata, body start, etc.
 *
 * @param stream inputstream with the image data
 * @param xhtml handler the recognised text is appended to
 * @param parseContext current parse context
 * @param config TesseractOCRConfig to use for this parse
 * @throws IOException on stream/temp-file errors
 * @throws SAXException on handler errors
 * @throws TikaException on OCR errors
 */
public void parseInline(InputStream stream, XHTMLContentHandler xhtml, ParseContext parseContext,
                        TesseractOCRConfig config)
        throws IOException, SAXException, TikaException {
    // Without a usable Tesseract install there is nothing to do; bail out
    // silently (getSupportedTypes would not have advertised us either —
    // reaching this only happens on a direct call to this parser).
    if (!hasTesseract(config)) {
        return;
    }
    TemporaryResources tmp = new TemporaryResources();
    try {
        TikaInputStream tikaStream = TikaInputStream.get(stream, tmp);
        File tmpImgFile = tmp.createTemporaryFile();
        parse(tikaStream, tmpImgFile, parseContext, xhtml, config);
    } finally {
        tmp.dispose();
    }
}
/**
 * Pre-processes the image into an OCR-friendlier form: an optional python
 * helper computes the rotation needed to make the text horizontal, then
 * ImageMagick's "convert" rewrites the file in place with the configured
 * density/depth/colorspace/filter/resize and that rotation.
 *
 * @param streamingObject input image to be processed (rewritten in place)
 * @param config TesseractOCRConfig class to get ImageMagick properties
 * @throws IOException if an input error occurred
 * @throws TikaException if an exception timed out
 */
private void processImage(File streamingObject, TesseractOCRConfig config) throws IOException, TikaException {
    TemporaryResources tmp = new TemporaryResources();
    try {
        // Copy the bundled rotation script to a real file so it can be exec'd.
        File rotationScript = tmp.createTemporaryFile();
        // try-with-resources: the resource stream used to leak
        try (InputStream in = getClass().getResourceAsStream("rotation.py")) {
            Files.copy(in, rotationScript.toPath(), StandardCopyOption.REPLACE_EXISTING);
        }
        String cmd = "python " + rotationScript.getAbsolutePath() + " -f " + streamingObject.getAbsolutePath();
        String angle = "0";
        DefaultExecutor executor = new DefaultExecutor();
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        executor.setStreamHandler(new PumpStreamHandler(outputStream));
        // determine the angle of rotation required to make the text horizontal
        if (hasPython()) {
            try {
                executor.execute(CommandLine.parse(cmd));
                angle = outputStream.toString("UTF-8").trim();
            } catch (Exception e) {
                // Best effort: fall back to no rotation, but leave a trace.
                LOG.debug("rotation detection failed, assuming 0 degrees", e);
            }
        }
        // process the image - parameter values can be set in TesseractOCRConfig.properties
        String line = "convert -density " + config.getDensity() + " -depth " + config.getDepth() +
                " -colorspace " + config.getColorspace() + " -filter " + config.getFilter() +
                " -resize " + config.getResize() + "% -rotate "+ angle + " " + streamingObject.getAbsolutePath() +
                " " + streamingObject.getAbsolutePath();
        try {
            executor.execute(CommandLine.parse(line));
        } catch (Exception e) {
            // Keep the previous swallow-and-continue behaviour, but log it.
            LOG.warn("ImageMagick pre-processing failed; OCR will use the original image", e);
        }
    } finally {
        // Previously only reached on the happy path, leaking the temp script
        // whenever an exception escaped above.
        tmp.close();
    }
}
/**
 * Runs OCR on the given stream (optionally preprocessing the image with
 * ImageMagick first) and writes the recognized text to the XHTML handler.
 * Skips files outside the configured min/max size window.
 */
private void parse(TikaInputStream tikaInputStream, File tmpOCROutputFile, ParseContext parseContext,
                   XHTMLContentHandler xhtml, TesseractOCRConfig config)
        throws IOException, SAXException, TikaException {
    File producedOutput = null;
    try {
        File sourceFile = tikaInputStream.getFile();
        long length = tikaInputStream.getLength();
        boolean sizeInRange =
                length >= config.getMinFileSizeToOcr() && length <= config.getMaxFileSizeToOcr();
        if (sizeInRange) {
            if (config.isEnableImageProcessing() == 1 && hasImageMagick(config)) {
                // Preprocess a private copy of the input so the original bytes
                // remain untouched for any other consumers.
                TemporaryResources scratch = new TemporaryResources();
                try {
                    File workingCopy = scratch.createTemporaryFile();
                    FileUtils.copyFile(sourceFile, workingCopy);
                    processImage(workingCopy, config);
                    doOCR(workingCopy, tmpOCROutputFile, config);
                } finally {
                    scratch.dispose();
                }
            } else {
                doOCR(sourceFile, tmpOCROutputFile, config);
            }
            // Tesseract appends the output type (.txt or .hocr) to the output file name.
            String suffix = config.getOutputType().toString().toLowerCase(Locale.US);
            producedOutput = new File(tmpOCROutputFile.getAbsolutePath() + "." + suffix);
            if (producedOutput.exists()) {
                try (InputStream is = new FileInputStream(producedOutput)) {
                    if (config.getOutputType().equals(TesseractOCRConfig.OUTPUT_TYPE.HOCR)) {
                        extractHOCROutput(is, parseContext, xhtml);
                    } else {
                        extractOutput(is, xhtml);
                    }
                }
            }
        }
    } finally {
        if (producedOutput != null) {
            producedOutput.delete();
        }
    }
}
// TIKA-1445 workaround parser.
// Both statics are now final: they are initialized once and never reassigned,
// so leaving them mutable only invited accidental clobbering of shared state.
private static final Parser _TMP_IMAGE_METADATA_PARSER = new CompositeImageParser();
/** Delegates to the image parsers whose embedded metadata we still want extracted. */
private static class CompositeImageParser extends CompositeParser {
    private static final long serialVersionUID = -2398203346206381382L;
    private static final List<Parser> imageParsers = Arrays.asList(new Parser[]{
            new ImageParser(), new JpegParser(), new TiffParser()
    });
    CompositeImageParser() {
        super(new MediaTypeRegistry(), imageParsers);
    }
}
/**
* Run external tesseract-ocr process.
*
* @param input
* File to be ocred
* @param output
* File to collect ocr result
* @param config
* Configuration of tesseract-ocr engine
* @throws TikaException
* if the extraction timed out
* @throws IOException
* if an input error occurred
*/
private void doOCR(File input, File output, TesseractOCRConfig config) throws IOException, TikaException {
    // Argument-array form: no shell is involved, so paths are passed verbatim.
    String[] cmd = { config.getTesseractPath() + getTesseractProg(), input.getPath(), output.getPath(), "-l",
            config.getLanguage(), "-psm", config.getPageSegMode(),
            config.getOutputType().name().toLowerCase(Locale.US),
            "-c",
            (config.getPreserveInterwordSpacing())? "preserve_interword_spaces=1" : "preserve_interword_spaces=0"};
    ProcessBuilder pb = new ProcessBuilder(cmd);
    setEnv(config, pb);
    final Process process = pb.start();
    // Tesseract reads nothing from stdin; close it so the child cannot block on it.
    process.getOutputStream().close();
    InputStream out = process.getInputStream();
    InputStream err = process.getErrorStream();
    // Drain stdout/stderr on background threads so the child never stalls on a full pipe.
    logStream("OCR MSG", out, input);
    logStream("OCR ERROR", err, input);
    // waitFor() is unbounded, so run it on a helper thread and bound the wait
    // with the configured timeout via Future.get(timeout).
    FutureTask<Integer> waitTask = new FutureTask<>(new Callable<Integer>() {
        public Integer call() throws Exception {
            return process.waitFor();
        }
    });
    Thread waitThread = new Thread(waitTask);
    waitThread.start();
    try {
        waitTask.get(config.getTimeout(), TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        waitThread.interrupt();
        process.destroy();
        // Restore the interrupt flag for callers further up the stack.
        Thread.currentThread().interrupt();
        throw new TikaException("TesseractOCRParser interrupted", e);
    } catch (ExecutionException e) {
        // should not be thrown
    } catch (TimeoutException e) {
        waitThread.interrupt();
        process.destroy();
        throw new TikaException("TesseractOCRParser timeout", e);
    }
}
/**
* Reads the contents of the given stream and write it to the given XHTML
* content handler. The stream is closed once fully processed.
*
* @param stream
* Stream where is the result of ocr
* @param xhtml
* XHTML content handler
* @throws SAXException
* if the XHTML SAX events could not be handled
* @throws IOException
* if an input error occurred
*/
private void extractOutput(InputStream stream, XHTMLContentHandler xhtml) throws SAXException, IOException {
    xhtml.startElement("div", "class", "ocr");
    try (Reader reader = new InputStreamReader(stream, UTF_8)) {
        char[] chunk = new char[1024];
        int count;
        while ((count = reader.read(chunk)) != -1) {
            if (count > 0) {
                xhtml.characters(chunk, 0, count);
            }
        }
    }
    xhtml.endElement("div");
}
/**
 * Parses hOCR output and forwards its elements to the XHTML handler,
 * wrapped in an {@code <div class="ocr">} element.
 */
private void extractHOCROutput(InputStream is, ParseContext parseContext,
                               XHTMLContentHandler xhtml) throws TikaException, IOException, SAXException {
    // Callers may pass a null context; fall back to a default one.
    ParseContext context = (parseContext == null) ? new ParseContext() : parseContext;
    SAXParser saxParser = context.getSAXParser();
    xhtml.startElement("div", "class", "ocr");
    saxParser.parse(is, new OfflineContentHandler(new HOCRPassThroughHandler(xhtml)));
    xhtml.endElement("div");
}
/**
* Starts a thread that reads the contents of the standard output or error
* stream of the given process to not block the process. The stream is closed
* once fully processed.
*/
private void logStream(final String logType, final InputStream stream, final File file) {
    new Thread() {
        public void run() {
            StringBuilder collected = new StringBuilder();
            Reader reader = new InputStreamReader(stream, UTF_8);
            char[] chunk = new char[1024];
            try {
                int count;
                while ((count = reader.read(chunk)) != -1) {
                    collected.append(chunk, 0, count);
                }
            } catch (IOException ignored) {
                // Losing child-process output is acceptable; never fail the parse for it.
            } finally {
                IOUtils.closeQuietly(stream);
            }
            LOG.debug("{}", collected);
        }
    }.start();
}
/** @return the tesseract executable name for the current OS (Windows adds {@code .exe}). */
static String getTesseractProg() {
    boolean isWindows = System.getProperty("os.name").startsWith("Windows");
    return isWindows ? "tesseract.exe" : "tesseract";
}
/** @return the ImageMagick convert executable name for the current OS (Windows adds {@code .exe}). */
static String getImageMagickProg() {
    boolean isWindows = System.getProperty("os.name").startsWith("Windows");
    return isWindows ? "convert.exe" : "convert";
}
/**
 * Forwards hOCR SAX events to the wrapped handler while suppressing the
 * structural wrapper elements (html/head/body/...) that the caller's document
 * already provides.
 */
private static class HOCRPassThroughHandler extends DefaultHandler {
    private final ContentHandler xhtml;
    // Elements that must not be re-emitted into the enclosing document.
    public static final Set<String> IGNORE = unmodifiableSet(
            "html", "head", "title", "meta", "body");
    public HOCRPassThroughHandler(ContentHandler xhtml) {
        this.xhtml = xhtml;
    }
    /** Forwards the element start unless it is one of the ignored wrappers. */
    @Override
    public void startElement(
            String uri, String local, String name, Attributes attributes)
            throws SAXException {
        if (IGNORE.contains(name)) {
            return;
        }
        xhtml.startElement(uri, local, name, attributes);
    }
    /** Forwards the element end unless it is one of the ignored wrappers. */
    @Override
    public void endElement(String uri, String local, String name) throws SAXException {
        if (IGNORE.contains(name)) {
            return;
        }
        xhtml.endElement(uri, local, name);
    }
    /**
     * Forwards all character content verbatim.
     * @see <a href="https://issues.apache.org/jira/browse/TIKA-210">TIKA-210</a>
     */
    @Override
    public void characters(char[] ch, int start, int length) throws SAXException {
        xhtml.characters(ch, start, length);
    }
    /** Builds an immutable set from the given elements. */
    private static Set<String> unmodifiableSet(String... elements) {
        Set<String> backing = new HashSet<>(Arrays.asList(elements));
        return Collections.unmodifiableSet(backing);
    }
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.fsx.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* The response object for <code>DescribeFileSystems</code> operation.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/fsx-2018-03-01/DescribeFileSystems" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeFileSystemsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** Descriptions of the file systems returned by this call. */
    private java.util.List<FileSystem> fileSystems;

    /**
     * Pagination token; present when more file systems exist than were returned.
     * Pass it in a subsequent request to fetch the remaining descriptions.
     */
    private String nextToken;

    /**
     * Returns the stored file system descriptions.
     *
     * @return An array of file system descriptions.
     */
    public java.util.List<FileSystem> getFileSystems() {
        return this.fileSystems;
    }

    /**
     * Replaces the stored file system descriptions with a defensive copy of the
     * supplied collection, or clears them when {@code null} is given.
     *
     * @param fileSystems
     *        An array of file system descriptions.
     */
    public void setFileSystems(java.util.Collection<FileSystem> fileSystems) {
        this.fileSystems = (fileSystems == null) ? null : new java.util.ArrayList<FileSystem>(fileSystems);
    }

    /**
     * Appends the given file system descriptions to any already present.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setFileSystems(java.util.Collection)} or {@link #withFileSystems(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param fileSystems
     *        An array of file system descriptions.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeFileSystemsResult withFileSystems(FileSystem... fileSystems) {
        if (this.fileSystems == null) {
            setFileSystems(new java.util.ArrayList<FileSystem>(fileSystems.length));
        }
        java.util.Collections.addAll(this.fileSystems, fileSystems);
        return this;
    }

    /**
     * Replaces the stored file system descriptions (see {@link #setFileSystems}).
     *
     * @param fileSystems
     *        An array of file system descriptions.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeFileSystemsResult withFileSystems(java.util.Collection<FileSystem> fileSystems) {
        setFileSystems(fileSystems);
        return this;
    }

    /**
     * Sets the pagination token returned by the service.
     *
     * @param nextToken
     *        Present if there are more file systems than returned in the response (String). You can use the
     *        <code>NextToken</code> value in the later request to fetch the descriptions.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token, if any.
     *
     * @return Present if there are more file systems than returned in the response (String). You can use the
     *         <code>NextToken</code> value in the later request to fetch the descriptions.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Sets the pagination token and returns {@code this} for chaining.
     *
     * @param nextToken
     *        Present if there are more file systems than returned in the response (String). You can use the
     *        <code>NextToken</code> value in the later request to fetch the descriptions.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeFileSystemsResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getFileSystems() != null) {
            buf.append("FileSystems: ").append(getFileSystems()).append(",");
        }
        if (getNextToken() != null) {
            buf.append("NextToken: ").append(getNextToken());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof DescribeFileSystemsResult)) {
            return false;
        }
        DescribeFileSystemsResult other = (DescribeFileSystemsResult) obj;
        return java.util.Objects.equals(other.getFileSystems(), this.getFileSystems())
                && java.util.Objects.equals(other.getNextToken(), this.getNextToken());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        // Objects.hashCode(x) is x == null ? 0 : x.hashCode() -- same values as before.
        result = prime * result + java.util.Objects.hashCode(getFileSystems());
        result = prime * result + java.util.Objects.hashCode(getNextToken());
        return result;
    }

    @Override
    public DescribeFileSystemsResult clone() {
        try {
            return (DescribeFileSystemsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.base;
import android.os.Looper;
import android.os.MessageQueue;
import android.os.SystemClock;
import android.util.Log;
import android.util.Printer;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;
import org.chromium.base.annotations.MainDex;
import org.chromium.base.annotations.NativeMethods;
/**
* Java mirror of Chrome trace event API. See base/trace_event/trace_event.h.
*
* To get scoped trace events, use the "try with resource" construct, for instance:
* <pre>{@code
* try (TraceEvent e = TraceEvent.scoped("MyTraceEvent")) {
* // code.
* }
* }</pre>
*
* The event name of the trace events must be a string literal or a |static final String| class
* member. Otherwise NoDynamicStringsInTraceEventCheck error will be thrown.
*
* It is OK to use tracing before the native library has loaded, in a slightly restricted fashion.
* @see EarlyTraceEvent for details.
*/
@JNINamespace("base::android")
@MainDex
public class TraceEvent implements AutoCloseable {
    // Mirrors the native trace log's enabled state; flipped via setEnabled() from native.
    private static volatile boolean sEnabled;
    private static volatile boolean sATraceEnabled; // True when taking an Android systrace.
    /**
     * Converts the main Looper's ">>>>> Dispatching to"/"<<<<< Finished to" log lines
     * into one top-level trace event per dispatched message.
     */
    private static class BasicLooperMonitor implements Printer {
        private static final String LOOPER_TASK_PREFIX = "Looper.dispatch: ";
        private static final int SHORTEST_LOG_PREFIX_LENGTH = "<<<<< Finished to ".length();
        // Name of the event currently being handled; null between dispatches.
        private String mCurrentTarget;
        @Override
        public void println(final String line) {
            // The Looper prefixes dispatch-start lines with '>' and finish lines with '<'.
            if (line.startsWith(">")) {
                beginHandling(line);
            } else {
                assert line.startsWith("<");
                endHandling(line);
            }
        }
        void beginHandling(final String line) {
            // May return an out-of-date value. this is not an issue as EarlyTraceEvent#begin()
            // will filter the event in this case.
            boolean earlyTracingActive = EarlyTraceEvent.enabled();
            if (sEnabled || earlyTracingActive) {
                mCurrentTarget = getTraceEventName(line);
                if (sEnabled) {
                    TraceEventJni.get().beginToplevel(mCurrentTarget);
                } else {
                    EarlyTraceEvent.begin(mCurrentTarget, true /*isToplevel*/);
                }
            }
        }
        void endHandling(final String line) {
            boolean earlyTracingActive = EarlyTraceEvent.enabled();
            if ((sEnabled || earlyTracingActive) && mCurrentTarget != null) {
                if (sEnabled) {
                    TraceEventJni.get().endToplevel(mCurrentTarget);
                } else {
                    EarlyTraceEvent.end(mCurrentTarget, true /*isToplevel*/);
                }
            }
            mCurrentTarget = null;
        }
        private static String getTraceEventName(String line) {
            return LOOPER_TASK_PREFIX + getTarget(line) + "(" + getTargetName(line) + ")";
        }
        /**
         * Android Looper formats |logLine| as
         *
         * ">>>>> Dispatching to (TARGET) {HASH_CODE} TARGET_NAME: WHAT"
         *
         * and
         *
         * "<<<<< Finished to (TARGET) {HASH_CODE} TARGET_NAME".
         *
         * This has been the case since at least 2009 (Donut). This function extracts the
         * TARGET part of the message.
         */
        private static String getTarget(String logLine) {
            int start = logLine.indexOf('(', SHORTEST_LOG_PREFIX_LENGTH);
            int end = start == -1 ? -1 : logLine.indexOf(')', start);
            return end != -1 ? logLine.substring(start + 1, end) : "";
        }
        // Extracts the TARGET_NAME part of the log message (see above).
        private static String getTargetName(String logLine) {
            int start = logLine.indexOf('}', SHORTEST_LOG_PREFIX_LENGTH);
            int end = start == -1 ? -1 : logLine.indexOf(':', start);
            if (end == -1) {
                end = logLine.length();
            }
            return start != -1 ? logLine.substring(start + 2, end) : "";
        }
    }
    /**
     * A class that records, traces and logs statistics about the UI thread's Looper.
     * The output of this class can be used in a number of interesting ways:
     * <p>
     * <ol><li>
     * When using chrometrace, there will be a near-continuous line of
     * measurements showing both event dispatches as well as idles;
     * </li><li>
     * Logging messages are output for events that run too long on the
     * event dispatcher, making it easy to identify problematic areas;
     * </li><li>
     * Statistics are output whenever there is an idle after a non-trivial
     * amount of activity, allowing information to be gathered about task
     * density and execution cadence on the Looper;
     * </li></ol>
     * <p>
     * The class attaches itself as an idle handler to the main Looper, and
     * monitors the execution of events and idle notifications. Task counters
     * accumulate between idle notifications and get reset when a new idle
     * notification is received.
     */
    private static final class IdleTracingLooperMonitor extends BasicLooperMonitor
            implements MessageQueue.IdleHandler {
        // Tags for dumping to logcat or TraceEvent
        private static final String TAG = "TraceEvent_LooperMonitor";
        private static final String IDLE_EVENT_NAME = "Looper.queueIdle";
        // Calculation constants
        private static final long FRAME_DURATION_MILLIS = 1000L / 60L; // 60 FPS
        // A reasonable threshold for defining a Looper event as "long running"
        private static final long MIN_INTERESTING_DURATION_MILLIS =
                FRAME_DURATION_MILLIS;
        // A reasonable threshold for a "burst" of tasks on the Looper
        private static final long MIN_INTERESTING_BURST_DURATION_MILLIS =
                MIN_INTERESTING_DURATION_MILLIS * 3;
        // Stats tracking
        private long mLastIdleStartedAt;
        private long mLastWorkStartedAt;
        private int mNumTasksSeen;
        private int mNumIdlesSeen;
        private int mNumTasksSinceLastIdle;
        // State
        private boolean mIdleMonitorAttached;
        // Called from within the begin/end methods only.
        // This method can only execute on the looper thread, because that is
        // the only thread that is permitted to call Looper.myqueue().
        private final void syncIdleMonitoring() {
            if (sEnabled && !mIdleMonitorAttached) {
                // approximate start time for computational purposes
                mLastIdleStartedAt = SystemClock.elapsedRealtime();
                Looper.myQueue().addIdleHandler(this);
                mIdleMonitorAttached = true;
                Log.v(TAG, "attached idle handler");
            } else if (mIdleMonitorAttached && !sEnabled) {
                Looper.myQueue().removeIdleHandler(this);
                mIdleMonitorAttached = false;
                Log.v(TAG, "detached idle handler");
            }
        }
        @Override
        final void beginHandling(final String line) {
            // Close-out any prior 'idle' period before starting new task.
            if (mNumTasksSinceLastIdle == 0) {
                TraceEvent.end(IDLE_EVENT_NAME);
            }
            mLastWorkStartedAt = SystemClock.elapsedRealtime();
            syncIdleMonitoring();
            super.beginHandling(line);
        }
        @Override
        final void endHandling(final String line) {
            final long elapsed = SystemClock.elapsedRealtime()
                    - mLastWorkStartedAt;
            if (elapsed > MIN_INTERESTING_DURATION_MILLIS) {
                traceAndLog(Log.WARN, "observed a task that took "
                        + elapsed + "ms: " + line);
            }
            super.endHandling(line);
            syncIdleMonitoring();
            mNumTasksSeen++;
            mNumTasksSinceLastIdle++;
        }
        // Emits the message both as an instant trace event and to logcat.
        private static void traceAndLog(int level, String message) {
            TraceEvent.instant("TraceEvent.LooperMonitor:IdleStats", message);
            Log.println(level, TAG, message);
        }
        @Override
        public final boolean queueIdle() {
            final long now = SystemClock.elapsedRealtime();
            if (mLastIdleStartedAt == 0) mLastIdleStartedAt = now;
            final long elapsed = now - mLastIdleStartedAt;
            mNumIdlesSeen++;
            TraceEvent.begin(IDLE_EVENT_NAME, mNumTasksSinceLastIdle + " tasks since last idle.");
            if (elapsed > MIN_INTERESTING_BURST_DURATION_MILLIS) {
                // Dump stats
                String statsString = mNumTasksSeen + " tasks and "
                        + mNumIdlesSeen + " idles processed so far, "
                        + mNumTasksSinceLastIdle + " tasks bursted and "
                        + elapsed + "ms elapsed since last idle";
                traceAndLog(Log.DEBUG, statsString);
            }
            mLastIdleStartedAt = now;
            mNumTasksSinceLastIdle = 0;
            return true; // stay installed
        }
    }
    // Holder for monitor avoids unnecessary construction on non-debug runs
    private static final class LooperMonitorHolder {
        private static final BasicLooperMonitor sInstance =
                CommandLine.getInstance().hasSwitch(BaseSwitches.ENABLE_IDLE_TRACING)
                ? new IdleTracingLooperMonitor() : new BasicLooperMonitor();
    }
    // Event name captured at construction so close() can end the same event.
    private final String mName;
    /**
     * Constructor used to support the "try with resource" construct.
     */
    private TraceEvent(String name, String arg) {
        mName = name;
        begin(name, arg);
    }
    @Override
    public void close() {
        end(mName);
    }
    /**
     * Factory used to support the "try with resource" construct.
     *
     * Note that if tracing is not enabled, this will not result in allocating an object.
     *
     * @param name Trace event name.
     * @param arg The arguments of the event.
     * @return a TraceEvent, or null if tracing is not enabled.
     */
    public static TraceEvent scoped(String name, String arg) {
        if (!(EarlyTraceEvent.enabled() || enabled())) return null;
        return new TraceEvent(name, arg);
    }
    /**
     * Similar to {@link #scoped(String, String arg)}, but uses null for |arg|.
     */
    public static TraceEvent scoped(String name) {
        return scoped(name, null);
    }
    /**
     * Register an enabled observer, such that java traces are always enabled with native.
     */
    public static void registerNativeEnabledObserver() {
        TraceEventJni.get().registerEnabledObserver();
    }
    /**
     * Notification from native that tracing is enabled/disabled.
     */
    @CalledByNative
    public static void setEnabled(boolean enabled) {
        if (enabled) EarlyTraceEvent.disable();
        // Only disable logging if Chromium enabled it originally, so as to not disrupt logging done
        // by other applications
        if (sEnabled != enabled) {
            sEnabled = enabled;
            // Android M+ systrace logs this on its own. Only log it if not writing to Android
            // systrace.
            if (sATraceEnabled) return;
            ThreadUtils.getUiThreadLooper().setMessageLogging(
                    enabled ? LooperMonitorHolder.sInstance : null);
        }
    }
    /**
     * May enable early tracing depending on the environment.
     *
     * Must be called after the command-line has been read.
     */
    public static void maybeEnableEarlyTracing() {
        EarlyTraceEvent.maybeEnable();
        if (EarlyTraceEvent.enabled()) {
            ThreadUtils.getUiThreadLooper().setMessageLogging(LooperMonitorHolder.sInstance);
        }
    }
    /**
     * Enables or disables Android systrace path of Chrome tracing. If enabled, all Chrome
     * traces will be also output to Android systrace. Because of the overhead of Android
     * systrace, this is for WebView only.
     */
    public static void setATraceEnabled(boolean enabled) {
        if (sATraceEnabled == enabled) return;
        sATraceEnabled = enabled;
        if (enabled) {
            // Calls TraceEvent.setEnabled(true) via
            // TraceLog::EnabledStateObserver::OnTraceLogEnabled
            TraceEventJni.get().startATrace();
        } else {
            // Calls TraceEvent.setEnabled(false) via
            // TraceLog::EnabledStateObserver::OnTraceLogDisabled
            TraceEventJni.get().stopATrace();
        }
    }
    /**
     * @return True if tracing is enabled, false otherwise.
     * It is safe to call trace methods without checking if TraceEvent
     * is enabled.
     */
    public static boolean enabled() {
        return sEnabled;
    }
    /**
     * Triggers the 'instant' native trace event with no arguments.
     * @param name The name of the event.
     */
    public static void instant(String name) {
        if (sEnabled) TraceEventJni.get().instant(name, null);
    }
    /**
     * Triggers the 'instant' native trace event.
     * @param name The name of the event.
     * @param arg The arguments of the event.
     */
    public static void instant(String name, String arg) {
        if (sEnabled) TraceEventJni.get().instant(name, arg);
    }
    /**
     * Triggers the 'start' native trace event with no arguments.
     * @param name The name of the event.
     * @param id The id of the asynchronous event.
     */
    public static void startAsync(String name, long id) {
        EarlyTraceEvent.startAsync(name, id);
        if (sEnabled) TraceEventJni.get().startAsync(name, id);
    }
    /**
     * Triggers the 'finish' native trace event with no arguments.
     * @param name The name of the event.
     * @param id The id of the asynchronous event.
     */
    public static void finishAsync(String name, long id) {
        EarlyTraceEvent.finishAsync(name, id);
        if (sEnabled) TraceEventJni.get().finishAsync(name, id);
    }
    /**
     * Triggers the 'begin' native trace event with no arguments.
     * @param name The name of the event.
     */
    public static void begin(String name) {
        begin(name, null);
    }
    /**
     * Triggers the 'begin' native trace event.
     * @param name The name of the event.
     * @param arg The arguments of the event.
     */
    public static void begin(String name, String arg) {
        EarlyTraceEvent.begin(name, false /*isToplevel*/);
        if (sEnabled) TraceEventJni.get().begin(name, arg);
    }
    /**
     * Triggers the 'end' native trace event with no arguments.
     * @param name The name of the event.
     */
    public static void end(String name) {
        end(name, null);
    }
    /**
     * Triggers the 'end' native trace event.
     * @param name The name of the event.
     * @param arg The arguments of the event.
     */
    public static void end(String name, String arg) {
        EarlyTraceEvent.end(name, false /*isToplevel*/);
        if (sEnabled) TraceEventJni.get().end(name, arg);
    }
    // JNI bridge to the native trace log; implementations generated by the build.
    @NativeMethods
    interface Natives {
        void registerEnabledObserver();
        void startATrace();
        void stopATrace();
        void instant(String name, String arg);
        void begin(String name, String arg);
        void end(String name, String arg);
        void beginToplevel(String target);
        void endToplevel(String target);
        void startAsync(String name, long id);
        void finishAsync(String name, long id);
    }
}
| |
package iceGUI;
import java.awt.BorderLayout;
import java.awt.FlowLayout;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JPanel;
import javax.swing.border.EmptyBorder;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.GridBagLayout;
import javax.swing.JLabel;
import java.awt.GridBagConstraints;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JRadioButton;
import javax.swing.JSlider;
import javax.swing.JTextField;
//import newerGameLogic.ComputerPlayer;
//import newerGameLogic.HumanPlayer;
//import newerGameLogic.Player;
public class SetupDialog extends JDialog implements ChangeListener, ActionListener{
private final JPanel contentPanel = new JPanel();
private JTextField txtPlayerW;
private JTextField txtPlayerB;
private JLabel lblDifficultyW;
private JLabel lblDifficultyB;
private JLabel lblDiffValW;
private JLabel lblDiffValB;
private JRadioButton rbHumanW;
private JRadioButton rbHumanB;
private JLabel lblHumanW;
private JLabel lblHumanB;
private JRadioButton rbComputerW;
private JRadioButton rbComputerB;
private JSlider sliderW;
private JSlider sliderB;
private JButton okButton;
private JButton cancelButton;
private GamePanel parent;
/**
* Launch the application.
*/
public static void main(String[] args) {
    try {
        // Stand-alone preview: show the dialog without an owning GamePanel.
        SetupDialog preview = new SetupDialog();
        preview.setDefaultCloseOperation(JDialog.DISPOSE_ON_CLOSE);
        preview.setVisible(true);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Creates the dialog and remembers the panel that should receive the chosen
 * player configuration once the user confirms.
 * @param gp the parent game panel to notify
 */
public SetupDialog(GamePanel gp){
    this();
    parent = gp;
}
/**
* Create the dialog.
*/
public SetupDialog() {
    // Modal + always-on-top so the game cannot continue until setup is confirmed.
    setModal(true);
    setAlwaysOnTop(true);
    setBounds(100, 100, 450, 300);
    setTitle("Setup Game");
    getContentPane().setLayout(new BorderLayout());
    contentPanel.setBorder(new EmptyBorder(5, 5, 5, 5));
    getContentPane().add(contentPanel, BorderLayout.CENTER);
    // 5-column grid: label / radio / sub-label / value / input widget.
    GridBagLayout gbl_contentPanel = new GridBagLayout();
    gbl_contentPanel.columnWidths = new int[]{0, 0, 0, 0, 0, 0};
    gbl_contentPanel.rowHeights = new int[]{0, 0, 0, 0, 0};
    gbl_contentPanel.columnWeights = new double[]{0.0, 0.0, 0.0, 0.0, 1.0, Double.MIN_VALUE};
    gbl_contentPanel.rowWeights = new double[]{0.0, 0.0, 0.0, 0.0, Double.MIN_VALUE};
    contentPanel.setLayout(gbl_contentPanel);
    // ---- Rows 0-1: White player (human name vs. computer difficulty) ----
    {
        JLabel lblWhite = new JLabel("White:");
        GridBagConstraints gbc_lblWhite = new GridBagConstraints();
        gbc_lblWhite.insets = new Insets(0, 0, 5, 5);
        gbc_lblWhite.gridx = 0;
        gbc_lblWhite.gridy = 0;
        contentPanel.add(lblWhite, gbc_lblWhite);
    }
    {
        rbHumanW = new JRadioButton("Human");
        rbHumanW.setSelected(true);
        rbHumanW.addActionListener(this);
        GridBagConstraints gbc_rbHumanW = new GridBagConstraints();
        gbc_rbHumanW.insets = new Insets(0, 0, 5, 5);
        gbc_rbHumanW.gridx = 1;
        gbc_rbHumanW.gridy = 0;
        contentPanel.add(rbHumanW, gbc_rbHumanW);
    }
    {
        lblHumanW = new JLabel("Name:");
        GridBagConstraints gbc_lblHumanW = new GridBagConstraints();
        gbc_lblHumanW.insets = new Insets(0, 0, 5, 5);
        gbc_lblHumanW.gridx = 2;
        gbc_lblHumanW.gridy = 0;
        contentPanel.add(lblHumanW, gbc_lblHumanW);
    }
    {
        txtPlayerW = new JTextField();
        txtPlayerW.setText("Player1");
        GridBagConstraints gbc_txtPlayerW = new GridBagConstraints();
        gbc_txtPlayerW.insets = new Insets(0, 0, 5, 0);
        gbc_txtPlayerW.fill = GridBagConstraints.HORIZONTAL;
        gbc_txtPlayerW.gridx = 4;
        gbc_txtPlayerW.gridy = 0;
        contentPanel.add(txtPlayerW, gbc_txtPlayerW);
        txtPlayerW.setColumns(10);
    }
    {
        rbComputerW = new JRadioButton("Computer");
        rbComputerW.addActionListener(this);
        GridBagConstraints gbc_rbComputerW = new GridBagConstraints();
        gbc_rbComputerW.insets = new Insets(0, 0, 5, 5);
        gbc_rbComputerW.gridx = 1;
        gbc_rbComputerW.gridy = 1;
        contentPanel.add(rbComputerW, gbc_rbComputerW);
    }
    {
        lblDifficultyW = new JLabel("Difficulty:");
        GridBagConstraints gbc_lblDifficultyW = new GridBagConstraints();
        gbc_lblDifficultyW.insets = new Insets(0, 0, 5, 5);
        gbc_lblDifficultyW.gridx = 2;
        gbc_lblDifficultyW.gridy = 1;
        contentPanel.add(lblDifficultyW, gbc_lblDifficultyW);
    }
    {
        lblDiffValW = new JLabel("5");
        GridBagConstraints gbc_lblDiffValW = new GridBagConstraints();
        gbc_lblDiffValW.insets = new Insets(0, 0, 5, 5);
        gbc_lblDiffValW.gridx = 3;
        gbc_lblDiffValW.gridy = 1;
        contentPanel.add(lblDiffValW, gbc_lblDiffValW);
    }
    {
        sliderW = new JSlider();
        sliderW.setValue(5);
        sliderW.setSnapToTicks(true);
        sliderW.setMaximum(9);
        sliderW.addChangeListener(this);
        GridBagConstraints gbc_sliderW = new GridBagConstraints();
        gbc_sliderW.fill = GridBagConstraints.HORIZONTAL;
        gbc_sliderW.insets = new Insets(0, 0, 5, 0);
        gbc_sliderW.gridx = 4;
        gbc_sliderW.gridy = 1;
        contentPanel.add(sliderW, gbc_sliderW);
    }
    // ---- Rows 2-3: Black player (mirrors the White rows) ----
    {
        JLabel lblBlack = new JLabel("Black:");
        GridBagConstraints gbc_lblBlack = new GridBagConstraints();
        gbc_lblBlack.insets = new Insets(0, 0, 5, 5);
        gbc_lblBlack.gridx = 0;
        gbc_lblBlack.gridy = 2;
        contentPanel.add(lblBlack, gbc_lblBlack);
    }
    {
        rbHumanB = new JRadioButton("Human");
        rbHumanB.setSelected(true);
        rbHumanB.addActionListener(this);
        GridBagConstraints gbc_rbHumanB = new GridBagConstraints();
        gbc_rbHumanB.insets = new Insets(0, 0, 5, 5);
        gbc_rbHumanB.gridx = 1;
        gbc_rbHumanB.gridy = 2;
        contentPanel.add(rbHumanB, gbc_rbHumanB);
    }
    {
        lblHumanB = new JLabel("Name:");
        GridBagConstraints gbc_lblHumanB = new GridBagConstraints();
        gbc_lblHumanB.insets = new Insets(0, 0, 5, 5);
        gbc_lblHumanB.gridx = 2;
        gbc_lblHumanB.gridy = 2;
        contentPanel.add(lblHumanB, gbc_lblHumanB);
    }
    {
        txtPlayerB = new JTextField();
        txtPlayerB.setText("Player2");
        GridBagConstraints gbc_txtPlayerB = new GridBagConstraints();
        gbc_txtPlayerB.insets = new Insets(0, 0, 5, 0);
        gbc_txtPlayerB.fill = GridBagConstraints.HORIZONTAL;
        gbc_txtPlayerB.gridx = 4;
        gbc_txtPlayerB.gridy = 2;
        contentPanel.add(txtPlayerB, gbc_txtPlayerB);
        txtPlayerB.setColumns(10);
    }
    {
        rbComputerB = new JRadioButton("Computer");
        rbComputerB.addActionListener(this);
        GridBagConstraints gbc_rbComputerB = new GridBagConstraints();
        gbc_rbComputerB.insets = new Insets(0, 0, 0, 5);
        gbc_rbComputerB.gridx = 1;
        gbc_rbComputerB.gridy = 3;
        contentPanel.add(rbComputerB, gbc_rbComputerB);
    }
    {
        lblDifficultyB = new JLabel("Difficulty:");
        GridBagConstraints gbc_lblDifficultyB = new GridBagConstraints();
        gbc_lblDifficultyB.insets = new Insets(0, 0, 0, 5);
        gbc_lblDifficultyB.gridx = 2;
        gbc_lblDifficultyB.gridy = 3;
        contentPanel.add(lblDifficultyB, gbc_lblDifficultyB);
    }
    {
        lblDiffValB = new JLabel("5");
        GridBagConstraints gbc_lblDiffValB = new GridBagConstraints();
        gbc_lblDiffValB.insets = new Insets(0, 0, 0, 5);
        gbc_lblDiffValB.gridx = 3;
        gbc_lblDiffValB.gridy = 3;
        contentPanel.add(lblDiffValB, gbc_lblDiffValB);
    }
    {
        sliderB = new JSlider();
        sliderB.setValue(5);
        sliderB.setSnapToTicks(true);
        sliderB.setMaximum(9);
        sliderB.addChangeListener(this);
        GridBagConstraints gbc_sliderB = new GridBagConstraints();
        gbc_sliderB.insets = new Insets(0, 0, 5, 0);
        gbc_sliderB.fill = GridBagConstraints.HORIZONTAL;
        gbc_sliderB.gridx = 4;
        gbc_sliderB.gridy = 3;
        contentPanel.add(sliderB, gbc_sliderB);
    }
    // ---- Bottom button bar ----
    {
        JPanel buttonPane = new JPanel();
        buttonPane.setLayout(new FlowLayout(FlowLayout.RIGHT));
        getContentPane().add(buttonPane, BorderLayout.SOUTH);
        {
            okButton = new JButton("OK");
            okButton.setActionCommand("OK");
            okButton.addActionListener(this);
            buttonPane.add(okButton);
            getRootPane().setDefaultButton(okButton);
        }
        {
            cancelButton = new JButton("Cancel");
            cancelButton.setActionCommand("Cancel");
            cancelButton.addActionListener(this);
            buttonPane.add(cancelButton);
        }
    }
    // Default both sides to human players with visible name fields.
    toggleWhiteHuman(true);
    toggleBlackHuman(true);
}
/**
 * Shows the White "human" widgets when {@code visible} is true (and hides them
 * otherwise); the White "computer" widgets are always toggled to the opposite state.
 *
 * @param visible visibility for the human components (computer components get the inverse)
 */
private void toggleWhiteHuman(boolean visible){
    final boolean showHuman = visible;
    rbHumanW.setSelected(showHuman);
    lblHumanW.setVisible(showHuman);
    txtPlayerW.setVisible(showHuman);
    toggleWhiteComp(!showHuman);
}
/**
 * Toggles the White computer components' visibility.
 * (SHOULD ONLY BE CALLED FROM {@code toggleWhiteHuman(boolean visible)})
 *
 * @param visible visibility for the computer components
 */
private void toggleWhiteComp(boolean visible){
    final boolean showComp = visible;
    rbComputerW.setSelected(showComp);
    sliderW.setVisible(showComp);
    lblDiffValW.setVisible(showComp);
    lblDifficultyW.setVisible(showComp);
}
/**
 * Shows the Black "human" widgets when {@code visible} is true (and hides them
 * otherwise); the Black "computer" widgets are always toggled to the opposite state.
 *
 * @param visible visibility for the human components (computer components get the inverse)
 */
private void toggleBlackHuman(boolean visible){
    final boolean showHuman = visible;
    rbHumanB.setSelected(showHuman);
    lblHumanB.setVisible(showHuman);
    txtPlayerB.setVisible(showHuman);
    toggleBlackComp(!showHuman);
}
/**
 * Toggles the Black computer components' visibility.
 * (SHOULD ONLY BE CALLED FROM {@code toggleBlackHuman(boolean visible)})
 *
 * @param visible visibility for the computer components
 */
private void toggleBlackComp(boolean visible){
    final boolean showComp = visible;
    rbComputerB.setSelected(showComp);
    sliderB.setVisible(showComp);
    lblDiffValB.setVisible(showComp);
    lblDifficultyB.setVisible(showComp);
}
/*
private void gatherAndReturnChoices(){
Player white = collectPlayerInfo(WBColor.White);
Player black = collectPlayerInfo(WBColor.Black);
if(parent != null){
parent.newGame(white, black);
} else {
System.out.println(white.getClass().getName() + " " + white.getName());
System.out.println(black.getClass().getName() + " " + black.getName());
}
}
private Player collectPlayerInfo(WBColor color){
switch(color){
case White:
if(rbHumanW.isSelected()){
Player white = new HumanPlayer(WBColor.White);
white.setName(txtPlayerW.getText());
return white;
} else
return new ComputerPlayer(WBColor.White, sliderW.getValue());
case Black:
if(rbHumanB.isSelected()){
Player black = new HumanPlayer(WBColor.Black);
black.setName(txtPlayerB.getText());
return black;
} else
return new ComputerPlayer(WBColor.Black, sliderB.getValue());
default:
throw new InvalidParameterException("Color enum unrecognized....");
}
}
*/
//Slider callback: keeps each difficulty label locked to its slider's current value
public void stateChanged(ChangeEvent e) {
    final int whiteVal = sliderW.getValue();
    if (Integer.parseInt(lblDiffValW.getText()) != whiteVal) {
        lblDiffValW.setText(Integer.toString(whiteVal));
    }
    final int blackVal = sliderB.getValue();
    if (Integer.parseInt(lblDiffValB.getText()) != blackVal) {
        lblDiffValB.setText(Integer.toString(blackVal));
    }
}
//Button callback: handles the four radio buttons plus OK/Cancel
public void actionPerformed(ActionEvent e) {
    final Object src = e.getSource();
    if (rbHumanW.equals(src)) {
        //White player is human, toggle visible elements accordingly
        toggleWhiteHuman(true);
    } else if (rbComputerW.equals(src)) {
        //White player is computer, toggle visible elements accordingly
        toggleWhiteHuman(false);
    } else if (rbHumanB.equals(src)) {
        //Black player is human, toggle visible elements accordingly
        toggleBlackHuman(true);
    } else if (rbComputerB.equals(src)) {
        //Black player is computer, toggle visible elements accordingly
        toggleBlackHuman(false);
    } else if (okButton.equals(src)) {
        //Choices made; returning choices to 'parent' is currently disabled, just close
        //gatherAndReturnChoices();
        this.dispose();
    } else if (cancelButton.equals(src)) {
        //Canceled, dispose of self
        this.dispose();
    }
}
}
| |
/*
* Copyright 1997-2017 Optimatika (www.optimatika.se)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.algo.function.aggregator;
import static org.algo.function.ComplexFunction.*;
import org.algo.constant.PrimitiveMath;
import org.algo.function.ComplexFunction;
import org.algo.function.PrimitiveFunction;
import org.algo.scalar.ComplexNumber;
import org.algo.scalar.PrimitiveScalar;
import org.algo.scalar.Scalar;
/**
 * {@link AggregatorFunction} implementations for {@link ComplexNumber} elements,
 * exposed as an {@code AggregatorSet}.
 *
 * <p>Each aggregator is stateful, so every one is cached in a {@link ThreadLocal}:
 * one instance per thread, created lazily by {@code initialValue()} and reused.
 * The accessor methods at the bottom always call {@code reset()} before handing
 * the thread's instance out, so callers receive a cleanly initialised aggregator.
 */
public final class ComplexAggregator extends AggregatorSet<ComplexNumber> {
// Counts elements whose norm is NOT small relative to ONE, i.e. the number of "non-zero" entries.
public static final ThreadLocal<AggregatorFunction<ComplexNumber>> CARDINALITY = new ThreadLocal<AggregatorFunction<ComplexNumber>>() {
@Override
protected AggregatorFunction<ComplexNumber> initialValue() {
return new AggregatorFunction<ComplexNumber>() {
// Number of "non-small" elements seen since the last reset().
private int myCount = 0;
public double doubleValue() {
return this.getNumber().doubleValue();
}
public ComplexNumber getNumber() {
return ComplexNumber.valueOf(myCount);
}
public int intValue() {
return myCount;
}
public void invoke(final ComplexNumber anArg) {
// Only count elements whose modulus is not negligible compared to ONE.
if (!PrimitiveScalar.isSmall(PrimitiveMath.ONE, anArg.norm())) {
myCount++;
}
}
public void invoke(final double anArg) {
this.invoke(ComplexNumber.valueOf(anArg));
}
public void merge(final ComplexNumber result) {
// Partial results are counts; merging is plain addition.
myCount += result.intValue();
}
public ComplexNumber merge(final ComplexNumber result1, final ComplexNumber result2) {
return ADD.invoke(result1, result2);
}
public AggregatorFunction<ComplexNumber> reset() {
myCount = 0;
return this;
}
public Scalar<ComplexNumber> toScalar() {
return this.getNumber();
}
};
}
};
// Largest modulus: max of ABS(element) over all invoked elements.
public static final ThreadLocal<AggregatorFunction<ComplexNumber>> LARGEST = new ThreadLocal<AggregatorFunction<ComplexNumber>>() {
@Override
protected AggregatorFunction<ComplexNumber> initialValue() {
return new AggregatorFunction<ComplexNumber>() {
private ComplexNumber myNumber = ComplexNumber.ZERO;
public double doubleValue() {
return this.getNumber().doubleValue();
}
public ComplexNumber getNumber() {
return myNumber;
}
public int intValue() {
return this.getNumber().intValue();
}
public void invoke(final ComplexNumber anArg) {
// ABS first, then MAX — compares magnitudes, not raw values.
myNumber = ComplexFunction.MAX.invoke(myNumber, ABS.invoke(anArg));
}
public void invoke(final double anArg) {
this.invoke(ComplexNumber.valueOf(anArg));
}
public void merge(final ComplexNumber result) {
this.invoke(result);
}
public ComplexNumber merge(final ComplexNumber result1, final ComplexNumber result2) {
return ComplexFunction.MAX.invoke(result1, result2);
}
public AggregatorFunction<ComplexNumber> reset() {
myNumber = ComplexNumber.ZERO;
return this;
}
public Scalar<ComplexNumber> toScalar() {
return this.getNumber();
}
};
}
};
// Maximum element (no ABS — uses ComplexFunction.MAX on the raw values).
public static final ThreadLocal<AggregatorFunction<ComplexNumber>> MAX = new ThreadLocal<AggregatorFunction<ComplexNumber>>() {
@Override
protected AggregatorFunction<ComplexNumber> initialValue() {
return new AggregatorFunction<ComplexNumber>() {
private ComplexNumber myNumber = ComplexNumber.ZERO;
public double doubleValue() {
return this.getNumber().doubleValue();
}
public ComplexNumber getNumber() {
return myNumber;
}
public int intValue() {
return this.getNumber().intValue();
}
public void invoke(final ComplexNumber anArg) {
myNumber = ComplexFunction.MAX.invoke(myNumber, anArg);
}
public void invoke(final double anArg) {
this.invoke(ComplexNumber.valueOf(anArg));
}
public void merge(final ComplexNumber result) {
this.invoke(result);
}
public ComplexNumber merge(final ComplexNumber result1, final ComplexNumber result2) {
return ComplexFunction.MAX.invoke(result1, result2);
}
public AggregatorFunction<ComplexNumber> reset() {
myNumber = ComplexNumber.ZERO;
return this;
}
public Scalar<ComplexNumber> toScalar() {
return this.getNumber();
}
};
}
};
// Minimum element; starts at INFINITY so the first invoked element always wins.
public static final ThreadLocal<AggregatorFunction<ComplexNumber>> MIN = new ThreadLocal<AggregatorFunction<ComplexNumber>>() {
@Override
protected AggregatorFunction<ComplexNumber> initialValue() {
return new AggregatorFunction<ComplexNumber>() {
private ComplexNumber myNumber = ComplexNumber.INFINITY;
public double doubleValue() {
return this.getNumber().doubleValue();
}
public ComplexNumber getNumber() {
// Still INFINITY means no element was ever invoked — report ZERO instead.
if (ComplexNumber.isInfinite(myNumber)) {
return ComplexNumber.ZERO;
} else {
return myNumber;
}
}
public int intValue() {
return this.getNumber().intValue();
}
public void invoke(final ComplexNumber anArg) {
myNumber = ComplexFunction.MIN.invoke(myNumber, anArg);
}
public void invoke(final double anArg) {
this.invoke(ComplexNumber.valueOf(anArg));
}
public void merge(final ComplexNumber result) {
this.invoke(result);
}
public ComplexNumber merge(final ComplexNumber result1, final ComplexNumber result2) {
return ComplexFunction.MIN.invoke(result1, result2);
}
public AggregatorFunction<ComplexNumber> reset() {
myNumber = ComplexNumber.INFINITY;
return this;
}
public Scalar<ComplexNumber> toScalar() {
return this.getNumber();
}
};
}
};
// 1-norm: sum of the moduli of all invoked elements.
public static final ThreadLocal<AggregatorFunction<ComplexNumber>> NORM1 = new ThreadLocal<AggregatorFunction<ComplexNumber>>() {
@Override
protected AggregatorFunction<ComplexNumber> initialValue() {
return new AggregatorFunction<ComplexNumber>() {
private ComplexNumber myNumber = ComplexNumber.ZERO;
public double doubleValue() {
return this.getNumber().doubleValue();
}
public ComplexNumber getNumber() {
return myNumber;
}
public int intValue() {
return this.getNumber().intValue();
}
public void invoke(final ComplexNumber anArg) {
myNumber = myNumber.add(anArg.norm());
}
public void invoke(final double anArg) {
this.invoke(ComplexNumber.valueOf(anArg));
}
public void merge(final ComplexNumber result) {
this.invoke(result);
}
public ComplexNumber merge(final ComplexNumber result1, final ComplexNumber result2) {
return ADD.invoke(result1, result2);
}
public AggregatorFunction<ComplexNumber> reset() {
myNumber = ComplexNumber.ZERO;
return this;
}
public Scalar<ComplexNumber> toScalar() {
return this.getNumber();
}
};
}
};
// 2-norm: accumulates the SUM of squared moduli; the square root is taken in getNumber().
public static final ThreadLocal<AggregatorFunction<ComplexNumber>> NORM2 = new ThreadLocal<AggregatorFunction<ComplexNumber>>() {
@Override
protected AggregatorFunction<ComplexNumber> initialValue() {
return new AggregatorFunction<ComplexNumber>() {
private ComplexNumber myNumber = ComplexNumber.ZERO;
public double doubleValue() {
return this.getNumber().doubleValue();
}
public ComplexNumber getNumber() {
// myNumber holds the accumulated sum of squares — take sqrt here.
return ComplexNumber.valueOf(PrimitiveFunction.SQRT.invoke(myNumber.norm()));
}
public int intValue() {
return this.getNumber().intValue();
}
public void invoke(final ComplexNumber anArg) {
final double tmpMod = anArg.norm();
myNumber = myNumber.add(tmpMod * tmpMod);
}
public void invoke(final double anArg) {
this.invoke(ComplexNumber.valueOf(anArg));
}
public void merge(final ComplexNumber result) {
this.invoke(result);
}
public ComplexNumber merge(final ComplexNumber result1, final ComplexNumber result2) {
// Partial results are already norms; HYPOT combines them as sqrt(r1^2 + r2^2).
return HYPOT.invoke(result1, result2);
}
public AggregatorFunction<ComplexNumber> reset() {
myNumber = ComplexNumber.ZERO;
return this;
}
public Scalar<ComplexNumber> toScalar() {
return this.getNumber();
}
};
}
};
// Product of all invoked elements; identity element is ONE.
public static final ThreadLocal<AggregatorFunction<ComplexNumber>> PRODUCT = new ThreadLocal<AggregatorFunction<ComplexNumber>>() {
@Override
protected AggregatorFunction<ComplexNumber> initialValue() {
return new AggregatorFunction<ComplexNumber>() {
private ComplexNumber myNumber = ComplexNumber.ONE;
public double doubleValue() {
return this.getNumber().doubleValue();
}
public ComplexNumber getNumber() {
return myNumber;
}
public int intValue() {
return this.getNumber().intValue();
}
public void invoke(final ComplexNumber anArg) {
myNumber = myNumber.multiply(anArg);
}
public void invoke(final double anArg) {
this.invoke(ComplexNumber.valueOf(anArg));
}
public void merge(final ComplexNumber result) {
this.invoke(result);
}
public ComplexNumber merge(final ComplexNumber result1, final ComplexNumber result2) {
return MULTIPLY.invoke(result1, result2);
}
public AggregatorFunction<ComplexNumber> reset() {
myNumber = ComplexNumber.ONE;
return this;
}
public Scalar<ComplexNumber> toScalar() {
return this.getNumber();
}
};
}
};
// Product of SQUARED elements; identity element is ONE.
public static final ThreadLocal<AggregatorFunction<ComplexNumber>> PRODUCT2 = new ThreadLocal<AggregatorFunction<ComplexNumber>>() {
@Override
protected AggregatorFunction<ComplexNumber> initialValue() {
return new AggregatorFunction<ComplexNumber>() {
private ComplexNumber myNumber = ComplexNumber.ONE;
public double doubleValue() {
return this.getNumber().doubleValue();
}
public ComplexNumber getNumber() {
return myNumber;
}
public int intValue() {
return this.getNumber().intValue();
}
public void invoke(final ComplexNumber anArg) {
myNumber = myNumber.multiply(anArg.multiply(anArg));
}
public void invoke(final double anArg) {
this.invoke(ComplexNumber.valueOf(anArg));
}
public void merge(final ComplexNumber result) {
// Deliberately NOT this.invoke(result): a partial result is already a
// product of squares, so it is multiplied in as-is (not squared again).
myNumber = myNumber.multiply(result);
}
public ComplexNumber merge(final ComplexNumber result1, final ComplexNumber result2) {
return MULTIPLY.invoke(result1, result2);
}
public AggregatorFunction<ComplexNumber> reset() {
myNumber = ComplexNumber.ONE;
return this;
}
public Scalar<ComplexNumber> toScalar() {
return this.getNumber();
}
};
}
};
// Smallest modulus among elements that are NOT small relative to ONE; starts at INFINITY.
public static final ThreadLocal<AggregatorFunction<ComplexNumber>> SMALLEST = new ThreadLocal<AggregatorFunction<ComplexNumber>>() {
@Override
protected AggregatorFunction<ComplexNumber> initialValue() {
return new AggregatorFunction<ComplexNumber>() {
private ComplexNumber myNumber = ComplexNumber.INFINITY;
public double doubleValue() {
return this.getNumber().doubleValue();
}
public ComplexNumber getNumber() {
// Still INFINITY means no qualifying element was seen — report ZERO.
if (ComplexNumber.isInfinite(myNumber)) {
return ComplexNumber.ZERO;
} else {
return myNumber;
}
}
public int intValue() {
return this.getNumber().intValue();
}
public void invoke(final ComplexNumber anArg) {
// Negligible elements are skipped so they cannot dominate the minimum.
if (!ComplexNumber.isSmall(PrimitiveMath.ONE, anArg)) {
myNumber = ComplexFunction.MIN.invoke(myNumber, ABS.invoke(anArg));
}
}
public void invoke(final double anArg) {
this.invoke(ComplexNumber.valueOf(anArg));
}
public void merge(final ComplexNumber result) {
this.invoke(result);
}
public ComplexNumber merge(final ComplexNumber result1, final ComplexNumber result2) {
return ComplexFunction.MIN.invoke(result1, result2);
}
public AggregatorFunction<ComplexNumber> reset() {
myNumber = ComplexNumber.INFINITY;
return this;
}
public Scalar<ComplexNumber> toScalar() {
return this.getNumber();
}
};
}
};
// Plain sum of all invoked elements.
public static final ThreadLocal<AggregatorFunction<ComplexNumber>> SUM = new ThreadLocal<AggregatorFunction<ComplexNumber>>() {
@Override
protected AggregatorFunction<ComplexNumber> initialValue() {
return new AggregatorFunction<ComplexNumber>() {
private ComplexNumber myNumber = ComplexNumber.ZERO;
public double doubleValue() {
return this.getNumber().doubleValue();
}
public ComplexNumber getNumber() {
return myNumber;
}
public int intValue() {
return this.getNumber().intValue();
}
public void invoke(final ComplexNumber anArg) {
myNumber = myNumber.add(anArg);
}
public void invoke(final double anArg) {
this.invoke(ComplexNumber.valueOf(anArg));
}
public void merge(final ComplexNumber result) {
this.invoke(result);
}
public ComplexNumber merge(final ComplexNumber result1, final ComplexNumber result2) {
return ADD.invoke(result1, result2);
}
public AggregatorFunction<ComplexNumber> reset() {
myNumber = ComplexNumber.ZERO;
return this;
}
public Scalar<ComplexNumber> toScalar() {
return this.getNumber();
}
};
}
};
// Sum of SQUARED elements.
public static final ThreadLocal<AggregatorFunction<ComplexNumber>> SUM2 = new ThreadLocal<AggregatorFunction<ComplexNumber>>() {
@Override
protected AggregatorFunction<ComplexNumber> initialValue() {
return new AggregatorFunction<ComplexNumber>() {
private ComplexNumber myNumber = ComplexNumber.ZERO;
public double doubleValue() {
return this.getNumber().doubleValue();
}
public ComplexNumber getNumber() {
return myNumber;
}
public int intValue() {
return this.getNumber().intValue();
}
public void invoke(final ComplexNumber anArg) {
myNumber = myNumber.add(anArg.multiply(anArg));
}
public void invoke(final double anArg) {
this.invoke(ComplexNumber.valueOf(anArg));
}
public void merge(final ComplexNumber result) {
// Deliberately NOT this.invoke(result): a partial result is already a
// sum of squares, so it is added in as-is (not squared again).
myNumber = myNumber.add(result);
}
public ComplexNumber merge(final ComplexNumber result1, final ComplexNumber result2) {
return ADD.invoke(result1, result2);
}
public AggregatorFunction<ComplexNumber> reset() {
myNumber = ComplexNumber.ZERO;
return this;
}
public Scalar<ComplexNumber> toScalar() {
return this.getNumber();
}
};
}
};
// Singleton instance of this (stateless) aggregator set; the state lives in the ThreadLocals.
private static final ComplexAggregator SET = new ComplexAggregator();
/** @return the singleton {@link ComplexAggregator} instance */
public static ComplexAggregator getSet() {
return SET;
}
// Private: instances are only obtained via getSet().
private ComplexAggregator() {
super();
}
// Each accessor below returns the calling thread's cached aggregator, reset for fresh use.
@Override
public AggregatorFunction<ComplexNumber> cardinality() {
return CARDINALITY.get().reset();
}
@Override
public AggregatorFunction<ComplexNumber> largest() {
return LARGEST.get().reset();
}
@Override
public AggregatorFunction<ComplexNumber> maximum() {
return MAX.get().reset();
}
@Override
public AggregatorFunction<ComplexNumber> minimum() {
return MIN.get().reset();
}
@Override
public AggregatorFunction<ComplexNumber> norm1() {
return NORM1.get().reset();
}
@Override
public AggregatorFunction<ComplexNumber> norm2() {
return NORM2.get().reset();
}
@Override
public AggregatorFunction<ComplexNumber> product() {
return PRODUCT.get().reset();
}
@Override
public AggregatorFunction<ComplexNumber> product2() {
return PRODUCT2.get().reset();
}
@Override
public AggregatorFunction<ComplexNumber> smallest() {
return SMALLEST.get().reset();
}
@Override
public AggregatorFunction<ComplexNumber> sum() {
return SUM.get().reset();
}
@Override
public AggregatorFunction<ComplexNumber> sum2() {
return SUM2.get().reset();
}
}
| |
/*
* TxtReader.java
* Copyright (C) 2015 University of NanChang, JiangXi, China
*
*/
package org.java.apriori;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Serializable;
import java.io.StreamTokenizer;
import java.util.Vector;
/**
 * Reads data from an TXT file.
 *
 * <p>The first line of the file names the attributes; every following line is
 * one instance whose whitespace/comma separated tokens are matched positionally
 * to those attributes.
 *
 * @author myluo
 * @version $Revision: 1512 $
 */
public class TxtReader {
    /** The source file. */
    private File m_file = null;
    /** The reader for the source file. */
    private Reader m_sourceReader = null;
    /** The tokenizer for reading the stream. */
    private StreamTokenizer m_Tokenizer = null;
    /** The vector for attributes (one per column, built from the first line). */
    private Vector<Attribute> attributes = null;
    /** The vector for instances; each element is a String[] of attribute values. */
    private Vector<Object> instances = null;

    /**
     * Class for handling an attribute. Once an attribute has been created, it
     * can't be changed.
     *
     * @author myluo
     * @version $Revision: 1512 $
     */
    public class Attribute implements Serializable {
        /** for serialization */
        private static final long serialVersionUID = 4581950157712585216L;
        /** The attribute's name. */
        private String m_Name;
        /** The attribute's type. */
        private int m_Type;
        /** The attribute's distinct values. */
        private Vector<String> m_Values = null;
        /** The values' weights (occurrence counts, parallel to m_Values). */
        private Vector<Integer> m_Weights = null;

        /**
         * Constructor for a attribute.
         */
        public Attribute() {
            m_Values = new Vector<String>();
            m_Weights = new Vector<Integer>();
        }

        /**
         * Constructor for a attribute.
         *
         * @param attributeName
         *            the name for the attribute
         */
        public Attribute(String attributeName) {
            this();
            m_Name = attributeName;
        }

        /**
         * Returns the attribute's name.
         *
         * @return the attribute's name as a string
         */
        public String getName() {
            return m_Name;
        }

        /**
         * sets the attribute's type
         *
         * @param type
         *            the attribute's type
         */
        public void setType(int type) {
            m_Type = type;
        }

        /**
         * Returns the attribute's type as an integer.
         *
         * @return the attribute's type
         */
        public int getType() {
            return m_Type;
        }

        /**
         * Returns the number of attribute values.
         *
         * @return the number of attribute values
         */
        public int numValues() {
            return m_Values.size();
        }

        /**
         * Adds an attribute value (with weight 1).
         *
         * @param value
         *            the attribute value
         */
        private void append(String value) {
            append(value, 1);
        }

        /**
         * Adds some attribute value. If the value is already known its weight
         * is increased by {@code number}; otherwise it is registered as a new
         * value with that weight.
         *
         * @param value
         *            the attribute value
         * @param number
         *            the number of added
         */
        private void append(String value, int number) {
            int index = m_Values.indexOf(value);
            if (index != -1) {
                m_Weights.setElementAt(m_Weights.elementAt(index) + number,
                        index);
            } else {
                m_Values.addElement(value);
                // Integer.valueOf instead of the deprecated new Integer(int).
                m_Weights.addElement(Integer.valueOf(number));
            }
        }

        /**
         * Returns a description of this attribute in TXT format after
         * pretreatment.
         *
         * @param option
         *            the pretreat option
         * @return a description of this attribute in TXT format after
         *         pretreatment
         */
        public Attribute pretreat(String option) {
            if (option == null)
                return this;
            Attribute attribute = new Attribute(m_Name);
            for (int i = 0; i < numValues(); i++) {
                attribute.append(
                        Pretreat.pretreat(m_Values.elementAt(i), option),
                        m_Weights.elementAt(i));
            }
            return attribute;
        }

        /**
         * Returns the vector of values.
         *
         * @return the vector of values
         */
        public Vector<String> getValues() {
            return m_Values;
        }

        /**
         * Returns the vector of weights.
         *
         * @return the vector of weights
         */
        public Vector<Integer> getWeights() {
            return m_Weights;
        }

        /**
         * Returns a description of this attribute in TXT format.
         *
         * @return a description of this attribute as a string
         */
        public String toString() {
            StringBuilder sb = new StringBuilder();
            sb.append("@attribute " + getName() + " {");
            for (int i = 0; i < numValues(); i++) {
                sb.append(" " + m_Values.elementAt(i));
                if (i < numValues() - 1)
                    sb.append(",");
            }
            sb.append(" }\n");
            return sb.toString();
        }
    }

    /**
     * Creates an empty reader; call {@link #setSource(File)} and the read
     * methods yourself if you use this constructor.
     */
    public TxtReader() {
    }

    /**
     * Reads the data completely from the given file.
     *
     * @param file
     *            the source file
     */
    public TxtReader(File file) {
        this();
        setSource(file);
        initTokenizer();
        readAttributes();
        readInstances();
        // All data has been consumed: release the file handle.
        // (Previously the reader was never closed - a resource leak.)
        closeSource();
    }

    /**
     * sets the source File
     *
     * @param file
     *            the source file
     */
    public void setSource(File file) {
        try {
            // NOTE(review): uses the platform default charset - presumably the
            // input files match it; confirm before switching to an explicit one.
            m_sourceReader = new BufferedReader(new InputStreamReader(
                    new FileInputStream(file)));
            m_file = file;
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    }

    /**
     * Closes the underlying reader if one is open; close failures are only
     * reported, matching this class's best-effort error handling.
     */
    private void closeSource() {
        if (m_sourceReader != null) {
            try {
                m_sourceReader.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Initializes the StreamTokenizer used for reading the TXT file.
     * Commas count as whitespace, '%' starts a comment, and end-of-line is
     * significant (it separates the attribute header and each instance).
     */
    private void initTokenizer() {
        if (m_sourceReader == null)
            return;
        m_Tokenizer = new StreamTokenizer(m_sourceReader);
        m_Tokenizer.resetSyntax();
        m_Tokenizer.whitespaceChars(0, ' ');
        m_Tokenizer.wordChars(' ' + 1, '\u00FF');
        m_Tokenizer.whitespaceChars(',', ',');
        m_Tokenizer.commentChar('%');
        m_Tokenizer.quoteChar('"');
        m_Tokenizer.quoteChar('\'');
        m_Tokenizer.ordinaryChar('{');
        m_Tokenizer.ordinaryChar('}');
        m_Tokenizer.eolIsSignificant(true);
    }

    /**
     * Reads and stores attributes of an TXT file: every token on the first
     * line becomes one attribute name.
     */
    private void readAttributes() {
        if (m_Tokenizer == null)
            return;
        attributes = new Vector<Attribute>();
        try {
            while (m_Tokenizer.nextToken() != StreamTokenizer.TT_EOL) {
                attributes.add(new Attribute(m_Tokenizer.sval));
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Reads and stores instances of an TXT file. Each line is one instance;
     * a line is kept only if it supplies at least numAttributes() tokens.
     */
    private void readInstances() {
        if (m_Tokenizer == null)
            return;
        instances = new Vector<Object>();
        String[] instance = new String[numAttributes()];
        int index = -1;
        try {
            while (m_Tokenizer.nextToken() != StreamTokenizer.TT_EOF) {
                if (m_Tokenizer.ttype != StreamTokenizer.TT_EOL
                        && ++index < numAttributes()) {
                    if (m_Tokenizer.ttype == StreamTokenizer.TT_WORD)
                        instance[index] = m_Tokenizer.sval;
                    else if (m_Tokenizer.ttype == StreamTokenizer.TT_NUMBER)
                        instance[index] = "" + (int) m_Tokenizer.nval;
                    // Record the value (and its count) on the matching attribute.
                    attributes.elementAt(index).append(instance[index]);
                } else {
                    // End of line (or overflow token): keep complete rows only.
                    if (index + 1 >= numAttributes())
                        instances.add(instance);
                    instance = new String[numAttributes()];
                    index = -1;
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Returns the number of attributes.
     *
     * @return the number of attributes
     */
    public int numAttributes() {
        return attributes.size();
    }

    /**
     * Returns the number of instances.
     *
     * @return the number of instances
     */
    public int numInstances() {
        return instances.size();
    }

    /**
     * Returns the source file.
     *
     * @return the source file
     */
    public File getFile() {
        return m_file;
    }

    /**
     * Returns the vector of attributes.
     *
     * @return the vector of attributes
     */
    public Vector<Attribute> getAttributes() {
        return attributes;
    }

    /**
     * Returns the vector of instances.
     *
     * @return the vector of instances
     */
    public Vector<Object> getInstances() {
        return instances;
    }

    /**
     * Returns a description of this txtReader in TXT format.
     *
     * @return a description of this txtReader as a string, or null if nothing
     *         has been read yet
     */
    public String toString() {
        if (attributes == null || instances == null)
            return null;
        StringBuilder sb = new StringBuilder();
        for (Attribute attribute : attributes)
            sb.append(attribute);
        sb.append("\n");
        sb.append("@data\n\n");
        for (Object instance : instances) {
            for (String str : (String[]) instance)
                sb.append(" " + str);
            sb.append("\n");
        }
        return sb.toString();
    }

    /**
     * Main method.
     *
     * @param args
     *            should contain the name of an input file.
     */
    public static void main(String[] args) {
        if (args.length == 0) {
            System.out.println("\nUsage: " + TxtReader.class.getName()
                    + " <dataset>\n");
            return;
        }
        System.out.println(new TxtReader(new File(args[0])));
    }
}
| |
/*
* Copyright 2011 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.projectodd.stilts.stomp.protocol;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.projectodd.stilts.stomp.Headers;
/**
 * A base STOMP frame: a command plus its headers.
 *
 * @author Bob McWhirter
 */
public class StompFrame {

    /** Known STOMP protocol versions, comparable via their numeric value. */
    public enum Version {
        VERSION_1_0("1.0", 1.0F),
        VERSION_1_1("1.1", 1.1F);

        private String versionString;
        private float versionValue;

        Version(String versionString, float versionValue) {
            this.versionString = versionString;
            this.versionValue = versionValue;
        }

        /** @return true if this version is strictly newer than {@code version}. */
        public boolean isAfter(Version version) {
            return versionValue > version.versionValue;
        }

        /** @return true if this version is strictly older than {@code version}. */
        public boolean isBefore(Version version) {
            return versionValue < version.versionValue;
        }

        /**
         * Looks up a version by its wire string (e.g. "1.1").
         *
         * @return the matching version, or null if unknown
         */
        public static Version forVersionString(String versionString) {
            for (Version version : Version.values()) {
                if (versionString.equals( version.versionString ))
                    return version;
            }
            return null;
        }

        /** @return a comma-separated list of every supported version string. */
        public static String supportedVersions() {
            Version[] versions = Version.values();
            String[] supportedVersions = new String[versions.length];
            for (int i = 0; i < versions.length; i++) {
                supportedVersions[i] = versions[i].versionString;
            }
            return StringUtils.join( supportedVersions, "," );
        }

        /** @return the version's wire string (e.g. "1.0"). */
        public String versionString() {
            return versionString;
        }
    }

    /** Standard STOMP header names. */
    public static class Header {
        public static final String CONTENT_LENGTH = "content-length";
        public static final String CONTENT_TYPE = "content-type";
        public static final String SESSION = "session";
        public static final String DESTINATION = "destination";
        public static final String ID = "id";
        public static final String RECEIPT = "receipt";
        public static final String RECEIPT_ID = "receipt-id";
        public static final String ACK = "ack";
        public static final String SELECTOR = "selector";
        public static final String TRANSACTION = "transaction";
        public static final String SUBSCRIPTION = "subscription";
        public static final String MESSAGE_ID = "message-id";
        public static final String HOST = "host";
        public static final String ACCEPT_VERSION = "accept-version";
        public static final String VERSION = "version";
        public static final String SERVER = "server";
        public static final String MESSAGE = "message";
        public static final String HEARTBEAT = "heart-beat";
        public static final String LOGIN = "login";
        public static final String PASSCODE = "passcode";
    }

    /**
     * A STOMP command (frame type). Each instance registers itself in
     * {@link #commands} under its lower-cased name so {@link #valueOf(String)}
     * can resolve it case-insensitively.
     */
    public static class Command {
        public static Map<String, Command> commands = new HashMap<String, Command>();

        /**
         * Case-insensitive lookup of a command by name.
         *
         * @return the matching command, or null if unknown
         */
        public static Command valueOf(String text) {
            text = text.toLowerCase();
            Command c = Command.commands.get( text );
            return c;
        }

        private String name;
        private boolean hasContent;

        public Command(String name, boolean hasContent) {
            this.name = name;
            this.hasContent = hasContent;
            // Self-register for valueOf() lookups.
            Command.commands.put( name.toLowerCase(), this );
        }

        /** @return true if frames with this command carry a body. */
        public boolean hasContent() {
            return this.hasContent;
        }

        /**
         * @return the command name as bytes
         *         (NOTE(review): platform default charset; STOMP command names
         *         are ASCII so this is safe in practice - confirm if that changes)
         */
        public byte[] getBytes() {
            return this.name.getBytes();
        }

        public String toString() {
            return this.name;
        }

        /**
         * Equality is by name, compared against the other object's toString().
         * Fixed: now null-safe (previously threw NullPointerException for
         * {@code equals(null)}, violating the equals contract).
         */
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null) {
                return false;
            }
            return this.name.equals( o.toString() );
        }

        /**
         * Added to keep the equals/hashCode contract: commands that compare
         * equal (same name) now hash equally, so Command works as a map/set key.
         */
        public int hashCode() {
            return this.name.hashCode();
        }

        public static final Command STOMP = new Command( "STOMP", false );
        public static final Command CONNECT = new Command( "CONNECT", false );
        public static final Command CONNECTED = new Command( "CONNECTED", false );
        public static final Command DISCONNECT = new Command( "DISCONNECT",
                false );
        public static final Command SEND = new Command( "SEND", true );
        public static final Command MESSAGE = new Command( "MESSAGE", true );
        public static final Command SUBSCRIBE = new Command( "SUBSCRIBE", false );
        public static final Command UNSUBSCRIBE = new Command( "UNSUBSCRIBE",
                false );
        public static final Command BEGIN = new Command( "BEGIN", false );
        public static final Command COMMIT = new Command( "COMMIT", false );
        public static final Command ACK = new Command( "ACK", false );
        public static final Command NACK = new Command( "NACK", false );
        public static final Command ABORT = new Command( "ABORT", false );
        public static final Command RECEIPT = new Command( "RECEIPT", false );
        public static final Command ERROR = new Command( "ERROR", true );
    }

    /**
     * Create a new outbound frame with an empty header set.
     *
     * @param command the frame's command
     */
    public StompFrame(Command command) {
        this.header = new FrameHeader( command );
    }

    /**
     * Create a new outbound frame with the given headers.
     *
     * @param command the frame's command
     * @param headers the initial headers
     */
    public StompFrame(Command command, Headers headers) {
        this.header = new FrameHeader( command, headers );
    }

    /**
     * Create a frame wrapping an already-parsed header.
     *
     * @param header the parsed frame header
     */
    public StompFrame(FrameHeader header) {
        this.header = header;
    }

    /** @return this frame's command */
    public Command getCommand() {
        return this.header.getCommand();
    }

    /** @return the value of the named header, as stored in the frame header */
    public String getHeader(String name) {
        return this.header.get( name );
    }

    /** Sets (or replaces) the named header. */
    public void setHeader(String name, String value) {
        this.header.set( name, value );
    }

    /** @return the names of all headers present on this frame */
    public Set<String> getHeaderNames() {
        return this.header.getNames();
    }

    /** @return the full header map */
    public Headers getHeaders() {
        return this.header.getMap();
    }

    public String toString() {
        return "[" + getClass().getSimpleName() + ": header=" + this.header
                + "]";
    }

    private FrameHeader header;
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.testsuite.adapter.example.authorization;
import org.jboss.arquillian.container.test.api.Deployer;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.graphene.page.Page;
import org.jboss.arquillian.test.api.ArquillianResource;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.Before;
import org.junit.Test;
import org.keycloak.admin.client.resource.AuthorizationResource;
import org.keycloak.admin.client.resource.ClientResource;
import org.keycloak.admin.client.resource.ClientsResource;
import org.keycloak.admin.client.resource.ResourcesResource;
import org.keycloak.admin.client.resource.RoleResource;
import org.keycloak.admin.client.resource.UserResource;
import org.keycloak.admin.client.resource.UsersResource;
import org.keycloak.representations.idm.ClientRepresentation;
import org.keycloak.representations.idm.RealmRepresentation;
import org.keycloak.representations.idm.RoleRepresentation;
import org.keycloak.representations.idm.UserRepresentation;
import org.keycloak.representations.idm.authorization.PolicyRepresentation;
import org.keycloak.representations.idm.authorization.ResourceRepresentation;
import org.keycloak.representations.idm.authorization.ResourceServerRepresentation;
import org.keycloak.testsuite.adapter.AbstractExampleAdapterTest;
import org.keycloak.testsuite.adapter.page.PhotozClientAuthzTestApp;
import org.keycloak.util.JsonSerialization;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.keycloak.testsuite.util.IOUtil.loadJson;
import static org.keycloak.testsuite.util.IOUtil.loadRealm;
/**
* @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
*/
public abstract class AbstractPhotozExampleAdapterTest extends AbstractExampleAdapterTest {

    private static final String REALM_NAME = "photoz";
    private static final String RESOURCE_SERVER_ID = "photoz-restful-api";
    // Extra leeway added to the access token lifespan so tokens do not expire
    // mid-test. Declared final: this is a constant, not mutable state.
    private static final int TOKEN_LIFESPAN_LEEWAY = 3; // seconds

    @ArquillianResource
    private Deployer deployer;

    @Page
    private PhotozClientAuthzTestApp clientPage;

    @Override
    public void setDefaultPageUriParameters() {
        super.setDefaultPageUriParameters();
        testRealmPage.setAuthRealm(REALM_NAME);
    }

    /** Start every test from a clean (logged-out) client page. */
    @Before
    public void beforePhotozExampleAdapterTest() {
        deleteAllCookiesForClientPage();
    }

    @Override
    public void addAdapterTestRealms(List<RealmRepresentation> testRealms) {
        RealmRepresentation realm = loadRealm(new File(TEST_APPS_HOME_DIR + "/photoz/photoz-realm.json"));
        realm.setAccessTokenLifespan(30 + TOKEN_LIFESPAN_LEEWAY); // seconds
        testRealms.add(realm);
    }

    @Deployment(name = PhotozClientAuthzTestApp.DEPLOYMENT_NAME)
    public static WebArchive deploymentClient() throws IOException {
        return exampleDeployment(PhotozClientAuthzTestApp.DEPLOYMENT_NAME);
    }

    // Managed manually (deploy/undeploy) inside each test so the authz settings
    // can be imported before the resource server starts serving requests.
    @Deployment(name = RESOURCE_SERVER_ID, managed = false)
    public static WebArchive deploymentResourceServer() throws IOException {
        return exampleDeployment(RESOURCE_SERVER_ID);
    }

    @Override
    public void beforeAbstractKeycloakTest() throws Exception {
        super.beforeAbstractKeycloakTest();
        importResourceServerSettings();
    }

    /** A user can create an album (resource appears) and delete it again (resource gone). */
    @Test
    public void testUserCanCreateAndDeleteAlbum() throws Exception {
        try {
            this.deployer.deploy(RESOURCE_SERVER_ID);
            loginToClientPage("alice", "alice");
            this.clientPage.createAlbum("Alice Family Album");
            List<ResourceRepresentation> resources = getAuthorizationResource().resources().resources();
            assertFalse(resources.stream().filter(resource -> resource.getOwner().getName().equals("alice")).collect(Collectors.toList()).isEmpty());
            this.clientPage.deleteAlbum("Alice Family Album");
            resources = getAuthorizationResource().resources().resources();
            assertTrue(resources.stream().filter(resource -> resource.getOwner().getName().equals("alice")).collect(Collectors.toList()).isEmpty());
        } finally {
            this.deployer.undeploy(RESOURCE_SERVER_ID);
        }
    }

    /**
     * With "Only Owner Policy" on the delete permission even an admin is denied;
     * switching to "Only Owner and Administrators Policy" lets the admin delete.
     */
    @Test
    public void testOnlyOwnerCanDeleteAlbum() throws Exception {
        try {
            this.deployer.deploy(RESOURCE_SERVER_ID);
            loginToClientPage("alice", "alice");
            this.clientPage.createAlbum("Alice-Family-Album");
            loginToClientPage("admin", "admin");
            this.clientPage.navigateToAdminAlbum();
            List<ResourceRepresentation> resources = getAuthorizationResource().resources().resources();
            assertFalse(resources.stream().filter(resource -> resource.getOwner().getName().equals("alice")).collect(Collectors.toList()).isEmpty());
            for (PolicyRepresentation policy : getAuthorizationResource().policies().policies()) {
                if ("Delete Album Permission".equals(policy.getName())) {
                    policy.getConfig().put("applyPolicies", "[\"Only Owner Policy\"]");
                    getAuthorizationResource().policies().policy(policy.getId()).update(policy);
                }
            }
            loginToClientPage("admin", "admin");
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.deleteAlbum("Alice-Family-Album");
            assertTrue(this.clientPage.wasDenied());
            resources = getAuthorizationResource().resources().resources();
            assertFalse(resources.stream().filter(resource -> resource.getOwner().getName().equals("alice")).collect(Collectors.toList()).isEmpty());
            for (PolicyRepresentation policy : getAuthorizationResource().policies().policies()) {
                if ("Delete Album Permission".equals(policy.getName())) {
                    policy.getConfig().put("applyPolicies", "[\"Only Owner and Administrators Policy\"]");
                    getAuthorizationResource().policies().policy(policy.getId()).update(policy);
                }
            }
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.deleteAlbum("Alice-Family-Album");
            assertFalse(this.clientPage.wasDenied());
            resources = getAuthorizationResource().resources().resources();
            assertTrue(resources.stream().filter(resource -> resource.getOwner().getName().equals("alice")).collect(Collectors.toList()).isEmpty());
        } finally {
            this.deployer.undeploy(RESOURCE_SERVER_ID);
        }
    }

    /** A regular user must be denied on the admin area. */
    @Test
    public void testRegularUserCanNotAccessAdminResources() throws Exception {
        try {
            this.deployer.deploy(RESOURCE_SERVER_ID);
            loginToClientPage("alice", "alice");
            this.clientPage.navigateToAdminAlbum();
            assertTrue(this.clientPage.wasDenied());
        } finally {
            this.deployer.undeploy(RESOURCE_SERVER_ID);
        }
    }

    /** Admin access is revoked once the client-address policy no longer matches. */
    @Test
    public void testAdminOnlyFromSpecificAddress() throws Exception {
        try {
            this.deployer.deploy(RESOURCE_SERVER_ID);
            loginToClientPage("admin", "admin");
            this.clientPage.navigateToAdminAlbum();
            assertFalse(this.clientPage.wasDenied());
            for (PolicyRepresentation policy : getAuthorizationResource().policies().policies()) {
                if ("Only From a Specific Client Address".equals(policy.getName())) {
                    String code = policy.getConfig().get("code");
                    // Literal replacement: replaceAll() would treat the dots as
                    // regex wildcards matching any character.
                    policy.getConfig().put("code", code.replace("127.0.0.1", "127.3.3.3"));
                    getAuthorizationResource().policies().policy(policy.getId()).update(policy);
                }
            }
            this.clientPage.navigateToAdminAlbum();
            assertTrue(this.clientPage.wasDenied());
        } finally {
            this.deployer.undeploy(RESOURCE_SERVER_ID);
        }
    }

    /**
     * Removing the manage-albums role from "Any User Policy" locks the admin out
     * of typed Album resources; adding "Administration Policy" restores access.
     */
    @Test
    public void testAdminWithoutPermissionsToTypedResource() throws Exception {
        try {
            this.deployer.deploy(RESOURCE_SERVER_ID);
            loginToClientPage("alice", "alice");
            this.clientPage.createAlbum("Alice Family Album");
            loginToClientPage("admin", "admin");
            this.clientPage.navigateToAdminAlbum();
            assertFalse(this.clientPage.wasDenied());
            this.clientPage.viewAlbum("Alice Family Album");
            assertFalse(this.clientPage.wasDenied());
            for (PolicyRepresentation policy : getAuthorizationResource().policies().policies()) {
                if ("Album Resource Permission".equals(policy.getName())) {
                    policy.getConfig().put("applyPolicies", "[\"Any User Policy\"]");
                    getAuthorizationResource().policies().policy(policy.getId()).update(policy);
                }
                if ("Any User Policy".equals(policy.getName())) {
                    ClientResource resourceServerClient = getClientResource(RESOURCE_SERVER_ID);
                    RoleResource manageAlbumRole = resourceServerClient.roles().get("manage-albums");
                    RoleRepresentation roleRepresentation = manageAlbumRole.toRepresentation();
                    List<Map> roles = JsonSerialization.readValue(policy.getConfig().get("roles"), List.class);
                    // Drop the manage-albums role from the policy definition.
                    roles = roles.stream()
                            .filter(map -> !map.get("id").equals(roleRepresentation.getId()))
                            .collect(Collectors.toList());
                    policy.getConfig().put("roles", JsonSerialization.writeValueAsString(roles));
                    getAuthorizationResource().policies().policy(policy.getId()).update(policy);
                }
            }
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.viewAlbum("Alice Family Album");
            assertTrue(this.clientPage.wasDenied());
            for (PolicyRepresentation policy : getAuthorizationResource().policies().policies()) {
                if ("Album Resource Permission".equals(policy.getName())) {
                    policy.getConfig().put("applyPolicies", "[\"Any User Policy\", \"Administration Policy\"]");
                    getAuthorizationResource().policies().policy(policy.getId()).update(policy);
                }
            }
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.viewAlbum("Alice Family Album");
            assertFalse(this.clientPage.wasDenied());
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.deleteAlbum("Alice Family Album");
            List<ResourceRepresentation> resources = getAuthorizationResource().resources().resources();
            assertTrue(resources.stream().filter(resource -> resource.getOwner().getName().equals("alice")).collect(Collectors.toList()).isEmpty());
        } finally {
            this.deployer.undeploy(RESOURCE_SERVER_ID);
        }
    }

    /** Delete permission can be narrowed to the owner and widened again for admins. */
    @Test
    public void testAdminWithoutPermissionsToDeleteAlbum() throws Exception {
        try {
            this.deployer.deploy(RESOURCE_SERVER_ID);
            loginToClientPage("alice", "alice");
            this.clientPage.createAlbum("Alice Family Album");
            loginToClientPage("admin", "admin");
            this.clientPage.navigateToAdminAlbum();
            assertFalse(this.clientPage.wasDenied());
            this.clientPage.deleteAlbum("Alice Family Album");
            assertFalse(this.clientPage.wasDenied());
            List<ResourceRepresentation> resources = getAuthorizationResource().resources().resources();
            assertTrue(resources.stream().filter(resource -> resource.getOwner().getName().equals("alice")).collect(Collectors.toList()).isEmpty());
            for (PolicyRepresentation policy : getAuthorizationResource().policies().policies()) {
                if ("Delete Album Permission".equals(policy.getName())) {
                    policy.getConfig().put("applyPolicies", "[\"Only Owner Policy\"]");
                    getAuthorizationResource().policies().policy(policy.getId()).update(policy);
                }
            }
            loginToClientPage("alice", "alice");
            this.clientPage.createAlbum("Alice Family Album");
            loginToClientPage("admin", "admin");
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.viewAlbum("Alice Family Album");
            assertFalse(this.clientPage.wasDenied());
            resources = getAuthorizationResource().resources().resources();
            assertFalse(resources.stream().filter(resource -> resource.getOwner().getName().equals("alice")).collect(Collectors.toList()).isEmpty());
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.deleteAlbum("Alice Family Album");
            assertTrue(this.clientPage.wasDenied());
            for (PolicyRepresentation policy : getAuthorizationResource().policies().policies()) {
                if ("Delete Album Permission".equals(policy.getName())) {
                    policy.getConfig().put("applyPolicies", "[\"Only Owner and Administrators Policy\"]");
                    getAuthorizationResource().policies().policy(policy.getId()).update(policy);
                }
            }
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.deleteAlbum("Alice Family Album");
            assertFalse(this.clientPage.wasDenied());
            resources = getAuthorizationResource().resources().resources();
            assertTrue(resources.stream().filter(resource -> resource.getOwner().getName().equals("alice")).collect(Collectors.toList()).isEmpty());
        } finally {
            this.deployer.undeploy(RESOURCE_SERVER_ID);
        }
    }

    /**
     * After revoking consent, a scope-required client role denies access until
     * the scope is explicitly requested at login.
     */
    @Test
    public void testClientRoleRepresentingUserConsent() throws Exception {
        try {
            this.deployer.deploy(RESOURCE_SERVER_ID);
            loginToClientPage("alice", "alice");
            assertFalse(this.clientPage.wasDenied());
            UsersResource usersResource = realmsResouce().realm(REALM_NAME).users();
            List<UserRepresentation> users = usersResource.search("alice", null, null, null, null, null);
            assertFalse(users.isEmpty());
            UserRepresentation userRepresentation = users.get(0);
            UserResource userResource = usersResource.get(userRepresentation.getId());
            ClientResource html5ClientApp = getClientResource("photoz-html5-client");
            userResource.revokeConsent(html5ClientApp.toRepresentation().getClientId());
            ClientResource resourceServerClient = getClientResource(RESOURCE_SERVER_ID);
            RoleResource roleResource = resourceServerClient.roles().get("manage-albums");
            RoleRepresentation roleRepresentation = roleResource.toRepresentation();
            roleRepresentation.setScopeParamRequired(true);
            roleResource.update(roleRepresentation);
            loginToClientPage("alice", "alice");
            assertTrue(this.clientPage.wasDenied());
            loginToClientPage("alice", "alice", RESOURCE_SERVER_ID + "/manage-albums");
            assertFalse(this.clientPage.wasDenied());
        } finally {
            this.deployer.undeploy(RESOURCE_SERVER_ID);
        }
    }

    /**
     * Marking the manage-albums role as not required in "Any User Policy"
     * restores access without requesting the scope at login.
     */
    @Test
    public void testClientRoleNotRequired() throws Exception {
        try {
            this.deployer.deploy(RESOURCE_SERVER_ID);
            loginToClientPage("alice", "alice");
            assertFalse(this.clientPage.wasDenied());
            UsersResource usersResource = realmsResouce().realm(REALM_NAME).users();
            List<UserRepresentation> users = usersResource.search("alice", null, null, null, null, null);
            assertFalse(users.isEmpty());
            UserRepresentation userRepresentation = users.get(0);
            UserResource userResource = usersResource.get(userRepresentation.getId());
            ClientResource html5ClientApp = getClientResource("photoz-html5-client");
            userResource.revokeConsent(html5ClientApp.toRepresentation().getClientId());
            ClientResource resourceServerClient = getClientResource(RESOURCE_SERVER_ID);
            RoleResource manageAlbumRole = resourceServerClient.roles().get("manage-albums");
            RoleRepresentation roleRepresentation = manageAlbumRole.toRepresentation();
            roleRepresentation.setScopeParamRequired(true);
            manageAlbumRole.update(roleRepresentation);
            loginToClientPage("alice", "alice");
            assertTrue(this.clientPage.wasDenied());
            for (PolicyRepresentation policy : getAuthorizationResource().policies().policies()) {
                if ("Any User Policy".equals(policy.getName())) {
                    List<Map> roles = JsonSerialization.readValue(policy.getConfig().get("roles"), List.class);
                    roles.forEach(role -> {
                        String roleId = (String) role.get("id");
                        if (roleId.equals(manageAlbumRole.toRepresentation().getId())) {
                            role.put("required", false);
                        }
                    });
                    policy.getConfig().put("roles", JsonSerialization.writeValueAsString(roles));
                    getAuthorizationResource().policies().policy(policy.getId()).update(policy);
                }
            }
            loginToClientPage("alice", "alice");
            assertFalse(this.clientPage.wasDenied());
        } finally {
            this.deployer.undeploy(RESOURCE_SERVER_ID);
        }
    }

    /**
     * A resource-instance permission with "Only Owner Policy" overrides the
     * permission inherited from the typed (parent) resource.
     */
    @Test
    public void testOverridePermissionFromResourceParent() throws Exception {
        try {
            this.deployer.deploy(RESOURCE_SERVER_ID);
            loginToClientPage("alice", "alice");
            String resourceName = "My Resource Instance";
            this.clientPage.createAlbum(resourceName);
            assertFalse(this.clientPage.wasDenied());
            this.clientPage.viewAlbum(resourceName);
            assertFalse(this.clientPage.wasDenied());
            this.clientPage.navigateTo();
            this.clientPage.deleteAlbum(resourceName);
            assertFalse(this.clientPage.wasDenied());
            this.clientPage.createAlbum(resourceName);
            loginToClientPage("admin", "admin");
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.viewAlbum(resourceName);
            assertFalse(this.clientPage.wasDenied());
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.deleteAlbum(resourceName);
            assertFalse(this.clientPage.wasDenied());
            loginToClientPage("alice", "alice");
            this.clientPage.createAlbum(resourceName);
            assertFalse(this.clientPage.wasDenied());
            getAuthorizationResource().resources().resources().forEach(resource -> {
                if (resource.getName().equals(resourceName)) {
                    try {
                        PolicyRepresentation resourceInstancePermission = new PolicyRepresentation();
                        resourceInstancePermission.setName(resourceName + "Permission");
                        resourceInstancePermission.setType("resource");
                        Map<String, String> config = new HashMap<>();
                        config.put("resources", JsonSerialization.writeValueAsString(Arrays.asList(resource.getId())));
                        config.put("applyPolicies", JsonSerialization.writeValueAsString(Arrays.asList("Only Owner Policy")));
                        resourceInstancePermission.setConfig(config);
                        getAuthorizationResource().policies().create(resourceInstancePermission);
                    } catch (Exception e) {
                        throw new RuntimeException("Error creating policy.", e);
                    }
                }
            });
            loginToClientPage("admin", "admin");
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.viewAlbum(resourceName);
            assertTrue(this.clientPage.wasDenied());
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.deleteAlbum(resourceName);
            assertTrue(this.clientPage.wasDenied());
            loginToClientPage("alice", "alice");
            this.clientPage.deleteAlbum(resourceName);
            assertFalse(this.clientPage.wasDenied());
            ResourcesResource resourcesResource = getAuthorizationResource().resources();
            List<ResourceRepresentation> resources = resourcesResource.resources();
            assertTrue(resources.stream().filter(resource -> resource.getOwner().getName().equals("alice")).collect(Collectors.toList()).isEmpty());
        } finally {
            this.deployer.undeploy(RESOURCE_SERVER_ID);
        }
    }

    /**
     * Scopes removed from a resource instance fall back to the permissions
     * inherited from the typed (parent) resource.
     */
    @Test
    public void testInheritPermissionFromResourceParent() throws Exception {
        try {
            this.deployer.deploy(RESOURCE_SERVER_ID);
            loginToClientPage("alice", "alice");
            String resourceName = "My Resource Instance";
            this.clientPage.createAlbum(resourceName);
            assertFalse(this.clientPage.wasDenied());
            this.clientPage.viewAlbum(resourceName);
            assertFalse(this.clientPage.wasDenied());
            this.clientPage.navigateTo();
            this.clientPage.deleteAlbum(resourceName);
            assertFalse(this.clientPage.wasDenied());
            this.clientPage.createAlbum(resourceName);
            loginToClientPage("admin", "admin");
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.viewAlbum(resourceName);
            assertFalse(this.clientPage.wasDenied());
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.deleteAlbum(resourceName);
            assertFalse(this.clientPage.wasDenied());
            loginToClientPage("alice", "alice");
            this.clientPage.createAlbum(resourceName);
            assertFalse(this.clientPage.wasDenied());
            ResourcesResource resourcesResource = getAuthorizationResource().resources();
            resourcesResource.resources().forEach(resource -> {
                if (resource.getName().equals(resourceName)) {
                    try {
                        PolicyRepresentation resourceInstancePermission = new PolicyRepresentation();
                        resourceInstancePermission.setName(resourceName + "Permission");
                        resourceInstancePermission.setType("resource");
                        Map<String, String> config = new HashMap<>();
                        config.put("resources", JsonSerialization.writeValueAsString(Arrays.asList(resource.getId())));
                        config.put("applyPolicies", JsonSerialization.writeValueAsString(Arrays.asList("Only Owner Policy")));
                        resourceInstancePermission.setConfig(config);
                        getAuthorizationResource().policies().create(resourceInstancePermission);
                    } catch (Exception e) {
                        throw new RuntimeException("Error creating policy.", e);
                    }
                }
            });
            loginToClientPage("admin", "admin");
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.viewAlbum(resourceName);
            assertTrue(this.clientPage.wasDenied());
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.deleteAlbum(resourceName);
            assertTrue(this.clientPage.wasDenied());
            resourcesResource.resources().forEach(resource -> {
                if (resource.getName().equals(resourceName)) {
                    resource.setScopes(resource.getScopes().stream().filter(scope -> !scope.getName().equals("urn:photoz.com:scopes:album:view")).collect(Collectors.toSet()));
                    resourcesResource.resource(resource.getId()).update(resource);
                }
            });
            loginToClientPage("admin", "admin");
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.viewAlbum(resourceName);
            assertFalse(this.clientPage.wasDenied());
            this.clientPage.navigateToAdminAlbum();
            this.clientPage.deleteAlbum(resourceName);
            assertTrue(this.clientPage.wasDenied());
            loginToClientPage("alice", "alice");
            this.clientPage.deleteAlbum(resourceName);
            assertFalse(this.clientPage.wasDenied());
            List<ResourceRepresentation> resources = resourcesResource.resources();
            assertTrue(resources.stream().filter(resource -> resource.getOwner().getName().equals("alice")).collect(Collectors.toList()).isEmpty());
            resourcesResource.resources().forEach(resource -> {
                if (resource.getName().equals(resourceName)) {
                    resource.setScopes(Collections.emptySet());
                    resourcesResource.resource(resource.getId()).update(resource);
                }
            });
        } finally {
            this.deployer.undeploy(RESOURCE_SERVER_ID);
        }
    }

    /** Import the resource server's authorization settings from the bundled JSON file. */
    private void importResourceServerSettings() throws FileNotFoundException {
        getAuthorizationResource().importSettings(loadJson(new FileInputStream(new File(TEST_APPS_HOME_DIR + "/photoz/photoz-restful-api-authz-service.json")), ResourceServerRepresentation.class));
    }

    /** @return the authorization sub-resource of the resource server client */
    private AuthorizationResource getAuthorizationResource() throws FileNotFoundException {
        return getClientResource(RESOURCE_SERVER_ID).authorization();
    }

    /** Resolve a realm client by its clientId via the admin client. */
    private ClientResource getClientResource(String clientId) {
        ClientsResource clients = this.realmsResouce().realm(REALM_NAME).clients();
        ClientRepresentation resourceServer = clients.findByClientId(clientId).get(0);
        return clients.get(resourceServer.getId());
    }

    private void deleteAllCookiesForClientPage() {
        clientPage.navigateTo();
        driver.manage().deleteAllCookies();
    }

    private void loginToClientPage(String username, String password, String... scopes) {
        // We need to log out by deleting cookies because the log out button sometimes doesn't work in PhantomJS
        deleteAllCookiesForClientPage();
        deleteAllCookiesForTestRealm();
        clientPage.navigateTo();
        clientPage.login(username, password, scopes);
    }
}
| |
package org.docksidestage.hangar.dbflute.dtomapper.bs.customize;
import java.io.Serializable;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.Set;
import org.dbflute.Entity;
import org.dbflute.dbmeta.DBMeta;
import org.dbflute.dbmeta.InstanceKeyEntity;
import org.dbflute.dbmeta.dtomap.DtoMapper;
import org.dbflute.dbmeta.dtomap.InstanceKeyDto;
import org.dbflute.helper.beans.DfBeanDesc;
import org.dbflute.helper.beans.DfPropertyDesc;
import org.dbflute.helper.beans.factory.DfBeanDescFactory;
import org.dbflute.jdbc.Classification;
import org.docksidestage.hangar.dbflute.exentity.customize.*;
import org.docksidestage.hangar.simpleflute.dto.customize.*;
import org.docksidestage.hangar.dbflute.dtomapper.customize.*;
/**
* The DTO mapper of PurchaseMaxPriceMember. <br>
* <pre>
* [primary-key]
*
*
* [column]
* MEMBER_ID, MEMBER_NAME, PURCHASE_MAX_PRICE, MEMBER_STATUS_NAME
*
* [sequence]
*
*
* [identity]
*
*
* [version-no]
*
*
* [foreign-table]
*
*
* [referrer-table]
*
*
* [foreign-property]
*
*
* [referrer-property]
*
* </pre>
* @author DBFlute(AutoGenerator)
*/
public abstract class BsPurchaseMaxPriceMemberDtoMapper implements DtoMapper<PurchaseMaxPriceMember, PurchaseMaxPriceMemberDto>, Serializable {
// ===================================================================================
// Definition
// ==========
/** The serial version UID for object serialization. (Default) */
private static final long serialVersionUID = 1L;
// ===================================================================================
// Attribute
// =========
protected final Map<Entity, Object> _relationDtoMap;
protected final Map<Object, Entity> _relationEntityMap;
protected boolean _exceptCommonColumn;
protected boolean _reverseReference; // default: one-way reference
protected boolean _instanceCache = true; // default: cached
// ===================================================================================
// Constructor
// ===========
public BsPurchaseMaxPriceMemberDtoMapper() {
_relationDtoMap = new HashMap<Entity, Object>();
_relationEntityMap = new HashMap<Object, Entity>();
}
public BsPurchaseMaxPriceMemberDtoMapper(Map<Entity, Object> relationDtoMap, Map<Object, Entity> relationEntityMap) {
_relationDtoMap = relationDtoMap;
_relationEntityMap = relationEntityMap;
}
// ===================================================================================
// Mapping
// =======
// -----------------------------------------------------
// to DTO
// ------
/**
* {@inheritDoc}
*/
public PurchaseMaxPriceMemberDto mappingToDto(PurchaseMaxPriceMember entity) {
if (entity == null) {
return null;
}
PurchaseMaxPriceMemberDto dto = new PurchaseMaxPriceMemberDto();
dto.setMemberId(entity.getMemberId());
dto.setMemberName(entity.getMemberName());
dto.setPurchaseMaxPrice(entity.getPurchaseMaxPrice());
dto.setMemberStatusName(entity.getMemberStatusName());
reflectDerivedProperty(entity, dto, true);
return dto;
}
/**
* {@inheritDoc}
*/
public List<PurchaseMaxPriceMemberDto> mappingToDtoList(List<PurchaseMaxPriceMember> entityList) {
if (entityList == null) {
throw new IllegalArgumentException("The argument 'entityList' should not be null.");
}
List<PurchaseMaxPriceMemberDto> dtoList = new ArrayList<PurchaseMaxPriceMemberDto>();
for (PurchaseMaxPriceMember entity : entityList) {
PurchaseMaxPriceMemberDto dto = mappingToDto(entity);
if (dto != null) {
dtoList.add(dto);
} else {
if (isAcceptNullElementOnList()) {
dtoList.add(null);
}
}
}
return dtoList;
}
// -----------------------------------------------------
// to Entity
// ---------
/**
* {@inheritDoc}
*/
public PurchaseMaxPriceMember mappingToEntity(PurchaseMaxPriceMemberDto dto) {
if (dto == null) {
return null;
}
PurchaseMaxPriceMember entity = new PurchaseMaxPriceMember();
if (needsMapping(dto, dto.getMemberId(), "memberId")) {
entity.setMemberId(dto.getMemberId());
}
if (needsMapping(dto, dto.getMemberName(), "memberName")) {
entity.setMemberName(dto.getMemberName());
}
if (needsMapping(dto, dto.getPurchaseMaxPrice(), "purchaseMaxPrice")) {
entity.setPurchaseMaxPrice(dto.getPurchaseMaxPrice());
}
if (needsMapping(dto, dto.getMemberStatusName(), "memberStatusName")) {
entity.setMemberStatusName(dto.getMemberStatusName());
}
reflectDerivedProperty(entity, dto, false);
return entity;
}
/**
* Does the property need to be mapped to an entity? <br>
* If modified info of DTO has at least one property, only modified properties are mapped.
* And if no property is modified, all properties are mapped (but the other option exists).
* @param dto The instance of DTO. (NotNull)
* @param value The value of DTO's property. (NotNull)
* @param propName The property name of DTO. (NotNull)
* @return The determination, true or false.
*/
protected boolean needsMapping(PurchaseMaxPriceMemberDto dto, Object value, String propName) {
Set<String> modifiedProperties = dto.mymodifiedProperties();
if (modifiedProperties.isEmpty()) {
return isMappingToEntityContainsNull() || value != null;
}
return modifiedProperties.contains(propName);
}
/**
* Does the mapping to an entity contain null values? (when no property is modified) <br>
* Default is true that means a setter is called if the value is null.
* But this method is valid only when no property is modified.
* @return The determination, true or false.
*/
protected boolean isMappingToEntityContainsNull() { // for extension
return true; // as default
}
/**
* {@inheritDoc}
*/
public List<PurchaseMaxPriceMember> mappingToEntityList(List<PurchaseMaxPriceMemberDto> dtoList) {
if (dtoList == null) {
throw new IllegalArgumentException("The argument 'dtoList' should not be null.");
}
List<PurchaseMaxPriceMember> entityList = new ArrayList<PurchaseMaxPriceMember>();
for (PurchaseMaxPriceMemberDto dto : dtoList) {
PurchaseMaxPriceMember entity = mappingToEntity(dto);
if (entity != null) {
entityList.add(entity);
} else {
if (isAcceptNullElementOnList()) {
entityList.add(null);
}
}
}
return entityList;
}
protected boolean isAcceptNullElementOnList() {
return true; // as default
}
// -----------------------------------------------------
// Instance Key
// ------------
protected Object createInstanceKeyDto(final Object dto, final int instanceHash) {
return new InstanceKeyDto(dto, instanceHash);
}
protected InstanceKeyEntity createInstanceKeyEntity(Entity entity) {
return new InstanceKeyEntity(entity);
}
public void disableInstanceCache() { // internal option
_instanceCache = false;
}
// -----------------------------------------------------
// Derived Property
// ----------------
protected void reflectDerivedProperty(Entity entity, Object dto, boolean toDto) {
DfBeanDesc entityDesc = DfBeanDescFactory.getBeanDesc(entity.getClass());
DfBeanDesc dtoDesc = DfBeanDescFactory.getBeanDesc(dto.getClass());
DBMeta dbmeta = entity.asDBMeta();
for (String propertyName : entityDesc.getProppertyNameList()) {
if (isOutOfDerivedPropertyName(entity, dto, toDto, dbmeta, entityDesc, dtoDesc, propertyName)) {
continue;
}
DfPropertyDesc entityProp = entityDesc.getPropertyDesc(propertyName);
Class<?> propertyType = entityProp.getPropertyType();
if (isOutOfDerivedPropertyType(entity, dto, toDto, propertyName, propertyType)) {
continue;
}
if (entityProp.isReadable() && entityProp.isWritable()) {
DfPropertyDesc dtoProp = dtoDesc.getPropertyDesc(propertyName);
if (dtoProp.isReadable() && dtoProp.isWritable()) {
if (toDto) {
dtoProp.setValue(dto, entityProp.getValue(entity));
} else {
entityProp.setValue(entity, dtoProp.getValue(dto));
}
}
}
}
}
protected boolean isOutOfDerivedPropertyName(Entity entity, Object dto, boolean toDto
, DBMeta dbmeta, DfBeanDesc entityDesc, DfBeanDesc dtoDesc
, String propertyName) {
return dbmeta.hasColumn(propertyName)
|| dbmeta.hasForeign(propertyName) || dbmeta.hasReferrer(propertyName)
|| !dtoDesc.hasPropertyDesc(propertyName);
}
protected boolean isOutOfDerivedPropertyType(Entity entity, Object dto, boolean toDto
, String propertyName, Class<?> propertyType) {
return List.class.isAssignableFrom(propertyType)
|| Entity.class.isAssignableFrom(propertyType)
|| Classification.class.isAssignableFrom(propertyType);
}
// ===================================================================================
// Suppress Relation
// =================
// (basically) to suppress infinity loop
protected void doSuppressAll() { // internal
}
protected void doSuppressClear() { // internal
}
// ===================================================================================
// Mapping Option
// ==============
/**
* {@inheritDoc}
*/
public void setBaseOnlyMapping(boolean baseOnlyMapping) {
if (baseOnlyMapping) {
doSuppressAll();
} else {
doSuppressClear();
}
}
protected boolean isExceptCommonColumn() {
return _exceptCommonColumn;
}
/**
* {@inheritDoc}
*/
public void setExceptCommonColumn(boolean exceptCommonColumn) {
_exceptCommonColumn = exceptCommonColumn;
}
protected boolean isReverseReference() {
return _reverseReference;
}
/**
* {@inheritDoc}
*/
public void setReverseReference(boolean reverseReference) {
_reverseReference = reverseReference;
}
// -----------------------------------------------------
// Easy-to-Use
// -----------
/**
* Enable base-only mapping that means the mapping ignores all references.
* @return this. (NotNull)
*/
public PurchaseMaxPriceMemberDtoMapper baseOnlyMapping() {
setBaseOnlyMapping(true);
return (PurchaseMaxPriceMemberDtoMapper)this;
}
/**
* Enable except common column that means the mapping excepts common column.
* @return this. (NotNull)
*/
public PurchaseMaxPriceMemberDtoMapper exceptCommonColumn() {
setExceptCommonColumn(true);
return (PurchaseMaxPriceMemberDtoMapper)this;
}
/**
* Enable reverse reference that means the mapping contains reverse references.
* @return this. (NotNull)
*/
public PurchaseMaxPriceMemberDtoMapper reverseReference() {
setReverseReference(true);
return (PurchaseMaxPriceMemberDtoMapper)this;
}
}
| |
/**
* Copyright 2017 iovation, Inc.
* <p>
* Licensed under the MIT License.
* You may not use this file except in compliance with the License.
* A copy of the License is located in the "LICENSE.txt" file accompanying
* this file. This file is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.iovation.launchkey.sdk.integration;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Stage;
import com.google.inject.spi.Message;
import com.iovation.launchkey.sdk.FactoryFactoryBuilder;
import com.iovation.launchkey.sdk.client.OrganizationFactory;
import com.iovation.launchkey.sdk.crypto.Crypto;
import com.iovation.launchkey.sdk.crypto.JCECrypto;
import com.iovation.launchkey.sdk.integration.constants.Appium;
import com.iovation.launchkey.sdk.integration.constants.Capability;
import com.iovation.launchkey.sdk.integration.constants.Launchkey;
import com.iovation.launchkey.sdk.integration.managers.kobiton.KobitonDevice;
import com.iovation.launchkey.sdk.integration.managers.kobiton.KobitonManager;
import com.iovation.launchkey.sdk.integration.managers.kobiton.transport.RequestFactory;
import com.iovation.launchkey.sdk.integration.mobile.driver.NullMobileDriver;
import com.iovation.launchkey.sdk.integration.mobile.driver.SampleAppMobileDriver;
import com.iovation.launchkey.sdk.integration.mobile.driver.android.SampleAppAndroidDriver;
import io.cucumber.core.backend.ObjectFactory;
import io.cucumber.guice.CucumberModules;
import io.cucumber.guice.ScenarioScope;
import io.cucumber.java.Before;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.junit.AssumptionViolatedException;
import org.openqa.selenium.remote.DesiredCapabilities;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.security.Provider;
import java.security.Security;
import java.util.List;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.security.interfaces.RSAPrivateKey;
/**
 * Cucumber {@link ObjectFactory} backed by Guice. Step definition instances are
 * created from a single injector configured with the scenario-scope module plus
 * {@link CucumberJuiceModule}, which wires up crypto, the LaunchKey SDK factory
 * and (optionally) a mobile driver from system properties.
 */
public class CucumberGuiceObjectFactory implements ObjectFactory {

    private final Injector injector;

    public CucumberGuiceObjectFactory() {
        injector = Guice.createInjector(Stage.PRODUCTION,
                CucumberModules.createScenarioModule(), new CucumberJuiceModule());
    }

    @Override
    public void start() {
        injector.getInstance(ScenarioScope.class).enterScope();
    }

    @Override
    public void stop() {
        injector.getInstance(ScenarioScope.class).exitScope();
    }

    @Override
    public boolean addClass(Class<?> aClass) {
        // All glue classes are instantiable via the injector; nothing to register.
        return true;
    }

    @Override
    public <T> T getInstance(Class<T> aClass) {
        return injector.getInstance(aClass);
    }

    /**
     * Guice module that loads configuration from {@code application.properties}
     * (system properties win) and binds crypto, the SDK organization factory and
     * the mobile driver used by device tests.
     */
    public static class CucumberJuiceModule extends AbstractModule {

        @Before("@device_testing")
        public void skipDeviceScenarios() throws Exception {
            // Literal-first equals avoids an NPE when the property is not set at all.
            if (!"true".equals(System.getProperty(Capability.run_device_tests))) {
                throw new AssumptionViolatedException("Skipping device tests");
            }
        }

        @Override
        protected void configure() {
            // Seed system properties from the bundled properties file without
            // overriding values already supplied on the command line.
            try (InputStream in = CucumberGuiceObjectFactory.class.getResourceAsStream("/application.properties")) {
                Properties properties = new Properties();
                properties.load(in);
                for (String key : properties.stringPropertyNames()) {
                    if (System.getProperty(key) == null) {
                        System.setProperty(key, properties.getProperty(key));
                    }
                }
            } catch (Exception e) {
                addError("Unable to load properties file.", e);
            }
            Provider provider = new BouncyCastleProvider();
            if (Security.getProvider(BouncyCastleProvider.PROVIDER_NAME) == null) {
                Security.addProvider(provider);
            }
            bind(Provider.class).toInstance(provider);
            JCECrypto crypto = new JCECrypto(provider);
            bind(Crypto.class).toInstance(crypto);
            String dualPurposeKey = getPrivateKeyPEM(provider);
            String baseURL = getApiBaseUrl();
            String organizationId = getOrganizationId();
            OrganizationFactory organizationFactory = null;
            if (dualPurposeKey != null && baseURL != null && organizationId != null) {
                // Only parse the key once we know it was read successfully; parsing
                // unconditionally would NPE before the configuration errors above
                // ever get reported.
                RSAPrivateKey rsaDualPurposeKey = makePrivateKeyFromPEM(provider, dualPurposeKey);
                String dualPurposeKeyFingerprint =
                        getPublicKeyFingerprintFromPrivateKey(provider, rsaDualPurposeKey);
                Map<String, RSAPrivateKey> keys = new ConcurrentHashMap<>();
                keys.put(dualPurposeKeyFingerprint, rsaDualPurposeKey);
                organizationFactory = new FactoryFactoryBuilder()
                        .setAPIBaseURL(baseURL)
                        .setJCEProvider(provider)
                        .setRequestExpireSeconds(1)
                        .build()
                        .makeOrganizationFactory(organizationId, keys, dualPurposeKeyFingerprint);
            }
            bind(OrganizationFactory.class).toInstance(organizationFactory);
            if (getBooleanPropertyElseAddError(Capability.run_device_tests)) {
                bind(SampleAppMobileDriver.class).toInstance(getMobileDriver());
            } else {
                Logger.getGlobal().warning("Not running device based tests");
                bind(SampleAppMobileDriver.class).toInstance(new NullMobileDriver());
            }
        }

        /** Reads and validates the API base URL property. May return null after recording an error. */
        private String getApiBaseUrl() {
            String baseUrl = getPropertyElseAddError(Launchkey.API.base_url);
            try {
                //noinspection ResultOfMethodCallIgnored
                URI.create(baseUrl);
            } catch (Exception e) {
                addError(new Message("Invalid Base URL specified.", e));
            }
            return baseUrl;
        }

        /**
         * Loads the dual purpose private key PEM from the configured file.
         * @return the PEM text, or null when the property is missing or the file unreadable.
         */
        private String getPrivateKeyPEM(Provider provider) {
            String privateKeyFile = getPropertyElseAddError(Launchkey.Organization.dual_purpose_key);
            if (privateKeyFile == null) {
                addError(new Message("Unrecognized key type provided."));
                return null;
            }
            String privateKey = null;
            if (!privateKeyFile.isEmpty()) {
                try {
                    privateKey = readFile(privateKeyFile);
                    // Parse once here purely to validate the PEM content early.
                    JCECrypto.getRSAPrivateKeyFromPEM(provider, privateKey);
                } catch (IOException e) {
                    addError(new Message("Unable to read RSA dual purpose key from file.", e));
                } catch (Exception e) {
                    addError(new Message("Invalid RSA dual purpose key provided. The key must be PEM formatted.", e));
                }
            }
            return privateKey;
        }

        private RSAPrivateKey makePrivateKeyFromPEM(Provider provider, String privateKeyPEM) {
            JCECrypto jceCrypto = new JCECrypto(provider);
            return jceCrypto.getRSAPrivateKeyFromPEM(provider, privateKeyPEM);
        }

        private String getPublicKeyFingerprintFromPrivateKey(Provider provider, RSAPrivateKey privateKey) {
            JCECrypto jceCrypto = new JCECrypto(provider);
            return jceCrypto.getRsaPublicKeyFingerprint(provider, privateKey);
        }

        /** Reads the organization ID property and validates it is a UUID. */
        private String getOrganizationId() {
            String organizationId = getPropertyElseAddError(Launchkey.Organization.id);
            if (organizationId != null && !organizationId.isEmpty()) {
                try {
                    //noinspection ResultOfMethodCallIgnored
                    UUID.fromString(organizationId);
                } catch (Exception e) {
                    addError(new Message("The Organization ID was invalid. It must be a UUID.", e));
                }
            }
            return organizationId;
        }

        /** Reads a whole text file, normalizing line endings to {@code \n}. */
        @SuppressWarnings("Duplicates")
        private String readFile(String fileName) throws IOException {
            StringBuilder sb = new StringBuilder();
            try (BufferedReader reader = new BufferedReader(new FileReader(fileName))) {
                String line = reader.readLine();
                while (line != null) {
                    sb.append(line);
                    sb.append("\n");
                    line = reader.readLine();
                }
            }
            return sb.toString();
        }

        /**
         * Builds the Appium driver for device tests, optionally uploading the app
         * to Kobiton and picking a free device first.
         * @return the driver, or null when construction failed (errors are recorded).
         */
        private SampleAppMobileDriver getMobileDriver() {
            SampleAppMobileDriver mobileDriver = null;
            boolean useKobiton = getBooleanPropertyElseAddError(Appium.Kobiton.use_kobiton);
            if (useKobiton) {
                String kobitonUploadUrl = getPropertyElseAddError(Appium.Kobiton.upload_url);
                String kobitonAuthCreds = getPropertyElseAddError(Appium.Kobiton.auth);
                String kobitonAppName = getPropertyElseAddError(Appium.Kobiton.app_name);
                String appPhysicalLocation = getPropertyElseAddError(Capability.app);
                String kobitonAppsUrl = getPropertyElseAddError(Appium.Kobiton.apps_url);
                String kobitonDevicesUrl = getPropertyElseAddError(Appium.Kobiton.devices_url);
                KobitonManager kobitonManager = new KobitonManager(new RequestFactory(), kobitonUploadUrl, kobitonAuthCreds, kobitonAppsUrl, kobitonDevicesUrl);
                try {
                    kobitonManager.createApplication(
                            kobitonAppName,
                            appPhysicalLocation);
                } catch (Exception e) {
                    addError("Could not create application on Kobiton", e);
                }
                System.setProperty(Capability.app, kobitonManager.getCurrentAppLocation());
                String platformName = getPropertyElseAddError(Capability.platform_name);
                try {
                    // Pick the first unbooked device matching the platform with
                    // a major platform version of at least 5.
                    List<KobitonDevice> devices = kobitonManager.getAllDevices();
                    for (KobitonDevice device : devices) {
                        if (!device.isBooked() && device.getPlatformName().equals(platformName) && Integer.valueOf(String.valueOf(device.getPlatformVersion().charAt(0))) >= 5) {
                            System.setProperty(Capability.device_name, device.getDeviceName());
                            System.setProperty(Capability.platform_version, device.getPlatformVersion());
                            break;
                        }
                    }
                } catch (Exception e) {
                    addError("Could not get Kobiton Device", e);
                }
            }
            URL appiumUrl = null;
            try {
                appiumUrl = new URL(getPropertyElseAddError(Appium.url));
            } catch (MalformedURLException e) {
                addError("Appium URL provided is invalid", e);
            }
            SampleAppAndroidDriver driver = null;
            try {
                driver = new SampleAppAndroidDriver(appiumUrl, getDesiredCapabilities());
                mobileDriver = driver;
            } catch (Exception e) {
                addError("Could not load platform driver, make sure Appium.url is set to the correct value", e);
            }
            // Guard against an NPE when driver construction failed above;
            // the recorded error already explains the failure.
            if (driver != null) {
                String commandTimeoutString = getPropertyElseAddError(Capability.new_command_timeout);
                try {
                    // parseInt(null) throws NumberFormatException, which is handled below.
                    int commandTimeout = Integer.parseInt(commandTimeoutString);
                    driver.manage().timeouts().implicitlyWait(commandTimeout, TimeUnit.SECONDS);
                } catch (NumberFormatException e) {
                    addInvalidPropertyError(Capability.new_command_timeout);
                }
            }
            return mobileDriver;
        }

        /** Assembles the Appium desired capabilities from system properties. */
        private DesiredCapabilities getDesiredCapabilities() {
            DesiredCapabilities capabilities = new DesiredCapabilities();
            // Literal-first equals: the property may be missing (null).
            if ("Android".equals(getPropertyElseAddError(Capability.platform_name))) {
                capabilities.setCapability("gpsEnabled", true);
                capabilities.setCapability("disableWindowAnimation", true);
            }
            // General
            capabilities.setCapability("app",
                    getPropertyElseAddError(Capability.app));
            capabilities.setCapability("automationName",
                    getPropertyElseAddError(Capability.automation_name));
            capabilities.setCapability("fullReset",
                    getBooleanPropertyElseAddError(Capability.full_reset));
            capabilities.setCapability("noReset",
                    getBooleanPropertyElseAddError(Capability.no_reset));
            capabilities.setCapability("applicationCacheEnabled",
                    getBooleanPropertyElseAddError(Capability.application_cache_enabled));
            capabilities.setCapability("locationContextEnabled",
                    getPropertyElseAddError(Capability.location_context_enabled));
            // Device specific
            capabilities.setCapability("sessionName",
                    getPropertyElseAddError(Capability.session_name));
            capabilities.setCapability("deviceOrientation",
                    getPropertyElseAddError(Capability.device_orientation));
            capabilities.setCapability("captureScreenshots",
                    getBooleanPropertyElseAddError(Capability.capture_screenshots));
            capabilities.setCapability("deviceGroup",
                    getPropertyElseAddError(Capability.device_group));
            capabilities.setCapability("deviceName",
                    getPropertyElseAddError(Capability.device_name));
            capabilities.setCapability("platformVersion",
                    getPropertyElseAddError(Capability.platform_version));
            capabilities.setCapability("platformName",
                    getPropertyElseAddError(Capability.platform_name));
            return capabilities;
        }

        private void addInvalidPropertyError(String prop) {
            addError(new Message("Property \"" + prop +
                    "\" not provided or invalid. Cannot run tests without this property"));
        }

        /**
         * Reads a required system property.
         * @return the value, or null when missing/empty (an error is recorded).
         */
        private String getPropertyElseAddError(String prop) {
            String propString = System.getProperty(prop);
            if (propString == null || propString.isEmpty()) {
                addInvalidPropertyError(prop);
            }
            return propString;
        }

        /**
         * Reads a required boolean property; anything other than the exact strings
         * "true"/"false" records an error. Missing properties yield false.
         */
        private boolean getBooleanPropertyElseAddError(String positiveProp) {
            String propString = getPropertyElseAddError(positiveProp);
            // Literal-first equals: propString may be null when the property is missing.
            if (!"true".equals(propString) && !"false".equals(propString)) {
                addError(new Message("Boolean property \"" + positiveProp +
                        "\" has invalid string, true or false are the only accepted values."));
            }
            return "true".equals(propString);
        }
    }
}
| |
//$Id: EJBQLTest.java 9162 2006-01-27 23:40:32Z steveebersole $
package org.hibernate.test.hql;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.Collections;
import java.util.List;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.hql.QueryTranslator;
import org.hibernate.hql.QueryTranslatorFactory;
import org.hibernate.hql.antlr.HqlSqlTokenTypes;
import org.hibernate.hql.ast.ASTQueryTranslatorFactory;
import org.hibernate.hql.ast.HqlParser;
import org.hibernate.hql.ast.QueryTranslatorImpl;
import org.hibernate.hql.ast.util.ASTUtil;
import org.hibernate.test.TestCase;
import antlr.RecognitionException;
import antlr.TokenStreamException;
import antlr.collections.AST;
/**
* @author <a href="mailto:alex@jboss.org">Alexey Loubyansky</a>
*/
public class EJBQLTest
extends TestCase {
public EJBQLTest(String x) {
super( x );
}
public void testEjb3PositionalParameters() throws Exception {
QueryTranslatorImpl qt = compile( "from Animal a where a.bodyWeight = ?1" );
AST ast = ( AST ) qt.getSqlAST();
// make certain that the ejb3-positional param got recognized as a named param
List namedParams = ASTUtil.collectChildren(
ast,
new ASTUtil.FilterPredicate() {
public boolean exclude(AST n) {
return n.getType() != HqlSqlTokenTypes.NAMED_PARAM;
}
}
);
assertTrue( "ejb3 positional param not recognized as a named param", namedParams.size() > 0 );
}
/**
* SELECT OBJECT(identifier)
*/
public void testSelectObjectClause() throws Exception {
//parse("select object(m) from Model m");
assertEjbqlEqualsHql( "select object(m) from Model m", "from Model m" );
}
/**
* IN(collection_valued_path) identifier
*/
public void testCollectionMemberDeclaration() throws Exception {
String hql = "select o from Animal a inner join a.offspring o";
String ejbql = "select object(o) from Animal a, in(a.offspring) o";
//parse(hql);
//parse(ejbql);
assertEjbqlEqualsHql( ejbql, hql );
}
/**
* collection_valued_path IS [NOT] EMPTY
*/
public void testIsEmpty() throws Exception {
//String hql = "from Animal a where not exists (from a.offspring)";
String hql = "from Animal a where not exists elements(a.offspring)";
String ejbql = "select object(a) from Animal a where a.offspring is empty";
//parse(hql);
//parse(ejbql);
assertEjbqlEqualsHql(ejbql, hql);
hql = "from Animal a where exists (from a.mother.father.offspring)";
ejbql = "select object(a) from Animal a where a.mother.father.offspring is not empty";
assertEjbqlEqualsHql( ejbql, hql );
}
/**
* [NOT] MEMBER OF
*/
public void testMemberOf() throws Exception {
String hql = "from Animal a where a.mother in (from a.offspring)";
//String hql = "from Animal a where a.mother in elements(a.offspring)";
String ejbql = "select object(a) from Animal a where a.mother member of a.offspring";
//parse(hql);
//parse(ejbql);
assertEjbqlEqualsHql( ejbql, hql );
hql = "from Animal a where a.mother not in (from a.offspring)";
//hql = "from Animal a where a.mother not in elements(a.offspring)";
ejbql = "select object(a) from Animal a where a.mother not member of a.offspring";
//parse(hql);
//parse(ejbql);
assertEjbqlEqualsHql( ejbql, hql );
}
/**
* Various functions.
* Tests just parsing for now which means it doesn't guarantee that the generated SQL is as expected or even valid.
*/
public void testEJBQLFunctions() throws Exception {
String hql = "select object(a) from Animal a where a.description = concat('1', concat('2','3'), '4'||'5')||0";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "from Animal a where substring(a.description, 1, 3) = :p1";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select substring(a.description, 1, 3) from Animal a";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "from Animal a where lower(a.description) = :p1";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select lower(a.description) from Animal a";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "from Animal a where upper(a.description) = :p1";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select upper(a.description) from Animal a";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "from Animal a where length(a.description) = :p1";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select length(a.description) from Animal a";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "from Animal a where locate(a.description, 'abc', 2) = :p1";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select locate(a.description, :p1, 2) from Animal a";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select object(a) from Animal a where trim(trailing '_' from a.description) = :p1";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select trim(trailing '_' from a.description) from Animal a";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select object(a) from Animal a where trim(leading '_' from a.description) = :p1";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select object(a) from Animal a where trim(both a.description) = :p1";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select object(a) from Animal a where trim(a.description) = :p1";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select object(a) from Animal a where abs(a.bodyWeight) = sqrt(a.bodyWeight)";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select object(a) from Animal a where mod(a.bodyWeight, a.mother.bodyWeight) = :p1";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select object(a) from Animal a where BIT_LENGTH(a.bodyWeight) = :p1";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select BIT_LENGTH(a.bodyWeight) from Animal a";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select object(a) from Animal a where CURRENT_DATE = :p1 or CURRENT_TIME = :p2 or CURRENT_TIMESTAMP = :p3";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
// todo the following is not supported
//hql = "select CURRENT_DATE, CURRENT_TIME, CURRENT_TIMESTAMP from Animal a";
//parse(hql, true);
//System.out.println("sql: " + toSql(hql));
hql = "select object(a) from Animal a where a.bodyWeight like '%a%'";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select object(a) from Animal a where a.bodyWeight not like '%a%'";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
hql = "select object(a) from Animal a where a.bodyWeight like '%a%' escape '%'";
parse( hql, false );
System.out.println( "sql: " + toSql( hql ) );
}
public void testTrueFalse() throws Exception {
assertEjbqlEqualsHql( "from Human h where h.pregnant is true", "from Human h where h.pregnant = true" );
assertEjbqlEqualsHql( "from Human h where h.pregnant is false", "from Human h where h.pregnant = false" );
assertEjbqlEqualsHql( "from Human h where not(h.pregnant is true)", "from Human h where not( h.pregnant=true )" );
}
// Private
private void assertEjbqlEqualsHql(String ejbql, String hql) {
SessionFactoryImplementor factory = getSessionFactoryImplementor();
QueryTranslatorFactory ast = new ASTQueryTranslatorFactory();
QueryTranslator queryTranslator = ast.createQueryTranslator( hql, hql, Collections.EMPTY_MAP, factory );
queryTranslator.compile( Collections.EMPTY_MAP, true );
String hqlSql = queryTranslator.getSQLString();
queryTranslator = ast.createQueryTranslator( ejbql, ejbql, Collections.EMPTY_MAP, factory );
queryTranslator.compile( Collections.EMPTY_MAP, true );
String ejbqlSql = queryTranslator.getSQLString();
assertEquals( hqlSql, ejbqlSql );
}
private void assertEjbqlEqualsSql(String sql, String hql) {
SessionFactoryImplementor factory = getSessionFactoryImplementor();
QueryTranslatorFactory ast = new ASTQueryTranslatorFactory();
QueryTranslator queryTranslator = ast.createQueryTranslator( hql, hql, Collections.EMPTY_MAP, factory );
queryTranslator.compile( Collections.EMPTY_MAP, true );
assertEquals( sql, queryTranslator.getSQLString() );
}
private QueryTranslatorImpl compile(String input) {
SessionFactoryImplementor factory = getSessionFactoryImplementor();
QueryTranslatorFactory ast = new ASTQueryTranslatorFactory();
QueryTranslator queryTranslator = ast.createQueryTranslator( input, input, Collections.EMPTY_MAP, factory );
queryTranslator.compile( Collections.EMPTY_MAP, true );
return ( QueryTranslatorImpl ) queryTranslator;
}
private AST parse(String input, boolean logging) throws RecognitionException, TokenStreamException {
if ( logging ) {
System.out.println( "input: ->" + input + "<-" );
}
HqlParser parser = HqlParser.getInstance( input );
parser.setFilter( false );
parser.statement();
AST ast = parser.getAST();
if ( logging ) {
System.out.println( "AST : " + ast.toStringTree() + "" );
ByteArrayOutputStream baos = new ByteArrayOutputStream();
parser.showAst( ast, new PrintStream( baos ) );
System.out.println( baos.toString() );
}
assertEquals( "At least one error occurred during parsing!", 0, parser.getParseErrorHandler().getErrorCount() );
return ast;
}
private String toSql(String hql) {
SessionFactoryImplementor factory = getSessionFactoryImplementor();
QueryTranslatorFactory ast = new ASTQueryTranslatorFactory();
QueryTranslator queryTranslator = ast.createQueryTranslator( hql, hql, Collections.EMPTY_MAP, factory );
queryTranslator.compile( Collections.EMPTY_MAP, true );
return queryTranslator.getSQLString();
}
private SessionFactoryImplementor getSessionFactoryImplementor() {
SessionFactoryImplementor factory = ( SessionFactoryImplementor ) getSessions();
if ( factory == null ) {
throw new NullPointerException( "Unable to create factory!" );
}
return factory;
}
protected String[] getMappings() {
return new String[]{
"hql/Animal.hbm.xml",
"batchfetch/ProductLine.hbm.xml",
"cid/Customer.hbm.xml",
"cid/Order.hbm.xml",
"cid/LineItem.hbm.xml",
"cid/Product.hbm.xml",
"legacy/Glarch.hbm.xml",
"legacy/Fee.hbm.xml",
"legacy/Qux.hbm.xml",
"legacy/Fum.hbm.xml",
"legacy/Holder.hbm.xml",
"legacy/One.hbm.xml",
"legacy/FooBar.hbm.xml",
"legacy/Many.hbm.xml",
"legacy/Baz.hbm.xml",
"legacy/Simple.hbm.xml",
"legacy/Middle.hbm.xml",
"legacy/Category.hbm.xml",
"legacy/Multi.hbm.xml",
"legacy/Commento.hbm.xml",
"legacy/Marelo.hbm.xml",
"compositeelement/Parent.hbm.xml",
"legacy/Container.hbm.xml",
};
}
public static Test suite() {
return new TestSuite( EJBQLTest.class );
}
}
| |
package cd4017be.automation.TileEntity;
import java.util.HashMap;
import java.util.Map.Entry;
import cd4017be.api.automation.AreaProtect;
import cd4017be.api.automation.PipeEnergy;
import cd4017be.automation.AreaConfig;
import cd4017be.automation.Config;
import cd4017be.lib.Gui.DataContainer;
import cd4017be.lib.Gui.DataContainer.IGuiData;
import cd4017be.lib.templates.AutomatedTile;
import cd4017be.lib.util.ItemFluidUtil;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.NetworkManager;
import net.minecraft.network.PacketBuffer;
import net.minecraft.network.play.server.SPacketUpdateTileEntity;
import net.minecraft.server.management.PlayerList;
import net.minecraft.util.text.TextComponentString;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumHand;
import net.minecraft.world.WorldServer;
/**
*
* @author CD4017BE
*/
/**
 * Tile entity of an area security device. Each tick it skims energy from the
 * attached wire above a configurable reference voltage into an internal buffer
 * ({@code Estor}) and spends it on chunk protection ({@code EuseP}) and chunk
 * loading ({@code EuseL}) via the attached {@link AreaConfig}.
 *
 * @author CD4017BE
 */
public class SecuritySys extends AutomatedTile implements IGuiData {

    // maximum wire voltage passed to the PipeEnergy connection
    public static int Umax = 1200;
    // capacity of the internal energy buffer (upper bound for Estor)
    public static float Ecap = 16000;
    // protection / chunk-load configuration owned by this device
    public AreaConfig prot = new AreaConfig(this);
    // confiscated items keyed by player name; 40 slots per player (see readFromNBT)
    public HashMap<String, ItemStack[]> itemStorage = new HashMap<String, ItemStack[]>();
    // whether chunk protection is currently running (recomputed each tick)
    public boolean enabled = false;
    // whether chunk loading is currently running (recomputed each tick)
    public boolean enabledC = false;
    // server-permission restrictions, set in onPlaced:
    // bit0 = protection denied to this owner, bit1 = chunk loading denied
    private byte perm = 0;
    // Uref: reference voltage; EuseP/EuseL: per-tick energy cost of
    // protection/loading; rstCtr: mode bits (see constructor comment)
    public int Uref, EuseP, rstCtr, EuseL;
    // currently stored energy, 0..Ecap
    public float Estor;
    // name of the entity that placed this device
    public String mainOwner = "";

    public SecuritySys()
    {
        energy = new PipeEnergy(Umax, Config.Rcond[1]);
        /**
         * long:
         * int: voltage, energyUse, rstMode{1:ProtSrc, 2:ProtInv, 4:LoadSrc, 8:LoadInv}, energyUseC
         * float: storage
         */
    }

    @Override
    public void update()
    {
        super.update();
        if (worldObj.isRemote) return; // server side only
        // energy available above the reference voltage (difference of squared
        // voltages, scaled by 0.001)
        float e = (energy.Ucap * energy.Ucap - (float)(Uref * Uref)) * 0.001F;
        if (e > 0) {
            energy.Ucap = Uref;
            if (Estor + e < Ecap)
            {
                Estor += e;
            } else
            {
                // buffer full: push the overflow back onto the wire
                e -= Ecap - Estor;
                Estor = Ecap;
                energy.addEnergy(e * 1000F);
            }
        }
        // strip the mode bits of any feature the owner is not permitted to use
        if (perm == 1) rstCtr &= 12;
        else if (perm == 2) rstCtr &= 3;
        else if (perm == 3) rstCtr = 0;
        boolean rst = worldObj.isBlockPowered(getPos());
        // protection: bit0 = source state, bit1 = XOR with redstone signal
        if ((rstCtr & 2) == 0) enabled = (rstCtr & 1) != 0;
        else enabled = (rstCtr & 1) != 0 ^ rst;
        if (Estor < EuseP) enabled = false;
        else if (enabled) Estor -= EuseP;
        boolean enabledCl = enabledC;
        // chunk loading: bit2 = source state, bit3 = XOR with redstone signal
        if ((rstCtr & 8) == 0) enabledC = (rstCtr & 4) != 0;
        else enabledC = (rstCtr & 4) != 0 ^ rst;
        // loading also refuses to (re)start below half charge
        if (Estor < EuseL || (!enabledCl && Estor < Ecap / 2)) enabledC = false;
        else if (enabledC) Estor -= EuseL;
        prot.update |= enabledC ^ enabledCl;
        prot.tick();
    }

    /** @return fill fraction of the internal energy buffer (0..1), for display */
    public float getStorage() {
        return Estor / Ecap;
    }

    @Override
    public void readFromNBT(NBTTagCompound nbt)
    {
        super.readFromNBT(nbt);
        prot.readFromNbt(nbt);
        Estor = nbt.getFloat("storage");
        Uref = nbt.getInteger("voltage");
        rstCtr = nbt.getByte("mode");
        perm = nbt.getByte("access");
        mainOwner = nbt.getString("owner");
        // energy costs are derived from the config, not persisted
        EuseP = (int)prot.getEnergyCost();
        EuseL = (int)prot.getLoadEnergyCost();
        // anchor of the 8x8 chunk area handled by this device
        prot.px = ((this.pos.getX() + 8) >> 4) - 4;
        prot.pz = ((this.pos.getZ() + 8) >> 4) - 4;
        itemStorage.clear();
        NBTTagCompound stor = nbt.getCompoundTag("itemBuff");
        for (String name : stor.getKeySet()) {
            ItemStack[] items = new ItemStack[40];
            ItemFluidUtil.loadItems(stor.getTagList(name, 10), items);
            itemStorage.put(name, items);
        }
    }

    @Override
    public NBTTagCompound writeToNBT(NBTTagCompound nbt)
    {
        energy.writeToNBT(nbt, "Wire");
        prot.writeToNbt(nbt);
        nbt.setFloat("storage", Estor);
        nbt.setInteger("voltage", Uref);
        nbt.setByte("mode", (byte)rstCtr);
        nbt.setByte("access", perm);
        nbt.setString("owner", mainOwner);
        NBTTagCompound stor = new NBTTagCompound();
        for (Entry<String, ItemStack[]> e : itemStorage.entrySet())
            stor.setTag(e.getKey(), ItemFluidUtil.saveItems(e.getValue()));
        nbt.setTag("itemBuff", stor);
        return super.writeToNBT(nbt);
    }

    @Override
    public boolean onActivated(EntityPlayer player, EnumHand hand, ItemStack item, EnumFacing s, float X, float Y, float Z)
    {
        // only registered owners may interact; sneaking with an empty hand
        // dismantles the device
        if (!prot.isPlayerOwner(player.getName())){
            if (!worldObj.isRemote) player.addChatMessage(new TextComponentString("You are not given the necessary rights to use this!"));
            return true;
        } else if (player.isSneaking() && player.getHeldItemMainhand() == null) {
            if (!itemStorage.isEmpty()) {
                //TODO drop confiscated items;
            }
            this.getBlockType().dropBlockAsItem(worldObj, getPos(), worldObj.getBlockState(pos), 0);
            worldObj.setBlockToAir(getPos());
            return true;
        } else return super.onActivated(player, hand, item, s, X, Y, Z);
    }

    @Override
    public SPacketUpdateTileEntity getUpdatePacket()
    {
        // sync only the protection configuration to clients
        NBTTagCompound nbt = new NBTTagCompound();
        prot.writeToNbt(nbt);
        return new SPacketUpdateTileEntity(getPos(), -1, nbt);
    }

    @Override
    public void onDataPacket(NetworkManager net, SPacketUpdateTileEntity pkt)
    {
        NBTTagCompound nbt = pkt.getNbtCompound();
        prot.readFromNbt(nbt);
        EuseP = (int)prot.getEnergyCost();
        EuseL = (int)prot.getLoadEnergyCost();
    }

    /**
     * Handles GUI commands sent by an owning player. Command codes:
     * 0 = set mode bits, 1 = remove player from group, 2 = add player to group,
     * 3 = toggle a chunk-load bit (group &lt; 0) or cycle a protection setting,
     * 4 = set reference voltage (clamped to 0..Umax).
     */
    @Override
    public void onPlayerCommand(PacketBuffer dis, EntityPlayerMP player) {
        boolean update = false;
        if (!prot.isPlayerOwner(player.getName())) return;
        byte cmd = dis.readByte();
        if (cmd == 0) rstCtr = dis.readByte();
        else if (cmd == 1) {
            byte g = dis.readByte();
            byte p = dis.readByte();
            prot.removePLayer(p, g);
            update = true;
        } else if (cmd == 2) {
            byte g = dis.readByte();
            String name = dis.readStringFromBuffer(64);
            prot.addPlayer(name, g);
            update = true;
        } else if (cmd == 3) {
            int g = dis.readByte() % 4;
            int i = dis.readByte() % 64;
            if (g < 0) {
                prot.loadedChunks ^= 1L << i;
                EuseL = (int)prot.getLoadEnergyCost();
                prot.update = true;
            } else {
                prot.setProtection(i, g, prot.getProtection(i, g) + 1);
                EuseP = (int)prot.getEnergyCost();
            }
            update = true;
        } else if (cmd == 4) {
            Uref = dis.readInt();
            if (Uref < 0) Uref = 0;
            else if (Uref > energy.Umax) Uref = energy.Umax;
        }
        if (update) this.markUpdate();
    }

    @Override
    public void onPlaced(EntityLivingBase entity, ItemStack item)
    {
        this.mainOwner = entity.getName();
        prot.addPlayer(mainOwner, 0);
        // Evaluate server-wide permission config: positive = everyone, 0 = admins
        // only, negative = nobody. Restrictions are recorded in the perm bits.
        if ((AreaProtect.permissions > 0 && AreaProtect.chunkloadPerm > 0) || !(worldObj instanceof WorldServer)) return;
        PlayerList manager = ((WorldServer)worldObj).getMinecraftServer().getPlayerList();
        boolean admin = entity instanceof EntityPlayer && manager.canSendCommands(((EntityPlayer)entity).getGameProfile());
        if (AreaProtect.permissions < 0 || (!admin && AreaProtect.permissions == 0)) perm |= 1;
        if (AreaProtect.chunkloadPerm < 0 || (!admin && AreaProtect.chunkloadPerm == 0)) perm |= 2;
        if (perm == 0) return;
        // both features denied: destroy the block immediately
        if (perm == 3) worldObj.setBlockToAir(getPos());
        if (entity instanceof EntityPlayer) ((EntityPlayer)entity).addChatMessage(new TextComponentString(perm == 3 ? "You are not allowed to use this device on this server !" : perm == 2 ? "Chunk loading functionality of this device disabled for you !" : "Chunk protection functionality of this device disabled for you !"));
    }

    @Override
    public void onChunkUnload()
    {
        onUnload();
        super.onChunkUnload();
        // keep the chunk loader registered while this device keeps its own chunk loaded
        if (!this.prot.isChunkLoaded(pos.getX() >> 4, pos.getZ() >> 4)) AreaProtect.instance.removeChunkLoader(prot);
    }

    @Override
    public void invalidate()
    {
        onUnload();
        super.invalidate();
        AreaProtect.instance.removeChunkLoader(prot);
    }

    // deregisters this device's config from the global protection registry
    private void onUnload()
    {
        AreaProtect.instance.unloadSecuritySys(prot);
    }

    @Override
    public void validate()
    {
        // anchor of the 8x8 chunk area handled by this device
        prot.px = ((this.pos.getX() + 8) >> 4) - 4;
        prot.pz = ((this.pos.getZ() + 8) >> 4) - 4;
        AreaProtect.instance.loadSecuritySys(prot);
        super.validate();
    }

    @Override
    public void initContainer(DataContainer container) {
    }

    /** GUI sync: {Uref, EuseP, rstCtr, EuseL, Estor (as raw float bits)} */
    @Override
    public int[] getSyncVariables() {
        return new int[]{Uref, EuseP, rstCtr, EuseL, Float.floatToIntBits(Estor)};
    }

    @Override
    public void setSyncVariable(int i, int v) {
        switch(i) {
        case 0: Uref = v; break;
        case 1: EuseP = v; break;
        case 2: rstCtr = v; break;
        case 3: EuseL = v; break;
        case 4: Estor = Float.intBitsToFloat(v); break;
        }
    }
}
| |
/* Copyright (c) 2014 Boundless and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Distribution License v1.0
* which accompanies this distribution, and is available at
* https://www.eclipse.org/org/documents/edl-v10.html
*
* Contributors:
* Gabriel Roldan (Boundless) - initial implementation
*/
package org.locationtech.geogig.geotools.data.stresstest;
import static com.google.common.collect.ImmutableList.copyOf;
import static org.junit.Assert.assertEquals;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.geotools.data.DataUtilities;
import org.geotools.data.DefaultTransaction;
import org.geotools.data.Transaction;
import org.geotools.data.simple.SimpleFeatureCollection;
import org.geotools.data.simple.SimpleFeatureIterator;
import org.geotools.data.simple.SimpleFeatureSource;
import org.geotools.data.simple.SimpleFeatureStore;
import org.geotools.feature.SchemaException;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.locationtech.geogig.api.Context;
import org.locationtech.geogig.api.GeoGIG;
import org.locationtech.geogig.api.RevCommit;
import org.locationtech.geogig.api.TestPlatform;
import org.locationtech.geogig.api.porcelain.ConfigOp;
import org.locationtech.geogig.api.porcelain.ConfigOp.ConfigAction;
import org.locationtech.geogig.api.porcelain.InitOp;
import org.locationtech.geogig.api.porcelain.LogOp;
import org.locationtech.geogig.cli.test.functional.general.CLITestContextBuilder;
import org.locationtech.geogig.geotools.data.GeoGigDataStore;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import com.google.common.base.Throwables;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
 * Concurrency stress test for {@link GeoGigDataStore}: runs concurrent feature inserts (each
 * insert in its own {@link Transaction} and hence its own commit) and concurrent full-table
 * reads against a single store instance, then verifies the resulting commit counts.
 *
 * <p>Thread-safety of the store is the subject under test; the test itself confines mutable
 * state ({@link SimpleFeatureBuilder}) to one task instance per worker thread.
 */
public class DataStoreConcurrencyTest {

    private GeoGigDataStore store;

    /** Feature type shared by all insert and read tasks; built once at class load. */
    private static final SimpleFeatureType pointType;
    static {
        final String pointsTypeSpec = "sp:String,ip:Integer,pp:Point:srid=4326";
        try {
            pointType = DataUtilities.createType("point", pointsTypeSpec);
        } catch (SchemaException e) {
            throw Throwables.propagate(e);
        }
    }

    private ExecutorService editThreads;

    private ExecutorService readThreads;

    private final int writeThreadCount = 4, readThreadCount = 4;

    @Rule
    public TemporaryFolder tmp = new TemporaryFolder();

    // Commits present right after repository initialization; expected commit counts in the
    // tests are computed relative to this baseline.
    private int initialCommitCount;

    /**
     * Creates a fresh geogig repository in a temp folder, wraps it in a {@link GeoGigDataStore}
     * with the {@code point} schema, and builds the writer/reader thread pools.
     */
    @Before
    public void beforeTest() throws Exception {

        File workingDirectory = tmp.newFolder("repo");
        File userHomeDirectory = tmp.newFolder("home");
        TestPlatform platform = new TestPlatform(workingDirectory);
        platform.setUserHome(userHomeDirectory);
        Context injector = new CLITestContextBuilder(platform).build();

        GeoGIG geogig = new GeoGIG(injector);
        geogig.command(InitOp.class).call();
        // user.name/user.email are required for geogig to create commits
        geogig.command(ConfigOp.class).setAction(ConfigAction.CONFIG_SET).setName("user.name")
                .setValue("gabriel").call();
        geogig.command(ConfigOp.class).setAction(ConfigAction.CONFIG_SET).setName("user.email")
                .setValue("gabriel@roldan.example.com").call();

        store = new GeoGigDataStore(geogig);
        store.createSchema(pointType);

        editThreads = Executors.newFixedThreadPool(writeThreadCount, new ThreadFactoryBuilder()
                .setNameFormat("edit-thread-%d").build());
        readThreads = Executors.newFixedThreadPool(readThreadCount, new ThreadFactoryBuilder()
                .setNameFormat("read-thread-%d").build());

        initialCommitCount = copyOf(store.getGeogig().command(LogOp.class).call()).size();
    }

    /** Disposes the store and forcibly shuts down both thread pools. */
    @After
    public void afterTest() throws Exception {
        if (store != null) {
            store.dispose();
        }
        if (editThreads != null) {
            editThreads.shutdownNow();
        }
        if (readThreads != null) {
            readThreads.shutdownNow();
        }
    }

    /**
     * Runs {@code writeThreadCount} concurrent insert tasks and asserts every insert produced
     * exactly one commit.
     */
    @Test
    public void testConcurrentEdits() throws Exception {
        final int insertsPerTask = 20;
        List<Future<Integer>> insertResults = runInserts(writeThreadCount, insertsPerTask);
        for (Future<Integer> f : insertResults) {
            assertEquals(insertsPerTask, f.get().intValue());
        }
        List<RevCommit> commits = copyOf(store.getGeogig().command(LogOp.class).call());
        final int expectedCommitCount = initialCommitCount + insertsPerTask * writeThreadCount;
        assertEquals(expectedCommitCount, commits.size());
    }

    /** Seeds the repository with one writer, then runs concurrent read-only tasks. */
    @Test
    public void testConcurrentReads() throws Exception {
        final int insertsPerTask = 20;
        assertEquals(insertsPerTask, runInserts(1, insertsPerTask).get(0).get().intValue());

        final int readsPerTask = 20;
        List<Future<Integer>> readResults = runReads(readThreadCount, readsPerTask);
        for (Future<Integer> f : readResults) {
            assertEquals(readsPerTask, f.get().intValue());
        }
    }

    /**
     * Mixes concurrent writers and readers against the same store and verifies both the
     * per-task counts and the final commit count.
     */
    @Test
    public void testConcurrentEditsAndReads() throws Exception {
        final int insertsPerTask = 40;
        final int readsPerTask = 200;
        // have something to read
        runInserts(1, insertsPerTask).get(0).get();

        List<Future<Integer>> insertResults = runInserts(writeThreadCount, insertsPerTask);
        // give the writers a head start so reads overlap in-flight commits
        Thread.sleep(3000);
        List<Future<Integer>> readResults = runReads(readThreadCount, readsPerTask);

        for (Future<Integer> f : insertResults) {
            assertEquals(insertsPerTask, f.get().intValue());
        }
        for (Future<Integer> f : readResults) {
            assertEquals(readsPerTask, f.get().intValue());
        }

        List<RevCommit> commits = copyOf(store.getGeogig().command(LogOp.class).call());
        // seed inserts + baseline + one commit per concurrent insert
        final int expectedCommitCount = insertsPerTask + initialCommitCount + insertsPerTask
                * writeThreadCount;
        assertEquals(expectedCommitCount, commits.size());
    }

    /** Submits {@code writeThreadCount} {@link InsertTask}s to the edit pool. */
    private List<Future<Integer>> runInserts(final int writeThreadCount, final int insertsPerTask) {
        List<Future<Integer>> insertResults = Lists.newArrayList();
        for (int i = 0; i < writeThreadCount; i++) {
            insertResults.add(editThreads.submit(new InsertTask(store, insertsPerTask)));
        }
        return insertResults;
    }

    /** Submits {@code readThreadCount} {@link ReadTask}s to the read pool. */
    private List<Future<Integer>> runReads(final int readThreadCount, final int readsPerTask) {
        List<Future<Integer>> readResults = Lists.newArrayList();
        for (int i = 0; i < readThreadCount; i++) {
            readResults.add(readThreads.submit(new ReadTask(store, readsPerTask)));
        }
        return readResults;
    }

    /**
     * Inserts {@code numInserts} features, each through its own {@link DefaultTransaction}
     * commit, and returns the number of successful inserts.
     */
    public static class InsertTask implements Callable<Integer> {

        // Shared seeded Random; guarded by synchronized block since Random pre-Java 7 patterns
        // here funnel all tasks through one instance.
        private static final Random rnd = new Random(1000);

        private final GeoGigDataStore dataStore;

        // Builder is not thread-safe; confined to this task instance.
        private final SimpleFeatureBuilder builder;

        private int numInserts;

        public InsertTask(GeoGigDataStore store, int numInserts) {
            this.dataStore = store;
            this.numInserts = numInserts;
            this.builder = new SimpleFeatureBuilder(pointType);
        }

        @Override
        public Integer call() {
            int random;
            synchronized (rnd) {
                random = rnd.nextInt();
            }
            // NOTE(review): the same random value (and hence the same feature id) is reused for
            // every insert of this task — each commit re-writes the same feature. Presumably
            // intentional for this stress test; confirm if per-insert ids were meant instead.
            final String typeName = pointType.getTypeName();
            SimpleFeatureStore featureSource;
            int insertCount = 0;
            try {
                for (int i = 0; i < numInserts; i++) {
                    builder.reset();
                    builder.set("sp", String.valueOf(random));
                    builder.set("ip", Integer.valueOf(random));
                    SimpleFeature feature = builder.buildFeature(String.valueOf(random));

                    featureSource = (SimpleFeatureStore) dataStore.getFeatureSource(typeName);
                    Transaction tx = new DefaultTransaction();
                    featureSource.setTransaction(tx);
                    try {
                        featureSource.addFeatures(DataUtilities.collection(feature));
                        tx.commit();
                        insertCount++;
                    } finally {
                        tx.close();
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
                throw Throwables.propagate(e);
            }
            System.err.printf("Thread %s finished\n", Thread.currentThread().getName());
            return insertCount;
        }
    }

    /**
     * Performs {@code numReads} full scans of the {@code point} layer and returns the number
     * of completed reads.
     */
    public static class ReadTask implements Callable<Integer> {

        private final GeoGigDataStore dataStore;

        private final int numReads;

        public ReadTask(GeoGigDataStore store, final int numReads) {
            this.dataStore = store;
            this.numReads = numReads;
        }

        @Override
        public Integer call() {
            int readCount = 0;
            try {
                for (int i = 0; i < numReads; i++) {
                    doRead();
                    readCount++;
                }
            } catch (Exception e) {
                e.printStackTrace();
                throw Throwables.propagate(e);
            }
            System.err.printf("Thread %s finished\n", Thread.currentThread().getName());
            return readCount;
        }

        /** Fully iterates the layer's features, always closing the iterator. */
        private void doRead() throws IOException {
            final String typeName = pointType.getTypeName();
            SimpleFeatureSource featureSource = dataStore.getFeatureSource(typeName);
            SimpleFeatureCollection fc = featureSource.getFeatures();
            SimpleFeatureIterator features = fc.features();
            // Fix: close the iterator in a finally block — GeoTools feature iterators hold
            // underlying resources and must be closed even when iteration throws; the original
            // leaked the iterator on exception.
            try {
                while (features.hasNext()) {
                    features.next();
                }
            } finally {
                features.close();
            }
        }
    }

    /** Ad-hoc standalone runner for the mixed read/write scenario outside JUnit. */
    public static void main(String args[]) {
        DataStoreConcurrencyTest test = new DataStoreConcurrencyTest();
        try {
            test.tmp.create();
            test.beforeTest();
            test.testConcurrentEditsAndReads();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                test.afterTest();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.