gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.phreak;
import org.drools.core.common.BetaConstraints;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.common.LeftTupleSets;
import org.drools.core.common.RightTupleSets;
import org.drools.core.reteoo.BetaMemory;
import org.drools.core.reteoo.LeftTuple;
import org.drools.core.reteoo.LeftTupleMemory;
import org.drools.core.reteoo.LeftTupleSink;
import org.drools.core.reteoo.NotNode;
import org.drools.core.reteoo.RightTuple;
import org.drools.core.reteoo.RightTupleMemory;
import org.drools.core.rule.ContextEntry;
import org.drools.core.util.FastIterator;
import static org.drools.core.phreak.PhreakJoinNode.updateChildLeftTuple;
/**
 * Phreak evaluator for {@link NotNode} (the negative existential "not" CE).
 * A left tuple propagates to the sink only while it has no blocking right tuple;
 * once blocked it is removed from left memory and parked on its blocker's
 * blocked-list, so left memory only ever holds unblocked left tuples.
 * Stateless: all node state lives in the supplied {@link BetaMemory}.
 */
public class PhreakNotNode {
/**
 * Dispatches all staged left/right tuple deltas for this node. The phase order
 * below is load-bearing; each branch's inline comment explains the required
 * ordering. Both staged sets are consumed and reset before returning.
 */
public void doNode(NotNode notNode,
LeftTupleSink sink,
BetaMemory bm,
InternalWorkingMemory wm,
LeftTupleSets srcLeftTuples,
LeftTupleSets trgLeftTuples,
LeftTupleSets stagedLeftTuples) {
// snapshot-and-clear the node's staged right tuples in one step
RightTupleSets srcRightTuples = bm.getStagedRightTuples().takeAll();
if (srcLeftTuples.getDeleteFirst() != null) {
// left deletes must come before right deletes. Otherwise right deletes could
// stage an insertion, that is later deleted in the rightDelete, causing potential problems
doLeftDeletes(bm, srcLeftTuples, trgLeftTuples, stagedLeftTuples);
}
if (srcLeftTuples.getUpdateFirst() != null) {
// must happen before right inserts, so it can find left tuples to block.
RuleNetworkEvaluator.doUpdatesExistentialReorderLeftMemory(bm,
srcLeftTuples);
}
if ( srcRightTuples.getUpdateFirst() != null) {
RuleNetworkEvaluator.doUpdatesExistentialReorderRightMemory(bm,
notNode,
srcRightTuples); // this also preserves the next rightTuple
}
if (srcRightTuples.getInsertFirst() != null) {
// must come before right updates and inserts, as they might cause insert propagation, while this causes delete propagations, resulting in staging clash.
doRightInserts(notNode, bm, wm, srcRightTuples, trgLeftTuples, stagedLeftTuples);
}
if (srcRightTuples.getUpdateFirst() != null) {
// must come after rightInserts and before rightDeletes, to avoid staging clash
doRightUpdates(notNode, sink, bm, wm, srcRightTuples, trgLeftTuples, stagedLeftTuples);
}
if (srcRightTuples.getDeleteFirst() != null) {
// must come after rightUpdates, to avoid staging clash
doRightDeletes(notNode, sink, bm, wm, srcRightTuples, trgLeftTuples);
}
if (srcLeftTuples.getUpdateFirst() != null) {
doLeftUpdates(notNode, sink, bm, wm, srcLeftTuples, trgLeftTuples, stagedLeftTuples);
}
if (srcLeftTuples.getInsertFirst() != null) {
doLeftInserts(notNode, sink, bm, wm, srcLeftTuples, trgLeftTuples);
}
srcRightTuples.resetAll();
srcLeftTuples.resetAll();
}
/**
 * Handles staged left inserts: scans right memory for a blocker for each new
 * left tuple. Unblocked tuples are added to left memory (when left memory is
 * used) and a child tuple is propagated as an insert; blocked tuples propagate
 * nothing and stay out of left memory.
 */
public void doLeftInserts(NotNode notNode,
LeftTupleSink sink,
BetaMemory bm,
InternalWorkingMemory wm,
LeftTupleSets srcLeftTuples,
LeftTupleSets trgLeftTuples) {
LeftTupleMemory ltm = bm.getLeftTupleMemory();
RightTupleMemory rtm = bm.getRightTupleMemory();
ContextEntry[] contextEntry = bm.getContext();
BetaConstraints constraints = notNode.getRawConstraints();
for (LeftTuple leftTuple = srcLeftTuples.getInsertFirst(); leftTuple != null; ) {
LeftTuple next = leftTuple.getStagedNext();
FastIterator it = notNode.getRightIterator(rtm);
boolean useLeftMemory = RuleNetworkEvaluator.useLeftMemory(notNode, leftTuple);
constraints.updateFromTuple( contextEntry,
wm,
leftTuple );
// This method will also remove rightTuples that are from subnetwork where no leftmemory use used
RuleNetworkEvaluator.findLeftTupleBlocker( notNode, rtm, contextEntry, constraints, leftTuple, it, useLeftMemory );
if (leftTuple.getBlocker() == null) {
// tuple is not blocked, so add to memory so other fact handles can attempt to match
if (useLeftMemory) {
ltm.add(leftTuple);
}
trgLeftTuples.addInsert(sink.createLeftTuple(leftTuple,
sink,
leftTuple.getPropagationContext(), useLeftMemory)); // use leftTuple pctx here, as no right input caused the trigger anway
}
leftTuple.clearStaged();
leftTuple = next;
}
constraints.resetTuple( contextEntry );
}
/**
 * Handles staged right inserts: each new right tuple is added to right memory
 * and matched against the currently unblocked left tuples; any left tuple it
 * now blocks is removed from left memory and its child propagation retracted.
 * Left tuples staged as UPDATE are skipped here (they will be processed in the
 * left-update phase; children must not be processed twice).
 */
public void doRightInserts(NotNode notNode,
BetaMemory bm,
InternalWorkingMemory wm,
RightTupleSets srcRightTuples,
LeftTupleSets trgLeftTuples,
LeftTupleSets stagedLeftTuples ) {
LeftTupleMemory ltm = bm.getLeftTupleMemory();
RightTupleMemory rtm = bm.getRightTupleMemory();
ContextEntry[] contextEntry = bm.getContext();
BetaConstraints constraints = notNode.getRawConstraints();
// this must be processed here, rather than initial insert, as we need to link the blocker
unlinkNotNodeOnRightInsert(notNode,
bm,
wm);
for (RightTuple rightTuple = srcRightTuples.getInsertFirst(); rightTuple != null; ) {
RightTuple next = rightTuple.getStagedNext();
rtm.add(rightTuple);
if ( ltm != null && ltm.size() > 0 ) {
FastIterator it = notNode.getLeftIterator( ltm );
constraints.updateFromFactHandle( contextEntry,
wm,
rightTuple.getFactHandle() );
for ( LeftTuple leftTuple = notNode.getFirstLeftTuple( rightTuple, ltm, it ); leftTuple != null; ) {
// preserve next now, in case we remove this leftTuple
LeftTuple temp = (LeftTuple) it.next( leftTuple );
if ( leftTuple.getStagedType() == LeftTuple.UPDATE ) {
// ignore, as it will get processed via left iteration. Children cannot be processed twice
leftTuple = temp;
continue;
}
// we know that only unblocked LeftTuples are still in the memory
if ( constraints.isAllowedCachedRight( contextEntry,
leftTuple ) ) {
leftTuple.setBlocker( rightTuple );
rightTuple.addBlocked( leftTuple );
// this is now blocked so remove from memory
ltm.remove( leftTuple );
// subclasses like ForallNotNode might override this propagation
// ** @TODO (mdp) need to not break forall
LeftTuple childLeftTuple = leftTuple.getFirstChild();
if ( childLeftTuple != null ) { // NotNode only has one child
childLeftTuple.setPropagationContext( rightTuple.getPropagationContext() );
RuleNetworkEvaluator.deleteLeftChild( childLeftTuple, trgLeftTuples, stagedLeftTuples );
}
}
leftTuple = temp;
}
}
rightTuple.clearStaged();
rightTuple = next;
}
constraints.resetFactHandle(contextEntry);
}
/**
 * Unlinks the node's segment when a right insert arrives on a linked segment
 * for a not node with empty beta constraints (any right tuple would block every
 * left tuple, so the node cannot propagate). Not applied when the right input
 * is a RIA (subnetwork) node.
 */
public static void unlinkNotNodeOnRightInsert(NotNode notNode,
BetaMemory bm,
InternalWorkingMemory wm) {
if (bm.getSegmentMemory().isSegmentLinked() && !notNode.isRightInputIsRiaNode() && notNode.isEmptyBetaConstraints()) {
// this must be processed here, rather than initial insert, as we need to link the blocker
// @TODO this could be more efficient, as it means the entire StagedLeftTuples for all previous nodes where evaluated, needlessly.
bm.unlinkNode(wm);
}
}
/**
 * Handles staged left updates: re-validates the (possibly stale) blocker,
 * rescans right memory from the start of the relevant bucket when needed, and
 * propagates the resulting insert/update/retract of the single child tuple
 * depending on the blocked-before vs blocked-after transition.
 */
public void doLeftUpdates(NotNode notNode,
LeftTupleSink sink,
BetaMemory bm,
InternalWorkingMemory wm,
LeftTupleSets srcLeftTuples,
LeftTupleSets trgLeftTuples,
LeftTupleSets stagedLeftTuples) {
LeftTupleMemory ltm = bm.getLeftTupleMemory();
RightTupleMemory rtm = bm.getRightTupleMemory();
ContextEntry[] contextEntry = bm.getContext();
BetaConstraints constraints = notNode.getRawConstraints();
boolean leftUpdateOptimizationAllowed = notNode.isLeftUpdateOptimizationAllowed();
for (LeftTuple leftTuple = srcLeftTuples.getUpdateFirst(); leftTuple != null; ) {
LeftTuple next = leftTuple.getStagedNext();
FastIterator rightIt = notNode.getRightIterator(rtm);
RightTuple firstRightTuple = notNode.getFirstRightTuple(leftTuple, rtm, null, rightIt);
// If in memory, remove it, because we'll need to add it anyway if it's not blocked, to ensure iteration order
RightTuple blocker = leftTuple.getBlocker();
if (blocker == null) {
if (leftTuple.getMemory() != null) { // memory can be null, if blocker was deleted in same do loop
ltm.remove(leftTuple);
}
} else {
// check if we changed bucket
if (rtm.isIndexed() && !rightIt.isFullIterator()) {
// if newRightTuple is null, we assume there was a bucket change and that bucket is empty
if (firstRightTuple == null || firstRightTuple.getMemory() != blocker.getMemory()) {
blocker.removeBlocked(leftTuple);
blocker = null;
}
}
}
constraints.updateFromTuple(contextEntry,
wm,
leftTuple);
if ( !leftUpdateOptimizationAllowed && blocker != null ) {
// optimization disabled: always discard the old blocker and search again
blocker.removeBlocked(leftTuple);
blocker = null;
}
// if we where not blocked before (or changed buckets), or the previous blocker no longer blocks, then find the next blocker
if (blocker == null || !constraints.isAllowedCachedLeft(contextEntry,
blocker.getFactHandle())) {
if (blocker != null) {
// remove previous blocker if it exists, as we know it doesn't block any more
blocker.removeBlocked(leftTuple);
}
// find first blocker, because it's a modify, we need to start from the beginning again
for (RightTuple newBlocker = firstRightTuple; newBlocker != null; newBlocker = (RightTuple) rightIt.next(newBlocker)) {
if (constraints.isAllowedCachedLeft(contextEntry,
newBlocker.getFactHandle())) {
leftTuple.setBlocker(newBlocker);
newBlocker.addBlocked(leftTuple);
break;
}
}
LeftTuple childLeftTuple = leftTuple.getFirstChild();
if (leftTuple.getBlocker() != null) {
// blocked
if (childLeftTuple != null) {
// blocked, with previous children, so must have not been previously blocked, so retract
// no need to remove, as we removed at the start
// to be matched against, as it's now blocked
childLeftTuple.setPropagationContext(leftTuple.getBlocker().getPropagationContext()); // we have the righttuple, so use it for the pctx
RuleNetworkEvaluator.deleteLeftChild(childLeftTuple, trgLeftTuples, stagedLeftTuples);
} // else: it's blocked now and no children so blocked before, thus do nothing
} else if (childLeftTuple == null) {
// not blocked, with no children, must have been previously blocked so assert
ltm.add(leftTuple); // add to memory so other fact handles can attempt to match
trgLeftTuples.addInsert(sink.createLeftTuple(leftTuple,
sink,
leftTuple.getPropagationContext(), true)); // use leftTuple for the pctx here, as the right one is not available
// this won't cause a problem, as the trigger tuple (to the left) will be more recent anwyay
} else {
updateChildLeftTuple(childLeftTuple, stagedLeftTuples, trgLeftTuples);
// not blocked, with children, so wasn't previous blocked and still isn't so modify
ltm.add(leftTuple); // add to memory so other fact handles can attempt to match
childLeftTuple.reAddLeft();
}
}
leftTuple.clearStaged();
leftTuple = next;
}
constraints.resetTuple(contextEntry);
}
/**
 * Handles staged right updates in two passes: first try to block currently
 * unblocked left tuples (same matching as a right insert), then re-match the
 * left tuples this right tuple previously blocked (its tempBlocked list)
 * against the remaining right tuples, propagating inserts for any that become
 * unblocked.
 */
public void doRightUpdates(NotNode notNode,
LeftTupleSink sink,
BetaMemory bm,
InternalWorkingMemory wm,
RightTupleSets srcRightTuples,
LeftTupleSets trgLeftTuples,
LeftTupleSets stagedLeftTuples) {
LeftTupleMemory ltm = bm.getLeftTupleMemory();
RightTupleMemory rtm = bm.getRightTupleMemory();
ContextEntry[] contextEntry = bm.getContext();
BetaConstraints constraints = notNode.getRawConstraints();
// comparison/unification indexes cannot resume mid-bucket, so re-scan from the start
boolean iterateFromStart = notNode.isIndexedUnificationJoin() || rtm.getIndexType().isComparison();
for (RightTuple rightTuple = srcRightTuples.getUpdateFirst(); rightTuple != null; ) {
RightTuple next = rightTuple.getStagedNext();
if ( ltm != null && ltm.size() > 0 ) {
constraints.updateFromFactHandle( contextEntry,
wm,
rightTuple.getFactHandle() );
FastIterator leftIt = notNode.getLeftIterator( ltm );
LeftTuple firstLeftTuple = notNode.getFirstLeftTuple( rightTuple, ltm, leftIt );
// first process non-blocked tuples, as we know only those ones are in the left memory.
for ( LeftTuple leftTuple = firstLeftTuple; leftTuple != null; ) {
// preserve next now, in case we remove this leftTuple
LeftTuple temp = (LeftTuple) leftIt.next( leftTuple );
if ( leftTuple.getStagedType() == LeftTuple.UPDATE ) {
// ignore, as it will get processed via left iteration. Children cannot be processed twice
leftTuple = temp;
continue;
}
// we know that only unblocked LeftTuples are still in the memory
if ( constraints.isAllowedCachedRight( contextEntry,
leftTuple ) ) {
leftTuple.setBlocker( rightTuple );
rightTuple.addBlocked( leftTuple );
// this is now blocked so remove from memory
ltm.remove( leftTuple );
LeftTuple childLeftTuple = leftTuple.getFirstChild();
if ( childLeftTuple != null ) {
childLeftTuple.setPropagationContext( rightTuple.getPropagationContext() );
RuleNetworkEvaluator.deleteRightChild( childLeftTuple, trgLeftTuples, stagedLeftTuples );
}
}
leftTuple = temp;
}
}
LeftTuple firstBlocked = rightTuple.getTempBlocked();
if ( firstBlocked != null ) {
RightTuple rootBlocker = rightTuple.getTempNextRightTuple();
if ( rootBlocker == null ) {
iterateFromStart = true;
}
FastIterator rightIt = notNode.getRightIterator( rtm );
// iterate all the existing previous blocked LeftTuples
for ( LeftTuple leftTuple = firstBlocked; leftTuple != null; ) {
LeftTuple temp = leftTuple.getBlockedNext();
leftTuple.clearBlocker();
if ( leftTuple.getStagedType() == LeftTuple.UPDATE ) {
// ignore, as it will get processed via left iteration. Children cannot be processed twice
// but need to add it back into list first
leftTuple.setBlocker( rightTuple );
rightTuple.addBlocked( leftTuple );
leftTuple = temp;
continue;
}
constraints.updateFromTuple( contextEntry,
wm,
leftTuple );
if ( iterateFromStart ) {
rootBlocker = notNode.getFirstRightTuple( leftTuple, rtm, null, rightIt );
}
// we know that older tuples have been checked so continue next
for ( RightTuple newBlocker = rootBlocker; newBlocker != null; newBlocker = (RightTuple) rightIt.next( newBlocker ) ) {
// cannot select a RightTuple queued in the delete list
// There may be UPDATE RightTuples too, but that's ok. They've already been re-added to the correct bucket, safe to be reprocessed.
if ( leftTuple.getStagedType() != LeftTuple.DELETE && newBlocker.getStagedType() != LeftTuple.DELETE &&
constraints.isAllowedCachedLeft( contextEntry, newBlocker.getFactHandle() ) ) {
leftTuple.setBlocker( newBlocker );
newBlocker.addBlocked( leftTuple );
break;
}
}
if ( leftTuple.getBlocker() == null ) {
// was previous blocked and not in memory, so add
if (ltm != null) {
ltm.add( leftTuple );
}
// subclasses like ForallNotNode might override this propagation
trgLeftTuples.addInsert( sink.createLeftTuple( leftTuple,
sink,
rightTuple.getPropagationContext(), true ) );
}
leftTuple = temp;
}
}
rightTuple.clearStaged();
rightTuple = next;
}
constraints.resetFactHandle(contextEntry);
constraints.resetTuple(contextEntry);
}
/**
 * Handles staged left deletes: unblocked tuples are removed from left memory
 * and their child propagation retracted; blocked tuples only need detaching
 * from their blocker (blocked tuples are never in left memory).
 */
public void doLeftDeletes(BetaMemory bm,
LeftTupleSets srcLeftTuples,
LeftTupleSets trgLeftTuples,
LeftTupleSets stagedLeftTuples) {
LeftTupleMemory ltm = bm.getLeftTupleMemory();
for (LeftTuple leftTuple = srcLeftTuples.getDeleteFirst(); leftTuple != null; ) {
LeftTuple next = leftTuple.getStagedNext();
RightTuple blocker = leftTuple.getBlocker();
if (blocker == null) {
if (leftTuple.getMemory() != null) {
// it may have been staged and never actually added
ltm.remove(leftTuple);
}
LeftTuple childLeftTuple = leftTuple.getFirstChild();
if (childLeftTuple != null) { // NotNode only has one child
childLeftTuple.setPropagationContext(leftTuple.getPropagationContext());
RuleNetworkEvaluator.deleteLeftChild(childLeftTuple, trgLeftTuples, stagedLeftTuples); // no need to update pctx, as no right available, and pctx will exist on a parent LeftTuple anyway
}
} else {
blocker.removeBlocked(leftTuple);
}
leftTuple.clearStaged();
leftTuple = next;
}
}
/**
 * Handles staged right deletes: the right tuple is removed from right memory
 * and every left tuple it blocked is re-matched against later right tuples
 * (or from the bucket start for comparison indexes); tuples left unblocked are
 * re-added to left memory and propagated as inserts.
 */
public void doRightDeletes(NotNode notNode,
LeftTupleSink sink,
BetaMemory bm,
InternalWorkingMemory wm,
RightTupleSets srcRightTuples,
LeftTupleSets trgLeftTuples) {
LeftTupleMemory ltm = bm.getLeftTupleMemory();
RightTupleMemory rtm = bm.getRightTupleMemory();
ContextEntry[] contextEntry = bm.getContext();
BetaConstraints constraints = notNode.getRawConstraints();
for (RightTuple rightTuple = srcRightTuples.getDeleteFirst(); rightTuple != null; ) {
RightTuple next = rightTuple.getStagedNext();
FastIterator it = notNode.getRightIterator(rtm);
// assign now, so we can remove from memory before doing any possible propagations
boolean useComparisonIndex = rtm.getIndexType().isComparison();
RightTuple rootBlocker = useComparisonIndex ? null : (RightTuple) it.next(rightTuple);
if (rightTuple.getMemory() != null) {
// it may have been staged and never actually added
rtm.remove(rightTuple);
}
if (rightTuple.getBlocked() != null) {
for (LeftTuple leftTuple = rightTuple.getBlocked(); leftTuple != null; ) {
LeftTuple temp = leftTuple.getBlockedNext();
leftTuple.clearBlocker();
if (leftTuple.getStagedType() == LeftTuple.UPDATE) {
// ignore, as it will get processed via left iteration. Children cannot be processed twice
leftTuple = temp;
continue;
}
constraints.updateFromTuple(contextEntry,
wm,
leftTuple);
if (useComparisonIndex) {
// comparison index: cannot resume from the deleted tuple, restart per left tuple
rootBlocker = rtm.getFirst(leftTuple, null, it);
}
// we know that older tuples have been checked so continue next
for (RightTuple newBlocker = rootBlocker; newBlocker != null; newBlocker = (RightTuple) it.next(newBlocker)) {
if (constraints.isAllowedCachedLeft(contextEntry,
newBlocker.getFactHandle())) {
leftTuple.setBlocker(newBlocker);
newBlocker.addBlocked(leftTuple);
break;
}
}
if (leftTuple.getBlocker() == null) {
// was previous blocked and not in memory, so add
ltm.add(leftTuple);
trgLeftTuples.addInsert(sink.createLeftTuple(leftTuple,
sink,
rightTuple.getPropagationContext(), true));
}
leftTuple = temp;
}
}
rightTuple.nullBlocked();
rightTuple.clearStaged();
rightTuple = next;
}
constraints.resetTuple(contextEntry);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicInteger;
import javax.cache.Cache;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import static org.apache.ignite.cache.CacheMode.LOCAL;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
/**
* Grid cache concurrent hash map self test.
*/
public class GridCacheConcurrentMapSelfTest extends GridCommonAbstractTest {
/** Ip finder. */
private static final TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
CacheConfiguration cc = defaultCacheConfiguration();
cc.setCacheMode(LOCAL);
cc.setWriteSynchronizationMode(FULL_SYNC);
// deliberately tiny initial table so the tests force many rehashes
cc.setStartSize(4);
TcpDiscoverySpi disco = new TcpDiscoverySpi();
disco.setIpFinder(ipFinder);
cfg.setDiscoverySpi(disco);
cfg.setCacheConfiguration(cc);
return cfg;
}
/** {@inheritDoc} */
@Override protected void beforeTest() throws Exception {
startGrid();
}
/** {@inheritDoc} */
@Override protected void afterTest() throws Exception {
stopAllGrids();
}
/**
 * Sequentially puts 100K entries (start size is 4, forcing repeated rehashes),
 * then verifies every value via get(), the reported size() and a full iteration.
 *
 * @throws Exception If failed.
 */
public void testRehash() throws Exception {
IgniteCache<Integer, String> c = grid().cache(null);
int cnt = 100 * 1024;
for (int i = 0; i < cnt; i++) {
c.put(i, Integer.toString(i));
if (i > 0 && i % 50000 == 0)
info(">>> " + i + " puts completed");
}
for (int i = 0; i < cnt; i++)
assertEquals(Integer.toString(i), c.get(i));
assertEquals(cnt, c.size());
int idx = 0;
for (Cache.Entry<Integer, String> e : c) {
assertNotNull(e.getValue());
idx++;
}
assertEquals(cnt, idx);
}
/**
 * Same as {@link #testRehash()} but with random (possibly repeated) keys;
 * the expected contents are tracked in a shadow map and compared afterwards.
 *
 * @throws Exception If failed.
 */
public void testRehashRandom() throws Exception {
IgniteCache<Integer, String> c = grid().cache(null);
int cnt = 100 * 1024;
Random rnd = new Random();
Map<Integer, String> puts = new HashMap<>();
for (int i = 0; i < cnt * 2; i++) {
int key = rnd.nextInt(cnt);
c.put(key, Integer.toString(key));
puts.put(key, Integer.toString(key));
if (i > 0 && i % 50000 == 0)
info(">>> " + i + " puts completed");
}
for (Integer key : puts.keySet())
assertEquals(Integer.toString(key), c.get(key));
assertEquals(puts.size(), c.size());
int idx = 0;
for (Cache.Entry<Integer, String> e : c) {
assertNotNull(e.getValue());
idx++;
}
assertEquals(puts.size(), idx);
}
/**
 * 10 threads put/get concurrently while each thread holds one live iterator,
 * drains it, then drops the reference and triggers GC.
 * NOTE(review): presumably exercises weak-reference cleanup of cache iterators
 * during concurrent rehashing — confirm against the cache iterator implementation.
 *
 * @throws Exception If failed.
 */
public void testRehashMultithreaded1() throws Exception {
final AtomicInteger tidGen = new AtomicInteger();
final Random rand = new Random();
final int cnt = 100 * 1024;
multithreaded(new Callable<Object>() {
@SuppressWarnings("UnusedAssignment")
@Override public Object call() throws Exception {
IgniteCache<Integer, String> c = grid().cache(null);
int tid = tidGen.getAndIncrement();
// stagger each thread's key range so threads overlap but start offset
int start = 2 * 1024 * tid;
Iterator<Cache.Entry<Integer, String>> it = null;
for (int i = start; i < start + cnt; i++) {
int key = i % cnt;
// create the iterator mid-stream, at a thread-dependent point
if (it == null && i >= start + tid * 100)
it = c.iterator();
c.put(key, Integer.toString(key));
c.get(rand.nextInt(cnt));
}
// Go through iterators.
while(it.hasNext())
it.next();
// Make sure that hard references are gone.
it = null;
for (int i = start; i < start + cnt; i++) {
int key = i % cnt;
assertEquals(Integer.toString(key), c.get(key));
}
assertEquals(cnt, c.size());
int idx = 0;
for (Cache.Entry<Integer, String> e : c) {
assertNotNull(e.getValue());
idx++;
}
assertEquals(cnt, idx);
System.gc();
return null;
}
}, 10);
// touch the cache after GC to let any iterator cleanup run
jcache().get(rand.nextInt(cnt));
System.gc();
Thread.sleep(1000);
jcache().get(rand.nextInt(cnt));
}
/**
 * Variant of {@link #testRehashMultithreaded1()} with three iterators per
 * thread that are abandoned (never drained) part-way through, checking that
 * dropped, partially-consumed iterators do not corrupt the map or leak.
 *
 * @throws Exception If failed.
 */
public void testRehashMultithreaded2() throws Exception {
final AtomicInteger tidGen = new AtomicInteger(0);
final Random rand = new Random();
final int cnt = 100 * 1024;
multithreaded(new Callable<Object>() {
@SuppressWarnings("UnusedAssignment")
@Override public Object call() throws Exception {
IgniteCache<Integer, String> c = grid().cache(null);
int tid = tidGen.getAndIncrement();
int start = 2 * 1024 * tid;
Iterator<Cache.Entry<Integer, String>> it1 = null;
Iterator<Cache.Entry<Integer, String>> it2 = null;
Iterator<Cache.Entry<Integer, String>> it3 = null;
boolean forgot = false;
for (int i = start; i < start + cnt; i++) {
int key = i % cnt;
if (!forgot && i >= start + tid * 100) {
if (it1 == null)
it1 = c.iterator();
if (it2 == null)
it2 = c.iterator();
if (it3 == null)
it3 = c.iterator();
}
c.put(key, Integer.toString(key));
c.get(rand.nextInt(cnt));
if (!forgot && i == cnt) {
info("Forgetting iterators [it1=" + it1 + ", it2=" + it2 + ", it3=" + it3 + ']');
// GC
it1 = null;
it2 = null;
it3 = null;
forgot = true;
}
}
// Make sure that hard references are gone.
it1 = null;
it2 = null;
it3 = null;
for (int i = start; i < start + cnt; i++) {
int key = i % cnt;
assertEquals(Integer.toString(key), c.get(key));
}
assertEquals(cnt, c.size());
int idx = 0;
for (Cache.Entry<Integer, String> e : c) {
assertNotNull(e.getValue());
idx++;
}
assertEquals(cnt, idx);
System.gc();
return null;
}
}, 10);
jcache().get(rand.nextInt(cnt));
System.gc();
Thread.sleep(1000);
jcache().get(rand.nextInt(cnt));
}
/**
 * Repeatedly creates iterators on a near-empty cache across many threads,
 * touches each once, drops it and GCs — checking that barely-used iterators
 * are cleaned up without errors.
 *
 * @throws Exception If failed.
 */
@SuppressWarnings("ResultOfObjectAllocationIgnored")
public void testEmptyWeakIterator() throws Exception {
final IgniteCache<Integer, String> c = grid().cache(null);
for (int i = 0; i < 10; i++) {
multithreaded(new Callable<Object>() {
@SuppressWarnings("UnusedAssignment")
@Override public Object call() throws Exception {
Iterator<Cache.Entry<Integer, String>> it = c.iterator();
for (int i = 0; i < 1000; i++) {
c.put(i, String.valueOf(i));
if (i == 0)
it.hasNext();
}
// Make sure that hard references are gone.
it = null;
System.gc();
return null;
}
}, Math.min(16, Runtime.getRuntime().availableProcessors()));
for (int r = 0; r < 10; r++) {
System.gc();
c.get(100);
}
}
}
}
| |
/*
* Copyright 2012 - 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.solr.repository.query;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.Collections;
import org.apache.solr.common.params.HighlightParams;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.repository.core.RepositoryMetadata;
import org.springframework.data.repository.query.RepositoryQuery;
import org.springframework.data.solr.core.SolrCallback;
import org.springframework.data.solr.core.SolrOperations;
import org.springframework.data.solr.core.mapping.SolrPersistentEntity;
import org.springframework.data.solr.core.query.HighlightOptions;
import org.springframework.data.solr.core.query.HighlightQuery;
import org.springframework.data.solr.core.query.Query;
import org.springframework.data.solr.core.query.SimpleQuery;
import org.springframework.data.solr.core.query.SimpleStringCriteria;
import org.springframework.data.solr.repository.Facet;
import org.springframework.data.solr.repository.Highlight;
import org.springframework.data.solr.repository.ProductBean;
import org.springframework.data.solr.repository.SolrCrudRepository;
import org.springframework.data.solr.repository.support.MappingSolrEntityInformation;
/**
* @author Christoph Strobl
*
*/
@RunWith(MockitoJUnitRunner.class)
public class SolrQueryTests {
@Mock
private RepositoryMetadata metadataMock;
@Mock
private SolrOperations solrOperationsMock;
@Mock
private SolrPersistentEntity<ProductBean> persitentEntityMock;
private SolrEntityInformationCreator entityInformationCreator;
@SuppressWarnings("unchecked")
@Before
public void setUp() {
entityInformationCreator = new SolrEntityInformationCreatorImpl();
Mockito.when(persitentEntityMock.getType()).thenReturn(ProductBean.class);
Mockito.when(solrOperationsMock.execute(Matchers.any(SolrCallback.class))).thenReturn(
new PageImpl<ProductBean>(Collections.<ProductBean> emptyList()));
}
@Test(expected = InvalidDataAccessApiUsageException.class)
public void testQueryWithHighlightAndFaceting() throws NoSuchMethodException, SecurityException {
createQueryForMethod("findAndApplyHighlightingAndFaceting", Pageable.class).execute(
new Object[] { new PageRequest(0, 10) });
}
@SuppressWarnings("unchecked")
@Test
public void testQueryWithHighlight() {
ArgumentCaptor<HighlightQuery> captor = ArgumentCaptor.forClass(HighlightQuery.class);
createQueryForMethod("findAndApplyHighlighting", Pageable.class).execute(new Object[] { new PageRequest(0, 10) });
Mockito.verify(solrOperationsMock, Mockito.times(1)).queryForHighlightPage(captor.capture(),
(Class<ProductBean>) Matchers.any());
HighlightOptions capturedOptions = captor.getValue().getHighlightOptions();
Assert.assertNotNull(capturedOptions);
}
@SuppressWarnings("unchecked")
@Test
public void testQueryWithHighlightParameters() {
ArgumentCaptor<HighlightQuery> captor = ArgumentCaptor.forClass(HighlightQuery.class);
createQueryForMethod("findAndApplyHighlightingAllParameters", Pageable.class).execute(
new Object[] { new PageRequest(0, 10) });
Mockito.verify(solrOperationsMock, Mockito.times(1)).queryForHighlightPage(captor.capture(),
(Class<ProductBean>) Matchers.any());
HighlightOptions capturedOptions = captor.getValue().getHighlightOptions();
Assert.assertNotNull(capturedOptions);
Assert.assertEquals("<b>", capturedOptions.getSimplePrefix());
Assert.assertEquals("</b>", capturedOptions.getSimplePostfix());
Assert.assertEquals("name", capturedOptions.getFields().get(0).getName());
Assert.assertEquals("description", capturedOptions.getFields().get(1).getName());
Assert.assertEquals("simple", capturedOptions.getFormatter());
Assert.assertEquals(Integer.valueOf(10), capturedOptions.getFragsize());
Assert.assertEquals(Integer.valueOf(20), capturedOptions.getNrSnipplets());
Assert
.assertEquals("name:with", ((SimpleStringCriteria) capturedOptions.getQuery().getCriteria()).getQueryString());
}
@SuppressWarnings("unchecked")
@Test
public void testQueryWithParametrizedHighlightQuery() {
ArgumentCaptor<HighlightQuery> captor = ArgumentCaptor.forClass(HighlightQuery.class);
createQueryForMethod("findAndApplyHighlightingWithParametrizedHighlightQuery", String.class, Pageable.class)
.execute(new Object[] { "spring", new PageRequest(0, 10) });
Mockito.verify(solrOperationsMock, Mockito.times(1)).queryForHighlightPage(captor.capture(),
(Class<ProductBean>) Matchers.any());
HighlightOptions capturedOptions = captor.getValue().getHighlightOptions();
Assert.assertEquals("name:*spring*",
((SimpleStringCriteria) capturedOptions.getQuery().getCriteria()).getQueryString());
}
@SuppressWarnings("unchecked")
@Test
public void testQueryWithNonDefaultHighlightFormatter() {
ArgumentCaptor<HighlightQuery> captor = ArgumentCaptor.forClass(HighlightQuery.class);
createQueryForMethod("findAndApplyHighlightingWithNonDefaultFormatter", Pageable.class).execute(
new Object[] { new PageRequest(0, 10) });
Mockito.verify(solrOperationsMock, Mockito.times(1)).queryForHighlightPage(captor.capture(),
(Class<ProductBean>) Matchers.any());
HighlightOptions capturedOptions = captor.getValue().getHighlightOptions();
Assert.assertNotNull(capturedOptions);
Assert.assertNull(capturedOptions.getSimplePrefix());
Assert.assertNull(capturedOptions.getSimplePrefix());
Assert.assertNull(capturedOptions.getSimplePostfix());
Assert.assertEquals("postingshighlighter", capturedOptions.getFormatter());
Assert.assertEquals("{pre}", capturedOptions.getHighlightParameterValue(HighlightParams.TAG_PRE));
Assert.assertEquals("{post}", capturedOptions.getHighlightParameterValue(HighlightParams.TAG_POST));
}
private RepositoryQuery createQueryForMethod(String methodName, Class<?>... paramTypes) {
try {
return this.createQueryForMethod(Repo1.class.getMethod(methodName, paramTypes));
} catch (NoSuchMethodException e) {
throw new IllegalArgumentException(e.getMessage(), e);
} catch (SecurityException e) {
throw new IllegalArgumentException(e.getMessage(), e);
}
}
private RepositoryQuery createQueryForMethod(Method method) {
return new SolrQueryImpl(this.solrOperationsMock, createSolrQueryMethodFrom(method));
}
/** Builds the {@link SolrQueryMethod} metadata wrapper for a reflective method using the shared mocks. */
private SolrQueryMethod createSolrQueryMethodFrom(Method method) {
	return new SolrQueryMethod(method, metadataMock, entityInformationCreator);
}
/**
 * Repository fixture declaring one query method per {@code @Highlight} configuration
 * exercised by the tests above. The methods are looked up by name via reflection in
 * {@link #createQueryForMethod(String, Class...)} and are never invoked directly.
 */
private interface Repo1 extends SolrCrudRepository<ProductBean, String> {

	// Highlighting combined with faceting on the "name" field.
	@Facet(fields = { "name" })
	@Highlight
	Page<ProductBean> findAndApplyHighlightingAndFaceting(Pageable page);

	// Plain highlighting with all annotation defaults.
	@Highlight
	Page<ProductBean> findAndApplyHighlighting(Pageable page);

	// Every @Highlight attribute set explicitly ("snipplets" is the attribute name as
	// declared by the annotation itself).
	@Highlight(fields = { "name", "description" }, fragsize = 10, snipplets = 20, prefix = "<b>", postfix = "</b>", query = "name:with", formatter = "simple")
	Page<ProductBean> findAndApplyHighlightingAllParameters(Pageable page);

	// "?0" is substituted with the first method argument when the highlight query is built
	// (verified above to yield e.g. "name:*spring*").
	@Highlight(query = "name:*?0*")
	Page<ProductBean> findAndApplyHighlightingWithParametrizedHighlightQuery(String name, Pageable page);

	// Non-default formatter with tag-style prefix/postfix values.
	@Highlight(formatter = "postingshighlighter", prefix = "{pre}", postfix = "{post}")
	Page<ProductBean> findAndApplyHighlightingWithNonDefaultFormatter(Pageable page);
}
/** Stub creator that always hands back entity information for {@link ProductBean}, ignoring the requested class. */
private class SolrEntityInformationCreatorImpl implements SolrEntityInformationCreator {

	@SuppressWarnings("unchecked")
	@Override
	public <T, ID extends Serializable> SolrEntityInformation<T, ID> getEntityInformation(Class<T> domainClass) {
		// Cast is safe within these tests: only ProductBean repositories are ever created.
		return (SolrEntityInformation<T, ID>) new SolrEntityInformationImpl(persitentEntityMock);
	}
}
/** Entity-information fixture pinning the id type to String and the java type to ProductBean. */
private class SolrEntityInformationImpl extends MappingSolrEntityInformation<ProductBean, String> {

	public SolrEntityInformationImpl(SolrPersistentEntity<ProductBean> entity) {
		super(entity);
	}

	@Override
	public Class<String> getIdType() {
		return String.class;
	}

	@Override
	public Class<ProductBean> getJavaType() {
		return ProductBean.class;
	}
}
/** Minimal {@link AbstractSolrQuery} whose createQuery always yields a fixed fake query. */
private class SolrQueryImpl extends AbstractSolrQuery {

	public SolrQueryImpl(SolrOperations solrOperations, SolrQueryMethod solrQueryMethod) {
		super(solrOperations, solrQueryMethod);
	}

	@Override
	protected Query createQuery(SolrParameterAccessor parameterAccessor) {
		// The tests only inspect the highlight options captured from the mock, so the
		// actual query content is irrelevant.
		return new SimpleQuery(new SimpleStringCriteria("fake:query"));
	}
}
}
| |
package views.formdata;
import java.util.ArrayList;
import java.util.List;
import models.DiveTable;
import models.DiveTableDB;
import play.data.validation.ValidationError;
import utils.DiveTableCalc;
/**
 * Backing form data for the dive-table event form. Holds the raw string values submitted by
 * the user for up to five dives, validates them, and converts them into a
 * {@link DiveTableCalc} result.
 *
 * Created by David.
 */
public class DiveTableFormData {

  public String eventID = ""; //Serves as unique id (slug) for the dive event. Used in GET urls.
  public String location = "";
  public String date = "";
  public String depth1 = "";
  public String bottomTime1 = "";
  public String surfaceIntervalTime1 = "";
  public String depth2 = "";
  public String bottomTime2 = "";
  public String surfaceIntervalTime2 = "";
  public String depth3 = "";
  public String bottomTime3 = "";
  public String surfaceIntervalTime3 = "";
  public String depth4 = "";
  public String bottomTime4 = "";
  public String surfaceIntervalTime4 = "";
  public String depth5 = "";
  public String bottomTime5 = "";
  public String surfaceIntervalTime5 = "";
  public String diveTableCalcErrorMessage = "";

  /**
   * Default constructor, required by Play.
   */
  public DiveTableFormData() {
    // Nothing needed.
  }

  /**
   * Constructs a DiveTableFormData object manually. Currently only used by Global.java to
   * create initial data on application startup. NOTE: This doesn't currently check if
   * eventID already exists. Use with care.
   */
  public DiveTableFormData(String eventID, String location, String date, String depth1, String bottomTime1, String surfaceIntervalTime1,
                           String depth2, String bottomTime2, String surfaceIntervalTime2, String depth3, String bottomTime3,
                           String surfaceIntervalTime3, String depth4, String bottomTime4, String surfaceIntervalTime4,
                           String depth5, String bottomTime5, String surfaceIntervalTime5, String diveTableCalcErrorMessage) {
    this.eventID = eventID;
    this.location = location;
    this.date = date;
    this.depth1 = depth1;
    this.bottomTime1 = bottomTime1;
    this.surfaceIntervalTime1 = surfaceIntervalTime1;
    this.depth2 = depth2;
    this.bottomTime2 = bottomTime2;
    this.surfaceIntervalTime2 = surfaceIntervalTime2;
    this.depth3 = depth3;
    this.bottomTime3 = bottomTime3;
    this.surfaceIntervalTime3 = surfaceIntervalTime3;
    this.depth4 = depth4;
    this.bottomTime4 = bottomTime4;
    this.surfaceIntervalTime4 = surfaceIntervalTime4;
    this.depth5 = depth5;
    this.bottomTime5 = bottomTime5;
    this.surfaceIntervalTime5 = surfaceIntervalTime5;
    this.diveTableCalcErrorMessage = diveTableCalcErrorMessage;
  }

  /**
   * Create a DiveTableFormData object based upon a DiveTable.
   *
   * @param diveTable The DiveTable object.
   */
  public DiveTableFormData(DiveTable diveTable) {
    this.eventID = diveTable.getEventID();
    this.location = diveTable.getLocation();
    this.date = diveTable.getDate();
    this.depth1 = diveTable.getDepth1();
    this.bottomTime1 = diveTable.getBottomTime1();
    this.surfaceIntervalTime1 = diveTable.getSurfaceIntervalTime1();
    this.depth2 = diveTable.getDepth2();
    this.bottomTime2 = diveTable.getBottomTime2();
    this.surfaceIntervalTime2 = diveTable.getSurfaceIntervalTime2();
    this.depth3 = diveTable.getDepth3();
    this.bottomTime3 = diveTable.getBottomTime3();
    this.surfaceIntervalTime3 = diveTable.getSurfaceIntervalTime3();
    this.depth4 = diveTable.getDepth4();
    this.bottomTime4 = diveTable.getBottomTime4();
    this.surfaceIntervalTime4 = diveTable.getSurfaceIntervalTime4();
    this.depth5 = diveTable.getDepth5();
    this.bottomTime5 = diveTable.getBottomTime5();
    this.surfaceIntervalTime5 = diveTable.getSurfaceIntervalTime5();
    this.diveTableCalcErrorMessage = diveTable.getDiveTableCalcErrorMessage();
  }

  /**
   * Populates the dive input data into a list and uses it to calculate dive-table results.
   * A dive counts as "entered" when its depth field is non-blank; the first diveCount dives
   * are flattened into [depth, bottomTime, surfaceIntervalTime] triples, as before.
   *
   * @return DiveTableCalc The object holding dive results.
   * @throws NumberFormatException if any consumed field is not a whole number
   */
  public DiveTableCalc computeDiveTableCalc() {
    // Gather the numbered fields into arrays so the five dives can be handled uniformly
    // instead of via five copy-pasted if-blocks.
    String[] depths = { depth1, depth2, depth3, depth4, depth5 };
    String[] bottomTimes = { bottomTime1, bottomTime2, bottomTime3, bottomTime4, bottomTime5 };
    String[] surfaceIntervalTimes = { surfaceIntervalTime1, surfaceIntervalTime2,
        surfaceIntervalTime3, surfaceIntervalTime4, surfaceIntervalTime5 };

    // First, determine how many dives there are (non-blank depth == a dive was entered).
    int diveCount = 0;
    for (String depth : depths) {
      if (depth != null && !depth.trim().equals("")) {
        diveCount++;
      }
    }

    // Populate list holding all dive input data. Will use with DiveTableCalc below.
    List<Integer> diveDataList = new ArrayList<Integer>();
    for (int i = 0; i < diveCount; i++) {
      diveDataList.add(Integer.parseInt(depths[i]));
      diveDataList.add(Integer.parseInt(bottomTimes[i]));
      diveDataList.add(Integer.parseInt(surfaceIntervalTimes[i]));
    }

    // Calculate and return dive object.
    DiveTableCalc dive = new DiveTableCalc();
    dive.calculateDiveTableResults(diveDataList);
    return dive;
  }

  /**
   * Validates the form input by the user. EventID must be non-empty, alphanumeric and unique;
   * Location, Date, and the first dive's depth/bottom time/surface interval are required.
   * Any dive-table calculation error is reported against diveTableCalcErrorMessage.
   *
   * @return null if no errors, list of ValidationErrors if errors.
   */
  public List<ValidationError> validate() {
    List<ValidationError> errors = new ArrayList<>();
    // Chain the eventID checks: previously a null eventID fell through to matches()/the DB
    // lookup and threw a NullPointerException instead of reporting a validation error.
    if (eventID == null || eventID.length() == 0) {
      errors.add(new ValidationError("eventID", "eventID is required"));
    }
    else if (!eventID.matches("^[a-zA-Z0-9]+$")) { // Checks if string is alphanumeric.
      errors.add(new ValidationError("eventID", "EventID must only contain letters and digits."));
    }
    else if (DiveTableDB.eventIDExists(eventID)) {
      errors.add(new ValidationError("eventID", "EventID already exists"));
    }
    if (location == null || location.length() == 0) {
      errors.add(new ValidationError("location", "Location is required"));
    }
    if (date == null || date.length() == 0) {
      errors.add(new ValidationError("date", "Date is required"));
    }
    if (depth1 == null || depth1.length() == 0) {
      errors.add(new ValidationError("depth1", "Depth-1 is required"));
    }
    if (bottomTime1 == null || bottomTime1.length() == 0) {
      errors.add(new ValidationError("bottomTime1", "BottomTime-1 is required"));
    }
    if (surfaceIntervalTime1 == null || surfaceIntervalTime1.length() == 0) {
      errors.add(new ValidationError("surfaceIntervalTime1", "SurfaceIntervalTime-1 is required"));
    }
    // Run the calculation defensively: non-numeric input previously escaped validate() as an
    // uncaught NumberFormatException; report it as a normal validation error instead.
    try {
      DiveTableCalc dive = computeDiveTableCalc();
      if (dive.errorFound > 0) {
        errors.add(new ValidationError("diveTableCalcErrorMessage", "Error found at Dive #" + dive.errorFound + ": " + dive.errorMessage));
      }
    } catch (NumberFormatException e) {
      errors.add(new ValidationError("diveTableCalcErrorMessage",
          "Depth, bottom time and surface interval values must be whole numbers."));
    }
    return errors.isEmpty() ? null : errors;
  }
}
| |
//=====================================================================
//
//File: $RCSfile: CanvasCopyPasteTests.java,v $
//Version: $Revision: 1.21 $
//Modified: $Date: 2013/05/10 05:41:51 $
//
//(c) Copyright 2007-2014 by Mentor Graphics Corp. All rights reserved.
//
//=====================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//=====================================================================
package com.mentor.nucleus.bp.ui.canvas.test;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.gef.editparts.ZoomManager;
import org.eclipse.gef.tools.AbstractTool;
import org.eclipse.swt.graphics.Point;
import org.eclipse.ui.PlatformUI;
import com.mentor.nucleus.bp.core.Association_c;
import com.mentor.nucleus.bp.core.ModelClass_c;
import com.mentor.nucleus.bp.core.Ooaofooa;
import com.mentor.nucleus.bp.core.Package_c;
import com.mentor.nucleus.bp.core.PackageableElement_c;
import com.mentor.nucleus.bp.core.common.ClassQueryInterface_c;
import com.mentor.nucleus.bp.core.common.NonRootModelElement;
import com.mentor.nucleus.bp.core.common.Transaction;
import com.mentor.nucleus.bp.core.common.TransactionException;
import com.mentor.nucleus.bp.core.common.TransactionManager;
import com.mentor.nucleus.bp.core.ui.Selection;
import com.mentor.nucleus.bp.test.common.BaseTest;
import com.mentor.nucleus.bp.test.common.UITestingUtilities;
import com.mentor.nucleus.bp.ui.canvas.GraphicalElement_c;
import com.mentor.nucleus.bp.ui.canvas.Model_c;
import com.mentor.nucleus.bp.ui.canvas.Shape_c;
import com.mentor.nucleus.bp.ui.graphics.actions.CanvasCopyAction;
import com.mentor.nucleus.bp.ui.graphics.actions.CanvasPasteAction;
import com.mentor.nucleus.bp.ui.graphics.editor.GraphicalEditor;
import com.mentor.nucleus.bp.ui.graphics.editor.ModelEditor;
import com.mentor.nucleus.bp.utilities.ui.CanvasUtilities;
/**
 * UI-level tests of copy/paste on the graphical canvas. Each test drives the real editor
 * through selections, clipboard actions and simulated mouse events, then compares the
 * resulting canvas against stored expected results (or regenerates them when
 * {@link #generateResults} is true).
 */
public class CanvasCopyPasteTests extends CanvasTest {

	/** Name of the test model loaded once for the whole class. */
	private String testModelName = "CopyPasteTestModel";
	/** Guards the one-time model load performed in setUp(). */
	private static boolean initialized;
	/** Per-test identifier; consumed by getResultName() to name the results file. */
	private String test_id;
	/** When true, tests write expected-results files instead of validating against them. */
	public static boolean generateResults = false;

	public CanvasCopyPasteTests(String name) {
		super(null, name);
	}

	/** Loads the test project on first use; persistence must be enabled before the load. */
	public void setUp() throws Exception {
		super.setUp();
		if(!initialized) {
			Ooaofooa.setPersistEnabled(true);
			loadProject(testModelName);
			initialized = true;
		}
	}

	/** Copies the "DT Package" package and pastes it at system level, then validates both the system canvas and the pasted package's canvas. */
	public void testCopyPackageToSystem() {
		test_id = "1";
		Package_c domain = Package_c.getOneEP_PKGOnR1401(m_sys);
		assertNotNull(domain);
		CanvasUtilities.openCanvasEditor(domain);
		GraphicalEditor ce = ((ModelEditor) PlatformUI.getWorkbench()
				.getActiveWorkbenchWindow().getActivePage().getActiveEditor())
				.getGraphicalEditor();
		Package_c dtPackage = Package_c.getOneEP_PKGOnR1405(m_sys, new ClassQueryInterface_c() {
			public boolean evaluate(Object candidate) {
				return ((Package_c)candidate).getName().equals("DT Package");
			}
		});
		assertNotNull(dtPackage);
		addElementToSelection(true, dtPackage);
		copySelection(ce);
		addElementToSelection(true, m_sys);
		CanvasUtilities.openCanvasEditor(m_sys);
		ce = ((ModelEditor) PlatformUI.getWorkbench()
				.getActiveWorkbenchWindow().getActivePage().getActiveEditor())
				.getGraphicalEditor();
		UITestingUtilities.pasteClipboardContents(UITestingUtilities.getClearPoint(ce), ce);
		validateOrGenerateResults(ce, generateResults);
		Package_c newDtPackage = Package_c.getOneEP_PKGOnR1405(m_sys, new ClassQueryInterface_c() {
			public boolean evaluate(Object candidate) {
				return ((Package_c)candidate).getName().equals("DT Package");
			}
		});
		// test that datatypes of package are copied correctly
		assertNotNull("Package was not created along with paste.", newDtPackage);
		addElementToSelection(true, newDtPackage);
		CanvasUtilities.openCanvasEditor(newDtPackage);
		ce = ((ModelEditor) PlatformUI.getWorkbench()
				.getActiveWorkbenchWindow().getActivePage().getActiveEditor())
				.getGraphicalEditor();
		test_id = "2";
		validateOrGenerateResults(ce, generateResults);
	}

	/** Select-all copy of the "Test SS" subsystem, pasted at an empty canvas location. */
	public void testCopySSWithNonSimpleAssociations() {
		// TODO: dts0100656082
		test_id = "3";
		Package_c subsystem = Package_c.PackageInstance(modelRoot, new ClassQueryInterface_c() {
			public boolean evaluate(Object candidate) {
				return ((Package_c)candidate).getName().equals("Test SS");
			}
		});
		assertNotNull(subsystem);
		CanvasUtilities.openCanvasEditor(subsystem);
		GraphicalEditor ce = ((ModelEditor) PlatformUI.getWorkbench()
				.getActiveWorkbenchWindow().getActivePage().getActiveEditor())
				.getGraphicalEditor();
		ce.getSelectAllAction().run();
		copySelection(ce);
		// Click an empty spot so the paste lands away from the originals.
		CanvasTestUtilities.doMouseMove(600, 100);
		CanvasTestUtilities.doMousePress(600, 100);
		CanvasTestUtilities.doMouseRelease(600, 100);
		pasteClipboardElements(ce);
		validateOrGenerateResults(ce,generateResults);
	}

	/** Copies two related classes without selecting the association between them, then pastes. */
	public void testCopyClassesWithoutSelectingAssociationBetween() {
		// TODO: dts0100656082
		test_id = "4";
		Package_c subsystem = Package_c.PackageInstance(modelRoot, new ClassQueryInterface_c() {
			public boolean evaluate(Object candidate) {
				return ((Package_c)candidate).getName().equals("Test SS");
			}
		});
		assertNotNull(subsystem);
		CanvasUtilities.openCanvasEditor(subsystem);
		GraphicalEditor ce = ((ModelEditor) PlatformUI.getWorkbench()
				.getActiveWorkbenchWindow().getActivePage().getActiveEditor())
				.getGraphicalEditor();
		ModelClass_c class1 = ModelClass_c.getOneO_OBJOnR8001(PackageableElement_c.getManyPE_PEsOnR8000(subsystem), new ClassQueryInterface_c() {
			public boolean evaluate(Object candidate) {
				return ((ModelClass_c)candidate).getName().equals("Supertype");
			}
		});
		ModelClass_c class2 = ModelClass_c.getOneO_OBJOnR8001(PackageableElement_c.getManyPE_PEsOnR8000(subsystem), new ClassQueryInterface_c() {
			public boolean evaluate(Object candidate) {
				return ((ModelClass_c)candidate).getName().equals("Subtype");
			}
		});
		// Fail fast with a clear message if the model lookups come back empty; previously a
		// missing class surfaced only as a NullPointerException in addElementToSelection.
		assertNotNull("Supertype class not found in Test SS", class1);
		assertNotNull("Subtype class not found in Test SS", class2);
		addElementToSelection(true, class1);
		addElementToSelection(false, class2);
		copySelection(ce);
		CanvasTestUtilities.doMouseMove(600, 600);
		CanvasTestUtilities.doMousePress(600, 600);
		CanvasTestUtilities.doMouseRelease(600, 600);
		pasteClipboardElements(ce);
		validateOrGenerateResults(ce,generateResults);
	}

	/**
	 * Disabled pending dts0100656082: should verify that undo followed by redo restores
	 * pasted elements. Body kept (commented) as the intended script for re-enablement.
	 */
	public void testUndoRedoRestoresPastedElements() throws CoreException {
		// TODO: dts0100656082
//		ensureAvailableAndLoaded("Models", "microwave", false, true);
//		Domain_c domain = Domain_c.DomainInstance(modelRoot, new ClassQueryInterface_c() {
//
//			public boolean evaluate(Object candidate) {
//				return ((Domain_c)candidate).getName().equals("microwave");
//			}
//
//		});
//		assertNotNull(domain);
//		CanvasTestUtilities.openCanvasEditor(domain);
//		GraphicalEditor ce = ((ModelEditor) PlatformUI.getWorkbench()
//				.getActiveWorkbenchWindow().getActivePage().getActiveEditor())
//				.getGraphicalEditor();
//		ce.getSelectAllAction().run();
//		DataTypePackage_c dtPackage = DataTypePackage_c.DataTypePackageInstance(modelRoot, new ClassQueryInterface_c() {
//
//			public boolean evaluate(Object candidate) {
//				return ((DataTypePackage_c)candidate).getName().equals(Ooaofooa.Getcoredatatypespackagename(modelRoot));
//			}
//
//		});
//		assertNotNull(dtPackage);
//		selection.removeFromSelection(dtPackage);
//		copySelection(ce);
//		CanvasTestUtilities.doMouseMove(100, 100);
//		CanvasTestUtilities.doMousePress(100, 100);
//		CanvasTestUtilities.doMouseRelease(100, 100);
//		pasteClipboardElements(ce);
//		m_sys.getTransactionManager().getUndoAction().run();
//		m_sys.getTransactionManager().getRedoAction().run();
//		test_id = "5";
//		if(BaseTest.testGlobals) {
//			test_id = "5Globals";
//		}
//		validateOrGenerateResults(ce, generateResults);
	}

	/**
	 * Pastes a package via the model explorer (outside the graphics editor) and then checks
	 * that an association can be drawn between the pasted classes — proving the paste
	 * listener re-homed the graphical elements to the destination model root.
	 */
	public void testPasteOutsideOfGraphicsAllowsUpdateOfGraphicalElementModelRoots() {
		// create a new package, with two classes
		// do this inside of a transaction
		Package_c testPackage = null;
		try {
			Transaction transaction = TransactionManager.getSingleton()
					.startTransaction("Create test elements.",
							Ooaofooa.getDefaultInstance());
			m_sys.Newpackage();
			Package_c[] packages = Package_c.getManyEP_PKGsOnR1401(m_sys);
			testPackage = packages[packages.length - 1];
			testPackage.setName("PasteOutsideGraphicsTest");
			TransactionManager.getSingleton().endTransaction(transaction);
		} catch (TransactionException e) {
			fail(e.getLocalizedMessage());
		}
		assertNotNull(testPackage);
		// create two classes graphically
		CanvasUtilities.openCanvasEditor(testPackage);
		AbstractTool tool = UITestingUtilities.getTool("Classes", "Class");
		UITestingUtilities.activateTool(tool);
		CanvasTestUtilities.doMouseMove(100, 100);
		CanvasTestUtilities.doMousePress(100, 100);
		CanvasTestUtilities.doMouseMove(200, 200);
		CanvasTestUtilities.doMouseRelease(200, 200);
		UITestingUtilities.deactivateTool(tool);
		UITestingUtilities.activateTool(tool);
		CanvasTestUtilities.doMouseMove(300, 100);
		CanvasTestUtilities.doMousePress(300, 100);
		CanvasTestUtilities.doMouseMove(400, 200);
		CanvasTestUtilities.doMouseRelease(400, 200);
		UITestingUtilities.deactivateTool(tool);
		// now copy the package in ME and paste the package
		// into itself in ME
		Selection.getInstance().clear();
		Selection.getInstance().addToSelection(testPackage);
		UITestingUtilities.copyElementInExplorer(getExplorerView());
		UITestingUtilities.pasteClipboardContentsInExplorer(testPackage);
		// now open the pasted diagram and draw an association from one class
		// to another
		Package_c pastedPackage = Package_c
				.getOneEP_PKGOnR8001(PackageableElement_c
						.getManyPE_PEsOnR8000(testPackage));
		CanvasUtilities.openCanvasEditor(pastedPackage);
		GraphicalEditor editor = (GraphicalEditor) UITestingUtilities.getActiveEditor();
		ZoomManager zoomManager = (ZoomManager) editor.getAdapter(ZoomManager.class);
		zoomManager.setZoom(1);
		// drain pending UI events so the zoom change is applied before we read shapes
		while(PlatformUI.getWorkbench().getDisplay().readAndDispatch());
		// get the two shapes on the diagram
		Model_c model = editor.getModel();
		Shape_c[] shapes = Shape_c.getManyGD_SHPsOnR2(GraphicalElement_c.getManyGD_GEsOnR1(model));
		tool = UITestingUtilities.getTool("Classes", "Association");
		UITestingUtilities.activateTool(tool);
		Point shapeCenter = CanvasUtilities.getShapeCenter(shapes[0]);
		shapeCenter = CanvasTestUtilities.convertToMouseCoor(shapeCenter, model);
		CanvasTestUtilities.doMouseMove(shapeCenter.x, shapeCenter.y);
		CanvasTestUtilities.doMousePress(shapeCenter.x, shapeCenter.y);
		shapeCenter = CanvasUtilities.getShapeCenter(shapes[1]);
		shapeCenter = CanvasTestUtilities.convertToMouseCoor(shapeCenter, model);
		CanvasTestUtilities.doMouseMove(shapeCenter.x, shapeCenter.y);
		CanvasTestUtilities.doMouseRelease(shapeCenter.x, shapeCenter.y);
		UITestingUtilities.deactivateTool(tool);
		// assert that an association exists
		Association_c association = Association_c
				.getOneR_RELOnR8001(PackageableElement_c
						.getManyPE_PEsOnR8000(pastedPackage));
		// (failure message corrected: it previously read "did appropriately move", the
		// opposite of what a failure means)
		assertNotNull(
				"Association could not be created, therefore the paste listener did not appropriately move graphical elements to the destination.",
				association);
	}

	/** Runs the canvas paste action and waits for the transaction and decorators to settle. */
	private void pasteClipboardElements(GraphicalEditor ce) {
		CanvasPasteAction canvaspasteaction = new CanvasPasteAction(ce);
		canvaspasteaction.run();
		waitForTransaction();
		waitForDecorator();
	}

	/** Runs the canvas copy action and waits for its transaction to complete. */
	private void copySelection(GraphicalEditor ce) {
		CanvasCopyAction canvascopyaction = new CanvasCopyAction(ce);
		canvascopyaction.run();
		waitForTransaction();
	}

	/**
	 * Adds an element to the graphical selection after draining pending UI events.
	 *
	 * @param makeLoneSelection when true the current selection is cleared first
	 * @param element the element to select
	 */
	private void addElementToSelection(boolean makeLoneSelection, NonRootModelElement element) {
		while(PlatformUI.getWorkbench().getDisplay().readAndDispatch());
		if(makeLoneSelection)
			UITestingUtilities.clearGraphicalSelection();
		UITestingUtilities.addElementToGraphicalSelection(element);
	}

	/** Result files are keyed by the per-test id set at the start of each test method. */
	protected String getResultName() {
		return "CopyPasteTests" + "_" + test_id;
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.Closeable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.TreeSet;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Iterators;
import org.apache.cassandra.concurrent.Stage;
import org.apache.cassandra.concurrent.StageManager;
import org.apache.cassandra.db.columniterator.OnDiskAtomIterator;
import org.apache.cassandra.db.composites.CellName;
import org.apache.cassandra.db.filter.NamesQueryFilter;
import org.apache.cassandra.db.filter.QueryFilter;
import org.apache.cassandra.db.marshal.CounterColumnType;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.mutants.MemSsTableAccessMon;
import org.apache.cassandra.tracing.Tracing;
import org.apache.cassandra.utils.SearchIterator;
import org.apache.cassandra.utils.memory.HeapAllocator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class CollationController
{
private static final Logger logger = LoggerFactory.getLogger(CollationController.class);
private final ColumnFamilyStore cfs;
private final QueryFilter filter;
private final int gcBefore;
private int sstablesIterated = 0;
    /**
     * @param cfs      the column family store this read runs against
     * @param filter   query filter selecting the row and columns to read
     * @param gcBefore gc threshold handed to the final collation step
     */
    public CollationController(ColumnFamilyStore cfs, QueryFilter filter, int gcBefore)
    {
        this.cfs = cfs;
        this.filter = filter;
        this.gcBefore = gcBefore;
    }
    /** Convenience overload: runs the read without MTDB tracing. */
    public ColumnFamily getTopLevelColumns(boolean copyOnHeap)
    {
        return getTopLevelColumns(copyOnHeap, false);
    }
public ColumnFamily getTopLevelColumns(boolean copyOnHeap, boolean mtdb_trace)
{
//if (mtdb_trace)
// logger.warn("MTDB:");
return filter.filter instanceof NamesQueryFilter
&& cfs.metadata.getDefaultValidator() != CounterColumnType.instance
? collectTimeOrderedData(copyOnHeap, mtdb_trace)
: collectAllData(copyOnHeap, mtdb_trace);
}
    /**
     * Collects data in order of recency, using the sstable maxtimestamp data.
     * Once we have data for all requested columns that is newer than the newest remaining
     * maxtimestamp, we stop — newer sources (memtables first, then sstables sorted by
     * descending max timestamp) cannot be shadowed by anything still unread.
     *
     * @param copyOnHeap when true, memtable cells are copied onto the heap before being
     *                   retained in the result
     * @param mtdb_trace enables the MTDB diagnostic logging sprinkled through this method
     * @return the merged column family, or null if no source had any data for the row
     */
    private ColumnFamily collectTimeOrderedData(boolean copyOnHeap, boolean mtdb_trace)
    {
        // Accumulates merged cells; its deletionInfo doubles as the row-tombstone tracker
        // consulted below to short-circuit older sstables.
        final ColumnFamily container = ArrayBackedSortedColumns.factory.create(cfs.metadata, filter.filter.isReversed());
        if (mtdb_trace) {
            logger.warn("MTDB: container={} container.deletionInfo={}", container, container.deletionInfo());
        }
        List<OnDiskAtomIterator> iterators = new ArrayList<>();
        boolean isEmpty = true;
        Tracing.trace("Acquiring sstable references");
        ColumnFamilyStore.ViewFragment view = cfs.select(cfs.viewFilter(filter.key), mtdb_trace);
        DeletionInfo returnDeletionInfo = container.deletionInfo();
        try
        {
            // Memtables hold the newest data, so they are merged before any sstable.
            Tracing.trace("Merging memtable contents");
            for (Memtable memtable : view.memtables)
            {
                if (mtdb_trace) {
                    logger.warn("MTDB: {}", memtable);
                }
                ColumnFamily cf = memtable.getColumnFamily(filter.key);
                if (cf != null)
                {
                    if (mtdb_trace) {
                        logger.warn("MTDB: {}", cf);
                    }
                    filter.delete(container.deletionInfo(), cf);
                    isEmpty = false;
                    Iterator<Cell> iter = filter.getIterator(cf);
                    while (iter.hasNext())
                    {
                        Cell cell = iter.next();
                        if (copyOnHeap)
                            cell = cell.localCopy(cfs.metadata, HeapAllocator.instance);
                        container.addColumn(cell);
                        if (mtdb_trace) {
                            logger.warn("MTDB: {}", container);
                        }
                    }
                }
            }

            // avoid changing the filter columns of the original filter
            // (reduceNameFilter removes columns that are known to be irrelevant)
            NamesQueryFilter namesFilter = (NamesQueryFilter) filter.filter;
            TreeSet<CellName> filterColumns = new TreeSet<>(namesFilter.columns);
            QueryFilter reducedFilter = new QueryFilter(filter.key, filter.cfName, namesFilter.withUpdatedColumns(filterColumns), filter.timestamp);
            if (mtdb_trace) {
                logger.warn("MTDB: reducedFilter={} filter.timestamp={}", reducedFilter, filter.timestamp);
            }

            /* add the SSTables on disk */
            // Hobin: sstables with bigger gen (younger sstables) first
            Collections.sort(view.sstables, SSTableReader.maxTimestampComparator);
            if (mtdb_trace) {
                for (SSTableReader sstable : view.sstables)
                {
                    logger.warn("MTDB: sstable {} max_ts {} min_ts {}"
                            , sstable.descriptor.generation
                            , sstable.getMaxTimestamp()
                            , sstable.getMinTimestamp()
                            );
                }
            }

            // read sorted sstables
            // Hobin: view.sstables contains all sstables
            for (SSTableReader sstable : view.sstables)
            {
                if (mtdb_trace) {
                    logger.warn("MTDB: sstable {}", sstable.descriptor.generation);
                }
                // if we've already seen a row tombstone with a timestamp greater
                // than the most recent update to this sstable, we're done, since the rest of the sstables
                // will also be older
                if (sstable.getMaxTimestamp() < returnDeletionInfo.getTopLevelDeletion().markedForDeleteAt)
                    break;

                // Hobin: max ts makes sense, not min ts. You want a guarantee
                // that a SSTable doesn't have a key with a ts bigger than a
                // value.
                long currentMaxTs = sstable.getMaxTimestamp();
                // Drop requested columns that already have data newer than this sstable
                // could possibly provide.
                reduceNameFilter(reducedFilter, container, currentMaxTs);
                // Hobin: reducedFilter.filter.columns becomes empty after
                // checking with the currentMaxTs
                if (((NamesQueryFilter) reducedFilter.filter).columns.isEmpty())
                    break;

                if (mtdb_trace) {
                    logger.warn("MTDB: sstable {}", sstable.descriptor.generation);
                }
                Tracing.trace("Merging data from sstable {}", sstable.descriptor.generation);
                sstable.incrementReadCount();
                OnDiskAtomIterator iter = reducedFilter.getSSTableColumnIterator(sstable);
                // Iterators are collected so the finally block can close them all even if
                // merging throws part-way through.
                iterators.add(iter);
                isEmpty = false;
                if (mtdb_trace) {
                    //MemSsTableAccessMon.Update(this, readMeter.count());
                    MemSsTableAccessMon.Update(sstable);
                    // iter.getColumnFamily() == null means the sstable doesn't
                    // have the requested data. and probably by the bloom
                    // filter said so.
                    logger.warn("MTDB: sstable {} read_meter_count={} false_positive={} true_positive={} iter.getColumnFamily()={}"
                            , sstable.descriptor.generation
                            , sstable.getReadMeter().count()
                            , sstable.getBloomFilterFalsePositiveCount()
                            , sstable.getBloomFilterTruePositiveCount()
                            , iter.getColumnFamily()
                            );
                }
                if (iter.getColumnFamily() != null)
                {
                    //if (mtdb_trace) {
                    //  logger.warn("MTDB: {} {}", container, iter.getColumnFamily());
                    //}
                    container.delete(iter.getColumnFamily());
                    //if (mtdb_trace) {
                    //  logger.warn("MTDB: {}", container);
                    //}
                    sstablesIterated++;
                    // Seems like adding columns one by one
                    while (iter.hasNext()) {
                        container.addAtom(iter.next());
                        //if (mtdb_trace) {
                        //  logger.warn("MTDB: {}", container);
                        //}
                    }
                }
            }

            // we need to distinguish between "there is no data at all for this row" (BF will let us rebuild that efficiently)
            // and "there used to be data, but it's gone now" (we should cache the empty CF so we don't need to rebuild that slower)
            if (isEmpty)
                return null;

            // do a final collate. toCollate is boilerplate required to provide a CloseableIterator
            ColumnFamily returnCF = container.cloneMeShallow();
            Tracing.trace("Collating all results");
            filter.collateOnDiskAtom(returnCF, container.iterator(), gcBefore);

            // "hoist up" the requested data into a more recent sstable
            if (sstablesIterated > cfs.getMinimumCompactionThreshold()
                && !cfs.isAutoCompactionDisabled()
                && cfs.getCompactionStrategy().shouldDefragment())
            {
                if (mtdb_trace) {
                    logger.warn("MTDB: what is \"hoist up\"?");
                }
                // !!WARNING!! if we stop copying our data to a heap-managed object,
                // we will need to track the lifetime of this mutation as well
                Tracing.trace("Defragmenting requested data");
                final Mutation mutation = new Mutation(cfs.keyspace.getName(), filter.key.getKey(), returnCF.cloneMe());
                StageManager.getStage(Stage.MUTATION).execute(new Runnable()
                {
                    public void run()
                    {
                        // skipping commitlog and index updates is fine since we're just de-fragmenting existing data
                        Keyspace.open(mutation.getKeyspaceName()).apply(mutation, false, false);
                    }
                });
            }

            // Caller is responsible for final removeDeletedCF. This is important for cacheRow to work correctly:
            return returnCF;
        }
        finally
        {
            for (OnDiskAtomIterator iter : iterators)
                FileUtils.closeQuietly(iter);
        }
    }
/**
* remove columns from @param filter where we already have data in @param container newer than @param sstableTimestamp
*/
private void reduceNameFilter(QueryFilter filter, ColumnFamily container, long sstableTimestamp)
{
if (container == null)
return;
SearchIterator<CellName, Cell> searchIter = container.searchIterator();
for (Iterator<CellName> iterator = ((NamesQueryFilter) filter.filter).columns.iterator(); iterator.hasNext() && searchIter.hasNext(); )
{
CellName filterColumn = iterator.next();
Cell cell = searchIter.next(filterColumn);
if (cell != null && cell.timestamp() > sstableTimestamp)
iterator.remove();
}
}
/**
 * Collects data the brute-force way: gets an iterator for the filter in question
 * from every memtable and sstable, then merges them together.
 *
 * @param copyOnHeap whether memtable cells must be copied onto the heap before being returned
 * @param mtdb_trace whether to record per-memtable/sstable access statistics via MemSsTableAccessMon
 * @return the merged ColumnFamily for the filtered row, or {@code null} if no source held any data
 */
private ColumnFamily collectAllData(boolean copyOnHeap, boolean mtdb_trace)
{
    //if (mtdb_trace)
    //    logger.warn("MTDB:");
    Tracing.trace("Acquiring sstable references");
    ColumnFamilyStore.ViewFragment view = cfs.select(cfs.viewFilter(filter.key), mtdb_trace);
    // Hobin: sometimes view has a smaller number of SSTables, probably by
    // some kind of filtering using cache.
    List<Iterator<? extends OnDiskAtom>> iterators = new ArrayList<>(Iterables.size(view.memtables) + view.sstables.size());
    ColumnFamily returnCF = ArrayBackedSortedColumns.factory.create(cfs.metadata, filter.filter.isReversed());
    DeletionInfo returnDeletionInfo = returnCF.deletionInfo();
    try
    {
        Tracing.trace("Merging memtable tombstones");
        for (Memtable memtable : view.memtables)
        {
            // I wonder why
            // select * from table1 where key=0; takes this path, but
            // select * from table1; doesn't.
            final ColumnFamily cf = memtable.getColumnFamily(filter.key);
            if (mtdb_trace) {
                //logger.warn("MTDB: memtable={} filter.key={}", memtable, filter.key);
                MemSsTableAccessMon.Update(memtable, cf);
            }
            if (cf != null)
            {
                // Fold this memtable's deletion info into the result before reading cells.
                filter.delete(returnDeletionInfo, cf);
                Iterator<Cell> iter = filter.getIterator(cf);
                //if (mtdb_trace) {
                //    logger.warn("MTDB: cf={} filter={} iter={} copyOnHeap={}", cf, filter, iter, copyOnHeap);
                //}
                if (copyOnHeap)
                {
                    // Wrap the iterator so each cell is copied on-heap lazily as it is consumed.
                    iter = Iterators.transform(iter, new Function<Cell, Cell>()
                    {
                        public Cell apply(Cell cell)
                        {
                            return cell.localCopy(cf.metadata, HeapAllocator.instance);
                        }
                    });
                }
                iterators.add(iter);
            }
        }
        /*
         * We can't eliminate full sstables based on the timestamp of what we've already read like
         * in collectTimeOrderedData, but we still want to eliminate sstable whose maxTimestamp < mostRecentTombstone
         * we've read. We still rely on the sstable ordering by maxTimestamp since if
         *   maxTimestamp_s1 > maxTimestamp_s0,
         * we're guaranteed that s1 cannot have a row tombstone such that
         *   timestamp(tombstone) > maxTimestamp_s0
         * since we necessarily have
         *   timestamp(tombstone) <= maxTimestamp_s1
         * In other words, iterating in maxTimestamp order allow to do our mostRecentTombstone elimination
         * in one pass, and minimize the number of sstables for which we read a rowTombstone.
         */
        Collections.sort(view.sstables, SSTableReader.maxTimestampComparator);
        List<SSTableReader> skippedSSTables = null;
        long minTimestamp = Long.MAX_VALUE;
        int nonIntersectingSSTables = 0;
        for (SSTableReader sstable : view.sstables)
        {
            minTimestamp = Math.min(minTimestamp, sstable.getMinTimestamp());
            // if we've already seen a row tombstone with a timestamp greater
            // than the most recent update to this sstable, we can skip it
            // (and, because of the maxTimestamp sort above, every remaining sstable too).
            if (sstable.getMaxTimestamp() < returnDeletionInfo.getTopLevelDeletion().markedForDeleteAt) {
                //if (mtdb_trace)
                //    logger.warn("MTDB: sstable={}", sstable);
                break;
            } else {
                //if (mtdb_trace)
                //    logger.warn("MTDB: sstable={}", sstable);
            }
            if (!filter.shouldInclude(sstable))
            {
                nonIntersectingSSTables++;
                // sstable contains no tombstone if maxLocalDeletionTime == Integer.MAX_VALUE, so we can safely skip those entirely
                if (sstable.getSSTableMetadata().maxLocalDeletionTime != Integer.MAX_VALUE)
                {
                    if (skippedSSTables == null)
                        skippedSSTables = new ArrayList<>();
                    skippedSSTables.add(sstable);
                }
                continue;
            }
            sstable.incrementReadCount();
            OnDiskAtomIterator iter = filter.getSSTableColumnIterator(sstable);
            iterators.add(iter);
            if (mtdb_trace) {
                MemSsTableAccessMon.Update(sstable);
                // iter.getColumnFamily() == null means the sstable doesn't
                // have the requested data. and probably by the bloom
                // filter said so.
                //logger.warn("MTDB: sstable {} read_meter_count={} false_positive={} true_positive={} iter.getColumnFamily()={}"
                //        , sstable.descriptor.generation
                //        , sstable.getReadMeter().count()
                //        , sstable.getBloomFilterFalsePositiveCount()
                //        , sstable.getBloomFilterTruePositiveCount()
                //        , iter.getColumnFamily()
                //        );
            }
            if (iter.getColumnFamily() != null)
            {
                ColumnFamily cf = iter.getColumnFamily();
                returnCF.delete(cf);
                sstablesIterated++;
            }
        }
        int includedDueToTombstones = 0;
        // Check for row tombstone in the skipped sstables
        if (skippedSSTables != null)
        {
            for (SSTableReader sstable : skippedSSTables)
            {
                if (sstable.getMaxTimestamp() <= minTimestamp)
                    continue;
                sstable.incrementReadCount();
                OnDiskAtomIterator iter = filter.getSSTableColumnIterator(sstable);
                ColumnFamily cf = iter.getColumnFamily();
                // we are only interested in row-level tombstones here, and only if markedForDeleteAt is larger than minTimestamp
                if (cf != null && cf.deletionInfo().getTopLevelDeletion().markedForDeleteAt > minTimestamp)
                {
                    includedDueToTombstones++;
                    iterators.add(iter);
                    returnCF.delete(cf.deletionInfo().getTopLevelDeletion());
                    sstablesIterated++;
                }
                else
                {
                    // No relevant tombstone: release the iterator instead of merging it.
                    FileUtils.closeQuietly(iter);
                }
            }
        }
        if (Tracing.isTracing())
            Tracing.trace("Skipped {}/{} non-slice-intersecting sstables, included {} due to tombstones",
                          nonIntersectingSSTables, view.sstables.size(), includedDueToTombstones);
        // we need to distinguish between "there is no data at all for this row" (BF will let us rebuild that efficiently)
        // and "there used to be data, but it's gone now" (we should cache the empty CF so we don't need to rebuild that slower)
        if (iterators.isEmpty())
            return null;
        Tracing.trace("Merging data from memtables and {} sstables", sstablesIterated);
        filter.collateOnDiskAtom(returnCF, iterators, gcBefore);
        // Caller is responsible for final removeDeletedCF. This is important for cacheRow to work correctly:
        return returnCF;
    }
    finally
    {
        // Close every closeable source iterator even if collation throws.
        for (Object iter : iterators)
            if (iter instanceof Closeable)
                FileUtils.closeQuietly((Closeable) iter);
    }
}
/**
 * Returns the number of sstables that contributed data during the collect calls
 * performed by this instance.
 */
public int getSstablesIterated()
{
    return this.sstablesIterated;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.security.user;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.jcr.RepositoryException;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.spi.security.user.UserConstants;
import org.apache.jackrabbit.oak.plugins.memory.PropertyBuilder;
import com.google.common.collect.Iterators;
import static org.apache.jackrabbit.oak.api.Type.NAME;
/**
 * Writes the membership information of a group.
 *
 * @see MembershipProvider for more details.
 */
public class MembershipWriter {
    public static final int DEFAULT_MEMBERSHIP_THRESHOLD = 100;
    /**
     * size of the membership threshold after which a new overflow node is created.
     */
    private int membershipSizeThreshold = DEFAULT_MEMBERSHIP_THRESHOLD;
    public void setMembershipSizeThreshold(int membershipSizeThreshold) {
        this.membershipSizeThreshold = membershipSizeThreshold;
    }
    /**
     * Adds a new member to the given {@code groupTree}.
     *
     * @param groupTree the group to add the member to
     * @param memberContentId the id of the new member
     * @return {@code true} if the member was added
     * @throws RepositoryException if an error occurs
     */
    boolean addMember(Tree groupTree, String memberContentId) throws RepositoryException {
        // Delegate to the batch variant; an empty "failed" set means success.
        Map<String, String> m = Maps.newHashMapWithExpectedSize(1);
        m.put(memberContentId, "-");
        return addMembers(groupTree, m).isEmpty();
    }
    /**
     * Adds new members to the given {@code groupTree}.
     *
     * @param groupTree the group to add the members to
     * @param memberIds the ids of the new members as map of 'contentId':'memberId'
     * @return the set of member IDs that was not successfully processed
     *         (i.e. ids that were already members of the group).
     * @throws RepositoryException if an error occurs
     */
    Set<String> addMembers(@Nonnull Tree groupTree, @Nonnull Map<String, String> memberIds) throws RepositoryException {
        // check all possible rep:members properties for the new member and also find the one with the least values
        Tree membersList = groupTree.getChild(UserConstants.REP_MEMBERS_LIST);
        // Scan the group tree itself first, then every overflow ref-node.
        Iterator<Tree> trees = Iterators.concat(
                Iterators.singletonIterator(groupTree),
                membersList.getChildren().iterator()
        );
        Set<String> failed = new HashSet<String>(memberIds.size());
        int bestCount = membershipSizeThreshold;
        PropertyState bestProperty = null;
        Tree bestTree = null;
        // remove existing memberIds from the map and find best-matching tree
        // for the insertion of the new members.
        while (trees.hasNext() && !memberIds.isEmpty()) {
            Tree t = trees.next();
            PropertyState refs = t.getProperty(UserConstants.REP_MEMBERS);
            if (refs != null) {
                int numRefs = 0;
                for (String ref : refs.getValue(Type.WEAKREFERENCES)) {
                    // An id found here is already a member: report it as "failed".
                    String id = memberIds.remove(ref);
                    if (id != null) {
                        failed.add(id);
                        if (memberIds.isEmpty()) {
                            break;
                        }
                    }
                    numRefs++;
                }
                // Track the property with the fewest refs (below the threshold)
                // as the preferred insertion point.
                if (numRefs < bestCount) {
                    bestCount = numRefs;
                    bestProperty = refs;
                    bestTree = t;
                }
            }
        }
        // update member content structure by starting inserting new member IDs
        // with the best-matching property and create new member-ref-nodes as needed.
        if (!memberIds.isEmpty()) {
            PropertyBuilder<String> propertyBuilder;
            int propCnt;
            if (bestProperty == null) {
                // we don't have a good candidate to store the new members.
                // so there are no members at all or all are full
                if (!groupTree.hasProperty(UserConstants.REP_MEMBERS)) {
                    bestTree = groupTree;
                } else {
                    bestTree = createMemberRefTree(groupTree, membersList);
                }
                propertyBuilder = PropertyBuilder.array(Type.WEAKREFERENCE, UserConstants.REP_MEMBERS);
                propCnt = 0;
            } else {
                propertyBuilder = PropertyBuilder.copy(Type.WEAKREFERENCE, bestProperty);
                propCnt = bestCount;
            }
            // if adding all new members to best-property would exceed the threshold
            // the new ids need to be distributed to different member-ref-nodes
            // for simplicity this is achieved by introducing new tree(s)
            if ((propCnt + memberIds.size()) > membershipSizeThreshold) {
                while (!memberIds.isEmpty()) {
                    Set<String> s = new HashSet<String>();
                    Iterator<String> it = memberIds.keySet().iterator();
                    // Fill the current property up to the threshold, draining memberIds.
                    while (propCnt < membershipSizeThreshold && it.hasNext()) {
                        s.add(it.next());
                        it.remove();
                        propCnt++;
                    }
                    propertyBuilder.addValues(s);
                    bestTree.setProperty(propertyBuilder.getPropertyState());
                    if (it.hasNext()) {
                        // continue filling the next (new) node + propertyBuilder pair
                        propCnt = 0;
                        bestTree = createMemberRefTree(groupTree, membersList);
                        propertyBuilder = PropertyBuilder.array(Type.WEAKREFERENCE, UserConstants.REP_MEMBERS);
                    }
                }
            } else {
                // Everything fits into the chosen property.
                propertyBuilder.addValues(memberIds.keySet());
                bestTree.setProperty(propertyBuilder.getPropertyState());
            }
        }
        return failed;
    }
    /**
     * Creates a new member-reference node below the members list, creating the
     * list node itself on first use.
     */
    private static Tree createMemberRefTree(@Nonnull Tree groupTree, @Nonnull Tree membersList) {
        if (!membersList.exists()) {
            membersList = groupTree.addChild(UserConstants.REP_MEMBERS_LIST);
            membersList.setProperty(JcrConstants.JCR_PRIMARYTYPE, UserConstants.NT_REP_MEMBER_REFERENCES_LIST, NAME);
        }
        Tree refTree = membersList.addChild(nextRefNodeName(membersList));
        refTree.setProperty(JcrConstants.JCR_PRIMARYTYPE, UserConstants.NT_REP_MEMBER_REFERENCES, NAME);
        return refTree;
    }
    /**
     * Returns the first unused numeric child name ("0", "1", ...) of the members list.
     */
    private static String nextRefNodeName(@Nonnull Tree membersList) {
        // keep node names linear
        int i = 0;
        String name = String.valueOf(i);
        while (membersList.hasChild(name)) {
            name = String.valueOf(++i);
        }
        return name;
    }
    /**
     * Removes the member from the given group.
     *
     * @param groupTree group to remove the member from
     * @param memberContentId member to remove
     * @return {@code true} if the member was removed.
     */
    boolean removeMember(@Nonnull Tree groupTree, @Nonnull String memberContentId) {
        Map<String, String> m = Maps.newHashMapWithExpectedSize(1);
        m.put(memberContentId, "-");
        return removeMembers(groupTree, m).isEmpty();
    }
    /**
     * Removes the members from the given group.
     *
     * @param groupTree group to remove the member from
     * @param memberIds Map of 'contentId':'memberId' of all members that need to be removed.
     * @return the set of member IDs that was not successfully processed
     *         (i.e. ids that were not members of the group).
     */
    Set<String> removeMembers(@Nonnull Tree groupTree, @Nonnull Map<String, String> memberIds) {
        Tree membersList = groupTree.getChild(UserConstants.REP_MEMBERS_LIST);
        // Scan the group tree itself first, then every overflow ref-node.
        Iterator<Tree> trees = Iterators.concat(
                Iterators.singletonIterator(groupTree),
                membersList.getChildren().iterator()
        );
        while (trees.hasNext() && !memberIds.isEmpty()) {
            Tree t = trees.next();
            PropertyState refs = t.getProperty(UserConstants.REP_MEMBERS);
            if (refs != null) {
                PropertyBuilder<String> prop = PropertyBuilder.copy(Type.WEAKREFERENCE, refs);
                Iterator<Map.Entry<String,String>> it = memberIds.entrySet().iterator();
                while (it.hasNext() && !prop.isEmpty()) {
                    String memberContentId = it.next().getKey();
                    if (prop.hasValue(memberContentId)) {
                        prop.removeValue(memberContentId);
                        it.remove();
                    }
                }
                if (prop.isEmpty()) {
                    // A drained rep:members property is removed entirely; an
                    // empty overflow node is removed with it.
                    if (t == groupTree) {
                        t.removeProperty(UserConstants.REP_MEMBERS);
                    } else {
                        t.remove();
                    }
                } else {
                    t.setProperty(prop.getPropertyState());
                }
            }
        }
        // Whatever is left in the map was never a member of this group.
        return Sets.newHashSet(memberIds.values());
    }
}
| |
package com.gokuai.library.util;
import com.gokuai.base.utils.Util;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public final class EmojiMapUtil {
/**
 * Regular expression pattern to match RFC 1738 URLs.
 * Compiled once and cached as a constant, since it is reused for every call.
 * List accurate as of 2013/12/18. List taken from:
 * http://data.iana.org/TLD/tlds-alpha-by-domain.txt
 * This pattern is auto-generated by //device/tools/make-iana-tld-pattern.py
 */
public static final Pattern WEB_URL_PATTERN
        = Pattern.compile(
        "((?:(http|https|Http|Https):\\/\\/(?:(?:[a-zA-Z0-9\\$\\-\\_\\.\\+\\!\\*\\'\\(\\)"
                + "\\,\\;\\?\\&\\=]|(?:\\%[a-fA-F0-9]{2})){1,64}(?:\\:(?:[a-zA-Z0-9\\$\\-\\_"
                + "\\.\\+\\!\\*\\'\\(\\)\\,\\;\\?\\&\\=]|(?:\\%[a-fA-F0-9]{2})){1,25})?\\@)?)?"
                + "((?:(?:[a-zA-Z0-9][a-zA-Z0-9\\-]{0,64}\\.)+" // named host
                + "(?:" // plus top level domain
                + "(?:aero|arpa|asia|a[cdefgilmnoqrstuwxz])"
                + "|(?:biz|b[abdefghijmnorstvwyz])"
                + "|(?:cat|com|coop|c[acdfghiklmnorsuvxyz])"
                + "|d[dejkmoz]"
                + "|(?:edu|e[ceghrstu])"
                + "|f[ijkmor]"
                + "|(?:gov|g[abdefghilmnpqrstuwy])"
                + "|h[kmnrtu]"
                + "|(?:info|int|i[delmnoqrst])"
                + "|(?:jobs|j[emop])"
                + "|k[eghimnprwyz]"
                + "|l[abcikrstuvy]"
                + "|(?:mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])"
                + "|(?:name|net|n[acefgilopruz])"
                + "|(?:org|om)"
                + "|(?:post|pro|p[aefghklmnrstwy])"
                + "|qa"
                + "|r[eosuw]"
                + "|s[abcdeghijklmnorstuvxyz]"
                + "|(?:tel|travel|t[cdfghjklmnoprtvwz])"
                + "|u[agkmsyz]"
                + "|v[aceginu]"
                + "|w[fs]"
                + "|xxx"
                + "|y[etu]"
                + "|z[amw]))"
                + "|(?:(?:25[0-5]|2[0-4]" // or ip address
                + "[0-9]|[0-1][0-9]{2}|[1-9][0-9]|[1-9])\\.(?:25[0-5]|2[0-4][0-9]"
                + "|[0-1][0-9]{2}|[1-9][0-9]|[1-9]|0)\\.(?:25[0-5]|2[0-4][0-9]|[0-1]"
                + "[0-9]{2}|[1-9][0-9]|[1-9]|0)\\.(?:25[0-5]|2[0-4][0-9]|[0-1][0-9]{2}"
                + "|[1-9][0-9]|[0-9])))"
                + "(?:\\:\\d{1,5})?)" // plus option port number
                + "(\\/(?:(?:[a-zA-Z0-9\\;\\/\\?\\:\\@\\&\\=\\#\\~" // plus option query params
                + "\\-\\.\\+\\!\\*\\'\\(\\)\\,\\_])|(?:\\%[a-fA-F0-9]{2}))*)?"
                + "(?:\\b|$)"); // and finally, a word boundary or end of
// input. This is to stop foo.sure from
// matching as foo.su
/**
 * Replaces emoji-cheat-sheet codes (e.g. {@code :smile:}) in {@code s} with their
 * unicode equivalents, skipping any code that overlaps a detected URL.
 *
 * @param s the text to convert; may be {@code null} or empty
 * @return the converted text, or {@code ""} if {@code s} is null/empty
 */
public static String replaceCheatSheetEmojis(String s) {
    if (Util.isEmpty(s)) {
        return "";
    }
    List<int[]> linkRanges = getLinksRanges(s);
    // Collect every colon-delimited candidate that does not overlap a link.
    // Candidates are front-inserted so they are processed right-to-left below,
    // keeping earlier match offsets valid while the string is rewritten.
    List<int[]> candidates = new ArrayList<int[]>();
    Matcher colonMatcher = COLON_REGEX.matcher(s);
    nextMatch:
    while (colonMatcher.find()) {
        int from = colonMatcher.start();
        int to = colonMatcher.end();
        for (int[] link : linkRanges) {
            if (from <= link[1] && link[0] <= to) {
                continue nextMatch;
            }
        }
        candidates.add(0, new int[]{from, to});
    }
    for (int[] candidate : candidates) {
        String code = s.substring(candidate[0], candidate[1]);
        String unicode = CHEAT_SHEET_TO_UNICODE.get(code);
        if (!Util.isEmpty(unicode)) {
            StringBuilder rebuilt = new StringBuilder(s.substring(0, candidate[0])).append(unicode);
            if (candidate[1] <= s.length()) {
                rebuilt.append(s.substring(candidate[1]));
            }
            s = rebuilt.toString();
        }
    }
    return s;
}
/**
 * Returns the [start, end) offsets of every substring of {@code s} matching
 * {@code WEB_URL_PATTERN}.
 */
public static List<int[]> getLinksRanges(String s) {
    List<int[]> ranges = new ArrayList<int[]>();
    for (Matcher m = WEB_URL_PATTERN.matcher(s); m.find(); ) {
        ranges.add(new int[]{m.start(), m.end()});
    }
    return ranges;
}
/**
 * Replaces instances of Emoji unicode characters with their Emoji-Cheat sheet key.
 *
 * Rewritten as a single left-to-right pass over code points: the previous version
 * called {@code String.replace} on the whole string at every scanned index
 * (quadratic work, re-scanning its own replacements) and paired surrogates by hand.
 * Cheat-sheet keys are plain ASCII, so appended replacements can never themselves
 * match a map entry, making the single pass equivalent.
 *
 * @param s the text to convert; may be {@code null} or empty
 * @return the converted text, or {@code ""} if {@code s} is null/empty
 */
public static String replaceUnicodeEmojis(String s) {
    if (Util.isEmpty(s)) {
        return "";
    }
    StringBuilder out = new StringBuilder(s.length());
    int i = 0;
    while (i < s.length()) {
        // Take a full code point so surrogate pairs are looked up as a unit;
        // an unpaired surrogate falls through as a single char, as before.
        String key = new String(Character.toChars(s.codePointAt(i)));
        String cheatSheet = UNICODE_TO_CHEAT_SHEET.get(key);
        out.append(cheatSheet != null ? cheatSheet : key);
        i += key.length();
    }
    return out.toString();
}
// Matches emoji-cheat-sheet style codes such as ":smile:" (1-31 allowed chars between colons).
private static final Pattern COLON_REGEX = Pattern.compile(":[a-z0-9+_-]{1,31}:");
// Bidirectional lookup tables; presumably populated from UNICODE_MAPPING below
// by static-initializer code outside this view — TODO confirm.
private static final Map<String, String> UNICODE_TO_CHEAT_SHEET = new HashMap<String, String>();
private static final Map<String, String> CHEAT_SHEET_TO_UNICODE = new HashMap<String, String>();
private final static String[][] UNICODE_MAPPING = new String[][]{
{":airplane:", "\u2708"},
{":alarm_clock:", "\u23F0"},
{":anchor:", "\u2693"},
{":aquarius:", "\u2652"},
{":aries:", "\u2648"},
{":arrow_backward:", "\u25C0"},
{":arrow_double_down:", "\u23EC"},
{":arrow_double_up:", "\u23EB"},
{":arrow_down:", "\u2B07"},
{":arrow_forward:", "\u25B6"},
{":arrow_heading_down:", "\u2935"},
{":arrow_heading_up:", "\u2934"},
{":arrow_left:", "\u2B05"},
{":arrow_lower_left:", "\u2199"},
{":arrow_lower_right:", "\u2198"},
{":arrow_right:", "\u27A1"},
{":arrow_right_hook:", "\u21AA"},
{":arrow_up:", "\u2B06"},
{":arrow_up_down:", "\u2195"},
{":arrow_upper_left:", "\u2196"},
{":arrow_upper_right:", "\u2197"},
{":ballot_box_with_check:", "\u2611"},
{":bangbang:", "\u203C"},
{":cancer:", "\u264B"},
{":baseball:", "\u26BE"},
{":black_large_square:", "\u2B1B"},
{":black_medium_small_square:", "\u25FE"},
{":black_medium_square:", "\u25FC"},
{":black_nib:", "\u2712"},
{":black_small_square:", "\u25AA"},
{":black_circle:", "\u26AB"},
{":boat:", "\u26F5"},
{":capricorn:", "\u2651"},
{":church:", "\u26EA"},
{":cloud:", "\u2601"},
{":clubs:", "\u2663"},
{":coffee:", "\u2615"},
{":congratulations:", "\u3297"},
{":copyright:", "\u00A9"},
{":curly_loop:", "\u27B0"},
{":eight_pointed_black_star:", "\u2734"},
{":eight_spoked_asterisk:", "\u2733"},
{":diamonds:", "\u2666"},
{":email:", "\u2709"},
{":envelope:", "\u2709"},
{":exclamation:", "\u2757"},
{":fast_forward:", "\u23E9"},
{":fist:", "\u270A"},
{":fountain:", "\u26F2"},
{":fuelpump:", "\u26FD"},
{":gemini:", "\u264A"},
{":golf:", "\u26F3"},
{":grey_exclamation:", "\u2755"},
{":grey_question:", "\u2754"},
{":hand:", "\u270B"},
{":heart:", "\u2764"},
{":hearts:", "\u2665"},
{":heavy_check_mark:", "\u2714"},
{":heavy_division_sign:", "\u2797"},
{":heavy_exclamation_mark:", "\u2757"},
{":heavy_minus_sign:", "\u2796"},
{":heavy_multiplication_x:", "\u2716"},
{":heavy_plus_sign:", "\u2795"},
{":hotsprings:", "\u2668"},
{":hourglass:", "\u231B"},
{":hourglass_flowing_sand:", "\u23F3"},
{":information_source:", "\u2139"},
{":interrobang:", "\u2049"},
{":left_right_arrow:", "\u2194"},
{":leftwards_arrow_with_hook:", "\u21A9"},
{":leo:", "\u264C"},
{":libra:", "\u264E"},
{":loop:", "\u27BF"},
{":m:", "\u24C2"},
{":negative_squared_cross_mark:", "\u274E"},
{":no_entry:", "\u26D4"},
{":o:", "\u2B55"},
{":ophiuchus:", "\u26CE"},
{":part_alternation_mark:", "\u303D"},
{":partly_sunny:", "\u26C5"},
{":pencil2:", "\u270F"},
{":phone:", "\u260E"},
{":pisces:", "\u2653"},
{":point_up:", "\u261D"},
{":question:", "\u2753"},
{":raised_hand:", "\u270B"},
{":recycle:", "\u267B"},
{":registered:", "\u00AE"},
{":relaxed:", "\u263A"},
{":rewind:", "\u23EA"},
{":sagittarius:", "\u2650"},
{":sailboat:", "\u26F5"},
{":scissors:", "\u2702"},
{":scorpius:", "\u264F"},
{":secret:", "\u3299"},
{":snowflake:", "\u2744"},
{":snowman:", "\u26C4"},
{":soccer:", "\u26BD"},
{":spades:", "\u2660"},
{":sparkle:", "\u2747"},
{":sparkles:", "\u2728"},
{":star:", "\u2B50"},
{":sunny:", "\u2600"},
{":taurus:", "\u2649"},
{":telephone:", "\u260E"},
{":tent:", "\u26FA"},
{":tm:", "\u2122"},
{":umbrella:", "\u2614"},
{":v:", "\u270C"},
{":virgo:", "\u264D"},
{":warning:", "\u26A0"},
{":watch:", "\u231A"},
{":wavy_dash:", "\u3030"},
{":wheelchair:", "\u267F"},
{":white_check_mark:", "\u2705"},
{":white_circle:", "\u26AA"},
{":white_large_square:", "\u2B1C"},
{":white_medium_small_square:", "\u25FD"},
{":white_medium_square:", "\u25FB"},
{":white_small_square:", "\u25AB"},
{":x:", "\u274C"},
{":zap:", "\u26A1"},
{":+1:", new String(Character.toChars(0x1F44D))},
{":-1:", new String(Character.toChars(0x1F44E))},
{":100:", new String(Character.toChars(0x1F4AF))},
{":1234:", new String(Character.toChars(0x1F522))},
{":8ball:", new String(Character.toChars(0x1F3B1))},
{":a:", new String(Character.toChars(0x1F170))},
{":ab:", new String(Character.toChars(0x1F18E))},
{":abc:", new String(Character.toChars(0x1F524))},
{":abcd:", new String(Character.toChars(0x1F521))},
{":accept:", new String(Character.toChars(0x1F251))},
{":aerial_tramway:", new String(Character.toChars(0x1F6A1))},
{":alien:", new String(Character.toChars(0x1F47D))},
{":ambulance:", new String(Character.toChars(0x1F691))},
{":angel:", new String(Character.toChars(0x1F47C))},
{":anger:", new String(Character.toChars(0x1F4A2))},
{":angry:", new String(Character.toChars(0x1F620))},
{":anguished:", new String(Character.toChars(0x1F627))},
{":ant:", new String(Character.toChars(0x1F41C))},
{":apple:", new String(Character.toChars(0x1F34E))},
{":arrow_down_small:", new String(Character.toChars(0x1F53D))},
{":arrow_up_small:", new String(Character.toChars(0x1F53C))},
{":arrows_clockwise:", new String(Character.toChars(0x1F503))},
{":arrows_counterclockwise:", new String(Character.toChars(0x1F504))},
{":art:", new String(Character.toChars(0x1F3A8))},
{":articulated_lorry:", new String(Character.toChars(0x1F69B))},
{":astonished:", new String(Character.toChars(0x1F632))},
{":athletic_shoe:", new String(Character.toChars(0x1F45F))},
{":atm:", new String(Character.toChars(0x1F3E7))},
{":b:", new String(Character.toChars(0x1F171))},
{":baby:", new String(Character.toChars(0x1F476))},
{":baby_bottle:", new String(Character.toChars(0x1F37C))},
{":baby_chick:", new String(Character.toChars(0x1F424))},
{":baby_symbol:", new String(Character.toChars(0x1F6BC))},
{":back:", new String(Character.toChars(0x1F519))},
{":baggage_claim:", new String(Character.toChars(0x1F6C4))},
{":balloon:", new String(Character.toChars(0x1F388))},
{":bamboo:", new String(Character.toChars(0x1F38D))},
{":banana:", new String(Character.toChars(0x1F34C))},
{":bank:", new String(Character.toChars(0x1F3E6))},
{":bar_chart:", new String(Character.toChars(0x1F4CA))},
{":barber:", new String(Character.toChars(0x1F488))},
{":basketball:", new String(Character.toChars(0x1F3C0))},
{":bath:", new String(Character.toChars(0x1F6C0))},
{":bathtub:", new String(Character.toChars(0x1F6C1))},
{":battery:", new String(Character.toChars(0x1F50B))},
{":bear:", new String(Character.toChars(0x1F43B))},
{":bee:", new String(Character.toChars(0x1F41D))},
{":beer:", new String(Character.toChars(0x1F37A))},
{":beers:", new String(Character.toChars(0x1F37B))},
{":beetle:", new String(Character.toChars(0x1F41E))},
{":beginner:", new String(Character.toChars(0x1F530))},
{":bell:", new String(Character.toChars(0x1F514))},
{":bento:", new String(Character.toChars(0x1F371))},
{":bicyclist:", new String(Character.toChars(0x1F6B4))},
{":bike:", new String(Character.toChars(0x1F6B2))},
{":bikini:", new String(Character.toChars(0x1F459))},
{":bird:", new String(Character.toChars(0x1F426))},
{":birthday:", new String(Character.toChars(0x1F382))},
{":black_joker:", new String(Character.toChars(0x1F0CF))},
{":black_square_button:", new String(Character.toChars(0x1F532))},
{":blossom:", new String(Character.toChars(0x1F33C))},
{":blowfish:", new String(Character.toChars(0x1F421))},
{":blue_book:", new String(Character.toChars(0x1F4D8))},
{":blue_car:", new String(Character.toChars(0x1F699))},
{":blue_heart:", new String(Character.toChars(0x1F499))},
{":blush:", new String(Character.toChars(0x1F60A))},
{":boar:", new String(Character.toChars(0x1F417))},
{":bomb:", new String(Character.toChars(0x1F4A3))},
{":book:", new String(Character.toChars(0x1F4D6))},
{":bookmark:", new String(Character.toChars(0x1F516))},
{":bookmark_tabs:", new String(Character.toChars(0x1F4D1))},
{":books:", new String(Character.toChars(0x1F4DA))},
{":boom:", new String(Character.toChars(0x1F4A5))},
{":boot:", new String(Character.toChars(0x1F462))},
{":bouquet:", new String(Character.toChars(0x1F490))},
{":bow:", new String(Character.toChars(0x1F647))},
{":bowling:", new String(Character.toChars(0x1F3B3))},
{":boy:", new String(Character.toChars(0x1F466))},
{":bread:", new String(Character.toChars(0x1F35E))},
{":bride_with_veil:", new String(Character.toChars(0x1F470))},
{":bridge_at_night:", new String(Character.toChars(0x1F309))},
{":briefcase:", new String(Character.toChars(0x1F4BC))},
{":broken_heart:", new String(Character.toChars(0x1F494))},
{":bug:", new String(Character.toChars(0x1F41B))},
{":bulb:", new String(Character.toChars(0x1F4A1))},
{":bullettrain_front:", new String(Character.toChars(0x1F685))},
{":bullettrain_side:", new String(Character.toChars(0x1F684))},
{":bus:", new String(Character.toChars(0x1F68C))},
{":busstop:", new String(Character.toChars(0x1F68F))},
{":bust_in_silhouette:", new String(Character.toChars(0x1F464))},
{":busts_in_silhouette:", new String(Character.toChars(0x1F465))},
{":cactus:", new String(Character.toChars(0x1F335))},
{":cake:", new String(Character.toChars(0x1F370))},
{":calendar:", new String(Character.toChars(0x1F4C6))},
{":calling:", new String(Character.toChars(0x1F4F2))},
{":camel:", new String(Character.toChars(0x1F42B))},
{":camera:", new String(Character.toChars(0x1F4F7))},
{":candy:", new String(Character.toChars(0x1F36C))},
{":capital_abcd:", new String(Character.toChars(0x1F520))},
{":car:", new String(Character.toChars(0x1F697))},
{":card_index:", new String(Character.toChars(0x1F4C7))},
{":carousel_horse:", new String(Character.toChars(0x1F3A0))},
{":cat:", new String(Character.toChars(0x1F431))},
{":cat2:", new String(Character.toChars(0x1F408))},
{":cd:", new String(Character.toChars(0x1F4BF))},
{":chart:", new String(Character.toChars(0x1F4B9))},
{":chart_with_downwards_trend:", new String(Character.toChars(0x1F4C9))},
{":chart_with_upwards_trend:", new String(Character.toChars(0x1F4C8))},
{":checkered_flag:", new String(Character.toChars(0x1F3C1))},
{":cherries:", new String(Character.toChars(0x1F352))},
{":cherry_blossom:", new String(Character.toChars(0x1F338))},
{":chestnut:", new String(Character.toChars(0x1F330))},
{":chicken:", new String(Character.toChars(0x1F414))},
{":children_crossing:", new String(Character.toChars(0x1F6B8))},
{":chocolate_bar:", new String(Character.toChars(0x1F36B))},
{":christmas_tree:", new String(Character.toChars(0x1F384))},
{":cinema:", new String(Character.toChars(0x1F3A6))},
{":circus_tent:", new String(Character.toChars(0x1F3AA))},
{":city_sunrise:", new String(Character.toChars(0x1F307))},
{":city_sunset:", new String(Character.toChars(0x1F306))},
{":cl:", new String(Character.toChars(0x1F191))},
{":clap:", new String(Character.toChars(0x1F44F))},
{":clapper:", new String(Character.toChars(0x1F3AC))},
{":clipboard:", new String(Character.toChars(0x1F4CB))},
{":clock1:", new String(Character.toChars(0x1F550))},
{":clock10:", new String(Character.toChars(0x1F559))},
{":clock1030:", new String(Character.toChars(0x1F565))},
{":clock11:", new String(Character.toChars(0x1F55A))},
{":clock1130:", new String(Character.toChars(0x1F566))},
{":clock12:", new String(Character.toChars(0x1F55B))},
{":clock1230:", new String(Character.toChars(0x1F567))},
{":clock130:", new String(Character.toChars(0x1F55C))},
{":clock2:", new String(Character.toChars(0x1F551))},
{":clock230:", new String(Character.toChars(0x1F55D))},
{":clock3:", new String(Character.toChars(0x1F552))},
{":clock330:", new String(Character.toChars(0x1F55E))},
{":clock4:", new String(Character.toChars(0x1F553))},
{":clock430:", new String(Character.toChars(0x1F55F))},
{":clock5:", new String(Character.toChars(0x1F554))},
{":clock530:", new String(Character.toChars(0x1F560))},
{":clock6:", new String(Character.toChars(0x1F555))},
{":clock630:", new String(Character.toChars(0x1F561))},
{":clock7:", new String(Character.toChars(0x1F556))},
{":clock730:", new String(Character.toChars(0x1F562))},
{":clock8:", new String(Character.toChars(0x1F557))},
{":clock830:", new String(Character.toChars(0x1F563))},
{":clock9:", new String(Character.toChars(0x1F558))},
{":clock930:", new String(Character.toChars(0x1F564))},
{":closed_book:", new String(Character.toChars(0x1F4D5))},
{":closed_lock_with_key:", new String(Character.toChars(0x1F510))},
{":closed_umbrella:", new String(Character.toChars(0x1F302))},
{":cocktail:", new String(Character.toChars(0x1F378))},
{":cold_sweat:", new String(Character.toChars(0x1F630))},
{":collision:", new String(Character.toChars(0x1F4A5))},
{":computer:", new String(Character.toChars(0x1F4BB))},
{":confetti_ball:", new String(Character.toChars(0x1F38A))},
{":confounded:", new String(Character.toChars(0x1F616))},
{":confused:", new String(Character.toChars(0x1F615))},
{":construction:", new String(Character.toChars(0x1F6A7))},
{":construction_worker:", new String(Character.toChars(0x1F477))},
{":convenience_store:", new String(Character.toChars(0x1F3EA))},
{":cookie:", new String(Character.toChars(0x1F36A))},
{":cool:", new String(Character.toChars(0x1F192))},
{":cop:", new String(Character.toChars(0x1F46E))},
{":corn:", new String(Character.toChars(0x1F33D))},
{":couple:", new String(Character.toChars(0x1F46B))},
{":couple_with_heart:", new String(Character.toChars(0x1F491))},
{":couplekiss:", new String(Character.toChars(0x1F48F))},
{":cow:", new String(Character.toChars(0x1F42E))},
{":cow2:", new String(Character.toChars(0x1F404))},
{":credit_card:", new String(Character.toChars(0x1F4B3))},
{":crocodile:", new String(Character.toChars(0x1F40A))},
{":crossed_flags:", new String(Character.toChars(0x1F38C))},
{":crown:", new String(Character.toChars(0x1F451))},
{":cry:", new String(Character.toChars(0x1F622))},
{":crying_cat_face:", new String(Character.toChars(0x1F63F))},
{":crystal_ball:", new String(Character.toChars(0x1F52E))},
{":cupid:", new String(Character.toChars(0x1F498))},
{":currency_exchange:", new String(Character.toChars(0x1F4B1))},
{":curry:", new String(Character.toChars(0x1F35B))},
{":custard:", new String(Character.toChars(0x1F36E))},
{":customs:", new String(Character.toChars(0x1F6C3))},
{":cyclone:", new String(Character.toChars(0x1F300))},
{":dancer:", new String(Character.toChars(0x1F483))},
{":dancers:", new String(Character.toChars(0x1F46F))},
{":dango:", new String(Character.toChars(0x1F361))},
{":dart:", new String(Character.toChars(0x1F3AF))},
{":dash:", new String(Character.toChars(0x1F4A8))},
{":date:", new String(Character.toChars(0x1F4C5))},
{":deciduous_tree:", new String(Character.toChars(0x1F333))},
{":department_store:", new String(Character.toChars(0x1F3EC))},
{":diamond_shape_with_a_dot_inside:", new String(Character.toChars(0x1F4A0))},
{":disappointed:", new String(Character.toChars(0x1F61E))},
{":disappointed_relieved:", new String(Character.toChars(0x1F625))},
{":dizzy:", new String(Character.toChars(0x1F4AB))},
{":dizzy_face:", new String(Character.toChars(0x1F635))},
{":do_not_litter:", new String(Character.toChars(0x1F6AF))},
{":dog:", new String(Character.toChars(0x1F436))},
{":dog2:", new String(Character.toChars(0x1F415))},
{":dollar:", new String(Character.toChars(0x1F4B5))},
{":dolls:", new String(Character.toChars(0x1F38E))},
{":dolphin:", new String(Character.toChars(0x1F42C))},
{":door:", new String(Character.toChars(0x1F6AA))},
{":doughnut:", new String(Character.toChars(0x1F369))},
{":dragon:", new String(Character.toChars(0x1F409))},
{":dragon_face:", new String(Character.toChars(0x1F432))},
{":dress:", new String(Character.toChars(0x1F457))},
{":dromedary_camel:", new String(Character.toChars(0x1F42A))},
{":droplet:", new String(Character.toChars(0x1F4A7))},
{":dvd:", new String(Character.toChars(0x1F4C0))},
{":e-mail:", new String(Character.toChars(0x1F4E7))},
{":ear:", new String(Character.toChars(0x1F442))},
{":ear_of_rice:", new String(Character.toChars(0x1F33E))},
{":earth_africa:", new String(Character.toChars(0x1F30D))},
{":earth_americas:", new String(Character.toChars(0x1F30E))},
{":earth_asia:", new String(Character.toChars(0x1F30F))},
{":egg:", new String(Character.toChars(0x1F373))},
{":eggplant:", new String(Character.toChars(0x1F346))},
{":electric_plug:", new String(Character.toChars(0x1F50C))},
{":elephant:", new String(Character.toChars(0x1F418))},
{":end:", new String(Character.toChars(0x1F51A))},
{":envelope_with_arrow:", new String(Character.toChars(0x1F4E9))},
{":euro:", new String(Character.toChars(0x1F4B6))},
{":european_castle:", new String(Character.toChars(0x1F3F0))},
{":european_post_office:", new String(Character.toChars(0x1F3E4))},
{":evergreen_tree:", new String(Character.toChars(0x1F332))},
{":expressionless:", new String(Character.toChars(0x1F611))},
{":eyeglasses:", new String(Character.toChars(0x1F453))},
{":eyes:", new String(Character.toChars(0x1F440))},
{":facepunch:", new String(Character.toChars(0x1F44A))},
{":factory:", new String(Character.toChars(0x1F3ED))},
{":fallen_leaf:", new String(Character.toChars(0x1F342))},
{":family:", new String(Character.toChars(0x1F46A))},
{":fax:", new String(Character.toChars(0x1F4E0))},
{":fearful:", new String(Character.toChars(0x1F628))},
{":feet:", new String(Character.toChars(0x1F43E))},
{":ferris_wheel:", new String(Character.toChars(0x1F3A1))},
{":file_folder:", new String(Character.toChars(0x1F4C1))},
{":fire:", new String(Character.toChars(0x1F525))},
{":fire_engine:", new String(Character.toChars(0x1F692))},
{":fireworks:", new String(Character.toChars(0x1F386))},
{":first_quarter_moon:", new String(Character.toChars(0x1F313))},
{":first_quarter_moon_with_face:", new String(Character.toChars(0x1F31B))},
{":fish:", new String(Character.toChars(0x1F41F))},
{":fish_cake:", new String(Character.toChars(0x1F365))},
{":fishing_pole_and_fish:", new String(Character.toChars(0x1F3A3))},
{":flags:", new String(Character.toChars(0x1F38F))},
{":flashlight:", new String(Character.toChars(0x1F526))},
{":floppy_disk:", new String(Character.toChars(0x1F4BE))},
{":flower_playing_cards:", new String(Character.toChars(0x1F3B4))},
{":flushed:", new String(Character.toChars(0x1F633))},
{":foggy:", new String(Character.toChars(0x1F301))},
{":football:", new String(Character.toChars(0x1F3C8))},
{":footprints:", new String(Character.toChars(0x1F463))},
{":fork_and_knife:", new String(Character.toChars(0x1F374))},
{":four_leaf_clover:", new String(Character.toChars(0x1F340))},
{":free:", new String(Character.toChars(0x1F193))},
{":fried_shrimp:", new String(Character.toChars(0x1F364))},
{":fries:", new String(Character.toChars(0x1F35F))},
{":frog:", new String(Character.toChars(0x1F438))},
{":frowning:", new String(Character.toChars(0x1F626))},
{":full_moon:", new String(Character.toChars(0x1F315))},
{":full_moon_with_face:", new String(Character.toChars(0x1F31D))},
{":game_die:", new String(Character.toChars(0x1F3B2))},
{":gem:", new String(Character.toChars(0x1F48E))},
{":ghost:", new String(Character.toChars(0x1F47B))},
{":gift:", new String(Character.toChars(0x1F381))},
{":gift_heart:", new String(Character.toChars(0x1F49D))},
{":girl:", new String(Character.toChars(0x1F467))},
{":globe_with_meridians:", new String(Character.toChars(0x1F310))},
{":goat:", new String(Character.toChars(0x1F410))},
{":grapes:", new String(Character.toChars(0x1F347))},
{":green_apple:", new String(Character.toChars(0x1F34F))},
{":green_book:", new String(Character.toChars(0x1F4D7))},
{":green_heart:", new String(Character.toChars(0x1F49A))},
{":grimacing:", new String(Character.toChars(0x1F62C))},
{":grin:", new String(Character.toChars(0x1F601))},
{":grinning:", new String(Character.toChars(0x1F600))},
{":guardsman:", new String(Character.toChars(0x1F482))},
{":guitar:", new String(Character.toChars(0x1F3B8))},
{":gun:", new String(Character.toChars(0x1F52B))},
{":haircut:", new String(Character.toChars(0x1F487))},
{":hamburger:", new String(Character.toChars(0x1F354))},
{":hammer:", new String(Character.toChars(0x1F528))},
{":hamster:", new String(Character.toChars(0x1F439))},
{":handbag:", new String(Character.toChars(0x1F45C))},
{":hankey:", new String(Character.toChars(0x1F4A9))},
{":hatched_chick:", new String(Character.toChars(0x1F425))},
{":hatching_chick:", new String(Character.toChars(0x1F423))},
{":headphones:", new String(Character.toChars(0x1F3A7))},
{":hear_no_evil:", new String(Character.toChars(0x1F649))},
{":heart_decoration:", new String(Character.toChars(0x1F49F))},
{":heart_eyes:", new String(Character.toChars(0x1F60D))},
{":heart_eyes_cat:", new String(Character.toChars(0x1F63B))},
{":heartbeat:", new String(Character.toChars(0x1F493))},
{":heartpulse:", new String(Character.toChars(0x1F497))},
{":heavy_dollar_sign:", new String(Character.toChars(0x1F4B2))},
{":helicopter:", new String(Character.toChars(0x1F681))},
{":herb:", new String(Character.toChars(0x1F33F))},
{":hibiscus:", new String(Character.toChars(0x1F33A))},
{":high_brightness:", new String(Character.toChars(0x1F506))},
{":high_heel:", new String(Character.toChars(0x1F460))},
{":hocho:", new String(Character.toChars(0x1F52A))},
{":honey_pot:", new String(Character.toChars(0x1F36F))},
{":honeybee:", new String(Character.toChars(0x1F41D))},
{":horse:", new String(Character.toChars(0x1F434))},
{":horse_racing:", new String(Character.toChars(0x1F3C7))},
{":hospital:", new String(Character.toChars(0x1F3E5))},
{":hotel:", new String(Character.toChars(0x1F3E8))},
{":house:", new String(Character.toChars(0x1F3E0))},
{":house_with_garden:", new String(Character.toChars(0x1F3E1))},
{":hushed:", new String(Character.toChars(0x1F62F))},
{":ice_cream:", new String(Character.toChars(0x1F368))},
{":icecream:", new String(Character.toChars(0x1F366))},
{":id:", new String(Character.toChars(0x1F194))},
{":ideograph_advantage:", new String(Character.toChars(0x1F250))},
{":imp:", new String(Character.toChars(0x1F47F))},
{":inbox_tray:", new String(Character.toChars(0x1F4E5))},
{":incoming_envelope:", new String(Character.toChars(0x1F4E8))},
{":information_desk_person:", new String(Character.toChars(0x1F481))},
{":innocent:", new String(Character.toChars(0x1F607))},
{":iphone:", new String(Character.toChars(0x1F4F1))},
{":izakaya_lantern:", new String(Character.toChars(0x1F3EE))},
{":jack_o_lantern:", new String(Character.toChars(0x1F383))},
{":japan:", new String(Character.toChars(0x1F5FE))},
{":japanese_castle:", new String(Character.toChars(0x1F3EF))},
{":japanese_goblin:", new String(Character.toChars(0x1F47A))},
{":japanese_ogre:", new String(Character.toChars(0x1F479))},
{":jeans:", new String(Character.toChars(0x1F456))},
{":joy:", new String(Character.toChars(0x1F602))},
{":joy_cat:", new String(Character.toChars(0x1F639))},
{":key:", new String(Character.toChars(0x1F511))},
{":keycap_ten:", new String(Character.toChars(0x1F51F))},
{":kimono:", new String(Character.toChars(0x1F458))},
{":kiss:", new String(Character.toChars(0x1F48B))},
{":kissing:", new String(Character.toChars(0x1F617))},
{":kissing_cat:", new String(Character.toChars(0x1F63D))},
{":kissing_closed_eyes:", new String(Character.toChars(0x1F61A))},
{":kissing_heart:", new String(Character.toChars(0x1F618))},
{":kissing_smiling_eyes:", new String(Character.toChars(0x1F619))},
{":koala:", new String(Character.toChars(0x1F428))},
{":koko:", new String(Character.toChars(0x1F201))},
{":large_blue_circle:", new String(Character.toChars(0x1F535))},
{":large_blue_diamond:", new String(Character.toChars(0x1F537))},
{":large_orange_diamond:", new String(Character.toChars(0x1F536))},
{":last_quarter_moon:", new String(Character.toChars(0x1F317))},
{":last_quarter_moon_with_face:", new String(Character.toChars(0x1F31C))},
{":laughing:", new String(Character.toChars(0x1F606))},
{":leaves:", new String(Character.toChars(0x1F343))},
{":ledger:", new String(Character.toChars(0x1F4D2))},
{":left_luggage:", new String(Character.toChars(0x1F6C5))},
{":lemon:", new String(Character.toChars(0x1F34B))},
{":leopard:", new String(Character.toChars(0x1F406))},
{":light_rail:", new String(Character.toChars(0x1F688))},
{":link:", new String(Character.toChars(0x1F517))},
{":lips:", new String(Character.toChars(0x1F444))},
{":lipstick:", new String(Character.toChars(0x1F484))},
{":lock:", new String(Character.toChars(0x1F512))},
{":lock_with_ink_pen:", new String(Character.toChars(0x1F50F))},
{":lollipop:", new String(Character.toChars(0x1F36D))},
{":loudspeaker:", new String(Character.toChars(0x1F4E2))},
{":love_hotel:", new String(Character.toChars(0x1F3E9))},
{":love_letter:", new String(Character.toChars(0x1F48C))},
{":low_brightness:", new String(Character.toChars(0x1F505))},
{":mag:", new String(Character.toChars(0x1F50D))},
{":mag_right:", new String(Character.toChars(0x1F50E))},
{":mahjong:", new String(Character.toChars(0x1F004))},
{":mailbox:", new String(Character.toChars(0x1F4EB))},
{":mailbox_closed:", new String(Character.toChars(0x1F4EA))},
{":mailbox_with_mail:", new String(Character.toChars(0x1F4EC))},
{":mailbox_with_no_mail:", new String(Character.toChars(0x1F4ED))},
{":man:", new String(Character.toChars(0x1F468))},
{":man_with_gua_pi_mao:", new String(Character.toChars(0x1F472))},
{":man_with_turban:", new String(Character.toChars(0x1F473))},
{":mans_shoe:", new String(Character.toChars(0x1F45E))},
{":maple_leaf:", new String(Character.toChars(0x1F341))},
{":mask:", new String(Character.toChars(0x1F637))},
{":massage:", new String(Character.toChars(0x1F486))},
{":meat_on_bone:", new String(Character.toChars(0x1F356))},
{":mega:", new String(Character.toChars(0x1F4E3))},
{":melon:", new String(Character.toChars(0x1F348))},
{":memo:", new String(Character.toChars(0x1F4DD))},
{":mens:", new String(Character.toChars(0x1F6B9))},
{":metro:", new String(Character.toChars(0x1F687))},
{":microphone:", new String(Character.toChars(0x1F3A4))},
{":microscope:", new String(Character.toChars(0x1F52C))},
{":milky_way:", new String(Character.toChars(0x1F30C))},
{":minibus:", new String(Character.toChars(0x1F690))},
{":minidisc:", new String(Character.toChars(0x1F4BD))},
{":mobile_phone_off:", new String(Character.toChars(0x1F4F4))},
{":money_with_wings:", new String(Character.toChars(0x1F4B8))},
{":moneybag:", new String(Character.toChars(0x1F4B0))},
{":monkey:", new String(Character.toChars(0x1F412))},
{":monkey_face:", new String(Character.toChars(0x1F435))},
{":monorail:", new String(Character.toChars(0x1F69D))},
{":moon:", new String(Character.toChars(0x1F319))},
{":mortar_board:", new String(Character.toChars(0x1F393))},
{":mount_fuji:", new String(Character.toChars(0x1F5FB))},
{":mountain_bicyclist:", new String(Character.toChars(0x1F6B5))},
{":mountain_cableway:", new String(Character.toChars(0x1F6A0))},
{":mountain_railway:", new String(Character.toChars(0x1F69E))},
{":mouse:", new String(Character.toChars(0x1F42D))},
{":mouse2:", new String(Character.toChars(0x1F401))},
{":movie_camera:", new String(Character.toChars(0x1F3A5))},
{":moyai:", new String(Character.toChars(0x1F5FF))},
{":muscle:", new String(Character.toChars(0x1F4AA))},
{":mushroom:", new String(Character.toChars(0x1F344))},
{":musical_keyboard:", new String(Character.toChars(0x1F3B9))},
{":musical_note:", new String(Character.toChars(0x1F3B5))},
{":musical_score:", new String(Character.toChars(0x1F3BC))},
{":mute:", new String(Character.toChars(0x1F507))},
{":nail_care:", new String(Character.toChars(0x1F485))},
{":name_badge:", new String(Character.toChars(0x1F4DB))},
{":necktie:", new String(Character.toChars(0x1F454))},
{":neutral_face:", new String(Character.toChars(0x1F610))},
{":new:", new String(Character.toChars(0x1F195))},
{":new_moon:", new String(Character.toChars(0x1F311))},
{":new_moon_with_face:", new String(Character.toChars(0x1F31A))},
{":newspaper:", new String(Character.toChars(0x1F4F0))},
{":ng:", new String(Character.toChars(0x1F196))},
{":no_bell:", new String(Character.toChars(0x1F515))},
{":no_bicycles:", new String(Character.toChars(0x1F6B3))},
{":no_entry_sign:", new String(Character.toChars(0x1F6AB))},
{":no_good:", new String(Character.toChars(0x1F645))},
{":no_mobile_phones:", new String(Character.toChars(0x1F4F5))},
{":no_mouth:", new String(Character.toChars(0x1F636))},
{":no_pedestrians:", new String(Character.toChars(0x1F6B7))},
{":no_smoking:", new String(Character.toChars(0x1F6AD))},
{":non-potable_water:", new String(Character.toChars(0x1F6B1))},
{":nose:", new String(Character.toChars(0x1F443))},
{":notebook:", new String(Character.toChars(0x1F4D3))},
{":notebook_with_decorative_cover:", new String(Character.toChars(0x1F4D4))},
{":notes:", new String(Character.toChars(0x1F3B6))},
{":nut_and_bolt:", new String(Character.toChars(0x1F529))},
{":o2:", new String(Character.toChars(0x1F17E))},
{":ocean:", new String(Character.toChars(0x1F30A))},
{":octopus:", new String(Character.toChars(0x1F419))},
{":oden:", new String(Character.toChars(0x1F362))},
{":office:", new String(Character.toChars(0x1F3E2))},
{":ok:", new String(Character.toChars(0x1F197))},
{":ok_hand:", new String(Character.toChars(0x1F44C))},
{":ok_woman:", new String(Character.toChars(0x1F646))},
{":older_man:", new String(Character.toChars(0x1F474))},
{":older_woman:", new String(Character.toChars(0x1F475))},
{":on:", new String(Character.toChars(0x1F51B))},
{":oncoming_automobile:", new String(Character.toChars(0x1F698))},
{":oncoming_bus:", new String(Character.toChars(0x1F68D))},
{":oncoming_police_car:", new String(Character.toChars(0x1F694))},
{":oncoming_taxi:", new String(Character.toChars(0x1F696))},
{":open_book:", new String(Character.toChars(0x1F4D6))},
{":open_file_folder:", new String(Character.toChars(0x1F4C2))},
{":open_hands:", new String(Character.toChars(0x1F450))},
{":open_mouth:", new String(Character.toChars(0x1F62E))},
{":orange_book:", new String(Character.toChars(0x1F4D9))},
{":outbox_tray:", new String(Character.toChars(0x1F4E4))},
{":ox:", new String(Character.toChars(0x1F402))},
{":package:", new String(Character.toChars(0x1F4E6))},
{":page_facing_up:", new String(Character.toChars(0x1F4C4))},
{":page_with_curl:", new String(Character.toChars(0x1F4C3))},
{":pager:", new String(Character.toChars(0x1F4DF))},
{":palm_tree:", new String(Character.toChars(0x1F334))},
{":panda_face:", new String(Character.toChars(0x1F43C))},
{":paperclip:", new String(Character.toChars(0x1F4CE))},
{":parking:", new String(Character.toChars(0x1F17F))},
{":passport_control:", new String(Character.toChars(0x1F6C2))},
{":paw_prints:", new String(Character.toChars(0x1F43E))},
{":peach:", new String(Character.toChars(0x1F351))},
{":pear:", new String(Character.toChars(0x1F350))},
{":pencil:", new String(Character.toChars(0x1F4DD))},
{":penguin:", new String(Character.toChars(0x1F427))},
{":pensive:", new String(Character.toChars(0x1F614))},
{":performing_arts:", new String(Character.toChars(0x1F3AD))},
{":persevere:", new String(Character.toChars(0x1F623))},
{":person_frowning:", new String(Character.toChars(0x1F64D))},
{":person_with_blond_hair:", new String(Character.toChars(0x1F471))},
{":person_with_pouting_face:", new String(Character.toChars(0x1F64E))},
{":pig:", new String(Character.toChars(0x1F437))},
{":pig2:", new String(Character.toChars(0x1F416))},
{":pig_nose:", new String(Character.toChars(0x1F43D))},
{":pill:", new String(Character.toChars(0x1F48A))},
{":pineapple:", new String(Character.toChars(0x1F34D))},
{":pizza:", new String(Character.toChars(0x1F355))},
{":point_down:", new String(Character.toChars(0x1F447))},
{":point_left:", new String(Character.toChars(0x1F448))},
{":point_right:", new String(Character.toChars(0x1F449))},
{":point_up_2:", new String(Character.toChars(0x1F446))},
{":police_car:", new String(Character.toChars(0x1F693))},
{":poodle:", new String(Character.toChars(0x1F429))},
{":poop:", new String(Character.toChars(0x1F4A9))},
{":post_office:", new String(Character.toChars(0x1F3E3))},
{":postal_horn:", new String(Character.toChars(0x1F4EF))},
{":postbox:", new String(Character.toChars(0x1F4EE))},
{":potable_water:", new String(Character.toChars(0x1F6B0))},
{":pouch:", new String(Character.toChars(0x1F45D))},
{":poultry_leg:", new String(Character.toChars(0x1F357))},
{":pound:", new String(Character.toChars(0x1F4B7))},
{":pouting_cat:", new String(Character.toChars(0x1F63E))},
{":pray:", new String(Character.toChars(0x1F64F))},
{":princess:", new String(Character.toChars(0x1F478))},
{":punch:", new String(Character.toChars(0x1F44A))},
{":purple_heart:", new String(Character.toChars(0x1F49C))},
{":purse:", new String(Character.toChars(0x1F45B))},
{":pushpin:", new String(Character.toChars(0x1F4CC))},
{":put_litter_in_its_place:", new String(Character.toChars(0x1F6AE))},
{":rabbit:", new String(Character.toChars(0x1F430))},
{":rabbit2:", new String(Character.toChars(0x1F407))},
{":racehorse:", new String(Character.toChars(0x1F40E))},
{":radio:", new String(Character.toChars(0x1F4FB))},
{":radio_button:", new String(Character.toChars(0x1F518))},
{":rage:", new String(Character.toChars(0x1F621))},
{":railway_car:", new String(Character.toChars(0x1F683))},
{":rainbow:", new String(Character.toChars(0x1F308))},
{":raised_hands:", new String(Character.toChars(0x1F64C))},
{":raising_hand:", new String(Character.toChars(0x1F64B))},
{":ram:", new String(Character.toChars(0x1F40F))},
{":ramen:", new String(Character.toChars(0x1F35C))},
{":rat:", new String(Character.toChars(0x1F400))},
{":red_car:", new String(Character.toChars(0x1F697))},
{":red_circle:", new String(Character.toChars(0x1F534))},
{":relieved:", new String(Character.toChars(0x1F60C))},
{":repeat:", new String(Character.toChars(0x1F501))},
{":repeat_one:", new String(Character.toChars(0x1F502))},
{":restroom:", new String(Character.toChars(0x1F6BB))},
{":revolving_hearts:", new String(Character.toChars(0x1F49E))},
{":ribbon:", new String(Character.toChars(0x1F380))},
{":rice:", new String(Character.toChars(0x1F35A))},
{":rice_ball:", new String(Character.toChars(0x1F359))},
{":rice_cracker:", new String(Character.toChars(0x1F358))},
{":rice_scene:", new String(Character.toChars(0x1F391))},
{":ring:", new String(Character.toChars(0x1F48D))},
{":rocket:", new String(Character.toChars(0x1F680))},
{":roller_coaster:", new String(Character.toChars(0x1F3A2))},
{":rooster:", new String(Character.toChars(0x1F413))},
{":rose:", new String(Character.toChars(0x1F339))},
{":rotating_light:", new String(Character.toChars(0x1F6A8))},
{":round_pushpin:", new String(Character.toChars(0x1F4CD))},
{":rowboat:", new String(Character.toChars(0x1F6A3))},
{":rugby_football:", new String(Character.toChars(0x1F3C9))},
{":runner:", new String(Character.toChars(0x1F3C3))},
{":running:", new String(Character.toChars(0x1F3C3))},
{":running_shirt_with_sash:", new String(Character.toChars(0x1F3BD))},
{":sa:", new String(Character.toChars(0x1F202))},
{":sake:", new String(Character.toChars(0x1F376))},
{":sandal:", new String(Character.toChars(0x1F461))},
{":santa:", new String(Character.toChars(0x1F385))},
{":satellite:", new String(Character.toChars(0x1F4E1))},
{":satisfied:", new String(Character.toChars(0x1F606))},
{":saxophone:", new String(Character.toChars(0x1F3B7))},
{":school:", new String(Character.toChars(0x1F3EB))},
{":school_satchel:", new String(Character.toChars(0x1F392))},
{":scream:", new String(Character.toChars(0x1F631))},
{":scream_cat:", new String(Character.toChars(0x1F640))},
{":scroll:", new String(Character.toChars(0x1F4DC))},
{":seat:", new String(Character.toChars(0x1F4BA))},
{":see_no_evil:", new String(Character.toChars(0x1F648))},
{":seedling:", new String(Character.toChars(0x1F331))},
{":shaved_ice:", new String(Character.toChars(0x1F367))},
{":sheep:", new String(Character.toChars(0x1F411))},
{":shell:", new String(Character.toChars(0x1F41A))},
{":ship:", new String(Character.toChars(0x1F6A2))},
{":shirt:", new String(Character.toChars(0x1F455))},
{":shit:", new String(Character.toChars(0x1F4A9))},
{":shoe:", new String(Character.toChars(0x1F45E))},
{":shower:", new String(Character.toChars(0x1F6BF))},
{":signal_strength:", new String(Character.toChars(0x1F4F6))},
{":six_pointed_star:", new String(Character.toChars(0x1F52F))},
{":ski:", new String(Character.toChars(0x1F3BF))},
{":skull:", new String(Character.toChars(0x1F480))},
{":sleeping:", new String(Character.toChars(0x1F634))},
{":sleepy:", new String(Character.toChars(0x1F62A))},
{":slot_machine:", new String(Character.toChars(0x1F3B0))},
{":small_blue_diamond:", new String(Character.toChars(0x1F539))},
{":small_orange_diamond:", new String(Character.toChars(0x1F538))},
{":small_red_triangle:", new String(Character.toChars(0x1F53A))},
{":small_red_triangle_down:", new String(Character.toChars(0x1F53B))},
{":smile:", new String(Character.toChars(0x1F604))},
{":smile_cat:", new String(Character.toChars(0x1F638))},
{":smiley:", new String(Character.toChars(0x1F603))},
{":smiley_cat:", new String(Character.toChars(0x1F63A))},
{":smiling_imp:", new String(Character.toChars(0x1F608))},
{":smirk:", new String(Character.toChars(0x1F60F))},
{":smirk_cat:", new String(Character.toChars(0x1F63C))},
{":smoking:", new String(Character.toChars(0x1F6AC))},
{":snail:", new String(Character.toChars(0x1F40C))},
{":snake:", new String(Character.toChars(0x1F40D))},
{":snowboarder:", new String(Character.toChars(0x1F3C2))},
{":sob:", new String(Character.toChars(0x1F62D))},
{":soon:", new String(Character.toChars(0x1F51C))},
{":sos:", new String(Character.toChars(0x1F198))},
{":sound:", new String(Character.toChars(0x1F509))},
{":space_invader:", new String(Character.toChars(0x1F47E))},
{":spaghetti:", new String(Character.toChars(0x1F35D))},
{":sparkler:", new String(Character.toChars(0x1F387))},
{":sparkling_heart:", new String(Character.toChars(0x1F496))},
{":speak_no_evil:", new String(Character.toChars(0x1F64A))},
{":speaker:", new String(Character.toChars(0x1F50A))},
{":speech_balloon:", new String(Character.toChars(0x1F4AC))},
{":speedboat:", new String(Character.toChars(0x1F6A4))},
{":star2:", new String(Character.toChars(0x1F31F))},
{":stars:", new String(Character.toChars(0x1F303))},
{":station:", new String(Character.toChars(0x1F689))},
{":statue_of_liberty:", new String(Character.toChars(0x1F5FD))},
{":steam_locomotive:", new String(Character.toChars(0x1F682))},
{":stew:", new String(Character.toChars(0x1F372))},
{":straight_ruler:", new String(Character.toChars(0x1F4CF))},
{":strawberry:", new String(Character.toChars(0x1F353))},
{":stuck_out_tongue:", new String(Character.toChars(0x1F61B))},
{":stuck_out_tongue_closed_eyes:", new String(Character.toChars(0x1F61D))},
{":stuck_out_tongue_winking_eye:", new String(Character.toChars(0x1F61C))},
{":sun_with_face:", new String(Character.toChars(0x1F31E))},
{":sunflower:", new String(Character.toChars(0x1F33B))},
{":sunglasses:", new String(Character.toChars(0x1F60E))},
{":sunrise:", new String(Character.toChars(0x1F305))},
{":sunrise_over_mountains:", new String(Character.toChars(0x1F304))},
{":surfer:", new String(Character.toChars(0x1F3C4))},
{":sushi:", new String(Character.toChars(0x1F363))},
{":suspension_railway:", new String(Character.toChars(0x1F69F))},
{":sweat:", new String(Character.toChars(0x1F613))},
{":sweat_drops:", new String(Character.toChars(0x1F4A6))},
{":sweat_smile:", new String(Character.toChars(0x1F605))},
{":sweet_potato:", new String(Character.toChars(0x1F360))},
{":swimmer:", new String(Character.toChars(0x1F3CA))},
{":symbols:", new String(Character.toChars(0x1F523))},
{":syringe:", new String(Character.toChars(0x1F489))},
{":tada:", new String(Character.toChars(0x1F389))},
{":tanabata_tree:", new String(Character.toChars(0x1F38B))},
{":tangerine:", new String(Character.toChars(0x1F34A))},
{":taxi:", new String(Character.toChars(0x1F695))},
{":tea:", new String(Character.toChars(0x1F375))},
{":telephone_receiver:", new String(Character.toChars(0x1F4DE))},
{":telescope:", new String(Character.toChars(0x1F52D))},
{":tennis:", new String(Character.toChars(0x1F3BE))},
{":thought_balloon:", new String(Character.toChars(0x1F4AD))},
{":thumbsdown:", new String(Character.toChars(0x1F44E))},
{":thumbsup:", new String(Character.toChars(0x1F44D))},
{":ticket:", new String(Character.toChars(0x1F3AB))},
{":tiger:", new String(Character.toChars(0x1F42F))},
{":tiger2:", new String(Character.toChars(0x1F405))},
{":tired_face:", new String(Character.toChars(0x1F62B))},
{":toilet:", new String(Character.toChars(0x1F6BD))},
{":tokyo_tower:", new String(Character.toChars(0x1F5FC))},
{":tomato:", new String(Character.toChars(0x1F345))},
{":tongue:", new String(Character.toChars(0x1F445))},
{":top:", new String(Character.toChars(0x1F51D))},
{":tophat:", new String(Character.toChars(0x1F3A9))},
{":tractor:", new String(Character.toChars(0x1F69C))},
{":traffic_light:", new String(Character.toChars(0x1F6A5))},
{":train:", new String(Character.toChars(0x1F683))},
{":train2:", new String(Character.toChars(0x1F686))},
{":tram:", new String(Character.toChars(0x1F68A))},
{":triangular_flag_on_post:", new String(Character.toChars(0x1F6A9))},
{":triangular_ruler:", new String(Character.toChars(0x1F4D0))},
{":trident:", new String(Character.toChars(0x1F531))},
{":triumph:", new String(Character.toChars(0x1F624))},
{":trolleybus:", new String(Character.toChars(0x1F68E))},
{":trophy:", new String(Character.toChars(0x1F3C6))},
{":tropical_drink:", new String(Character.toChars(0x1F379))},
{":tropical_fish:", new String(Character.toChars(0x1F420))},
{":truck:", new String(Character.toChars(0x1F69A))},
{":trumpet:", new String(Character.toChars(0x1F3BA))},
{":tshirt:", new String(Character.toChars(0x1F455))},
{":tulip:", new String(Character.toChars(0x1F337))},
{":turtle:", new String(Character.toChars(0x1F422))},
{":tv:", new String(Character.toChars(0x1F4FA))},
{":twisted_rightwards_arrows:", new String(Character.toChars(0x1F500))},
{":two_hearts:", new String(Character.toChars(0x1F495))},
{":two_men_holding_hands:", new String(Character.toChars(0x1F46C))},
{":two_women_holding_hands:", new String(Character.toChars(0x1F46D))},
{":u5272:", new String(Character.toChars(0x1F239))},
{":u5408:", new String(Character.toChars(0x1F234))},
{":u55b6:", new String(Character.toChars(0x1F23A))},
{":u6307:", new String(Character.toChars(0x1F22F))},
{":u6708:", new String(Character.toChars(0x1F237))},
{":u6709:", new String(Character.toChars(0x1F236))},
{":u6e80:", new String(Character.toChars(0x1F235))},
{":u7121:", new String(Character.toChars(0x1F21A))},
{":u7533:", new String(Character.toChars(0x1F238))},
{":u7981:", new String(Character.toChars(0x1F232))},
{":u7a7a:", new String(Character.toChars(0x1F233))},
{":unamused:", new String(Character.toChars(0x1F612))},
{":underage:", new String(Character.toChars(0x1F51E))},
{":unlock:", new String(Character.toChars(0x1F513))},
{":up:", new String(Character.toChars(0x1F199))},
{":vertical_traffic_light:", new String(Character.toChars(0x1F6A6))},
{":vhs:", new String(Character.toChars(0x1F4FC))},
{":vibration_mode:", new String(Character.toChars(0x1F4F3))},
{":video_camera:", new String(Character.toChars(0x1F4F9))},
{":video_game:", new String(Character.toChars(0x1F3AE))},
{":violin:", new String(Character.toChars(0x1F3BB))},
{":volcano:", new String(Character.toChars(0x1F30B))},
{":vs:", new String(Character.toChars(0x1F19A))},
{":walking:", new String(Character.toChars(0x1F6B6))},
{":waning_crescent_moon:", new String(Character.toChars(0x1F318))},
{":waning_gibbous_moon:", new String(Character.toChars(0x1F316))},
{":water_buffalo:", new String(Character.toChars(0x1F403))},
{":watermelon:", new String(Character.toChars(0x1F349))},
{":wave:", new String(Character.toChars(0x1F44B))},
{":waxing_crescent_moon:", new String(Character.toChars(0x1F312))},
{":waxing_gibbous_moon:", new String(Character.toChars(0x1F314))},
{":wc:", new String(Character.toChars(0x1F6BE))},
{":weary:", new String(Character.toChars(0x1F629))},
{":wedding:", new String(Character.toChars(0x1F492))},
{":whale:", new String(Character.toChars(0x1F433))},
{":whale2:", new String(Character.toChars(0x1F40B))},
{":white_flower:", new String(Character.toChars(0x1F4AE))},
{":white_square_button:", new String(Character.toChars(0x1F533))},
{":wind_chime:", new String(Character.toChars(0x1F390))},
{":wine_glass:", new String(Character.toChars(0x1F377))},
{":wink:", new String(Character.toChars(0x1F609))},
{":wolf:", new String(Character.toChars(0x1F43A))},
{":woman:", new String(Character.toChars(0x1F469))},
{":womans_clothes:", new String(Character.toChars(0x1F45A))},
{":womans_hat:", new String(Character.toChars(0x1F452))},
{":womens:", new String(Character.toChars(0x1F6BA))},
{":worried:", new String(Character.toChars(0x1F61F))},
{":wrench:", new String(Character.toChars(0x1F527))},
{":yellow_heart:", new String(Character.toChars(0x1F49B))},
{":yen:", new String(Character.toChars(0x1F4B4))},
{":yum:", new String(Character.toChars(0x1F60B))},
{":zzz:", new String(Character.toChars(0x1F4A4))}};
static {
for (String[] s : UNICODE_MAPPING) {
CHEAT_SHEET_TO_UNICODE.put(s[0], s[1]);
UNICODE_TO_CHEAT_SHEET.put(s[1], s[0]);
}
}
}
| |
/*
* Copyright (C) 2017-2022 Sebastiano Vigna
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.unimi.dsi.fastutil.objects;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.junit.Ignore;
import org.junit.Test;
import it.unimi.dsi.fastutil.BigArrays;
import it.unimi.dsi.fastutil.MainRunner;
import it.unimi.dsi.fastutil.ints.IntList;
import it.unimi.dsi.fastutil.ints.IntLists;
@SuppressWarnings("rawtypes")
public class ObjectBigArrayBigListTest {
@Test
public void testRemoveAllModifiesCollection() {
final ObjectBigList<Integer> list = new ObjectBigArrayBigList<>();
assertFalse(list.removeAll(Collections.emptySet()));
assertEquals(ObjectBigLists.EMPTY_BIG_LIST, list);
}
@SuppressWarnings("boxing")
@Test
public void testRemoveAllSkipSegment() {
final ObjectBigList<Integer> list = new ObjectBigArrayBigList<>();
for(long i = 0; i < BigArrays.SEGMENT_SIZE + 10; i++) list.add(Integer.valueOf((int)(i % 2)));
assertTrue(list.removeAll(ObjectSets.singleton(1)));
assertEquals((BigArrays.SEGMENT_SIZE + 1) / 2 + 5, list.size64());
for (long i = 0; i < (BigArrays.SEGMENT_SIZE + 1) / 2 + 5; i++) assertEquals(Integer.valueOf(0), list.get(i));
}
@SuppressWarnings("boxing")
@Test
public void testRemoveAll() {
final ObjectBigArrayBigList<Integer> l = new ObjectBigArrayBigList<>(ObjectBigArrayBigList.of(0, 1, 1, 2));
l.removeAll(ObjectSets.singleton(1));
assertEquals(ObjectBigArrayBigList.of(0, 2), l);
final Object[][] elements = l.elements();
assertNull(BigArrays.get(elements, 2));
assertNull(BigArrays.get(elements, 3));
}
@SuppressWarnings("boxing")
@Test
public void testAddAllCollection() {
final ObjectBigArrayBigList<Integer> l = new ObjectBigArrayBigList<>(ObjectBigArrayBigList.of(0, 1, 1, 2));
final List<Integer> m = Arrays.asList(new Integer[] { 2, 3, 3, 4 });
l.addAll(0, m);
assertEquals(ObjectBigArrayBigList.of(2, 3, 3, 4, 0, 1, 1, 2), l);
l.addAll(0, IntList.of());
l.addAll(2, IntList.of());
assertEquals(ObjectBigArrayBigList.of(2, 3, 3, 4, 0, 1, 1, 2), l);
l.addAll(0, (Collection<Integer>)ObjectList.of(0));
assertEquals(ObjectBigArrayBigList.of(0, 2, 3, 3, 4, 0, 1, 1, 2), l);
}
@SuppressWarnings("boxing")
@Test
public void testAddAllList() {
final ObjectBigArrayBigList<Integer> l = new ObjectBigArrayBigList<>(ObjectBigArrayBigList.of(0, 1, 1, 2));
final ObjectList<Integer> m = ObjectList.of(2, 3, 3, 4);
l.addAll(0, m);
assertEquals(ObjectBigArrayBigList.of(2, 3, 3, 4, 0, 1, 1, 2), l);
l.addAll(0, IntLists.emptyList());
l.addAll(2, IntLists.emptyList());
assertEquals(ObjectBigArrayBigList.of(2, 3, 3, 4, 0, 1, 1, 2), l);
}
@SuppressWarnings("boxing")
@Test
public void testAddAllBigList() {
final ObjectBigArrayBigList<Integer> l = new ObjectBigArrayBigList<>(ObjectBigArrayBigList.of(0, 1, 1, 2));
final ObjectBigArrayBigList<Integer> m = ObjectBigArrayBigList.of(2, 3, 3, 4);
l.addAll(0, m);
assertEquals(ObjectBigArrayBigList.of(2, 3, 3, 4, 0, 1, 1, 2), l);
l.addAll(0, ObjectBigArrayBigList.of());
l.addAll(2, ObjectBigArrayBigList.of());
assertEquals(ObjectBigArrayBigList.of(2, 3, 3, 4, 0, 1, 1, 2), l);
}
@Test
public void testOf() {
final ObjectBigArrayBigList<String> l = ObjectBigArrayBigList.of("0", "1", "2");
assertEquals(new ObjectBigArrayBigList<>(BigArrays.wrap(new String[] { "0", "1", "2" })), l);
}
@Test
public void testToBigList() {
final ObjectBigArrayBigList<String> baseList = ObjectBigArrayBigList.of("wood", "board", "glass", "metal");
final ObjectBigArrayBigList<String> transformed = baseList.stream().map(s -> "ply" + s).collect(ObjectBigArrayBigList.toBigList());
assertEquals(ObjectBigArrayBigList.of("plywood", "plyboard", "plyglass", "plymetal"), transformed);
}
@Test
public void testSpliteratorTrySplit() {
final ObjectBigArrayBigList<String> baseList = ObjectBigArrayBigList.of("0", "1", "2", "3", "4", "5", "bird");
final ObjectSpliterator<String> willBeSuffix = baseList.spliterator();
assertEquals(baseList.size64(), willBeSuffix.getExactSizeIfKnown());
// Rather non-intuitively for finite sequences (but makes perfect sense for infinite ones),
// the spec demands the original spliterator becomes the suffix and the new Spliterator becomes the prefix.
final ObjectSpliterator<String> prefix = willBeSuffix.trySplit();
// No assurance of where we split, but where ever it is it should be a perfect split into a prefix and suffix.
final java.util.stream.Stream<String> suffixStream = java.util.stream.StreamSupport.stream(willBeSuffix, false);
final java.util.stream.Stream<String> prefixStream = java.util.stream.StreamSupport.stream(prefix, false);
final ObjectBigArrayBigList<String> prefixList = prefixStream.collect(ObjectBigArrayBigList.toBigList());
final ObjectBigArrayBigList<String> suffixList = suffixStream.collect(ObjectBigArrayBigList.toBigList());
assertEquals(baseList.size64(), prefixList.size64() + suffixList.size64());
assertEquals(baseList.subList(0, prefixList.size64()), prefixList);
assertEquals(baseList.subList(prefixList.size64(), baseList.size64()), suffixList);
final ObjectBigArrayBigList<String> recombinedList = new ObjectBigArrayBigList<>(baseList.size64());
recombinedList.addAll(prefixList);
recombinedList.addAll(suffixList);
assertEquals(baseList, recombinedList);
}
	// Pseudorandom source with a fixed seed so test runs are reproducible.
	private static java.util.Random r = new java.util.Random(0);
	// Returns a fresh random key to drive the lists under test.
	private static int genKey() {
		return r.nextInt();
	}
	// Scratch arrays shared with test(): keys (k), non-keys (nk) and their mirrors (kt, nkt).
	private static Object[] k, nk;
	private static Object kt[];
	private static Object nkt[];
@SuppressWarnings({ "unchecked", "boxing" })
protected static void testLists(final ObjectBigList m, final ObjectBigList t, final int n, final int level) {
Exception mThrowsOutOfBounds, tThrowsOutOfBounds;
Object rt = null;
Object rm = (null);
if (level > 4) return;
/* Now we check that both sets agree on random keys. For m we use the polymorphic method. */
for (int i = 0; i < n; i++) {
int p = r.nextInt() % (n * 2);
final Object T = genKey();
mThrowsOutOfBounds = tThrowsOutOfBounds = null;
try {
m.set(p, T);
}
catch (final IndexOutOfBoundsException e) {
mThrowsOutOfBounds = e;
}
try {
t.set(p, (T));
}
catch (final IndexOutOfBoundsException e) {
tThrowsOutOfBounds = e;
}
assertTrue("Error (" + level + "): set() divergence at start in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
(mThrowsOutOfBounds == null) == (tThrowsOutOfBounds == null));
if (mThrowsOutOfBounds == null) assertTrue("Error (" + level + "): m and t differ after set() on position " + p + " (" + m.get(p) + ", " + t.get(p) + ")",
t.get(p).equals((m.get(p))));
p = r.nextInt() % (n * 2);
mThrowsOutOfBounds = tThrowsOutOfBounds = null;
try {
m.get(p);
}
catch (final IndexOutOfBoundsException e) {
mThrowsOutOfBounds = e;
}
try {
t.get(p);
}
catch (final IndexOutOfBoundsException e) {
tThrowsOutOfBounds = e;
}
assertTrue("Error (" + level + "): get() divergence at start in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
(mThrowsOutOfBounds == null) == (tThrowsOutOfBounds == null));
if (mThrowsOutOfBounds == null) assertTrue("Error (" + level + "): m and t differ aftre get() on position " + p + " (" + m.get(p) + ", " + t.get(p) + ")",
t.get(p).equals((m.get(p))));
}
/* Now we check that both sets agree on random keys. For m we use the standard method. */
for (int i = 0; i < n; i++) {
final int p = r.nextInt() % (n * 2);
mThrowsOutOfBounds = tThrowsOutOfBounds = null;
try {
m.get(p);
}
catch (final IndexOutOfBoundsException e) {
mThrowsOutOfBounds = e;
}
try {
t.get(p);
}
catch (final IndexOutOfBoundsException e) {
tThrowsOutOfBounds = e;
}
assertTrue("Error (" + level + "): get() divergence at start in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
(mThrowsOutOfBounds == null) == (tThrowsOutOfBounds == null));
if (mThrowsOutOfBounds == null) assertTrue("Error (" + level + "): m and t differ at start on position " + p + " (" + m.get(p) + ", " + t.get(p) + ")", t.get(p)
.equals(m.get(p)));
}
/* Now we check that m and t are equal. */
if (!m.equals(t) || !t.equals(m)) System.err.println("m: " + m + " t: " + t);
assertTrue("Error (" + level + "): ! m.equals(t) at start", m.equals(t));
assertTrue("Error (" + level + "): ! t.equals(m) at start", t.equals(m));
/* Now we check that m actually holds that data. */
for (final Iterator i = t.iterator(); i.hasNext();) {
assertTrue("Error (" + level + "): m and t differ on an entry after insertion (iterating on t)", m.contains(i.next()));
}
/* Now we check that m actually holds that data, but iterating on m. */
for (final Iterator i = m.listIterator(); i.hasNext();) {
assertTrue("Error (" + level + "): m and t differ on an entry after insertion (iterating on m)", t.contains(i.next()));
}
/*
* Now we check that inquiries about random data give the same answer in m and t. For m we
* use the polymorphic method.
*/
for (int i = 0; i < n; i++) {
final Object T = genKey();
assertTrue("Error (" + level + "): divergence in content between t and m (polymorphic method)", m.contains(T) == t.contains((T)));
}
/*
* Again, we check that inquiries about random data give the same answer in m and t, but for
* m we use the standard method.
*/
for (int i = 0; i < n; i++) {
final Object T = genKey();
assertTrue("Error (" + level + "): divergence in content between t and m (polymorphic method)", m.contains((T)) == t.contains((T)));
}
/* Now we add and remove random data in m and t, checking that the result is the same. */
for (int i = 0; i < 2 * n; i++) {
Object T = genKey();
try {
m.add(T);
}
catch (final IndexOutOfBoundsException e) {
mThrowsOutOfBounds = e;
}
try {
t.add((T));
}
catch (final IndexOutOfBoundsException e) {
tThrowsOutOfBounds = e;
}
T = genKey();
int p = r.nextInt() % (2 * n + 1);
mThrowsOutOfBounds = tThrowsOutOfBounds = null;
try {
m.add(p, T);
}
catch (final IndexOutOfBoundsException e) {
mThrowsOutOfBounds = e;
}
try {
t.add(p, (T));
}
catch (final IndexOutOfBoundsException e) {
tThrowsOutOfBounds = e;
}
assertTrue("Error (" + level + "): add() divergence in IndexOutOfBoundsException for index " + p + " for " + T + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
(mThrowsOutOfBounds == null) == (tThrowsOutOfBounds == null));
p = r.nextInt() % (2 * n + 1);
mThrowsOutOfBounds = tThrowsOutOfBounds = null;
try {
rm = m.remove(p);
}
catch (final IndexOutOfBoundsException e) {
mThrowsOutOfBounds = e;
}
try {
rt = t.remove(p);
}
catch (final IndexOutOfBoundsException e) {
tThrowsOutOfBounds = e;
}
assertTrue("Error (" + level + "): remove() divergence in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
(mThrowsOutOfBounds == null) == (tThrowsOutOfBounds == null));
if (mThrowsOutOfBounds == null) assertTrue("Error (" + level + "): divergence in remove() between t and m (" + rt + ", " + rm + ")", rt.equals((rm)));
}
assertTrue("Error (" + level + "): ! m.equals(t) after add/remove", m.equals(t));
assertTrue("Error (" + level + "): ! t.equals(m) after add/remove", t.equals(m));
/*
* Now we add random data in m and t using addAll on a collection, checking that the result
* is the same.
*/
for (int i = 0; i < n; i++) {
final int p = r.nextInt() % (2 * n + 1);
final java.util.Collection m1 = new java.util.ArrayList();
final int s = r.nextInt(n / 2 + 1);
for (int j = 0; j < s; j++)
m1.add((genKey()));
mThrowsOutOfBounds = tThrowsOutOfBounds = null;
try {
m.addAll(p, m1);
}
catch (final IndexOutOfBoundsException e) {
mThrowsOutOfBounds = e;
}
try {
t.addAll(p, m1);
}
catch (final IndexOutOfBoundsException e) {
tThrowsOutOfBounds = e;
}
assertTrue("Error (" + level + "): addAll() divergence in IndexOutOfBoundsException for index " + p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
(mThrowsOutOfBounds == null) == (tThrowsOutOfBounds == null));
assertTrue("Error (" + level + m + t + "): ! m.equals(t) after addAll", m.equals(t));
assertTrue("Error (" + level + m + t + "): ! t.equals(m) after addAll", t.equals(m));
}
if (m.size64() > n) {
m.size(n);
while (t.size64() != n)
t.remove(t.size64() - 1);
}
/*
* Now we add random data in m and t using addAll on a type-specific collection, checking
* that the result is the same.
*/
for (int i = 0; i < n; i++) {
final int p = r.nextInt() % (2 * n + 1);
final ObjectCollection m1 = new ObjectBigArrayBigList();
final java.util.Collection t1 = new java.util.ArrayList();
final int s = r.nextInt(n / 2 + 1);
for (int j = 0; j < s; j++) {
final Object x = genKey();
m1.add(x);
t1.add((x));
}
mThrowsOutOfBounds = tThrowsOutOfBounds = null;
try {
m.addAll(p, m1);
}
catch (final IndexOutOfBoundsException e) {
mThrowsOutOfBounds = e;
}
try {
t.addAll(p, t1);
}
catch (final IndexOutOfBoundsException e) {
tThrowsOutOfBounds = e;
}
assertTrue("Error (" + level + "): polymorphic addAll() divergence in IndexOutOfBoundsException for index " + p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds
+ ")", (mThrowsOutOfBounds == null) == (tThrowsOutOfBounds == null));
assertTrue("Error (" + level + m + t + "): ! m.equals(t) after polymorphic addAll", m.equals(t));
assertTrue("Error (" + level + m + t + "): ! t.equals(m) after polymorphic addAll", t.equals(m));
}
if (m.size64() > n) {
m.size(n);
while (t.size64() != n)
t.remove(t.size64() - 1);
}
/*
* Now we add random data in m and t using addAll on a list, checking that the result is the
* same.
*/
for (int i = 0; i < n; i++) {
final int p = r.nextInt() % (2 * n + 1);
final ObjectBigList m1 = new ObjectBigArrayBigList();
final java.util.Collection t1 = new java.util.ArrayList();
final int s = r.nextInt(n / 2 + 1);
for (int j = 0; j < s; j++) {
final Object x = genKey();
m1.add(x);
t1.add((x));
}
mThrowsOutOfBounds = tThrowsOutOfBounds = null;
try {
m.addAll(p, m1);
}
catch (final IndexOutOfBoundsException e) {
mThrowsOutOfBounds = e;
}
try {
t.addAll(p, t1);
}
catch (final IndexOutOfBoundsException e) {
tThrowsOutOfBounds = e;
}
assertTrue("Error (" + level + "): list addAll() divergence in IndexOutOfBoundsException for index " + p + " for " + m1 + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
(mThrowsOutOfBounds == null) == (tThrowsOutOfBounds == null));
assertTrue("Error (" + level + "): ! m.equals(t) after list addAll", m.equals(t));
assertTrue("Error (" + level + "): ! t.equals(m) after list addAll", t.equals(m));
}
/* Now we check that both sets agree on random keys. For m we use the standard method. */
for (int i = 0; i < n; i++) {
final int p = r.nextInt() % (n * 2);
mThrowsOutOfBounds = tThrowsOutOfBounds = null;
try {
m.get(p);
}
catch (final IndexOutOfBoundsException e) {
mThrowsOutOfBounds = e;
}
try {
t.get(p);
}
catch (final IndexOutOfBoundsException e) {
tThrowsOutOfBounds = e;
}
assertTrue("Error (" + level + "): get() divergence in IndexOutOfBoundsException for index " + p + " (" + mThrowsOutOfBounds + ", " + tThrowsOutOfBounds + ")",
(mThrowsOutOfBounds == null) == (tThrowsOutOfBounds == null));
if (mThrowsOutOfBounds == null) assertTrue("Error (" + level + "): m and t differ on position " + p + " (" + m.get(p) + ", " + t.get(p) + ")", t.get(p).equals(m.get(p)));
}
/* Now we inquiry about the content with indexOf()/lastIndexOf(). */
for (int i = 0; i < 10 * n; i++) {
final Object T = genKey();
assertTrue("Error (" + level + "): indexOf() divergence for " + T + " (" + m.indexOf((T)) + ", " + t.indexOf((T)) + ")", m.indexOf((T)) == t.indexOf((T)));
assertTrue("Error (" + level + "): lastIndexOf() divergence for " + T + " (" + m.lastIndexOf((T)) + ", " + t.lastIndexOf((T)) + ")",
m.lastIndexOf((T)) == t.lastIndexOf((T)));
assertTrue("Error (" + level + "): polymorphic indexOf() divergence for " + T + " (" + m.indexOf(T) + ", " + t.indexOf((T)) + ")", m.indexOf(T) == t.indexOf((T)));
assertTrue("Error (" + level + "): polymorphic lastIndexOf() divergence for " + T + " (" + m.lastIndexOf(T) + ", " + t.lastIndexOf((T)) + ")",
m.lastIndexOf(T) == t.lastIndexOf((T)));
}
/* Now we check cloning. */
if (level == 0) {
assertTrue("Error (" + level + "): m does not equal m.clone()", m.equals(((ObjectBigArrayBigList)m).clone()));
assertTrue("Error (" + level + "): m.clone() does not equal m", ((ObjectBigArrayBigList)m).clone().equals(m));
}
/* Now we play with constructors. */
assertTrue("Error (" + level + "): m does not equal new (type-specific Collection m)", m.equals(new ObjectBigArrayBigList((ObjectCollection)m)));
assertTrue("Error (" + level + "): new (type-specific nCollection m) does not equal m", (new ObjectBigArrayBigList((ObjectCollection)m)).equals(m));
assertTrue("Error (" + level + "): m does not equal new (type-specific List m)", m.equals(new ObjectBigArrayBigList(m)));
assertTrue("Error (" + level + "): new (type-specific List m) does not equal m", (new ObjectBigArrayBigList(m)).equals(m));
assertTrue("Error (" + level + "): m does not equal new (m.listIterator())", m.equals(new ObjectBigArrayBigList(m.listIterator())));
assertTrue("Error (" + level + "): new (m.listIterator()) does not equal m", (new ObjectBigArrayBigList(m.listIterator())).equals(m));
assertTrue("Error (" + level + "): m does not equal new (m.type_specific_iterator())", m.equals(new ObjectBigArrayBigList(m.iterator())));
assertTrue("Error (" + level + "): new (m.type_specific_iterator()) does not equal m", (new ObjectBigArrayBigList(m.iterator())).equals(m));
final int h = m.hashCode();
/* Now we save and read m. */
ObjectBigList m2 = null;
try {
final java.io.File ff = new java.io.File("it.unimi.dsi.fastutil.test.junit." + m.getClass().getSimpleName() + "." + n);
final java.io.OutputStream os = new java.io.FileOutputStream(ff);
final java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(os);
oos.writeObject(m);
oos.close();
final java.io.InputStream is = new java.io.FileInputStream(ff);
final java.io.ObjectInputStream ois = new java.io.ObjectInputStream(is);
m2 = (ObjectBigList)ois.readObject();
ois.close();
ff.delete();
}
catch (final Exception e) {
e.printStackTrace();
System.exit(1);
}
assertTrue("Error (" + level + "): hashCode() changed after save/read", m2.hashCode() == h);
/* Now we check that m2 actually holds that data. */
assertTrue("Error (" + level + "): ! m2.equals(t) after save/read", m2.equals(t));
assertTrue("Error (" + level + "): ! t.equals(m2) after save/read", t.equals(m2));
/* Now we take out of m everything, and check that it is empty. */
for (final Iterator i = t.iterator(); i.hasNext();)
m2.remove(i.next());
assertTrue("Error (" + level + "): m2 is not empty (as it should be)", m2.isEmpty());
/* Now we play with iterators. */
{
ObjectBigListIterator i;
ObjectBigListIterator j;
i = m.listIterator();
j = t.listIterator();
for (int k = 0; k < 2 * n; k++) {
assertTrue("Error (" + level + "): divergence in hasNext()", i.hasNext() == j.hasNext());
assertTrue("Error (" + level + "): divergence in hasPrevious()", i.hasPrevious() == j.hasPrevious());
if (r.nextFloat() < .8 && i.hasNext()) {
assertTrue("Error (" + level + "): divergence in next()", i.next().equals(j.next()));
if (r.nextFloat() < 0.2) {
i.remove();
j.remove();
}
else if (r.nextFloat() < 0.2) {
final Object T = genKey();
i.set(T);
j.set((T));
}
else if (r.nextFloat() < 0.2) {
final Object T = genKey();
i.add(T);
j.add((T));
}
}
else if (r.nextFloat() < .2 && i.hasPrevious()) {
assertTrue("Error (" + level + "): divergence in previous()", i.previous().equals(j.previous()));
if (r.nextFloat() < 0.2) {
i.remove();
j.remove();
}
else if (r.nextFloat() < 0.2) {
final Object T = genKey();
i.set(T);
j.set((T));
}
else if (r.nextFloat() < 0.2) {
final Object T = genKey();
i.add(T);
j.add((T));
}
}
assertTrue("Error (" + level + "): divergence in nextIndex()", i.nextIndex() == j.nextIndex());
assertTrue("Error (" + level + "): divergence in previousIndex()", i.previousIndex() == j.previousIndex());
}
}
{
Object I, J;
final long from = (r.nextLong() >>> 1) % (m.size64() + 1);
ObjectBigListIterator i;
ObjectBigListIterator j;
i = m.listIterator(from);
j = t.listIterator(from);
for (int k = 0; k < 2 * n; k++) {
assertTrue("Error (" + level + "): divergence in hasNext() (iterator with starting point " + from + ")", i.hasNext() == j.hasNext());
assertTrue("Error (" + level + "): divergence in hasPrevious() (iterator with starting point " + from + ")", i.hasPrevious() == j.hasPrevious());
if (r.nextFloat() < .8 && i.hasNext()) {
I = i.next();
J = j.next();
assertTrue("Error (" + level + "): divergence in next() (" + I + ", " + J + ", iterator with starting point " + from + ")", I.equals(J));
// System.err.println("Done next " + I + " " + J + " " + badPrevious);
if (r.nextFloat() < 0.2) {
// System.err.println("Removing in next");
i.remove();
j.remove();
}
else if (r.nextFloat() < 0.2) {
final Object T = genKey();
i.set(T);
j.set((T));
}
else if (r.nextFloat() < 0.2) {
final Object T = genKey();
i.add(T);
j.add((T));
}
}
else if (r.nextFloat() < .2 && i.hasPrevious()) {
I = i.previous();
J = j.previous();
assertTrue("Error (" + level + "): divergence in previous() (" + I + ", " + J + ", iterator with starting point " + from + ")", I.equals(J));
if (r.nextFloat() < 0.2) {
// System.err.println("Removing in prev");
i.remove();
j.remove();
}
else if (r.nextFloat() < 0.2) {
final Object T = genKey();
i.set(T);
j.set((T));
}
else if (r.nextFloat() < 0.2) {
final Object T = genKey();
i.add(T);
j.add((T));
}
}
}
}
/* Now we check that m actually holds that data. */
assertTrue("Error (" + level + "): ! m.equals(t) after iteration", m.equals(t));
assertTrue("Error (" + level + "): ! t.equals(m) after iteration", t.equals(m));
/* Now we select a pair of keys and create a subset. */
if (!m.isEmpty()) {
final long start = (r.nextLong() >>> 1) % m.size64();
final long end = start + (r.nextLong() >>> 1) % (m.size64() - start);
// System.err.println("Checking subList from " + start + " to " + end + " (level=" +
// (level+1) + ")...");
testLists(m.subList(start, end), t.subList(start, end), n, level + 1);
assertTrue("Error (" + level + m + t + "): ! m.equals(t) after subList", m.equals(t));
assertTrue("Error (" + level + "): ! t.equals(m) after subList", t.equals(m));
}
m.clear();
t.clear();
assertTrue("Error (" + level + "): m is not empty after clear()", m.isEmpty());
}
@SuppressWarnings({ "boxing", "unchecked" })
protected static void test(final int n) {
ObjectBigArrayBigList m = new ObjectBigArrayBigList();
ObjectBigList t = ObjectBigLists.asBigList(new ObjectArrayList());
k = new Object[n];
nk = new Object[n];
kt = new Object[n];
nkt = new Object[n];
for (int i = 0; i < n; i++) {
k[i] = kt[i] = genKey();
nk[i] = nkt[i] = genKey();
}
/* We add pairs to t. */
for (int i = 0; i < n; i++) t.add(k[i]);
/* We add to m the same data */
m.addAll(t);
testLists(m, t, n, 0);
// This tests all reflection-based methods.
m = ObjectBigArrayBigList.wrap(ObjectBigArrays.EMPTY_BIG_ARRAY);
t = ObjectBigLists.asBigList(new ObjectArrayList());
/* We add pairs to t. */
for (int i = 0; i < n; i++) t.add(k[i]);
/* We add to m the same data */
m.addAll(t);
testLists(m, t, n, 0);
return;
}
	// Runs the full comparative test battery at increasing sizes.
	@Test
	public void test1() {
		test(1);
	}
	@Test
	public void test10() {
		test(10);
	}
	@Test
	public void test100() {
		test(100);
	}
	@Ignore("Too long")
	@Test
	public void test1000() {
		test(1000);
	}
	// size(long) must work on a fresh default-capacity instance without throwing.
	@Test
	public void testSizeOnDefaultInstance() {
		final ObjectBigArrayBigList<Integer> l = new ObjectBigArrayBigList<>();
		l.size(100);
	}
	// Runs the legacy main-method self-test of the class, if it is present.
	@Test
	public void testLegacyMainMethodTests() throws Exception {
		MainRunner.callMainIfExists(ObjectBigArrayBigList.class, "test", /*num=*/"200", /*seed=*/"90293");
	}
}
| |
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bremersee.fac.example.config;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import javax.net.ssl.KeyManager;
import javax.net.ssl.TrustManager;
import org.apache.tomcat.util.http.fileupload.IOUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.util.StringUtils;
import com.unboundid.ldap.listener.InMemoryDirectoryServer;
import com.unboundid.ldap.listener.InMemoryListenerConfig;
import com.unboundid.ldap.sdk.schema.Schema;
import com.unboundid.util.ssl.KeyStoreKeyManager;
import com.unboundid.util.ssl.SSLUtil;
import com.unboundid.util.ssl.TrustStoreTrustManager;
/**
* @author Christian Bremer
*
*/
@Configuration
@ConditionalOnProperty(prefix = "bremersee.unboundid.ldap.server", name = "embedded", havingValue = "true", matchIfMissing = false)
@EnableConfigurationProperties(InMemoryDirectoryServerProperties.class)
public class InMemoryDirectoryServerConfig {

    private static final String CLASSPATH_PREFIX = "classpath:";

    @Autowired
    private InMemoryDirectoryServerProperties properties;

    /**
     * Copies a classpath resource to a temporary file (deleted on JVM exit) so that
     * APIs which only accept file system paths can read it.
     *
     * @param resourceLocation the classpath location (without the {@code classpath:} prefix)
     * @param prefix the temporary file name prefix
     * @param suffix the temporary file name suffix
     * @return the temporary file holding a copy of the resource
     * @throws IOException if the resource cannot be read or the copy fails
     */
    private static File copyClassPathResourceToTempFile(final String resourceLocation, final String prefix,
            final String suffix) throws IOException {
        final File tempFile = File.createTempFile(prefix, suffix);
        tempFile.deleteOnExit();
        // Close both streams even if the copy fails half-way (the original leaked the input stream).
        try (final InputStream inputStream = new ClassPathResource(resourceLocation).getInputStream();
                final OutputStream outputStream = new FileOutputStream(tempFile)) {
            IOUtils.copy(inputStream, outputStream);
        }
        return tempFile;
    }

    /**
     * Resolves a configured location: {@code classpath:} locations are copied to a
     * temporary file and its canonical path is returned; anything else is returned unchanged.
     */
    private static String resolveLocation(final String location, final String prefix, final String suffix)
            throws IOException {
        if (location.toLowerCase().startsWith(CLASSPATH_PREFIX)) {
            return copyClassPathResourceToTempFile(location.substring(CLASSPATH_PREFIX.length()), prefix, suffix)
                    .getCanonicalPath();
        }
        return location;
    }

    /**
     * Creates, configures and starts the embedded in-memory LDAP server.
     *
     * @return a holder bean whose {@code shutdown} method stops the server on context close
     */
    @Bean(name = "inMemoryDirectoryServerBean", destroyMethod = "shutdown")
    public InMemoryDirectoryServerBean inMemoryDirectoryServerBean() {
        try {
            final com.unboundid.ldap.listener.InMemoryDirectoryServerConfig config = new com.unboundid.ldap.listener.InMemoryDirectoryServerConfig(
                    properties.getRootDn());
            if (StringUtils.hasText(properties.getManagerDn())) {
                config.addAdditionalBindCredentials(properties.getManagerDn(), properties.getManagerPassword());
            }
            // null listen address means "listen on all interfaces".
            final InetAddress ldapAddress = StringUtils.hasText(properties.getLdapAddress())
                    ? InetAddress.getByName(properties.getLdapAddress()) : null;
            final InetAddress ldapsAddress = StringUtils.hasText(properties.getLdapsAddress())
                    ? InetAddress.getByName(properties.getLdapsAddress()) : null;
            KeyManager keyManager = null;
            TrustManager trustManager = null;
            if (StringUtils.hasText(properties.getKeyStoreLocation())) {
                final String location = resolveLocation(properties.getKeyStoreLocation(), "ldapServerKey", "Store");
                final char[] password = StringUtils.hasText(properties.getKeyStorePassword())
                        ? properties.getKeyStorePassword().toCharArray() : null;
                keyManager = new KeyStoreKeyManager(location, password, properties.getKeyStoreFormat(),
                        properties.getKeyAlias());
            }
            if (StringUtils.hasText(properties.getTrustStoreLocation())) {
                final String location = resolveLocation(properties.getTrustStoreLocation(), "ldapServerTrust", "Store");
                final char[] password = StringUtils.hasText(properties.getTrustStorePassword())
                        ? properties.getTrustStorePassword().toCharArray() : null;
                trustManager = new TrustStoreTrustManager(location, password, properties.getTrustStoreFormat(), true);
            }
            if (keyManager == null || trustManager == null) {
                // Fall back to the bundled store for whichever side was not configured.
                final File keyStoreFile = copyClassPathResourceToTempFile("/ldap-server/truststore.jks",
                        "ldapServerKeyStore", ".jks");
                if (keyManager == null) {
                    keyManager = new KeyStoreKeyManager(keyStoreFile, "changeit".toCharArray());
                }
                if (trustManager == null) {
                    trustManager = new TrustStoreTrustManager(keyStoreFile);
                }
            }
            final SSLUtil serverSSLUtil = new SSLUtil(keyManager, trustManager);
            final SSLUtil clientSSLUtil = new SSLUtil(trustManager);
            //@formatter:off
            config.setListenerConfigs(
                    InMemoryListenerConfig.createLDAPConfig(properties.getLdapListenerName(), // Listener name
                            ldapAddress, // Listen address. (null = listen on all interfaces)
                            properties.getLdapPort(), // Listen port (0 = automatically choose an available port)
                            serverSSLUtil.createSSLSocketFactory()), // StartTLS factory
                    InMemoryListenerConfig.createLDAPSConfig(properties.getLdapsListenerName(), // Listener name
                            ldapsAddress, // Listen address. (null = listen on all interfaces)
                            properties.getLdapsPort(), // Listen port (0 = automatically choose an available port)
                            serverSSLUtil.createSSLServerSocketFactory(), // Server factory
                            clientSSLUtil.createSSLSocketFactory())); // Client factory
            //@formatter:on
            config.setEnforceSingleStructuralObjectClass(false);
            config.setEnforceAttributeSyntaxCompliance(true);
            final String[] schemaLocations = properties.getSchemaLocationsAsArray();
            if (schemaLocations.length > 0) {
                for (int i = 0; i < schemaLocations.length; i++) {
                    schemaLocations[i] = resolveLocation(schemaLocations[i], "ldapServer", ".schema");
                }
                config.setSchema(Schema.mergeSchemas(Schema.getSchema(schemaLocations)));
            }
            final InMemoryDirectoryServer directoryServer = new InMemoryDirectoryServer(config);
            final String[] ldifLocations = properties.getLdifLocationsAsArray();
            if (ldifLocations.length > 0) {
                for (int i = 0; i < ldifLocations.length; i++) {
                    // BUGFIX: the original compared against CLASSPATH_PREFIX.substring(CLASSPATH_PREFIX.length())
                    // (the empty string), so EVERY location was treated as a classpath resource and
                    // plain file-system paths had their first 10 characters chopped off.
                    final String location = resolveLocation(ldifLocations[i], "ldapServer", ".ldif");
                    directoryServer.importFromLDIF(true, location);
                }
                directoryServer.restartServer();
            } else {
                directoryServer.startListening();
            }
            return new InMemoryDirectoryServerBean(directoryServer);
        } catch (final Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Holder for the embedded directory server; exposed as a bean so that its
     * {@link #shutdown()} method is invoked when the application context closes.
     */
    public static class InMemoryDirectoryServerBean {

        private final InMemoryDirectoryServer server;

        private InMemoryDirectoryServerBean(InMemoryDirectoryServer server) {
            this.server = server;
        }

        public InMemoryDirectoryServer getServer() {
            return server;
        }

        /** Stops the server, closing all existing connections. */
        public void shutdown() {
            if (server != null) {
                server.shutDown(true);
            }
        }
    }
}
| |
package fini.main.model;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.Period;
import java.util.ArrayList;
import java.util.UUID;
/**
*
* Task class is the fundamental element of FINI. Users can create, manipulate and store the tasks.
 * Since constructing a task requires many parameters, which would make telescoping constructors unwieldy,
 * we use the builder design pattern and nest a TaskBuilder inside the Task class.
*
* @@author A0127483B
*/
public class Task implements TaskInterface {
    /* ***********************************
     * Constants
     * ***********************************/
    // The task's shape: plain task (no dates), event (start + end), or deadline (one date/time).
    public static enum Type {
        DEFAULT, EVENT, DEADLINE
    }
    // User-assigned importance; NORMAL is used when none is given.
    public static enum Priority {
        HIGH, MEDIUM, LOW, NORMAL
    }
    // Project bucket used when the user does not name one.
    private static final String DEFAULT_PROJECT = "Inbox";
    /* ***********************************
     * Required fields
     * ***********************************/
    private String taskTitle;
    private boolean isRecurring;
    /* ***********************************
     * Optional fields
     * ***********************************/
    private String projectName;
    private Priority priority;
    // Start/end are null unless the task type requires them (see constructor).
    private LocalDateTime taskStartDateTime = null;
    private LocalDateTime taskEndDateTime = null;
    // Recurrence window and repetition interval; only meaningful when isRecurring is true.
    private LocalDateTime recursUntil;
    private Period interval;
    private boolean isCompleted = false;
    private Type taskType;
    // ObjectID assign each Task instance a distinct ID
    // RecurUniqueID assign same ID for different Tasks under same recurring task
    private String objectID = null;
    private String recurUniqueID = null;
/* ***********************************
* TaskBuilder
* ***********************************/
    /**
     * Fluent builder for {@link Task}. Title and recurrence flag are required at
     * construction; all other fields are optional and default as initialized below.
     */
    public static class TaskBuilder {
        // Required
        private final String taskTitle;
        private final boolean isRecurring;
        // Optional
        private String projectName = DEFAULT_PROJECT;
        private Priority priority = Priority.NORMAL;
        // 0 entries -> DEFAULT task, 1 -> DEADLINE, 2 -> EVENT (see Task constructor).
        private ArrayList<LocalDateTime> dateTimes = new ArrayList<LocalDateTime>();
        private LocalDateTime recursUntil = null;
        private Period interval = null;
        public TaskBuilder(String taskTitle, boolean isRecurring) {
            this.taskTitle = taskTitle;
            this.isRecurring = isRecurring;
        }
        public TaskBuilder setProjectName(String projectName) {
            this.projectName = projectName;
            return this;
        }
        public TaskBuilder setPriority(Priority priority) {
            this.priority = priority;
            return this;
        }
        public TaskBuilder setDatetimes(ArrayList<LocalDateTime> dateTimes) {
            this.dateTimes = dateTimes;
            return this;
        }
        public TaskBuilder setRecursUntil(LocalDateTime recursUntil) {
            this.recursUntil = recursUntil;
            return this;
        }
        public TaskBuilder setInterval(Period interval) {
            this.interval = interval;
            return this;
        }
        // Return a Task through build()
        public Task build() {
            return new Task(this);
        }
    }
/* ***********************************
* Private constructor
* ***********************************/
    /**
     * Builds a Task from the given builder. Derives the task type from the
     * number of supplied date/times (2 = EVENT, 1 = DEADLINE, otherwise a
     * floating DEFAULT task) and assigns the identifying UUIDs.
     */
    private Task(TaskBuilder taskBuilder) {
        // Required
        taskTitle = taskBuilder.taskTitle;
        isRecurring = taskBuilder.isRecurring;
        if (isRecurring) {
            // Occurrences spawned from the same recurring task share this ID.
            recurUniqueID = UUID.randomUUID().toString();
        }
        // Every instance gets its own distinct object ID.
        objectID = UUID.randomUUID().toString();
        // Optional
        projectName = taskBuilder.projectName;
        priority = taskBuilder.priority;
        recursUntil = taskBuilder.recursUntil;
        interval = taskBuilder.interval;
        // The count of date/times decides the task type...
        switch (taskBuilder.dateTimes.size()) {
            case 2:
                taskType = Type.EVENT;
                break;
            case 1:
                taskType = Type.DEADLINE;
                break;
            default:
                taskType = Type.DEFAULT;
                break;
        }
        // ...and the type decides which date fields get populated.
        switch (taskType) {
            case EVENT:
                taskStartDateTime = taskBuilder.dateTimes.get(0);
                taskEndDateTime = taskBuilder.dateTimes.get(1);
                break;
            case DEADLINE:
                taskStartDateTime = taskBuilder.dateTimes.get(0);
                break;
            default:
                break;
        }
    }
// @@author A0130047W
/* ***********************************
* Public getters
* ***********************************/
    /** Returns the task title (asserted non-null). */
    public String getTitle() {
        assert taskTitle != null;
        return taskTitle;
    }
    public String getProjectName() {
        return projectName;
    }
    public Priority getPriority() {
        return priority;
    }
    /** Start date/time; null for floating (DEFAULT) tasks. */
    public LocalDateTime getStartDateTime() {
        return taskStartDateTime;
    }
    /** End date/time; only set for EVENT tasks. */
    public LocalDateTime getEndDateTime() {
        return taskEndDateTime;
    }
    public LocalDateTime getRecursUntil() {
        return recursUntil;
    }
    public Type getTaskType() {
        return taskType;
    }
    /** ID shared by occurrences of the same recurring task; null otherwise. */
    public String getRecurUniqueID() {
        return recurUniqueID;
    }
    public boolean hasRecurUniqueID() {
        return recurUniqueID != null;
    }
    /** Per-instance unique ID. */
    public String getObjectID() {
        return objectID;
    }
    public Period getInterval() {
        return interval;
    }
    public boolean isCompleted() {
        return isCompleted;
    }
public boolean isOverdue() {
LocalDateTime nowDateTime = LocalDateTime.now();
/**
* Possible combination:
* no startDateTime -> Not overdue
* startDateTime before now -> Overdue
* startDateTime after now -> Not Overdue
*
*/
if (taskStartDateTime == null) {
return false;
} else if (taskStartDateTime.isAfter(nowDateTime)) {
return false;
} else {
return true;
}
}
    public boolean isRecurring() {
        return isRecurring;
    }
    /* ***********************************
     * Public setters
     * ***********************************/
    public void setIsRecurring(boolean isRecurring) {
        this.isRecurring = isRecurring;
    }
    public void setTaskTitle(String taskTitle) {
        this.taskTitle = taskTitle;
    }
    /** Sets the project name, falling back to DEFAULT_PROJECT for null. */
    public void setProjectName(String projectName) {
        if (projectName != null) {
            this.projectName = projectName;
        } else {
            this.projectName = DEFAULT_PROJECT;
        }
    }
public void setPriority(Priority priority) {
if (priority != null) {
assert(priority != null);
this.priority = priority;
} else {
this.priority = Priority.NORMAL;
}
}
    public void setTaskStartDateTime(LocalDateTime taskStartDateTime) {
        this.taskStartDateTime = taskStartDateTime;
    }
    public void setTaskEndDateTime(LocalDateTime taskEndDateTime) {
        this.taskEndDateTime = taskEndDateTime;
    }
    /** Marks the task as done. */
    public void setIsComplete() {
        isCompleted = true;
    }
    /** Marks the task as not done. */
    public void setIncomplete() {
        isCompleted = false;
    }
    /** Regenerates the per-instance object ID (used after copying). */
    public void updateObjectID() {
        objectID = UUID.randomUUID().toString();
    }
/* ***********************************
* Utility methods
* ***********************************/
    /**
     * Returns true when a further occurrence of this recurring task exists:
     * non-recurring tasks never have one; with no recursUntil bound a
     * recurring task always does; otherwise the next start must still fall
     * strictly before the bound.
     *
     * NOTE(review): assumes taskStartDateTime and interval are non-null
     * whenever isRecurring and recursUntil are set -- TODO confirm callers
     * guarantee this, otherwise the plus() call throws NullPointerException.
     */
    public boolean hasNext() {
        if (isRecurring) {
            if (recursUntil != null) {
                return taskStartDateTime.plus(interval).isBefore(recursUntil);
            }
            return true;
        }
        return false;
    }
public void toNext() {
if (hasNext()) {
if (taskStartDateTime != null) {
taskStartDateTime = taskStartDateTime.plus(interval);
}
if (taskEndDateTime != null) {
taskEndDateTime = taskEndDateTime.plus(interval);
}
}
}
/* ***********************************
* Copy related methods
* ***********************************/
@Override
public Task makeCopy() {
Task taskObject = null;
try {
taskObject = (Task) super.clone();
} catch (CloneNotSupportedException e) {
e.printStackTrace();
}
taskObject.updateObjectID();
return taskObject;
}
/* ***********************************
* Display related methods
* ***********************************/
    /** True when the start date (date part only) equals today. */
    public boolean isTaskDueToday() {
        return taskStartDateTime == null ? false : checkIfTaskIsDueToday();
    }
    private boolean checkIfTaskIsDueToday() {
        return taskStartDateTime.toLocalDate().isEqual(LocalDate.now());
    }
    /** True when the start date (date part only) equals tomorrow. */
    public boolean isTaskDueTomorrow() {
        return taskStartDateTime == null ? false : checkIfTaskIsDueTomorrow();
    }
    private boolean checkIfTaskIsDueTomorrow() {
        return taskStartDateTime.toLocalDate().isEqual(LocalDate.now().plusDays(1));
    }
    public boolean isTaskDueWithinSevenDays() {
        return taskStartDateTime == null ? false : checkIfTaskIsDueWithinSevenDays();
    }
    // NOTE(review): isBefore(now + 7 days) is also true for any past date, so
    // overdue tasks count as "due within seven days" -- confirm intended.
    private boolean checkIfTaskIsDueWithinSevenDays() {
        return taskStartDateTime.toLocalDate().isBefore(LocalDate.now().plusDays(7));
    }
    // NOTE(review): returns the default-project constant regardless of this
    // task's own project name -- confirm intended.
    public String getLabelForTaskOverviewPane() {
        return DEFAULT_PROJECT;
    }
    public String toString() {
        return taskTitle;
    }
}
| |
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.profiler.sender;
import com.navercorp.pinpoint.common.util.CollectionUtils;
import com.navercorp.pinpoint.rpc.util.ListUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
/**
* @author Taejin Koo
*/
/**
 * Exposes {@link ByteBuffer} views over contiguous partitions of a single
 * backing byte array. Each partition is described by an {@link Index}
 * (start/end positions into the base buffer); the {@link Builder} enforces
 * that consecutive partitions are contiguous (each starting exactly where the
 * previous one ended).
 *
 * @author Taejin Koo
 */
public class PartitionedByteBufferLocator {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    private final byte[] baseBuffer;
    private final List<Index> partitionIndexList;
    // Total capacity covered by the partitions (first start to last end).
    private final int partitionedBufferCapacity;

    private PartitionedByteBufferLocator(byte[] buffer, List<Index> partitionIndexList) {
        if (buffer == null) {
            throw new NullPointerException("buffer must not be null.");
        }
        if (CollectionUtils.isEmpty(partitionIndexList)) {
            // Fixed message: the original blamed "buffer" for an empty index list.
            throw new NullPointerException("partitionIndexList must not be null or empty.");
        }
        Index firstPartitionIndex = ListUtils.getFirst(partitionIndexList);
        Index lastPartitionIndex = ListUtils.getLast(partitionIndexList);
        int partitionedBufferCapacity = lastPartitionIndex.getEndPosition() - firstPartitionIndex.getStartPosition();
        if (partitionedBufferCapacity > buffer.length) {
            throw new IllegalArgumentException("partitionedBufferCapacity(" + partitionedBufferCapacity + ") > bufferCapacity(" + buffer.length + ").");
        }
        if (lastPartitionIndex.getEndPosition() > buffer.length) {
            throw new IllegalArgumentException("lastPartitionEndPosition(" + lastPartitionIndex.getEndPosition() + ") > bufferCapacity(" + buffer.length + ").");
        }
        this.baseBuffer = buffer;
        // Defensive copy: later mutation of the caller's list must not
        // corrupt this locator.
        this.partitionIndexList = new ArrayList<Index>(partitionIndexList);
        this.partitionedBufferCapacity = partitionedBufferCapacity;
    }

    /** Returns the number of partitions. */
    public int getPartitionedCount() {
        return partitionIndexList.size();
    }

    /** Returns the total capacity spanned by all partitions. */
    public int getTotalByteBufferCapacity() {
        return partitionedBufferCapacity;
    }

    /**
     * Returns the capacity of a single partition.
     *
     * @throws IllegalArgumentException if the index is out of range.
     */
    public int getByteBufferCapacity(int partitionIndex) {
        checkPartitionIndex(partitionIndex);
        return partitionIndexList.get(partitionIndex).getCapacity();
    }

    /**
     * Returns the summed capacity of partitions in the given range.
     *
     * NOTE(review): the sum excludes {@code toPartitionIndex}, while
     * {@link #getByteBuffer(int, int)} includes it -- confirm this asymmetry
     * is intended before relying on it.
     */
    public int getByteBufferCapacity(int fromPartitionIndex, int toPartitionIndex) {
        checkRangeValidation(fromPartitionIndex, toPartitionIndex, getPartitionedCount());
        int capacity = 0;
        for (int i = fromPartitionIndex; i < toPartitionIndex; i++) {
            capacity += partitionIndexList.get(i).getCapacity();
        }
        return capacity;
    }

    /** Returns a ByteBuffer view over all partitions. */
    public ByteBuffer getByteBuffer() {
        return getByteBuffer(0, getPartitionedCount() - 1);
    }

    /** Returns a ByteBuffer view over a single partition. */
    public ByteBuffer getByteBuffer(int partitionIndex) {
        checkPartitionIndex(partitionIndex);
        // A single partition is the inclusive range [index, index]; delegating
        // removes the duplicated wrap/log logic the original carried here.
        return getByteBuffer(partitionIndex, partitionIndex);
    }

    /**
     * Returns a ByteBuffer view spanning the partitions from
     * {@code fromPartitionIndex} to {@code toPartitionIndex}, both inclusive.
     */
    public ByteBuffer getByteBuffer(int fromPartitionIndex, int toPartitionIndex) {
        checkRangeValidation(fromPartitionIndex, toPartitionIndex, getPartitionedCount());
        Index fromIndex = partitionIndexList.get(fromPartitionIndex);
        Index toIndex = partitionIndexList.get(toPartitionIndex);
        int startPosition = fromIndex.getStartPosition();
        int endPosition = toIndex.getEndPosition();
        logger.debug("getByteBuffer baseBuffer-length:{}, {}~{}.", baseBuffer.length, startPosition, endPosition);
        return ByteBuffer.wrap(baseBuffer, startPosition, endPosition - startPosition);
    }

    // Validates a single partition index (private helper shared by the
    // single-partition accessors).
    private void checkPartitionIndex(int partitionIndex) {
        if (partitionIndex < 0) {
            throw new IllegalArgumentException("partitionIndex = " + partitionIndex);
        }
        if (partitionIndex >= getPartitionedCount()) {
            throw new IllegalArgumentException("partitionIndex(" + partitionIndex + ") >= partitionedCount(" + getPartitionedCount() + ").");
        }
    }

    // Validates an inclusive partition range.
    private void checkRangeValidation(int fromPartitionIndex, int toPartitionIndex, int partitionedCount) {
        if (fromPartitionIndex < 0) {
            throw new IllegalArgumentException("fromPartitionIndex = " + fromPartitionIndex);
        }
        if (fromPartitionIndex > toPartitionIndex) {
            throw new IllegalArgumentException("fromPartitionIndex(" + fromPartitionIndex + ") > toPartitionIndex(" + toPartitionIndex + ").");
        }
        if (toPartitionIndex >= partitionedCount) {
            throw new IllegalArgumentException("toPartitionIndex(" + toPartitionIndex + ") >= partitionedCount(" + partitionedCount + ").");
        }
    }

    /** Returns true when the given index refers to the last partition. */
    public boolean isLastPartitionIndex(int partitionIndex) {
        return partitionIndex == partitionIndexList.size() - 1;
    }

    @Override
    public String toString() {
        return "PartitionedByteBufferLocator [baseBuffer-length=" + baseBuffer.length + ", partitionedCount=" + partitionIndexList.size()
                + ", partitionedBufferCapacity=" + partitionedBufferCapacity + "]";
    }

    /**
     * Accumulates contiguous partition indexes and the backing buffer, then
     * builds the locator.
     */
    public static class Builder {

        private byte[] buffer;
        private List<Index> indexList = new ArrayList<Index>();

        public void setBuffer(byte[] buffer) {
            this.buffer = buffer;
        }

        /** Adds each index in order; a null list is silently ignored. */
        public void addIndex(List<Index> indexList) {
            if (indexList == null) {
                return;
            }
            for (Index index : indexList) {
                addIndex(index);
            }
        }

        public void addIndex(int startBufferPosition, int endBufferPosition) {
            addIndex(new Index(startBufferPosition, endBufferPosition));
        }

        /**
         * Adds an index; it must start exactly where the previous one ended
         * (stream-style contiguity).
         */
        public void addIndex(Index index) {
            Index lastPartitionIndex = ListUtils.getLast(indexList);
            int partitionedEndPosition = 0;
            if (lastPartitionIndex != null) {
                partitionedEndPosition = lastPartitionIndex.getEndPosition();
            }
            // Renamed from the original's misspelled "chechRangeValidation".
            checkRangeValidation(index, partitionedEndPosition);
            indexList.add(index);
        }

        private void checkRangeValidation(Index index, int partitionedEndPosition) {
            int startPosition = index.getStartPosition();
            int endPosition = index.getEndPosition();
            if (startPosition < 0) {
                throw new IllegalArgumentException("startPosition = " + startPosition);
            }
            if (startPosition > endPosition) {
                throw new IllegalArgumentException("startPosition(" + startPosition + ") > endPosition(" + endPosition + ").");
            }
            if (startPosition != partitionedEndPosition) {
                throw new IllegalArgumentException("support only stream buffer index. startPosition(" + startPosition + ") != partitionedEndPosition(" + partitionedEndPosition + ").");
            }
        }

        public PartitionedByteBufferLocator build() {
            return new PartitionedByteBufferLocator(buffer, indexList);
        }
    }

    /** Immutable [startPosition, endPosition) slice descriptor. */
    static class Index {

        private final int startPosition;
        private final int endPosition;
        private final int capacity;

        Index(int startPosition, int endPosition) {
            this.startPosition = startPosition;
            this.endPosition = endPosition;
            this.capacity = endPosition - startPosition;
        }

        public int getStartPosition() {
            return startPosition;
        }

        public int getEndPosition() {
            return endPosition;
        }

        public int getCapacity() {
            return capacity;
        }

        @Override
        public String toString() {
            return "Index [startPosition=" + startPosition + ", endPosition=" + endPosition + ", capacity=" + capacity + "]";
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nutch.parse.swf;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.nutch.metadata.Metadata;
import org.apache.nutch.net.protocols.Response;
import org.apache.nutch.parse.*;
import org.apache.nutch.protocol.Content;
import org.apache.nutch.util.NutchConfiguration;
import org.apache.hadoop.conf.Configuration;
import com.anotherbigidea.flash.interfaces.*;
import com.anotherbigidea.flash.readers.*;
import com.anotherbigidea.flash.structs.*;
import com.anotherbigidea.flash.writers.SWFActionBlockImpl;
import com.anotherbigidea.flash.writers.SWFTagTypesImpl;
import com.anotherbigidea.io.InStream;
/**
* Parser for Flash SWF files. Loosely based on the sample in JavaSWF
* distribution.
*/
/**
 * Parser for Flash SWF files. Loosely based on the sample in JavaSWF
 * distribution. Extracts visible text, ActionScript strings and outlinks.
 */
public class SWFParser implements Parser {
  public static final Logger LOG = LoggerFactory.getLogger("org.apache.nutch.parse.swf");

  private Configuration conf = null;

  public SWFParser() {}

  public void setConf(Configuration conf) {
    this.conf = conf;
  }

  public Configuration getConf() {
    return conf;
  }

  /**
   * Parses SWF content: runs it through the SWFReader/TagParser pipeline,
   * collects text plus ActionScript strings, and harvests outlinks both from
   * explicit getURL actions and from URLs embedded in the extracted text.
   * Returns a failure ParseResult for truncated or unparsable input.
   */
  public ParseResult getParse(Content content) {
    String text = null;
    Vector<Outlink> outlinks = new Vector<Outlink>();
    try {
      byte[] raw = content.getContent();
      // Refuse truncated content: the SWF reader cannot handle partial files.
      String contentLength = content.getMetadata().get(Response.CONTENT_LENGTH);
      if (contentLength != null && raw.length != Integer.parseInt(contentLength)) {
        return new ParseStatus(ParseStatus.FAILED, ParseStatus.FAILED_TRUNCATED,
            "Content truncated at " + raw.length +
            " bytes. Parser can't handle incomplete files.").getEmptyParseResult(content.getUrl(), getConf());
      }
      ExtractText extractor = new ExtractText();
      // TagParser implements SWFTags and drives a SWFTagTypes interface
      TagParser parser = new TagParser(extractor);
      // use this instead to debug the file
      // TagParser parser = new TagParser( new SWFTagDumper(true, true) );
      // SWFReader reads an input file and drives a SWFTags interface
      SWFReader reader = new SWFReader(parser, new InStream(raw));
      // read the input SWF file and pass it through the interface pipeline
      reader.readFile();
      text = extractor.getText();
      // Append ActionScript strings, separated from the body text.
      String atext = extractor.getActionText();
      if (atext != null && atext.length() > 0) text += "\n--------\n" + atext;
      // harvest potential outlinks
      String[] links = extractor.getUrls();
      for (int i = 0; i < links.length; i++) {
        Outlink out = new Outlink(links[i], "");
        outlinks.add(out);
      }
      Outlink[] olinks = OutlinkExtractor.getOutlinks(text, conf);
      if (olinks != null) for (int i = 0; i < olinks.length; i++) {
        outlinks.add(olinks[i]);
      }
    } catch (Exception e) { // run time exception
      LOG.error("Error, runtime exception: ", e);
      return new ParseStatus(ParseStatus.FAILED, "Can't be handled as SWF document. " + e).getEmptyParseResult(content.getUrl(), getConf());
    }
    if (text == null) text = "";
    Outlink[] links = (Outlink[]) outlinks.toArray(new Outlink[outlinks.size()]);
    ParseData parseData = new ParseData(ParseStatus.STATUS_SUCCESS, "", links,
                                        content.getMetadata());
    return ParseResult.createParseResult(content.getUrl(), new ParseImpl(text, parseData));
  }

  /**
   * Arguments are: 0. Name of input SWF file.
   */
  public static void main(String[] args) throws IOException {
    byte[] buf = readFully(args[0]);
    SWFParser parser = new SWFParser();
    ParseResult parseResult = parser.getParse(new Content("file:" + args[0], "file:" + args[0],
                                                          buf, "application/x-shockwave-flash",
                                                          new Metadata(),
                                                          NutchConfiguration.create()));
    Parse p = parseResult.get("file:" + args[0]);
    System.out.println("Parse Text:");
    System.out.println(p.getText());
    System.out.println("Parse Data:");
    System.out.println(p.getData());
  }

  // Reads a whole file into memory. The original code used available() plus a
  // single read(), neither of which is guaranteed to cover the entire file,
  // and leaked the stream on exception; this loops until EOF and always closes.
  private static byte[] readFully(String fileName) throws IOException {
    FileInputStream in = new FileInputStream(fileName);
    try {
      java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
      byte[] chunk = new byte[8192];
      int n;
      while ((n = in.read(chunk)) != -1) {
        out.write(chunk, 0, n);
      }
      return out.toByteArray();
    } finally {
      in.close();
    }
  }
}
/**
* Shows how to parse a Flash movie and extract all the text in Text symbols and
* the initial text in Edit Fields. Output is to System.out.
*
* A "pipeline" is set up in the main method:
*
* SWFReader-->TagParser-->ExtractText
*
* SWFReader reads the input SWF file and separates out the header and the tags.
* The separated contents are passed to TagParser which parses out the
* individual tag types and passes them to ExtractText.
*
* ExtractText extends SWFTagTypesImpl and overrides some methods.
*/
class ExtractText extends SWFTagTypesImpl {
  /**
   * Store font info keyed by the font symbol id. Each entry is an int[] of
   * character codes for the corresponding font glyphs (An empty array denotes a
   * System Font).
   */
  protected HashMap<Integer, int[]> fontCodes = new HashMap<Integer, int[]>();

  // Text fragments collected from text symbols and edit fields, in order.
  public ArrayList<String> strings = new ArrayList<String>();

  // Free-text strings harvested from ActionScript (deduplicated).
  public HashSet<String> actionStrings = new HashSet<String>();

  // URLs harvested from getURL actions.
  public ArrayList<String> urls = new ArrayList<String>();

  public ExtractText() {
    super(null);
  }

  /** Returns all collected text fragments joined with single spaces. */
  public String getText() {
    StringBuffer res = new StringBuffer();
    Iterator<String> it = strings.iterator();
    while (it.hasNext()) {
      if (res.length() > 0) res.append(' ');
      res.append(it.next());
    }
    return res.toString();
  }

  /** Returns the ActionScript strings, sorted and newline-separated. */
  public String getActionText() {
    StringBuffer res = new StringBuffer();
    String[] strings = (String[])actionStrings.toArray(new String[actionStrings.size()]);
    Arrays.sort(strings);
    for (int i = 0; i < strings.length; i++) {
      if (i > 0) res.append('\n');
      res.append(strings[i]);
    }
    return res.toString();
  }

  /** Returns the harvested URLs as an array. */
  public String[] getUrls() {
    String[] res = new String[urls.size()];
    int i = 0;
    Iterator<String> it = urls.iterator();
    while (it.hasNext()) {
      res[i] = (String) it.next();
      i++;
    }
    return res;
  }

  // DefineFontInfo2 carries an extra language code (last arg); the glyph-code
  // handling is identical, so delegate.
  public void tagDefineFontInfo2(int arg0, String arg1, int arg2, int[] arg3, int arg4) throws IOException {
    tagDefineFontInfo(arg0, arg1, arg2, arg3);
  }

  /**
   * SWFTagTypes interface Save the Text Font character code info
   */
  public void tagDefineFontInfo(int fontId, String fontName, int flags, int[] codes) throws IOException {
    // System.out.println("-defineFontInfo id=" + fontId + ", name=" +
    // fontName);
    fontCodes.put(new Integer(fontId), codes);
  }

  // XXX too much hassle for too little return ... we cannot guess character
  // XXX codes anyway, so we just give up.
  /*
   * public SWFVectors tagDefineFont(int arg0, int arg1) throws IOException {
   * return null;
   * }
   */

  /**
   * SWFTagTypes interface. Save the character code info.
   */
  public SWFVectors tagDefineFont2(int id, int flags, String name, int numGlyphs, int ascent, int descent, int leading,
      int[] codes, int[] advances, Rect[] bounds, int[] kernCodes1, int[] kernCodes2, int[] kernAdjustments)
      throws IOException {
    // System.out.println("-defineFontInfo id=" + id + ", name=" + name);
    // A null codes array is normalized to empty, which marks a System Font.
    fontCodes.put(new Integer(id), (codes != null) ? codes : new int[0]);
    return null;
  }

  /**
   * SWFTagTypes interface. Dump any initial text in the field.
   */
  public void tagDefineTextField(int fieldId, String fieldName, String initialText, Rect boundary, int flags,
      AlphaColor textColor, int alignment, int fontId, int fontSize, int charLimit, int leftMargin,
      int rightMargin, int indentation, int lineSpacing) throws IOException {
    if (initialText != null) {
      strings.add(initialText);
    }
  }

  /**
   * SWFTagTypes interface
   */
  public SWFText tagDefineText(int id, Rect bounds, Matrix matrix) throws IOException {
    lastBounds = curBounds;
    curBounds = bounds;
    return new TextDumper();
  }

  // Bounds of the current and previous text symbols (tracked, not yet used
  // for layout decisions).
  Rect lastBounds = null;
  Rect curBounds = null;

  /**
   * SWFTagTypes interface
   */
  public SWFText tagDefineText2(int id, Rect bounds, Matrix matrix) throws IOException {
    lastBounds = curBounds;
    curBounds = bounds;
    return new TextDumper();
  }

  /** Receives glyph runs for one text symbol and appends them to strings. */
  public class TextDumper implements SWFText {
    protected Integer fontId;
    protected boolean firstY = true;

    public void font(int fontId, int textHeight) {
      this.fontId = new Integer(fontId);
    }

    public void setY(int y) {
      if (firstY)
        firstY = false;
      else strings.add("\n"); // Change in Y - dump a new line
    }

    /*
     * There are some issues with this method: sometimes SWF files define their
     * own font, so short of OCR we cannot guess what is the glyph code -> character
     * mapping. Additionally, some files don't use literal space character, instead
     * they adjust glyphAdvances. We don't handle it at all - in such cases the text
     * will be all glued together.
     */
    public void text(int[] glyphIndices, int[] glyphAdvances) {
      // System.out.println("-text id=" + fontId);
      int[] codes = (int[]) fontCodes.get(fontId);
      if (codes == null) {
        // unknown font, better not guess
        strings.add("\n**** ?????????????? ****\n");
        return;
      }
      // --Translate the glyph indices to character codes
      char[] chars = new char[glyphIndices.length];
      for (int i = 0; i < chars.length; i++) {
        int index = glyphIndices[i];
        if (index >= codes.length) // System Font ?
        {
          chars[i] = (char) index;
        } else {
          chars[i] = (char) (codes[index]);
        }
        // System.out.println("-ch[" + i + "]='" + chars[i] + "'(" +
        // (int)chars[i] + ") +" + glyphAdvances[i]);
      }
      strings.add(new String(chars));
    }

    public void color(Color color) {}

    public void setX(int x) {}

    public void done() {
      strings.add("\n");
    }
  }

  /** Routes DoAction blocks through the ActionScript string harvester. */
  public SWFActions tagDoAction() throws IOException {
    // ActionTextWriter actions = new ActionTextWriter(new
    // PrintWriter(System.out));
    NutchSWFActions actions = new NutchSWFActions(actionStrings, urls);
    return actions;
  }

  /** Routes DoInitAction blocks through the ActionScript string harvester. */
  public SWFActions tagDoInitAction(int arg0) throws IOException {
    // ActionTextWriter actions = new ActionTextWriter(new
    // PrintWriter(System.out));
    NutchSWFActions actions = new NutchSWFActions(actionStrings, urls);
    return actions;
  }

  public void tagGeneratorFont(byte[] arg0) throws IOException {
    // TODO Auto-generated method stub
    super.tagGeneratorFont(arg0);
  }

  public void tagGeneratorText(byte[] arg0) throws IOException {
    // TODO Auto-generated method stub
    super.tagGeneratorText(arg0);
  }
}
/**
* ActionScript parser. This parser tries to extract free text embedded inside
* the script, but without polluting it too much with names of variables,
* methods, etc. Not ideal, but it works.
*/
class NutchSWFActions extends SWFActionBlockImpl implements SWFActions {
  // Harvested free-text strings; identifier-like values seen on the action
  // stack are removed from this set so only real text remains.
  private HashSet<String> strings = null;

  // URLs collected from getURL actions.
  private ArrayList<String> urls = null;

  // Constant pool installed by lookupTable(); indexed by lookup()/push(int).
  String[] dict = null;

  // Bounded model of the ActionScript operand stack (see SmallStack).
  Stack<Object> stack = null;

  public NutchSWFActions(HashSet<String> strings, ArrayList<String> urls) {
    this.strings = strings;
    this.urls = urls;
    stack = new SmallStack(100, strings);
  }

  /** Adds every constant-pool entry to the harvested strings. */
  public void lookupTable(String[] values) throws IOException {
    for (int i = 0; i < values.length; i++) {
      if (!strings.contains(values[i])) strings.add(values[i]);
    }
    super.lookupTable(values);
    dict = values;
  }

  public void defineLocal() throws IOException {
    stack.pop();
    super.defineLocal();
  }

  // NOTE(review): deliberately does not delegate to super, unlike the
  // String/String overload below -- confirm intended.
  public void getURL(int vars, int mode) {
    // System.out.println("-getURL: vars=" + vars + ", mode=" + mode);
  }

  /** Records the URL as an outlink; url/target are not free text. */
  public void getURL(String url, String target) throws IOException {
    // System.out.println("-getURL: url=" + url + ", target=" + target);
    stack.push(url);
    stack.push(target);
    strings.remove(url);
    strings.remove(target);
    urls.add(url);
    super.getURL(url, target);
  }

  public SWFActionBlock.TryCatchFinally _try(String var) throws IOException {
    // stack.push(var);
    strings.remove(var);
    return super._try(var);
  }

  public void comment(String var) throws IOException {
    // stack.push(var);
    strings.remove(var);
    super.comment(var);
  }

  // NOTE(review): "goToFrame" is not a SWFActionBlockImpl override (the real
  // method is gotoFrame(String), also defined near the bottom of this class);
  // this looks like dead code -- confirm before removing.
  public void goToFrame(String var) throws IOException {
    stack.push(var);
    strings.remove(var);
    super.gotoFrame(var);
  }

  public void ifJump(String var) throws IOException {
    strings.remove(var);
    super.ifJump(var);
  }

  public void jump(String var) throws IOException {
    strings.remove(var);
    super.jump(var);
  }

  public void jumpLabel(String var) throws IOException {
    strings.remove(var);
    super.jumpLabel(var);
  }

  /** Pushes the constant-pool entry for the given index, bounds-checked. */
  public void lookup(int var) throws IOException {
    if (dict != null && var >= 0 && var < dict.length) {
      stack.push(dict[var]);
    }
    super.lookup(var);
  }

  public void push(String var) throws IOException {
    stack.push(var);
    strings.remove(var);
    super.push(var);
  }

  public void setTarget(String var) throws IOException {
    stack.push(var);
    strings.remove(var);
    super.setTarget(var);
  }

  // Function names and parameter names are identifiers, not free text.
  public SWFActionBlock startFunction(String var, String[] params) throws IOException {
    stack.push(var);
    strings.remove(var);
    if (params != null) {
      for (int i = 0; i < params.length; i++) {
        strings.remove(params[i]);
      }
    }
    return this;
  }

  public SWFActionBlock startFunction2(String var, int arg1, int arg2, String[] params, int[] arg3) throws IOException {
    stack.push(var);
    strings.remove(var);
    if (params != null) {
      for (int i = 0; i < params.length; i++) {
        strings.remove(params[i]);
      }
    }
    return this;
  }

  public void waitForFrame(int num, String var) throws IOException {
    stack.push(var);
    strings.remove(var);
    super.waitForFrame(num, var);
  }

  public void waitForFrame(String var) throws IOException {
    stack.push(var);
    strings.remove(var);
    super.waitForFrame(var);
  }

  /** End of the action block: anything still on the stack is not free text. */
  public void done() throws IOException {
    while (stack.size() > 0) {
      strings.remove(stack.pop());
    }
  }

  public SWFActionBlock start(int arg0, int arg1) throws IOException {
    return this;
  }

  public SWFActionBlock start(int arg0) throws IOException {
    return this;
  }

  public void add() throws IOException {
    super.add();
  }

  public void asciiToChar() throws IOException {
    super.asciiToChar();
  }

  public void asciiToCharMB() throws IOException {
    super.asciiToCharMB();
  }

  /** Integer push may reference the constant pool, like lookup(). */
  public void push(int var) throws IOException {
    if (dict != null && var >= 0 && var < dict.length) {
      stack.push(dict[var]);
    }
    super.push(var);
  }

  public void callFunction() throws IOException {
    strings.remove(stack.pop());
    super.callFunction();
  }

  public void callMethod() throws IOException {
    strings.remove(stack.pop());
    super.callMethod();
  }

  public void getMember() throws IOException {
    // 0: name
    String val = (String) stack.pop();
    strings.remove(val);
    super.getMember();
  }

  public void setMember() throws IOException {
    // 0: value -1: name
    stack.pop(); // value
    String name = (String) stack.pop();
    strings.remove(name);
    super.setMember();
  }

  public void setProperty() throws IOException {
    super.setProperty();
  }

  public void setVariable() throws IOException {
    super.setVariable();
  }

  public void call() throws IOException {
    strings.remove(stack.pop());
    super.call();
  }

  public void setTarget() throws IOException {
    strings.remove(stack.pop());
    super.setTarget();
  }

  public void pop() throws IOException {
    strings.remove(stack.pop());
    super.pop();
  }

  public void push(boolean arg0) throws IOException {
    stack.push("" + arg0);
    super.push(arg0);
  }

  public void push(double arg0) throws IOException {
    stack.push("" + arg0);
    super.push(arg0);
  }

  public void push(float arg0) throws IOException {
    stack.push("" + arg0);
    super.push(arg0);
  }

  public void pushNull() throws IOException {
    stack.push("");
    super.pushNull();
  }

  public void pushRegister(int arg0) throws IOException {
    stack.push("" + arg0);
    super.pushRegister(arg0);
  }

  public void pushUndefined() throws IOException {
    stack.push("???");
    super.pushUndefined();
  }

  public void getProperty() throws IOException {
    stack.pop();
    super.getProperty();
  }

  public void getVariable() throws IOException {
    strings.remove(stack.pop());
    super.getVariable();
  }

  public void gotoFrame(boolean arg0) throws IOException {
    stack.push("" + arg0);
    super.gotoFrame(arg0);
  }

  public void gotoFrame(int arg0) throws IOException {
    stack.push("" + arg0);
    super.gotoFrame(arg0);
  }

  public void gotoFrame(String arg0) throws IOException {
    stack.push("" + arg0);
    strings.remove(arg0);
    super.gotoFrame(arg0);
  }

  public void newObject() throws IOException {
    stack.pop();
    super.newObject();
  }

  public SWFActionBlock startWith() throws IOException {
    return this;
  }
}
/*
* Small bottom-less stack.
*/
/*
 * Small bottom-less stack: capped in size (oldest entries are evicted) and
 * tolerant of pops on an empty stack (returns null instead of throwing).
 */
class SmallStack extends Stack<Object> {

  private static final long serialVersionUID = 1L;

  // Eviction threshold: once size() exceeds this, the oldest entry is dropped.
  private int maxSize;

  // Evicted entries are also removed from this harvested-strings set.
  private HashSet<String> strings = null;

  public SmallStack(int maxSize, HashSet<String> strings) {
    this.maxSize = maxSize;
    this.strings = strings;
  }

  public Object push(Object o) {
    // limit max size - evict the oldest element and forget its string.
    if (this.size() > maxSize) {
      // The original cast the evicted element to String, which would throw
      // ClassCastException for non-String entries; Set.remove(Object) makes
      // the cast unnecessary and behaves identically for String entries.
      strings.remove(remove(0));
    }
    return super.push(o);
  }

  public Object pop() {
    // tolerate underruns
    if (this.size() == 0) {
      return null;
    }
    return super.pop();
  }
}
| |
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.k2crypto.storage.driver;
import com.google.k2crypto.storage.IllegalAddressException;
import java.net.URI;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Utility methods for checking and manipulating storage URI addresses.
*
* <p>The {@link #encodeConvenience(String)} and
* {@link #decodeUnreserved(String)} utility methods are required because
* {@link java.net.URLEncoder} and {@link java.net.URLDecoder} do not provide
* the desired functionality. Details are provided in the documentation of the
* individual methods. It boils down to {@link java.net.URLEncoder} and
* {@link java.net.URLDecoder} being designed to encode and decode ALL
* percent-encoded characters and ONLY within the query portion of a URL. They
* are NOT designed for operating on an entire URI, which is what we want to do.
*
* @author darylseah@gmail.com (Daryl Seah)
*/
public class AddressUtilities {
  // Prevent instantiation
  private AddressUtilities() {}
  // Regex matching percent ('%') characters in the address string that are NOT
  // of the format %[HEX][HEX] and spaces (' ').
  private static final Pattern CONVENIENCE_ENCODABLE =
      Pattern.compile("\\%(?![0-9a-fA-F][0-9a-fA-F])|\\ ");
  // Buffer space for encoding expansion
  private static final int ENCODE_ALLOWANCE = 16;
  // Table converting decimal to hex
  // (index = nibble value 0-15, entry = the corresponding hex digit).
  private static final char[] HEX_TABLE =
      {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'};
/**
* Performs safe URI-escaping of the '%' and ' ' characters in the string
* for convenience sake. This is NOT a complete percent-encoding procedure.
* The idea is to escape these characters (and only these characters) across
* the entire URI string so that addresses containing them can remain in a
* readable form in user code.
*
* <p>The Java-included {@link java.net.URLEncoder} class has similar
* functionality, but its encoding is designed specifically for the query
* portion of a URL, and does not work universally across an entire URI.
* For example, it will encode {@code "/my keys/bank"} as
* {@code "%2Fmy+keys%2Fbank"}, whereas this method will encode it as
* {@code "/my%20keys/bank"}. The escaped backslashes and conversion of
* spaces to {@code '+'} will effectively change the meaning of the address,
* and this is undesirable behavior.
*
* @param str String to encode.
*
* @return the string with stray percents and spaces percent-encoded.
*/
public static String encodeConvenience(String str) {
Matcher m = CONVENIENCE_ENCODABLE.matcher(str);
boolean found = m.find();
if (found) {
final char[] hexTable = HEX_TABLE;
StringBuilder sb = new StringBuilder(str.length() + ENCODE_ALLOWANCE);
int findStart = 0;
do {
final int pos = m.start();
sb.append(str, findStart, pos);
// Percent-encode the encodable character
final char c = str.charAt(pos);
sb.append('%').append(hexTable[c >>> 4]).append(hexTable[c & 7]);
findStart = m.end();
found = m.find();
} while (found);
sb.append(str, findStart, str.length());
return sb.toString();
}
return str;
}
// Regex matching percent-encoded URI-unreserved characters
// (i.e. letters, digits, '-', '.', '_' and '~')
private static final Pattern ENCODED_UNRESERVED = Pattern.compile(
"\\%(?:4[1-9A-F]|5[0-9A]|6[1-9A-F]|7[0-9A]|3[0-9]|2D|2E|5F|7E)", Pattern.CASE_INSENSITIVE);
/**
* Decodes any percent-encoded URI-unreserved characters in the URI address.
*
* <p>As mentioned in <a href="http://tools.ietf.org/html/rfc3986#section-2.3"
* target="_blank">RFC 3986, Section 2.3</a>, any unreserved characters in a
* URI should be decoded before the URI can be safely compared or normalized.
* Unfortunately, Java's URI implementation does not do this for us.
*
* @param address URI to decode.
*
* @return a new URI with all unreserved characters decoded, or the same
* URI if there are no unreserved characters to decode.
*
* @see #decodeUnreserved(String)
*/
public static URI decodeUnreserved(URI address) {
String addressStr = address.toString();
String decoded = decodeUnreserved(addressStr);
return decoded == addressStr ? address : URI.create(decoded);
}
/**
* Decodes any percent-encoded URI-unreserved characters in the string.
*
* <p>The Java-included {@link java.net.URLDecoder} class has similar
* functionality, except it decodes ALL percent-encoded characters; it is
* designed primarily for decoding individual key/value strings in the query
* portion of a URL after they have been extracted. For example, it will
* decode {@code "/my+keys%3F/%62%61%6E%6B"} as {@code "/my keys?/bank"},
* which will result in an invalid URI because {@code "/bank"} would be
* interpreted as a query. The {@code '+'} symbol should also only be
* interpreted as a space in the query portion, and not in any other part of
* a URI. This method will decode the string as {@code "/my+keys%3F/bank"},
* preserving the meaning of the address.
*
* @param str String to decode.
*
* @return the string with all unreserved characters decoded.
*/
public static String decodeUnreserved(String str) {
Matcher m = ENCODED_UNRESERVED.matcher(str);
boolean found = m.find();
if (found) {
StringBuilder sb = new StringBuilder(str.length());
int findStart = 0;
do {
final int pos = m.start();
sb.append(str, findStart, pos);
// Assume that first hex is always a digit (restricted to the regex)
int decoded = ((str.charAt(pos + 1) - '0') << 4);
// Complete decoding by checking second hex
final char hex = str.charAt(pos + 2);
decoded += hex - (hex <= '9' ? '0' : (hex <= 'Z' ? 'A' : 'a') - 10);
sb.append((char) decoded);
findStart = m.end();
found = m.find();
} while (found);
sb.append(str, findStart, str.length());
return sb.toString();
}
return str;
}
/**
* Checks that the address has no authority component.
*
* @param address Address to check.
* @throws IllegalAddressException if the authority component exists.
*/
public static void checkNoAuthority(URI address) throws IllegalAddressException {
if (address.getAuthority() != null) {
throw new IllegalAddressException(address,
IllegalAddressException.Reason.AUTHORITY_UNSUPPORTED, null);
}
}
/**
* Checks that the address has no user component.
*
* @param address Address to check.
* @throws IllegalAddressException if the user component exists.
*/
public static void checkNoUser(URI address) throws IllegalAddressException {
String user = address.getUserInfo();
if (user != null && user.length() > 0) {
throw new IllegalAddressException(address, IllegalAddressException.Reason.USER_UNSUPPORTED,
null);
}
}
/**
* Checks that the address has no host/port component.
*
* @param address Address to check.
* @throws IllegalAddressException if the host or port component exists.
*/
public static void checkNoHostPort(URI address) throws IllegalAddressException {
if (address.getHost() != null || address.getPort() >= 0) {
throw new IllegalAddressException(address,
IllegalAddressException.Reason.HOST_PORT_UNSUPPORTED, null);
}
}
/**
* Checks that the address has no path component.
*
* @param address Address to check.
* @throws IllegalAddressException if the path component exists.
*/
public static void checkNoPath(URI address) throws IllegalAddressException {
String path = address.getPath();
if (path != null && path.length() > 0) {
throw new IllegalAddressException(address, IllegalAddressException.Reason.PATH_UNSUPPORTED,
null);
}
}
/**
* Checks that the address has no query component.
*
* @param address Address to check.
* @throws IllegalAddressException if the query component exists.
*/
public static void checkNoQuery(URI address) throws IllegalAddressException {
String query = address.getQuery();
if (query != null && query.length() > 0) {
throw new IllegalAddressException(address, IllegalAddressException.Reason.QUERY_UNSUPPORTED,
null);
}
}
/**
* Checks that the address has no fragment component.
*
* @param address Address to check.
* @throws IllegalAddressException if the fragment component exists.
*/
public static void checkNoFragment(URI address) throws IllegalAddressException {
String fragment = address.getFragment();
if (fragment != null && fragment.length() > 0) {
throw new IllegalAddressException(address,
IllegalAddressException.Reason.FRAGMENT_UNSUPPORTED, null);
}
}
/**
* Obtains the host from the URI address.
*
* @param address Address to obtain the host from.
* @throws IllegalAddressException if the address is missing a host.
*/
public static String extractHost(URI address) throws IllegalAddressException {
String host = address.getHost();
if (host == null || host.length() == 0) {
throw new IllegalAddressException(address, IllegalAddressException.Reason.MISSING_HOST_PORT,
null);
}
return host;
}
/**
* Obtains the raw path from the URI address.
*
* @param address Address to obtain the path from.
* @throws IllegalAddressException if the address is missing a path.
*/
public static String extractRawPath(URI address) throws IllegalAddressException {
String path = address.getRawPath();
if (path == null || path.length() == 0) {
throw new IllegalAddressException(address, IllegalAddressException.Reason.MISSING_PATH, null);
}
return path;
}
/**
* Obtains the raw query from the URI address.
*
* @param address Address to obtain the query from.
* @throws IllegalAddressException if the address is missing a query.
*/
public static String extractRawQuery(URI address) throws IllegalAddressException {
String query = address.getRawQuery();
if (query == null || query.length() == 0) {
throw new IllegalAddressException(address, IllegalAddressException.Reason.MISSING_QUERY,
null);
}
return query;
}
/**
* Obtains the fragment from the URI address.
*
* @param address Address to obtain the fragment from.
* @throws IllegalAddressException if the address is missing a fragment.
*/
public static String extractFragment(URI address) throws IllegalAddressException {
String frag = address.getFragment();
if (frag == null || frag.length() == 0) {
throw new IllegalAddressException(address, IllegalAddressException.Reason.MISSING_FRAGMENT,
null);
}
return frag;
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.api.mgmt;
import static org.camunda.bpm.engine.test.api.runtime.TestOrderingUtil.batchById;
import static org.camunda.bpm.engine.test.api.runtime.TestOrderingUtil.inverted;
import static org.camunda.bpm.engine.test.api.runtime.TestOrderingUtil.verifySorting;
import static org.hamcrest.core.IsCollectionContaining.hasItems;
import java.util.ArrayList;
import java.util.List;
import org.camunda.bpm.engine.HistoryService;
import org.camunda.bpm.engine.ManagementService;
import org.camunda.bpm.engine.RuntimeService;
import org.camunda.bpm.engine.batch.Batch;
import org.camunda.bpm.engine.batch.BatchQuery;
import org.camunda.bpm.engine.exception.NotValidException;
import org.camunda.bpm.engine.exception.NullValueException;
import org.camunda.bpm.engine.test.ProcessEngineRule;
import org.camunda.bpm.engine.test.api.runtime.migration.MigrationTestRule;
import org.camunda.bpm.engine.test.api.runtime.migration.batch.BatchMigrationHelper;
import org.camunda.bpm.engine.test.util.ProvidedProcessEngineRule;
import org.hamcrest.CoreMatchers;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.RuleChain;
/**
* @author Thorben Lindhauer
*
*/
public class BatchQueryTest {

  protected ProcessEngineRule engineRule = new ProvidedProcessEngineRule();
  protected MigrationTestRule migrationRule = new MigrationTestRule(engineRule);
  protected BatchMigrationHelper helper = new BatchMigrationHelper(engineRule, migrationRule);

  @Rule
  public RuleChain ruleChain = RuleChain.outerRule(engineRule).around(migrationRule);

  protected RuntimeService runtimeService;
  protected ManagementService managementService;
  protected HistoryService historyService;

  @Before
  public void initServices() {
    runtimeService = engineRule.getRuntimeService();
    managementService = engineRule.getManagementService();
    historyService = engineRule.getHistoryService();
  }

  @After
  public void removeBatches() {
    // Clean up so batches don't leak between test methods.
    helper.removeAllRunningAndHistoricBatches();
  }

  @Test
  public void testBatchQuery() {
    // given
    Batch batch1 = helper.migrateProcessInstancesAsync(1);
    Batch batch2 = helper.migrateProcessInstancesAsync(1);

    // when
    List<Batch> list = managementService.createBatchQuery().list();

    // then
    Assert.assertEquals(2, list.size());

    List<String> batchIds = new ArrayList<String>();
    for (Batch resultBatch : list) {
      batchIds.add(resultBatch.getId());
    }

    Assert.assertTrue(batchIds.contains(batch1.getId()));
    Assert.assertTrue(batchIds.contains(batch2.getId()));
  }

  @Test
  public void testBatchQueryResult() {
    // given
    Batch batch = helper.migrateProcessInstancesAsync(1);

    // when
    Batch resultBatch = managementService.createBatchQuery().singleResult();

    // then
    // FIX: assert the QUERY RESULT is non-null, not the fixture batch we just
    // created (which can never be null). The old assertion guarded nothing and
    // a missing result would have surfaced as an NPE on the next line.
    Assert.assertNotNull(resultBatch);
    Assert.assertEquals(batch.getId(), resultBatch.getId());
    Assert.assertEquals(batch.getBatchJobDefinitionId(), resultBatch.getBatchJobDefinitionId());
    Assert.assertEquals(batch.getMonitorJobDefinitionId(), resultBatch.getMonitorJobDefinitionId());
    Assert.assertEquals(batch.getSeedJobDefinitionId(), resultBatch.getSeedJobDefinitionId());
    Assert.assertEquals(batch.getTenantId(), resultBatch.getTenantId());
    Assert.assertEquals(batch.getType(), resultBatch.getType());
    Assert.assertEquals(batch.getBatchJobsPerSeed(), resultBatch.getBatchJobsPerSeed());
    Assert.assertEquals(batch.getInvocationsPerBatchJob(), resultBatch.getInvocationsPerBatchJob());
    Assert.assertEquals(batch.getTotalJobs(), resultBatch.getTotalJobs());
    Assert.assertEquals(batch.getJobsCreated(), resultBatch.getJobsCreated());
    Assert.assertEquals(batch.isSuspended(), resultBatch.isSuspended());
  }

  @Test
  public void testBatchQueryById() {
    // given
    Batch batch1 = helper.migrateProcessInstancesAsync(1);
    helper.migrateProcessInstancesAsync(1);

    // when
    Batch resultBatch = managementService.createBatchQuery().batchId(batch1.getId()).singleResult();

    // then
    Assert.assertNotNull(resultBatch);
    Assert.assertEquals(batch1.getId(), resultBatch.getId());
  }

  @Test
  public void testBatchQueryByIdNull() {
    try {
      managementService.createBatchQuery().batchId(null).singleResult();
      Assert.fail("exception expected");
    }
    catch (NullValueException e) {
      Assert.assertThat(e.getMessage(), CoreMatchers.containsString("Batch id is null"));
    }
  }

  @Test
  public void testBatchQueryByType() {
    // given
    Batch batch1 = helper.migrateProcessInstancesAsync(1);
    helper.migrateProcessInstancesAsync(1);

    // when
    long count = managementService.createBatchQuery().type(batch1.getType()).count();

    // then both batches share the migration type
    Assert.assertEquals(2, count);
  }

  @Test
  public void testBatchQueryByNonExistingType() {
    // given
    helper.migrateProcessInstancesAsync(1);

    // when
    long count = managementService.createBatchQuery().type("foo").count();

    // then
    Assert.assertEquals(0, count);
  }

  @Test
  public void testBatchQueryByTypeNull() {
    try {
      managementService.createBatchQuery().type(null).singleResult();
      Assert.fail("exception expected");
    }
    catch (NullValueException e) {
      Assert.assertThat(e.getMessage(), CoreMatchers.containsString("Type is null"));
    }
  }

  @Test
  public void testBatchQueryCount() {
    // given
    helper.migrateProcessInstancesAsync(1);
    helper.migrateProcessInstancesAsync(1);

    // when
    long count = managementService.createBatchQuery().count();

    // then
    Assert.assertEquals(2, count);
  }

  @Test
  public void testBatchQueryOrderByIdAsc() {
    // given
    helper.migrateProcessInstancesAsync(1);
    helper.migrateProcessInstancesAsync(1);

    // when
    List<Batch> orderedBatches = managementService.createBatchQuery().orderById().asc().list();

    // then
    verifySorting(orderedBatches, batchById());
  }

  @Test
  public void testBatchQueryOrderByIdDec() {
    // given
    helper.migrateProcessInstancesAsync(1);
    helper.migrateProcessInstancesAsync(1);

    // when
    List<Batch> orderedBatches = managementService.createBatchQuery().orderById().desc().list();

    // then
    verifySorting(orderedBatches, inverted(batchById()));
  }

  @Test
  public void testBatchQueryOrderingPropertyWithoutOrder() {
    try {
      managementService.createBatchQuery().orderById().singleResult();
      Assert.fail("exception expected");
    }
    catch (NotValidException e) {
      Assert.assertThat(e.getMessage(), CoreMatchers.containsString("Invalid query: "
          + "call asc() or desc() after using orderByXX()"));
    }
  }

  @Test
  public void testBatchQueryOrderWithoutOrderingProperty() {
    try {
      managementService.createBatchQuery().asc().singleResult();
      Assert.fail("exception expected");
    }
    catch (NotValidException e) {
      Assert.assertThat(e.getMessage(), CoreMatchers.containsString("You should call any of the orderBy methods "
          + "first before specifying a direction"));
    }
  }

  @Test
  public void testBatchQueryBySuspendedBatches() {
    // given
    Batch batch1 = helper.migrateProcessInstancesAsync(1);
    Batch batch2 = helper.migrateProcessInstancesAsync(1);
    helper.migrateProcessInstancesAsync(1);

    // when batch1 is suspended and re-activated, only batch2 stays suspended
    managementService.suspendBatchById(batch1.getId());
    managementService.suspendBatchById(batch2.getId());
    managementService.activateBatchById(batch1.getId());

    // then
    BatchQuery query = managementService.createBatchQuery().suspended();
    Assert.assertEquals(1, query.count());
    Assert.assertEquals(1, query.list().size());
    Assert.assertEquals(batch2.getId(), query.singleResult().getId());
  }

  @Test
  public void testBatchQueryByActiveBatches() {
    // given
    Batch batch1 = helper.migrateProcessInstancesAsync(1);
    Batch batch2 = helper.migrateProcessInstancesAsync(1);
    Batch batch3 = helper.migrateProcessInstancesAsync(1);

    // when batch2 stays suspended; batch1 and batch3 remain active
    managementService.suspendBatchById(batch1.getId());
    managementService.suspendBatchById(batch2.getId());
    managementService.activateBatchById(batch1.getId());

    // then
    BatchQuery query = managementService.createBatchQuery().active();
    Assert.assertEquals(2, query.count());
    Assert.assertEquals(2, query.list().size());

    List<String> foundIds = new ArrayList<String>();
    for (Batch batch : query.list()) {
      foundIds.add(batch.getId());
    }
    Assert.assertThat(foundIds, hasItems(
        batch1.getId(),
        batch3.getId()
    ));
  }
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.dbg.util;
import java.util.*;
import java.util.function.*;
/**
 * Miscellaneous collection utilities: abstract bases for immutable collections and a
 * {@link Delta} type describing the difference between two string-keyed maps.
 *
 * <p>Declared as an empty enum so it can never be instantiated; it only hosts static
 * nested types and static helpers.
 */
public enum CollectionUtils {
	;

	/**
	 * An {@link AbstractList} whose every mutating operation throws
	 * {@link UnsupportedOperationException}. Subclasses need only supply
	 * {@code get(int)} and {@code size()}.
	 */
	public abstract static class AbstractImmutableList<T> extends AbstractList<T> {
		@Override
		public void add(int index, T element) {
			throw new UnsupportedOperationException();
		}

		@Override
		public boolean add(T e) {
			throw new UnsupportedOperationException();
		}

		@Override
		public boolean addAll(Collection<? extends T> c) {
			throw new UnsupportedOperationException();
		}

		@Override
		public boolean addAll(int index, Collection<? extends T> c) {
			throw new UnsupportedOperationException();
		}

		@Override
		public boolean remove(Object o) {
			throw new UnsupportedOperationException();
		}

		@Override
		public T remove(int index) {
			throw new UnsupportedOperationException();
		}

		@Override
		public boolean removeAll(Collection<?> c) {
			throw new UnsupportedOperationException();
		}

		@Override
		public boolean removeIf(Predicate<? super T> filter) {
			throw new UnsupportedOperationException();
		}

		@Override
		protected void removeRange(int fromIndex, int toIndex) {
			throw new UnsupportedOperationException();
		}

		@Override
		public boolean retainAll(Collection<?> c) {
			throw new UnsupportedOperationException();
		}

		@Override
		public void clear() {
			throw new UnsupportedOperationException();
		}

		@Override
		public void replaceAll(UnaryOperator<T> operator) {
			throw new UnsupportedOperationException();
		}

		@Override
		public T set(int index, T element) {
			throw new UnsupportedOperationException();
		}

		@Override
		public void sort(Comparator<? super T> c) {
			throw new UnsupportedOperationException();
		}
	}

	/**
	 * An {@link AbstractSet} whose every mutating operation throws
	 * {@link UnsupportedOperationException}. Subclasses supply
	 * {@code iterator()} and {@code size()}.
	 */
	public abstract static class AbstractImmutableSet<T> extends AbstractSet<T> {
		@Override
		public boolean add(T e) {
			throw new UnsupportedOperationException();
		}

		@Override
		public boolean addAll(Collection<? extends T> c) {
			throw new UnsupportedOperationException();
		}

		@Override
		public void clear() {
			throw new UnsupportedOperationException();
		}

		@Override
		public boolean remove(Object o) {
			throw new UnsupportedOperationException();
		}

		@Override
		public boolean retainAll(Collection<?> c) {
			throw new UnsupportedOperationException();
		}
	}

	// Marker base for immutable maps; AbstractMap is already read-only unless
	// entrySet()'s iterator supports remove.
	public abstract static class AbstractImmutableMap<K, V> extends AbstractMap<K, V> {
	}

	/** A permanently empty map (entry set is the immutable {@code Set.of()}). */
	public static class AbstractEmptyMap<K, V> extends AbstractMap<K, V> {
		@Override
		public Set<Entry<K, V>> entrySet() {
			return Set.of();
		}
	}

	/** A permanently empty list: any index is out of bounds, size is always 0. */
	public static class AbstractEmptyList<T> extends AbstractList<T> {
		@Override
		public T get(int index) {
			throw new ArrayIndexOutOfBoundsException(index);
		}

		@Override
		public int size() {
			return 0;
		}
	}

	/** A permanently empty set backed by {@link Collections#emptyIterator()}. */
	public static class AbstractEmptySet<T> extends AbstractImmutableSet<T> {
		@Override
		public Iterator<T> iterator() {
			return Collections.emptyIterator();
		}

		@Override
		public int size() {
			return 0;
		}
	}

	/**
	 * An immutable list wrapping a defensive copy of the given elements.
	 * Delegates {@code get}/{@code size} to the wrapped {@code List.of}/{@code List.copyOf}
	 * snapshot, which is null-hostile.
	 */
	public static class AbstractNList<T> extends AbstractImmutableList<T> {
		protected final List<T> wrapped;

		@SafeVarargs
		public AbstractNList(T... elems) {
			this.wrapped = List.of(elems);
		}

		public AbstractNList(Collection<T> col) {
			this.wrapped = List.copyOf(col);
		}

		@Override
		public T get(int index) {
			return wrapped.get(index);
		}

		@Override
		public int size() {
			return wrapped.size();
		}
	}

	/**
	 * An immutable set wrapping a defensive copy of the given elements
	 * ({@code Set.of}/{@code Set.copyOf} — null-hostile, duplicate-hostile for varargs).
	 */
	public static class AbstractNSet<T> extends AbstractImmutableSet<T> {
		protected final Set<T> wrapped;

		@SafeVarargs
		public AbstractNSet(T... elems) {
			this.wrapped = Set.of(elems);
		}

		public AbstractNSet(Collection<T> col) {
			this.wrapped = Set.copyOf(col);
		}

		@Override
		public Iterator<T> iterator() {
			return wrapped.iterator();
		}

		@Override
		public int size() {
			return wrapped.size();
		}
	}

	/**
	 * An immutable map wrapping an insertion-ordered defensive copy of the given map.
	 * Uses LinkedHashMap (not Map.copyOf) so iteration order is preserved and null
	 * values are tolerated.
	 */
	public static class AbstractNMap<K, V> extends AbstractImmutableMap<K, V> {
		protected final Map<K, V> wrapped;

		public AbstractNMap(Map<K, V> map) {
			this.wrapped = Collections.unmodifiableMap(new LinkedHashMap<>(map));
		}

		@Override
		public Set<Entry<K, V>> entrySet() {
			return wrapped.entrySet();
		}
	}

	/**
	 * Collects the values for those keys actually present in the map, skipping
	 * absent keys (a mapped null value IS returned, since containsKey is checked).
	 *
	 * @param map the map to look keys up in
	 * @param keys the keys to look up, in order
	 * @return the values found, in key-iteration order
	 */
	public static <K, V> Collection<V> getAllExisting(Map<K, V> map, Collection<K> keys) {
		List<V> result = new ArrayList<>();
		for (K k : keys) {
			if (map.containsKey(k)) {
				result.add(map.get(k));
			}
		}
		return result;
	}

	/**
	 * The difference between two string-keyed maps: entries {@link #removed} from the
	 * old map and entries {@link #added} by the new. A replaced entry appears in both.
	 *
	 * @param <T> the (super)type of values in the "old" map
	 * @param <U> the type of values in the "new" map
	 */
	public static class Delta<T, U extends T> {
		public static final Delta<?, ?> EMPTY = new Delta<>(Map.of(), Map.of());
		// Equality policies for deciding whether an entry "changed".
		public static final BiPredicate<Object, Object> SAME = (a, b) -> a == b;
		public static final BiPredicate<Object, Object> EQUAL = Objects::equals;

		// Safe: EMPTY contains no elements, so the cast can never be observed.
		@SuppressWarnings("unchecked")
		public static final <T, U extends T> Delta<T, U> empty() {
			return (Delta<T, U>) EMPTY;
		}

		/** Creates a delta from explicit removed and added maps. */
		public static final <T, U extends T> Delta<T, U> create(Map<String, T> removed,
				Map<String, U> added) {
			return new Delta<>(removed, added);
		}

		/**
		 * Creates a delta from removed keys (old values unknown, recorded as null)
		 * and added entries.
		 */
		public static final <T, U extends T> Delta<T, U> create(Collection<String> removedKeys,
				Map<String, U> added) {
			Map<String, T> removedNull = new HashMap<>();
			for (String key : removedKeys) {
				removedNull.put(key, null);
			}
			return new Delta<>(removedNull, added);
		}

		/**
		 * Removes from {@code mutable} every entry whose key is NOT in {@code keys},
		 * recording each removal (key and old value) into {@code removed}.
		 */
		protected static final <T> void retainKeys(Map<String, T> mutable, Collection<String> keys,
				Map<String, T> removed) {
			// Explicit iterator so eit.remove() can mutate during iteration.
			for (Iterator<Map.Entry<String, T>> eit = mutable.entrySet().iterator(); eit
					.hasNext();) {
				Map.Entry<String, T> oldEnt = eit.next();
				if (!keys.contains(oldEnt.getKey())) {
					removed.put(oldEnt.getKey(), oldEnt.getValue());
					eit.remove();
				}
			}
		}

		/**
		 * Removes from {@code mutable} every entry whose key IS in {@code keys},
		 * recording each removal into {@code removed}.
		 */
		protected static final <T> void removeKeys(Map<String, T> mutable, Collection<String> keys,
				Map<String, T> removed) {
			for (String r : keys) {
				if (mutable.containsKey(r)) {
					removed.put(r, mutable.remove(r));
				}
			}
		}

		/**
		 * Puts {@code entries} into {@code mutable}, recording genuinely-new entries
		 * into {@code added} and replaced entries into BOTH {@code removed} (old value)
		 * and {@code added} (new value). Entries equal under {@code equals} are left
		 * untouched and unrecorded.
		 */
		protected static final <T, U extends T> void putEntries(Map<String, T> mutable,
				Map<String, U> entries, Map<String, T> removed, Map<String, U> added,
				BiPredicate<? super T, ? super U> equals) {
			for (Map.Entry<String, U> e : entries.entrySet()) {
				String key = e.getKey();
				U newVal = e.getValue();
				if (!mutable.containsKey(key)) {
					mutable.put(key, newVal);
					added.put(key, newVal);
					continue;
				}
				T oldVal = mutable.get(key);
				if (!equals.test(oldVal, newVal)) {
					mutable.put(key, newVal);
					removed.put(key, oldVal);
					added.put(key, newVal);
				}
			}
		}

		/**
		 * Mutates {@code mutable} to exactly match {@code desired} and returns the
		 * delta that was applied (removed + added/replaced entries).
		 *
		 * @param equals the policy deciding whether an existing entry changed
		 */
		public static final <T, U extends T> Delta<T, U> computeAndSet(Map<String, T> mutable,
				Map<String, U> desired, BiPredicate<? super T, ? super U> equals) {
			Map<String, T> removed = new LinkedHashMap<>();
			Map<String, U> added = new LinkedHashMap<>();
			retainKeys(mutable, desired.keySet(), removed);
			putEntries(mutable, desired, removed, added, equals);
			return create(removed, added);
		}

		/*public static final <T, U extends T> Delta<T, U> computeAndSet(Map<String, T> mutable,
			Map<String, U> desired) {
			return computeAndSet(mutable, desired, SAME);
		}*/

		/**
		 * Applies a removed/added pair to {@code mutable} and returns the EFFECTIVE
		 * delta (entries that actually changed under {@code equals}); returns the
		 * shared empty delta when there is nothing to do.
		 */
		public static final <T, U extends T> Delta<T, U> apply(Map<String, T> mutable,
				Collection<String> removed, Map<String, U> added,
				BiPredicate<? super T, ? super U> equals) {
			if (removed.isEmpty() && added.isEmpty()) {
				return empty();
			}
			Map<String, T> fRemoved = new LinkedHashMap<>();
			Map<String, U> fAdded = new LinkedHashMap<>();
			removeKeys(mutable, removed, fRemoved);
			putEntries(mutable, added, fRemoved, fAdded, equals);
			return create(fRemoved, fAdded);
		}

		/*public static final <T, U extends T> Delta<T, U> apply(Map<String, T> mutable,
			Collection<String> removed, Map<String, U> added) {
			return apply(mutable, removed, added, SAME);
		}*/

		/** Applies the key changes only: drops removed keys, adds keys of added entries. */
		public static final void applyToKeys(Set<String> mutable, Collection<String> removed,
				Map<String, ?> added) {
			mutable.removeAll(removed);
			mutable.addAll(added.keySet());
		}

		// Entries removed (or replaced — old values) by this delta.
		public final Map<String, T> removed;
		// Entries added (or replaced — new values) by this delta.
		public final Map<String, U> added;
		// Lazily-computed cache for getKeysRemoved(); volatile for safe publication.
		private volatile Set<String> keysRemoved;

		// TODO: Moved?
		protected Delta(Map<String, T> removed, Map<String, U> added) {
			this.removed = removed;
			this.added = added;
		}

		@Override
		public String toString() {
			return "<Delta removed=" + removed + ", added=" + added + ">";
		}

		/** @return true when this delta changes nothing. */
		public boolean isEmpty() {
			return removed.isEmpty() && added.isEmpty();
		}

		/** Applies this delta to {@code mutable} using the given equality policy. */
		public Delta<T, U> apply(Map<String, T> mutable,
				BiPredicate<Object, Object> equals) {
			return apply(mutable, removed.keySet(), added, equals);
		}

		/** Applies this delta to {@code mutable} using identity ({@link #SAME}) equality. */
		public Delta<T, U> apply(Map<String, T> mutable) {
			return apply(mutable, SAME);
		}

		/** Applies this delta's key changes to the given key set. */
		public void applyToKeys(Set<String> mutable) {
			applyToKeys(mutable, removed.keySet(), added);
		}

		/**
		 * @return the keys truly removed (removed and not re-added), computed once
		 *         and cached. A benign race may compute it twice with the same result.
		 */
		public Set<String> getKeysRemoved() {
			if (keysRemoved != null) {
				return keysRemoved;
			}
			Set<String> temp = new LinkedHashSet<>(removed.keySet());
			temp.removeAll(added.keySet());
			keysRemoved = temp;
			return keysRemoved;
		}
	}
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.testFramework.fixtures.impl;
import com.intellij.ProjectTopics;
import com.intellij.ide.IdeView;
import com.intellij.ide.highlighter.ProjectFileType;
import com.intellij.ide.impl.OpenProjectTask;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.actionSystem.LangDataKeys;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.fileEditor.ex.FileEditorManagerEx;
import com.intellij.openapi.fileEditor.impl.text.TextEditorProvider;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.fileTypes.impl.FileTypeManagerImpl;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.ModuleListener;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ex.ProjectManagerEx;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.encoding.EncodingManager;
import com.intellij.openapi.vfs.impl.jar.JarFileSystemImpl;
import com.intellij.project.TestProjectManager;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageManagerImpl;
import com.intellij.testFramework.*;
import com.intellij.testFramework.builders.ModuleFixtureBuilder;
import com.intellij.testFramework.fixtures.HeavyIdeaTestFixture;
import com.intellij.util.PathUtil;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.lang.CompoundRuntimeException;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.Assert;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Creates new project for each test.
*/
@SuppressWarnings("TestOnlyProblems")
final class HeavyIdeaTestFixtureImpl extends BaseFixture implements HeavyIdeaTestFixture {
  private Project myProject;
  // volatile: assigned from a message-bus listener thread in setUpProject(), read by tests.
  private volatile Module myModule;
  // Paths scheduled for recursive deletion in tearDown().
  private final Set<Path> myFilesToDelete = new HashSet<>();
  private final Set<ModuleFixtureBuilder<?>> myModuleFixtureBuilders = new LinkedHashSet<>();
  private EditorListenerTracker myEditorListenerTracker;
  private ThreadTracker myThreadTracker;
  // Test name sanitized for use as a file-system name.
  private final String mySanitizedName;
  // Explicit project location, or null to generate a temporary directory.
  private final Path myProjectPath;
  private final boolean myIsDirectoryBasedProject;
  private SdkLeakTracker myOldSdks;
  // Token from TestProjectManager.startTracking(); finished in tearDown().
  private AccessToken projectTracker;

  HeavyIdeaTestFixtureImpl(@NotNull String name, @Nullable Path projectPath, boolean isDirectoryBasedProject) {
    mySanitizedName = FileUtil.sanitizeFileName(name, false);
    myProjectPath = projectPath;
    myIsDirectoryBasedProject = isDirectoryBasedProject;
  }

  // Registers a module builder whose fixture is set up/torn down with this fixture.
  void addModuleFixtureBuilder(ModuleFixtureBuilder<?> builder) {
    myModuleFixtureBuilders.add(builder);
  }

  @Override
  public void setUp() throws Exception {
    super.setUp();

    initApplication();
    projectTracker = ((TestProjectManager)ProjectManager.getInstance()).startTracking();

    setUpProject();

    EncodingManager.getInstance(); // adds listeners
    myEditorListenerTracker = new EditorListenerTracker();
    myThreadTracker = new ThreadTracker();
    InjectedLanguageManagerImpl.pushInjectors(getProject());
    myOldSdks = new SdkLeakTracker();
  }

  @Override
  public void tearDown() {
    // All teardown steps are collected and executed by RunAll so that a failure
    // in one step does not prevent the remaining cleanup from running.
    List<ThrowableRunnable<?>> actions = new ArrayList<>();
    if (myProject != null) {
      Project project = myProject;
      actions.add(() -> {
        TestApplicationManagerKt.tearDownProjectAndApp(myProject);
        myProject = null;
      });
      for (ModuleFixtureBuilder<?> moduleFixtureBuilder : myModuleFixtureBuilders) {
        actions.add(() -> moduleFixtureBuilder.getFixture().tearDown());
      }
      // Captured `project` (not the field) because myProject is nulled above.
      actions.add(() -> InjectedLanguageManagerImpl.checkInjectorsAreDisposed(project));
    }
    JarFileSystemImpl.cleanupForNextTest();
    for (Path fileToDelete : myFilesToDelete) {
      actions.add(() -> {
        List<IOException> errors;
        // Delete deepest paths first (reverse sort) so directories are empty
        // by the time they are deleted; individual failures are collected.
        try (Stream<Path> stream = Files.walk(fileToDelete)) {
          errors = stream
            .sorted(Comparator.reverseOrder())
            .map(x -> {
              try {
                Files.delete(x);
                return null;
              }
              catch (IOException e) {
                return e;
              }
            })
            .filter(Objects::nonNull)
            .collect(Collectors.toList());
        }
        catch (NoSuchFileException ignore) {
          // Already gone — nothing to delete.
          errors = Collections.emptyList();
        }
        CompoundRuntimeException.throwIfNotEmpty(errors);
      });
    }

    actions.add(() -> {
      AccessToken projectTracker = this.projectTracker;
      if (projectTracker != null) {
        this.projectTracker = null;
        projectTracker.finish();
      }
    });
    actions.add(() -> super.tearDown());
    actions.add(() -> {
      if (myEditorListenerTracker != null) {
        myEditorListenerTracker.checkListenersLeak();
      }
    });
    actions.add(() -> {
      if (myThreadTracker != null) {
        myThreadTracker.checkLeak();
      }
    });
    actions.add(() -> LightPlatformTestCase.checkEditorsReleased());
    actions.add(() -> {
      if (myOldSdks != null) {
        myOldSdks.checkForJdkTableLeaks();
      }
    });
    // project is disposed by now, no point in passing it
    actions.add(() -> HeavyPlatformTestCase.cleanupApplicationCaches(null));
    new RunAll(actions).run();
  }

  // Opens a fresh test project, capturing the first module added via the
  // MODULES topic, then sets up registered module fixtures on the EDT.
  private void setUpProject() throws Exception {
    OpenProjectTask options = OpenProjectTaskBuilderKt.createTestOpenProjectOptions(true).withBeforeOpenCallback(project -> {
      project.getMessageBus().simpleConnect().subscribe(ProjectTopics.MODULES, new ModuleListener() {
        @Override
        public void moduleAdded(@NotNull Project __, @NotNull Module module) {
          // Keep only the first module added as "the" fixture module.
          if (myModule == null) {
            myModule = module;
          }
        }
      });
      return true;
    });

    if (ApplicationManager.getApplication().isDispatchThread()) {
      PlatformTestUtil.dispatchAllInvocationEventsInIdeEventQueue();
    }

    myProject = Objects.requireNonNull(ProjectManagerEx.getInstanceEx().openProject(generateProjectPath(), options));

    EdtTestUtil.runInEdtAndWait(() -> {
      for (ModuleFixtureBuilder<?> moduleFixtureBuilder : myModuleFixtureBuilders) {
        moduleFixtureBuilder.getFixture().setUp();
      }

      LightPlatformTestCase.clearUncommittedDocuments(myProject);
      ((FileTypeManagerImpl)FileTypeManager.getInstance()).drainReDetectQueue();
    });
  }

  // Resolves the project location: an explicit path if given, otherwise a new
  // temp directory (registered for deletion). Directory-based projects have no
  // extension; file-based projects get the .ipr default extension.
  @NotNull
  private Path generateProjectPath() {
    Path tempDirectory;
    if (myProjectPath == null) {
      tempDirectory = TemporaryDirectory.generateTemporaryPath(mySanitizedName);
      myFilesToDelete.add(tempDirectory);
    }
    else {
      tempDirectory = myProjectPath;
    }
    return tempDirectory.resolve(mySanitizedName + (myIsDirectoryBasedProject ? "" : ProjectFileType.DOT_DEFAULT_EXTENSION));
  }

  private void initApplication() {
    TestApplicationManager.getInstance().setDataProvider(new MyDataProvider());
  }

  @Override
  public Project getProject() {
    Assert.assertNotNull("setUp() should be called first", myProject);
    return myProject;
  }

  @Override
  public Module getModule() {
    return myModule;
  }

  // DataProvider exposing this fixture's project/editor/IDE-view to actions
  // executed during tests.
  private final class MyDataProvider implements DataProvider {
    @Override
    @Nullable
    public Object getData(@NotNull @NonNls String dataId) {
      if (CommonDataKeys.PROJECT.is(dataId)) {
        return myProject;
      }
      else if (CommonDataKeys.EDITOR.is(dataId) || OpenFileDescriptor.NAVIGATE_IN_EDITOR.is(dataId)) {
        if (myProject == null || myProject.isDisposed()) {
          return null;
        }
        return FileEditorManager.getInstance(myProject).getSelectedTextEditor();
      }
      else {
        // Derive the remaining keys from the currently selected editor.
        Editor editor = (Editor)getData(CommonDataKeys.EDITOR.getName());
        if (editor != null) {
          if (PlatformDataKeys.FILE_EDITOR.is(dataId)) {
            return TextEditorProvider.getInstance().getTextEditor(editor);
          }
          else {
            FileEditorManagerEx manager = FileEditorManagerEx.getInstanceEx(myProject);
            return manager.getData(dataId, editor, editor.getCaretModel().getCurrentCaret());
          }
        }
        if (LangDataKeys.IDE_VIEW.is(dataId)) {
          // Fall back to an IdeView rooted at the first content root's directory.
          VirtualFile[] contentRoots = ProjectRootManager.getInstance(myProject).getContentRoots();
          if (contentRoots.length > 0) {
            final PsiDirectory psiDirectory = PsiManager.getInstance(myProject).findDirectory(contentRoots[0]);
            return new IdeView() {
              @Override
              public PsiDirectory @NotNull [] getDirectories() {
                return new PsiDirectory[] {psiDirectory};
              }

              @Override
              public PsiDirectory getOrChooseDirectory() {
                return psiDirectory;
              }
            };
          }
        }
        return null;
      }
    }
  }

  // Creates (or overwrites) a file under rootPath/relativePath with the given
  // text inside a write command, commits documents, and returns its PsiFile.
  @Override
  public PsiFile addFileToProject(@NotNull @NonNls String rootPath, @NotNull @NonNls final String relativePath, @NotNull @NonNls final String fileText) throws IOException {
    final VirtualFile dir = VfsUtil.createDirectories(rootPath + "/" + PathUtil.getParentPath(relativePath));

    final VirtualFile[] virtualFile = new VirtualFile[1];

    WriteCommandAction.writeCommandAction(getProject()).run(() -> {
      virtualFile[0] = dir.createChildData(this, StringUtil.getShortName(relativePath, '/'));
      VfsUtil.saveText(virtualFile[0], fileText);
      PsiDocumentManager.getInstance(getProject()).commitAllDocuments();
    });
    return ReadAction.compute(() -> PsiManager.getInstance(getProject()).findFile(virtualFile[0]));
  }
}
| |
/*
***************************************************************************
* Copyright (C) 2002-2009 International Business Machines Corporation *
* and others. All rights reserved. *
***************************************************************************
*/
package com.ibm.icu.text;
import java.text.ParsePosition;
import java.util.HashMap;
import com.ibm.icu.lang.UCharacter;
class RBBISymbolTable implements SymbolTable {
    String fRules;
    HashMap<String, RBBISymbolTableEntry> fHashTable;
    RBBIRuleScanner fRuleScanner;

    // These next two fields are part of the mechanism for passing references to
    // already-constructed UnicodeSets back to the UnicodeSet constructor when the
    // pattern includes $variable references: lookup() hands out ffffString as a
    // stand-in character, and lookupMatcher() resolves it to fCachedSetLookup.
    String ffffString;
    UnicodeSet fCachedSetLookup;

    /** A single symbol-table entry: the variable name and its parse-tree value. */
    static class RBBISymbolTableEntry {
        String key;
        RBBINode val;
    }

    RBBISymbolTable(RBBIRuleScanner rs, String rules) {
        fRuleScanner = rs;
        fRules = rules;
        fHashTable = new HashMap<String, RBBISymbolTableEntry>();
        ffffString = "\uffff";
    }

    //
    // From the abstract SymbolTable interface: looks up a variable name and
    // returns the substitution text for it. The name does NOT include the
    // leading $.
    //
    public char[] lookup(String s) {
        RBBISymbolTableEntry entry = fHashTable.get(s);
        if (entry == null) {
            return null;
        }
        // Walk through any chain of variable assignments that ultimately resolve to a Set Ref.
        RBBINode varRefNode = entry.val;
        while (varRefNode.fLeftChild.fType == RBBINode.varRef) {
            varRefNode = varRefNode.fLeftChild;
        }
        RBBINode exprNode = varRefNode.fLeftChild; // root node of the expression for the variable
        String substitution;
        if (exprNode.fType == RBBINode.setRef) {
            // The $variable refers to a single UnicodeSet. Return ffffString, which will
            // subsequently be interpreted as a stand-in character for the set by lookupMatcher().
            fCachedSetLookup = exprNode.fLeftChild.fInputSet;
            substitution = ffffString;
        } else {
            // The variable refers to something other than just a set — an error in the rules
            // being compiled. $Variables inside of UnicodeSets must refer only to another set,
            // not to some random non-set expression.
            // Note: single characters are represented as sets, so they are ok.
            fRuleScanner.error(RBBIRuleBuilder.U_BRK_MALFORMED_SET);
            fCachedSetLookup = null;
            substitution = exprNode.fText;
        }
        return substitution.toCharArray();
    }

    //
    // From the abstract SymbolTable interface: maps a single stand-in character to
    // a UnicodeSet. The UnicodeSet code uses this mechanism to get all references
    // to the same $variable name to resolve to a single common UnicodeSet instance.
    //
    // This implementation cheats a little and does not maintain a map of stand-in
    // chars to sets. Instead it relies on the UnicodeSet constructor always calling
    // this function right after lookup(), so only the set from that last lookup
    // needs to be remembered.
    //
    public UnicodeMatcher lookupMatcher(int ch) {
        if (ch != 0xffff) {
            return null;
        }
        UnicodeSet cached = fCachedSetLookup;
        fCachedSetLookup = null;
        return cached;
    }

    //
    // From the abstract SymbolTable interface: scans for a $variable name in the
    // source text starting at pos. It does not look the name up, only scans for it.
    // Used by the UnicodeSet parser. Returns "" when no valid name chars are found.
    //
    public String parseReference(String text, ParsePosition pos, int limit) {
        int start = pos.getIndex();
        int idx = start;
        while (idx < limit) {
            int cp = UTF16.charAt(text, idx);
            // The first character must be a valid identifier start (and part);
            // subsequent characters only need to be identifier parts.
            boolean valid = (idx == start)
                    ? UCharacter.isUnicodeIdentifierStart(cp) && UCharacter.isUnicodeIdentifierPart(cp)
                    : UCharacter.isUnicodeIdentifierPart(cp);
            if (!valid) {
                break;
            }
            idx += UTF16.getCharCount(cp);
        }
        if (idx == start) { // no valid name chars: indicate failure with an empty string
            return "";
        }
        pos.setIndex(idx);
        return text.substring(start, idx);
    }

    //
    // Given a key (a variable name), return the corresponding RBBI node.
    // Returns null when the table has no entry for the name.
    //
    RBBINode lookupNode(String key) {
        RBBISymbolTableEntry entry = fHashTable.get(key);
        return entry == null ? null : entry.val;
    }

    //
    // Add a new entry to the symbol table. Reports an error if the name already
    // exists — that only happens for duplicate variable assignments.
    //
    void addEntry(String key, RBBINode val) {
        if (fHashTable.containsKey(key)) {
            fRuleScanner.error(RBBIRuleBuilder.U_BRK_VARIABLE_REDFINITION);
            return;
        }
        RBBISymbolTableEntry entry = new RBBISymbolTableEntry();
        entry.key = key;
        entry.val = val;
        fHashTable.put(key, entry);
    }

    //
    // Debugging function: dump the symbol table contents to System.out.
    //
    ///CLOVER:OFF
    void rbbiSymtablePrint() {
        System.out
                .print("Variable Definitions\n"
                        + "Name Node Val String Val\n"
                        + "----------------------------------------------------------------------\n");
        for (RBBISymbolTableEntry entry : fHashTable.values()) {
            System.out.print(" " + entry.key + " "); // TODO: format output into columns.
            System.out.print(" " + entry.val + " ");
            System.out.print(entry.val.fLeftChild.fText);
            System.out.print("\n");
        }
        System.out.println("\nParsed Variable Definitions\n");
        for (RBBISymbolTableEntry entry : fHashTable.values()) {
            System.out.print(entry.key);
            entry.val.fLeftChild.printTree(true);
            System.out.print("\n");
        }
    }
    ///CLOVER:ON
}
| |
package com.gdm.awsv2;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import software.amazon.awssdk.auth.credentials.AwsCredentials;
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
import software.amazon.awssdk.services.ec2.Ec2Client;
import software.amazon.awssdk.services.ec2.model.CreateCustomerGatewayRequest;
import software.amazon.awssdk.services.ec2.model.CreateCustomerGatewayResponse;
import software.amazon.awssdk.services.ec2.model.CreateTagsRequest;
import software.amazon.awssdk.services.ec2.model.CreateTagsResponse;
import software.amazon.awssdk.services.ec2.model.CreateVpnConnectionRequest;
import software.amazon.awssdk.services.ec2.model.CreateVpnConnectionResponse;
import software.amazon.awssdk.services.ec2.model.DeleteCustomerGatewayRequest;
import software.amazon.awssdk.services.ec2.model.DeleteVpnConnectionRequest;
import software.amazon.awssdk.services.ec2.model.DescribeInstancesRequest;
import software.amazon.awssdk.services.ec2.model.DescribeInstancesResponse;
import software.amazon.awssdk.services.ec2.model.DescribeTransitGatewaysRequest;
import software.amazon.awssdk.services.ec2.model.DescribeTransitGatewaysResponse;
import software.amazon.awssdk.services.ec2.model.DescribeVpnConnectionsRequest;
import software.amazon.awssdk.services.ec2.model.DescribeVpnConnectionsResponse;
import software.amazon.awssdk.services.ec2.model.Filter;
import software.amazon.awssdk.services.ec2.model.GatewayType;
import software.amazon.awssdk.services.ec2.model.Instance;
import software.amazon.awssdk.services.ec2.model.Reservation;
//import software.amazon.awssdk.services.networkmanager.model.Tag;
import software.amazon.awssdk.services.ec2.model.Tag;
import software.amazon.awssdk.services.ec2.model.TransitGateway;
import software.amazon.awssdk.services.ec2.model.VpnConnection;
import software.amazon.awssdk.services.ec2.model.VpnConnectionOptionsSpecification;
import software.amazon.awssdk.services.ec2.model.VpnTunnelOptionsSpecification;
import software.amazon.awssdk.services.iam.IamClient;
import software.amazon.awssdk.services.networkmanager.NetworkManagerClient;
import software.amazon.awssdk.services.networkmanager.model.AssociateCustomerGatewayRequest;
import software.amazon.awssdk.services.networkmanager.model.AssociateCustomerGatewayResponse;
import software.amazon.awssdk.services.networkmanager.model.AssociateLinkRequest;
import software.amazon.awssdk.services.networkmanager.model.AssociateLinkResponse;
import software.amazon.awssdk.services.networkmanager.model.CreateDeviceRequest;
import software.amazon.awssdk.services.networkmanager.model.CreateDeviceResponse;
import software.amazon.awssdk.services.networkmanager.model.CreateLinkRequest;
import software.amazon.awssdk.services.networkmanager.model.CreateLinkResponse;
import software.amazon.awssdk.services.networkmanager.model.CreateSiteRequest;
import software.amazon.awssdk.services.networkmanager.model.CreateSiteResponse;
import software.amazon.awssdk.services.networkmanager.model.DeleteDeviceRequest;
import software.amazon.awssdk.services.networkmanager.model.DeleteLinkRequest;
import software.amazon.awssdk.services.networkmanager.model.DeleteSiteRequest;
import software.amazon.awssdk.services.networkmanager.model.DescribeGlobalNetworksRequest;
import software.amazon.awssdk.services.networkmanager.model.DescribeGlobalNetworksResponse;
import software.amazon.awssdk.services.networkmanager.model.DisassociateCustomerGatewayRequest;
import software.amazon.awssdk.services.networkmanager.model.DisassociateLinkRequest;
import software.amazon.awssdk.services.networkmanager.model.GetSitesRequest;
import software.amazon.awssdk.services.networkmanager.model.GetSitesResponse;
import software.amazon.awssdk.services.networkmanager.model.GlobalNetwork;
import software.amazon.awssdk.services.networkmanager.model.Site;
/**
 * Plain value holder for the pieces of an AWS site-to-site VPN configuration that the
 * demo extracts from the customer-gateway XML: tunnel peer addresses, the pre-shared
 * key, hub-side address spaces, and optional BGP settings.
 */
class AWSSiteConfig {
    // Tunnel peer (AWS-side outside) addresses and the IKE pre-shared key.
    private String peerIp1;
    private String peerIp2;
    private String psk;
    // Hub-side inside-tunnel address spaces.
    private String hubAddressSpace;
    private String hubAddressSpace1;
    private String hubAddressSpace2;
    // BGP settings; only meaningful when isBgpEnabled is true.
    boolean isBgpEnabled;
    int asNo;
    private String bgpPeerIp1;
    private String bgpPeerIp2;

    public String getPeerIp1() {
        return peerIp1;
    }

    public void setPeerIp1(String value) {
        this.peerIp1 = value;
    }

    public String getPeerIp2() {
        return peerIp2;
    }

    public void setPeerIp2(String value) {
        this.peerIp2 = value;
    }

    public String getPsk() {
        return psk;
    }

    public void setPsk(String value) {
        this.psk = value;
    }

    public String getHubAddressSpace() {
        return hubAddressSpace;
    }

    public void setHubAddressSpace(String value) {
        this.hubAddressSpace = value;
    }

    public String getHubAddressSpace1() {
        return hubAddressSpace1;
    }

    public void setHubAddressSpace1(String value) {
        this.hubAddressSpace1 = value;
    }

    public String getHubAddressSpace2() {
        return hubAddressSpace2;
    }

    public void setHubAddressSpace2(String value) {
        this.hubAddressSpace2 = value;
    }

    public boolean isBgpEnabled() {
        return isBgpEnabled;
    }

    public void setBgpEnabled(boolean value) {
        this.isBgpEnabled = value;
    }

    public int getAsNo() {
        return asNo;
    }

    public void setAsNo(int value) {
        this.asNo = value;
    }

    public String getBgpPeerIp1() {
        return bgpPeerIp1;
    }

    public void setBgpPeerIp1(String value) {
        this.bgpPeerIp1 = value;
    }

    public String getBgpPeerIp2() {
        return bgpPeerIp2;
    }

    public void setBgpPeerIp2(String value) {
        this.bgpPeerIp2 = value;
    }
}
/**
 * Supplies static AWS credentials read from the standard environment variables
 * {@code AWS_ACCESS_KEY_ID} and {@code AWS_SECRET_ACCESS_KEY}.
 *
 * <p>SECURITY: a previous revision returned a hard-coded access key id from source.
 * Credentials must never be committed to source control; any key that was checked in
 * here should be considered compromised and rotated. For EC2-hosted runs, prefer the
 * SDK's {@code InstanceProfileCredentialsProvider} instead of this class.
 */
class CustomCredProvider implements AwsCredentialsProvider {
    @Override
    public AwsCredentials resolveCredentials() {
        return new AwsCredentials() {
            @Override
            public String secretAccessKey() {
                // Empty string (the previous placeholder value) when the variable is unset.
                String secret = System.getenv("AWS_SECRET_ACCESS_KEY");
                return secret == null ? "" : secret;
            }

            @Override
            public String accessKeyId() {
                String keyId = System.getenv("AWS_ACCESS_KEY_ID");
                return keyId == null ? "" : keyId;
            }
        };
    }
}
/**
 * Demo / scratch-pad for the AWS Network Manager + Transit Gateway APIs (SDK v2):
 * lists global networks, provisions and tears down a site/link/device/CGW/VPN-connection
 * chain, and parses the customer-gateway XML configuration into an {@link AWSSiteConfig}.
 */
public class NMDemo {
    // AWS service clients, created once in init() and reused by every demo method.
    Ec2Client ec2 = null;
    NetworkManagerClient nmc;
    IamClient iam = null;
    public static Logger logger = Logger.getLogger(NMDemo.class);

    public static void main(String[] args) throws IOException {
        NMDemo nm = new NMDemo();
        nm.demo();
    }

    /** Builds the AWS clients and runs the Transit Gateway walkthrough. */
    private void demo() {
        init();
        //getAllInstances();
        //getAllTransitGateways();
        TGWDemo();
    }

    /**
     * Lists the global networks and demonstrates extracting ids from a site ARN.
     * The ARN below is a sample value captured from a previous run.
     */
    private void TGWDemo() {
        getAllGlobalNetworks();
        //createSiteAndLinks();
        // Site ARN = arn:aws:networkmanager::542139586608:site/global-network-0041aa2612ed6b587/site-0702dc1f65f564820
        // GlobaNW = arn:aws:networkmanager::542139586608:global-network/global-network-0041aa2612ed6b587
        String siteArn = "arn:aws:networkmanager::542139586608:site/global-network-0041aa2612ed6b587/site-0702dc1f65f564820";
        getIdFromArn(siteArn);
        getGlobalNetwokrIdFromArn(siteArn);
    }

    /**
     * Extracts the global-network id embedded in a site ARN, e.g.
     * ".../global-network-XXX/site-YYY" yields "global-network-XXX".
     * NOTE(review): assumes the first occurrence of "global" in the ARN marks the
     * global-network segment — confirm for other ARN shapes.
     */
    private void getGlobalNetwokrIdFromArn(String siteArn) {
        String globalNetworkId = siteArn.substring(siteArn.indexOf("global"), siteArn.lastIndexOf("/"));
        logger.info("GlobalNetworkID = " + globalNetworkId);
    }

    /**
     * End-to-end provisioning: site -> link -> device -> link association -> customer
     * gateway -> CGW association -> VPN connection. Every created id is then attached to
     * the VPN connection as a tag so deleteAll() can later find and tear everything down.
     *
     * <p>The *Arn/*Id strings at the top are placeholders — replace them with real values
     * before running.
     */
    private void createSiteAndLinks() {
        String globalNetworkArn = "globalNetworkArn";
        String globalNetworkId = "globalNetworkId";
        String transitGatewayId = "transitGatwayId";
        String applianceName = "VersaApp";
        // Create Site
        CreateSiteRequest createSiteRequest = CreateSiteRequest.builder()
                .globalNetworkId(globalNetworkId).description("VersaApp").build();
        CreateSiteResponse response = nmc.createSite(createSiteRequest);
        String siteId = response.site().siteId();
        // Create Link in Site
        CreateLinkRequest createLinkRequest = CreateLinkRequest.builder()
                .globalNetworkId(globalNetworkId)
                .siteId(response.site().siteId())
                .build(); // TODO: add bandwidth
        CreateLinkResponse lResposne = nmc.createLink(createLinkRequest);
        // Create Device and associate it to the site
        CreateDeviceRequest createDeviceRequest = CreateDeviceRequest.builder()
                .globalNetworkId(globalNetworkId)
                .description(applianceName)
                .model("Versa")
                .siteId(response.site().siteId())
                .build(); // TODO: add location
        CreateDeviceResponse createDeviceResponse = nmc.createDevice(createDeviceRequest);
        // And associate the device to the link
        AssociateLinkRequest associateLinkRequest = AssociateLinkRequest.builder()
                .linkId(lResposne.link().linkId())
                .deviceId(createDeviceResponse.device().deviceId())
                .globalNetworkId(globalNetworkId)
                .build();
        AssociateLinkResponse associateLinkResponse = nmc.associateLink(associateLinkRequest);
        String linkId = associateLinkResponse.linkAssociation().linkId();
        // Create CGW
        String publicIp = "1.1.1.1";
        CreateCustomerGatewayRequest createCustomerGatewayRequest = CreateCustomerGatewayRequest.builder()
                .deviceName(applianceName)
                .publicIp(publicIp)
                .type(GatewayType.IPSEC_1)
                .build();
        CreateCustomerGatewayResponse createCustomerGatewayResponse = ec2.createCustomerGateway(createCustomerGatewayRequest);
        String customerGatewayId = createCustomerGatewayResponse.customerGateway().customerGatewayId();
        // Associate CGW with device and link
        AssociateCustomerGatewayRequest associateCustomerGatewayRequest = AssociateCustomerGatewayRequest.builder()
                // FIXME: the API expects the customer gateway ARN, but only the id is available here.
                .customerGatewayArn(createCustomerGatewayResponse.customerGateway().customerGatewayId())
                // BUG FIX: a previous revision passed the device id as the global network id.
                .globalNetworkId(globalNetworkId)
                .deviceId(createDeviceResponse.device().deviceId())
                .build();
        AssociateCustomerGatewayResponse associateCustomerGatewayResponse = nmc.associateCustomerGateway(associateCustomerGatewayRequest);
        String customerGatewayArn = associateCustomerGatewayResponse.customerGatewayAssociation().customerGatewayArn();
        String deviceId = associateCustomerGatewayResponse.customerGatewayAssociation().deviceId();
        // Create VpnConnection (the attachment between CGW and TGW).
        // First describe the tunnel on our CPE side.
        VpnTunnelOptionsSpecification t1 = VpnTunnelOptionsSpecification.builder()
                .preSharedKey("ABC123")
                .tunnelInsideCidr("2.2.2.2/24") // CPE LAN address space
                .build(); // no public IP
        VpnConnectionOptionsSpecification tunnelSpec = VpnConnectionOptionsSpecification.builder()
                .tunnelOptions(t1) // multiple tunnels can also be passed here
                .build();
        CreateVpnConnectionRequest createVpnConnectionRequest = CreateVpnConnectionRequest.builder()
                .customerGatewayId(createCustomerGatewayResponse.customerGateway().customerGatewayId())
                .transitGatewayId(transitGatewayId)
                .options(tunnelSpec)
                .build();
        CreateVpnConnectionResponse createVpnConnectionResponse = ec2.createVpnConnection(createVpnConnectionRequest);
        String vpnConnectionId = createVpnConnectionResponse.vpnConnection().vpnConnectionId();
        // Tag the VpnConnection with every created id so all corresponding objects can be
        // identified and deleted later (see deleteAll()).
        Tag vpnConnectionNameTag = Tag.builder().key("Name").value(applianceName).build();
        Tag vpnConnectionVersaIdTag = Tag.builder().key("VersaId").value(applianceName).build(); // filter key used by deleteAll()
        Tag globalNetworkArnTag = Tag.builder().key("globalNetworkArn").value(globalNetworkArn).build();
        Tag globalNetworkIdTag = Tag.builder().key("globalNetworkId").value(globalNetworkId).build();
        Tag customerGatewayArnTag = Tag.builder().key("customerGatewayArn").value(customerGatewayArn).build();
        Tag customerGatewayIdTag = Tag.builder().key("customerGatewayId").value(customerGatewayId).build();
        Tag deviceIdTag = Tag.builder().key("deviceId").value(deviceId).build();
        Tag linkIdIdTag = Tag.builder().key("linkId").value(linkId).build();
        Tag siteIdTag = Tag.builder().key("siteId").value(siteId).build();
        Collection<Tag> tags = new ArrayList<Tag>();
        tags.add(vpnConnectionNameTag);
        tags.add(vpnConnectionVersaIdTag);
        tags.add(globalNetworkArnTag);
        tags.add(globalNetworkIdTag);
        tags.add(customerGatewayArnTag);
        tags.add(customerGatewayIdTag);
        tags.add(deviceIdTag);
        tags.add(linkIdIdTag);
        tags.add(siteIdTag);
        CreateTagsRequest createTagsRequest = CreateTagsRequest.builder().resources(vpnConnectionId).tags(tags).build();
        ec2.createTags(createTagsRequest);
        /*
         * The tunnel config needed for bind data is available on the response:
         * createVpnConnectionResponse.vpnConnection().options().tunnelOptions() — each entry
         * exposes preSharedKey(), outsideIpAddress() (public IP) and tunnelInsideCidr()
         * (private CIDR); static routes via vpnConnection().routes().
         */
    }

    /**
     * Tears down everything createSiteAndLinks() provisioned. Finds the VPN connection via
     * the "VersaId" tag, reads the resource ids back out of its tags, then deletes in
     * reverse dependency order: VPN connection, CGW association, CGW, link association,
     * device, link, site.
     * NOTE(review): assumes exactly one matching VPN connection — get(0) throws when the
     * filter matches nothing. The filter value "applianceName" is a literal placeholder.
     */
    private void deleteAll() {
        // Filter by tag:VersaId, applianceName
        Filter filter = Filter.builder()
                .name("tag:VersaId")
                .values("applianceName")
                .build();
        DescribeVpnConnectionsRequest describeVpnConnectionsRequest = DescribeVpnConnectionsRequest.builder().filters(filter).build();
        DescribeVpnConnectionsResponse describeVpnConnectionsResponse = ec2.describeVpnConnections(describeVpnConnectionsRequest);
        String vpnConnectionId = describeVpnConnectionsResponse.vpnConnections().get(0).vpnConnectionId();
        List<Tag> tags = describeVpnConnectionsResponse.vpnConnections().get(0).tags();
        Map<String, String> tagMap = new HashMap<String, String>();
        for (Tag tag : tags) {
            tagMap.put(tag.key(), tag.value());
        }
        // Deletes
        DeleteVpnConnectionRequest deleteVpnConnectionRequest = DeleteVpnConnectionRequest.builder()
                .vpnConnectionId(vpnConnectionId)
                .build();
        ec2.deleteVpnConnection(deleteVpnConnectionRequest);
        DisassociateCustomerGatewayRequest disassociateCustomerGatewayRequest = DisassociateCustomerGatewayRequest.builder()
                .globalNetworkId(tagMap.get("globalNetworkId")).customerGatewayArn(tagMap.get("customerGatewayArn")).build();
        nmc.disassociateCustomerGateway(disassociateCustomerGatewayRequest);
        DeleteCustomerGatewayRequest deleteCustomerGatewayRequest = DeleteCustomerGatewayRequest.builder().customerGatewayId(tagMap.get("customerGatewayId")).build();
        ec2.deleteCustomerGateway(deleteCustomerGatewayRequest);
        DisassociateLinkRequest disassociateLinkRequest =
                DisassociateLinkRequest.builder().globalNetworkId(tagMap.get("globalNetworkId")).deviceId(tagMap.get("deviceId")).linkId(tagMap.get("linkId")).build();
        nmc.disassociateLink(disassociateLinkRequest);
        DeleteDeviceRequest deleteDeviceRequest = DeleteDeviceRequest.builder().globalNetworkId(tagMap.get("globalNetworkId")).deviceId(tagMap.get("deviceId")).build();
        nmc.deleteDevice(deleteDeviceRequest);
        DeleteLinkRequest deleteLinkRequest = DeleteLinkRequest.builder().globalNetworkId(tagMap.get("globalNetworkId")).linkId(tagMap.get("linkId")).build();
        nmc.deleteLink(deleteLinkRequest);
        DeleteSiteRequest deleteSiteRequest = DeleteSiteRequest.builder().globalNetworkId(tagMap.get("globalNetworkId")).siteId(tagMap.get("siteId")).build();
        nmc.deleteSite(deleteSiteRequest);
    }

    /**
     * Logs every global network, every VPN connection (with tags and parsed config), and
     * the sites of one hard-coded global network.
     * NOTE(review): the global-network id passed to GetSites is a sample value.
     */
    private void getAllGlobalNetworks() {
        DescribeGlobalNetworksRequest request = DescribeGlobalNetworksRequest.builder().build();
        DescribeGlobalNetworksResponse response = nmc.describeGlobalNetworks(request);
        for (GlobalNetwork gbn : response.globalNetworks()) {
            logger.info("GlobaNW = " + gbn.globalNetworkArn());
            logger.info("GlobaNW ID = " + gbn.globalNetworkId());
        }
        DescribeVpnConnectionsRequest r1 = DescribeVpnConnectionsRequest.builder().build();
        DescribeVpnConnectionsResponse resp = ec2.describeVpnConnections(r1);
        List<VpnConnection> vpnCpnnections = resp.vpnConnections();
        if (vpnCpnnections != null) {
            for (VpnConnection vpnConnection : vpnCpnnections) {
                logger.info("VPNConnection = " + vpnConnection.toString());
                //logger.info("XML Config = " + vpnConnection.customerGatewayConfiguration());
                List<software.amazon.awssdk.services.ec2.model.Tag> tags = vpnConnection.tags();
                for (software.amazon.awssdk.services.ec2.model.Tag tag : tags) {
                    logger.info("TAG Key = " + tag.key());
                    logger.info("TAG Value = " + tag.value());
                }
                try {
                    parseVPNConfig(vpnConnection.customerGatewayConfiguration());
                } catch (Exception e) {
                    // Best-effort demo output: log the parse failure and keep listing connections.
                    e.printStackTrace();
                }
            }
        }
        GetSitesRequest getSitesRequest = GetSitesRequest.builder().
                globalNetworkId("global-network-0041aa2612ed6b587")
                .build();
        GetSitesResponse getSitesResponse = nmc.getSites(getSitesRequest);
        List<Site> sites = getSitesResponse.sites();
        for (Site site : sites) {
            logger.info("Site = " + site.toString());
            logger.info("Site ARN = " + site.siteArn());
            logger.info("GlobalNetworkID = " + site.globalNetworkId());
            logger.info("Site ID = " + site.siteId());
        }
    }

    /** Logs the ARN and id of every Transit Gateway, following pagination. */
    private void getAllTransitGateways() {
        String nextToken = null;
        do {
            DescribeTransitGatewaysRequest request = DescribeTransitGatewaysRequest.builder()
                    .maxResults(10).nextToken(nextToken).build();
            DescribeTransitGatewaysResponse response = ec2.describeTransitGateways(request);
            for (TransitGateway tgw : response.transitGateways()) {
                logger.info("TGW Name = " + tgw.transitGatewayArn());
                logger.info("TGW Id = " + tgw.transitGatewayId());
            }
            // BUG FIX: the token was never advanced before, so only the first page was listed.
            nextToken = response.nextToken();
        } while (nextToken != null);
    }

    /** Logs the key properties of every EC2 instance, following pagination. */
    private void getAllInstances() {
        String nextToken = null;
        do {
            DescribeInstancesRequest request = DescribeInstancesRequest.builder().maxResults(6).nextToken(nextToken).build();
            DescribeInstancesResponse response = ec2.describeInstances(request);
            for (Reservation reservation : response.reservations()) {
                for (Instance instance : reservation.instances()) {
                    logger.info("Tags : " + instance.tags());
                    logger.info("Instance ID: " + instance.instanceId());
                    logger.info("Image ID: " + instance.imageId());
                    logger.info("Instance State: " + instance.state().name());
                    logger.info("SubnetID: " + instance.subnetId());
                    logger.info("Private Address: " + instance.privateIpAddress());
                }
            }
            nextToken = response.nextToken();
        } while (nextToken != null);
    }

    /**
     * Creates the NetworkManager and EC2 clients in us-west-2 with custom credentials.
     * Alternative credential setups are kept below, commented out, for reference.
     */
    private void init() {
        nmc = NetworkManagerClient.builder().credentialsProvider(new CustomCredProvider()).region(software.amazon.awssdk.regions.Region.US_WEST_2).build();
        // ec2 with custom credentials
        ec2 = Ec2Client.builder().credentialsProvider(new CustomCredProvider()).region(software.amazon.awssdk.regions.Region.US_WEST_2).build();
        // ec2 with instance profile
        //ec2 = Ec2Client.builder().credentialsProvider(InstanceProfileCredentialsProvider.builder().build()).build();
        // iam with custom credentials
        //iam = IamClient.builder().credentialsProvider(new CustomCredProvider()).region(Region.AWS_GLOBAL).build();
        // iam with instance profile
        //iam = IamClient.builder().credentialsProvider(InstanceProfileCredentialsProvider.builder().build()).build();
    }

    // Site ARN = arn:aws:networkmanager::542139586608:site/global-network-0041aa2612ed6b587/site-0702dc1f65f564820
    // GlobaNW = arn:aws:networkmanager::542139586608:global-network/global-network-0041aa2612ed6b587
    /** Returns the final path segment of an ARN (the bare resource id). */
    private String getIdFromArn(String arn) {
        String id = arn.substring(arn.lastIndexOf("/") + 1);
        logger.info("ID = " + id);
        return id;
    }

    /**
     * Parses a customer-gateway XML configuration into an {@link AWSSiteConfig}: for each
     * of the two vpn_gateway tunnels it extracts the outside (public) IP, the inside
     * (BGP peer) address/CIDR, and the ASN; the last ike pre_shared_key wins.
     *
     * <p>TODO(review): the {@code vpnConfig} parameter is currently IGNORED — the method
     * parses the embedded sample document {@code vpC} instead. Switch to the parameter
     * once validated against real configs.
     */
    AWSSiteConfig parseVPNConfig(String vpnConfig) throws Exception {
        AWSSiteConfig awsSiteConfig = new AWSSiteConfig();
        String vpC = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><vpn_connection id=\"vpn-04960711fd320d118\">\n" +
                " <customer_gateway_id>cgw-0f06432e265740b78</customer_gateway_id>\n" +
                " <vpn_gateway_id></vpn_gateway_id>\n" +
                " <vpn_connection_type>ipsec.1</vpn_connection_type>\n" +
                " <ipsec_tunnel>\n" +
                " <customer_gateway>\n" +
                " <tunnel_outside_address>\n" +
                " <ip_address>3.3.3.3</ip_address>\n" +
                " </tunnel_outside_address>\n" +
                " <tunnel_inside_address>\n" +
                " <ip_address>169.254.225.178</ip_address>\n" +
                " <network_mask>255.255.255.252</network_mask>\n" +
                " <network_cidr>30</network_cidr>\n" +
                " </tunnel_inside_address>\n" +
                " <bgp>\n" +
                " <asn>65000</asn>\n" +
                " <hold_time>30</hold_time>\n" +
                " </bgp>\n" +
                " </customer_gateway>\n" +
                " <vpn_gateway>\n" +
                " <tunnel_outside_address>\n" +
                " <ip_address>35.160.221.148</ip_address>\n" +
                " </tunnel_outside_address>\n" +
                " <tunnel_inside_address>\n" +
                " <ip_address>169.254.225.177</ip_address>\n" +
                " <network_mask>255.255.255.252</network_mask>\n" +
                " <network_cidr>30</network_cidr>\n" +
                " </tunnel_inside_address>\n" +
                " <bgp>\n" +
                " <asn>64512</asn>\n" +
                " <hold_time>30</hold_time>\n" +
                " </bgp>\n" +
                " </vpn_gateway>\n" +
                " <ike>\n" +
                " <authentication_protocol>sha1</authentication_protocol>\n" +
                " <encryption_protocol>aes-128-cbc</encryption_protocol>\n" +
                " <lifetime>28800</lifetime>\n" +
                " <perfect_forward_secrecy>group2</perfect_forward_secrecy>\n" +
                " <mode>main</mode>\n" +
                " <pre_shared_key>9qgN9c2hIlDqvUsqIISTupsmYf.nmAGa</pre_shared_key>\n" +
                " </ike>\n" +
                " <ipsec>\n" +
                " <protocol>esp</protocol>\n" +
                " <authentication_protocol>hmac-sha1-96</authentication_protocol>\n" +
                " <encryption_protocol>aes-128-cbc</encryption_protocol>\n" +
                " <lifetime>3600</lifetime>\n" +
                " <perfect_forward_secrecy>group2</perfect_forward_secrecy>\n" +
                " <mode>tunnel</mode>\n" +
                " <clear_df_bit>true</clear_df_bit>\n" +
                " <fragmentation_before_encryption>true</fragmentation_before_encryption>\n" +
                " <tcp_mss_adjustment>1379</tcp_mss_adjustment>\n" +
                " <dead_peer_detection>\n" +
                " <interval>10</interval>\n" +
                " <retries>3</retries>\n" +
                " </dead_peer_detection>\n" +
                " </ipsec>\n" +
                " </ipsec_tunnel>\n" +
                " <ipsec_tunnel>\n" +
                " <customer_gateway>\n" +
                " <tunnel_outside_address>\n" +
                " <ip_address>3.3.3.3</ip_address>\n" +
                " </tunnel_outside_address>\n" +
                " <tunnel_inside_address>\n" +
                " <ip_address>169.254.241.62</ip_address>\n" +
                " <network_mask>255.255.255.252</network_mask>\n" +
                " <network_cidr>30</network_cidr>\n" +
                " </tunnel_inside_address>\n" +
                " <bgp>\n" +
                " <asn>65000</asn>\n" +
                " <hold_time>30</hold_time>\n" +
                " </bgp>\n" +
                " </customer_gateway>\n" +
                " <vpn_gateway>\n" +
                " <tunnel_outside_address>\n" +
                " <ip_address>54.203.121.61</ip_address>\n" +
                " </tunnel_outside_address>\n" +
                " <tunnel_inside_address>\n" +
                " <ip_address>169.254.241.61</ip_address>\n" +
                " <network_mask>255.255.255.252</network_mask>\n" +
                " <network_cidr>30</network_cidr>\n" +
                " </tunnel_inside_address>\n" +
                /**
                " <bgp>\n" +
                " <asn>64512</asn>\n" +
                " <hold_time>30</hold_time>\n" +
                " </bgp>\n" +
                */
                " </vpn_gateway>\n" +
                " <ike>\n" +
                " <authentication_protocol>sha1</authentication_protocol>\n" +
                " <encryption_protocol>aes-128-cbc</encryption_protocol>\n" +
                " <lifetime>28800</lifetime>\n" +
                " <perfect_forward_secrecy>group2</perfect_forward_secrecy>\n" +
                " <mode>main</mode>\n" +
                " <pre_shared_key>7KIu40TClmqbkrfChyok.9wCVCl4Ts2K</pre_shared_key>\n" +
                " </ike>\n" +
                " <ipsec>\n" +
                " <protocol>esp</protocol>\n" +
                " <authentication_protocol>hmac-sha1-96</authentication_protocol>\n" +
                " <encryption_protocol>aes-128-cbc</encryption_protocol>\n" +
                " <lifetime>3600</lifetime>\n" +
                " <perfect_forward_secrecy>group2</perfect_forward_secrecy>\n" +
                " <mode>tunnel</mode>\n" +
                " <clear_df_bit>true</clear_df_bit>\n" +
                " <fragmentation_before_encryption>true</fragmentation_before_encryption>\n" +
                " <tcp_mss_adjustment>1379</tcp_mss_adjustment>\n" +
                " <dead_peer_detection>\n" +
                " <interval>10</interval>\n" +
                " <retries>3</retries>\n" +
                " </dead_peer_detection>\n" +
                " </ipsec>\n" +
                " </ipsec_tunnel>\n" +
                "</vpn_connection>\n" +
                " \n" +
                " \n" +
                " \n" +
                "";
        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
        // SECURITY: disable DOCTYPE/external-entity processing (XXE hardening) — the
        // configuration XML comes back from an external service.
        dbFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        dbFactory.setXIncludeAware(false);
        dbFactory.setExpandEntityReferences(false);
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
        Document doc = dBuilder.parse(new InputSource(new StringReader(vpC)));
        NodeList vpnConnections = doc.getElementsByTagName("vpn_gateway");
        for (int i = 0; i < vpnConnections.getLength(); ++i) {
            Node n = ((Element)vpnConnections.item(i)).getElementsByTagName("tunnel_outside_address").item(0);
            String publicIP = ((Element)n).getElementsByTagName("ip_address").item(0).getTextContent().trim();
            logger.info("PublicIP = " + publicIP);
            String bgpPeerIP = null;
            String cidr = null;
            Node n2 = ((Element)vpnConnections.item(i)).getElementsByTagName("tunnel_inside_address").item(0);
            if (n2 != null) {
                bgpPeerIP = ((Element)n2).getElementsByTagName("ip_address").item(0).getTextContent().trim();
                cidr = ((Element)n2).getElementsByTagName("network_cidr").item(0).getTextContent().trim();
                logger.info("BGPPeerIP = " + bgpPeerIP);
            }
            Node n3 = ((Element)vpnConnections.item(i)).getElementsByTagName("bgp").item(0);
            String asNo = null;
            if (n3 != null) {
                asNo = ((Element)n3).getElementsByTagName("asn").item(0).getTextContent().trim();
                logger.info("asNo = " + asNo);
            }
            // First vpn_gateway element fills the *1 slots, the second fills the *2 slots.
            // NOTE(review): when tunnel_inside_address is absent the hub address becomes
            // "null/null", same as in the previous revision.
            if (i == 0) {
                awsSiteConfig.setPeerIp1(publicIP);
                awsSiteConfig.setHubAddressSpace1(bgpPeerIP + "/" + cidr);
                if (asNo == null) awsSiteConfig.setBgpEnabled(false);
                else {
                    awsSiteConfig.setBgpPeerIp1(bgpPeerIP);
                    awsSiteConfig.setBgpEnabled(true);
                    awsSiteConfig.setAsNo(Integer.parseInt(asNo));
                }
            } else {
                awsSiteConfig.setPeerIp2(publicIP);
                awsSiteConfig.setHubAddressSpace2(bgpPeerIP + "/" + cidr);
                if (asNo == null) awsSiteConfig.setBgpEnabled(false);
                else {
                    awsSiteConfig.setBgpPeerIp2(bgpPeerIP);
                    awsSiteConfig.setBgpEnabled(true);
                    awsSiteConfig.setAsNo(Integer.parseInt(asNo));
                }
            }
        }
        NodeList ikes = doc.getElementsByTagName("ike");
        for (int i = 0; i < ikes.getLength(); ++i) {
            String preSharedKey = ((Element)ikes.item(i)).getElementsByTagName("pre_shared_key").item(0).getTextContent().trim();
            logger.info("PSK = " + preSharedKey);
            awsSiteConfig.setPsk(preSharedKey);
        }
        return awsSiteConfig;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io.gcp.bigquery;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;
import com.google.api.services.bigquery.model.Clustering;
import com.google.api.services.bigquery.model.EncryptionConfiguration;
import com.google.api.services.bigquery.model.JobConfigurationLoad;
import com.google.api.services.bigquery.model.JobReference;
import com.google.api.services.bigquery.model.TableReference;
import com.google.api.services.bigquery.model.TableSchema;
import com.google.api.services.bigquery.model.TimePartitioning;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import org.apache.beam.sdk.coders.KvCoder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.coders.VoidCoder;
import org.apache.beam.sdk.io.FileSystems;
import org.apache.beam.sdk.io.fs.ResourceId;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers.PendingJob;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers.PendingJobManager;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.SchemaUpdateOption;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.DatasetService;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.JobService;
import org.apache.beam.sdk.options.ValueProvider;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.GroupByKey;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Values;
import org.apache.beam.sdk.transforms.WithKeys;
import org.apache.beam.sdk.transforms.windowing.AfterPane;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.GlobalWindows;
import org.apache.beam.sdk.transforms.windowing.Repeatedly;
import org.apache.beam.sdk.transforms.windowing.Window;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionTuple;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.sdk.values.ShardedKey;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.sdk.values.TupleTagList;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Strings;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Lists;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Maps;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Writes partitions to BigQuery tables.
*
* <p>The input is a list of files corresponding to each partition of a table. These files are
* loaded into a temporary table (or into the final table if there is only one partition). The
* output is a {@link KV} mapping each final table to a list of the temporary tables containing its
* data.
*
* <p>In the case where all the data in the files fit into a single load job, this transform loads
* the data directly into the final table, skipping temporary tables. In this case, the output
* {@link KV} maps the final table to itself.
*/
class WriteTables<DestinationT>
    extends PTransform<
        PCollection<KV<ShardedKey<DestinationT>, List<String>>>,
        PCollection<KV<TableDestination, String>>> {
  private static final Logger LOG = LoggerFactory.getLogger(WriteTables.class);

  // True when each partition is loaded into its own temporary table; false when
  // loading directly into the final destination table.
  private final boolean tempTable;
  private final BigQueryServices bqServices;
  // Side input carrying the job-id prefix shared by all load jobs of this write.
  private final PCollectionView<String> loadJobIdPrefixView;
  // Dispositions applied on the first pane of a window; later panes may override
  // them (see processElement).
  private final WriteDisposition firstPaneWriteDisposition;
  private final CreateDisposition firstPaneCreateDisposition;
  private final Set<SchemaUpdateOption> schemaUpdateOptions;
  private final DynamicDestinations<?, DestinationT> dynamicDestinations;
  private final List<PCollectionView<?>> sideInputs;
  // Main output: (final table destination, JSON-serialized TableReference of the
  // table that was actually loaded — a temp table when tempTable is true).
  private final TupleTag<KV<TableDestination, String>> mainOutputTag;
  // Secondary output: the source file paths of each successful load, used for GC.
  private final TupleTag<String> temporaryFilesTag;
  // Optional project to bill load jobs to; falls back to the table's project.
  private final ValueProvider<String> loadJobProjectId;
  private final int maxRetryJobs;
  private final boolean ignoreUnknownValues;
  @Nullable private final String kmsKey;
  private final String sourceFormat;

  /**
   * Starts one BigQuery load job per input partition and, on completion of the
   * bundle, blocks until every started job has finished (with retries handled by
   * {@link PendingJobManager}).
   */
  private class WriteTablesDoFn
      extends DoFn<KV<ShardedKey<DestinationT>, List<String>>, KV<TableDestination, String>> {
    // Per-bundle cache of destination -> JSON-serialized TableSchema, so
    // dynamicDestinations.getSchema() is called at most once per destination.
    private Map<DestinationT, String> jsonSchemas = Maps.newHashMap();

    // Represents a pending BigQuery load job.
    private class PendingJobData {
      final BoundedWindow window;
      final BigQueryHelpers.PendingJob retryJob;
      final List<String> partitionFiles;
      final TableDestination tableDestination;
      final TableReference tableReference;

      public PendingJobData(
          BoundedWindow window,
          BigQueryHelpers.PendingJob retryJob,
          List<String> partitionFiles,
          TableDestination tableDestination,
          TableReference tableReference) {
        this.window = window;
        this.retryJob = retryJob;
        this.partitionFiles = partitionFiles;
        this.tableDestination = tableDestination;
        this.tableReference = tableReference;
      }
    }
    // All pending load jobs.
    private List<PendingJobData> pendingJobs = Lists.newArrayList();

    @StartBundle
    public void startBundle(StartBundleContext c) {
      // Clear the map on each bundle so we can notice side-input updates.
      // (alternative is to use a cache with a TTL).
      jsonSchemas.clear();
      pendingJobs.clear();
    }

    /**
     * Resolves the schema and table for the element's destination, then starts
     * (but does not wait for) a load job for the element's list of files.
     */
    @ProcessElement
    public void processElement(ProcessContext c, BoundedWindow window) throws Exception {
      dynamicDestinations.setSideInputAccessorFromProcessContext(c);
      DestinationT destination = c.element().getKey().getKey();
      TableSchema tableSchema;
      if (firstPaneCreateDisposition == CreateDisposition.CREATE_NEVER) {
        // CREATE_NEVER means the table already exists; no schema is needed.
        tableSchema = null;
      } else if (jsonSchemas.containsKey(destination)) {
        tableSchema =
            BigQueryHelpers.fromJsonString(jsonSchemas.get(destination), TableSchema.class);
      } else {
        tableSchema = dynamicDestinations.getSchema(destination);
        checkArgument(
            tableSchema != null,
            "Unless create disposition is %s, a schema must be specified, i.e. "
                + "DynamicDestinations.getSchema() may not return null. "
                + "However, create disposition is %s, and %s returned null for destination %s",
            CreateDisposition.CREATE_NEVER,
            firstPaneCreateDisposition,
            dynamicDestinations,
            destination);
        jsonSchemas.put(destination, BigQueryHelpers.toJsonString(tableSchema));
      }

      TableDestination tableDestination = dynamicDestinations.getTable(destination);
      checkArgument(
          tableDestination != null,
          "DynamicDestinations.getTable() may not return null, "
              + "but %s returned null for destination %s",
          dynamicDestinations,
          destination);
      // TableDestinationCoderV2 does not round-trip clustering information, so
      // clustering is only allowed with a coder that supports it (V3).
      boolean destinationCoderSupportsClustering =
          !(dynamicDestinations.getDestinationCoder() instanceof TableDestinationCoderV2);
      checkArgument(
          tableDestination.getClustering() == null || destinationCoderSupportsClustering,
          "DynamicDestinations.getTable() may only return destinations with clustering configured"
              + " if a destination coder is supplied that supports clustering, but %s is configured"
              + " to use TableDestinationCoderV2. Set withClustering() on BigQueryIO.write() and, "
              + " if you provided a custom DynamicDestinations instance, override"
              + " getDestinationCoder() to return TableDestinationCoderV3.",
          dynamicDestinations);
      TableReference tableReference = tableDestination.getTableReference();
      if (Strings.isNullOrEmpty(tableReference.getProjectId())) {
        // Fill in the pipeline's default project when the destination omits one.
        tableReference.setProjectId(c.getPipelineOptions().as(BigQueryOptions.class).getProject());
        tableDestination = tableDestination.withTableReference(tableReference);
      }

      Integer partition = c.element().getKey().getShardNumber();
      List<String> partitionFiles = Lists.newArrayList(c.element().getValue());
      // The job id is deterministic in (prefix, destination, partition, pane) so
      // that retries of this bundle reuse the same BigQuery job.
      String jobIdPrefix =
          BigQueryHelpers.createJobId(
              c.sideInput(loadJobIdPrefixView), tableDestination, partition, c.pane().getIndex());

      if (tempTable) {
        // This is a temp table. Create a new one for each partition and each pane.
        tableReference.setTableId(jobIdPrefix);
      }

      WriteDisposition writeDisposition = firstPaneWriteDisposition;
      CreateDisposition createDisposition = firstPaneCreateDisposition;
      if (c.pane().getIndex() > 0 && !tempTable) {
        // If writing directly to the destination, then the table is created on the first write
        // and we should change the disposition for subsequent writes.
        writeDisposition = WriteDisposition.WRITE_APPEND;
        createDisposition = CreateDisposition.CREATE_NEVER;
      } else if (tempTable) {
        // In this case, we are writing to a temp table and always need to create it.
        // WRITE_TRUNCATE is set so that we properly handle retries of this pane.
        writeDisposition = WriteDisposition.WRITE_TRUNCATE;
        createDisposition = CreateDisposition.CREATE_IF_NEEDED;
      }

      BigQueryHelpers.PendingJob retryJob =
          startLoad(
              bqServices.getJobService(c.getPipelineOptions().as(BigQueryOptions.class)),
              bqServices.getDatasetService(c.getPipelineOptions().as(BigQueryOptions.class)),
              jobIdPrefix,
              tableReference,
              tableDestination.getTimePartitioning(),
              tableDestination.getClustering(),
              tableSchema,
              partitionFiles,
              writeDisposition,
              createDisposition,
              schemaUpdateOptions);
      pendingJobs.add(
          new PendingJobData(window, retryJob, partitionFiles, tableDestination, tableReference));
    }

    /**
     * Blocks until every load job started in this bundle completes; for each
     * successful job, patches the table description (if any) and emits the
     * (destination, loaded table) pair plus the consumed file names for GC.
     */
    @FinishBundle
    public void finishBundle(FinishBundleContext c) throws Exception {
      DatasetService datasetService =
          bqServices.getDatasetService(c.getPipelineOptions().as(BigQueryOptions.class));
      PendingJobManager jobManager = new PendingJobManager();
      for (PendingJobData pendingJob : pendingJobs) {
        jobManager =
            jobManager.addPendingJob(
                pendingJob.retryJob,
                // Lambda called when the job is done.
                j -> {
                  try {
                    if (pendingJob.tableDestination.getTableDescription() != null) {
                      TableReference ref = pendingJob.tableReference;
                      // Patch on a clone without the partition decorator — the
                      // description lives on the base table.
                      datasetService.patchTableDescription(
                          ref.clone()
                              .setTableId(
                                  BigQueryHelpers.stripPartitionDecorator(ref.getTableId())),
                          pendingJob.tableDestination.getTableDescription());
                    }
                    c.output(
                        mainOutputTag,
                        KV.of(
                            pendingJob.tableDestination,
                            BigQueryHelpers.toJsonString(pendingJob.tableReference)),
                        pendingJob.window.maxTimestamp(),
                        pendingJob.window);
                    for (String file : pendingJob.partitionFiles) {
                      c.output(
                          temporaryFilesTag,
                          file,
                          pendingJob.window.maxTimestamp(),
                          pendingJob.window);
                    }
                    // null signals success to the PendingJobManager.
                    return null;
                  } catch (IOException | InterruptedException e) {
                    return e;
                  }
                });
      }
      jobManager.waitForDone();
    }
  }

  /** Deletes a batch of temporary files that have been successfully loaded. */
  private static class GarbageCollectTemporaryFiles extends DoFn<Iterable<String>, Void> {
    @ProcessElement
    public void processElement(ProcessContext c) throws Exception {
      removeTemporaryFiles(c.element());
    }
  }

  public WriteTables(
      boolean tempTable,
      BigQueryServices bqServices,
      PCollectionView<String> loadJobIdPrefixView,
      WriteDisposition writeDisposition,
      CreateDisposition createDisposition,
      List<PCollectionView<?>> sideInputs,
      DynamicDestinations<?, DestinationT> dynamicDestinations,
      @Nullable ValueProvider<String> loadJobProjectId,
      int maxRetryJobs,
      boolean ignoreUnknownValues,
      String kmsKey,
      String sourceFormat,
      Set<SchemaUpdateOption> schemaUpdateOptions) {
    this.tempTable = tempTable;
    this.bqServices = bqServices;
    this.loadJobIdPrefixView = loadJobIdPrefixView;
    this.firstPaneWriteDisposition = writeDisposition;
    this.firstPaneCreateDisposition = createDisposition;
    this.sideInputs = sideInputs;
    this.dynamicDestinations = dynamicDestinations;
    this.mainOutputTag = new TupleTag<>("WriteTablesMainOutput");
    this.temporaryFilesTag = new TupleTag<>("TemporaryFiles");
    this.loadJobProjectId = loadJobProjectId;
    this.maxRetryJobs = maxRetryJobs;
    this.ignoreUnknownValues = ignoreUnknownValues;
    this.kmsKey = kmsKey;
    this.sourceFormat = sourceFormat;
    this.schemaUpdateOptions = schemaUpdateOptions;
  }

  @Override
  public PCollection<KV<TableDestination, String>> expand(
      PCollection<KV<ShardedKey<DestinationT>, List<String>>> input) {
    PCollectionTuple writeTablesOutputs =
        input.apply(
            ParDo.of(new WriteTablesDoFn())
                .withSideInputs(sideInputs)
                .withOutputTags(mainOutputTag, TupleTagList.of(temporaryFilesTag)));

    // Garbage collect temporary files.
    // We mustn't start garbage collecting files until we are assured that the WriteTablesDoFn has
    // succeeded in loading those files and won't be retried. Otherwise, we might fail part of the
    // way through deleting temporary files, and retry WriteTablesDoFn. This will then fail due
    // to missing files, causing either the entire workflow to fail or get stuck (depending on how
    // the runner handles persistent failures).
    // The GroupByKey under a single (null) key provides exactly that barrier:
    // elements only reach the GC DoFn after the producing bundle has committed.
    writeTablesOutputs
        .get(temporaryFilesTag)
        .setCoder(StringUtf8Coder.of())
        .apply(WithKeys.of((Void) null))
        .setCoder(KvCoder.of(VoidCoder.of(), StringUtf8Coder.of()))
        .apply(
            Window.<KV<Void, String>>into(new GlobalWindows())
                .triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(1)))
                .discardingFiredPanes())
        .apply(GroupByKey.create())
        .apply(Values.create())
        .apply(ParDo.of(new GarbageCollectTemporaryFiles()));

    return writeTablesOutputs.get(mainOutputTag);
  }

  /**
   * Builds the load-job configuration and returns a {@link PendingJob} wrapping
   * three callbacks: start the job, poll it, and look it up — all addressed by a
   * deterministic job id so retries are idempotent. Does not block.
   */
  private PendingJob startLoad(
      JobService jobService,
      DatasetService datasetService,
      String jobIdPrefix,
      TableReference ref,
      TimePartitioning timePartitioning,
      Clustering clustering,
      @Nullable TableSchema schema,
      List<String> gcsUris,
      WriteDisposition writeDisposition,
      CreateDisposition createDisposition,
      Set<SchemaUpdateOption> schemaUpdateOptions) {
    JobConfigurationLoad loadConfig =
        new JobConfigurationLoad()
            .setDestinationTable(ref)
            .setSchema(schema)
            .setSourceUris(gcsUris)
            .setWriteDisposition(writeDisposition.name())
            .setCreateDisposition(createDisposition.name())
            .setSourceFormat(sourceFormat)
            .setIgnoreUnknownValues(ignoreUnknownValues);
    if (schemaUpdateOptions != null) {
      List<String> options =
          schemaUpdateOptions.stream().map(Enum::name).collect(Collectors.toList());
      loadConfig.setSchemaUpdateOptions(options);
    }
    if (timePartitioning != null) {
      loadConfig.setTimePartitioning(timePartitioning);
      // only set clustering if timePartitioning is set
      if (clustering != null) {
        loadConfig.setClustering(clustering);
      }
    }
    if (kmsKey != null) {
      loadConfig.setDestinationEncryptionConfiguration(
          new EncryptionConfiguration().setKmsKeyName(kmsKey));
    }
    // Bill the job to loadJobProjectId when configured, else the table's project.
    String projectId = loadJobProjectId == null ? ref.getProjectId() : loadJobProjectId.get();
    String bqLocation =
        BigQueryHelpers.getDatasetLocation(datasetService, ref.getProjectId(), ref.getDatasetId());

    PendingJob retryJob =
        new PendingJob(
            // Function to load the data.
            jobId -> {
              JobReference jobRef =
                  new JobReference()
                      .setProjectId(projectId)
                      .setJobId(jobId.getJobId())
                      .setLocation(bqLocation);
              LOG.info(
                  "Loading {} files into {} using job {}, job id iteration {}",
                  gcsUris.size(),
                  ref,
                  jobRef,
                  jobId.getRetryIndex());
              try {
                jobService.startLoadJob(jobRef, loadConfig);
              } catch (IOException | InterruptedException e) {
                LOG.warn("Load job {} failed with {}", jobRef, e.toString());
                throw new RuntimeException(e);
              }
              return null;
            },
            // Function to poll the result of a load job.
            jobId -> {
              JobReference jobRef =
                  new JobReference()
                      .setProjectId(projectId)
                      .setJobId(jobId.getJobId())
                      .setLocation(bqLocation);
              try {
                return jobService.pollJob(jobRef, BatchLoads.LOAD_JOB_POLL_MAX_RETRIES);
              } catch (InterruptedException e) {
                throw new RuntimeException(e);
              }
            },
            // Function to lookup a job.
            jobId -> {
              JobReference jobRef =
                  new JobReference()
                      .setProjectId(projectId)
                      .setJobId(jobId.getJobId())
                      .setLocation(bqLocation);
              try {
                return jobService.getJob(jobRef);
              } catch (InterruptedException | IOException e) {
                throw new RuntimeException(e);
              }
            },
            maxRetryJobs,
            jobIdPrefix);
    return retryJob;
  }

  /**
   * Deletes the given temporary files in one batched {@link FileSystems#delete} call.
   *
   * @param files fully-qualified file paths to remove
   * @throws IOException if the delete fails
   */
  static void removeTemporaryFiles(Iterable<String> files) throws IOException {
    ImmutableList.Builder<ResourceId> fileResources = ImmutableList.builder();
    for (String file : files) {
      fileResources.add(FileSystems.matchNewResource(file, false /* isDirectory */));
    }
    FileSystems.delete(fileResources.build());
  }
}
| |
/*
Copyright 2016 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
// Portions copyright Hiroshi Ito. Licensed under Apache 2.0 license
package com.gs.fw.common.mithra.test.multivm;
import com.gs.fw.common.mithra.test.util.tinyproxy.FastServletProxyFactory;
import com.gs.fw.common.mithra.test.util.tinyproxy.PspServlet;
import com.gs.fw.common.mithra.test.util.tinyproxy.ThankYouWriter;
import java.io.*;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.Properties;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.servlet.Context;
import org.mortbay.jetty.servlet.ServletHolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Infrastructure for multi-VM tests: spawns a second ("slave") JVM that runs this
 * class's {@link #main}, which hosts a tiny Jetty/PspServlet endpoint. The master
 * talks to the slave through a {@link FastServletProxyFactory} proxy, pings it
 * periodically, and shuts it down by writing a newline to the slave's stdin.
 */
public class SlaveVm
{
    private static final Logger logger = LoggerFactory.getLogger(SlaveVm.class.getName());

    protected static final String JAVA_HOME = System.getProperty("java.home");
    protected static final String SYSTEM_CLASSPATH = System.getProperty("java.class.path");
    protected static final String BOOT_CLASSPATH = System.getProperty("sun.boot.class.path");
    protected static final String START_DIRECTORY = System.getProperty("user.dir");
    private static final String LOG4J_CONFIG = "log4j.configuration";
    public static final int PING_INTERVAL_MS = 10000;
    public static final int HEARTBEAT_INTERVAL_MS = 10;

    // Randomly chosen port the slave's PspServlet listens on.
    private int port;
    private int appPort1;
    private int appPort2;
    private StreamFlusher outFlusher;
    private StreamFlusher errFlusher;
    // The slave process's stdin; writing a newline tells it to exit.
    private OutputStream slaveVmInput;
    private SlaveVmPinger slavePinger;
    private RemoteSlaveVm remoteSlaveVm;
    // System properties forwarded to the slave as -D arguments.
    private Properties properties = new Properties();
    private Process otherVm;
    private static final String JACOCO_ARGS = "jacoco.args";
    public static final String DERBY_SERVER_DIRECTORY = "derby.system.home";
    public static final String DERBY_TMP_DIRECTORY = "derby.storage.tempDirectory";
    // Sentinel strings the slave prints on stdout to report startup success/failure.
    private static final String ALL_GOOD = "allgood";
    private static final String ALL_BAD = "allbad";

    public static Logger getLogger()
    {
        return logger;
    }

    /**
     * Picks a random servlet port, redirects the slave's Derby directories to
     * "...slave" variants, and captures the non-java/user/sun system properties
     * to forward to the slave VM.
     */
    public SlaveVm()
    {
        // Random port in [10000, 30000); collisions are possible but unlikely.
        this.port = (int)(Math.random()*20000+10000);
        String log4jConfigValue = System.getProperty(LOG4J_CONFIG);
        if (log4jConfigValue == null)
        {
            logger.error(LOG4J_CONFIG+" was not set!");
        }
        else
        {
            properties.put(LOG4J_CONFIG, log4jConfigValue);
        }
        appendSlaveToDirectory(DERBY_SERVER_DIRECTORY);
        appendSlaveToDirectory(DERBY_TMP_DIRECTORY);
        // System property keys are not guaranteed to be Strings, hence the raw
        // iterator plus cast.
        for(Iterator it = System.getProperties().keySet().iterator(); it.hasNext();)
        {
            String key = (String) it.next();
            if (!key.startsWith("java") && !key.startsWith("user") && !key.startsWith("sun") && properties.get(key) == null)
            {
                properties.put(key, System.getProperty(key));
            }
        }
    }

    public void setApplicationPort1(int appPort1)
    {
        this.appPort1 = appPort1;
    }

    public void setApplicationPort2(int appPort2)
    {
        this.appPort2 = appPort2;
    }

    /**
     * If the system property {@code key} names a directory, appends "slave" to it,
     * creates the directory, and records the new value for forwarding to the slave,
     * so master and slave do not share Derby directories.
     */
    private void appendSlaveToDirectory(String key)
    {
        String derbyTmpDirectory = System.getProperty(key);
        if (derbyTmpDirectory != null)
        {
            derbyTmpDirectory += "slave";
            File derbyDir = new File(derbyTmpDirectory);
            if (!derbyDir.exists())
            {
                derbyDir.mkdirs();
            }
            properties.put(key, derbyTmpDirectory);
        }
    }

    public RemoteSlaveVm getRemoteSlaveVm()
    {
        return remoteSlaveVm;
    }

    /**
     * Launches the slave JVM running {@code testClass}, waits for its startup
     * handshake on stdout, then wires up the remote proxy and the pinger thread.
     * Any startup failure is escalated via {@link #throwFatal(String, Throwable)}.
     */
    public void startSlaveVm(Class testClass)
    {
        ArrayList<String> cmdList = new ArrayList<String>();
        cmdList.add(JAVA_HOME+getJavaBinary());
        if(logger.isDebugEnabled())
        {
            // With debug logging on, start the slave with remote-debug hooks on port 15005.
            cmdList.add("-Xdebug");
            cmdList.add("-Xnoagent");
            cmdList.add("-Djava.compiler=NONE");
            cmdList.add("-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=15005");
        }
        String jacocoArgs = System.getProperty(JACOCO_ARGS);
        if (jacocoArgs != null)
        {
            cmdList.add(jacocoArgs);
        }
        cmdList.add("-ea");
        cmdList.add("-classpath");
        cmdList.add(SYSTEM_CLASSPATH);
        cmdList.add("-server");
        cmdList.add("-XX:MaxPermSize=256m");
        cmdList.add("-Duser.timezone=America/New_York");
        addProperties(cmdList);
        // Arguments consumed by runSlaveVm(): servlet port, test class, app ports.
        cmdList.add(this.getClass().getName());
        cmdList.add(""+port);
        cmdList.add(testClass.getName());
        cmdList.add(""+appPort1);
        cmdList.add(""+appPort2);
        String[] extraArgs = this.getExtraArgs();
        if (extraArgs != null)
        {
            for(String arg: extraArgs)
            {
                cmdList.add(arg);
            }
        }
        String[] cmdAndArgs = new String[cmdList.size()];
        cmdList.toArray(cmdAndArgs);
        try
        {
            // Use a StringBuilder instead of repeated String concatenation in the loop.
            StringBuilder cmdBuilder = new StringBuilder("executing slave vm with");
            for(int i=0;i<cmdAndArgs.length;i++)
            {
                cmdBuilder.append(' ').append(cmdAndArgs[i]);
            }
            String cmdString = cmdBuilder.toString();
            getLogger().debug(cmdString);
            otherVm = Runtime.getRuntime().exec(cmdAndArgs, null, new File(START_DIRECTORY));
            // Slave stderr goes to a temp file so startup failures can be diagnosed.
            File errFile = File.createTempFile("slavevmslave", ".errlog");
            this.errFlusher = new StreamFlusher(otherVm.getErrorStream(), new FileOutputStream(errFile), false);
            this.errFlusher.start();
            waitForAllGood(otherVm.getInputStream(), errFile.getAbsolutePath(), cmdString);
            this.outFlusher = new StreamFlusher(otherVm.getInputStream(), System.out, false);
            this.outFlusher.start();
            this.slaveVmInput = otherVm.getOutputStream();
            FastServletProxyFactory factory = new FastServletProxyFactory();
            remoteSlaveVm = factory.create(RemoteSlaveVm.class, "http://localhost:"+this.port+"/PspServlet");
            this.slavePinger = new SlaveVmPinger(remoteSlaveVm);
            slavePinger.start();
        }
        catch (Throwable e)
        {
            getLogger().error("could not start other vm", e);
            throwFatal("could not start other vm", e);
        }
    }

    /**
     * Writes {@code message} and the full cause chain of {@code e} to a temp error
     * file, then rethrows as a RuntimeException with {@code e} as the cause.
     */
    public void throwFatal(String message, Throwable e)
    {
        try
        {
            File errFile = File.createTempFile("slavevmmaster", ".errlog");
            FileOutputStream out = new FileOutputStream(errFile);
            PrintWriter writer = new PrintWriter(out);
            writer.write(message);
            // BUGFIX: walk the cause chain on a separate variable so the original
            // throwable is preserved as the RuntimeException cause below (the old
            // code nulled out 'e' in this loop and threw with a null cause).
            Throwable cause = e;
            while(cause != null)
            {
                writeError(cause, writer);
                cause = cause.getCause();
            }
            // BUGFIX: flush the PrintWriter before closing the underlying stream,
            // otherwise its buffered content is silently discarded.
            writer.flush();
            out.close();
        }
        catch (IOException e1)
        {
            //ignore - the error file is best-effort diagnostics
        }
        throw new RuntimeException(message, e);
    }

    /** Writes one throwable's class, message, and stack trace to the writer. */
    public void writeError(Throwable e, PrintWriter writer)
    {
        writer.write(e.getClass().getName()+": "+e.getMessage());
        e.printStackTrace(writer);
    }

    /**
     * Writes {@code message} to a temp error file and rethrows it as a
     * RuntimeException (no cause available).
     */
    private static void throwFatal(String message)
    {
        try
        {
            File errFile = File.createTempFile("slavevmmaster", ".errlog");
            FileOutputStream out = new FileOutputStream(errFile);
            PrintWriter writer = new PrintWriter(out);
            writer.write(message);
            // BUGFIX: flush before closing the underlying stream (see throwFatal above).
            writer.flush();
            out.close();
        }
        catch (IOException e1)
        {
            //ignore - the error file is best-effort diagnostics
        }
        throw new RuntimeException(message);
    }

    /** Hook for subclasses to append extra command-line arguments for the slave. */
    protected String[] getExtraArgs()
    {
        return null;
    }

    /**
     * Reads the slave's stdout byte by byte (echoing to System.out) until either
     * the ALL_GOOD or ALL_BAD sentinel appears; ALL_BAD, EOF, or an I/O error are
     * fatal and include the captured output and error-file path for diagnosis.
     */
    private void waitForAllGood(InputStream inputStream, String errFile, String cmdString)
    {
        StringBuffer output = new StringBuffer();
        StreamSearcher goodSearcher = new StreamSearcher(ALL_GOOD);
        StreamSearcher badSearcher = new StreamSearcher(ALL_BAD);
        while(true)
        {
            try
            {
                int read = inputStream.read();
                if (read < 0)
                {
                    throwFatal("Slave VM did not start properly and closed its stream. The stream had: '"
                            + output.toString() + "' in it before closing. Also see error file: " + errFile + " started with command '" + cmdString + "'");
                }
                System.out.print((char) read);
                output.append((char) read);
                goodSearcher.consume(read);
                badSearcher.consume(read);
                if (goodSearcher.isFound())
                {
                    return;
                }
                if (badSearcher.isFound())
                {
                    throwFatal("Slave VM had trouble starting");
                }
            }
            catch (IOException e)
            {
                getLogger().error("could not write output", e);
                throwFatal("Slave VM did not start properly and had an IO exception", e);
            }
        }
    }

    /** Appends each captured property as a -Dname=value argument. */
    private void addProperties(ArrayList<String> cmdList)
    {
        Enumeration propertyNames = this.properties.propertyNames();
        while(propertyNames.hasMoreElements())
        {
            String name = (String) propertyNames.nextElement();
            cmdList.add("-D"+name+"="+this.properties.get(name));
        }
    }

    /**
     * Signals the slave to exit (newline on its stdin), waits up to ~10s for it
     * (via InterrupterThread), then destroys the process and stops helper threads.
     */
    protected void shutdownSlaveVm()
    {
        if (this.outFlusher != null)
        {
            this.outFlusher.setDone(true);
        }
        if (this.errFlusher != null)
        {
            this.errFlusher.setDone(true);
        }
        if (this.slavePinger != null)
        {
            this.slavePinger.setDone(true);
        }
        try
        {
            if (this.slaveVmInput != null)
            {
                // A single newline is the agreed shutdown signal (see runSlaveVm).
                this.slaveVmInput.write('\n');
                this.slaveVmInput.flush();
            }
        }
        catch (IOException e)
        {
            getLogger().error("could not communicate with Slave VM");
        }
        ThankYouWriter.getInstance().stopThankYouThread();
        if (otherVm != null)
        {
            // The InterrupterThread breaks us out of waitFor() if the slave takes
            // longer than ~10 seconds to quit.
            InterrupterThread interrupter = new InterrupterThread(Thread.currentThread());
            interrupter.start();
            try
            {
                otherVm.waitFor();
                interrupter.setDone(true);
            }
            catch (InterruptedException e)
            {
                getLogger().warn("Slave VM took too long to quit");
                // Clear any residual interrupt status left by InterrupterThread so
                // subsequent blocking calls are unaffected. (The old code called the
                // static interrupted() through an instance and discarded the result.)
                Thread.interrupted();
            }
            otherVm.destroy();
        }
        // Stop again after the slave has exited; the old code invoked this twice in
        // a row here — one consecutive duplicate call removed, assuming the stop is
        // idempotent (TODO confirm against ThankYouWriter).
        ThankYouWriter.getInstance().stopThankYouThread();
    }

    /** Returns the platform-appropriate java launcher path suffix under java.home. */
    public String getJavaBinary()
    {
        if (System.getProperty("os.name").toUpperCase().contains("WINDOWS"))
        {
            return "/bin/java.exe";
        }
        return "/bin/java";
    }

    /** Slave-side entry point; args come from startSlaveVm(). */
    public static void main(String[] args)
    {
        runSlaveVm(args, true);
    }

    /**
     * Slave-side main loop: starts Jetty with a PspServlet exposing
     * RemoteSlaveVmImpl, instantiates the test case, prints the ALL_GOOD
     * handshake, then idles until a byte arrives on stdin (the shutdown signal).
     *
     * @param args [0] servlet port, [1] test class name, [2] app port 1, [3] app port 2
     * @param runSlaveVmStartup whether to invoke testCase.slaveVmOnStartup()
     */
    protected static void runSlaveVm(String[] args, boolean runSlaveVmStartup)
    {
        Server server = new Server(Integer.parseInt(args[0]));
        Context context = new Context (server,"/",Context.SESSIONS);
        ServletHolder holder = context.addServlet(PspServlet.class, "/PspServlet");
        holder.setInitParameter("serviceInterface.RemoteSlaveVm", "com.gs.fw.common.mithra.test.multivm.RemoteSlaveVm");
        holder.setInitParameter("serviceClass.RemoteSlaveVm", "com.gs.fw.common.mithra.test.multivm.RemoteSlaveVmImpl");
        holder.setInitOrder(10);
        try
        {
            server.start();
            holder.getServlet();
            Class testClass = Class.forName(args[1]);
            MultiVmTest testCase = (MultiVmTest) testClass.newInstance();
            testCase.setApplicationPorts(Integer.parseInt(args[2]), Integer.parseInt(args[3]));
            RemoteSlaveVmImpl.setTestCase(testCase);
            if (runSlaveVmStartup) testCase.slaveVmOnStartup();
            // Handshake: tells the master's waitForAllGood() that startup succeeded.
            System.out.println(ALL_GOOD);
            // Poll stdin; any byte (the master's newline) means "shut down".
            while(true)
            {
                if (System.in.available() > 0)
                {
                    break;
                }
                Thread.sleep(100);
            }
            if (System.in.read() == -1)
            {
                System.out.println("SlaveVM System.in shutdown???!!!????");
                System.out.flush();
                Thread.sleep(1000);
                throw new IOException("EOF");
            }
            System.out.println("SlaveVm Exiting");
            RemoteSlaveVmImpl.exitSlaveVm();
            System.out.flush();
            System.exit(0);
        }
        catch (Exception e)
        {
            // Handshake failure: the master's waitForAllGood() aborts on ALL_BAD.
            System.out.println(ALL_BAD);
            System.out.println("Could not start slave vm "+e.getClass().getName()+": "+e.getMessage());
            e.printStackTrace();
            System.exit(-1);
        }
        try
        {
            Thread.sleep(60000); // we sleep here to give the master vm time to ping us.
        }
        catch (InterruptedException e)
        {
            // ignore
        }
    }

    /**
     * Daemon thread that polls an InputStream every 200ms and copies whatever is
     * available to an OutputStream until setDone(true).
     */
    private static class StreamFlusher extends Thread
    {
        private InputStream in;
        private OutputStream out;
        private boolean done = false;
        private byte[] buf = new byte[1024];
        private boolean closeOnEnd;

        private StreamFlusher(InputStream in, OutputStream out, boolean closeOnEnd)
        {
            this.in = in;
            this.out = out;
            this.setDaemon(true);
            this.closeOnEnd = closeOnEnd;
        }

        public void setDone(boolean done)
        {
            this.done = done;
        }

        public void run()
        {
            while(!done)
            {
                try
                {
                    sleep(200);
                    int available = this.in.available();
                    while (available > 0)
                    {
                        int read = this.in.read(buf, 0, Math.min(buf.length, available));
                        if (read >= 0)
                        {
                            this.out.write(buf, 0, read);
                            available = this.in.available();
                        }
                        else
                        {
                            // End of stream: flush what we have and optionally close.
                            this.out.flush();
                            if (closeOnEnd)
                            {
                                this.out.close();
                            }
                            break;
                        }
                    }
                }
                catch (InterruptedException e)
                {
                    // nothing to do - the done flag governs termination
                }
                catch (IOException e)
                {
                    getLogger().error("could not write output", e);
                }
            }
        }
    }

    /**
     * Daemon thread that pings the slave every PING_INTERVAL_MS to keep it alive
     * (the slave exits ~60s after losing contact; see runSlaveVm's final sleep).
     */
    private static class SlaveVmPinger extends Thread
    {
        private RemoteSlaveVm remoteSlaveVm;
        private boolean done = false;

        private SlaveVmPinger(RemoteSlaveVm remoteSlaveVm)
        {
            this.remoteSlaveVm = remoteSlaveVm;
            this.setDaemon(true);
        }

        public void setDone(boolean done)
        {
            this.done = done;
        }

        public void run()
        {
            while(!done)
            {
                try
                {
                    sleep(PING_INTERVAL_MS);
                    if (!done) remoteSlaveVm.ping();
                }
                catch (InterruptedException e)
                {
                    // ignore
                }
                catch(Throwable t)
                {
                    getLogger().error("could not ping remote vm", t);
                }
            }
        }
    }

    /**
     * Watchdog that interrupts the given thread after ~10 seconds unless
     * setDone(true) is called first. Used to bound Process.waitFor().
     */
    private static class InterrupterThread extends Thread
    {
        private Thread threadToInterrupt;
        private boolean done = false;

        private InterrupterThread(Thread threadToInterrupt)
        {
            this.threadToInterrupt = threadToInterrupt;
        }

        public synchronized void setDone(boolean done)
        {
            this.done = done;
            this.notify();
        }

        public void run()
        {
            long startTime = System.currentTimeMillis();
            while(!done)
            {
                synchronized(this)
                {
                    try
                    {
                        this.wait(100);
                    }
                    catch (InterruptedException e)
                    {
                        // nothing to do
                    }
                }
                if (System.currentTimeMillis() - startTime >= 10000)
                {
                    threadToInterrupt.interrupt();
                    break;
                }
            }
        }
    }

    /**
     * Incremental matcher: feed bytes via consume() and isFound() reports whether
     * the target string has appeared in the byte stream.
     */
    private static class StreamSearcher
    {
        private String target;
        // Index of the next target character we expect to see.
        private int currentLocation = 0;
        private boolean found;

        private StreamSearcher(String target)
        {
            this.target = target;
        }

        public void consume(int read)
        {
            if (!found)
            {
                if (read == this.target.charAt(currentLocation))
                {
                    this.currentLocation++;
                    if (currentLocation == this.target.length()) found = true;
                }
                else this.currentLocation = 0;
            }
        }

        public boolean isFound()
        {
            return found;
        }
    }
}
| |
package main.browser;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.List;
import javax.swing.GroupLayout;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.ListSelectionModel;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.AbstractTableModel;
import main.sonogram.SonogramPreference;
import main.sonogram.SonogramPreference.EnumWindowFunction;
public class SonogramPreferenceCard extends JPanel {
/**
*
*/
private static final long serialVersionUID = 1L;
private final Browser browser;
private final SonogramPreferenceListener sonogramPreferenceListener;
private final JButton closeButton;
private final JPanel navBar;
private final JLabel tagLabel;
private final JTextField tagTextField;
private final JLabel bufferSizeLabel;
private final JTextField bufferSizeTextField;
private final JLabel windowFunctionLabel;
private final JComboBox<EnumWindowFunction> windowFunctionComboBox;
private final JLabel loCutoffLabel;
private final JTextField loCutoffTextField;
private final JLabel hiCutoffLabel;
private final JTextField hiCutoffTextField;
private final JLabel componentsLabel;
private final JTextField componentsTextField;
private final JButton addSonogramPreferenceRecButton;
private final JButton removeSonogramPreferenceRecButton;
private final JPanel editor;
private final JPanel spacerPanel;
private final JPanel infoPanel;
private JTable table = null;
List<SonogramPreference> sonogramPreferenceList;
/**
 * Builds the sonogram-preference editor card: a selector table on top (NORTH),
 * the field editor in the CENTER with a fixed-width spacer on the WEST, and the
 * action bar on the SOUTH. All controls share one listener instance, dispatched
 * by action command.
 */
SonogramPreferenceCard(Browser b) {
	browser = b;
	setLayout(new BorderLayout());

	// Single listener handles every control; must exist before any registration.
	sonogramPreferenceListener = new SonogramPreferenceListener();

	// Field labels.
	tagLabel = new JLabel("Tag");
	windowFunctionLabel = new JLabel("Window Function");
	bufferSizeLabel = new JLabel("Buffer Size");
	loCutoffLabel = new JLabel("Low Frequency Cutoff (Hz)");
	hiCutoffLabel = new JLabel("Hi Frequency Cutoff (Hz)");
	componentsLabel = new JLabel("Components");

	// Tag text field.
	tagTextField = new JTextField();
	tagTextField.setActionCommand("sono tag change");
	tagTextField.addActionListener(sonogramPreferenceListener);

	// Window-function selector, populated with every enum constant.
	windowFunctionComboBox = new JComboBox<EnumWindowFunction>();
	for (EnumWindowFunction fn : EnumWindowFunction.values()) {
		windowFunctionComboBox.addItem(fn);
	}
	windowFunctionComboBox.setActionCommand("sono window function change");
	windowFunctionComboBox.addActionListener(sonogramPreferenceListener);

	// Numeric text fields.
	bufferSizeTextField = new JTextField();
	bufferSizeTextField.setActionCommand("sono buffer size change");
	bufferSizeTextField.addActionListener(sonogramPreferenceListener);

	loCutoffTextField = new JTextField();
	loCutoffTextField.setActionCommand("sono lo cutoff change");
	loCutoffTextField.addActionListener(sonogramPreferenceListener);

	hiCutoffTextField = new JTextField();
	hiCutoffTextField.setActionCommand("sono hi cutoff change");
	hiCutoffTextField.addActionListener(sonogramPreferenceListener);

	componentsTextField = new JTextField();
	componentsTextField.setActionCommand("sono components change");
	componentsTextField.addActionListener(sonogramPreferenceListener);

	// Buttons; "Remove" stays disabled until a table row is selected.
	closeButton = new JButton("Close");
	closeButton.setActionCommand("close");
	closeButton.addActionListener(sonogramPreferenceListener);

	addSonogramPreferenceRecButton = new JButton("Add Record");
	addSonogramPreferenceRecButton.setActionCommand("add sonogram settings rec");
	addSonogramPreferenceRecButton.addActionListener(sonogramPreferenceListener);

	removeSonogramPreferenceRecButton = new JButton("Remove");
	removeSonogramPreferenceRecButton.setActionCommand("remove");
	removeSonogramPreferenceRecButton.addActionListener(sonogramPreferenceListener);
	removeSonogramPreferenceRecButton.setEnabled(false);

	// Load stored preferences and assemble the card.
	sonogramPreferenceList = SonogramPreference.retrieve();

	navBar = new SonogramPreferenceSelect();
	add(navBar, BorderLayout.NORTH);

	editor = new Editor();
	spacerPanel = new JPanel();
	spacerPanel.setPreferredSize(new Dimension(150, 20));
	add(spacerPanel, BorderLayout.WEST);
	add(editor, BorderLayout.CENTER);

	infoPanel = new ActionBar();
	add(infoPanel, BorderLayout.SOUTH);
}
/**
 * Handles both table-row selection (loading the selected sonogram preference
 * record into the editor) and all button/text-field actions of the panel.
 */
private class SonogramPreferenceListener implements ActionListener, ListSelectionListener {
    @Override
    public void valueChanged(ListSelectionEvent e) {
        // Only react to the final event of a selection gesture.
        if (e.getValueIsAdjusting()) return;
        ListSelectionModel lsm = (ListSelectionModel)e.getSource();
        if (lsm.isSelectionEmpty()) {
            removeSonogramPreferenceRecButton.setEnabled(false);
        } else {
            int selectedRow = lsm.getMinSelectionIndex();
            long spid = sonogramPreferenceList.get(selectedRow).getSpId();
            // Reload the full record for the selected row and show it in the editor.
            browser.databaseCard.sonogramPreference = SonogramPreference.retrieve(spid);
            removeSonogramPreferenceRecButton.setEnabled(true);
            loadEditorValues();
            editor.invalidate();
            revalidate();
        }
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        String ac = e.getActionCommand();
        // Fixed: the original compared action commands with '==', which only worked
        // because literal Strings are interned. A String switch uses equals() semantics.
        switch (ac) {
        case "add sonogram settings rec":
            storeSonogramPreferenceEditorValues();
            browser.databaseCard.sonogramPreference.create();
            table.invalidate();
            revalidate();
            break;
        case "remove":
            browser.databaseCard.sonogramPreference.remove();
            table.invalidate();
            revalidate();
            break;
        case "close":
            browser.showContent();
            break;
        case "sono tag change":
            browser.databaseCard.sonogramPreference.setTag(tagTextField.getText());
            break;
        case "sono window function change":
            browser.databaseCard.sonogramPreference.setEnumWindowFunction((EnumWindowFunction) windowFunctionComboBox.getSelectedItem());
            break;
        case "sono buffer size change":
            browser.databaseCard.sonogramPreference.setBufferSize(Integer.parseInt(bufferSizeTextField.getText()));
            break;
        case "sono lo cutoff change":
            browser.databaseCard.sonogramPreference.setLoCutoff(Integer.parseInt(loCutoffTextField.getText()));
            break;
        case "sono hi cutoff change":
            browser.databaseCard.sonogramPreference.setHiCutoff(Integer.parseInt(hiCutoffTextField.getText()));
            break;
        case "sono components change":
            browser.databaseCard.sonogramPreference.setComponents(Integer.parseInt(componentsTextField.getText()));
            break;
        default:
            System.out.println("Unsupported action: "+ac);
        }
    }
}
/**
 * Navigation bar hosting the single-selection table of stored sonogram
 * preference records; selection changes are routed to the shared listener.
 */
private class SonogramPreferenceSelect extends JPanel {
    /** Serialization id required by JPanel; panel is never actually serialized. */
    private static final long serialVersionUID = 1L;

    SonogramPreferenceSelect() {
        table = new JTable(new SonoParamTableModel());
        table.setPreferredScrollableViewportSize(new Dimension(300, 100));
        table.setFillsViewportHeight(true);
        table.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
        // Route row-selection events to the panel-wide listener.
        table.getSelectionModel().addListSelectionListener(sonogramPreferenceListener);
        add(new JScrollPane(table), BorderLayout.CENTER);
    }
}
/**
 * Form panel laying out the sonogram preference fields (tag, window function,
 * buffer size, lo/hi cutoff, components) in a two-column GroupLayout.
 */
private class Editor extends JPanel {
    /** Serialization id required by JPanel; panel is never actually serialized. */
    private static final long serialVersionUID = 1L;

    Editor() {
        GroupLayout layout = new GroupLayout(this);
        setLayout(layout);
        // Let GroupLayout insert standard gaps between components and container edges.
        layout.setAutoCreateGaps(true);
        layout.setAutoCreateContainerGaps(true);
        setSonogramComponents();
        // Horizontal: two leading-aligned columns; min==pref==max pins widths
        // so text fields do not stretch when the panel resizes.
        layout.setHorizontalGroup(layout.createSequentialGroup()
            .addGroup(layout.createParallelGroup(GroupLayout.Alignment.LEADING)
                .addComponent(tagLabel)
                .addComponent(tagTextField, 150, 150, 150)
                .addComponent(bufferSizeLabel)
                .addComponent(bufferSizeTextField, 60, 60, 60))
            .addGroup(layout.createParallelGroup(GroupLayout.Alignment.LEADING)
                .addComponent(windowFunctionLabel)
                .addComponent(windowFunctionComboBox, 200, 200, 200)
                .addComponent(loCutoffLabel)
                .addComponent(loCutoffTextField, 60, 60, 60)
                .addComponent(hiCutoffLabel)
                .addComponent(hiCutoffTextField, 60, 60, 60)
                .addComponent(componentsLabel)
                .addComponent(componentsTextField, 60, 60, 60)));
        // Vertical: label row above field row for the first two pairs,
        // then the remaining cutoff/components fields stacked singly.
        layout.setVerticalGroup(layout.createSequentialGroup()
            .addGroup(layout.createParallelGroup()
                .addComponent(tagLabel)
                .addComponent(windowFunctionLabel))
            .addGroup(layout.createParallelGroup()
                .addComponent(tagTextField, 20, 20, 20)
                .addComponent(windowFunctionComboBox, 20, 20, 20))
            .addGroup(layout.createParallelGroup()
                .addComponent(bufferSizeLabel)
                .addComponent(loCutoffLabel))
            .addGroup(layout.createParallelGroup()
                .addComponent(bufferSizeTextField, 20, 20, 20)
                .addComponent(loCutoffTextField, 20, 20, 20))
            .addComponent(hiCutoffLabel)
            .addComponent(hiCutoffTextField, 20, 20, 20)
            .addComponent(componentsLabel)
            .addComponent(componentsTextField, 20, 20, 20));
    }

    // Intentionally empty placeholder; components are created in the enclosing panel.
    private void setSonogramComponents() {
    }
}
/** Bottom button strip: close, add-record, and remove-record actions. */
private class ActionBar extends JPanel {
    /** Serialization id required by JPanel; panel is never actually serialized. */
    private static final long serialVersionUID = 1L;

    ActionBar() {
        // Same buttons, same left-to-right order as before.
        for (JButton button : new JButton[] {
                closeButton,
                addSonogramPreferenceRecButton,
                removeSonogramPreferenceRecButton }) {
            add(button);
        }
    }
}
/**
 * Copies the current sonogram preference record into the editor widgets.
 * Numeric values are rendered with String.valueOf (same text as ""+n).
 */
private void loadEditorValues() {
    SonogramPreference pref = browser.databaseCard.sonogramPreference;
    tagTextField.setText(pref.getTag());
    windowFunctionComboBox.setSelectedItem(pref.getEnumWindowFunction());
    bufferSizeTextField.setText(String.valueOf(pref.getBufferSize()));
    loCutoffTextField.setText(String.valueOf(pref.getLoCutoff()));
    hiCutoffTextField.setText(String.valueOf(pref.getHiCutoff()));
    componentsTextField.setText(String.valueOf(pref.getComponents()));
}
/**
 * Copies the editor widget values back into the current sonogram preference
 * record. Text fields stay best-effort: if any numeric field does not parse,
 * the remaining numeric fields are left unchanged, but the problem is now
 * reported instead of being silently swallowed.
 */
private void storeSonogramPreferenceEditorValues() {
    browser.databaseCard.sonogramPreference.setTag(tagTextField.getText());
    browser.databaseCard.sonogramPreference.setEnumWindowFunction((EnumWindowFunction) windowFunctionComboBox.getSelectedItem());
    try {
        browser.databaseCard.sonogramPreference.setBufferSize(Integer.parseInt(bufferSizeTextField.getText()));
        browser.databaseCard.sonogramPreference.setLoCutoff(Integer.parseInt(loCutoffTextField.getText()));
        browser.databaseCard.sonogramPreference.setHiCutoff(Integer.parseInt(hiCutoffTextField.getText()));
        browser.databaseCard.sonogramPreference.setComponents(Integer.parseInt(componentsTextField.getText()));
    } catch (NumberFormatException e) {
        // Previously an empty catch: the user got no hint their input was dropped.
        System.err.println("Ignoring non-numeric sonogram preference field: " + e.getMessage());
    }
}
/**
 * Read-only two-column table model (record id, tag) backed by the
 * enclosing panel's sonogramPreferenceList.
 */
private class SonoParamTableModel extends AbstractTableModel {
    /** Serialization id required by AbstractTableModel. */
    private static final long serialVersionUID = 1L;

    /** Column headers, by column index. */
    private final String[] columnName = { "ID", "Tag" };

    @Override
    public String getColumnName(int column) {
        return columnName[column];
    }

    @Override
    public int getRowCount() {
        return sonogramPreferenceList.size();
    }

    @Override
    public int getColumnCount() {
        return columnName.length;
    }

    @Override
    public boolean isCellEditable(int rowIndex, int columnIndex) {
        return false;
    }

    @Override
    public Object getValueAt(int rowIndex, int columnIndex) {
        // Out-of-range rows and unknown columns yield null, as before.
        if (rowIndex < 0 || rowIndex >= sonogramPreferenceList.size()) {
            return null;
        }
        SonogramPreference sp = sonogramPreferenceList.get(rowIndex);
        switch (columnIndex) {
        case 0:
            return sp.getSpId();
        case 1:
            return sp.getTag();
        default:
            return null;
        }
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lens.cube.parse;
import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_TABLE_OR_COL;
import java.util.Iterator;
import org.apache.lens.cube.error.LensCubeErrorCode;
import org.apache.lens.cube.metadata.CubeMeasure;
import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
import org.apache.lens.server.api.error.LensException;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.antlr.runtime.CommonToken;
import lombok.extern.slf4j.Slf4j;
/**
 * <p> Replace select and having columns with default aggregate functions on them, if default aggregate is defined and
 * if there isn't already an aggregate function specified on the columns. </p> <p/> <p> Expressions which already
 * contain aggregate sub-expressions will not be changed. </p> <p/> <p> At this point it's assumed that aliases have
 * been added to all columns. </p>
 */
@Slf4j
class AggregateResolver implements ContextRewriter {
  // Configuration is unused here; the constructor exists to satisfy the
  // ContextRewriter construction convention.
  public AggregateResolver(Configuration conf) {
  }

  /**
   * Entry point: prunes aggregated candidate facts when the query cannot be
   * answered by them, then (unless disabled) wraps bare measures in their
   * default aggregates and optionally promotes SELECT to SELECT DISTINCT.
   *
   * @param cubeql query context to rewrite in place
   * @throws LensException if a measure has no default aggregate when one is required
   */
  @Override
  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
    // Nothing to do for non-cube queries.
    if (cubeql.getCube() == null) {
      return;
    }
    boolean nonDefaultAggregates = false;
    boolean aggregateResolverDisabled = cubeql.getConf().getBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER,
      CubeQueryConfUtil.DEFAULT_DISABLE_AGGREGATE_RESOLVER);
    // Check if the query contains measures
    // 1. not inside default aggregate expressions
    // 2. With no default aggregate defined
    // 3. there are distinct selection of measures
    // If yes, only the raw (non aggregated) fact can answer this query.
    // In that case remove aggregate facts from the candidate fact list
    if (hasMeasuresInDistinctClause(cubeql, cubeql.getSelectAST(), false)
      || hasMeasuresInDistinctClause(cubeql, cubeql.getHavingAST(), false)
      || hasMeasuresNotInDefaultAggregates(cubeql, cubeql.getSelectAST(), null, aggregateResolverDisabled)
      || hasMeasuresNotInDefaultAggregates(cubeql, cubeql.getHavingAST(), null, aggregateResolverDisabled)
      || hasMeasures(cubeql, cubeql.getWhereAST()) || hasMeasures(cubeql, cubeql.getGroupByAST())
      || hasMeasures(cubeql, cubeql.getOrderByAST())) {
      Iterator<CandidateFact> factItr = cubeql.getCandidateFacts().iterator();
      while (factItr.hasNext()) {
        CandidateFact candidate = factItr.next();
        if (candidate.fact.isAggregated()) {
          cubeql.addFactPruningMsgs(candidate.fact,
            CandidateTablePruneCause.missingDefaultAggregate());
          factItr.remove();
        }
      }
      nonDefaultAggregates = true;
      log.info("Query has non default aggregates, no aggregate resolution will be done");
    }
    cubeql.pruneCandidateFactSet(CandidateTablePruneCode.MISSING_DEFAULT_AGGREGATE);
    if (nonDefaultAggregates || aggregateResolverDisabled) {
      return;
    }
    // Wrap eligible measures in their default aggregate functions.
    resolveClause(cubeql, cubeql.getSelectAST());
    resolveClause(cubeql, cubeql.getHavingAST());
    Configuration distConf = cubeql.getConf();
    boolean isDimOnlyDistinctEnabled = distConf.getBoolean(CubeQueryConfUtil.ENABLE_ATTRFIELDS_ADD_DISTINCT,
      CubeQueryConfUtil.DEFAULT_ATTR_FIELDS_ADD_DISTINCT);
    if (isDimOnlyDistinctEnabled) {
      // Check if any measure/aggregate columns and distinct clause used in
      // select tree. If not, update selectAST token "SELECT" to "SELECT DISTINCT"
      if (!hasMeasures(cubeql, cubeql.getSelectAST()) && !isDistinctClauseUsed(cubeql.getSelectAST())
        && !HQLParser.hasAggregate(cubeql.getSelectAST())) {
        cubeql.getSelectAST().getToken().setType(HiveParser.TOK_SELECTDI);
      }
    }
  }

  // We need to traverse the clause looking for eligible measures which can be
  // wrapped inside aggregates
  // We have to skip any columns that are already inside an aggregate UDAF
  /**
   * Rewrites every child of the clause in place and returns the rewritten
   * clause as an HQL string (the return value is ignored by rewriteContext).
   */
  private String resolveClause(CubeQueryContext cubeql, ASTNode clause) throws LensException {
    if (clause == null) {
      return null;
    }
    for (int i = 0; i < clause.getChildCount(); i++) {
      transform(cubeql, clause, (ASTNode) clause.getChild(i), i);
    }
    return HQLParser.getString(clause);
  }

  /**
   * Recursively rewrites the subtree rooted at {@code node}: column leaves are
   * wrapped in their default aggregate; subtrees already inside an aggregate
   * AST are left untouched.
   *
   * @param parent  parent of {@code node}; null when rewriting in place is not needed
   * @param nodePos child index of {@code node} within {@code parent}
   */
  private void transform(CubeQueryContext cubeql, ASTNode parent, ASTNode node, int nodePos) throws LensException {
    if (node == null) {
      return;
    }
    int nodeType = node.getToken().getType();
    if (!(HQLParser.isAggregateAST(node))) {
      if (nodeType == HiveParser.TOK_TABLE_OR_COL || nodeType == HiveParser.DOT) {
        // Leaf node
        ASTNode wrapped = wrapAggregate(cubeql, node);
        if (wrapped != node) {
          if (parent != null) {
            parent.setChild(nodePos, wrapped);
            // Check if this node has an alias
            ASTNode sibling = HQLParser.findNodeByPath(parent, Identifier);
            String expr;
            if (sibling != null) {
              expr = HQLParser.getString(parent);
            } else {
              expr = HQLParser.getString(wrapped);
            }
            // Record the rewritten expression as an aggregate expression of the query.
            cubeql.addAggregateExpr(expr.trim());
          }
        }
      } else {
        // Dig deeper in non-leaf nodes
        for (int i = 0; i < node.getChildCount(); i++) {
          transform(cubeql, node, (ASTNode) node.getChild(i), i);
        }
      }
    }
  }

  // Wrap an aggregate function around the node if its a measure, leave it
  // unchanged otherwise
  /**
   * Returns a TOK_FUNCTION node applying the measure's default aggregate to
   * {@code node}, or {@code node} itself if it is not a cube measure or is a
   * queried expression (whose own AST nodes get transformed instead).
   *
   * @throws LensException if the measure defines no default aggregate
   */
  private ASTNode wrapAggregate(CubeQueryContext cubeql, ASTNode node) throws LensException {
    String tabname = null;
    String colname;
    if (node.getToken().getType() == HiveParser.TOK_TABLE_OR_COL) {
      colname = ((ASTNode) node.getChild(0)).getText();
    } else {
      // node in 'alias.column' format
      ASTNode tabident = HQLParser.findNodeByPath(node, TOK_TABLE_OR_COL, Identifier);
      ASTNode colIdent = (ASTNode) node.getChild(1);
      colname = colIdent.getText();
      tabname = tabident.getText();
    }
    String msrname = StringUtils.isBlank(tabname) ? colname : tabname + "." + colname;
    if (cubeql.isCubeMeasure(msrname)) {
      if (cubeql.getQueriedExprs().contains(colname)) {
        // Expression column: transform each of its underlying AST nodes instead
        // of wrapping the expression reference itself.
        String alias = cubeql.getAliasForTableName(cubeql.getCube().getName());
        for (ASTNode exprNode : cubeql.getExprCtx().getExpressionContext(colname, alias).getAllASTNodes()) {
          transform(cubeql, null, exprNode, 0);
        }
        return node;
      } else {
        CubeMeasure measure = cubeql.getCube().getMeasureByName(colname);
        String aggregateFn = measure.getAggregate();
        if (StringUtils.isBlank(aggregateFn)) {
          throw new LensException(LensCubeErrorCode.NO_DEFAULT_AGGREGATE.getValue(), colname);
        }
        // Build: TOK_FUNCTION(Identifier(aggregateFn), node)
        ASTNode fnroot = new ASTNode(new CommonToken(HiveParser.TOK_FUNCTION));
        fnroot.setParent(node.getParent());
        ASTNode fnIdentNode = new ASTNode(new CommonToken(HiveParser.Identifier, aggregateFn));
        fnIdentNode.setParent(fnroot);
        fnroot.addChild(fnIdentNode);
        node.setParent(fnroot);
        fnroot.addChild(node);
        return fnroot;
      }
    } else {
      return node;
    }
  }

  /**
   * True if the subtree contains a measure that is not wrapped in its default
   * aggregate. {@code function} carries the name of the nearest enclosing
   * aggregate function while recursing (null when outside any aggregate).
   */
  private boolean hasMeasuresNotInDefaultAggregates(CubeQueryContext cubeql, ASTNode node, String function,
    boolean aggregateResolverDisabled) {
    if (node == null) {
      return false;
    }
    if (HQLParser.isAggregateAST(node)) {
      if (node.getChild(0).getType() == HiveParser.Identifier) {
        function = BaseSemanticAnalyzer.unescapeIdentifier(node.getChild(0).getText());
      }
    } else if (cubeql.isCubeMeasure(node)) {
      // Exit for the recursion
      String colname;
      if (node.getToken().getType() == HiveParser.TOK_TABLE_OR_COL) {
        colname = ((ASTNode) node.getChild(0)).getText();
      } else {
        // node in 'alias.column' format
        ASTNode colIdent = (ASTNode) node.getChild(1);
        colname = colIdent.getText();
      }
      colname = colname.toLowerCase();
      if (cubeql.getQueriedExprs().contains(colname)) {
        // Expression column: recurse into its underlying AST nodes.
        String cubeAlias = cubeql.getAliasForTableName(cubeql.getCube().getName());
        for (ASTNode exprNode : cubeql.getExprCtx().getExpressionContext(colname, cubeAlias).getAllASTNodes()) {
          if (hasMeasuresNotInDefaultAggregates(cubeql, exprNode, function, aggregateResolverDisabled)) {
            return true;
          }
        }
        return false;
      } else {
        CubeMeasure measure = cubeql.getCube().getMeasureByName(colname);
        if (function != null && !function.isEmpty()) {
          // Get the cube measure object and check if the passed function is the
          // default one set for this measure
          return !function.equalsIgnoreCase(measure.getAggregate());
        } else if (!aggregateResolverDisabled && measure.getAggregate() != null) {
          // not inside any aggregate, but default aggregate exists
          return false;
        }
        return true;
      }
    }
    for (int i = 0; i < node.getChildCount(); i++) {
      if (hasMeasuresNotInDefaultAggregates(cubeql, (ASTNode) node.getChild(i), function, aggregateResolverDisabled)) {
        // Return on the first measure not inside its default aggregate
        return true;
      }
    }
    return false;
  }

  /*
   * Check if distinct keyword used in node
   */
  private boolean isDistinctClauseUsed(ASTNode node) {
    if (node == null) {
      return false;
    }
    if (node.getToken() != null) {
      // DISTINCT appears either as a distinct function call or a distinct select.
      if (node.getToken().getType() == HiveParser.TOK_FUNCTIONDI
        || node.getToken().getType() == HiveParser.TOK_SELECTDI) {
        return true;
      }
    }
    for (int i = 0; i < node.getChildCount(); i++) {
      if (isDistinctClauseUsed((ASTNode) node.getChild(i))) {
        return true;
      }
    }
    return false;
  }

  /**
   * True if a cube measure occurs anywhere under a DISTINCT clause in the
   * subtree. {@code hasDistinct} tracks whether an ancestor was DISTINCT.
   */
  private boolean hasMeasuresInDistinctClause(CubeQueryContext cubeql, ASTNode node, boolean hasDistinct) {
    if (node == null) {
      return false;
    }
    int exprTokenType = node.getToken().getType();
    boolean isDistinct = hasDistinct;
    if (exprTokenType == HiveParser.TOK_FUNCTIONDI || exprTokenType == HiveParser.TOK_SELECTDI) {
      isDistinct = true;
    } else if (cubeql.isCubeMeasure(node) && isDistinct) {
      // Exit for the recursion
      return true;
    }
    for (int i = 0; i < node.getChildCount(); i++) {
      if (hasMeasuresInDistinctClause(cubeql, (ASTNode) node.getChild(i), isDistinct)) {
        // Return on the first measure in distinct clause
        return true;
      }
    }
    return false;
  }

  /** True if any node in the subtree is a cube measure. */
  private boolean hasMeasures(CubeQueryContext cubeql, ASTNode node) {
    if (node == null) {
      return false;
    }
    if (cubeql.isCubeMeasure(node)) {
      return true;
    }
    for (int i = 0; i < node.getChildCount(); i++) {
      if (hasMeasures(cubeql, (ASTNode) node.getChild(i))) {
        return true;
      }
    }
    return false;
  }

  /**
   * Registers every aggregate subtree under {@code root} with the query
   * context; does not descend into a subtree once an aggregate is found.
   */
  static void updateAggregates(ASTNode root, CubeQueryContext cubeql) {
    if (root == null) {
      return;
    }
    if (HQLParser.isAggregateAST(root)) {
      cubeql.addAggregateExpr(HQLParser.getString(root).trim());
    } else {
      for (int i = 0; i < root.getChildCount(); i++) {
        ASTNode child = (ASTNode) root.getChild(i);
        updateAggregates(child, cubeql);
      }
    }
  }
}
| |
package at.int32.sweaty.ui.utils;
import java.awt.Graphics;
import java.awt.Image;
import java.awt.event.InputEvent;
import java.awt.event.MouseEvent;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.DirectColorModel;
import java.awt.image.IndexColorModel;
import java.awt.image.WritableRaster;
import javax.swing.JPanel;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Device;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.FontData;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.ImageData;
import org.eclipse.swt.graphics.PaletteData;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.swt.graphics.Rectangle;
/**
 * Static conversion helpers between AWT/Swing and SWT graphics types:
 * images, fonts, colors, rectangles, points and mouse events.
 */
public class SWTUtils {

    // Sample string whose rendered width is compared when matching AWT and SWT font sizes.
    private final static String Az = "ABCpqr";

    // Shared throwaway component: provides AWT FontMetrics and serves as a dummy event source.
    protected static final JPanel DUMMY_PANEL = new JPanel();

    /**
     * Converts an SWT ImageData to an AWT BufferedImage.
     * Handles direct-color palettes and indexed palettes (including an
     * optional transparent pixel) by copying pixel-by-pixel.
     */
    public static BufferedImage convertToAWT(ImageData data) {
        ColorModel colorModel = null;
        PaletteData palette = data.palette;
        if (palette.isDirect) {
            colorModel = new DirectColorModel(data.depth, palette.redMask, palette.greenMask, palette.blueMask);
            BufferedImage bufferedImage = new BufferedImage(colorModel, colorModel.createCompatibleWritableRaster(data.width, data.height), false, null);
            for (int y = 0; y < data.height; y++) {
                for (int x = 0; x < data.width; x++) {
                    int pixel = data.getPixel(x, y);
                    RGB rgb = palette.getRGB(pixel);
                    // Pack RGB channels into the 0xRRGGBB int expected by setRGB.
                    bufferedImage.setRGB(x, y, rgb.red << 16 | rgb.green << 8 | rgb.blue);
                }
            }
            return bufferedImage;
        } else {
            // Indexed palette: rebuild the color table as separate channel arrays.
            RGB[] rgbs = palette.getRGBs();
            byte[] red = new byte[rgbs.length];
            byte[] green = new byte[rgbs.length];
            byte[] blue = new byte[rgbs.length];
            for (int i = 0; i < rgbs.length; i++) {
                RGB rgb = rgbs[i];
                red[i] = (byte)rgb.red;
                green[i] = (byte)rgb.green;
                blue[i] = (byte)rgb.blue;
            }
            if (data.transparentPixel != -1) {
                colorModel = new IndexColorModel(data.depth, rgbs.length, red, green, blue, data.transparentPixel);
            } else {
                colorModel = new IndexColorModel(data.depth, rgbs.length, red, green, blue);
            }
            BufferedImage bufferedImage = new BufferedImage(colorModel, colorModel.createCompatibleWritableRaster(data.width, data.height), false, null);
            WritableRaster raster = bufferedImage.getRaster();
            int[] pixelArray = new int[1];
            for (int y = 0; y < data.height; y++) {
                for (int x = 0; x < data.width; x++) {
                    int pixel = data.getPixel(x, y);
                    pixelArray[0] = pixel;
                    raster.setPixel(x, y, pixelArray);
                }
            }
            return bufferedImage;
        }
    }

    /**
     * Builds SWT FontData matching the given AWT font.
     *
     * @param device         SWT device whose DPI is used for the size conversion
     * @param font           AWT font to convert
     * @param ensureSameSize when true, iteratively adjusts the SWT height until
     *                       the rendered width of a sample string matches AWT's
     */
    public static FontData toSwtFontData(Device device, java.awt.Font font, boolean ensureSameSize) {
        FontData fontData = new FontData();
        fontData.setName(font.getFamily());
        int style = SWT.NORMAL;
        switch (font.getStyle()) {
        case java.awt.Font.PLAIN:
            style |= SWT.NORMAL;
            break;
        case java.awt.Font.BOLD:
            style |= SWT.BOLD;
            break;
        case java.awt.Font.ITALIC:
            style |= SWT.ITALIC;
            break;
        case (java.awt.Font.ITALIC + java.awt.Font.BOLD):
            style |= SWT.ITALIC | SWT.BOLD;
            break;
        }
        fontData.setStyle(style);
        // convert the font size (in pt for awt) to height in pixels for swt
        // NOTE(review): this multiplies by 72/DPI (the inverse of toAwtFont's
        // DPI/72); the two methods round-trip consistently, but confirm the
        // direction is intended before touching either formula.
        int height = (int) Math.round(font.getSize() * 72.0 / device.getDPI().y);
        fontData.setHeight(height);
        // hack to ensure the newly created swt fonts will be rendered with the
        // same height as the awt one
        if (ensureSameSize) {
            GC tmpGC = new GC(device);
            Font tmpFont = new Font(device, fontData);
            tmpGC.setFont(tmpFont);
            if (tmpGC.textExtent(Az).x > DUMMY_PANEL.getFontMetrics(font).stringWidth(Az)) {
                // SWT renders wider: shrink until widths match or cross over.
                while (tmpGC.textExtent(Az).x > DUMMY_PANEL.getFontMetrics(font).stringWidth(Az)) {
                    tmpFont.dispose();
                    height--;
                    fontData.setHeight(height);
                    tmpFont = new Font(device, fontData);
                    tmpGC.setFont(tmpFont);
                }
            } else if (tmpGC.textExtent(Az).x < DUMMY_PANEL.getFontMetrics(font).stringWidth(Az)) {
                // SWT renders narrower: grow until widths match or cross over.
                while (tmpGC.textExtent(Az).x < DUMMY_PANEL.getFontMetrics(font).stringWidth(Az)) {
                    tmpFont.dispose();
                    height++;
                    fontData.setHeight(height);
                    tmpFont = new Font(device, fontData);
                    tmpGC.setFont(tmpFont);
                }
            }
            tmpFont.dispose();
            tmpGC.dispose();
        }
        return fontData;
    }

    /**
     * Builds an AWT font matching the given SWT FontData.
     *
     * @param device         SWT device whose DPI is used for the size conversion
     * @param fontData       SWT font description to convert
     * @param ensureSameSize when true, iteratively adjusts the AWT size until
     *                       the rendered width of a sample string matches SWT's
     */
    public static java.awt.Font toAwtFont(Device device, FontData fontData, boolean ensureSameSize) {
        int style;
        switch (fontData.getStyle()) {
        case SWT.NORMAL:
            style = java.awt.Font.PLAIN;
            break;
        case SWT.ITALIC:
            style = java.awt.Font.ITALIC;
            break;
        case SWT.BOLD:
            style = java.awt.Font.BOLD;
            break;
        default:
            // NOTE(review): SWT.BOLD|SWT.ITALIC falls through to PLAIN here,
            // losing both attributes — confirm whether combined styles matter.
            style = java.awt.Font.PLAIN;
            break;
        }
        int height = (int) Math.round(fontData.getHeight() * device.getDPI().y / 72.0);
        // hack to ensure the newly created awt fonts will be rendered with the
        // same height as the swt one
        if (ensureSameSize) {
            GC tmpGC = new GC(device);
            Font tmpFont = new Font(device, fontData);
            tmpGC.setFont(tmpFont);
            // NOTE(review): this local shadows the static DUMMY_PANEL field;
            // harmless but confusing — consider removing the local.
            JPanel DUMMY_PANEL = new JPanel();
            java.awt.Font tmpAwtFont = new java.awt.Font(fontData.getName(), style, height);
            if (DUMMY_PANEL.getFontMetrics(tmpAwtFont).stringWidth(Az) > tmpGC.textExtent(Az).x) {
                // AWT renders wider: shrink the AWT size until widths match.
                while (DUMMY_PANEL.getFontMetrics(tmpAwtFont).stringWidth(Az) > tmpGC
                        .textExtent(Az).x) {
                    height--;
                    tmpAwtFont = new java.awt.Font(fontData.getName(), style, height);
                }
            } else if (DUMMY_PANEL.getFontMetrics(tmpAwtFont).stringWidth(Az) < tmpGC
                    .textExtent(Az).x) {
                // AWT renders narrower: grow the AWT size until widths match.
                while (DUMMY_PANEL.getFontMetrics(tmpAwtFont).stringWidth(Az) < tmpGC
                        .textExtent(Az).x) {
                    height++;
                    tmpAwtFont = new java.awt.Font(fontData.getName(), style, height);
                }
            }
            tmpFont.dispose();
            tmpGC.dispose();
        }
        return new java.awt.Font(fontData.getName(), style, height);
    }

    /**
     * Converts an SWT font to an AWT font using its first FontData entry,
     * with size matching enabled.
     */
    public static java.awt.Font toAwtFont(Device device, Font font) {
        FontData fontData = font.getFontData()[0];
        return toAwtFont(device, fontData, true);
    }

    /** Converts an SWT color to the equivalent AWT color (RGB channels copied). */
    public static java.awt.Color toAwtColor(Color color) {
        return new java.awt.Color(color.getRed(), color.getGreen(), color.getBlue());
    }

    /**
     * Converts an AWT Paint to an SWT color. Only solid {@code java.awt.Color}
     * paints are supported; anything else logs a warning (via a deliberately
     * thrown-and-caught exception's stack trace) and falls back to black.
     */
    public static Color toSwtColor(Device device, java.awt.Paint paint) {
        java.awt.Color color;
        if (paint instanceof java.awt.Color) {
            color = (java.awt.Color) paint;
        } else {
            // NOTE(review): throw/catch used purely to print a stack trace for
            // the unsupported-paint warning; a logger call would be cleaner.
            try {
                throw new Exception("only color is supported at present... "
                        + "setting paint to uniform black color");
            } catch (Exception e) {
                e.printStackTrace();
                color = new java.awt.Color(0, 0, 0);
            }
        }
        return new org.eclipse.swt.graphics.Color(device, color.getRed(), color.getGreen(),
                color.getBlue());
    }

    /** Converts an AWT color to an SWT color on the given device. */
    public static Color toSwtColor(Device device, java.awt.Color color) {
        return new org.eclipse.swt.graphics.Color(device, color.getRed(), color.getGreen(),
                color.getBlue());
    }

    /** Converts an AWT Rectangle2D to an SWT Rectangle, rounding each coordinate. */
    public static Rectangle toSwtRectangle(Rectangle2D rect2d) {
        return new Rectangle((int) Math.round(rect2d.getMinX()),
                (int) Math.round(rect2d.getMinY()), (int) Math.round(rect2d.getWidth()),
                (int) Math.round(rect2d.getHeight()));
    }

    /** Converts an SWT Rectangle to an AWT Rectangle2D.Double. */
    public static Rectangle2D toAwtRectangle(Rectangle rect) {
        Rectangle2D rect2d = new Rectangle2D.Double();
        rect2d.setRect(rect.x, rect.y, rect.width, rect.height);
        return rect2d;
    }

    /** Converts an SWT Point to an AWT Point (integer coordinates preserved). */
    public static Point2D toAwtPoint(Point p) {
        return new java.awt.Point(p.x, p.y);
    }

    /** Converts an AWT Point to an SWT Point. */
    public static Point toSwtPoint(java.awt.Point p) {
        return new Point(p.x, p.y);
    }

    /** Converts an AWT Point2D to an SWT Point, rounding each coordinate. */
    public static Point toSwtPoint(java.awt.geom.Point2D p) {
        return new Point((int) Math.round(p.getX()), (int) Math.round(p.getY()));
    }

    /**
     * Converts an SWT mouse event to an AWT MouseEvent sourced from the dummy
     * panel, translating button numbers and CTRL/SHIFT/ALT modifier masks.
     */
    public static MouseEvent toAwtMouseEvent(org.eclipse.swt.events.MouseEvent event) {
        int button = MouseEvent.NOBUTTON;
        switch (event.button) {
        case 1:
            button = MouseEvent.BUTTON1;
            break;
        case 2:
            button = MouseEvent.BUTTON2;
            break;
        case 3:
            button = MouseEvent.BUTTON3;
            break;
        }
        int modifiers = 0;
        if ((event.stateMask & SWT.CTRL) != 0) {
            modifiers |= InputEvent.CTRL_DOWN_MASK;
        }
        if ((event.stateMask & SWT.SHIFT) != 0) {
            modifiers |= InputEvent.SHIFT_DOWN_MASK;
        }
        if ((event.stateMask & SWT.ALT) != 0) {
            modifiers |= InputEvent.ALT_DOWN_MASK;
        }
        // NOTE(review): event.hashCode() is used as the AWT event id, which is
        // not a defined MouseEvent id constant — confirm downstream consumers
        // never dispatch on the id.
        MouseEvent awtMouseEvent = new MouseEvent(DUMMY_PANEL, event.hashCode(), event.time,
                modifiers, event.x, event.y, 1, false, button);
        return awtMouseEvent;
    }

    /**
     * Converts any AWT Image to SWT ImageData by first rasterizing it into an
     * RGB BufferedImage. Returns null if the image dimensions are not yet known.
     *
     * @throws IllegalArgumentException if {@code image} is null
     */
    public static ImageData convertAWTImageToSWT(Image image) {
        if (image == null) {
            throw new IllegalArgumentException("Null 'image' argument.");
        }
        int w = image.getWidth(null);
        int h = image.getHeight(null);
        if (w == -1 || h == -1) {
            return null;
        }
        BufferedImage bi = new BufferedImage(w, h, BufferedImage.TYPE_INT_RGB);
        Graphics g = bi.getGraphics();
        g.drawImage(image, 0, 0, null);
        g.dispose();
        return convertToSWT(bi);
    }

    /**
     * Converts a BufferedImage with a direct or indexed color model to SWT
     * ImageData; returns null for any other color model.
     */
    public static ImageData convertToSWT(BufferedImage bufferedImage) {
        if (bufferedImage.getColorModel() instanceof DirectColorModel) {
            DirectColorModel colorModel = (DirectColorModel) bufferedImage.getColorModel();
            PaletteData palette = new PaletteData(colorModel.getRedMask(),
                    colorModel.getGreenMask(), colorModel.getBlueMask());
            ImageData data = new ImageData(bufferedImage.getWidth(), bufferedImage.getHeight(),
                    colorModel.getPixelSize(), palette);
            WritableRaster raster = bufferedImage.getRaster();
            int[] pixelArray = new int[3];
            for (int y = 0; y < data.height; y++) {
                for (int x = 0; x < data.width; x++) {
                    raster.getPixel(x, y, pixelArray);
                    int pixel = palette.getPixel(new RGB(pixelArray[0], pixelArray[1],
                            pixelArray[2]));
                    data.setPixel(x, y, pixel);
                }
            }
            return data;
        } else if (bufferedImage.getColorModel() instanceof IndexColorModel) {
            IndexColorModel colorModel = (IndexColorModel) bufferedImage.getColorModel();
            int size = colorModel.getMapSize();
            byte[] reds = new byte[size];
            byte[] greens = new byte[size];
            byte[] blues = new byte[size];
            colorModel.getReds(reds);
            colorModel.getGreens(greens);
            colorModel.getBlues(blues);
            RGB[] rgbs = new RGB[size];
            for (int i = 0; i < rgbs.length; i++) {
                // Mask with 0xFF to undo Java's signed-byte widening.
                rgbs[i] = new RGB(reds[i] & 0xFF, greens[i] & 0xFF, blues[i] & 0xFF);
            }
            PaletteData palette = new PaletteData(rgbs);
            ImageData data = new ImageData(bufferedImage.getWidth(), bufferedImage.getHeight(),
                    colorModel.getPixelSize(), palette);
            data.transparentPixel = colorModel.getTransparentPixel();
            WritableRaster raster = bufferedImage.getRaster();
            int[] pixelArray = new int[1];
            for (int y = 0; y < data.height; y++) {
                for (int x = 0; x < data.width; x++) {
                    raster.getPixel(x, y, pixelArray);
                    data.setPixel(x, y, pixelArray[0]);
                }
            }
            return data;
        }
        // Unsupported color model.
        return null;
    }
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2022 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created on Jul 13, 2004
*/
package org.jkiss.dbeaver.erd.model;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.DBPDataSourceContainer;
import org.jkiss.dbeaver.model.DBUtils;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.struct.DBSEntity;
import org.jkiss.dbeaver.model.struct.DBSObject;
import org.jkiss.dbeaver.model.struct.DBSObjectContainer;
import org.jkiss.dbeaver.model.virtual.DBVUtils;
import java.util.*;
/**
* Represents a Schema in the model. Note that this class also includes
* diagram specific information (layoutManualDesired and layoutManualAllowed fields)
* although ideally these should be in a separate model hierarchy
*
* @author Serge Rider
*/
public class ERDDiagram extends ERDObject<DBSObject> implements ERDContainer {
private static final Log log = Log.getLog(ERDDiagram.class);
/** Per-datasource bookkeeping: ordinal of first appearance plus its entities. */
private static class DataSourceInfo {
    int index;
    List<ERDEntity> entities = new ArrayList<>();

    public DataSourceInfo(int ordinal) {
        index = ordinal;
    }
}
private final ERDContentProvider contentProvider;
private String name;
private final List<ERDEntity> entities = new ArrayList<>();
private final Map<DBPDataSourceContainer, DataSourceInfo> dataSourceMap = new LinkedHashMap<>();
private final Map<DBPDataSourceContainer, Map<DBSObjectContainer, Integer>> dataSourceContainerMap = new LinkedHashMap<>();
private DBSObjectContainer rootObjectContainer;
private boolean layoutManualDesired = true;
private boolean layoutManualAllowed = false;
private boolean needsAutoLayout;
private final Map<DBSEntity, ERDEntity> entityMap = new IdentityHashMap<>();
private final List<ERDNote> notes = new ArrayList<>();
private final List<String> errorMessages = new ArrayList<>();
/**
 * Creates a diagram model.
 *
 * @param container       root database object the diagram is built around
 * @param name            diagram name; must not be null
 * @param contentProvider provider supplying diagram content
 * @throws IllegalArgumentException if {@code name} is null
 */
public ERDDiagram(DBSObject container, String name, ERDContentProvider contentProvider) {
    super(container);
    // Fail fast before storing any state; the original validated only after
    // the fields had already been assigned.
    if (name == null) {
        throw new IllegalArgumentException("Name cannot be null");
    }
    this.name = name;
    this.contentProvider = contentProvider;
}
/** Returns the provider supplying this diagram's content. */
@Override
public ERDContentProvider getContentProvider() {
    return contentProvider;
}

/** Returns the entity's position in the diagram's entity list, or -1 if absent. */
public int getEntityOrder(ERDEntity entity) {
    synchronized (entities) {
        return entities.indexOf(entity);
    }
}
/** Appends an entity to the diagram. */
public void addEntity(ERDEntity entity, boolean reflect) {
    addEntity(entity, -1, reflect);
}

/**
 * Adds an entity at position {@code i} (appends when negative), registers it
 * in the per-datasource and per-container bookkeeping maps, then resolves any
 * still-pending associations.
 *
 * @param entity  entity to add; ignored (with a debug log) if its underlying
 *                DBSEntity is null or not connected to a datasource
 * @param i       insertion index, or negative to append
 * @param reflect whether to fire model-change events for listeners
 */
public void addEntity(ERDEntity entity, int i, boolean reflect) {
    DBSEntity object = entity.getObject();
    if (object == null) {
        log.debug("Null object passed");
        return;
    } else if (object.getDataSource() == null) {
        log.debug("Object " + object.getName() + " is not connected with datasource");
        return;
    }
    synchronized (entities) {
        if (i < 0) {
            entities.add(entity);
        } else {
            entities.add(i, entity);
        }
        entityMap.put(object, entity);
        DBPDataSourceContainer dataSource = object.getDataSource().getContainer();
        // First entity from a datasource assigns it the next ordinal index.
        DataSourceInfo dsInfo = dataSourceMap.computeIfAbsent(dataSource, dsc -> new DataSourceInfo(dataSourceMap.size()));
        dsInfo.entities.add(entity);
        DBSObjectContainer container = DBUtils.getParentOfType(DBSObjectContainer.class, entity.getObject());
        if (container != null) {
            // Track the entity's schema/catalog container order per datasource.
            dataSourceContainerMap.putIfAbsent(dataSource, new LinkedHashMap<>());
            Map<DBSObjectContainer, Integer> containerMap = dataSourceContainerMap.get(dataSource);
            containerMap.putIfAbsent(container, containerMap.size());
        }
    }
    if (reflect) {
        firePropertyChange(PROP_CHILD, null, entity);
        /*
        for (ERDAssociation rel : entity.getReferences()) {
            entity.firePropertyChange(PROP_INPUT, null, rel);
        }
        for (ERDAssociation rel : entity.getAssociations()) {
            entity.firePropertyChange(PROP_OUTPUT, null, rel);
        }
        */
    }
    resolveRelations(reflect);
    if (reflect) {
        // Notify the opposite endpoint of every association touching the new entity.
        for (ERDAssociation rel : entity.getReferences()) {
            rel.getSourceEntity().firePropertyChange(PROP_OUTPUT, null, rel);
        }
        for (ERDAssociation rel : entity.getAssociations()) {
            rel.getTargetEntity().firePropertyChange(PROP_INPUT, null, rel);
        }
    }
}
/** Asks every entity to resolve its still-unresolved associations against this diagram. */
private void resolveRelations(boolean reflect) {
    List<ERDEntity> all = getEntities();
    for (int idx = 0; idx < all.size(); idx++) {
        all.get(idx).resolveRelations(this, reflect);
    }
}
/**
 * Removes an entity from the diagram and from the datasource bookkeeping maps.
 *
 * @param entity  entity to remove
 * @param reflect true to fire a model change event after removal
 */
public synchronized void removeEntity(ERDEntity entity, boolean reflect) {
    synchronized (entities) {
        entityMap.remove(entity.getObject());
        entities.remove(entity);
        DBPDataSourceContainer dataSource = entity.getObject().getDataSource().getContainer();
        DataSourceInfo dsInfo = dataSourceMap.get(dataSource);
        // Guard against a missing entry (e.g. the same entity removed twice):
        // the original dereferenced dsInfo unconditionally and could NPE here.
        if (dsInfo != null) {
            dsInfo.entities.remove(entity);
            // Drop the datasource entry entirely once its last entity is gone
            if (dsInfo.entities.isEmpty()) {
                dataSourceMap.remove(dataSource);
            }
        }
    }
    if (reflect) {
        firePropertyChange(PROP_CHILD, entity, null);
    }
}
/**
 * @return all entities currently shown in the diagram (live backing list)
 */
@Override
public List<ERDEntity> getEntities() {
return entities;
}
/**
 * @return all sticky notes placed on the diagram (live backing list)
 */
public List<ERDNote> getNotes() {
return notes;
}
/**
 * Adds a sticky note to the diagram.
 *
 * @param note    note to add
 * @param reflect true to fire a model change event
 */
public void addNote(ERDNote note, boolean reflect) {
    synchronized (notes) {
        notes.add(note);
    }
    if (!reflect) {
        return;
    }
    firePropertyChange(PROP_CHILD, null, note);
}
/**
 * Removes a sticky note from the diagram.
 *
 * @param note    note to remove
 * @param reflect true to fire a model change event
 */
public void removeNote(ERDNote note, boolean reflect) {
    synchronized (notes) {
        notes.remove(note);
    }
    if (!reflect) {
        return;
    }
    firePropertyChange(PROP_CHILD, note, null);
}
/**
 * @return the diagram name
 */
@NotNull
@Override
public String getName() {
return name;
}
/**
 * Sets the diagram name.
 */
public void setName(String name) {
this.name = name;
}
/**
 * @param layoutManualAllowed whether manual (XY) layout of entities is permitted
 */
public void setLayoutManualAllowed(boolean layoutManualAllowed) {
this.layoutManualAllowed = layoutManualAllowed;
}
/**
 * @return true if manual layout is the desired mode for this diagram
 */
public boolean isLayoutManualDesired() {
return layoutManualDesired;
}
/**
 * @param layoutManualDesired whether manual layout is the desired mode
 */
public void setLayoutManualDesired(boolean layoutManualDesired) {
this.layoutManualDesired = layoutManualDesired;
}
/**
 * @return whether the diagram can be edited; always false here —
 *         presumably overridden by editable diagram subclasses (TODO confirm)
 */
public boolean isEditEnabled() {
return false;
}
/**
 * @return whether individual entities may be laid out manually using the XYLayout
 */
public boolean isLayoutManualAllowed() {
return layoutManualAllowed;
}
/**
 * @return number of entities currently in the diagram
 */
public int getEntityCount() {
return entities.size();
}
/**
 * Creates a shallow copy of this diagram: the entity list/map and the manual
 * layout flags are copied, but the entity objects themselves are shared.
 * NOTE(review): notes, error messages and the datasource/container maps are
 * NOT copied — confirm whether callers rely on that.
 */
public ERDDiagram copy() {
ERDDiagram copy = new ERDDiagram(object, name, contentProvider);
copy.entities.addAll(this.entities);
copy.entityMap.putAll(this.entityMap);
copy.layoutManualDesired = this.layoutManualDesired;
copy.layoutManualAllowed = this.layoutManualAllowed;
return copy;
}
/**
 * Populates the diagram from a collection of database entities and then loads
 * their relations. Entities already present in the diagram are skipped.
 *
 * @param monitor  progress monitor; cancellation stops either phase early
 * @param entities database entities to add
 * @param dbObject the "primary" object; its ERD entity is marked as primary
 * @throws DBException on database access error
 */
public void fillEntities(DBRProgressMonitor monitor, Collection<DBSEntity> entities, DBSObject dbObject) throws DBException {
    // Phase 1: load entity metadata
    monitor.beginTask("Load entities metadata", entities.size());
    List<ERDEntity> entityCache = new ArrayList<>();
    for (DBSEntity table : entities) {
        if (monitor.isCanceled()) {
            break;
        }
        try {
            // Resolve virtual-model wrapper to the underlying real entity
            table = DBVUtils.getRealEntity(monitor, table);
        } catch (DBException e) {
            // Keep the cause in the log instead of dropping it (original logged message only)
            log.error("Error resolving real entity for " + table.getName(), e);
        }
        if (entityMap.containsKey(table)) {
            continue;
        }
        monitor.subTask("Load " + table.getName());
        ERDEntity erdEntity = ERDUtils.makeEntityFromObject(monitor, this, entityCache, table, null);
        erdEntity.setPrimary(table == dbObject);
        addEntity(erdEntity, false);
        entityMap.put(table, erdEntity);
        entityCache.add(erdEntity);
        monitor.worked(1);
    }
    monitor.done();
    // Phase 2: load relations — only for the entities added in this call,
    // so the progress total is entityCache.size(), not the input size
    monitor.beginTask("Load entities' relations", entityCache.size());
    for (ERDEntity erdEntity : entityCache) {
        if (monitor.isCanceled()) {
            break;
        }
        monitor.subTask("Load " + erdEntity.getName());
        erdEntity.addModelRelations(monitor, this, true, false);
        monitor.worked(1);
    }
    monitor.done();
}
/**
 * @return true if at least one entity in the diagram wraps the given table
 */
public boolean containsTable(DBSEntity table) {
    for (ERDEntity candidate : entities) {
        if (candidate.getObject() != table) {
            continue;
        }
        return true;
    }
    return false;
}
/**
 * @return live mapping from database entity to its ERD representation
 */
public Map<DBSEntity, ERDEntity> getEntityMap() {
return entityMap;
}
/**
 * @return the ERD entity wrapping the given table, or null if not on the diagram
 */
public ERDEntity getEntity(DBSEntity table) {
return entityMap.get(table);
}
/**
 * Collects every diagram entity wrapping the given table
 * (one table may appear multiple times on a diagram).
 *
 * @return a new list of matching entities; empty if none
 */
public List<ERDEntity> getEntities(DBSEntity table) {
    List<ERDEntity> matches = new ArrayList<>();
    entities.stream()
        .filter(candidate -> candidate.getObject() == table)
        .forEach(matches::add);
    return matches;
}
/**
 * @return containers of every datasource that has at least one entity on the diagram
 */
@NotNull
public Collection<DBPDataSourceContainer> getDataSources() {
return dataSourceMap.keySet();
}
/**
 * @return the object containers registered for the given datasource,
 *         or null when none have been registered
 */
@Nullable
public Collection<DBSObjectContainer> getObjectContainers(@NotNull DBPDataSourceContainer dataSourceContainer) {
    Map<DBSObjectContainer, Integer> containers = dataSourceContainerMap.get(dataSourceContainer);
    return containers == null ? null : containers.keySet();
}
/**
 * @return the root object container for the diagram, or null if not set
 */
@Nullable
public DBSObjectContainer getRootObjectContainer() {
return rootObjectContainer;
}
/**
 * Sets the root object container for the diagram.
 */
public void setRootObjectContainer(@NotNull DBSObjectContainer rootObjectContainer) {
this.rootObjectContainer = rootObjectContainer;
}
/**
 * @return entities belonging to the given datasource, or an empty list if
 *         that datasource has no entities on the diagram
 */
public List<ERDEntity> getEntities(DBPDataSourceContainer dataSourceContainer) {
DataSourceInfo dsInfo = dataSourceMap.get(dataSourceContainer);
return dsInfo == null ? Collections.emptyList() : dsInfo.entities;
}
/**
 * @return the registration order index of the datasource, or 0 when unknown
 */
public int getDataSourceIndex(DBPDataSourceContainer dataSource) {
DataSourceInfo dsInfo = dataSourceMap.get(dataSource);
return dsInfo == null ? 0 : dsInfo.index;
}
/**
 * Returns the registration order index of a container within a datasource,
 * or 0 when either the datasource or the container is unknown.
 */
public int getContainerIndex(DBPDataSourceContainer dataSource, DBSObjectContainer container) {
    Map<DBSObjectContainer, Integer> containerMap = dataSourceContainerMap.get(dataSource);
    if (containerMap == null) {
        return 0;
    }
    // getOrDefault collapses the original nullable-Integer unboxing dance
    return containerMap.getOrDefault(container, 0);
}
/**
 * Clears the entity list and the entity map.
 * NOTE(review): dataSourceMap, dataSourceContainerMap and notes are NOT
 * cleared here — confirm whether stale entries are acceptable to callers.
 */
public void clear() {
this.entities.clear();
this.entityMap.clear();
}
/**
 * @return true if the diagram should be auto-laid-out on next render
 */
public boolean isNeedsAutoLayout() {
return needsAutoLayout;
}
/**
 * Marks whether the diagram needs an automatic layout pass.
 */
public void setNeedsAutoLayout(boolean needsAutoLayout) {
this.needsAutoLayout = needsAutoLayout;
}
/**
 * Assigns initial bend points to the association named relName between the
 * two elements, if such an association is found.
 * NOTE(review): iterates sourceEntity's references but compares
 * rel.getSourceEntity() against targetEntity — the source/target naming looks
 * inverted; confirm against the serialized-diagram loader that calls this.
 */
public void addInitRelationBends(ERDElement<?> sourceEntity, ERDElement<?> targetEntity, String relName, List<int[]> bends) {
for (ERDAssociation rel : sourceEntity.getReferences()) {
if (rel.getSourceEntity() == targetEntity && relName.equals(rel.getObject().getName())) {
rel.setInitBends(bends);
}
}
}
/**
 * @return every visual element of the diagram: all entities first, then all notes
 */
public List<ERDObject> getContents() {
    int total = entities.size() + notes.size();
    List<ERDObject> allChildren = new ArrayList<>(total);
    allChildren.addAll(entities);
    allChildren.addAll(notes);
    return allChildren;
}
/**
 * @return errors accumulated while building the diagram (live backing list)
 */
public List<String> getErrorMessages() {
return errorMessages;
}
/**
 * Records an error encountered while building the diagram.
 */
public void addErrorMessage(String message) {
errorMessages.add(message);
}
/**
 * Discards all recorded error messages.
 */
public void clearErrorMessages() {
errorMessages.clear();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.sink.timeline;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.reflect.TypeToken;
import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.metrics2.sink.timeline.availability.MetricCollectorHAHelper;
import org.apache.hadoop.metrics2.sink.timeline.availability.MetricCollectorUnavailableException;
import org.apache.hadoop.metrics2.sink.timeline.availability.MetricSinkWriteShardHostnameHashingStrategy;
import org.apache.hadoop.metrics2.sink.timeline.availability.MetricSinkWriteShardStrategy;
import org.apache.http.HttpStatus;
import org.codehaus.jackson.map.AnnotationIntrospector;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.annotate.JsonSerialize;
import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManagerFactory;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.StringWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.security.KeyStore;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Base class for Ambari Metrics System (AMS) sink implementations.
 * Responsibilities visible in this class:
 * <ul>
 *   <li>discovering a live metrics collector host: configured hosts first,
 *       then Zookeeper as a backoff-limited fallback;</li>
 *   <li>caching the chosen collector in an expiring supplier, sharded via a
 *       {@link MetricSinkWriteShardStrategy};</li>
 *   <li>POSTing JSON-serialized metrics over HTTP/HTTPS, with one retry using
 *       a freshly negotiated app cookie when the collector answers 401;</li>
 *   <li>throttling repeated "collector unavailable" log messages.</li>
 * </ul>
 * Concrete sinks supply configuration through the abstract getters at the bottom.
 */
public abstract class AbstractTimelineMetricsSink {
public static final String TAGS_FOR_PREFIX_PROPERTY_PREFIX = "tagsForPrefix.";
public static final String MAX_METRIC_ROW_CACHE_SIZE = "maxRowCacheSize";
public static final String METRICS_SEND_INTERVAL = "sendInterval";
public static final String METRICS_POST_TIMEOUT_SECONDS = "timeout";
public static final String COLLECTOR_HOSTS_PROPERTY = "collector.hosts";
public static final String COLLECTOR_PROTOCOL = "protocol";
public static final String COLLECTOR_PORT = "port";
public static final String ZOOKEEPER_QUORUM = "zookeeper.quorum";
public static final String COLLECTOR_ZOOKEEPER_QUORUM = "metrics.zookeeper.quorum";
public static final int DEFAULT_POST_TIMEOUT_SECONDS = 10;
public static final String SKIP_COUNTER_TRANSFROMATION = "skipCounterDerivative";
public static final String RPC_METRIC_PREFIX = "metric.rpc";
public static final String WS_V1_TIMELINE_METRICS = "/ws/v1/timeline/metrics";
public static final String SSL_KEYSTORE_PATH_PROPERTY = "truststore.path";
public static final String SSL_KEYSTORE_TYPE_PROPERTY = "truststore.type";
public static final String SSL_KEYSTORE_PASSWORD_PROPERTY = "truststore.password";
public static final String HOST_IN_MEMORY_AGGREGATION_ENABLED_PROPERTY = "host_in_memory_aggregation";
public static final String HOST_IN_MEMORY_AGGREGATION_PORT_PROPERTY = "host_in_memory_aggregation_port";
public static final String COLLECTOR_LIVE_NODES_PATH = "/ws/v1/timeline/metrics/livenodes";
public static final String INSTANCE_ID_PROPERTY = "instanceId";
public static final String SET_INSTANCE_ID_PROPERTY = "set.instanceId";
public static final String COOKIE = "Cookie";
private static final String WWW_AUTHENTICATE = "WWW-Authenticate";
private static final String NEGOTIATE = "Negotiate";
// Consecutive failed POSTs; reset on success. Drives both collector failover
// and log throttling (only every NUMBER_OF_SKIPPED_COLLECTOR_EXCEPTIONS-th
// failure is surfaced).
protected final AtomicInteger failedCollectorConnectionsCounter = new AtomicInteger(0);
public static int NUMBER_OF_SKIPPED_COLLECTOR_EXCEPTIONS = 100;
// Same throttling idea for "no collector known at all" messages.
protected static final AtomicInteger nullCollectorCounter = new AtomicInteger(0);
public static int NUMBER_OF_NULL_COLLECTOR_EXCEPTIONS = 20;
public int ZK_CONNECT_TRY_COUNT = 10;
public int ZK_SLEEP_BETWEEN_RETRY_TIME = 2000;
// Distinguishes supplier refresh triggered by cache expiry (true) from one
// triggered inside findPreferredCollectHost (false).
// NOTE(review): read/written from the memoized supplier without
// synchronization and not volatile — confirm the visibility semantics are
// acceptable here.
public boolean shardExpired = true;
private int zookeeperMinBackoffTimeMins = 2;
private int zookeeperMaxBackoffTimeMins = 5;
private long zookeeperBackoffTimeMillis;
private long lastFailedZkRequestTime = 0l;
private SSLSocketFactory sslSocketFactory;
private AppCookieManager appCookieManager = null;
protected final Log LOG;
protected static ObjectMapper mapper;
protected MetricCollectorHAHelper collectorHAHelper;
protected MetricSinkWriteShardStrategy metricSinkWriteShardStrategy;
// Single element cache with fixed expiration - Helps adjacent Sinks as
// well as timed refresh
protected Supplier<String> targetCollectorHostSupplier;
protected final SortedSet<String> allKnownLiveCollectors = new TreeSet<>();
private volatile boolean isInitializedForHA = false;
@SuppressWarnings("all")
private final int RETRY_COUNT_BEFORE_COLLECTOR_FAILOVER = 3;
private final Gson gson = new Gson();
private final Random rand = new Random();
// Cached collector choice expires after a random 60-75 minute interval
// (randomized so adjacent sinks do not all refresh at once).
private static final int COLLECTOR_HOST_CACHE_MAX_EXPIRATION_MINUTES = 75;
private static final int COLLECTOR_HOST_CACHE_MIN_EXPIRATION_MINUTES = 60;
// Shared Jackson mapper: JAXB annotation support, intended to skip null fields.
// NOTE(review): withSerializationInclusion returns a modified COPY of the
// config — verify the NON_NULL inclusion actually takes effect on `mapper`.
static {
mapper = new ObjectMapper();
AnnotationIntrospector introspector = new JaxbAnnotationIntrospector();
mapper.setAnnotationIntrospector(introspector);
mapper.getSerializationConfig()
.withSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
}
public AbstractTimelineMetricsSink() {
// Logger is per-concrete-class so each sink implementation logs under its own name
LOG = LogFactory.getLog(this.getClass());
}
/**
 * Initialize Sink write strategy with respect to HA Collector
 */
protected void init() {
metricSinkWriteShardStrategy = new MetricSinkWriteShardHostnameHashingStrategy(getHostname());
collectorHAHelper = new MetricCollectorHAHelper(getZookeeperQuorum(),
ZK_CONNECT_TRY_COUNT, ZK_SLEEP_BETWEEN_RETRY_TIME);
zookeeperBackoffTimeMillis = getZookeeperBackoffTimeMillis();
isInitializedForHA = true;
}
/**
 * POSTs a JSON payload to the given collector URL.
 * On a 401 response carrying a "Negotiate" WWW-Authenticate header the request
 * is retried once with a freshly obtained app cookie. On success the failure
 * counter is reset. On IOException: the FIRST failure in a streak throws
 * {@link UnableToConnectException}; subsequent failures only return false and
 * are logged at most once per NUMBER_OF_SKIPPED_COLLECTOR_EXCEPTIONS attempts.
 *
 * @param connectUrl full collector endpoint URL (http or https)
 * @param jsonData   serialized TimelineMetrics payload
 * @return true if the POST succeeded with HTTP 200
 */
protected boolean emitMetricsJson(String connectUrl, String jsonData) {
int timeout = getTimeoutSeconds() * 1000;
HttpURLConnection connection = null;
try {
if (connectUrl == null) {
throw new IOException("Unknown URL. Unable to connect to metrics collector.");
}
connection = connectUrl.startsWith("https") ?
getSSLConnection(connectUrl) : getConnection(connectUrl);
if (LOG.isDebugEnabled()) {
LOG.debug("emitMetricsJson to " + connectUrl + ", " + jsonData);
}
AppCookieManager appCookieManager = getAppCookieManager();
String appCookie = appCookieManager.getCachedAppCookie(connectUrl);
if (appCookie != null) {
if (LOG.isInfoEnabled()) {
LOG.info("Using cached app cookie for URL:" + connectUrl);
}
connection.setRequestProperty(COOKIE, appCookie);
}
int statusCode = emitMetricsJson(connection, timeout, jsonData);
if (statusCode == HttpStatus.SC_UNAUTHORIZED ) {
String wwwAuthHeader = connection.getHeaderField(WWW_AUTHENTICATE);
if (LOG.isInfoEnabled()) {
LOG.info("Received WWW-Authentication header:" + wwwAuthHeader + ", for URL:" + connectUrl);
}
if (wwwAuthHeader != null && wwwAuthHeader.trim().startsWith(NEGOTIATE)) {
// Force a new SPNEGO cookie and retry the POST once on a fresh connection
appCookie = appCookieManager.getAppCookie(connectUrl, true);
if (appCookie != null) {
cleanupInputStream(connection.getInputStream());
connection = connectUrl.startsWith("https") ?
getSSLConnection(connectUrl) : getConnection(connectUrl);
connection.setRequestProperty(COOKIE, appCookie);
statusCode = emitMetricsJson(connection, timeout, jsonData);
}
} else {
// no supported authentication type found
// we would let the original response propagate
LOG.error("Unsupported WWW-Authentication header:" + wwwAuthHeader+ ", for URL:" + connectUrl);
}
}
if (statusCode != 200) {
LOG.info("Unable to POST metrics to collector, " + connectUrl + ", " +
"statusCode = " + statusCode);
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Metrics posted to Collector " + connectUrl);
}
}
// Drain the response so the keep-alive connection can be reused.
// NOTE(review): getInputStream() itself throws IOException for error status
// codes — presumably intentional (handled by the catch below); confirm.
cleanupInputStream(connection.getInputStream());
// reset failedCollectorConnectionsCounter to "0"
failedCollectorConnectionsCounter.set(0);
return true;
} catch (IOException ioe) {
StringBuilder errorMessage =
new StringBuilder("Unable to connect to collector, " + connectUrl + "\n"
+ "This exceptions will be ignored for next " + NUMBER_OF_SKIPPED_COLLECTOR_EXCEPTIONS + " times\n");
try {
if ((connection != null)) {
errorMessage.append(cleanupInputStream(connection.getErrorStream()));
}
} catch (IOException e) {
//NOP
}
if (failedCollectorConnectionsCounter.getAndIncrement() == 0) {
if (LOG.isDebugEnabled()) {
LOG.debug(errorMessage, ioe);
} else {
LOG.info(errorMessage);
}
// First failure in a streak is escalated so callers can fail over
throw new UnableToConnectException(ioe).setConnectUrl(connectUrl);
} else {
// Wrap the counter back to 0 after the throttling window
failedCollectorConnectionsCounter.compareAndSet(NUMBER_OF_SKIPPED_COLLECTOR_EXCEPTIONS, 0);
if (LOG.isDebugEnabled()) {
LOG.debug(String.format("Ignoring %s AMS connection exceptions", NUMBER_OF_SKIPPED_COLLECTOR_EXCEPTIONS));
}
return false;
}
}
}
/**
 * Low-level POST: writes jsonData (UTF-8) to an already-created connection and
 * returns the HTTP status code. Both connect and read timeouts are set to the
 * given value in milliseconds.
 */
private int emitMetricsJson(HttpURLConnection connection, int timeout, String jsonData) throws IOException {
connection.setRequestMethod("POST");
connection.setRequestProperty("Content-Type", "application/json");
connection.setRequestProperty("Connection", "Keep-Alive");
connection.setConnectTimeout(timeout);
connection.setReadTimeout(timeout);
connection.setDoOutput(true);
if (jsonData != null) {
try (OutputStream os = connection.getOutputStream()) {
os.write(jsonData.getBytes("UTF-8"));
}
}
int statusCode = connection.getResponseCode();
if (LOG.isDebugEnabled()) {
LOG.debug("emitMetricsJson: statusCode = " + statusCode);
}
return statusCode;
}
/**
 * Returns the collector host to POST to, preferring the cached choice.
 * After RETRY_COUNT_BEFORE_COLLECTOR_FAILOVER consecutive failures the cached
 * host is evicted and a fresh preferred host is computed. May return null when
 * no live collector is known; "null collector" messages are throttled.
 */
protected String getCurrentCollectorHost() {
String collectorHost;
// Get cached target
if (targetCollectorHostSupplier != null) {
collectorHost = targetCollectorHostSupplier.get();
// Last X attempts have failed - force refresh
if (failedCollectorConnectionsCounter.get() > RETRY_COUNT_BEFORE_COLLECTOR_FAILOVER) {
LOG.debug("Removing collector " + collectorHost + " from allKnownLiveCollectors.");
allKnownLiveCollectors.remove(collectorHost);
targetCollectorHostSupplier = null;
collectorHost = findPreferredCollectHost();
}
} else {
collectorHost = findPreferredCollectHost();
}
if (collectorHost == null) {
if (nullCollectorCounter.getAndIncrement() == 0) {
LOG.info("No live collector to send metrics to. Metrics to be sent will be discarded. " +
"This message will be skipped for the next " + NUMBER_OF_NULL_COLLECTOR_EXCEPTIONS + " times.");
} else {
nullCollectorCounter.compareAndSet(NUMBER_OF_NULL_COLLECTOR_EXCEPTIONS, 0);
}
} else {
nullCollectorCounter.set(0);
}
return collectorHost;
}
/**
 * Serializes the metrics to JSON and POSTs them either to the local in-memory
 * aggregator (when enabled) or to the current collector host.
 *
 * @return true if the metrics were posted successfully
 */
protected boolean emitMetrics(TimelineMetrics metrics) {
String connectUrl;
boolean validCollectorHost = true;
if (isHostInMemoryAggregationEnabled()) {
connectUrl = constructTimelineMetricUri("http", "localhost", String.valueOf(getHostInMemoryAggregationPort()));
} else {
String collectorHost = getCurrentCollectorHost();
if (collectorHost == null) {
validCollectorHost = false;
}
connectUrl = getCollectorUri(collectorHost);
}
if (validCollectorHost) {
String jsonData = null;
LOG.debug("EmitMetrics connectUrl = " + connectUrl);
try {
jsonData = mapper.writeValueAsString(metrics);
} catch (IOException e) {
LOG.error("Unable to parse metrics", e);
}
if (jsonData != null) {
return emitMetricsJson(connectUrl, jsonData);
}
}
return false;
}
/**
 * Get the associated app cookie manager.
 *
 * @return the app cookie manager
 */
public synchronized AppCookieManager getAppCookieManager() {
if (appCookieManager == null) {
appCookieManager = new AppCookieManager();
}
return appCookieManager;
}
/**
 * Cleans up and closes an input stream
 * see http://docs.oracle.com/javase/6/docs/technotes/guides/net/http-keepalive.html
 * @param is the InputStream to clean up
 * @return string read from the InputStream
 * @throws IOException
 */
protected String cleanupInputStream(InputStream is) throws IOException {
StringBuilder sb = new StringBuilder();
if (is != null) {
try (
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr)
) {
// read the response body
String line;
while ((line = br.readLine()) != null) {
// body text is only captured (for the return value) when debug is on
if (LOG.isDebugEnabled()) {
sb.append(line);
}
}
} finally {
is.close();
}
}
return sb.toString();
}
// Get a connection
protected HttpURLConnection getConnection(String spec) throws IOException {
return (HttpURLConnection) new URL(spec).openConnection();
}
// Get an ssl connection.
// NOTE(review): uses sslSocketFactory populated by loadTruststore(); if that
// was never called (or failed) the factory is null — confirm callers'
// initialization order.
protected HttpsURLConnection getSSLConnection(String spec)
throws IOException, IllegalStateException {
HttpsURLConnection connection = (HttpsURLConnection) (new URL(spec).openConnection());
connection.setSSLSocketFactory(sslSocketFactory);
return connection;
}
/**
 * Loads the configured truststore and builds the SSLSocketFactory used by
 * {@link #getSSLConnection(String)}. No-op if a factory was already built.
 * Throws IllegalStateException when path or password is missing; other load
 * errors are logged and leave the factory null.
 */
protected void loadTruststore(String trustStorePath, String trustStoreType,
String trustStorePassword) {
if (sslSocketFactory == null) {
if (trustStorePath == null || trustStorePassword == null) {
String msg = "Can't load TrustStore. Truststore path or password is not set.";
LOG.error(msg);
throw new IllegalStateException(msg);
}
FileInputStream in = null;
try {
in = new FileInputStream(new File(trustStorePath));
KeyStore store = KeyStore.getInstance(trustStoreType == null ?
KeyStore.getDefaultType() : trustStoreType);
store.load(in, trustStorePassword.toCharArray());
TrustManagerFactory tmf = TrustManagerFactory
.getInstance(TrustManagerFactory.getDefaultAlgorithm());
tmf.init(store);
SSLContext context = SSLContext.getInstance("TLS");
context.init(null, tmf.getTrustManagers(), null);
sslSocketFactory = context.getSocketFactory();
} catch (Exception e) {
LOG.error("Unable to load TrustStore", e);
} finally {
if (in != null) {
try {
in.close();
} catch (IOException e) {
LOG.error("Unable to load TrustStore", e);
}
}
}
}
}
/**
 * Find appropriate write shard for this sink using the {@link org.apache.hadoop.metrics2.sink.timeline.availability.MetricSinkWriteShardStrategy}
 *
 * 1. Use configured collector(s) to discover available collectors
 * 2. If configured collector(s) are unresponsive check Zookeeper to find live hosts
 * 3. Refresh known collector list using ZK
 * 4. Default: Return configured collector with no side effect due to discovery.
 *
 * throws {#link MetricsSinkInitializationException} if called before
 * initialization, not other side effect
 *
 * @return String Collector hostname
 */
protected synchronized String findPreferredCollectHost() {
if (!isInitializedForHA) {
init();
}
shardExpired = false;
// Auto expire and re-calculate after 1 hour
if (targetCollectorHostSupplier != null) {
String targetCollector = targetCollectorHostSupplier.get();
if (targetCollector != null) {
return targetCollector;
}
}
// Reach out to all configured collectors before Zookeeper
Collection<String> collectorHosts = getConfiguredCollectorHosts();
refreshCollectorsFromConfigured(collectorHosts);
// Lookup Zookeeper for live hosts - max 10 seconds wait time
long currentTime = System.currentTimeMillis();
if (allKnownLiveCollectors.size() == 0 && getZookeeperQuorum() != null
&& (currentTime - lastFailedZkRequestTime) > zookeeperBackoffTimeMillis) {
LOG.debug("No live collectors from configuration. Requesting zookeeper...");
allKnownLiveCollectors.addAll(collectorHAHelper.findLiveCollectorHostsFromZNode());
boolean noNewCollectorFromZk = true;
for (String collectorHostFromZk : allKnownLiveCollectors) {
if (!collectorHosts.contains(collectorHostFromZk)) {
noNewCollectorFromZk = false;
break;
}
}
if (noNewCollectorFromZk) {
// Back off further ZK lookups for zookeeperBackoffTimeMillis
LOG.debug("No new collector was found from Zookeeper. Will not request zookeeper for " + zookeeperBackoffTimeMillis + " millis");
lastFailedZkRequestTime = System.currentTimeMillis();
}
}
if (allKnownLiveCollectors.size() != 0) {
targetCollectorHostSupplier = Suppliers.memoizeWithExpiration(
new Supplier<String>() {
@Override
public String get() {
//shardExpired flag is used to determine if the Supplier.get() is invoked through the
// findPreferredCollectHost method (No need to refresh collector hosts
// OR
// through Expiry (Refresh needed to pick up dead collectors that might have not become alive).
if (shardExpired) {
refreshCollectorsFromConfigured(getConfiguredCollectorHosts());
}
return metricSinkWriteShardStrategy.findCollectorShard(new ArrayList<>(allKnownLiveCollectors));
}
}, // random.nextInt(max - min + 1) + min # (60 to 75 minutes)
rand.nextInt(COLLECTOR_HOST_CACHE_MAX_EXPIRATION_MINUTES
- COLLECTOR_HOST_CACHE_MIN_EXPIRATION_MINUTES + 1)
+ COLLECTOR_HOST_CACHE_MIN_EXPIRATION_MINUTES,
TimeUnit.MINUTES
);
String collectorHost = targetCollectorHostSupplier.get();
shardExpired = true;
return collectorHost;
}
LOG.debug("Couldn't find any live collectors. Returning null");
shardExpired = true;
return null;
}
/**
 * Probes the configured collector hosts in order and merges the live-node
 * list reported by the first reachable one into allKnownLiveCollectors.
 * Unreachable hosts are removed from the known-live set.
 */
private void refreshCollectorsFromConfigured(Collection<String> collectorHosts) {
LOG.debug("Trying to find live collector host from : " + collectorHosts);
if (collectorHosts != null && !collectorHosts.isEmpty()) {
for (String hostStr : collectorHosts) {
hostStr = hostStr.trim();
if (!hostStr.isEmpty()) {
try {
Collection<String> liveHosts = findLiveCollectorHostsFromKnownCollector(hostStr, getCollectorPort());
// Update live Hosts - current host will already be a part of this
for (String host : liveHosts) {
allKnownLiveCollectors.add(host);
}
break; // Found at least 1 live collector
} catch (MetricCollectorUnavailableException e) {
LOG.debug("Collector " + hostStr + " is not longer live. Removing " +
"it from list of know live collector hosts : " + allKnownLiveCollectors);
allKnownLiveCollectors.remove(hostStr);
}
}
}
}
}
/**
 * Asks one known collector for the list of live collector nodes via the
 * /livenodes endpoint. JSON parse failures are swallowed (empty list returned)
 * so the caller can still attempt a POST; connection failures are converted to
 * MetricCollectorUnavailableException.
 */
Collection<String> findLiveCollectorHostsFromKnownCollector(String host, String port) throws MetricCollectorUnavailableException {
List<String> collectors = new ArrayList<>();
HttpURLConnection connection = null;
StringBuilder sb = new StringBuilder(getCollectorProtocol());
sb.append("://");
sb.append(host);
sb.append(":");
sb.append(port);
sb.append(COLLECTOR_LIVE_NODES_PATH);
String connectUrl = sb.toString();
LOG.debug("Requesting live collector nodes : " + connectUrl);
try {
connection = getCollectorProtocol().startsWith("https") ?
getSSLConnection(connectUrl) : getConnection(connectUrl);
connection.setRequestMethod("GET");
// 5 seconds for this op is plenty of wait time
// NOTE(review): actual values are 3s connect + 2s read, not 5s each
connection.setConnectTimeout(3000);
connection.setReadTimeout(2000);
int responseCode = connection.getResponseCode();
if (responseCode == 200) {
try (InputStream in = connection.getInputStream()) {
StringWriter writer = new StringWriter();
IOUtils.copy(in, writer);
try {
collectors = gson.fromJson(writer.toString(), new TypeToken<List<String>>(){}.getType());
} catch (JsonSyntaxException jse) {
// Swallow this at the behest of still trying to POST
LOG.debug("Exception deserializing the json data on live " +
"collector nodes.", jse);
}
}
}
} catch (IOException ioe) {
StringBuilder errorMessage =
new StringBuilder("Unable to connect to collector, " + connectUrl);
try {
if ((connection != null)) {
errorMessage.append(cleanupInputStream(connection.getErrorStream()));
}
} catch (IOException e) {
//NOP
}
LOG.debug(errorMessage);
LOG.debug(ioe);
String warnMsg = "Unable to connect to collector to find live nodes.";
throw new MetricCollectorUnavailableException(warnMsg);
}
return collectors;
}
// Constructing without UriBuilder to avoid unfavorable httpclient
// dependencies
protected String constructTimelineMetricUri(String protocol, String host, String port) {
StringBuilder sb = new StringBuilder(protocol);
sb.append("://");
sb.append(host);
sb.append(":");
sb.append(port);
sb.append(WS_V1_TIMELINE_METRICS);
return sb.toString();
}
/**
 * Parses input Sting of format "host1,host2" into Collection of hostnames
 */
public Collection<String> parseHostsStringIntoCollection(String hostsString) {
Set<String> hosts = new HashSet<>();
if (StringUtils.isEmpty(hostsString)) {
LOG.error("No Metric collector configured.");
return hosts;
}
for (String host : hostsString.split(",")) {
if (StringUtils.isEmpty(host))
continue;
hosts.add(host.trim());
}
return hosts;
}
// Random backoff between zookeeperMinBackoffTimeMins and
// zookeeperMaxBackoffTimeMins, in milliseconds.
private long getZookeeperBackoffTimeMillis() {
return (zookeeperMinBackoffTimeMins +
rand.nextInt(zookeeperMaxBackoffTimeMins - zookeeperMinBackoffTimeMins + 1)) * 60*1000l;
}
/**
 * Get a pre-formatted URI for the collector
 */
abstract protected String getCollectorUri(String host);
abstract protected String getCollectorProtocol();
abstract protected String getCollectorPort();
/**
 * How soon to timeout on the emit calls in seconds.
 */
abstract protected int getTimeoutSeconds();
/**
 * Get the zookeeper quorum for the cluster used to find collector
 * @return String "host1:port1,host2:port2"
 */
abstract protected String getZookeeperQuorum();
/**
 * Get pre-configured list of collectors hosts available
 * @return Collection<String> host1,host2
 */
abstract protected Collection<String> getConfiguredCollectorHosts();
/**
 * Get hostname used for calculating write shard.
 * @return String "host1"
 */
abstract protected String getHostname();
/**
 * Check if host in-memory aggregation is enabled
 * @return true when metrics should go to the local in-memory aggregator
 */
abstract protected boolean isHostInMemoryAggregationEnabled();
/**
 * In memory aggregation port
 * @return port of the local in-memory aggregator
 */
abstract protected int getHostInMemoryAggregationPort();
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.passwordSafe.ui;
import com.intellij.ide.passwordSafe.PasswordSafe;
import com.intellij.ide.passwordSafe.PasswordSafeException;
import com.intellij.ide.passwordSafe.config.PasswordSafeSettings;
import com.intellij.ide.passwordSafe.impl.PasswordSafeImpl;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Ref;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
/**
* The generic password dialog. Use it to ask a password from user with option to remember it.
*/
public class PasswordSafePromptDialog extends DialogWrapper {
private static final Logger LOG = Logger.getInstance(PasswordSafePromptDialog.class.getName());
private final PasswordPromptComponent myComponent;
/**
 * Creates and initializes the dialog: sets the title, installs the prompt
 * component, makes the dialog non-resizable and calls init().
 * (The original javadoc claimed init() was NOT called and documented
 * nonexistent message/type parameters — both were wrong.)
 *
 * @param project   the context project, or null
 * @param title     the dialog title
 * @param component the password prompt component shown in the dialog center
 */
private PasswordSafePromptDialog(@Nullable Project project, @NotNull String title, @NotNull PasswordPromptComponent component) {
super(project, true);
setTitle(title);
myComponent = component;
setResizable(false);
init();
}
/**
 * @return the password prompt component hosted by this dialog
 */
public PasswordPromptComponent getComponent() {
return myComponent;
}
/** Delegates the dialog's center panel to the prompt component. */
@Override
protected JComponent createCenterPanel() {
return myComponent.getComponent();
}
/** Initial focus goes to the prompt component's preferred field. */
@Override
public JComponent getPreferredFocusedComponent() {
return myComponent.getPreferredFocusedComponent();
}
/**
 * Ask password possibly asking password database first. The method could be invoked from any thread. If UI needs to be shown,
 * the method invokes {@link UIUtil#invokeAndWaitIfNeeded(Runnable)}.
 * Delegates to the full overload with default prompt and checkbox labels.
 * @param project the context project
 * @param title the dialog title
 * @param message the message describing a resource for which password is asked
 * @param requestor the password requestor
 * @param key the password key
 * @param resetPassword if true, the old password is removed from database and new password will be asked.
 * @param error the error to show in the dialog
 * @return null if dialog was cancelled, otherwise the password (stored in database or entered by user)
 */
@Nullable
public static String askPassword(final Project project,
final String title,
final String message,
@NotNull final Class<?> requestor,
final String key,
boolean resetPassword, String error) {
return askPassword(project, title, message, requestor, key, resetPassword, error, null, null);
}
/**
 * Ask password possibly asking password database first. The method could be invoked from any thread. If UI needs to be shown,
 * the method invokes {@link UIUtil#invokeAndWaitIfNeeded(Runnable)}.
 * Convenience overload without a project context or error text.
 *
 * @param title the dialog title
 * @param message the message describing a resource for which password is asked
 * @param requestor the password requestor
 * @param key the password key
 * @param resetPassword if true, the old password is removed from database and new password will be asked.
 * @return null if dialog was cancelled, otherwise the password (stored in database or entered by user)
 */
@Nullable
public static String askPassword(final String title,
final String message,
@NotNull final Class<?> requestor,
final String key,
boolean resetPassword) {
return askPassword(null, title, message, requestor, key, resetPassword, null);
}
/**
 * Ask passphrase possibly asking password database first. The method could be invoked from any thread. If UI needs to be shown,
 * the method invokes {@link UIUtil#invokeAndWaitIfNeeded(Runnable)}
 * @param project the context project (might be null)
 * @param title the dialog title
 * @param message the message describing a resource for which password is asked
 * @param requestor the password requestor
 * @param key the password key
 * @param resetPassword if true, the old password is removed from database and new password will be asked.
 * @param error the error to show in the dialog
 * @return null if the dialog was cancelled, or the passphrase (stored in the database or entered by the user)
 */
@Nullable
public static String askPassphrase(final Project project,
final String title,
final String message,
@NotNull final Class<?> requestor,
final String key,
boolean resetPassword,
String error) {
return askPassword(project, title, message, requestor, key, resetPassword, error,
"Passphrase:", "Remember the passphrase");
}
/**
 * Ask password possibly asking password database first. The method could be invoked from any thread. If UI needs to be shown,
 * the method invokes {@link UIUtil#invokeAndWaitIfNeeded(Runnable)}
 * @param project the context project
 * @param title the dialog title
 * @param message the message describing a resource for which password is asked
 * @param requestor the password requestor
 * @param key the password key
 * @param resetPassword if true, the old password is removed from database and new password will be asked.
 * @param error the error text to show in the dialog
 * @param promptLabel the prompt label text
 * @param checkboxLabel the checkbox text
 * @return null if the dialog was cancelled, or the password (stored in the database or entered by the user)
 */
@Nullable
private static String askPassword(final Project project,
final String title,
final String message,
@NotNull final Class<?> requestor,
final String key,
boolean resetPassword,
final String error,
final String promptLabel,
final String checkboxLabel) {
final PasswordSafeImpl ps = (PasswordSafeImpl)PasswordSafe.getInstance();
try {
if (resetPassword) {
// Forget the stored password so the user is prompted for a fresh one.
ps.removePassword(project, requestor, key);
}
else {
// Try the password database first; only show the dialog when nothing is stored.
String pw = ps.getPassword(project, requestor, key);
if (pw != null) {
return pw;
}
}
}
catch (PasswordSafeException ex) {
// ignore exception on get/reset phase
if (LOG.isDebugEnabled()) {
LOG.debug("Failed to retrieve or reset password", ex);
}
}
final Ref<String> ref = Ref.create();
// Show the dialog on the UI thread regardless of the caller's thread;
// ModalityState.any() lets the prompt appear even while another modal dialog is open.
ApplicationManager.getApplication().invokeAndWait(new Runnable() {
public void run() {
PasswordSafeSettings.ProviderType type = ps.getSettings().getProviderType();
final PasswordPromptComponent component = new PasswordPromptComponent(type, message, false, promptLabel, checkboxLabel);
PasswordSafePromptDialog d = new PasswordSafePromptDialog(project, title, component);
d.setErrorText(error);
if (d.showAndGet()) {
ref.set(new String(component.getPassword()));
try {
if (component.isRememberSelected()) {
// User asked to remember: persist via the configured provider.
ps.storePassword(project, requestor, key, ref.get());
}
else if (!type.equals(PasswordSafeSettings.ProviderType.DO_NOT_STORE)) {
// Otherwise keep the password only for this session (in-memory provider).
ps.getMemoryProvider().storePassword(project, requestor, key, ref.get());
}
}
catch (PasswordSafeException e) {
Messages.showErrorDialog(project, e.getMessage(), "Failed to Store Password");
if (LOG.isDebugEnabled()) {
LOG.debug("Failed to store password", e);
}
}
}
}
}, ModalityState.any());
// Still null if the dialog was cancelled.
return ref.get();
}
}
| |
package org.frameworkset.web.util;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponseWrapper;
public class ContentCachingResponseWrapper extends HttpServletResponseWrapper {
// In-memory cache of everything the application writes to the response body.
private final FastByteArrayOutputStream content = new FastByteArrayOutputStream(1024);
// Caching stream handed out by getOutputStream(); all bytes go into `content`.
private final ServletOutputStream outputStream = new ResponseServletOutputStream();
// Lazily created writer that encodes characters into the cached content.
private PrintWriter writer;
// Last status code set through this wrapper; defaults to 200 OK.
private int statusCode = HttpServletResponse.SC_OK;
// Content length declared by the application; null until explicitly set.
private Integer contentLength;
/**
 * Create a new ContentCachingResponseWrapper for the given servlet response.
 * @param response the original servlet response
 */
public ContentCachingResponseWrapper(HttpServletResponse response) {
super(response);
}
@Override
public void setStatus(int sc) {
super.setStatus(sc);
// Remember the status so getStatusCode() can report it later.
this.statusCode = sc;
}
@SuppressWarnings("deprecation")
@Override
public void setStatus(int sc, String sm) {
super.setStatus(sc, sm);
this.statusCode = sc;
}
@Override
public void sendError(int sc) throws IOException {
// Push any cached body to the real response before the container renders the error page.
copyBodyToResponse(false);
try {
super.sendError(sc);
}
catch (IllegalStateException ex) {
// Possibly on Tomcat when called too late: fall back to silent setStatus
super.setStatus(sc);
}
this.statusCode = sc;
}
@Override
@SuppressWarnings("deprecation")
public void sendError(int sc, String msg) throws IOException {
copyBodyToResponse(false);
try {
super.sendError(sc, msg);
}
catch (IllegalStateException ex) {
// Possibly on Tomcat when called too late: fall back to silent setStatus
super.setStatus(sc, msg);
}
this.statusCode = sc;
}
@Override
public void sendRedirect(String location) throws IOException {
// Push any cached body out before the redirect commits the response.
copyBodyToResponse(false);
super.sendRedirect(location);
}
@Override
public ServletOutputStream getOutputStream() throws IOException {
return this.outputStream;
}
@Override
public PrintWriter getWriter() throws IOException {
if (this.writer == null) {
// Use the response's declared encoding when known, otherwise the framework default.
String characterEncoding = getCharacterEncoding();
this.writer = (characterEncoding != null ? new ResponsePrintWriter(characterEncoding) :
new ResponsePrintWriter(WebUtils.DEFAULT_CHARACTER_ENCODING));
}
return this.writer;
}
@Override
public void flushBuffer() throws IOException {
// do not flush the underlying response as the content has not been copied to it yet
}
@Override
public void setContentLength(int len) {
if (len > this.content.size()) {
// Grow the cache so the declared length fits.
this.content.resize(len);
}
this.contentLength = len;
}
// Overrides Servlet 3.1 setContentLengthLong(long) at runtime
public void setContentLengthLong(long len) {
if (len > Integer.MAX_VALUE) {
// NOTE(review): message mentions "ShallowEtagHeaderFilter" (text inherited from the
// Spring original); the actual limit is the int-sized in-memory cache.
throw new IllegalArgumentException("Content-Length exceeds ShallowEtagHeaderFilter's maximum (" +
Integer.MAX_VALUE + "): " + len);
}
int lenInt = (int) len;
if (lenInt > this.content.size()) {
this.content.resize(lenInt);
}
this.contentLength = lenInt;
}
@Override
public void setBufferSize(int size) {
if (size > this.content.size()) {
this.content.resize(size);
}
}
@Override
public void resetBuffer() {
this.content.reset();
}
@Override
public void reset() {
super.reset();
this.content.reset();
}
/**
 * Return the status code as specified on the response.
 */
public int getStatusCode() {
return this.statusCode;
}
/**
 * Return the cached response content as a byte array.
 */
public byte[] getContentAsByteArray() {
return this.content.toByteArray();
}
/**
 * Return an {@link InputStream} to the cached content.
 * @since 4.2
 */
public InputStream getContentInputStream() {
return this.content.getInputStream();
}
/**
 * Return the current size of the cached content.
 * @since 4.2
 */
public int getContentSize() {
return this.content.size();
}
/**
 * Copy the complete cached body content to the response.
 * @since 4.2
 */
public void copyBodyToResponse() throws IOException {
copyBodyToResponse(true);
}
/**
 * Copy the cached body content to the response.
 * @param complete whether to set a corresponding content length
 * for the complete cached body content
 * @since 4.2
 */
protected void copyBodyToResponse(boolean complete) throws IOException {
if (this.content.size() > 0) {
HttpServletResponse rawResponse = (HttpServletResponse) getResponse();
// Only set Content-Length if the response has not been committed yet.
if ((complete || this.contentLength != null) && !rawResponse.isCommitted()) {
rawResponse.setContentLength(complete ? this.content.size() : this.contentLength);
this.contentLength = null;
}
this.content.writeTo(rawResponse.getOutputStream());
// Clear the cache so a second call does not write the body twice.
this.content.reset();
if (complete) {
super.flushBuffer();
}
}
}
// ServletOutputStream that diverts every write into the in-memory cache.
private class ResponseServletOutputStream extends ServletOutputStream {
@Override
public void write(int b) throws IOException {
content.write(b);
}
@Override
public void write(byte[] b, int off, int len) throws IOException {
content.write(b, off, len);
}
}
// PrintWriter that encodes into the cache and flushes after every write so the
// cached bytes always reflect what has been written so far.
private class ResponsePrintWriter extends PrintWriter {
public ResponsePrintWriter(String characterEncoding) throws UnsupportedEncodingException {
super(new OutputStreamWriter(content, characterEncoding));
}
@Override
public void write(char buf[], int off, int len) {
super.write(buf, off, len);
super.flush();
}
@Override
public void write(String s, int off, int len) {
super.write(s, off, len);
super.flush();
}
@Override
public void write(int c) {
super.write(c);
super.flush();
}
}
}
| |
package com.afollestad.appthemeengine.util;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.ColorStateList;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.RippleDrawable;
import android.os.Build;
import android.support.annotation.ColorInt;
import android.support.annotation.FloatRange;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.design.widget.FloatingActionButton;
import android.support.v4.content.ContextCompat;
import android.support.v4.graphics.drawable.DrawableCompat;
import android.support.v7.widget.AppCompatEditText;
import android.support.v7.widget.SwitchCompat;
import android.view.View;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.RadioButton;
import android.widget.SeekBar;
import android.widget.Switch;
import android.widget.TextView;
import com.afollestad.appthemeengine.R;
/**
* @author afollestad, plusCubed
*/
public final class TintHelper {
/**
 * Default ripple color for the current theme; note the inversion explained below.
 */
@SuppressLint("PrivateResource")
@ColorInt
private static int getDefaultRippleColor(@NonNull Context context, boolean useDarkRipple) {
// Light ripple is actually translucent black, and vice versa
return ContextCompat.getColor(context, useDarkRipple ?
R.color.ripple_material_light : R.color.ripple_material_dark);
}
/**
 * Builds a two-state list: {@code disabled} when the view is disabled, {@code normal} otherwise.
 */
@NonNull
private static ColorStateList getDisabledColorStateList(@ColorInt int normal, @ColorInt int disabled) {
return new ColorStateList(new int[][]{
new int[]{-android.R.attr.state_enabled},
new int[]{android.R.attr.state_enabled}
}, new int[]{
disabled,
normal
});
}
/**
 * Tints a view's background with disabled/pressed/activated states derived from {@code color}.
 * Buttons, FloatingActionButtons and generic views each get a different state list.
 */
@SuppressWarnings("deprecation")
public static void setTintSelector(@NonNull View view, @ColorInt int color, boolean darker, boolean useDarkTheme) {
final int disabled = ContextCompat.getColor(view.getContext(), useDarkTheme ?
R.color.ate_disabled_button_dark : R.color.ate_disabled_button_light);
// Pressed/activated are lighter or darker shifts of the base color, direction
// controlled by the `darker` flag.
final int pressed = Util.shiftColor(color, darker ? 0.9f : 1.1f);
final int activated = Util.shiftColor(color, darker ? 1.1f : 0.9f);
final int rippleColor = getDefaultRippleColor(view.getContext(), Util.isColorLight(color));
final ColorStateList sl;
if (view instanceof Button) {
sl = getDisabledColorStateList(color, disabled);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP &&
view.getBackground() instanceof RippleDrawable) {
RippleDrawable rd = (RippleDrawable) view.getBackground();
rd.setColor(ColorStateList.valueOf(rippleColor));
}
// Disabled text color state for buttons, may get overridden later by ATE tags
final Button button = (Button) view;
final int defaultTextColor = Util.isColorLight(color) ?
Color.BLACK : Color.WHITE;
button.setTextColor(getDisabledColorStateList(defaultTextColor, Color.BLACK));
} else if (view instanceof FloatingActionButton) {
// FloatingActionButton doesn't support disabled state?
sl = new ColorStateList(new int[][]{
new int[]{-android.R.attr.state_pressed},
new int[]{android.R.attr.state_pressed}
}, new int[]{
color,
pressed
});
} else {
sl = new ColorStateList(
new int[][]{
new int[]{-android.R.attr.state_enabled},
new int[]{android.R.attr.state_enabled},
new int[]{android.R.attr.state_enabled, android.R.attr.state_pressed},
new int[]{android.R.attr.state_enabled, android.R.attr.state_activated},
new int[]{android.R.attr.state_enabled, android.R.attr.state_checked}
},
new int[]{
disabled,
color,
pressed,
activated,
activated
}
);
}
// TODO use other theme values in place of these?
final int tintColor = Util.isColorLight(color) ? Color.BLACK : Color.WHITE;
if (view instanceof FloatingActionButton) {
final FloatingActionButton fab = (FloatingActionButton) view;
fab.setRippleColor(rippleColor);
fab.setBackgroundTintList(sl);
if (fab.getDrawable() != null)
fab.setImageDrawable(tintDrawable(fab.getDrawable(), tintColor));
return;
}
Drawable drawable = view.getBackground();
if (drawable != null) {
drawable = DrawableCompat.wrap(drawable);
DrawableCompat.setTintList(drawable, sl);
Util.setBackgroundCompat(view, drawable);
}
if (view instanceof TextView && !(view instanceof Button)) {
final TextView tv = (TextView) view;
tv.setTextColor(getDisabledColorStateList(tintColor, Util.adjustAlpha(tintColor, 0.4f)));
}
}
/**
 * Dispatches to the widget-specific setTint overload based on the view's runtime type;
 * falls back to tinting the background when {@code background} is true or no overload matches.
 */
@SuppressWarnings("deprecation")
public static void setTintAuto(@NonNull View view, @ColorInt int color, boolean background) {
final boolean isDark = !Util.isColorLight(Util.resolveColor(view.getContext(), android.R.attr.windowBackground));
if (!background) {
if (view instanceof RadioButton)
setTint((RadioButton) view, color, isDark);
else if (view instanceof SeekBar)
setTint((SeekBar) view, color, isDark);
else if (view instanceof ProgressBar)
setTint((ProgressBar) view, color);
else if (view instanceof EditText)
setTint((EditText) view, color, isDark);
else if (view instanceof CheckBox)
setTint((CheckBox) view, color, isDark);
else if (view instanceof ImageView)
setTint((ImageView) view, color);
else if (view instanceof Switch)
setTint((Switch) view, color, isDark);
else if (view instanceof SwitchCompat)
setTint((SwitchCompat) view, color, isDark);
else background = true;  // no overload matched: treat as a background tint below
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP &&
!background && view.getBackground() instanceof RippleDrawable) {
// Ripples for the above views (e.g. when you tap and hold a switch or checkbox)
RippleDrawable rd = (RippleDrawable) view.getBackground();
@SuppressLint("PrivateResource")
final int unchecked = ContextCompat.getColor(view.getContext(),
isDark ? R.color.ripple_material_dark : R.color.ripple_material_light);
final int checked = Util.adjustAlpha(color, 0.4f);
final ColorStateList sl = new ColorStateList(
new int[][]{
new int[]{-android.R.attr.state_activated, -android.R.attr.state_checked},
new int[]{android.R.attr.state_activated},
new int[]{android.R.attr.state_checked}
},
new int[]{
unchecked,
checked,
checked
}
);
rd.setColor(sl);
}
}
if (background) {
// Need to tint the background of a view
if (view instanceof FloatingActionButton || view instanceof Button) {
setTintSelector(view, color, false, isDark);
} else if (view.getBackground() != null) {
Drawable drawable = view.getBackground();
if (drawable != null) {
drawable = DrawableCompat.wrap(drawable);
DrawableCompat.setTint(drawable, color);
Util.setBackgroundCompat(view, drawable);
}
}
}
}
/** Tints a RadioButton's button drawable (natively on API 21+, via a wrapped drawable below). */
public static void setTint(@NonNull RadioButton radioButton, @ColorInt int color, boolean useDarker) {
ColorStateList sl = new ColorStateList(new int[][]{
new int[]{-android.R.attr.state_enabled},
new int[]{android.R.attr.state_enabled, -android.R.attr.state_checked},
new int[]{android.R.attr.state_enabled, android.R.attr.state_checked}
}, new int[]{
ContextCompat.getColor(radioButton.getContext(), useDarker ? R.color.ate_disabled_radiobutton_dark : R.color.ate_disabled_radiobutton_light),
Util.resolveColor(radioButton.getContext(), R.attr.colorControlNormal),
color
});
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
radioButton.setButtonTintList(sl);
} else {
Drawable d = DrawableCompat.wrap(ContextCompat.getDrawable(radioButton.getContext(), R.drawable.abc_btn_radio_material));
DrawableCompat.setTintList(d, sl);
radioButton.setButtonDrawable(d);
}
}
/** Tints a SeekBar's thumb and progress, with per-API-level fallbacks. */
public static void setTint(@NonNull SeekBar seekBar, @ColorInt int color, boolean useDarker) {
final ColorStateList s1 = getDisabledColorStateList(color,
ContextCompat.getColor(seekBar.getContext(), useDarker ? R.color.ate_disabled_seekbar_dark : R.color.ate_disabled_seekbar_light));
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
seekBar.setThumbTintList(s1);
seekBar.setProgressTintList(s1);
} else if (Build.VERSION.SDK_INT > Build.VERSION_CODES.GINGERBREAD_MR1) {
Drawable progressDrawable = DrawableCompat.wrap(seekBar.getProgressDrawable());
seekBar.setProgressDrawable(progressDrawable);
DrawableCompat.setTintList(progressDrawable, s1);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
// getThumb() is only available from API 16.
Drawable thumbDrawable = DrawableCompat.wrap(seekBar.getThumb());
DrawableCompat.setTintList(thumbDrawable, s1);
seekBar.setThumb(thumbDrawable);
}
} else {
PorterDuff.Mode mode = PorterDuff.Mode.SRC_IN;
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.GINGERBREAD_MR1) {
mode = PorterDuff.Mode.MULTIPLY;
}
if (seekBar.getIndeterminateDrawable() != null)
seekBar.getIndeterminateDrawable().setColorFilter(color, mode);
if (seekBar.getProgressDrawable() != null)
seekBar.getProgressDrawable().setColorFilter(color, mode);
}
}
/** Tints a ProgressBar, including its indeterminate drawable. */
public static void setTint(@NonNull ProgressBar progressBar, @ColorInt int color) {
setTint(progressBar, color, false);
}
/** Tints a ProgressBar; optionally leaves the indeterminate drawable untouched. */
public static void setTint(@NonNull ProgressBar progressBar, @ColorInt int color, boolean skipIndeterminate) {
ColorStateList sl = ColorStateList.valueOf(color);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
progressBar.setProgressTintList(sl);
progressBar.setSecondaryProgressTintList(sl);
if (!skipIndeterminate)
progressBar.setIndeterminateTintList(sl);
} else {
PorterDuff.Mode mode = PorterDuff.Mode.SRC_IN;
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.GINGERBREAD_MR1) {
mode = PorterDuff.Mode.MULTIPLY;
}
if (!skipIndeterminate && progressBar.getIndeterminateDrawable() != null)
progressBar.getIndeterminateDrawable().setColorFilter(color, mode);
if (progressBar.getProgressDrawable() != null)
progressBar.getProgressDrawable().setColorFilter(color, mode);
}
}
/** Tints an EditText's background (underline) via support or framework tint lists. */
public static void setTint(@NonNull EditText editText, @ColorInt int color, boolean useDarker) {
final ColorStateList editTextColorStateList = new ColorStateList(new int[][]{
new int[]{-android.R.attr.state_enabled},
new int[]{android.R.attr.state_enabled, -android.R.attr.state_pressed, -android.R.attr.state_focused},
new int[]{}
}, new int[]{
ContextCompat.getColor(editText.getContext(), useDarker ? R.color.ate_disabled_edittext_dark : R.color.ate_disabled_edittext_light),
Util.resolveColor(editText.getContext(), R.attr.colorControlNormal),
color
});
if (editText instanceof AppCompatEditText) {
((AppCompatEditText) editText).setSupportBackgroundTintList(editTextColorStateList);
} else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
editText.setBackgroundTintList(editTextColorStateList);
}
}
/** Tints a CheckBox's button drawable (natively on API 21+, via a wrapped drawable below). */
public static void setTint(@NonNull CheckBox box, @ColorInt int color, boolean useDarker) {
ColorStateList sl = new ColorStateList(new int[][]{
new int[]{-android.R.attr.state_enabled},
new int[]{android.R.attr.state_enabled, -android.R.attr.state_checked},
new int[]{android.R.attr.state_enabled, android.R.attr.state_checked}
}, new int[]{
ContextCompat.getColor(box.getContext(), useDarker ? R.color.ate_disabled_checkbox_dark : R.color.ate_disabled_checkbox_light),
Util.resolveColor(box.getContext(), R.attr.colorControlNormal),
color
});
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
box.setButtonTintList(sl);
} else {
Drawable drawable = tintDrawable(ContextCompat.getDrawable(box.getContext(), R.drawable.abc_btn_check_material), sl);
box.setButtonDrawable(drawable);
}
}
/** Applies a simple SRC_ATOP color filter to an ImageView. */
public static void setTint(@NonNull ImageView image, @ColorInt int color) {
image.setColorFilter(color, PorterDuff.Mode.SRC_ATOP);
}
/**
 * Wraps a switch thumb/track drawable in a state list derived from {@code tint}.
 * The track is usually tinted at reduced alpha so the thumb stands out.
 */
private static Drawable modifySwitchDrawable(@NonNull Context context, @NonNull Drawable from, @ColorInt int tint, @FloatRange(from = 0.0, to = 1.0) float alpha, boolean thumb, boolean useDarker) {
if (alpha < 1f)
tint = Util.adjustAlpha(tint, alpha);
int disabled;
if (thumb) {
disabled = ContextCompat.getColor(context, useDarker ? R.color.ate_disabled_switch_thumb_dark : R.color.ate_disabled_switch_thumb_light);
} else {
disabled = ContextCompat.getColor(context, useDarker ? R.color.ate_disabled_switch_track_dark : R.color.ate_disabled_switch_track_light);
}
final ColorStateList sl = new ColorStateList(
new int[][]{
new int[]{-android.R.attr.state_enabled},
new int[]{android.R.attr.state_enabled, -android.R.attr.state_activated, -android.R.attr.state_checked},
new int[]{android.R.attr.state_enabled, android.R.attr.state_activated},
new int[]{android.R.attr.state_enabled, android.R.attr.state_checked}
},
new int[]{
disabled,
Color.parseColor(thumb ? "#e7e7e7" : "#9f9f9f"),
tint,
tint
}
);
return tintDrawable(from, sl);
}
/** Tints a framework Switch's track and thumb (no-op below API 16 where they are unavailable). */
public static void setTint(@NonNull Switch switchView, @ColorInt int color, boolean useDarker) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) return;
if (switchView.getTrackDrawable() != null) {
switchView.setTrackDrawable(modifySwitchDrawable(switchView.getContext(),
switchView.getTrackDrawable(), color, 0.5f, false, useDarker));
}
if (switchView.getThumbDrawable() != null) {
switchView.setThumbDrawable(modifySwitchDrawable(switchView.getContext(),
switchView.getThumbDrawable(), color, 1.0f, true, useDarker));
}
}
/** Tints a SwitchCompat's track and thumb. */
public static void setTint(@NonNull SwitchCompat switchView, @ColorInt int color, boolean useDarker) {
if (switchView.getTrackDrawable() != null) {
switchView.setTrackDrawable(modifySwitchDrawable(switchView.getContext(),
switchView.getTrackDrawable(), color, 0.5f, false, useDarker));
}
if (switchView.getThumbDrawable() != null) {
switchView.setThumbDrawable(modifySwitchDrawable(switchView.getContext(),
switchView.getThumbDrawable(), color, 1.0f, true, useDarker));
}
}
/** Wraps a drawable and applies a single tint color; null-safe. */
@Nullable
public static Drawable tintDrawable(@Nullable Drawable drawable, @ColorInt int color) {
if (drawable == null) return null;
drawable = DrawableCompat.wrap(drawable);
DrawableCompat.setTint(drawable, color);
return drawable;
}
/** Wraps a drawable and applies a tint state list; null-safe. */
@Nullable
public static Drawable tintDrawable(@Nullable Drawable drawable, @NonNull ColorStateList sl) {
if (drawable == null) return null;
drawable = DrawableCompat.wrap(drawable);
DrawableCompat.setTintList(drawable, sl);
return drawable;
}
}
| |
package sergey.test.lwjgl;
import java.io.File;
import java.io.PrintWriter;
import java.net.URISyntaxException;
import org.lwjgl.glfw.GLFW;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL20;
import sergey.lib.api.lwjgl.ShaderProgram;
import sergey.lib.api.lwjgl.Texture;
import sergey.lib.api.lwjgl.gl.GLAttributeType;
import sergey.lib.api.lwjgl.gl.GLConnectionMode;
import sergey.lib.api.lwjgl.gl.GLDataType;
import sergey.lib.api.lwjgl.gl.VertexDataDef;
import sergey.lib.api.lwjgl.gl.util.GLUtil;
import sergey.lib.api.lwjgl.mesh.Mesh;
import sergey.lib.api.math.Vector4f;
import sergeysav.neuralnetwork.NeuralNetwork;
import sergeysav.neuralnetwork.Neuron;
import sergeysav.neuralnetwork.chess.ChessBoard;
import sergeysav.neuralnetwork.chess.ChessStore;
/**
 * Interactive chess window: a human plays white while a trained {@link NeuralNetwork}
 * plays black. Also dumps the network weights ({@code NeuralNet.txt}) and per-layer
 * activations ({@code NeuronValues.txt}) to disk for debugging.
 */
public class ChessNNGame extends WindowApplication {
    private ShaderProgram shaderProgram;
    private Mesh[][] tiles;
    private Texture textureAtlas;
    private ChessBoard board;
    // Board x of the currently selected "from" square; -1 means no square selected.
    private int fromX = -1;
    private int fromY;
    // True while it is white's (the human's) turn.
    private boolean whiteTeamMoving = true;
    private final Vector4f NO_TINT = new Vector4f(0,0,0,0);
    private final Vector4f FROM_TINT = new Vector4f(1f,0,0,0.4f);
    private final Vector4f HIGHLIGHT_TINT = new Vector4f(0,1f,0,0.4f);
    private final Vector4f BOTH_TINT = new Vector4f(1f,1f,0,0.4f);
    private final Vector4f LEGAL_TINT = new Vector4f(0f,0f,1f,0.4f);
    private final Vector4f AI_FROM_TINT = new Vector4f(1f,0f,1f,0.4f); //Purplish
    private final Vector4f AI_TO_TINT = new Vector4f(0f,1f,1f,0.4f); //Cyanish
    // The AI's suggested move, formatted "fXfY;tXtY;type"; "" when evaluation failed.
    private String aiMove = null;
    private NeuralNetwork ai;

    /**
     * Loads the trained network, dumps its weights to NeuralNet.txt, and sets up
     * the GL state, shader, texture atlas and the 8x8 grid of tile meshes.
     */
    @Override
    public void create() {
        ai = ChessStore.load(new File("ai.store")).network;
        ai.init();
        board = new ChessBoard();
        // Dump every neuron's weights and bias for offline inspection.
        // try-with-resources so the file handle is released even on failure
        // (the original leaked the PrintWriter when an exception was thrown).
        try (PrintWriter writer = new PrintWriter(new File("NeuralNet.txt"))) {
            Neuron[][] data = ai.getNeuralData();
            for (int i = 0; i < data.length; i++) {
                for (int j = 0; j < data[i].length; j++) {
                    writer.print("[");
                    for (int k = 0; k < data[i][j].getWeights().length; k++) {
                        if (k == data[i][j].getWeights().length - 1) {
                            writer.print(String.format("%18.14f", data[i][j].getWeights()[k]));
                        } else {
                            writer.print(String.format("%18.14f, ", data[i][j].getWeights()[k]));
                        }
                    }
                    writer.print("] " + data[i][j].getBias() + "\n");
                }
                writer.println();
            }
            writer.flush();
        } catch (Exception e) {
            e.printStackTrace();
        }
        aiMove = getAIOutput();
        GL11.glClearColor(1f, 1f, 1f, 1.0f);
        shaderProgram = new ShaderProgram("shader", null, null);
        try {
            textureAtlas = new Texture(new File(getClass().getResource("/atlas.png").toURI()).getAbsolutePath(), Texture.Type.RGBA, true, true);
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
        tiles = new Mesh[8][8];
        for (int i = 0; i < 8; i++) {
            for (int j = 0; j < 8; j++) {
                tiles[i][j] = genMesh(i, j);
            }
        }
        GL11.glDisable(GL11.GL_DEPTH_TEST);
        GL11.glPolygonMode(GL11.GL_FRONT_AND_BACK, GL11.GL_FILL);
    }

    /** Draws the board squares (tinted per selection/AI-suggestion state) and the pieces. */
    @Override
    public void render() {
        textureAtlas.bind();
        // Mouse position -> board coordinates: the board fills the center 8x8 cells of a
        // 10x10 grid; y is flipped so rank 0 is at the bottom of the window.
        int xGrid = (int) Math.floor(getMouseX() / getWidth() * 10 - 1);
        int yGrid = 7 - (int) Math.floor(getMouseY() / getHeight() * 10 - 1);
        for (int i = 0; i < 8; i++) {
            for (int j = 0; j < 8; j++) {
                int piece = board.getPieceAt(i, j);
                GLUtil.setUniform(shaderProgram.getUniform("u_tint"), selectTint(i, j, xGrid, yGrid));
                GL20.glUniform1f(shaderProgram.getUniform("u_team"), (i + j) % 2 == 0 ? 1 : 0);
                GL20.glUniform1f(shaderProgram.getUniform("u_type"), 0);
                tiles[i][j].draw();
                GLUtil.setUniform(shaderProgram.getUniform("u_tint"), NO_TINT);
                // Type 7 and 0 are non-drawable squares; everything else is a piece sprite.
                if (Math.abs(piece) != 7 && piece != 0) {
                    GL20.glUniform1f(shaderProgram.getUniform("u_team"), piece < 0 ? 0 : 1);
                    GL20.glUniform1f(shaderProgram.getUniform("u_type"), Math.abs(piece));
                    tiles[i][j].draw();
                }
            }
        }
        Texture.unbind();
    }

    /**
     * Overlay tint for square (row i, col j), in priority order: selected+hovered,
     * selected, hovered, AI-suggested from/to, legal destination, none.
     * Extracted from a single nested ternary; also guards against a short/empty
     * aiMove (getAIOutput() returns "" on failure), which previously threw.
     */
    private Vector4f selectTint(int i, int j, int xGrid, int yGrid) {
        if (fromX == xGrid && fromY == yGrid && fromX == j && fromY == i) return BOTH_TINT;
        if (fromX == j && fromY == i) return FROM_TINT;
        if (xGrid == j && yGrid == i) return HIGHLIGHT_TINT;
        if (aiMove != null && aiMove.length() >= 5) {
            if (aiMove.substring(0, 1).equals(i + "") && aiMove.substring(1, 2).equals(j + "")) return AI_FROM_TINT;
            if (aiMove.substring(3, 4).equals(i + "") && aiMove.substring(4, 5).equals(j + "")) return AI_TO_TINT;
        }
        if (fromX > -1 && board.isLegalMove(fromY + "" + fromX, i + "" + j)) return LEGAL_TINT;
        return NO_TINT;
    }

    /** Releases all GL resources owned by this window. */
    @Override
    public void dispose() {
        for (int i = 0; i < 8; i++) {
            for (int j = 0; j < 8; j++) {
                tiles[i][j].dispose();
            }
        }
        textureAtlas.dispose();
        shaderProgram.dispose();
    }

    /** Builds the quad mesh for board cell (row j, col i) as a triangle strip with UVs. */
    private Mesh genMesh(int j, int i) {
        Mesh mesh = new Mesh();
        mesh.setDrawingMode(GLConnectionMode.TRIANGLE_STRIP.glMode);
        float[] vertexBuffer = new float[]{
                toGLSpace((i + 1) / 10f), toGLSpace((j + 1) / 10f), 0f, 0f,
                toGLSpace((i + 1) / 10f), toGLSpace((j + 2) / 10f), 0f, 1f,
                toGLSpace((i + 2) / 10f), toGLSpace((j + 1) / 10f), 1f, 0f,
                toGLSpace((i + 2) / 10f), toGLSpace((j + 2) / 10f), 1f, 1f
        };
        int[] indexBuffer = new int[]{0, 1, 2, 3};
        mesh.setVertexBuffer(vertexBuffer, GLDataType.STATIC, new VertexDataDef("a_position", GLAttributeType.VEC2), new VertexDataDef("a_uv", GLAttributeType.VEC2));
        mesh.setElementBuffer(indexBuffer, GLDataType.STATIC);
        mesh.setShader(shaderProgram);
        return mesh;
    }

    /** Maps a [0,1] screen fraction into [-1,1] GL clip space. */
    private float toGLSpace(float v) {
        return 2 * v - 1;
    }

    /** Left-click selects a friendly piece, then attempts a move to the clicked square. */
    @Override
    protected boolean handleMouseInput(int button, int action, int mods) {
        if (action == GLFW.GLFW_RELEASE && button == GLFW.GLFW_MOUSE_BUTTON_1) {
            int xGrid = (int) Math.floor(getMouseX() / getWidth() * 10 - 1);
            int yGrid = 7 - (int) Math.floor(getMouseY() / getHeight() * 10 - 1);
            if (xGrid >= 0 && yGrid >= 0 && xGrid < 8 && yGrid < 8) {
                if (fromX == -1) {
                    // Only allow selecting a piece belonging to the side to move.
                    if ((whiteTeamMoving ? board.getPieceAt(yGrid, xGrid) > 0 : board.getPieceAt(yGrid, xGrid) < 0)) {
                        fromX = xGrid;
                        fromY = yGrid;
                    }
                } else {
                    tryMove(fromX, fromY, xGrid, yGrid);
                    fromX = -1;
                }
            } else {
                // Click off the board clears any selection.
                fromX = -1;
            }
            return true;
        }
        return super.handleMouseInput(button, action, mods);
    }

    /**
     * Applies the move (fX,fY)->(tX,tY) if it is legal for the side to move, then
     * flips the turn and asks the AI for its reply.
     *
     * @return true if the move was applied
     */
    private boolean tryMove(int fX, int fY, int tX, int tY) {
        if ((whiteTeamMoving ? board.getPieceAt(fY, fX) > 0 : board.getPieceAt(fY, fX) < 0) && board.isLegalMove(fY + "" + fX, tY + "" + tX)) {
            // BUGFIX: use the method's own parameters here; the original read the
            // fromX/fromY fields, which only coincidentally matched the arguments.
            board.applyConvertedMove(fY + "" + fX + ";" + tY + "" + tX + ";" + board.getPieceAt(fY, fX));
            whiteTeamMoving = !whiteTeamMoving;
            System.out.println();
            aiMove = getAIOutput();
            return true;
        }
        return false;
    }

    /** Index/score of the highest output in the 8x8 slice starting at {@code offset}.
     *  Rows are scanned reversed when {@code flipRows} is set (black's perspective).
     *  Returns {x, y, value}. */
    private double[] bestSquare(double[] outputs, int offset, boolean flipRows) {
        int bx = -1;
        int by = -1;
        double v = -1;
        for (int i = 0; i < 8; i++) {
            for (int j = 0; j < 8; j++) {
                int idx = 8 * (flipRows ? 7 - i : i) + j + offset;
                if (outputs[idx] > v) {
                    bx = i;
                    by = j;
                    v = outputs[idx];
                }
            }
        }
        return new double[]{bx, by, v};
    }

    /**
     * Runs the network on the current position (logging every layer's activations to
     * NeuronValues.txt), decodes the strongest from-square, to-square and piece type,
     * and — when it is black's turn and the move is legal — applies it and recurses
     * to produce the next suggestion for white.
     *
     * @return the decoded move "fXfY;tXtY;type", or "" if evaluation failed
     */
    private String getAIOutput() {
        double[] outputs;
        // try-with-resources: the original leaked the PrintWriter on exception.
        try (PrintWriter writer = new PrintWriter(new File("NeuronValues.txt"))) {
            int digits = 4;
            String formatStr = "%" + (digits + 4) + "." + digits + "f";
            System.out.println(whiteTeamMoving);
            // The input layer is the board encoding from the mover's perspective.
            double[] lastLayer = board.generateNeuralInputs(whiteTeamMoving);
            writer.print("[");
            for (int i = 0; i < lastLayer.length; i++) {
                writer.print(String.format(formatStr, lastLayer[i]));
                // BUGFIX: was `i < length - 2`, which dropped the final ", " separator.
                if (i < lastLayer.length - 1) {
                    writer.print(", ");
                }
            }
            writer.println("]");
            // Feed-forward one layer at a time, logging each layer's outputs.
            for (int i = 0; i < ai.getNeuralData().length; i++) {
                double[] newLayer = new double[ai.getNeuralData()[i].length];
                writer.print("[");
                for (int j = 0; j < newLayer.length; j++) {
                    newLayer[j] = ai.getNeuralData()[i][j].getOutput(lastLayer);
                    writer.print(String.format(formatStr, newLayer[j]));
                    // BUGFIX: the original tested the outer index `i` (and used -2) here.
                    if (j < newLayer.length - 1) {
                        writer.print(", ");
                    }
                }
                writer.println("]");
                lastLayer = newLayer;
            }
            outputs = lastLayer;
            writer.flush();
        } catch (Exception e) {
            e.printStackTrace();
            return "";
        }
        // Decode: outputs[0..63] = from-square scores, [64..127] = to-square scores,
        // [128..133] = piece type; black's squares are row-flipped.
        boolean flip = !whiteTeamMoving;
        double[] from = bestSquare(outputs, 0, flip);
        int fX = (int) from[0];
        int fY = (int) from[1];
        System.out.println("From: " + fX + " " + fY + " @ " + from[2]);
        double[] to = bestSquare(outputs, 64, flip);
        int tX = (int) to[0];
        int tY = (int) to[1];
        System.out.println("To: " + tX + " " + tY + " @ " + to[2]);
        int type = -1;
        {
            double v = -1;
            for (int i = 128; i < 134; i++) {
                if (outputs[i] > v) {
                    v = outputs[i];
                    type = i - 127;
                }
            }
            if (!whiteTeamMoving) type *= -1;
            System.out.println("Type: " + type + " @ " + v);
        }
        String move = fX + "" + fY + ";" + tX + "" + tY + ";" + type;
        if (!whiteTeamMoving && board.isLegalMove(fX + "" + fY, tX + "" + tY) && board.getPieceAt(fX, fY) < 0) {
            // It is black's (the AI's) turn and the move is legal: play it and
            // immediately compute the next suggestion for white.
            board.applyConvertedMove(move);
            whiteTeamMoving = !whiteTeamMoving;
            System.out.println();
            return getAIOutput();
        }
        return move;
    }
}
| |
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.hosted.testrunner;
import com.google.inject.Inject;
import com.yahoo.vespa.defaults.Defaults;
import com.yahoo.vespa.testrunner.HtmlLogger;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.SortedMap;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.function.Function;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static java.util.logging.Level.SEVERE;
/**
* @author valerijf
* @author jvenstad
*/
public class TestRunner implements com.yahoo.vespa.testrunner.TestRunner {
private static final Logger logger = Logger.getLogger(TestRunner.class.getName());
private static final Path vespaHome = Paths.get(Defaults.getDefaults().vespaHome());
private static final String settingsXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<settings xmlns=\"http://maven.apache.org/SETTINGS/1.0.0\"\n" +
" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
" xsi:schemaLocation=\"http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd\">\n" +
" <mirrors>\n" +
" <mirror>\n" +
" <id>maven central</id>\n" +
" <mirrorOf>*</mirrorOf>\n" + // Use this for everything!
" <url>https://repo.maven.apache.org/maven2/</url>\n" +
" </mirror>\n" +
" </mirrors>\n" +
"</settings>";
private final Path artifactsPath;
private final Path testPath;
private final Path configFile;
private final Path settingsFile;
private final Function<TestProfile, ProcessBuilder> testBuilder;
private final SortedMap<Long, LogRecord> log = new ConcurrentSkipListMap<>();
private volatile Status status = Status.NOT_STARTED;
@Inject
public TestRunner(TestRunnerConfig config) {
this(config.artifactsPath(),
vespaHome.resolve("tmp/test"),
vespaHome.resolve("tmp/config.json"),
vespaHome.resolve("tmp/settings.xml"),
profile -> mavenProcessFrom(profile, config));
}
TestRunner(Path artifactsPath, Path testPath, Path configFile, Path settingsFile, Function<TestProfile, ProcessBuilder> testBuilder) {
this.artifactsPath = artifactsPath;
this.testPath = testPath;
this.configFile = configFile;
this.settingsFile = settingsFile;
this.testBuilder = testBuilder;
}
static ProcessBuilder mavenProcessFrom(TestProfile profile, TestRunnerConfig config) {
List<String> command = new ArrayList<>();
command.add("mvn"); // mvn must be in PATH of the jDisc containers
command.add("test");
command.add("--batch-mode"); // Run in non-interactive (batch) mode (disables output color)
command.add("--show-version"); // Display version information WITHOUT stopping build
command.add("--settings"); // Need to override repository settings in ymaven config >_<
command.add(vespaHome.resolve("tmp/settings.xml").toString());
// Disable maven download progress indication
command.add("-Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn");
command.add("-Dstyle.color=always"); // Enable ANSI color codes again
command.add("-DfailIfNoTests=" + profile.failIfNoTests());
command.add("-Dvespa.test.config=" + vespaHome.resolve("tmp/config.json"));
if (config.useAthenzCredentials())
command.add("-Dvespa.test.credentials.root=" + Defaults.getDefaults().vespaHome() + "/var/vespa/sia");
else if (config.useTesterCertificate())
command.add("-Dvespa.test.credentials.root=" + config.artifactsPath());
command.add(String.format("-DargLine=-Xms%1$dm -Xmx%1$dm", config.surefireMemoryMb()));
command.add("-Dmaven.repo.local=" + vespaHome.resolve("tmp/.m2/repository"));
ProcessBuilder builder = new ProcessBuilder(command);
builder.environment().merge("MAVEN_OPTS", " -Djansi.force=true", String::concat);
builder.directory(vespaHome.resolve("tmp/test").toFile());
builder.redirectErrorStream(true);
return builder;
}
@Override
public synchronized CompletableFuture<?> test(Suite suite, byte[] testConfig) {
if (status == Status.RUNNING)
throw new IllegalArgumentException("Tests are already running; should not receive this request now.");
log.clear();
if ( ! hasTestsJar()) {
status = Status.NO_TESTS;
return CompletableFuture.completedFuture(null);
}
status = Status.RUNNING;
return CompletableFuture.runAsync(() -> runTests(toProfile(suite), testConfig));
}
@Override
public Collection<LogRecord> getLog(long after) {
return log.tailMap(after + 1).values();
}
@Override
public synchronized Status getStatus() {
return status;
}
private boolean hasTestsJar() {
return listFiles(artifactsPath).stream().anyMatch(file -> file.toString().endsWith("tests.jar"));
}
private void runTests(TestProfile testProfile, byte[] testConfig) {
ProcessBuilder builder = testBuilder.apply(testProfile);
{
LogRecord record = new LogRecord(Level.INFO,
String.format("Starting %s. Artifacts directory: %s Config file: %s\nCommand to run: %s\nEnv: %s\n",
testProfile.name(), artifactsPath, configFile,
String.join(" ", builder.command()),
builder.environment()));
log.put(record.getSequenceNumber(), record);
logger.log(record);
log.put(record.getSequenceNumber(), record);
logger.log(record);
}
boolean success;
try {
writeTestApplicationPom(testProfile);
Files.write(configFile, testConfig);
Files.write(settingsFile, settingsXml.getBytes());
Process mavenProcess = builder.start();
BufferedReader in = new BufferedReader(new InputStreamReader(mavenProcess.getInputStream()));
HtmlLogger htmlLogger = new HtmlLogger();
in.lines().forEach(line -> {
LogRecord html = htmlLogger.toLog(line);
log.put(html.getSequenceNumber(), html);
});
success = mavenProcess.waitFor() == 0;
}
catch (Exception exception) {
LogRecord record = new LogRecord(SEVERE, "Failed to execute maven command: " + String.join(" ", builder.command()));
record.setThrown(exception);
logger.log(record);
log.put(record.getSequenceNumber(), record);
status = Status.ERROR;
return;
}
status = success ? Status.SUCCESS : Status.FAILURE;
}
private void writeTestApplicationPom(TestProfile testProfile) throws IOException {
List<Path> files = listFiles(artifactsPath);
Path testJar = files.stream().filter(file -> file.toString().endsWith("tests.jar")).findFirst()
.orElseThrow(() -> new NoTestsException("No file ending with 'tests.jar' found under '" + artifactsPath + "'!"));
String pomXml = PomXmlGenerator.generatePomXml(testProfile, files, testJar);
testPath.toFile().mkdirs();
Files.write(testPath.resolve("pom.xml"), pomXml.getBytes());
}
private static List<Path> listFiles(Path directory) {
try (Stream<Path> element = Files.walk(directory)) {
return element
.filter(Files::isRegularFile)
.filter(path -> path.toString().endsWith(".jar"))
.collect(Collectors.toList());
} catch (IOException e) {
throw new UncheckedIOException("Failed to list files under " + directory, e);
}
}
static class NoTestsException extends RuntimeException {
private NoTestsException(String message) { super(message); }
}
static TestProfile toProfile(Suite suite) {
switch (suite) {
case SYSTEM_TEST: return TestProfile.SYSTEM_TEST;
case STAGING_SETUP_TEST: return TestProfile.STAGING_SETUP_TEST;
case STAGING_TEST: return TestProfile.STAGING_TEST;
case PRODUCTION_TEST: return TestProfile.PRODUCTION_TEST;
default: throw new IllegalArgumentException("Unknown test suite '" + suite + "'");
}
}
}
| |
/*
* XML Type: UpdateContextSubscriptionRequest
* Namespace:
* Java type: noNamespace.UpdateContextSubscriptionRequest
*
* Automatically generated - do not modify.
*/
package noNamespace;
/**
* An XML UpdateContextSubscriptionRequest(@).
*
* This is a complex type.
*/
public interface UpdateContextSubscriptionRequest extends org.apache.xmlbeans.XmlObject
{
public static final org.apache.xmlbeans.SchemaType type = (org.apache.xmlbeans.SchemaType)
org.apache.xmlbeans.XmlBeans.typeSystemForClassLoader(UpdateContextSubscriptionRequest.class.getClassLoader(), "schemaorg_apache_xmlbeans.system.s12286FC6F9B192FE0A2CA4F97C96C4A4").resolveHandle("updatecontextsubscriptionrequest4ab5type");
/**
* Gets the "duration" element
*/
org.apache.xmlbeans.GDuration getDuration();
/**
* Gets (as xml) the "duration" element
*/
org.apache.xmlbeans.XmlDuration xgetDuration();
/**
* True if has "duration" element
*/
boolean isSetDuration();
/**
* Sets the "duration" element
*/
void setDuration(org.apache.xmlbeans.GDuration duration);
/**
* Sets (as xml) the "duration" element
*/
void xsetDuration(org.apache.xmlbeans.XmlDuration duration);
/**
* Unsets the "duration" element
*/
void unsetDuration();
/**
* Gets the "restriction" element
*/
noNamespace.Restriction getRestriction();
/**
* True if has "restriction" element
*/
boolean isSetRestriction();
/**
* Sets the "restriction" element
*/
void setRestriction(noNamespace.Restriction restriction);
/**
* Appends and returns a new empty "restriction" element
*/
noNamespace.Restriction addNewRestriction();
/**
* Unsets the "restriction" element
*/
void unsetRestriction();
/**
* Gets the "subscriptionId" element
*/
java.lang.String getSubscriptionId();
/**
* Gets (as xml) the "subscriptionId" element
*/
org.apache.xmlbeans.XmlString xgetSubscriptionId();
/**
* Sets the "subscriptionId" element
*/
void setSubscriptionId(java.lang.String subscriptionId);
/**
* Sets (as xml) the "subscriptionId" element
*/
void xsetSubscriptionId(org.apache.xmlbeans.XmlString subscriptionId);
/**
* Gets the "notifyConditions" element
*/
noNamespace.NotifyConditionList getNotifyConditions();
/**
* True if has "notifyConditions" element
*/
boolean isSetNotifyConditions();
/**
* Sets the "notifyConditions" element
*/
void setNotifyConditions(noNamespace.NotifyConditionList notifyConditions);
/**
* Appends and returns a new empty "notifyConditions" element
*/
noNamespace.NotifyConditionList addNewNotifyConditions();
/**
* Unsets the "notifyConditions" element
*/
void unsetNotifyConditions();
/**
* Gets the "throttling" element
*/
org.apache.xmlbeans.GDuration getThrottling();
/**
* Gets (as xml) the "throttling" element
*/
org.apache.xmlbeans.XmlDuration xgetThrottling();
/**
* True if has "throttling" element
*/
boolean isSetThrottling();
/**
* Sets the "throttling" element
*/
void setThrottling(org.apache.xmlbeans.GDuration throttling);
/**
* Sets (as xml) the "throttling" element
*/
void xsetThrottling(org.apache.xmlbeans.XmlDuration throttling);
/**
* Unsets the "throttling" element
*/
void unsetThrottling();
/**
* A factory class with static methods for creating instances
* of this type.
*/
public static final class Factory
{
public static noNamespace.UpdateContextSubscriptionRequest newInstance() {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, null ); }
public static noNamespace.UpdateContextSubscriptionRequest newInstance(org.apache.xmlbeans.XmlOptions options) {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, options ); }
/** @param xmlAsString the string value to parse */
public static noNamespace.UpdateContextSubscriptionRequest parse(java.lang.String xmlAsString) throws org.apache.xmlbeans.XmlException {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xmlAsString, type, null ); }
public static noNamespace.UpdateContextSubscriptionRequest parse(java.lang.String xmlAsString, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xmlAsString, type, options ); }
/** @param file the file from which to load an xml document */
public static noNamespace.UpdateContextSubscriptionRequest parse(java.io.File file) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( file, type, null ); }
public static noNamespace.UpdateContextSubscriptionRequest parse(java.io.File file, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( file, type, options ); }
public static noNamespace.UpdateContextSubscriptionRequest parse(java.net.URL u) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( u, type, null ); }
public static noNamespace.UpdateContextSubscriptionRequest parse(java.net.URL u, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( u, type, options ); }
public static noNamespace.UpdateContextSubscriptionRequest parse(java.io.InputStream is) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( is, type, null ); }
public static noNamespace.UpdateContextSubscriptionRequest parse(java.io.InputStream is, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( is, type, options ); }
public static noNamespace.UpdateContextSubscriptionRequest parse(java.io.Reader r) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( r, type, null ); }
public static noNamespace.UpdateContextSubscriptionRequest parse(java.io.Reader r, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( r, type, options ); }
public static noNamespace.UpdateContextSubscriptionRequest parse(javax.xml.stream.XMLStreamReader sr) throws org.apache.xmlbeans.XmlException {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( sr, type, null ); }
public static noNamespace.UpdateContextSubscriptionRequest parse(javax.xml.stream.XMLStreamReader sr, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( sr, type, options ); }
public static noNamespace.UpdateContextSubscriptionRequest parse(org.w3c.dom.Node node) throws org.apache.xmlbeans.XmlException {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( node, type, null ); }
public static noNamespace.UpdateContextSubscriptionRequest parse(org.w3c.dom.Node node, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( node, type, options ); }
/** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
public static noNamespace.UpdateContextSubscriptionRequest parse(org.apache.xmlbeans.xml.stream.XMLInputStream xis) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xis, type, null ); }
/** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
public static noNamespace.UpdateContextSubscriptionRequest parse(org.apache.xmlbeans.xml.stream.XMLInputStream xis, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
return (noNamespace.UpdateContextSubscriptionRequest) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xis, type, options ); }
/** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
public static org.apache.xmlbeans.xml.stream.XMLInputStream newValidatingXMLInputStream(org.apache.xmlbeans.xml.stream.XMLInputStream xis) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
return org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newValidatingXMLInputStream( xis, type, null ); }
/** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
public static org.apache.xmlbeans.xml.stream.XMLInputStream newValidatingXMLInputStream(org.apache.xmlbeans.xml.stream.XMLInputStream xis, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
return org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newValidatingXMLInputStream( xis, type, options ); }
private Factory() { } // No instance of this class allowed
}
}
| |
package php.runtime.memory.support;
import php.runtime.Memory;
import php.runtime.env.Environment;
import php.runtime.env.TraceInfo;
import php.runtime.exceptions.CriticalException;
import php.runtime.lang.BaseWrapper;
import php.runtime.lang.IObject;
import php.runtime.memory.ObjectMemory;
import php.runtime.memory.StringMemory;
import php.runtime.memory.support.operation.*;
import php.runtime.memory.support.operation.array.*;
import php.runtime.memory.support.operation.collection.HashSetMemoryOperation;
import php.runtime.memory.support.operation.collection.ListMemoryOperation;
import php.runtime.memory.support.operation.collection.SetMemoryOperation;
import php.runtime.memory.support.operation.iterator.IterableMemoryOperation;
import php.runtime.memory.support.operation.map.HashMapMemoryOperation;
import php.runtime.memory.support.operation.map.MapMemoryOperation;
import php.runtime.memory.support.operation.map.PropertiesMemoryOperation;
import php.runtime.reflection.ParameterEntity;
import php.runtime.reflection.support.ReflectionUtils;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
abstract public class MemoryOperation<T> {
protected final static Map<Class<?>, Class<? extends BaseWrapper>> wrappers = new HashMap<Class<?>, Class<? extends BaseWrapper>>();
protected final static Map<Class<? extends BaseWrapper>, Class<?>> wrappersOut = new HashMap<Class<? extends BaseWrapper>, Class<?>>();
protected final static Map<Class<?>, MemoryOperation> operations = new HashMap<Class<?>, MemoryOperation>();
protected final static Map<ParametrizedClass, MemoryOperation> genericOperations = new HashMap<ParametrizedClass, MemoryOperation>();
abstract public Class<?>[] getOperationClasses();
final public T convertNoThrow(Environment env, TraceInfo trace, Memory arg) {
try {
return convert(env, trace, arg);
} catch (Throwable throwable) {
env.wrapThrow(throwable);
return null;
}
}
final public Memory unconvertNoThow(Environment env, TraceInfo trace, T arg) {
try {
return unconvert(env, trace, arg);
} catch (Throwable throwable) {
env.wrapThrow(throwable);
return Memory.NULL;
}
}
abstract public T convert(Environment env, TraceInfo trace, Memory arg) throws Throwable;
abstract public Memory unconvert(Environment env, TraceInfo trace, T arg) throws Throwable;
public void releaseConverted(Environment env, TraceInfo info, T arg) {
// nop
}
public Type[] getGenericTypes() {
return null;
}
public void applyTypeHinting(ParameterEntity parameter) {
// nop
}
protected MemoryOperation<T> instance(Type... genericTypes) {
return this;
}
public static <T> Class<? extends BaseWrapper> getWrapper(Class<T> clazz) {
return wrappers.get(clazz);
}
@SuppressWarnings("unchecked")
public static <T> Class<T> getClassOfWrapper(Class<? extends BaseWrapper<T>> clazz) {
return (Class<T>) wrappersOut.get(clazz);
}
@SuppressWarnings("unchecked")
public static MemoryOperation get(final Class<?> type, Type genericTypes) {
MemoryOperation operation = null;
if (genericTypes instanceof ParameterizedType) {
operation = genericOperations.get(new ParametrizedClass(type, ((ParameterizedType) genericTypes).getActualTypeArguments()));
}
if (operation == null) {
operation = operations.get(type);
if (operation == null) {
final Class<? extends BaseWrapper> wrapperClass = wrappers.get(type);
if (wrapperClass != null) {
final Constructor<BaseWrapper> constructor;
try {
constructor = (Constructor<BaseWrapper>) wrapperClass.getConstructor(Environment.class, type);
} catch (NoSuchMethodException e) {
throw new CriticalException(e);
}
return new MemoryOperation() {
@Override
public Class<?>[] getOperationClasses() {
return new Class<?>[0];
}
@Override
public Object convert(Environment env, TraceInfo trace, Memory arg) throws Throwable {
if (arg.isNull()) {
return null;
}
return arg.toObject(BaseWrapper.class).getWrappedObject();
}
@Override
public Memory unconvert(Environment env, TraceInfo trace, Object arg) throws Throwable {
if (arg == null) {
return Memory.NULL;
}
Constructor<BaseWrapper> constructorContext = constructor;
Class<? extends BaseWrapper> wrapperClassContext = wrapperClass;
if (arg.getClass() != type) {
wrapperClassContext = wrappers.get(arg.getClass());
}
if (wrapperClassContext != null && wrapperClassContext != wrapperClass) {
constructorContext = (Constructor<BaseWrapper>) wrapperClassContext.getConstructor(Environment.class, arg.getClass());
}
try {
return ObjectMemory.valueOf(constructorContext.newInstance(env, arg));
} catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
throw new CriticalException(e);
}
}
@Override
public void applyTypeHinting(ParameterEntity parameter) {
parameter.setTypeNativeClass(type);
}
};
}
if (IObject.class.isAssignableFrom(type)) {
return new MemoryOperation() {
@Override
public Class<?>[] getOperationClasses() {
return new Class<?>[]{IObject.class};
}
@Override
@SuppressWarnings("unchecked")
public Object convert(Environment env, TraceInfo trace, Memory arg) throws Throwable {
if (arg.isNull()) {
return null;
}
return arg.toObject((Class<? extends IObject>) type);
}
@Override
public Memory unconvert(Environment env, TraceInfo trace, Object arg) throws Throwable {
if (arg == null) {
return Memory.NULL;
}
return ObjectMemory.valueOf((IObject) arg);
}
@Override
public void applyTypeHinting(ParameterEntity parameter) {
parameter.setType(ReflectionUtils.getClassName(type));
}
};
} else if (Enum.class.isAssignableFrom(type)) {
return new MemoryOperation() {
@Override
public Class<?>[] getOperationClasses() {
return new Class<?>[] { Enum.class };
}
@Override
@SuppressWarnings("unchecked")
public Object convert(Environment env, TraceInfo trace, Memory arg) throws Throwable {
return arg.isNull() ? null : Enum.valueOf((Class<? extends Enum>)type, arg.toString());
}
@Override
public Memory unconvert(Environment env, TraceInfo trace, Object arg) throws Throwable {
return arg == null ? Memory.NULL : StringMemory.valueOf(((Enum) arg).name());
}
@Override
public void applyTypeHinting(ParameterEntity parameter) {
parameter.setTypeEnum((Class<? extends Enum>)type);
}
};
} else if (type.isArray()) {
MemoryOperation arrayMemoryOperation = new ArrayMemoryOperation(type);
register(arrayMemoryOperation);
return arrayMemoryOperation;
}
}
}
if (operation == null) {
return null;
}
if (genericTypes instanceof ParameterizedType) {
return operation.instance(((ParameterizedType) genericTypes).getActualTypeArguments());
}
return operation;
}
@SuppressWarnings("unchecked")
public static void register(MemoryOperation operation) {
if (operation.getGenericTypes() != null) {
for (Class<?> type : operation.getOperationClasses()) {
genericOperations.put(new ParametrizedClass(type, operation.getGenericTypes()), operation);
}
} else {
for (Class<?> type : operation.getOperationClasses()) {
operations.put(type, operation);
}
}
}
public static <T> void registerWrapper(Class<T> clazz, Class<? extends BaseWrapper> wrapperClass) {
wrappers.put(clazz, wrapperClass);
wrappersOut.put(wrapperClass, clazz);
}
public static class ParametrizedClass<T> {
protected Class<T> clazz;
protected Type[] genericTypes;
public ParametrizedClass(Class<T> clazz, Type[] genericTypes) {
this.clazz = clazz;
this.genericTypes = genericTypes;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof ParametrizedClass)) return false;
ParametrizedClass that = (ParametrizedClass) o;
if (!clazz.equals(that.clazz)) return false;
if (!Arrays.equals(genericTypes, that.genericTypes)) return false;
return true;
}
@Override
public int hashCode() {
int result = clazz.hashCode();
result = 31 * result + Arrays.hashCode(genericTypes);
return result;
}
}
static {
register(new ObjectMemoryOperation());
register(new VoidMemoryOperation());
register(new MemoryMemoryOperation());
register(new ArrayMemoryMemoryOperation());
register(new BooleanMemoryOperation());
register(new LongMemoryOperation());
register(new IntegerMemoryOperation());
register(new ShortMemoryOperation());
register(new ByteMemoryOperation());
register(new DoubleMemoryOperation());
register(new FloatMemoryOperation());
register(new StringMemoryOperation());
register(new CharSequenceMemoryOperation());
register(new CharacterMemoryOperation());
register(new InvokerMemoryOperation());
register(new ForeachIteratorMemoryOperation());
register(new InputStreamMemoryOperation());
register(new OutputStreamMemoryOperation());
register(new FileMemoryOperation());
register(new ByteArrayInputStreamMemoryOperation());
register(new PatternMemoryOperation());
register(new IterableMemoryOperation());
register(new ListMemoryOperation());
register(new SetMemoryOperation());
register(new HashSetMemoryOperation());
register(new MapMemoryOperation());
register(new HashMapMemoryOperation());
register(new PropertiesMemoryOperation());
register(new UrlMemoryOperation());
register(new UriMemoryOperation());
register(new BinaryMemoryOperation());
register(new NumberMemoryOperation());
register(new BigDecimalOperation());
register(new BigIntegerOperation());
register(new ClassMemoryOperation());
register(new LocaleMemoryOperation());
register(new DateMemoryOperation());
register(new TimeZoneMemoryOperation());
register(new ScannerMemoryOperation());
register(new ThreadMemoryOperation());
register(new ThreadGroupMemoryOperation());
register(new FloatArrayMemoryOperation());
register(new DoubleArrayMemoryOperation());
register(new LongArrayMemoryOperation());
register(new IntegerArrayMemoryOperation());
register(new ShortArrayMemoryOperation());
register(new BooleanArrayMemoryOperation());
register(new CharArrayMemoryOperation());
}
}
| |
/*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.sms.config;
import java.io.IOException;
import java.util.*;
import lombok.extern.slf4j.Slf4j;
import org.hisp.dhis.outboundmessage.OutboundMessage;
import org.hisp.dhis.outboundmessage.OutboundMessageBatch;
import org.hisp.dhis.outboundmessage.OutboundMessageResponse;
import org.hisp.dhis.sms.outbound.GatewayResponse;
import org.jsmpp.InvalidResponseException;
import org.jsmpp.PDUException;
import org.jsmpp.bean.*;
import org.jsmpp.extra.NegativeResponseException;
import org.jsmpp.extra.ResponseTimeoutException;
import org.jsmpp.session.BindParameter;
import org.jsmpp.session.SMPPSession;
import org.jsmpp.util.AbsoluteTimeFormatter;
import org.jsmpp.util.DeliveryReceiptState;
import org.jsmpp.util.TimeFormatter;
import org.springframework.stereotype.Component;
/**
* @author Zubair Asghar.
*/
@Slf4j
@Component( "org.hisp.dhis.sms.config.SMPPClient" )
public class SMPPClient
{
private static final String SOURCE = "DHIS2";
private static final String SENDING_FAILED = "SMS sending failed";
private static final String SESSION_ERROR = "SMPP Session cannot be null";
private static final TimeFormatter TIME_FORMATTER = new AbsoluteTimeFormatter();
public OutboundMessageResponse send( String text, Set<String> recipients, SMPPGatewayConfig config )
{
SMPPSession session = start( config );
if ( session == null )
{
return new OutboundMessageResponse( SESSION_ERROR, GatewayResponse.SMPP_SESSION_FAILURE, false );
}
OutboundMessageResponse response = send( session, text, recipients, config );
stop( session );
return response;
}
public List<OutboundMessageResponse> sendBatch( OutboundMessageBatch batch, SMPPGatewayConfig config )
{
SMPPSession session = start( config );
if ( session == null )
{
return Collections.emptyList();
}
List<OutboundMessageResponse> responses = new ArrayList<>();
for ( OutboundMessage message : batch.getMessages() )
{
OutboundMessageResponse response = send( session, message.getText(), message.getRecipients(), config );
responses.add( response );
}
stop( session );
return responses;
}
// -------------------------------------------------------------------------
// Supportive methods
// -------------------------------------------------------------------------
private OutboundMessageResponse send( SMPPSession session, String text, Set<String> recipients,
SMPPGatewayConfig config )
{
OutboundMessageResponse response = new OutboundMessageResponse();
SubmitMultiResult result = null;
try
{
result = session.submitMultiple( config.getSystemType(), config.getTypeOfNumber(),
config.getNumberPlanIndicator(), SOURCE, getAddresses( recipients ), new ESMClass(), (byte) 0, (byte) 1,
TIME_FORMATTER.format( new Date() ), null,
new RegisteredDelivery( SMSCDeliveryReceipt.SUCCESS_FAILURE ), ReplaceIfPresentFlag.DEFAULT,
new GeneralDataCoding( Alphabet.ALPHA_DEFAULT, MessageClass.CLASS1, config.isCompressed() ), (byte) 0,
text.getBytes() );
log.info( String.format( "Messages submitted, result is %s", result.getMessageId() ) );
}
catch ( PDUException e )
{
log.error( "Invalid PDU parameter", e );
}
catch ( ResponseTimeoutException e )
{
log.error( "Response timeout", e );
}
catch ( InvalidResponseException e )
{
log.error( "Receive invalid response", e );
}
catch ( NegativeResponseException e )
{
log.error( "Receive negative response", e );
}
catch ( IOException e )
{
log.error( "I/O error", e );
}
catch ( Exception e )
{
log.error( "Exception in submitting SMPP request", e );
}
if ( result != null )
{
if ( result.getUnsuccessDeliveries() == null || result.getUnsuccessDeliveries().length == 0 )
{
log.info( "Message pushed to broker successfully" );
response.setOk( true );
response.setDescription( result.getMessageId() );
response.setResponseObject( GatewayResponse.RESULT_CODE_0 );
}
else
{
String failureCause = DeliveryReceiptState
.valueOf( result.getUnsuccessDeliveries()[0].getErrorStatusCode() ) + " - " + result.getMessageId();
log.error( failureCause );
response.setDescription( failureCause );
response.setResponseObject( GatewayResponse.FAILED );
}
}
else
{
response.setDescription( SENDING_FAILED );
response.setResponseObject( GatewayResponse.FAILED );
}
return response;
}
/**
 * Unbinds and closes the given session; a {@code null} session is a no-op.
 */
private void stop( SMPPSession session )
{
    if ( session == null )
    {
        return;
    }
    session.unbindAndClose();
}
/**
 * Opens and binds a new SMPP session against the SMSC described by
 * {@code config}.
 *
 * @param config connection settings (host template, port, credentials)
 * @return the connected session, or {@code null} when the connection attempt
 *         fails with an I/O error
 */
private SMPPSession start( SMPPGatewayConfig config )
{
    try
    {
        SMPPSession session = new SMPPSession( config.getUrlTemplate(), config.getPort(),
            getBindParameters( config ) );
        log.info( "SMPP client connected to SMSC: " + config.getUrlTemplate() );
        return session;
    }
    catch ( IOException e )
    {
        // Connection failures are logged and surfaced to the caller as a null session.
        log.error( "I/O error occurred", e );
    }
    return null;
}
/**
 * Builds the bind parameters (bind type, credentials, system type and
 * TON/NPI) used when establishing the SMPP session.
 */
private BindParameter getBindParameters( SMPPGatewayConfig config )
{
    BindParameter bindParameters = new BindParameter( config.getBindType(), config.getUsername(),
        config.getPassword(), config.getSystemType(), config.getTypeOfNumber(),
        config.getNumberPlanIndicator(), null );
    return bindParameters;
}
/**
 * Maps each recipient number to an SMPP {@link Address} with national type of
 * number and unknown numbering plan indicator.
 */
private Address[] getAddresses( Set<String> recipients )
{
    Address[] result = new Address[recipients.size()];
    int index = 0;
    for ( String msisdn : recipients )
    {
        result[index++] = new Address( TypeOfNumber.NATIONAL, NumberingPlanIndicator.UNKNOWN, msisdn );
    }
    return result;
}
}
| |
/*
* Copyright 2015 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License, version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.netty.handler.codec.http2;
import io.netty.util.collection.IntCollections;
import io.netty.util.collection.IntObjectHashMap;
import io.netty.util.collection.IntObjectMap;
import io.netty.util.internal.DefaultPriorityQueue;
import io.netty.util.internal.EmptyPriorityQueue;
import io.netty.util.internal.MathUtil;
import io.netty.util.internal.PriorityQueue;
import io.netty.util.internal.PriorityQueueNode;
import io.netty.util.internal.SystemPropertyUtil;
import io.netty.util.internal.UnstableApi;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import static io.netty.handler.codec.http2.Http2CodecUtil.CONNECTION_STREAM_ID;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_MIN_ALLOCATION_CHUNK;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_PRIORITY_WEIGHT;
import static io.netty.handler.codec.http2.Http2CodecUtil.streamableBytes;
import static io.netty.handler.codec.http2.Http2Error.INTERNAL_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.connectionError;
import static java.lang.Integer.MAX_VALUE;
import static java.lang.Math.max;
import static java.lang.Math.min;
/**
* A {@link StreamByteDistributor} that is sensitive to stream priority and uses
* <a href="https://en.wikipedia.org/wiki/Weighted_fair_queueing">Weighted Fair Queueing</a> approach for distributing
* bytes.
* <p>
* Inspiration for this distributor was taken from Linux's
* <a href="https://www.kernel.org/doc/Documentation/scheduler/sched-design-CFS.txt">Completely Fair Scheduler</a>
* to model the distribution of bytes to simulate an "ideal multi-tasking CPU", but in this case we are simulating
* an "ideal multi-tasking NIC".
* <p>
* Each write operation will use the {@link #allocationQuantum(int)} to know how many more bytes should be allocated
* relative to the next stream which wants to write. This is to balance fairness while also considering goodput.
*/
@UnstableApi
public final class WeightedFairQueueByteDistributor implements StreamByteDistributor {
    /**
     * The initial size of the children map is chosen to be conservative on initial memory allocations under
     * the assumption that most streams will have a small number of children. This choice may be
     * sub-optimal if when children are present there are many children (i.e. a web page which has many
     * dependencies to load).
     *
     * Visible only for testing!
     */
    static final int INITIAL_CHILDREN_MAP_SIZE =
            max(1, SystemPropertyUtil.getInt("io.netty.http2.childrenMapSize", 2));
    /**
     * FireFox currently uses 5 streams to establish QoS classes.
     */
    private static final int DEFAULT_MAX_STATE_ONLY_SIZE = 5;

    private final Http2Connection.PropertyKey stateKey;
    /**
     * If there is no Http2Stream object, but we still persist priority information then this is where the state will
     * reside.
     */
    private final IntObjectMap<State> stateOnlyMap;
    /**
     * This queue will hold streams that are not active and provides the capability to retain priority for streams which
     * have no {@link Http2Stream} object. See {@link StateOnlyComparator} for the priority comparator.
     */
    private final PriorityQueue<State> stateOnlyRemovalQueue;
    private final Http2Connection connection;
    private final State connectionState;
    /**
     * The minimum number of bytes that we will attempt to allocate to a stream. This is to
     * help improve goodput on a per-stream basis.
     */
    private int allocationQuantum = DEFAULT_MIN_ALLOCATION_CHUNK;
    private final int maxStateOnlySize;

    public WeightedFairQueueByteDistributor(Http2Connection connection) {
        this(connection, DEFAULT_MAX_STATE_ONLY_SIZE);
    }

    public WeightedFairQueueByteDistributor(Http2Connection connection, int maxStateOnlySize) {
        if (maxStateOnlySize < 0) {
            // The guard only rejects negative values; 0 is legal (handled below), so the message
            // must say ">= 0" rather than "> 0".
            throw new IllegalArgumentException("maxStateOnlySize: " + maxStateOnlySize + " (expected: >= 0)");
        } else if (maxStateOnlySize == 0) {
            stateOnlyMap = IntCollections.emptyMap();
            stateOnlyRemovalQueue = EmptyPriorityQueue.instance();
        } else {
            stateOnlyMap = new IntObjectHashMap<State>(maxStateOnlySize);
            // +2 because we may exceed the limit by 2 if a new dependency has no associated Http2Stream object. We need
            // to create the State objects to put them into the dependency tree, which then impacts priority.
            stateOnlyRemovalQueue = new DefaultPriorityQueue<State>(StateOnlyComparator.INSTANCE, maxStateOnlySize + 2);
        }
        this.maxStateOnlySize = maxStateOnlySize;
        this.connection = connection;
        stateKey = connection.newKey();
        final Http2Stream connectionStream = connection.connectionStream();
        connectionStream.setProperty(stateKey, connectionState = new State(connectionStream, 16));

        // Register for notification of new streams.
        connection.addListener(new Http2ConnectionAdapter() {
            @Override
            public void onStreamAdded(Http2Stream stream) {
                State state = stateOnlyMap.remove(stream.id());
                if (state == null) {
                    state = new State(stream);
                    // Only the stream which was just added will change parents. So we only need an array of size 1.
                    List<ParentChangedEvent> events = new ArrayList<ParentChangedEvent>(1);
                    connectionState.takeChild(state, false, events);
                    notifyParentChanged(events);
                } else {
                    // The stream already existed in "state only" form; attach the real stream object.
                    stateOnlyRemovalQueue.removeTyped(state);
                    state.stream = stream;
                }
                switch (stream.state()) {
                    case RESERVED_REMOTE:
                    case RESERVED_LOCAL:
                        state.setStreamReservedOrActivated();
                        // wasStreamReservedOrActivated is part of the comparator for stateOnlyRemovalQueue there is no
                        // need to reprioritize here because it will not be in stateOnlyRemovalQueue.
                        break;
                    default:
                        break;
                }
                stream.setProperty(stateKey, state);
            }

            @Override
            public void onStreamActive(Http2Stream stream) {
                state(stream).setStreamReservedOrActivated();
                // wasStreamReservedOrActivated is part of the comparator for stateOnlyRemovalQueue there is no need to
                // reprioritize here because it will not be in stateOnlyRemovalQueue.
            }

            @Override
            public void onStreamClosed(Http2Stream stream) {
                state(stream).close();
            }

            @Override
            public void onStreamRemoved(Http2Stream stream) {
                // The stream has been removed from the connection. We can no longer rely on the stream's property
                // storage to track the State. If we have room, and the precedence of the stream is sufficient, we
                // should retain the State in the stateOnlyMap.
                State state = state(stream);

                // Typically the stream is set to null when the stream is closed because it is no longer needed to write
                // data. However if the stream was not activated it may not be closed (reserved streams) so we ensure
                // the stream reference is set to null to avoid retaining a reference longer than necessary.
                state.stream = null;

                if (WeightedFairQueueByteDistributor.this.maxStateOnlySize == 0) {
                    state.parent.removeChild(state);
                    return;
                }
                if (stateOnlyRemovalQueue.size() == WeightedFairQueueByteDistributor.this.maxStateOnlySize) {
                    State stateToRemove = stateOnlyRemovalQueue.peek();
                    if (StateOnlyComparator.INSTANCE.compare(stateToRemove, state) >= 0) {
                        // The "lowest priority" stream is a "higher priority" than the stream being removed, so we
                        // just discard the state.
                        state.parent.removeChild(state);
                        return;
                    }
                    stateOnlyRemovalQueue.poll();
                    stateToRemove.parent.removeChild(stateToRemove);
                    stateOnlyMap.remove(stateToRemove.streamId);
                }
                stateOnlyRemovalQueue.add(state);
                stateOnlyMap.put(state.streamId, state);
            }
        });
    }

    @Override
    public void updateStreamableBytes(StreamState state) {
        state(state.stream()).updateStreamableBytes(streamableBytes(state),
                                                    state.hasFrame() && state.windowSize() >= 0);
    }

    @Override
    public void updateDependencyTree(int childStreamId, int parentStreamId, short weight, boolean exclusive) {
        State state = state(childStreamId);
        if (state == null) {
            // If there is no State object that means there is no Http2Stream object and we would have to keep the
            // State object in the stateOnlyMap and stateOnlyRemovalQueue. However if maxStateOnlySize is 0 this means
            // stateOnlyMap and stateOnlyRemovalQueue are empty collections and cannot be modified so we drop the State.
            if (maxStateOnlySize == 0) {
                return;
            }
            state = new State(childStreamId);
            stateOnlyRemovalQueue.add(state);
            stateOnlyMap.put(childStreamId, state);
        }

        State newParent = state(parentStreamId);
        if (newParent == null) {
            // If there is no State object that means there is no Http2Stream object and we would have to keep the
            // State object in the stateOnlyMap and stateOnlyRemovalQueue. However if maxStateOnlySize is 0 this means
            // stateOnlyMap and stateOnlyRemovalQueue are empty collections and cannot be modified so we drop the State.
            if (maxStateOnlySize == 0) {
                return;
            }
            newParent = new State(parentStreamId);
            stateOnlyRemovalQueue.add(newParent);
            stateOnlyMap.put(parentStreamId, newParent);
            // Only the stream which was just added will change parents. So we only need an array of size 1.
            List<ParentChangedEvent> events = new ArrayList<ParentChangedEvent>(1);
            connectionState.takeChild(newParent, false, events);
            notifyParentChanged(events);
        }

        // if activeCountForTree == 0 then it will not be in its parent's pseudoTimeQueue and thus should not be counted
        // toward parent.totalQueuedWeights.
        if (state.activeCountForTree != 0 && state.parent != null) {
            state.parent.totalQueuedWeights += weight - state.weight;
        }
        state.weight = weight;

        if (newParent != state.parent || (exclusive && newParent.children.size() != 1)) {
            final List<ParentChangedEvent> events;
            if (newParent.isDescendantOf(state)) {
                events = new ArrayList<ParentChangedEvent>(2 + (exclusive ? newParent.children.size() : 0));
                state.parent.takeChild(newParent, false, events);
            } else {
                events = new ArrayList<ParentChangedEvent>(1 + (exclusive ? newParent.children.size() : 0));
            }
            newParent.takeChild(state, exclusive, events);
            notifyParentChanged(events);
        }

        // The location in the dependency tree impacts the priority in the stateOnlyRemovalQueue map. If we created new
        // State objects we must check if we exceeded the limit after we insert into the dependency tree to ensure the
        // stateOnlyRemovalQueue has been updated.
        while (stateOnlyRemovalQueue.size() > maxStateOnlySize) {
            State stateToRemove = stateOnlyRemovalQueue.poll();
            stateToRemove.parent.removeChild(stateToRemove);
            stateOnlyMap.remove(stateToRemove.streamId);
        }
    }

    @Override
    public boolean distribute(int maxBytes, Writer writer) throws Http2Exception {
        // As long as there is some active frame we should write at least 1 time.
        if (connectionState.activeCountForTree == 0) {
            return false;
        }

        // The goal is to write until we write all the allocated bytes or are no longer making progress.
        // We still attempt to write even after the number of allocated bytes has been exhausted to allow empty frames
        // to be sent. Making progress means the active streams rooted at the connection stream has changed.
        int oldIsActiveCountForTree;
        do {
            oldIsActiveCountForTree = connectionState.activeCountForTree;
            // connectionState will never be active, so go right to its children.
            maxBytes -= distributeToChildren(maxBytes, writer, connectionState);
        } while (connectionState.activeCountForTree != 0 &&
                (maxBytes > 0 || oldIsActiveCountForTree != connectionState.activeCountForTree));

        return connectionState.activeCountForTree != 0;
    }

    /**
     * Sets the amount of bytes that will be allocated to each stream. Defaults to 1KiB.
     * @param allocationQuantum the amount of bytes that will be allocated to each stream. Must be > 0.
     */
    public void allocationQuantum(int allocationQuantum) {
        if (allocationQuantum <= 0) {
            throw new IllegalArgumentException("allocationQuantum must be > 0");
        }
        this.allocationQuantum = allocationQuantum;
    }

    /**
     * Writes up to {@code maxBytes} for {@code state} if it is active, otherwise delegates the
     * allocation to its children.
     */
    private int distribute(int maxBytes, Writer writer, State state) throws Http2Exception {
        if (state.isActive()) {
            int nsent = min(maxBytes, state.streamableBytes);
            state.write(nsent, writer);
            if (nsent == 0 && maxBytes != 0) {
                // If a stream sends zero bytes, then we gave it a chance to write empty frames and it is now
                // considered inactive until the next call to updateStreamableBytes. This allows descendant streams to
                // be allocated bytes when the parent stream can't utilize them. This may be as a result of the
                // stream's flow control window being 0.
                state.updateStreamableBytes(state.streamableBytes, false);
            }
            return nsent;
        }

        return distributeToChildren(maxBytes, writer, state);
    }

    /**
     * It is a pre-condition that {@code state.poll()} returns a non-{@code null} value. This is a result of the way
     * the allocation algorithm is structured and can be explained in the following cases:
     * <h3>For the recursive case</h3>
     * If a stream has no children (in the allocation tree) than that node must be active or it will not be in the
     * allocation tree. If a node is active then it will not delegate to children and recursion ends.
     * <h3>For the initial case</h3>
     * We check connectionState.activeCountForTree == 0 before any allocation is done. So if the connection stream
     * has no active children we don't get into this method.
     */
    private int distributeToChildren(int maxBytes, Writer writer, State state) throws Http2Exception {
        long oldTotalQueuedWeights = state.totalQueuedWeights;
        State childState = state.pollPseudoTimeQueue();
        State nextChildState = state.peekPseudoTimeQueue();
        childState.setDistributing();
        try {
            assert nextChildState == null || nextChildState.pseudoTimeToWrite >= childState.pseudoTimeToWrite :
                "nextChildState[" + nextChildState.streamId + "].pseudoTime(" + nextChildState.pseudoTimeToWrite +
                ") < " + " childState[" + childState.streamId + "].pseudoTime(" + childState.pseudoTimeToWrite + ")";
            int nsent = distribute(nextChildState == null ? maxBytes :
                            min(maxBytes, (int) min((nextChildState.pseudoTimeToWrite - childState.pseudoTimeToWrite) *
                                               childState.weight / oldTotalQueuedWeights + allocationQuantum, MAX_VALUE)
                               ),
                               writer,
                               childState);
            state.pseudoTime += nsent;
            childState.updatePseudoTime(state, nsent, oldTotalQueuedWeights);
            return nsent;
        } finally {
            childState.unsetDistributing();
            // Do in finally to ensure the internal flags is not corrupted if an exception is thrown.
            // The offer operation is delayed until we unroll up the recursive stack, so we don't have to remove from
            // the priority pseudoTimeQueue due to a write operation.
            if (childState.activeCountForTree != 0) {
                state.offerPseudoTimeQueue(childState);
            }
        }
    }

    /**
     * Returns the {@link State} stored in the stream's property storage.
     */
    private State state(Http2Stream stream) {
        return stream.getProperty(stateKey);
    }

    /**
     * Returns the {@link State} for {@code streamId}, consulting the "state only" map when the
     * connection has no {@link Http2Stream} object for it. May return {@code null}.
     */
    private State state(int streamId) {
        Http2Stream stream = connection.stream(streamId);
        return stream != null ? state(stream) : stateOnlyMap.get(streamId);
    }

    /**
     * For testing only!
     */
    int streamableBytes0(Http2Stream stream) {
        return state(stream).streamableBytes;
    }

    /**
     * For testing only!
     */
    boolean isChild(int childId, int parentId, short weight) {
        State parent = state(parentId);
        State child;
        return parent.children.containsKey(childId) &&
                (child = state(childId)).parent == parent && child.weight == weight;
    }

    /**
     * For testing only!
     */
    int numChildren(int streamId) {
        State state = state(streamId);
        return state == null ? 0 : state.children.size();
    }

    /**
     * Notify all listeners of the priority tree change events (in ascending order)
     * @param events The events (top down order) which have changed
     */
    void notifyParentChanged(List<ParentChangedEvent> events) {
        for (int i = 0; i < events.size(); ++i) {
            ParentChangedEvent event = events.get(i);
            stateOnlyRemovalQueue.priorityChanged(event.state);
            if (event.state.parent != null && event.state.activeCountForTree != 0) {
                event.state.parent.offerAndInitializePseudoTime(event.state);
                event.state.parent.activeCountChangeForTree(event.state.activeCountForTree);
            }
        }
    }

    /**
     * A comparator for {@link State} which has no associated {@link Http2Stream} object. The general precedence is:
     * <ul>
     *     <li>Was a stream activated or reserved (streams only used for priority are higher priority)</li>
     *     <li>Depth in the priority tree (closer to root is higher priority></li>
     *     <li>Stream ID (higher stream ID is higher priority - used for tie breaker)</li>
     * </ul>
     */
    private static final class StateOnlyComparator implements Comparator<State>, Serializable {
        private static final long serialVersionUID = -4806936913002105966L;

        static final StateOnlyComparator INSTANCE = new StateOnlyComparator();

        private StateOnlyComparator() {
        }

        @Override
        public int compare(State o1, State o2) {
            // "priority only streams" (which have not been activated) are higher priority than streams used for data.
            boolean o1Actived = o1.wasStreamReservedOrActivated();
            if (o1Actived != o2.wasStreamReservedOrActivated()) {
                return o1Actived ? -1 : 1;
            }
            // Numerically greater depth is higher priority.
            int x = o2.dependencyTreeDepth - o1.dependencyTreeDepth;

            // I also considered tracking the number of streams which are "activated" (eligible transfer data) at each
            // subtree. This would require a traversal from each node to the root on dependency tree structural changes,
            // and then it would require a re-prioritization at each of these nodes (instead of just the nodes where the
            // direct parent changed). The costs of this are judged to be relatively high compared to the nominal
            // benefit it provides to the heuristic. Instead folks should just increase maxStateOnlySize.

            // Last resort is to give larger stream ids more priority.
            return x != 0 ? x : o1.streamId - o2.streamId;
        }
    }

    private static final class StatePseudoTimeComparator implements Comparator<State>, Serializable {
        private static final long serialVersionUID = -1437548640227161828L;

        static final StatePseudoTimeComparator INSTANCE = new StatePseudoTimeComparator();

        private StatePseudoTimeComparator() {
        }

        @Override
        public int compare(State o1, State o2) {
            return MathUtil.compare(o1.pseudoTimeToWrite, o2.pseudoTimeToWrite);
        }
    }

    /**
     * The remote flow control state for a single stream.
     */
    private final class State implements PriorityQueueNode {
        private static final byte STATE_IS_ACTIVE = 0x1;
        private static final byte STATE_IS_DISTRIBUTING = 0x2;
        private static final byte STATE_STREAM_ACTIVATED = 0x4;

        /**
         * Maybe {@code null} if the stream if the stream is not active.
         */
        Http2Stream stream;
        State parent;
        IntObjectMap<State> children = IntCollections.emptyMap();
        private final PriorityQueue<State> pseudoTimeQueue;
        final int streamId;
        int streamableBytes;
        int dependencyTreeDepth;
        /**
         * Count of nodes rooted at this sub tree with {@link #isActive()} equal to {@code true}.
         */
        int activeCountForTree;
        private int pseudoTimeQueueIndex = INDEX_NOT_IN_QUEUE;
        private int stateOnlyQueueIndex = INDEX_NOT_IN_QUEUE;
        /**
         * An estimate of when this node should be given the opportunity to write data.
         */
        long pseudoTimeToWrite;
        /**
         * A pseudo time maintained for immediate children to base their {@link #pseudoTimeToWrite} off of.
         */
        long pseudoTime;
        long totalQueuedWeights;
        private byte flags;
        short weight = DEFAULT_PRIORITY_WEIGHT;

        State(int streamId) {
            this(streamId, null, 0);
        }

        State(Http2Stream stream) {
            this(stream, 0);
        }

        State(Http2Stream stream, int initialSize) {
            this(stream.id(), stream, initialSize);
        }

        State(int streamId, Http2Stream stream, int initialSize) {
            this.stream = stream;
            this.streamId = streamId;
            pseudoTimeQueue = new DefaultPriorityQueue<State>(StatePseudoTimeComparator.INSTANCE, initialSize);
        }

        /**
         * Returns {@code true} if {@code state} appears anywhere on this node's parent chain.
         */
        boolean isDescendantOf(State state) {
            State next = parent;
            while (next != null) {
                if (next == state) {
                    return true;
                }
                next = next.parent;
            }
            return false;
        }

        void takeChild(State child, boolean exclusive, List<ParentChangedEvent> events) {
            takeChild(null, child, exclusive, events);
        }

        /**
         * Adds a child to this priority. If exclusive is set, any children of this node are moved to being dependent on
         * the child.
         */
        void takeChild(Iterator<IntObjectMap.PrimitiveEntry<State>> childItr, State child, boolean exclusive,
                       List<ParentChangedEvent> events) {
            State oldParent = child.parent;

            if (oldParent != this) {
                events.add(new ParentChangedEvent(child, oldParent));
                child.setParent(this);
                // If the childItr is not null we are iterating over the oldParent.children collection and should
                // use the iterator to remove from the collection to avoid concurrent modification. Otherwise it is
                // assumed we are not iterating over this collection and it is safe to call remove directly.
                if (childItr != null) {
                    childItr.remove();
                } else if (oldParent != null) {
                    oldParent.children.remove(child.streamId);
                }

                // Lazily initialize the children to save object allocations.
                initChildrenIfEmpty();

                final State oldChild = children.put(child.streamId, child);
                assert oldChild == null : "A stream with the same stream ID was already in the child map.";
            }

            if (exclusive && !children.isEmpty()) {
                // If it was requested that this child be the exclusive dependency of this node,
                // move any previous children to the child node, becoming grand children of this node.
                Iterator<IntObjectMap.PrimitiveEntry<State>> itr = removeAllChildrenExcept(child).entries().iterator();
                while (itr.hasNext()) {
                    child.takeChild(itr, itr.next().value(), false, events);
                }
            }
        }

        /**
         * Removes the child priority and moves any of its dependencies to being direct dependencies on this node.
         */
        void removeChild(State child) {
            if (children.remove(child.streamId) != null) {
                List<ParentChangedEvent> events = new ArrayList<ParentChangedEvent>(1 + child.children.size());
                events.add(new ParentChangedEvent(child, child.parent));
                child.setParent(null);

                // Move up any grand children to be directly dependent on this node.
                Iterator<IntObjectMap.PrimitiveEntry<State>> itr = child.children.entries().iterator();
                while (itr.hasNext()) {
                    takeChild(itr, itr.next().value(), false, events);
                }

                notifyParentChanged(events);
            }
        }

        /**
         * Remove all children with the exception of {@code streamToRetain}.
         * This method is intended to be used to support an exclusive priority dependency operation.
         * @return The map of children prior to this operation, excluding {@code streamToRetain} if present.
         */
        private IntObjectMap<State> removeAllChildrenExcept(State stateToRetain) {
            stateToRetain = children.remove(stateToRetain.streamId);
            IntObjectMap<State> prevChildren = children;
            // This map should be re-initialized in anticipation for the 1 exclusive child which will be added.
            // It will either be added directly in this method, or after this method is called...but it will be added.
            initChildren();
            if (stateToRetain != null) {
                children.put(stateToRetain.streamId, stateToRetain);
            }
            return prevChildren;
        }

        private void setParent(State newParent) {
            // if activeCountForTree == 0 then it will not be in its parent's pseudoTimeQueue.
            if (activeCountForTree != 0 && parent != null) {
                parent.removePseudoTimeQueue(this);
                parent.activeCountChangeForTree(-activeCountForTree);
            }
            parent = newParent;
            // Use MAX_VALUE if no parent because lower depth is considered higher priority by StateOnlyComparator.
            dependencyTreeDepth = newParent == null ? MAX_VALUE : newParent.dependencyTreeDepth + 1;
        }

        private void initChildrenIfEmpty() {
            if (children == IntCollections.<State>emptyMap()) {
                initChildren();
            }
        }

        private void initChildren() {
            children = new IntObjectHashMap<State>(INITIAL_CHILDREN_MAP_SIZE);
        }

        void write(int numBytes, Writer writer) throws Http2Exception {
            assert stream != null;
            try {
                writer.write(stream, numBytes);
            } catch (Throwable t) {
                throw connectionError(INTERNAL_ERROR, t, "byte distribution write error");
            }
        }

        void activeCountChangeForTree(int increment) {
            assert activeCountForTree + increment >= 0;
            activeCountForTree += increment;
            if (parent != null) {
                assert activeCountForTree != increment ||
                       pseudoTimeQueueIndex == INDEX_NOT_IN_QUEUE ||
                       parent.pseudoTimeQueue.containsTyped(this) :
                     "State[" + streamId + "].activeCountForTree changed from 0 to " + increment + " is in a " +
                     "pseudoTimeQueue, but not in parent[ " + parent.streamId + "]'s pseudoTimeQueue";
                if (activeCountForTree == 0) {
                    parent.removePseudoTimeQueue(this);
                } else if (activeCountForTree == increment && !isDistributing()) {
                    // If frame count was 0 but is now not, and this node is not already in a pseudoTimeQueue (assumed
                    // to be pState's pseudoTimeQueue) then enqueue it. If this State object is being processed the
                    // pseudoTime for this node should not be adjusted, and the node will be added back to the
                    // pseudoTimeQueue/tree structure after it is done being processed. This may happen if the
                    // activeCountForTree == 0 (a node which can't stream anything and is blocked) is at/near root of
                    // the tree, and is popped off the pseudoTimeQueue during processing, and then put back on the
                    // pseudoTimeQueue because a child changes position in the priority tree (or is closed because it is
                    // not blocked and finished writing all data).
                    parent.offerAndInitializePseudoTime(this);
                }
                parent.activeCountChangeForTree(increment);
            }
        }

        void updateStreamableBytes(int newStreamableBytes, boolean isActive) {
            if (isActive() != isActive) {
                if (isActive) {
                    activeCountChangeForTree(1);
                    setActive();
                } else {
                    activeCountChangeForTree(-1);
                    unsetActive();
                }
            }

            streamableBytes = newStreamableBytes;
        }

        /**
         * Assumes the parents {@link #totalQueuedWeights} includes this node's weight.
         */
        void updatePseudoTime(State parentState, int nsent, long totalQueuedWeights) {
            assert streamId != CONNECTION_STREAM_ID && nsent >= 0;
            // If the current pseudoTimeToSend is greater than parentState.pseudoTime then we previously over accounted
            // and should use parentState.pseudoTime.
            pseudoTimeToWrite = min(pseudoTimeToWrite, parentState.pseudoTime) + nsent * totalQueuedWeights / weight;
        }

        /**
         * The concept of pseudoTime can be influenced by priority tree manipulations or if a stream goes from "active"
         * to "non-active". This method accounts for that by initializing the {@link #pseudoTimeToWrite} for
         * {@code state} to {@link #pseudoTime} of this node and then calls {@link #offerPseudoTimeQueue(State)}.
         */
        void offerAndInitializePseudoTime(State state) {
            state.pseudoTimeToWrite = pseudoTime;
            offerPseudoTimeQueue(state);
        }

        void offerPseudoTimeQueue(State state) {
            pseudoTimeQueue.offer(state);
            totalQueuedWeights += state.weight;
        }

        /**
         * Must only be called if the pseudoTimeQueue is non-empty!
         */
        State pollPseudoTimeQueue() {
            State state = pseudoTimeQueue.poll();
            // This method is only ever called if the pseudoTimeQueue is non-empty.
            totalQueuedWeights -= state.weight;
            return state;
        }

        void removePseudoTimeQueue(State state) {
            if (pseudoTimeQueue.removeTyped(state)) {
                totalQueuedWeights -= state.weight;
            }
        }

        State peekPseudoTimeQueue() {
            return pseudoTimeQueue.peek();
        }

        void close() {
            updateStreamableBytes(0, false);
            stream = null;
        }

        boolean wasStreamReservedOrActivated() {
            return (flags & STATE_STREAM_ACTIVATED) != 0;
        }

        void setStreamReservedOrActivated() {
            flags |= STATE_STREAM_ACTIVATED;
        }

        boolean isActive() {
            return (flags & STATE_IS_ACTIVE) != 0;
        }

        private void setActive() {
            flags |= STATE_IS_ACTIVE;
        }

        private void unsetActive() {
            flags &= ~STATE_IS_ACTIVE;
        }

        boolean isDistributing() {
            return (flags & STATE_IS_DISTRIBUTING) != 0;
        }

        void setDistributing() {
            flags |= STATE_IS_DISTRIBUTING;
        }

        void unsetDistributing() {
            flags &= ~STATE_IS_DISTRIBUTING;
        }

        @Override
        public int priorityQueueIndex(DefaultPriorityQueue<?> queue) {
            return queue == stateOnlyRemovalQueue ? stateOnlyQueueIndex : pseudoTimeQueueIndex;
        }

        @Override
        public void priorityQueueIndex(DefaultPriorityQueue<?> queue, int i) {
            if (queue == stateOnlyRemovalQueue) {
                stateOnlyQueueIndex = i;
            } else {
                pseudoTimeQueueIndex = i;
            }
        }

        @Override
        public String toString() {
            // Use activeCountForTree as a rough estimate for how many nodes are in this subtree.
            StringBuilder sb = new StringBuilder(256 * (activeCountForTree > 0 ? activeCountForTree : 1));
            toString(sb);
            return sb.toString();
        }

        private void toString(StringBuilder sb) {
            sb.append("{streamId ").append(streamId)
                    .append(" streamableBytes ").append(streamableBytes)
                    .append(" activeCountForTree ").append(activeCountForTree)
                    .append(" pseudoTimeQueueIndex ").append(pseudoTimeQueueIndex)
                    .append(" pseudoTimeToWrite ").append(pseudoTimeToWrite)
                    .append(" pseudoTime ").append(pseudoTime)
                    .append(" flags ").append(flags)
                    .append(" pseudoTimeQueue.size() ").append(pseudoTimeQueue.size())
                    .append(" stateOnlyQueueIndex ").append(stateOnlyQueueIndex)
                    .append(" parent.streamId ").append(parent == null ? -1 : parent.streamId).append("} [");

            if (!pseudoTimeQueue.isEmpty()) {
                for (State s : pseudoTimeQueue) {
                    s.toString(sb);
                    sb.append(", ");
                }
                // Remove the last ", "
                sb.setLength(sb.length() - 2);
            }
            sb.append(']');
        }
    }

    /**
     * Allows a correlation to be made between a stream and its old parent before a parent change occurs.
     */
    private static final class ParentChangedEvent {
        final State state;
        final State oldParent;

        /**
         * Create a new instance.
         * @param state The state who has had a parent change.
         * @param oldParent The previous parent.
         */
        ParentChangedEvent(State state, State oldParent) {
            this.state = state;
            this.oldParent = oldParent;
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.runtime.library.api;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.tez.common.annotation.ConfigurationClass;
import org.apache.tez.common.annotation.ConfigurationProperty;
import org.apache.tez.runtime.library.conf.OrderedPartitionedKVOutputConfig.SorterImpl;
/**
 * Meant for user configurable job properties.
 * <p/>
 * Note for developers: Whenever a new key is added to this file, it must also be added to the set of
 * known tezRuntimeKeys.
 * @see <a href="../../../../../../configs/TezRuntimeConfiguration.html">Detailed Configuration Information</a>
 * @see <a href="../../../../../configs/tez-runtime-default-template.xml">XML-based Config Template</a>
 */
// TODO EVENTUALLY A description for each property.
@Public
@Evolving
@ConfigurationClass(templateFileName = "tez-runtime-default-template.xml")
public class TezRuntimeConfiguration {

  private static final String TEZ_RUNTIME_PREFIX = "tez.runtime.";

  // Registry of every known "tez.runtime." key. Populated in the static
  // initializer at the bottom of this class and used there to split the
  // default Configuration into runtime keys vs. everything else.
  private static final Set<String> tezRuntimeKeys = new HashSet<String>();
  // Read-only view handed out by getRuntimeConfigKeySet().
  // (Field renamed: was misspelled "umnodifiableTezRuntimeKeySet".)
  private static final Set<String> unmodifiableTezRuntimeKeySet;
  private static final Set<String> otherKeys = new HashSet<String>();
  private static final Set<String> unmodifiableOtherKeySet;
  // Defaults loaded from core-default/core-site/tez-site in the static
  // initializer; the reference is never reassigned, hence final.
  private static final Configuration defaultConf = new Configuration(false);
  private static final Map<String, String> tezRuntimeConfMap = new HashMap<String, String>();
  private static final Map<String, String> otherConfMap = new HashMap<String, String>();

  /**
   * Prefixes from Hadoop configuration which are allowed.
   */
  private static final List<String> allowedPrefixes = new ArrayList<String>();
  private static final List<String> unmodifiableAllowedPrefixes;

  static {
    // Route the legacy "tez.runtime.sort.threads" key to its replacement.
    // Legal forward reference: the replacement is a compile-time constant.
    Configuration.addDeprecation("tez.runtime.sort.threads",
        TezRuntimeConfiguration.TEZ_RUNTIME_PIPELINED_SORTER_SORT_THREADS);
  }

  /**
   * Configuration key to enable/disable IFile readahead.
   */
  @ConfigurationProperty(type = "boolean")
  public static final String TEZ_RUNTIME_IFILE_READAHEAD = TEZ_RUNTIME_PREFIX +
      "ifile.readahead";
  public static final boolean TEZ_RUNTIME_IFILE_READAHEAD_DEFAULT = true;

  /**
   * Configuration key to set the IFile readahead length in bytes.
   */
  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_IFILE_READAHEAD_BYTES = TEZ_RUNTIME_PREFIX +
      "ifile.readahead.bytes";
  public static final int TEZ_RUNTIME_IFILE_READAHEAD_BYTES_DEFAULT =
      4 * 1024 * 1024;

  // -1 means "no explicit buffer size configured".
  public static final int TEZ_RUNTIME_IFILE_BUFFER_SIZE_DEFAULT = -1;

  /**
   * This is copy of io.file.buffer.size from Hadoop, which is used in several places such
   * as compression codecs, buffer sizes in IFile, while fetching etc.
   * Variable exists so that it can be referenced, instead of using the string name directly.
   */
  public static final String TEZ_RUNTIME_IO_FILE_BUFFER_SIZE = "io.file.buffer.size";

  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_IO_SORT_FACTOR = TEZ_RUNTIME_PREFIX +
      "io.sort.factor";
  public static final int TEZ_RUNTIME_IO_SORT_FACTOR_DEFAULT = 100;

  @ConfigurationProperty(type = "float")
  public static final String TEZ_RUNTIME_SORT_SPILL_PERCENT = TEZ_RUNTIME_PREFIX +
      "sort.spill.percent";
  public static final float TEZ_RUNTIME_SORT_SPILL_PERCENT_DEFAULT = 0.8f;

  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_IO_SORT_MB = TEZ_RUNTIME_PREFIX + "io.sort.mb";
  public static final int TEZ_RUNTIME_IO_SORT_MB_DEFAULT = 100;

  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_INDEX_CACHE_MEMORY_LIMIT_BYTES = TEZ_RUNTIME_PREFIX +
      "index.cache.memory.limit.bytes";
  public static final int TEZ_RUNTIME_INDEX_CACHE_MEMORY_LIMIT_BYTES_DEFAULT =
      1024 * 1024;

  // TODO Use the default value
  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_COMBINE_MIN_SPILLS = TEZ_RUNTIME_PREFIX +
      "combine.min.spills";
  public static final int TEZ_RUNTIME_COMBINE_MIN_SPILLS_DEFAULT = 3;

  /**
   * Tries to allocate @link{#TEZ_RUNTIME_IO_SORT_MB} in chunks specified in
   * this parameter.
   */
  @ConfigurationProperty(type = "integer")
  public static final String
      TEZ_RUNTIME_PIPELINED_SORTER_MIN_BLOCK_SIZE_IN_MB = TEZ_RUNTIME_PREFIX +
      "pipelined.sorter.min-block.size.in.mb";
  public static final int
      TEZ_RUNTIME_PIPELINED_SORTER_MIN_BLOCK_SIZE_IN_MB_DEFAULT = 2000;

  /**
   * Setting this to true would enable sorter
   * to auto-allocate memory on need basis in progressive fashion.
   *
   * Setting to false would allocate all available memory during
   * initialization of sorter. In such cases,@link{#TEZ_RUNTIME_PIPELINED_SORTER_MIN_BLOCK_SIZE_IN_MB}
   * would be honored and memory specified in @link{#TEZ_RUNTIME_IO_SORT_MB}
   * would be initialized upfront.
   */
  @ConfigurationProperty(type = "boolean")
  public static final String TEZ_RUNTIME_PIPELINED_SORTER_LAZY_ALLOCATE_MEMORY = TEZ_RUNTIME_PREFIX +
      "pipelined.sorter.lazy-allocate.memory";
  public static final boolean
      TEZ_RUNTIME_PIPELINED_SORTER_LAZY_ALLOCATE_MEMORY_DEFAULT = false;

  /**
   * String value.
   * Which sorter implementation to use.
   * Valid values:
   * - LEGACY
   * - PIPELINED ( default )
   * {@link org.apache.tez.runtime.library.conf.OrderedPartitionedKVOutputConfig.SorterImpl}
   */
  @ConfigurationProperty
  public static final String TEZ_RUNTIME_SORTER_CLASS = TEZ_RUNTIME_PREFIX +
      "sorter.class";
  public static final String TEZ_RUNTIME_SORTER_CLASS_DEFAULT = SorterImpl.PIPELINED.name();

  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_PIPELINED_SORTER_SORT_THREADS = TEZ_RUNTIME_PREFIX +
      "pipelined.sorter.sort.threads";
  public static final int TEZ_RUNTIME_PIPELINED_SORTER_SORT_THREADS_DEFAULT = 2;

  /**
   * Integer value. Percentage of buffer to be filled before we spill to disk. Default value is 0,
   * which will spill for every buffer.
   */
  // NOTE: annotation type normalized from "int" to "integer" for consistency
  // with every other integer property in this file.
  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_UNORDERED_PARTITIONED_KVWRITER_BUFFER_MERGE_PERCENT =
      TEZ_RUNTIME_PREFIX + "unordered-partitioned-kvwriter.buffer-merge-percent";
  public static final int TEZ_RUNTIME_UNORDERED_PARTITIONED_KVWRITER_BUFFER_MERGE_PERCENT_DEFAULT =
      0;

  /**
   * Report partition statistics (e.g better scheduling in ShuffleVertexManager). TEZ-2496
   * This can be enabled/disabled at vertex level.
   * {@link org.apache.tez.runtime.library.api.TezRuntimeConfiguration.ReportPartitionStats}
   * defines the list of values that can be specified.
   * TODO TEZ-3303 Given ShuffleVertexManager doesn't consume precise stats
   * yet. So do not set the value to "precise" yet when ShuffleVertexManager is used.
   */
  @ConfigurationProperty
  public static final String TEZ_RUNTIME_REPORT_PARTITION_STATS =
      TEZ_RUNTIME_PREFIX + "report.partition.stats";
  public static final String TEZ_RUNTIME_REPORT_PARTITION_STATS_DEFAULT =
      ReportPartitionStats.MEMORY_OPTIMIZED.getType();

  /**
   * Size of the buffer to use if not writing directly to disk.
   */
  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_UNORDERED_OUTPUT_BUFFER_SIZE_MB = TEZ_RUNTIME_PREFIX +
      "unordered.output.buffer.size-mb";
  public static final int TEZ_RUNTIME_UNORDERED_OUTPUT_BUFFER_SIZE_MB_DEFAULT = 100;

  /**
   * Maximum size for individual buffers used in the UnsortedPartitionedOutput.
   * This is only meant to be used by unit tests for now.
   */
  @Private
  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_UNORDERED_OUTPUT_MAX_PER_BUFFER_SIZE_BYTES =
      TEZ_RUNTIME_PREFIX +
          "unordered.output.max-per-buffer.size-bytes";

  /**
   * Specifies a partitioner class, which is used in Tez Runtime components
   * like OnFileSortedOutput
   */
  @ConfigurationProperty
  public static final String TEZ_RUNTIME_PARTITIONER_CLASS =
      TEZ_RUNTIME_PREFIX + "partitioner.class";

  /**
   * Specifies a combiner class (primarily for Shuffle)
   */
  @ConfigurationProperty
  public static final String TEZ_RUNTIME_COMBINER_CLASS = TEZ_RUNTIME_PREFIX + "combiner.class";

  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_SHUFFLE_PARALLEL_COPIES = TEZ_RUNTIME_PREFIX +
      "shuffle.parallel.copies";
  public static final int TEZ_RUNTIME_SHUFFLE_PARALLEL_COPIES_DEFAULT = 20;

  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_SHUFFLE_FETCH_FAILURES_LIMIT = TEZ_RUNTIME_PREFIX +
      "shuffle.fetch.failures.limit";
  public static final int TEZ_RUNTIME_SHUFFLE_FETCH_FAILURES_LIMIT_DEFAULT = 5;

  /**
   * Specifies in milliseconds the maximum delay a penalized host can have before being retried,
   * defaults to 10 minutes.
   */
  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_SHUFFLE_HOST_PENALTY_TIME_LIMIT_MS = TEZ_RUNTIME_PREFIX +
      "shuffle.host.penalty.time.limit";
  public static final int TEZ_RUNTIME_SHUFFLE_HOST_PENALTY_TIME_LIMIT_MS_DEFAULT = 600000;

  /**
   * Expert setting made available only for debugging. Do not change it. Sets
   * the number of retries before giving up on downloading from source
   * attempt by consumer. Code internally handles the threshold if set to -1.
   */
  @Private
  @Unstable
  @ConfigurationProperty(type = "integer")
  public static final String
      TEZ_RUNTIME_SHUFFLE_SOURCE_ATTEMPT_ABORT_LIMIT =
      TEZ_RUNTIME_PREFIX + "shuffle.src-attempt.abort.limit";
  public static final int
      TEZ_RUNTIME_SHUFFLE_SOURCE_ATTEMPT_ABORT_LIMIT_DEFAULT = -1;

  /**
   * Expert setting made available only for debugging. Do not change it. Setting
   * to determine if failures happened across a percentage of nodes. This
   * helps in determining if the consumer has to be restarted on continuous
   * failures. Setting it to lower value can make consumer restarts more
   * aggressive on failures.
   */
  @Private
  @Unstable
  @ConfigurationProperty(type = "float")
  public static final String
      TEZ_RUNTIME_SHUFFLE_ACCEPTABLE_HOST_FETCH_FAILURE_FRACTION =
      TEZ_RUNTIME_PREFIX + "shuffle.acceptable.host-fetch.failure.fraction";
  public static final float
      TEZ_RUNTIME_SHUFFLE_ACCEPTABLE_HOST_FETCH_FAILURE_FRACTION_DEFAULT = 0.2f;

  /**
   * Expert setting made available only for debugging. Do not change it. Setting
   * to determine if the consumer has to be restarted on continuous
   * failures across nodes. Used along with {@link
   * TEZ_RUNTIME_SHUFFLE_ACCEPTABLE_HOST_FETCH_FAILURE_FRACTION}.
   */
  @Private
  @Unstable
  @ConfigurationProperty(type = "integer")
  public static final String
      TEZ_RUNTIME_SHUFFLE_MIN_FAILURES_PER_HOST =
      TEZ_RUNTIME_PREFIX + "shuffle.min.failures.per.host";
  public static final int TEZ_RUNTIME_SHUFFLE_MIN_FAILURES_PER_HOST_DEFAULT = 4;

  /**
   * Expert setting made available only for debugging. Do not change it.
   * Maximum percentage of time (compared to overall progress), the fetcher is
   * allowed before concluding that it is stalled.
   */
  @Private
  @Unstable
  @ConfigurationProperty(type = "float")
  public static final String TEZ_RUNTIME_SHUFFLE_MAX_STALL_TIME_FRACTION =
      TEZ_RUNTIME_PREFIX + "shuffle.max.stall.time.fraction";
  public static final float
      TEZ_RUNTIME_SHUFFLE_MAX_STALL_TIME_FRACTION_DEFAULT = 0.5f;

  /**
   * Expert setting made available only for debugging. Do not change it.
   * Fraction to determine whether the shuffle has progressed enough or not
   * If it has not progressed enough, it could be qualified for the consumer.
   */
  @Private
  @Unstable
  @ConfigurationProperty(type = "float")
  public static final String
      TEZ_RUNTIME_SHUFFLE_MIN_REQUIRED_PROGRESS_FRACTION =
      TEZ_RUNTIME_PREFIX + "shuffle.min.required.progress.fraction";
  public static final float
      TEZ_RUNTIME_SHUFFLE_MIN_REQUIRED_PROGRESS_FRACTION_DEFAULT = 0.5f;

  /**
   * Expert setting made available only for debugging. Do not change it.
   * Provides threshold for determining whether fetching has to be marked
   * unhealthy based on the ratio of (failures/(failures+completed))
   */
  @Private
  @Unstable
  @ConfigurationProperty(type = "float")
  public static final String
      TEZ_RUNTIME_SHUFFLE_MAX_ALLOWED_FAILED_FETCH_ATTEMPT_FRACTION =
      TEZ_RUNTIME_PREFIX + "shuffle.max.allowed.failed.fetch.fraction";
  public static final float
      TEZ_RUNTIME_SHUFFLE_MAX_ALLOWED_FAILED_FETCH_ATTEMPT_FRACTION_DEFAULT = 0.5f;

  /**
   * Expert setting made available only for debugging. Do not change it.
   * Provides threshold for determining whether fetching has to be marked
   * unhealthy based on the ratio of (failures/(failures+completed))
   */
  @Private
  @Unstable
  @ConfigurationProperty(type = "boolean")
  public static final String
      TEZ_RUNTIME_SHUFFLE_FAILED_CHECK_SINCE_LAST_COMPLETION =
      TEZ_RUNTIME_PREFIX + "shuffle.failed.check.since-last.completion";
  public static final boolean
      TEZ_RUNTIME_SHUFFLE_FAILED_CHECK_SINCE_LAST_COMPLETION_DEFAULT = true;

  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_SHUFFLE_FETCH_MAX_TASK_OUTPUT_AT_ONCE =
      TEZ_RUNTIME_PREFIX +
          "shuffle.fetch.max.task.output.at.once";
  public static final int TEZ_RUNTIME_SHUFFLE_FETCH_MAX_TASK_OUTPUT_AT_ONCE_DEFAULT
      = 20;

  @ConfigurationProperty(type = "boolean")
  public static final String TEZ_RUNTIME_SHUFFLE_NOTIFY_READERROR = TEZ_RUNTIME_PREFIX +
      "shuffle.notify.readerror";
  public static final boolean TEZ_RUNTIME_SHUFFLE_NOTIFY_READERROR_DEFAULT = true;

  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_SHUFFLE_CONNECT_TIMEOUT = TEZ_RUNTIME_PREFIX +
      "shuffle.connect.timeout";
  // NOTE(review): constant name says "STALLED_COPY" but it pairs with the
  // connect-timeout key above; kept as-is since it is public API.
  public static final int TEZ_RUNTIME_SHUFFLE_STALLED_COPY_TIMEOUT_DEFAULT =
      3 * 60 * 1000;

  @ConfigurationProperty(type = "boolean")
  public static final String TEZ_RUNTIME_SHUFFLE_KEEP_ALIVE_ENABLED = TEZ_RUNTIME_PREFIX +
      "shuffle.keep-alive.enabled";
  public static final boolean TEZ_RUNTIME_SHUFFLE_KEEP_ALIVE_ENABLED_DEFAULT = false;

  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_SHUFFLE_KEEP_ALIVE_MAX_CONNECTIONS = TEZ_RUNTIME_PREFIX +
      "shuffle.keep-alive.max.connections";
  public static final int TEZ_RUNTIME_SHUFFLE_KEEP_ALIVE_MAX_CONNECTIONS_DEFAULT = 20;

  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_SHUFFLE_READ_TIMEOUT =
      TEZ_RUNTIME_PREFIX + "shuffle.read.timeout";
  public static final int TEZ_RUNTIME_SHUFFLE_READ_TIMEOUT_DEFAULT =
      3 * 60 * 1000;

  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_SHUFFLE_BUFFER_SIZE =
      TEZ_RUNTIME_PREFIX + "shuffle.buffersize";
  public static final int TEZ_RUNTIME_SHUFFLE_BUFFER_SIZE_DEFAULT =
      8 * 1024;

  @ConfigurationProperty(type = "boolean")
  public static final String TEZ_RUNTIME_SHUFFLE_USE_ASYNC_HTTP = TEZ_RUNTIME_PREFIX +
      "shuffle.use.async.http";
  public static final boolean TEZ_RUNTIME_SHUFFLE_USE_ASYNC_HTTP_DEFAULT = false;

  @ConfigurationProperty(type = "boolean")
  public static final String TEZ_RUNTIME_SHUFFLE_ENABLE_SSL = TEZ_RUNTIME_PREFIX +
      "shuffle.ssl.enable";
  public static final boolean TEZ_RUNTIME_SHUFFLE_ENABLE_SSL_DEFAULT = false;

  /**
   * Controls verification of data checksums when fetching data directly to
   * disk. Enabling verification allows the fetcher to detect corrupted data
   * and report the failure against the upstream task before the data reaches
   * the Processor and causes the fetching task to fail.
   */
  @ConfigurationProperty(type = "boolean")
  public static final String TEZ_RUNTIME_SHUFFLE_FETCH_VERIFY_DISK_CHECKSUM =
      TEZ_RUNTIME_PREFIX + "shuffle.fetch.verify-disk-checksum";
  public static final boolean TEZ_RUNTIME_SHUFFLE_FETCH_VERIFY_DISK_CHECKSUM_DEFAULT = true;

  @ConfigurationProperty(type = "float")
  public static final String TEZ_RUNTIME_SHUFFLE_FETCH_BUFFER_PERCENT = TEZ_RUNTIME_PREFIX +
      "shuffle.fetch.buffer.percent";
  public static final float TEZ_RUNTIME_SHUFFLE_FETCH_BUFFER_PERCENT_DEFAULT =
      0.90f;

  @ConfigurationProperty(type = "float")
  public static final String TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT = TEZ_RUNTIME_PREFIX +
      "shuffle.memory.limit.percent";
  public static final float TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT_DEFAULT =
      0.25f;

  // Rename to fraction
  @ConfigurationProperty(type = "float")
  public static final String TEZ_RUNTIME_SHUFFLE_MERGE_PERCENT = TEZ_RUNTIME_PREFIX +
      "shuffle.merge.percent";
  public static final float TEZ_RUNTIME_SHUFFLE_MERGE_PERCENT_DEFAULT = 0.90f;

  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_SHUFFLE_MEMTOMEM_SEGMENTS = TEZ_RUNTIME_PREFIX +
      "shuffle.memory-to-memory.segments";

  @ConfigurationProperty(type = "boolean")
  public static final String TEZ_RUNTIME_SHUFFLE_ENABLE_MEMTOMEM = TEZ_RUNTIME_PREFIX +
      "shuffle.memory-to-memory.enable";
  public static final boolean TEZ_RUNTIME_SHUFFLE_ENABLE_MEMTOMEM_DEFAULT =
      false;

  @Private
  @Unstable
  @ConfigurationProperty(type = "boolean")
  public static final String TEZ_RUNTIME_SHUFFLE_FETCHER_USE_SHARED_POOL = TEZ_RUNTIME_PREFIX +
      "shuffle.fetcher.use-shared-pool";
  public static final boolean TEZ_RUNTIME_SHUFFLE_FETCHER_USE_SHARED_POOL_DEFAULT = false;

  @ConfigurationProperty(type = "float")
  public static final String TEZ_RUNTIME_INPUT_POST_MERGE_BUFFER_PERCENT = TEZ_RUNTIME_PREFIX +
      "task.input.post-merge.buffer.percent";
  // NOTE(review): default constant name omits "POST_MERGE"; kept as-is since
  // it is public API.
  public static final float TEZ_RUNTIME_INPUT_BUFFER_PERCENT_DEFAULT = 0.0f;

  @ConfigurationProperty
  public static final String TEZ_RUNTIME_GROUP_COMPARATOR_CLASS = TEZ_RUNTIME_PREFIX +
      "group.comparator.class";

  @ConfigurationProperty
  public static final String TEZ_RUNTIME_INTERNAL_SORTER_CLASS = TEZ_RUNTIME_PREFIX +
      "internal.sorter.class";

  @ConfigurationProperty
  public static final String TEZ_RUNTIME_KEY_COMPARATOR_CLASS =
      TEZ_RUNTIME_PREFIX + "key.comparator.class";

  @ConfigurationProperty
  public static final String TEZ_RUNTIME_KEY_CLASS = TEZ_RUNTIME_PREFIX + "key.class";

  @ConfigurationProperty
  public static final String TEZ_RUNTIME_VALUE_CLASS = TEZ_RUNTIME_PREFIX + "value.class";

  @ConfigurationProperty(type = "boolean")
  public static final String TEZ_RUNTIME_COMPRESS = TEZ_RUNTIME_PREFIX + "compress";

  @ConfigurationProperty
  public static final String TEZ_RUNTIME_COMPRESS_CODEC = TEZ_RUNTIME_PREFIX + "compress.codec";

  // TODO Move this key to MapReduce
  @ConfigurationProperty
  public static final String TEZ_RUNTIME_KEY_SECONDARY_COMPARATOR_CLASS =
      TEZ_RUNTIME_PREFIX + "key.secondary.comparator.class";

  @ConfigurationProperty(type = "boolean")
  public static final String TEZ_RUNTIME_EMPTY_PARTITION_INFO_VIA_EVENTS_ENABLED =
      TEZ_RUNTIME_PREFIX +
          "empty.partitions.info-via-events.enabled";
  public static final boolean TEZ_RUNTIME_EMPTY_PARTITION_INFO_VIA_EVENTS_ENABLED_DEFAULT = true;

  /**
   * If the shuffle input is on the local host bypass the http fetch and access the files directly
   */
  @ConfigurationProperty(type = "boolean")
  public static final String TEZ_RUNTIME_OPTIMIZE_LOCAL_FETCH = TEZ_RUNTIME_PREFIX + "optimize.local.fetch";
  public static final boolean TEZ_RUNTIME_OPTIMIZE_LOCAL_FETCH_DEFAULT = true;

  /**
   * Expert level setting. Enable pipelined shuffle in ordered outputs and in unordered
   * partitioned outputs. In ordered cases, it works with PipelinedSorter.
   * set tez.runtime.sort.threads to greater than 1 to enable pipelinedsorter.
   * Ensure to set tez.runtime.enable.final-merge.in.output=false.
   * Speculative execution needs to be turned off when using this parameter. //TODO: TEZ-2132
   */
  @ConfigurationProperty(type = "boolean")
  public static final String TEZ_RUNTIME_PIPELINED_SHUFFLE_ENABLED =
      TEZ_RUNTIME_PREFIX + "pipelined-shuffle.enabled";
  public static final boolean TEZ_RUNTIME_PIPELINED_SHUFFLE_ENABLED_DEFAULT = false;

  /**
   * Expert level setting. Enable final merge in ordered (defaultsorter/pipelinedsorter) outputs.
   * Speculative execution needs to be turned off when disabling this parameter. //TODO: TEZ-2132
   */
  @ConfigurationProperty(type = "boolean")
  public static final String TEZ_RUNTIME_ENABLE_FINAL_MERGE_IN_OUTPUT =
      TEZ_RUNTIME_PREFIX + "enable.final-merge.in.output";
  public static final boolean TEZ_RUNTIME_ENABLE_FINAL_MERGE_IN_OUTPUT_DEFAULT = true;

  /**
   * Expert level setting. How long should @link{ShuffleManager} wait for batching
   * before sending the events in milliseconds. Set to -1 to not wait.
   */
  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_SHUFFLE_BATCH_WAIT =
      TEZ_RUNTIME_PREFIX + "shuffle.batch.wait";
  public static final int TEZ_RUNTIME_SHUFFLE_BATCH_WAIT_DEFAULT = -1;

  /**
   * Share data fetched between tasks running on the same host if applicable
   */
  @ConfigurationProperty(type = "boolean")
  public static final String TEZ_RUNTIME_OPTIMIZE_SHARED_FETCH = TEZ_RUNTIME_PREFIX
      + "optimize.shared.fetch";

  /**
   * shared mode bypassing the http fetch is not enabled by default till we have unit tests in.
   */
  public static final boolean TEZ_RUNTIME_OPTIMIZE_SHARED_FETCH_DEFAULT = false;

  /**
   * Used only for internal testing. Strictly not recommended to be used elsewhere. This
   * parameter could be changed/dropped later.
   */
  @Unstable
  @Private
  @ConfigurationProperty(type = "boolean")
  public static final String TEZ_RUNTIME_CLEANUP_FILES_ON_INTERRUPT = TEZ_RUNTIME_PREFIX
      + "cleanup.files.on.interrupt";
  public static final boolean TEZ_RUNTIME_CLEANUP_FILES_ON_INTERRUPT_DEFAULT = false;

  // TODO TEZ-1233 - allow this property to be set per vertex
  // TODO TEZ-1231 - move these properties out since they are not relevant for Inputs / Outputs

  /**
   * Value: Boolean
   * Whether to publish configuration information to History logger. Default false.
   */
  @ConfigurationProperty
  public static final String TEZ_RUNTIME_CONVERT_USER_PAYLOAD_TO_HISTORY_TEXT =
      TEZ_RUNTIME_PREFIX + "convert.user-payload.to.history-text";
  public static final boolean TEZ_RUNTIME_CONVERT_USER_PAYLOAD_TO_HISTORY_TEXT_DEFAULT = false;

  @Unstable
  @Private
  @ConfigurationProperty(type = "integer")
  public static final String TEZ_RUNTIME_RECORDS_BEFORE_PROGRESS = TEZ_RUNTIME_PREFIX +
      "merge.progress.records";
  public static final long TEZ_RUNTIME_RECORDS_BEFORE_PROGRESS_DEFAULT = 10000;

  static {
    // Register every known runtime key. Any key declared above and not added
    // here will be misclassified as an "other" key below.
    tezRuntimeKeys.add(TEZ_RUNTIME_IFILE_READAHEAD);
    tezRuntimeKeys.add(TEZ_RUNTIME_IFILE_READAHEAD_BYTES);
    tezRuntimeKeys.add(TEZ_RUNTIME_IO_FILE_BUFFER_SIZE);
    tezRuntimeKeys.add(TEZ_RUNTIME_IO_SORT_FACTOR);
    tezRuntimeKeys.add(TEZ_RUNTIME_SORT_SPILL_PERCENT);
    tezRuntimeKeys.add(TEZ_RUNTIME_IO_SORT_MB);
    tezRuntimeKeys.add(TEZ_RUNTIME_INDEX_CACHE_MEMORY_LIMIT_BYTES);
    tezRuntimeKeys.add(TEZ_RUNTIME_COMBINE_MIN_SPILLS);
    tezRuntimeKeys.add(TEZ_RUNTIME_PIPELINED_SORTER_SORT_THREADS);
    tezRuntimeKeys.add(TEZ_RUNTIME_PIPELINED_SORTER_MIN_BLOCK_SIZE_IN_MB);
    tezRuntimeKeys.add(TEZ_RUNTIME_PIPELINED_SORTER_LAZY_ALLOCATE_MEMORY);
    tezRuntimeKeys.add(TEZ_RUNTIME_UNORDERED_OUTPUT_BUFFER_SIZE_MB);
    tezRuntimeKeys.add(TEZ_RUNTIME_UNORDERED_OUTPUT_MAX_PER_BUFFER_SIZE_BYTES);
    tezRuntimeKeys.add(TEZ_RUNTIME_PARTITIONER_CLASS);
    tezRuntimeKeys.add(TEZ_RUNTIME_COMBINER_CLASS);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_USE_ASYNC_HTTP);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_PARALLEL_COPIES);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_FETCH_FAILURES_LIMIT);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_FETCH_MAX_TASK_OUTPUT_AT_ONCE);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_NOTIFY_READERROR);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_CONNECT_TIMEOUT);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_KEEP_ALIVE_ENABLED);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_KEEP_ALIVE_MAX_CONNECTIONS);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_READ_TIMEOUT);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_BUFFER_SIZE);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_ENABLE_SSL);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_FETCH_VERIFY_DISK_CHECKSUM);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_FETCH_BUFFER_PERCENT);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_MEMORY_LIMIT_PERCENT);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_MERGE_PERCENT);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_MEMTOMEM_SEGMENTS);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_ENABLE_MEMTOMEM);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_ACCEPTABLE_HOST_FETCH_FAILURE_FRACTION);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_MIN_FAILURES_PER_HOST);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_MAX_STALL_TIME_FRACTION);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_SOURCE_ATTEMPT_ABORT_LIMIT);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_MAX_ALLOWED_FAILED_FETCH_ATTEMPT_FRACTION);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_MIN_REQUIRED_PROGRESS_FRACTION);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_FAILED_CHECK_SINCE_LAST_COMPLETION);
    tezRuntimeKeys.add(TEZ_RUNTIME_REPORT_PARTITION_STATS);
    tezRuntimeKeys.add(TEZ_RUNTIME_INPUT_POST_MERGE_BUFFER_PERCENT);
    tezRuntimeKeys.add(TEZ_RUNTIME_GROUP_COMPARATOR_CLASS);
    tezRuntimeKeys.add(TEZ_RUNTIME_INTERNAL_SORTER_CLASS);
    tezRuntimeKeys.add(TEZ_RUNTIME_KEY_COMPARATOR_CLASS);
    tezRuntimeKeys.add(TEZ_RUNTIME_KEY_CLASS);
    tezRuntimeKeys.add(TEZ_RUNTIME_VALUE_CLASS);
    tezRuntimeKeys.add(TEZ_RUNTIME_COMPRESS);
    tezRuntimeKeys.add(TEZ_RUNTIME_COMPRESS_CODEC);
    tezRuntimeKeys.add(TEZ_RUNTIME_KEY_SECONDARY_COMPARATOR_CLASS);
    tezRuntimeKeys.add(TEZ_RUNTIME_EMPTY_PARTITION_INFO_VIA_EVENTS_ENABLED);
    tezRuntimeKeys.add(TEZ_RUNTIME_PIPELINED_SHUFFLE_ENABLED);
    tezRuntimeKeys.add(TEZ_RUNTIME_ENABLE_FINAL_MERGE_IN_OUTPUT);
    tezRuntimeKeys.add(TEZ_RUNTIME_RECORDS_BEFORE_PROGRESS);
    tezRuntimeKeys.add(TEZ_RUNTIME_OPTIMIZE_LOCAL_FETCH);
    tezRuntimeKeys.add(TEZ_RUNTIME_OPTIMIZE_SHARED_FETCH);
    tezRuntimeKeys.add(TEZ_RUNTIME_CONVERT_USER_PAYLOAD_TO_HISTORY_TEXT);
    tezRuntimeKeys.add(TEZ_RUNTIME_SORTER_CLASS);
    tezRuntimeKeys.add(TEZ_RUNTIME_CLEANUP_FILES_ON_INTERRUPT);
    tezRuntimeKeys.add(TEZ_RUNTIME_UNORDERED_PARTITIONED_KVWRITER_BUFFER_MERGE_PERCENT);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_FETCHER_USE_SHARED_POOL);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_HOST_PENALTY_TIME_LIMIT_MS);
    tezRuntimeKeys.add(TEZ_RUNTIME_SHUFFLE_BATCH_WAIT);

    // Load defaults and split them into runtime vs. other properties.
    defaultConf.addResource("core-default.xml");
    defaultConf.addResource("core-site.xml");
    defaultConf.addResource("tez-site.xml");

    for (Map.Entry<String, String> confEntry : defaultConf) {
      if (tezRuntimeKeys.contains(confEntry.getKey())) {
        tezRuntimeConfMap.put(confEntry.getKey(), confEntry.getValue());
      } else {
        // TODO TEZ-1232 Filter out parameters from TezConfiguration, and Task specific confs
        otherConfMap.put(confEntry.getKey(), confEntry.getValue());
        otherKeys.add(confEntry.getKey());
      }
    }

    // Do NOT need all prefixes from the following list. Only specific ones are allowed
    // "hadoop.", "hadoop.security", "io.", "fs.", "ipc.", "net.", "file.", "dfs.", "ha.", "s3.", "nfs3.", "rpc."
    allowedPrefixes.add("io.");
    allowedPrefixes.add("file.");
    allowedPrefixes.add("fs.");

    unmodifiableTezRuntimeKeySet = Collections.unmodifiableSet(tezRuntimeKeys);
    unmodifiableOtherKeySet = Collections.unmodifiableSet(otherKeys);
    unmodifiableAllowedPrefixes = Collections.unmodifiableList(allowedPrefixes);
  }

  /** @return read-only set of all known "tez.runtime." configuration keys. */
  @Private
  public static Set<String> getRuntimeConfigKeySet() {
    return unmodifiableTezRuntimeKeySet;
  }

  /** @return read-only set of all non-runtime keys found in the default configs. */
  @Private
  public static Set<String> getRuntimeAdditionalConfigKeySet() {
    return unmodifiableOtherKeySet;
  }

  /** @return read-only list of allowed Hadoop configuration key prefixes. */
  @Private
  public static List<String> getAllowedPrefixes() {
    // Fixed: previously returned the mutable allowedPrefixes list even though
    // an unmodifiable view was built and never used; return the view instead.
    return unmodifiableAllowedPrefixes;
  }

  /** @return read-only map of default values for known runtime keys. */
  @Private
  public static Map<String, String> getTezRuntimeConfigDefaults() {
    return Collections.unmodifiableMap(tezRuntimeConfMap);
  }

  /** @return read-only map of default values for non-runtime keys. */
  @Private
  public static Map<String, String> getOtherConfigDefaults() {
    return Collections.unmodifiableMap(otherConfMap);
  }

  /**
   * Valid values for {@link #TEZ_RUNTIME_REPORT_PARTITION_STATS}. The string
   * form (see {@link #getType()}) is what appears in configuration.
   */
  public enum ReportPartitionStats {
    /**
     * Don't report partition stats. It is the same as NONE.
     * It is defined to maintain backward compatibility given
     * Configuration @link{#TEZ_RUNTIME_REPORT_PARTITION_STATS} used
     * to be boolean type.
     */
    @Deprecated
    DISABLED("false"),

    /**
     * Report partition stats. It is the same as MEMORY_OPTIMIZED.
     * It is defined to maintain backward compatibility given
     * Configuration @link{#TEZ_RUNTIME_REPORT_PARTITION_STATS} used
     * to be boolean type.
     */
    @Deprecated
    ENABLED("true"),

    /**
     * Don't report partition stats.
     */
    NONE("none"),

    /**
     * Report partition stats with less precision to reduce
     * memory and CPU overhead
     */
    MEMORY_OPTIMIZED("memory_optimized"),

    /**
     * Report precise partition stats in MB.
     */
    PRECISE("precise");

    // The string form used in configuration values.
    private final String type;

    private ReportPartitionStats(String type) {
      this.type = type;
    }

    public final String getType() {
      return type;
    }

    /** @return true unless this value disables reporting (DISABLED or NONE). */
    public boolean isEnabled() {
      return !equals(ReportPartitionStats.DISABLED) &&
          !equals(ReportPartitionStats.NONE);
    }

    public boolean isPrecise() {
      return equals(ReportPartitionStats.PRECISE);
    }

    /**
     * Case-insensitive lookup of an enum value by its configuration string.
     * @throws IllegalArgumentException if {@code type} matches no value (or is null)
     */
    public static ReportPartitionStats fromString(String type) {
      if (type != null) {
        for (ReportPartitionStats b : ReportPartitionStats.values()) {
          if (type.equalsIgnoreCase(b.type)) {
            return b;
          }
        }
      }
      throw new IllegalArgumentException("Invalid type " + type);
    }
  }
}
| |
/*
* Copyright 2007-2010 The Kuali Foundation
*
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kew.routeheader;
import org.junit.Test;
import org.kuali.rice.kew.api.KewApiServiceLocator;
import org.kuali.rice.kew.api.WorkflowDocument;
import org.kuali.rice.kew.api.WorkflowDocumentFactory;
import org.kuali.rice.kew.api.WorkflowRuntimeException;
import org.kuali.rice.kew.api.action.ActionRequest;
import org.kuali.rice.kew.api.action.ActionRequestType;
import org.kuali.rice.kew.api.document.Document;
import org.kuali.rice.kew.test.KEWTestCase;
import java.util.List;
import java.util.Set;
import static org.junit.Assert.*;
public class AppDocStatusTest extends KEWTestCase {
    /**
     * Loads the standard KEW test data, then the doc type configuration used
     * by the tests in this class (AppDocStatusTestConfig.xml, which defines
     * the "TestAppDocStatusDoc*" document types these tests create).
     */
    protected void loadTestData() throws Exception {
        super.loadTestData();
        loadXmlFile("AppDocStatusTestConfig.xml");
    }
    /**
     * Performs several positive tests related to Application Document Status.
     * For these tests the doctype definition ("TestAppDocStatusDoc2") defines a
     * valid set of statuses, and defines two status transitions in the route path.
     * It tests:
     * - That the AppDocStatus is properly set by the workflow engine during
     *   appropriate transitions.
     * - That the AppDocStatus may be retrieved by the client API.
     * - That the AppDocStatus may be set by the client API.
     * - That a history of AppDocStatus transitions is created.
     */
    @Test public void testValidAppDocStatus() throws Exception {
     // Create document as "ewestfal"; it should start in the "Initiated" node
     WorkflowDocument document = WorkflowDocumentFactory.createDocument(getPrincipalIdForName("ewestfal"), "TestAppDocStatusDoc2");
     document.saveDocumentData();
     assertNotNull(document.getDocumentId());
     assertTrue("Document should be initiatied", document.isInitiated());
     assertTrue("Invalid route level.", document.getNodeNames().contains("Initiated"));

     // route document to first stop and check status, etc.
     // The engine should set the first app doc status, "Approval in Progress".
     document.route("Test Routing.");
     String appDocStatus = document.getDocument().getApplicationDocumentStatus();
     assertTrue("Application Document Status:" + appDocStatus +" is invalid", "Approval in Progress".equalsIgnoreCase(appDocStatus));

     // should have generated a request to "bmcgough"
     document = WorkflowDocumentFactory.loadDocument(getPrincipalIdForName("bmcgough"), document.getDocumentId());
     assertTrue("Document should be enroute", document.isEnroute());
     Set<String> nodeNames = document.getNodeNames();
     assertEquals("Wrong number of node names.", 1, nodeNames.size());
     assertTrue("Wrong node name.", document.getNodeNames().contains("DestinationApproval"));

     // check action request: exactly one APPROVE request to "bmcgough"
     // at the "DestinationApproval" node
     List<ActionRequest> requests = document.getRootActionRequests();
     assertEquals(1, requests.size());
     ActionRequest request = requests.get(0);
     assertEquals(getPrincipalIdForName("bmcgough"), request.getPrincipalId());
     assertEquals(ActionRequestType.APPROVE, request.getActionRequested());
     assertEquals("DestinationApproval", request.getNodeName());
     assertTrue(document.isApprovalRequested());

     // approve the document to send it to its next route node
     document.approve("Test approve by bmcgough");

     // check status: the second engine-driven transition should have set
     // the app doc status to "Submitted"
     document = WorkflowDocumentFactory.loadDocument(getPrincipalIdForName("temay"), document.getDocumentId());
     Document rh = document.getDocument();
     appDocStatus = rh.getApplicationDocumentStatus();
     assertTrue("Application Document Status:" + appDocStatus +" is invalid", "Submitted".equalsIgnoreCase(appDocStatus));

     // should have generated a request to "temay" at "TravelerApproval"
     assertTrue("Document should be enroute", document.isEnroute());
     nodeNames = document.getNodeNames();
     assertEquals("Wrong number of node names.", 1, nodeNames.size());
     assertTrue("Wrong node name.", nodeNames.contains("TravelerApproval"));
     document.approve("Test approve by temay");

     // update the AppDocStatus via client API (not an engine transition)
     document.setApplicationDocumentStatus("Completed");
     document.saveDocumentData();

     // get a refreshed document and verify the client-set status stuck
     document = WorkflowDocumentFactory.loadDocument(getPrincipalIdForName("temay"), document.getDocumentId());
     // assertTrue("Document should be processed.", document.isProcessed());
     rh = document.getDocument();
     appDocStatus = rh.getApplicationDocumentStatus();
     assertTrue("Application Document Status:" + appDocStatus +" is invalid", "Completed".equalsIgnoreCase(appDocStatus));

     // check app doc status transition history: three records are expected,
     // (null -> "Approval In Progress" -> "Submitted" -> "Completed"),
     // compared case-insensitively
     List<org.kuali.rice.kew.api.document.DocumentStatusTransition> history = KewApiServiceLocator.getWorkflowDocumentService().getDocumentStatusTransitionHistory(
             document.getDocumentId());
     assertEquals(3, history.size());
     assertTrue("First History record has incorrect status", "Approval In Progress".equalsIgnoreCase(history.get(0).getNewStatus()));
     assertTrue("Second History record has incorrect old status", "Approval In Progress".equalsIgnoreCase(
             history.get(1).getOldStatus()));
     assertTrue("Second History record has incorrect new status", "Submitted".equalsIgnoreCase(history.get(1).getNewStatus()));
     assertTrue("Third History record has incorrect old status", "Submitted".equalsIgnoreCase(history.get(2).getOldStatus()));
     assertTrue("Third History record has incorrect new status", "Completed".equalsIgnoreCase(history.get(2).getNewStatus()));

     // TODO when we are able to, we should also verify the RouteNodeInstances are correct
     document = WorkflowDocumentFactory.loadDocument(getPrincipalIdForName("ewestfal"), document.getDocumentId());
     assertTrue("Document should be final.", document.isFinal());
    }
/**
*
* This method is similar to the above test, except that the doctype definition
* does NOT specify a valid set of values. This means that the value can be any valid string.
*
* @throws Exception
*/
@Test public void testAppDocStatusValuesNotDefined() throws Exception {
    // Create a document of a type whose app-doc-status values are NOT constrained by the
    // doctype definition, so any valid string may be used as the application document status.
    WorkflowDocument document = WorkflowDocumentFactory.createDocument(getPrincipalIdForName("ewestfal"), "TestAppDocStatusDoc1");
    document.saveDocumentData();
    assertNotNull(document.getDocumentId());
    assertTrue("Document should be initiated", document.isInitiated());
    assertTrue("Invalid route level.", document.getNodeNames().contains("Initiated"));

    // route document to first stop and check status, etc.
    document.route("Test Routing.");
    Document rh = document.getDocument();
    String appDocStatus = rh.getApplicationDocumentStatus();
    assertTrue("Application Document Status:" + appDocStatus + " is invalid", "Approval in Progress".equalsIgnoreCase(appDocStatus));

    // should have generated a request to "bmcgough"
    document = WorkflowDocumentFactory.loadDocument(getPrincipalIdForName("bmcgough"), document.getDocumentId());
    assertTrue("Document should be enroute", document.isEnroute());
    Set<String> nodeNames = document.getNodeNames();
    assertEquals("Wrong number of node names.", 1, nodeNames.size());
    assertTrue("Wrong node name.", nodeNames.contains("step1"));

    // check action request
    List<ActionRequest> requests = document.getRootActionRequests();
    assertEquals(1, requests.size());
    ActionRequest request = requests.get(0);
    assertEquals(getPrincipalIdForName("bmcgough"), request.getPrincipalId());
    assertEquals(ActionRequestType.APPROVE, request.getActionRequested());
    assertEquals("step1", request.getNodeName());
    assertTrue(document.isApprovalRequested());

    // approve the document to send it to its next route node
    document.approve("Test approve by bmcgough");

    // check status - routing past step1 should have set the status to "Submitted"
    document = WorkflowDocumentFactory.loadDocument(getPrincipalIdForName("temay"), document.getDocumentId());
    rh = document.getDocument();
    appDocStatus = rh.getApplicationDocumentStatus();
    assertTrue("Application Document Status:" + appDocStatus + " is invalid", "Submitted".equalsIgnoreCase(appDocStatus));

    // should have generated a request to "temay"
    assertTrue("Document should be enroute", document.isEnroute());
    nodeNames = document.getNodeNames();
    assertEquals("Wrong number of node names.", 1, nodeNames.size());
    assertTrue("Wrong node name.", nodeNames.contains("step2"));
    document.approve("Test approve by temay");

    // update the AppDocStatus via client API; since no value list is defined for this
    // doctype, an arbitrary string must be accepted
    document.setApplicationDocumentStatus("Some Random Value");
    document.saveDocumentData();

    // get a refreshed document and check it out
    document = WorkflowDocumentFactory.loadDocument(getPrincipalIdForName("temay"), document.getDocumentId());
    // assertTrue("Document should be processed.", document.isProcessed());
    rh = document.getDocument();
    appDocStatus = rh.getApplicationDocumentStatus();
    assertTrue("Application Document Status:" + appDocStatus + " is invalid", "Some Random Value".equalsIgnoreCase(appDocStatus));

    // check app doc status transition history: initial route, step1 approval, manual update
    List<org.kuali.rice.kew.api.document.DocumentStatusTransition> history = KewApiServiceLocator.getWorkflowDocumentService().getDocumentStatusTransitionHistory(
            document.getDocumentId());
    assertEquals(3, history.size());
    assertTrue("First History record has incorrect status", "Approval In Progress".equalsIgnoreCase(history.get(0)
            .getNewStatus()));
    assertTrue("Second History record has incorrect old status", "Approval In Progress".equalsIgnoreCase(
            history.get(1).getOldStatus()));
    assertTrue("Second History record has incorrect new status", "Submitted".equalsIgnoreCase(history.get(1)
            .getNewStatus()));
    assertTrue("Third History record has incorrect old status", "Submitted".equalsIgnoreCase(history.get(2).getOldStatus()));
    assertTrue("Third History record has incorrect new status", "Some Random Value".equalsIgnoreCase(history.get(2)
            .getNewStatus()));

    // TODO when we are able to, we should also verify the RouteNodeInstances are correct
    document = WorkflowDocumentFactory.loadDocument(getPrincipalIdForName("ewestfal"), document.getDocumentId());
    assertTrue("Document should be final.", document.isFinal());
}
/**
*
* This test attempts to set an invalid status value for a document that has a valid set
* of statuses defined.
* It expects to throw a WorkflowRuntimeException when attempting to set the invalid status value.
*
* @throws Exception
*/
@Test public void testInvalidAppDocStatusValue() throws Exception {
    WorkflowDocument document = WorkflowDocumentFactory.createDocument(getPrincipalIdForName("ewestfal"), "TestAppDocStatusDoc2");
    document.saveDocumentData();
    assertNotNull(document.getDocumentId());
    assertTrue("Document should be initiated", document.isInitiated());
    assertTrue("Invalid route level.", document.getNodeNames().contains("Initiated"));

    // Attempt to set a status that is not in the doctype's declared value list via the
    // client API; the engine is expected to reject it with a WorkflowRuntimeException.
    boolean gotException = false;
    try {
        document.setApplicationDocumentStatus("BAD STATUS");
        document.saveDocumentData();
    } catch (Throwable t) {
        gotException = true;
        // Compare the thrown type's class directly - no need to instantiate a
        // throwaway WorkflowRuntimeException just to obtain its Class object.
        assertEquals("WrongExceptionType", WorkflowRuntimeException.class, t.getClass());
    } finally {
        assertTrue("Expected WorkflowRuntimeException not thrown.", gotException);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.document;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Random;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.json.JsopBuilder;
import org.apache.jackrabbit.oak.plugins.document.DocumentNodeState.Children;
import org.apache.jackrabbit.oak.plugins.document.util.Utils;
import org.junit.Rule;
import org.junit.Test;
import com.google.common.collect.Lists;
import com.mongodb.DB;
/**
* A set of simple tests.
*/
public class SimpleTest {

    @Rule
    public DocumentMKBuilderProvider builderProvider = new DocumentMKBuilderProvider();

    @Rule
    public MongoConnectionFactory connectionFactory = new MongoConnectionFactory();

    // Toggle to run these tests against a real MongoDB instead of the in-memory store.
    private static final boolean MONGO_DB = false;
    // private static final boolean MONGO_DB = true;

    /** Path to id round-trips: ids have the form "depth:path". */
    @Test
    public void pathToId() {
        assertEquals("0:/", Utils.getIdFromPath("/"));
        assertEquals("/", Utils.getPathFromId("0:/"));
        assertEquals("1:/test", Utils.getIdFromPath("/test"));
        assertEquals("/test", Utils.getPathFromId("1:/test"));
        assertEquals("10:/1/2/3/3/4/6/7/8/9/a", Utils.getIdFromPath("/1/2/3/3/4/6/7/8/9/a"));
        assertEquals("/1/2/3/3/4/6/7/8/9/a", Utils.getPathFromId("10:/1/2/3/3/4/6/7/8/9/a"));
    }

    /** Depth of "" and "/" is 0; otherwise the number of path elements. */
    @Test
    public void pathDepth() {
        assertEquals(0, Utils.pathDepth(""));
        assertEquals(0, Utils.pathDepth("/"));
        assertEquals(1, Utils.pathDepth("1/"));
        assertEquals(2, Utils.pathDepth("/a/"));
        assertEquals(2, Utils.pathDepth("/a/b"));
        assertEquals(3, Utils.pathDepth("/a/b/c"));
    }

    /** Create a node document directly in the store and read it back via the node store. */
    @Test
    public void addNodeGetNode() {
        DocumentMK mk = builderProvider.newBuilder().open();
        DocumentStore s = mk.getDocumentStore();
        DocumentNodeStore ns = mk.getNodeStore();
        Revision rev = Revision.fromString(mk.getHeadRevision());
        DocumentNodeState n = new DocumentNodeState(ns, "/test", rev);
        n.setProperty("name", "\"Hello\"");
        UpdateOp op = n.asOperation(true);
        // mark as commit root
        NodeDocument.setRevision(op, rev, "c");
        assertTrue(s.create(Collection.NODES, Lists.newArrayList(op)));
        DocumentNodeState n2 = ns.getNode("/test", rev);
        assertNotNull(n2);
        PropertyState p = n2.getProperty("name");
        assertNotNull(p);
        assertEquals("Hello", p.getValue(Type.STRING));
    }

    /** The ":id" filter exposes the path@revision identifier of each node. */
    @Test
    public void nodeIdentifier() {
        DocumentMK mk = createMK(true);

        String rev0 = mk.getHeadRevision();
        String rev1 = mk.commit("/", "+\"test\":{}", null, null);
        String rev2 = mk.commit("/test", "+\"a\":{}", null, null);
        String rev3 = mk.commit("/test", "+\"b\":{}", null, null);
        String rev4 = mk.commit("/test", "^\"a/x\":1", null, null);

        String r0 = mk.getNodes("/", rev0, 0, 0, Integer.MAX_VALUE, ":id");
        assertEquals("{\":id\":\"/@r0-0-1\",\":childNodeCount\":0}", r0);
        String r1 = mk.getNodes("/", rev1, 0, 0, Integer.MAX_VALUE, ":id");
        assertEquals("{\":id\":\"/@r1-0-1\",\"test\":{},\":childNodeCount\":1}", r1);
        String r2 = mk.getNodes("/", rev2, 0, 0, Integer.MAX_VALUE, ":id");
        assertEquals("{\":id\":\"/@r2-0-1\",\"test\":{},\":childNodeCount\":1}", r2);
        String r3;
        r3 = mk.getNodes("/", rev3, 0, 0, Integer.MAX_VALUE, ":id");
        assertEquals("{\":id\":\"/@r3-0-1\",\"test\":{},\":childNodeCount\":1}", r3);
        r3 = mk.getNodes("/test", rev3, 0, 0, Integer.MAX_VALUE, ":id");
        assertEquals("{\":id\":\"/test@r3-0-1\",\"a\":{},\"b\":{},\":childNodeCount\":2}", r3);
        String r4;
        r4 = mk.getNodes("/", rev4, 0, 0, Integer.MAX_VALUE, ":id");
        assertEquals("{\":id\":\"/@r4-0-1\",\"test\":{},\":childNodeCount\":1}", r4);
        r4 = mk.getNodes("/test", rev4, 0, 0, Integer.MAX_VALUE, ":id");
        assertEquals("{\":id\":\"/test@r4-0-1\",\"a\":{},\"b\":{},\":childNodeCount\":2}", r4);
        r4 = mk.getNodes("/test/a", rev4, 0, 0, Integer.MAX_VALUE, ":id");
        assertEquals("{\":id\":\"/test/a@r4-0-1\",\"x\":1,\":childNodeCount\":0}", r4);
        // /test/b was not touched by rev4, so it still reports its last-modified revision r3
        r4 = mk.getNodes("/test/b", rev4, 0, 0, Integer.MAX_VALUE, ":id");
        assertEquals("{\":id\":\"/test/b@r3-0-1\",\":childNodeCount\":0}", r4);
    }

    /** A conflicting commit fails and is rolled back, leaving the store usable. */
    @Test
    public void conflict() {
        DocumentMK mk = createMK();
        mk.commit("/", "+\"a\": {}", null, null);
        try {
            mk.commit("/", "+\"b\": {} +\"a\": {}", null, null);
            fail();
        } catch (DocumentStoreException e) {
            // expected
        }
        // the previous commit should be rolled back now,
        // so this should work
        mk.commit("/", "+\"b\": {}", null, null);
    }

    /** Diffs between revisions report exactly the nodes added/changed in between. */
    @Test
    public void diff() {
        DocumentMK mk = createMK();

        String rev0 = mk.getHeadRevision();
        String rev1 = mk.commit("/", "+\"t1\":{}", null, null);
        String rev2 = mk.commit("/", "+\"t2\":{}", null, null);
        String rev3 = mk.commit("/", "+\"t3\":{}", null, null);
        String rev4 = mk.commit("/", "^\"t3/x\":1", null, null);

        String r0 = mk.getNodes("/", rev0, 0, 0, Integer.MAX_VALUE, null);
        assertEquals("{\":childNodeCount\":0}", r0);
        String r1 = mk.getNodes("/", rev1, 0, 0, Integer.MAX_VALUE, null);
        assertEquals("{\"t1\":{},\":childNodeCount\":1}", r1);
        String r2 = mk.getNodes("/", rev2, 0, 0, Integer.MAX_VALUE, null);
        assertEquals("{\"t1\":{},\"t2\":{},\":childNodeCount\":2}", r2);
        String r3 = mk.getNodes("/", rev3, 0, 0, Integer.MAX_VALUE, null);
        assertEquals("{\"t1\":{},\"t2\":{},\"t3\":{},\":childNodeCount\":3}", r3);

        String diff01 = mk.diff(rev0, rev1, "/", 0).trim();
        assertEquals("+\"/t1\":{}", diff01);
        String diff12 = mk.diff(rev1, rev2, "/", 0).trim();
        assertEquals("+\"/t2\":{}", diff12);
        String diff23 = mk.diff(rev2, rev3, "/", 0).trim();
        assertEquals("+\"/t3\":{}", diff23);
        String diff13 = mk.diff(rev1, rev3, "/", 0).trim();
        assertEquals("+\"/t2\":{}+\"/t3\":{}", diff13);
        String diff34 = mk.diff(rev3, rev4, "/", 0).trim();
        assertEquals("^\"/t3\":{}", diff34);
    }

    /** A deleted node can be re-added; each revision sees its own version. */
    @Test
    public void reAddDeleted() {
        DocumentMK mk = createMK();

        String rev0 = mk.getHeadRevision();
        String rev1 = mk.commit("/", "+\"test\":{\"name\": \"Hello\"} ^ \"x\": 1", null, null);
        String rev2 = mk.commit("/", "-\"test\" ^ \"x\": 2", null, null);
        String rev3 = mk.commit("/", "+\"test\":{\"name\": \"Hallo\"} ^ \"x\": 3", null, null);

        String test0 = mk.getNodes("/test", rev0, 0, 0, Integer.MAX_VALUE, null);
        assertNull(test0);
        String test1 = mk.getNodes("/test", rev1, 0, 0, Integer.MAX_VALUE, null);
        assertEquals("{\"name\":\"Hello\",\":childNodeCount\":0}", test1);
        String test2 = mk.getNodes("/test", rev2, 0, 0, Integer.MAX_VALUE, null);
        assertNull(test2);
        String test3 = mk.getNodes("/test", rev3, 0, 0, Integer.MAX_VALUE, null);
        assertEquals("{\"name\":\"Hallo\",\":childNodeCount\":0}", test3);
        mk.dispose();
    }

    /** Re-adding a deleted node does not resurrect its former children or properties. */
    @Test
    public void reAddDeleted2() {
        DocumentMK mk = createMK();
        String rev = mk.commit("/", "+\"test\":{\"x\":\"1\",\"child\": {}}", null, null);
        rev = mk.commit("/", "-\"test\"", rev, null);
        rev = mk.commit("/", "+\"test\":{} +\"test2\": {}", null, null);
        String test = mk.getNodes("/test", rev, 0, 0, Integer.MAX_VALUE, null);
        assertEquals("{\":childNodeCount\":0}", test);
        String test2 = mk.getNodes("/test2", rev, 0, 0, Integer.MAX_VALUE, null);
        assertEquals("{\":childNodeCount\":0}", test2);
    }

    /** Move (">") relocates the subtree and removes the source. */
    @Test
    public void move() {
        DocumentMK mk = createMK();
        String rev = mk.commit("/", "+\"test\":{\"x\":\"1\",\"child\": {}}", null, null);
        rev = mk.commit("/", ">\"test\": \"/test2\"", rev, null);
        String test = mk.getNodes("/test2", rev, 0, 0, Integer.MAX_VALUE, null);
        assertEquals("{\"x\":\"1\",\"child\":{},\":childNodeCount\":1}", test);
        test = mk.getNodes("/test", rev, 0, 0, Integer.MAX_VALUE, null);
        assertNull(test);
    }

    /** Copy ("*") duplicates the subtree and keeps the source. */
    @Test
    public void copy() {
        DocumentMK mk = createMK();
        String rev = mk.commit("/", "+\"test\":{\"x\":\"1\",\"child\": {}}", null, null);
        rev = mk.commit("/", "*\"test\": \"/test2\"", rev, null);
        String test = mk.getNodes("/test2", rev, 0, 0, Integer.MAX_VALUE, null);
        assertEquals("{\"x\":\"1\",\"child\":{},\":childNodeCount\":1}", test);
        test = mk.getNodes("/test", rev, 0, 0, Integer.MAX_VALUE, null);
        assertEquals("{\"x\":\"1\",\"child\":{},\":childNodeCount\":1}", test);
    }

    /** Property names that clash with MongoDB conventions (".", "_id", "$") survive a round-trip. */
    @Test
    public void escapePropertyName() {
        DocumentMK mk = createMK();
        String rev = mk.commit(
                "/", "+\"test1\":{\"name.first\": \"Hello\"} +\"test2\":{\"_id\": \"a\"} +\"test3\":{\"$x\": \"1\"}", null, null);
        String test1 = mk.getNodes("/test1", rev, 0, 0, Integer.MAX_VALUE, null);
        assertEquals("{\"name.first\":\"Hello\",\":childNodeCount\":0}", test1);
        String test2 = mk.getNodes("/test2", rev, 0, 0, Integer.MAX_VALUE, null);
        assertEquals("{\"_id\":\"a\",\":childNodeCount\":0}", test2);
        String test3 = mk.getNodes("/test3", rev, 0, 0, Integer.MAX_VALUE, null);
        assertEquals("{\"$x\":\"1\",\":childNodeCount\":0}", test3);
        mk.dispose();
    }

    /** Commits are visible through both the MK string API and the node store. */
    @Test
    public void commit() {
        DocumentMK mk = createMK();
        DocumentNodeStore ns = mk.getNodeStore();

        String rev = mk.commit("/", "+\"test\":{\"name\": \"Hello\"}", null, null);
        String test = mk.getNodes("/test", rev, 0, 0, Integer.MAX_VALUE, null);
        assertEquals("{\"name\":\"Hello\",\":childNodeCount\":0}", test);

        String r0 = mk.commit("/test", "+\"a\":{\"name\": \"World\"}", null, null);
        String r1 = mk.commit("/test", "+\"b\":{\"name\": \"!\"}", null, null);
        // NOTE(review): this return value is unused before the reassignment below - kept as-is
        test = mk.getNodes("/test", r0, 0, 0, Integer.MAX_VALUE, null);
        DocumentNodeState n = ns.getNode("/", Revision.fromString(r0));
        assertNotNull(n);
        Children c = ns.getChildren(n, null, Integer.MAX_VALUE);
        assertEquals("[test]", c.toString());
        n = ns.getNode("/test", Revision.fromString(r1));
        assertNotNull(n);
        c = ns.getChildren(n, null, Integer.MAX_VALUE);
        assertEquals("[a, b]", c.toString());

        rev = mk.commit("", "^\"/test\":1", null, null);
        test = mk.getNodes("/", rev, 0, 0, Integer.MAX_VALUE, null);
        assertEquals("{\"test\":1,\"test\":{},\":childNodeCount\":1}", test);

        // System.out.println(test);
    }

    /** Deleting children and the parent itself is reflected in subsequent reads. */
    @Test
    public void delete() {
        DocumentMK mk = createMK();
        DocumentNodeStore ns = mk.getNodeStore();

        mk.commit("/", "+\"testDel\":{\"name\": \"Hello\"}", null, null);
        mk.commit("/testDel", "+\"a\":{\"name\": \"World\"}", null, null);
        mk.commit("/testDel", "+\"b\":{\"name\": \"!\"}", null, null);
        String r1 = mk.commit("/testDel", "+\"c\":{\"name\": \"!\"}", null, null);

        DocumentNodeState n = ns.getNode("/testDel", Revision.fromString(r1));
        assertNotNull(n);
        Children c = ns.getChildren(n, null, Integer.MAX_VALUE);
        assertEquals(3, c.children.size());

        String r2 = mk.commit("/testDel", "-\"c\"", null, null);
        n = ns.getNode("/testDel", Revision.fromString(r2));
        assertNotNull(n);
        c = ns.getChildren(n, null, Integer.MAX_VALUE);
        assertEquals(2, c.children.size());

        String r3 = mk.commit("/", "-\"testDel\"", null, null);
        n = ns.getNode("/testDel", Revision.fromString(r3));
        assertNull(n);
    }

    /** Random printable names survive escape/unescape and full add/read/delete cycles. */
    @Test
    public void escapeUnescape() {
        DocumentMK mk = createMK();
        String rev;
        String nodes;
        Random r = new Random(1);
        for (int i = 0; i < 20; i++) {
            int len = 1 + r.nextInt(5);
            StringBuilder buff = new StringBuilder();
            for (int j = 0; j < len; j++) {
                buff.append((char) (32 + r.nextInt(128)));
            }
            String s = buff.toString();
            String x2 = Utils.escapePropertyName(s);
            String s2 = Utils.unescapePropertyName(x2);
            if (!s.equals(s2)) {
                assertEquals(s, s2);
            }
            if (s.indexOf('/') >= 0) {
                // slashes are path separators and can not be used in names
                continue;
            }
            JsopBuilder jsop = new JsopBuilder();
            jsop.tag('+').key(s).object().key(s).value("x").endObject();
            rev = mk.commit("/", jsop.toString(),
                    null, null);
            nodes = mk.getNodes("/" + s, rev, 0, 0, 100, null);
            jsop = new JsopBuilder();
            jsop.object().key(s).value("x").
                    key(":childNodeCount").value(0).endObject();
            String n = jsop.toString();
            assertEquals(n, nodes);
            nodes = mk.getNodes("/", rev, 0, 0, 100, null);
            jsop = new JsopBuilder();
            jsop.object().key(s).object().endObject().
                    key(":childNodeCount").value(1).endObject();
            n = jsop.toString();
            assertEquals(n, nodes);
            jsop = new JsopBuilder();
            jsop.tag('-').value(s);
            rev = mk.commit("/", jsop.toString(), rev, null);
        }
    }

    /** Known-problematic names (MongoDB reserved characters) round-trip correctly. */
    @Test
    public void nodeAndPropertyNames() {
        DocumentMK mk = createMK();
        String rev;
        String nodes;
        for (String s : new String[] { "_", "$", "__", "_id", "$x", ".", ".\\", "x\\", "\\x", "first.name" }) {
            String x2 = Utils.escapePropertyName(s);
            String s2 = Utils.unescapePropertyName(x2);
            if (!s.equals(s2)) {
                assertEquals(s, s2);
            }
            JsopBuilder jsop = new JsopBuilder();
            jsop.tag('+').key(s).object().key(s).value("x").endObject();
            rev = mk.commit("/", jsop.toString(),
                    null, null);
            nodes = mk.getNodes("/" + s, rev, 0, 0, 10, null);
            jsop = new JsopBuilder();
            jsop.object().key(s).value("x").
                    key(":childNodeCount").value(0).endObject();
            String n = jsop.toString();
            assertEquals(n, nodes);
            nodes = mk.getNodes("/", rev, 0, 0, 10, null);
            jsop = new JsopBuilder();
            jsop.object().key(s).object().endObject().
                    key(":childNodeCount").value(1).endObject();
            n = jsop.toString();
            assertEquals(n, nodes);
            jsop = new JsopBuilder();
            jsop.tag('-').value(s);
            rev = mk.commit("/", jsop.toString(), rev, null);
        }
    }

    /** Moving a node with children moves the whole subtree. */
    @Test
    public void addAndMove() {
        DocumentMK mk = createMK();
        String head = mk.getHeadRevision();
        head = mk.commit("",
                "+\"/root\":{}\n" +
                        "+\"/root/a\":{}\n"+
                        "+\"/root/a/b\":{}\n",
                head, "");
        head = mk.commit("",
                ">\"/root/a\":\"/root/c\"\n",
                head, "");
        assertFalse(mk.nodeExists("/root/a", head));
        assertTrue(mk.nodeExists("/root/c/b", head));
    }

    /** Verifies which documents carry the commit revision vs. a commit-root reference. */
    @Test
    public void commitRoot() {
        DocumentMK mk = createMK();
        DocumentStore store = mk.getDocumentStore();
        Revision head = Revision.fromString(mk.getHeadRevision());
        head = Revision.fromString(mk.commit("", "+\"/test\":{\"foo\":{}}", head.toString(), null));

        // root node must not have the revision
        NodeDocument rootDoc = store.find(Collection.NODES, "0:/");
        //As we update the childStatus flag the commit root would shift
        //one layer above
        // assertNotNull(rootDoc);
        // assertFalse(rootDoc.containsRevision(head));

        // test node must have head in revisions
        NodeDocument node = store.find(Collection.NODES, "1:/test");
        //assertNotNull(node);
        //assertTrue(node.containsRevision(head));

        // foo must not have head in revisions and must refer to test
        // as commit root (depth = 1)
        NodeDocument foo = store.find(Collection.NODES, "2:/test/foo");
        assertNotNull(foo);
        assertFalse(foo.containsRevision(head));
        assertEquals("/", foo.getCommitRootPath(head));

        head = Revision.fromString(mk.commit("", "+\"/bar\":{}+\"/test/foo/bar\":{}", head.toString(), null));

        // root node is root of commit
        rootDoc = store.find(Collection.NODES, "0:/");
        assertNotNull(rootDoc);
        assertTrue(rootDoc.containsRevision(head));

        // /bar refers to root nodes a commit root
        NodeDocument bar = store.find(Collection.NODES, "1:/bar");
        assertNotNull(bar);
        assertEquals("/", bar.getCommitRootPath(head));

        // /test/foo/bar refers to root nodes a commit root
        bar = store.find(Collection.NODES, "3:/test/foo/bar");
        assertNotNull(bar);
        assertEquals("/", bar.getCommitRootPath(head));
    }

    private DocumentMK createMK() {
        return createMK(false);
    }

    /**
     * Creates a DocumentMK instance, backed by MongoDB when MONGO_DB is enabled,
     * otherwise by the in-memory document store.
     *
     * @param useSimpleRevision whether to use simple (sequential) revisions
     */
    private DocumentMK createMK(boolean useSimpleRevision) {
        DocumentMK.Builder builder = builderProvider.newBuilder();
        if (MONGO_DB) {
            DB db = connectionFactory.getConnection().getDB();
            MongoUtils.dropCollections(db);
            builder.setMongoDB(db);
        }
        builder.setUseSimpleRevision(useSimpleRevision);
        return builder.open();
    }
}
| |
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.deeplearning4j.nn.conf.dropout;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.deeplearning4j.nn.workspace.ArrayType;
import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr;
import org.nd4j.common.base.Preconditions;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.impl.transforms.pairwise.arithmetic.MulOp;
import org.nd4j.linalg.api.ops.random.impl.DropOutInverted;
import org.nd4j.linalg.exception.ND4JOpProfilerException;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.schedule.ISchedule;
import org.nd4j.shade.jackson.annotation.JsonIgnoreProperties;
import org.nd4j.shade.jackson.annotation.JsonProperty;
/**
* Implements standard (inverted) dropout.<br>
* <br>
* Regarding dropout probability. This is the probability of <it>retaining</it> each input activation value for a layer.
* Thus, each input activation x is independently set to:<br>
* x <- 0, with probability 1-p<br>
* x <- x/p with probability p<br>
* Note that this "inverted" dropout scheme maintains the expected value of activations - i.e., E(x) is the same before
* and after dropout.<br>
* Dropout schedules (i.e., varying probability p as a function of iteration/epoch) are also supported.<br>
* <br>
* Other libraries (notably, Keras) use p == probability(<i>dropping</i> an activation)<br>
* In DL4J, {@code new Dropout(x)} will keep an input activation with probability x, and set to 0 with probability 1-x.<br>
* Thus, a dropout value of 1.0 is functionally equivalent to no dropout: i.e., 100% probability of retaining
* each input activation.<br>
* <p>
* Note 1: As per all IDropout instances, dropout is applied at training time only - and is automatically not applied at
* test time (for evaluation, etc)<br>
* Note 2: Care should be taken when setting lower (probability of retaining) values for (too much information may be
* lost with aggressive (very low) dropout values).<br>
* Note 3: Frequently, dropout is not applied to (or, has higher retain probability for) input (first layer)
* layers. Dropout is also often not applied to output layers.<br>
* Note 4: Implementation detail (most users can ignore): DL4J uses inverted dropout, as described here:
* <a href="http://cs231n.github.io/neural-networks-2/">http://cs231n.github.io/neural-networks-2/</a>
* </p>
* <br>
* See: Srivastava et al. 2014: Dropout: A Simple Way to Prevent Neural Networks from Overfitting
* <a href="http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf">http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf</a>
*
* @author Alex Black
*/
@Data
@JsonIgnoreProperties({"mask", "helper", "helperCountFail"})
@EqualsAndHashCode(exclude = {"mask", "helper", "helperCountFail"})
@Slf4j
public class Dropout implements IDropout {

    /**
     * When using a helper (CuDNN) and an error is encountered, should fallback to the non-helper
     * implementation be allowed? If set to false, an exception from the helper is propagated back
     * to the user; if true, the built-in (non-helper) implementation will be used instead.
     */
    @Getter
    @Setter
    protected boolean helperAllowFallback = true;

    // Probability of RETAINING an activation. NaN when only a schedule was provided.
    private double p;
    // Optional schedule for p as a function of iteration/epoch; takes precedence over p when set.
    private ISchedule pSchedule;
    // Dropout mask from the most recent forward pass; required by backprop, then cleared.
    private transient INDArray mask;
    private transient DropoutHelper helper;
    private boolean initializedHelper = false;
    // Number of helper failures so far; once > 0 (and fallback is allowed) the helper is skipped.
    private int helperCountFail = 0;

    /**
     * @param activationRetainProbability Probability of retaining an activation - see {@link Dropout} javadoc
     */
    public Dropout(double activationRetainProbability) {
        this(activationRetainProbability, null);
        if (activationRetainProbability < 0.0) {
            throw new IllegalArgumentException("Activation retain probability must be > 0. Got: " + activationRetainProbability);
        }
        if (activationRetainProbability == 0.0) {
            throw new IllegalArgumentException("Invalid probability value: Dropout with 0.0 probability of retaining "
                    + "activations is not supported");
        }
    }

    /**
     * @param activationRetainProbabilitySchedule Schedule for probability of retaining an activation - see {@link Dropout} javadoc
     */
    public Dropout(ISchedule activationRetainProbabilitySchedule) {
        this(Double.NaN, activationRetainProbabilitySchedule);
    }

    /**
     * When using a helper (CuDNN or MKLDNN in some cases) and an error is encountered, should fallback to the non-helper implementation be allowed?
     * If set to false, an exception in the helper will be propagated back to the user. If true, the built-in
     * (non-helper) implementation for Dropout will be used
     *
     * @param allowFallback Whether fallback to non-helper implementation should be used
     */
    public Dropout helperAllowFallback(boolean allowFallback) {
        this.setHelperAllowFallback(allowFallback);
        return this;
    }

    protected Dropout(@JsonProperty("p") double activationRetainProbability, @JsonProperty("pSchedule") ISchedule activationRetainProbabilitySchedule) {
        this.p = activationRetainProbability;
        this.pSchedule = activationRetainProbabilitySchedule;
    }

    /**
     * Initialize the CuDNN dropout helper, if possible
     */
    protected void initializeHelper(DataType dataType) {
        String backend = Nd4j.getExecutioner().getEnvironmentInformation().getProperty("backend");
        if ("CUDA".equalsIgnoreCase(backend)) {
            try {
                helper = Class.forName("org.deeplearning4j.cuda.dropout.CudnnDropoutHelper")
                        .asSubclass(DropoutHelper.class).getConstructor(DataType.class).newInstance(dataType);
                log.debug("CudnnDropoutHelper successfully initialized");
                if (!helper.checkSupported()) {
                    helper = null;
                }
            } catch (Throwable t) {
                if (!(t instanceof ClassNotFoundException)) {
                    log.warn("Could not initialize CudnnDropoutHelper", t);
                }
                //Unlike other layers, don't warn here about CuDNN not found - if the user has any other layers that can
                // benefit from them cudnn, they will get a warning from those
            }
        }
        initializedHelper = true;
    }

    @Override
    public INDArray applyDropout(INDArray inputActivations, INDArray output, int iteration, int epoch, LayerWorkspaceMgr workspaceMgr) {
        Preconditions.checkState(output.dataType().isFPType(), "Output array must be a floating point type, got %s for array of shape %ndShape",
                output.dataType(), output);

        //Resolve the retain probability for this iteration/epoch; schedule takes precedence
        double currP;
        if (pSchedule != null) {
            currP = pSchedule.valueAt(iteration, epoch);
        } else {
            currP = p;
        }

        if (!initializedHelper) {
            initializeHelper(output.dataType());
        }

        if (helper != null && (helperCountFail == 0 || !isHelperAllowFallback())) {
            boolean helperWorked = false;
            try {
                //Bug fix: pass the resolved probability (currP), not the raw field p.
                //When this Dropout was constructed with a schedule only, p is NaN and the
                //helper would otherwise ignore the schedule entirely.
                helper.applyDropout(inputActivations, output, currP);
                helperWorked = true;
            } catch (ND4JOpProfilerException e) {
                throw e; //NaN panic etc for debugging
            } catch (Exception e) {
                //Guard against null messages before searching them
                if (e.getMessage() != null && e.getMessage().contains("Failed to allocate")) {
                    //This is a memory exception - don't fallback to built-in implementation
                    throw e;
                }

                if (isHelperAllowFallback()) {
                    helperCountFail++;
                    log.warn("CuDNN execution failed - falling back on built-in implementation", e);
                } else {
                    throw new RuntimeException("Error during Dropout CuDNN helper forward pass - helperAllowFallback() is set to false", e);
                }
            }
            if (helperWorked)
                return output;
        }

        INDArray inputCast = inputActivations;
        if (inputCast != output && inputCast.dataType() != output.dataType()) {
            inputCast = inputCast.castTo(output.dataType());
        }

        //Inverted dropout: mask entries are 0 (dropped) or 1/p (retained, scaled)
        mask = workspaceMgr.createUninitialized(ArrayType.INPUT, output.dataType(), output.shape(), output.ordering()).assign(1.0);
        Nd4j.getExecutioner().exec(new DropOutInverted(mask, mask, currP));
        Nd4j.getExecutioner().exec(new MulOp(inputCast, mask, output));
        return output;
    }

    @Override
    public INDArray backprop(INDArray gradAtOutput, INDArray gradAtInput, int iteration, int epoch) {
        if (helper != null && (helperCountFail == 0 || !isHelperAllowFallback())) {
            boolean helperWorked = false;
            try {
                helper.backprop(gradAtOutput, gradAtInput);
                helperWorked = true;
            } catch (ND4JOpProfilerException e) {
                throw e; //NaN panic etc for debugging
            } catch (Exception e) {
                //Guard against null messages before searching them
                if (e.getMessage() != null && e.getMessage().contains("Failed to allocate")) {
                    //This is a memory exception - don't fallback to built-in implementation
                    throw e;
                }

                if (isHelperAllowFallback()) {
                    helperCountFail++;
                    log.warn("CuDNN execution failed - falling back on built-in implementation", e);
                } else {
                    throw new RuntimeException("Error during Dropout CuDNN helper backprop - helperAllowFallback() is set to false", e);
                }
            }
            if (helperWorked)
                return gradAtInput;
        }

        Preconditions.checkState(mask != null, "Cannot perform backprop: Dropout mask array is absent (already cleared?)");
        //dL/dx = dL/dz * dz/dx, with z=0 or x/p
        //Mask already contains either 0 or 1/p, so just muli
        INDArray m = mask;
        if (m.dataType() != gradAtInput.dataType()) {
            m = m.castTo(gradAtInput.dataType());
        }
        Nd4j.getExecutioner().exec(new MulOp(gradAtOutput, m, gradAtInput));
        mask = null;
        return gradAtInput;
    }

    @Override
    public void clear() {
        mask = null;
    }

    @Override
    public Dropout clone() {
        return new Dropout(p, pSchedule == null ? null : pSchedule.clone());
    }
}
| |
// -*- mode: java; c-basic-offset: 2; -*-
// Copyright 2009-2011 Google, All Rights reserved
// Copyright 2011-2012 MIT, All rights reserved
// Released under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
package com.google.appinventor.server.project.youngandroid;
import com.google.appengine.api.utils.SystemProperty;
import com.google.apphosting.api.ApiProxy;
import com.google.appinventor.common.utils.StringUtils;
import com.google.appinventor.common.version.GitBuildId;
import com.google.appinventor.components.common.YaVersion;
import com.google.appinventor.server.CrashReport;
import com.google.appinventor.server.FileExporter;
import com.google.appinventor.server.FileExporterImpl;
import com.google.appinventor.server.Server;
import com.google.appinventor.server.encryption.EncryptionException;
import com.google.appinventor.server.flags.Flag;
import com.google.appinventor.server.project.CommonProjectService;
import com.google.appinventor.server.project.utils.Security;
import com.google.appinventor.server.properties.json.ServerJsonParser;
import com.google.appinventor.server.storage.StorageIo;
import com.google.appinventor.shared.properties.json.JSONParser;
import com.google.appinventor.shared.rpc.RpcResult;
import com.google.appinventor.shared.rpc.ServerLayout;
import com.google.appinventor.shared.rpc.project.NewProjectParameters;
import com.google.appinventor.shared.rpc.project.Project;
import com.google.appinventor.shared.rpc.project.ProjectNode;
import com.google.appinventor.shared.rpc.project.ProjectRootNode;
import com.google.appinventor.shared.rpc.project.ProjectSourceZip;
import com.google.appinventor.shared.rpc.project.RawFile;
import com.google.appinventor.shared.rpc.project.TextFile;
import com.google.appinventor.shared.rpc.project.youngandroid.NewYoungAndroidProjectParameters;
import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidAssetNode;
import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidAssetsFolder;
import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidBlocksNode;
import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidFormNode;
import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidPackageNode;
import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidProjectNode;
import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidSourceFolderNode;
import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidSourceNode;
import com.google.appinventor.shared.rpc.project.youngandroid.YoungAndroidYailNode;
import com.google.appinventor.shared.rpc.user.User;
import com.google.appinventor.shared.settings.Settings;
import com.google.appinventor.shared.settings.SettingsConstants;
import com.google.appinventor.shared.storage.StorageUtil;
import com.google.appinventor.shared.youngandroid.YoungAndroidSourceAnalyzer;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import com.google.common.io.CharStreams;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.text.DecimalFormat;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.logging.Logger;
/**
* Provides support for Young Android projects.
*
* @author lizlooney@google.com (Liz Looney)
* @author markf@google.com (Mark Friedman)
*/
public final class YoungAndroidProjectService extends CommonProjectService {
private static int currentProgress = 0;
private static final Logger LOG = Logger.getLogger(YoungAndroidProjectService.class.getName());
// The value of this flag can be changed in appengine-web.xml
private static final Flag<Boolean> sendGitVersion =
Flag.createFlag("build.send.git.version", true);
// Project folder prefixes
public static final String SRC_FOLDER = YoungAndroidSourceAnalyzer.SRC_FOLDER;
protected static final String ASSETS_FOLDER = "assets";
static final String PROJECT_DIRECTORY = "youngandroidproject";
// TODO(user) Source these from a common constants library.
private static final String FORM_PROPERTIES_EXTENSION =
YoungAndroidSourceAnalyzer.FORM_PROPERTIES_EXTENSION;
private static final String CODEBLOCKS_SOURCE_EXTENSION =
YoungAndroidSourceAnalyzer.CODEBLOCKS_SOURCE_EXTENSION;
private static final String BLOCKLY_SOURCE_EXTENSION =
YoungAndroidSourceAnalyzer.BLOCKLY_SOURCE_EXTENSION;
private static final String YAIL_FILE_EXTENSION =
YoungAndroidSourceAnalyzer.YAIL_FILE_EXTENSION;
public static final String PROJECT_PROPERTIES_FILE_NAME = PROJECT_DIRECTORY + "/" +
"project.properties";
private static final JSONParser JSON_PARSER = new ServerJsonParser();
// Build folder path
private static final String BUILD_FOLDER = "build";
public static final String PROJECT_KEYSTORE_LOCATION = "android.keystore";
// host[:port] to use for connecting to the build server
private static final Flag<String> buildServerHost =
Flag.createFlag("build.server.host", "localhost:9990");
// host[:port] to tell build server app host url
private static final Flag<String> appengineHost =
Flag.createFlag("appengine.host", "");
public YoungAndroidProjectService(StorageIo storageIo) {
super(YoungAndroidProjectNode.YOUNG_ANDROID_PROJECT_TYPE, storageIo);
}
/**
* Returns project settings that can be used when creating a new project.
*/
public static String getProjectSettings(String icon, String vCode, String vName,
String useslocation, String aName, String sizing) {
icon = Strings.nullToEmpty(icon);
vCode = Strings.nullToEmpty(vCode);
vName = Strings.nullToEmpty(vName);
useslocation = Strings.nullToEmpty(useslocation);
sizing = Strings.nullToEmpty(sizing);
aName = Strings.nullToEmpty(aName);
return "{\"" + SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS + "\":{" +
"\"" + SettingsConstants.YOUNG_ANDROID_SETTINGS_ICON + "\":\"" + icon +
"\",\"" + SettingsConstants.YOUNG_ANDROID_SETTINGS_VERSION_CODE + "\":\"" + vCode +
"\",\"" + SettingsConstants.YOUNG_ANDROID_SETTINGS_VERSION_NAME + "\":\"" + vName +
"\",\"" + SettingsConstants.YOUNG_ANDROID_SETTINGS_USES_LOCATION + "\":\"" + useslocation +
"\",\"" + SettingsConstants.YOUNG_ANDROID_SETTINGS_APP_NAME + "\":\"" + aName +
"\",\"" + SettingsConstants.YOUNG_ANDROID_SETTINGS_SIZING + "\":\"" + sizing +
"\"}}";
}
/**
* Returns the contents of the project properties file for a new Young Android
* project.
*
* @param projectName the name of the project
* @param qualifiedName the qualified name of Screen1 in the project
* @param icon the name of the asset to use as the application icon
* @param vcode the version code
* @param vname the version name
*/
public static String getProjectPropertiesFileContents(String projectName, String qualifiedName,
String icon, String vcode, String vname, String useslocation, String aname, String sizing) {
String contents = "main=" + qualifiedName + "\n" +
"name=" + projectName + '\n' +
"assets=../" + ASSETS_FOLDER + "\n" +
"source=../" + SRC_FOLDER + "\n" +
"build=../build\n";
if (icon != null && !icon.isEmpty()) {
contents += "icon=" + icon + "\n";
}
if (vcode != null && !vcode.isEmpty()) {
contents += "versioncode=" + vcode + "\n";
}
if (vname != null && !vname.isEmpty()) {
contents += "versionname=" + vname + "\n";
}
if (useslocation != null && !useslocation.isEmpty()) {
contents += "useslocation=" + useslocation + "\n";
}
if (aname != null) {
contents += "aname=" + aname + "\n";
}
if (sizing != null && !sizing.isEmpty()) {
contents += "sizing=" + sizing + "\n";
}
return contents;
}
/**
* Returns the contents of a new Young Android form file.
* @param qualifiedName the qualified name of the form.
* @return the contents of a new Young Android form file.
*/
@VisibleForTesting
public static String getInitialFormPropertiesFileContents(String qualifiedName) {
final int lastDotPos = qualifiedName.lastIndexOf('.');
String packageName = qualifiedName.split("\\.")[2];
String formName = qualifiedName.substring(lastDotPos + 1);
// The initial Uuid is set to zero here since (as far as we know) we can't get random numbers
// in ode.shared. This shouldn't actually matter since all Uuid's are random int's anyway (and
// 0 was randomly chosen, I promise). The TODO(user) in MockComponent.java indicates that
// there will someday be assurance that these random Uuid's are unique. Once that happens
// this will be perfectly acceptable. Until that happens, choosing 0 is just as safe as
// allowing a random number to be chosen when the MockComponent is first created.
return "#|\n$JSON\n" +
"{\"YaVersion\":\"" + YaVersion.YOUNG_ANDROID_VERSION + "\",\"Source\":\"Form\"," +
"\"Properties\":{\"$Name\":\"" + formName + "\",\"$Type\":\"Form\"," +
"\"$Version\":\"" + YaVersion.FORM_COMPONENT_VERSION + "\",\"Uuid\":\"" + 0 + "\"," +
"\"Title\":\"" + formName + "\",\"AppName\":\"" + packageName +"\"}}\n|#";
}
/**
* Returns the initial contents of a Young Android blockly blocks file.
*/
private static String getInitialBlocklySourceFileContents(String qualifiedName) {
return "";
}
private static String packageNameToPath(String packageName) {
return SRC_FOLDER + '/' + packageName.replace('.', '/');
}
public static String getSourceDirectory(String qualifiedName) {
return StorageUtil.dirname(packageNameToPath(qualifiedName));
}
// CommonProjectService implementation
@Override
public void storeProjectSettings(String userId, long projectId, String projectSettings) {
super.storeProjectSettings(userId, projectId, projectSettings);
// If the icon has been changed, update the project properties file.
// Extract the new icon from the projectSettings parameter.
Settings settings = new Settings(JSON_PARSER, projectSettings);
String newIcon = Strings.nullToEmpty(settings.getSetting(
SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS,
SettingsConstants.YOUNG_ANDROID_SETTINGS_ICON));
String newVCode = Strings.nullToEmpty(settings.getSetting(
SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS,
SettingsConstants.YOUNG_ANDROID_SETTINGS_VERSION_CODE));
String newVName = Strings.nullToEmpty(settings.getSetting(
SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS,
SettingsConstants.YOUNG_ANDROID_SETTINGS_VERSION_NAME));
String newUsesLocation = Strings.nullToEmpty(settings.getSetting(
SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS,
SettingsConstants.YOUNG_ANDROID_SETTINGS_USES_LOCATION));
String newSizing = Strings.nullToEmpty(settings.getSetting(
SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS,
SettingsConstants.YOUNG_ANDROID_SETTINGS_SIZING));
String newAName = Strings.nullToEmpty(settings.getSetting(
SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS,
SettingsConstants.YOUNG_ANDROID_SETTINGS_APP_NAME));
// Extract the old icon from the project.properties file from storageIo.
String projectProperties = storageIo.downloadFile(userId, projectId,
PROJECT_PROPERTIES_FILE_NAME, StorageUtil.DEFAULT_CHARSET);
Properties properties = new Properties();
try {
properties.load(new StringReader(projectProperties));
} catch (IOException e) {
// Since we are reading from a String, I don't think this exception can actually happen.
e.printStackTrace();
return;
}
String oldIcon = Strings.nullToEmpty(properties.getProperty("icon"));
String oldVCode = Strings.nullToEmpty(properties.getProperty("versioncode"));
String oldVName = Strings.nullToEmpty(properties.getProperty("versionname"));
String oldUsesLocation = Strings.nullToEmpty(properties.getProperty("useslocation"));
String oldSizing = Strings.nullToEmpty(properties.getProperty("sizing"));
String oldAName = Strings.nullToEmpty(properties.getProperty("aname"));
if (!newIcon.equals(oldIcon) || !newVCode.equals(oldVCode) || !newVName.equals(oldVName)
|| !newUsesLocation.equals(oldUsesLocation) ||
!newAName.equals(oldAName) || !newSizing.equals(oldSizing)) {
// Recreate the project.properties and upload it to storageIo.
String projectName = properties.getProperty("name");
String qualifiedName = properties.getProperty("main");
String newContent = getProjectPropertiesFileContents(projectName, qualifiedName, newIcon,
newVCode, newVName, newUsesLocation, newAName, newSizing);
storageIo.uploadFileForce(projectId, PROJECT_PROPERTIES_FILE_NAME, userId,
newContent, StorageUtil.DEFAULT_CHARSET);
}
}
/**
* {@inheritDoc}
*
* {@code params} needs to be an instance of
* {@link NewYoungAndroidProjectParameters}.
*/
@Override
public long newProject(String userId, String projectName, NewProjectParameters params) {
NewYoungAndroidProjectParameters youngAndroidParams = (NewYoungAndroidProjectParameters) params;
String qualifiedFormName = youngAndroidParams.getQualifiedFormName();
String propertiesFileName = PROJECT_PROPERTIES_FILE_NAME;
String propertiesFileContents = getProjectPropertiesFileContents(projectName,
qualifiedFormName, null, null, null, null, null, null);
String formFileName = YoungAndroidFormNode.getFormFileId(qualifiedFormName);
String formFileContents = getInitialFormPropertiesFileContents(qualifiedFormName);
String blocklyFileName = YoungAndroidBlocksNode.getBlocklyFileId(qualifiedFormName);
String blocklyFileContents = getInitialBlocklySourceFileContents(qualifiedFormName);
String yailFileName = YoungAndroidYailNode.getYailFileId(qualifiedFormName);
String yailFileContents = "";
Project project = new Project(projectName);
project.setProjectType(YoungAndroidProjectNode.YOUNG_ANDROID_PROJECT_TYPE);
// Project history not supported in legacy ode new project wizard
project.addTextFile(new TextFile(propertiesFileName, propertiesFileContents));
project.addTextFile(new TextFile(formFileName, formFileContents));
project.addTextFile(new TextFile(blocklyFileName, blocklyFileContents));
project.addTextFile(new TextFile(yailFileName, yailFileContents));
// Create new project
return storageIo.createProject(userId, project, getProjectSettings("", "1", "1.0", "false", projectName, "Fixed"));
}
@Override
public long copyProject(String userId, long oldProjectId, String newName) {
String oldName = storageIo.getProjectName(userId, oldProjectId);
String oldProjectSettings = storageIo.loadProjectSettings(userId, oldProjectId);
String oldProjectHistory = storageIo.getProjectHistory(userId, oldProjectId);
Settings oldSettings = new Settings(JSON_PARSER, oldProjectSettings);
String icon = oldSettings.getSetting(
SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS,
SettingsConstants.YOUNG_ANDROID_SETTINGS_ICON);
String vcode = oldSettings.getSetting(
SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS,
SettingsConstants.YOUNG_ANDROID_SETTINGS_VERSION_CODE);
String vname = oldSettings.getSetting(
SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS,
SettingsConstants.YOUNG_ANDROID_SETTINGS_VERSION_NAME);
String useslocation = oldSettings.getSetting(
SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS,
SettingsConstants.YOUNG_ANDROID_SETTINGS_USES_LOCATION);
String aname = oldSettings.getSetting(
SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS,
SettingsConstants.YOUNG_ANDROID_SETTINGS_APP_NAME);
String sizing = oldSettings.getSetting(
SettingsConstants.PROJECT_YOUNG_ANDROID_SETTINGS,
SettingsConstants.YOUNG_ANDROID_SETTINGS_SIZING);
Project newProject = new Project(newName);
newProject.setProjectType(YoungAndroidProjectNode.YOUNG_ANDROID_PROJECT_TYPE);
newProject.setProjectHistory(oldProjectHistory);
// Get the old project's source files and add them to new project, modifying where necessary.
for (String oldSourceFileName : storageIo.getProjectSourceFiles(userId, oldProjectId)) {
String newSourceFileName;
String newContents = null;
if (oldSourceFileName.equals(PROJECT_PROPERTIES_FILE_NAME)) {
// This is the project properties file. The name of the file doesn't contain the old
// project name.
newSourceFileName = oldSourceFileName;
// For the contents of the project properties file, generate the file with the new project
// name and qualified name.
String qualifiedFormName = StringUtils.getQualifiedFormName(
storageIo.getUser(userId).getUserEmail(), newName);
newContents = getProjectPropertiesFileContents(newName, qualifiedFormName, icon, vcode, vname, useslocation, aname, sizing);
} else {
// This is some file other than the project properties file.
// oldSourceFileName may contain the old project name as a path segment, surrounded by /.
// Replace the old name with the new name.
newSourceFileName = StringUtils.replaceLastOccurrence(oldSourceFileName,
"/" + oldName + "/", "/" + newName + "/");
}
if (newContents != null) {
// We've determined (above) that the contents of the file must change for the new project.
// Use newContents when adding the file to the new project.
newProject.addTextFile(new TextFile(newSourceFileName, newContents));
} else {
// If we get here, we know that the contents of the file can just be copied from the old
// project. Since it might be a binary file, we copy it as a raw file (that works for both
// text and binary files).
byte[] contents = storageIo.downloadRawFile(userId, oldProjectId, oldSourceFileName);
newProject.addRawFile(new RawFile(newSourceFileName, contents));
}
}
// Create the new project and return the new project's id.
return storageIo.createProject(userId, newProject, getProjectSettings(icon, vcode, vname,
useslocation, aname, sizing));
}
@Override
public ProjectRootNode getRootNode(String userId, long projectId) {
// Create root, assets, and source nodes (they are mocked nodes as they don't really
// have to exist like this on the file system)
ProjectRootNode rootNode =
new YoungAndroidProjectNode(storageIo.getProjectName(userId, projectId),
projectId);
ProjectNode assetsNode = new YoungAndroidAssetsFolder(ASSETS_FOLDER);
ProjectNode sourcesNode = new YoungAndroidSourceFolderNode(SRC_FOLDER);
rootNode.addChild(assetsNode);
rootNode.addChild(sourcesNode);
// Sources contains nested folders that are interpreted as packages
Map<String, ProjectNode> packagesMap = Maps.newHashMap();
// Retrieve project information
List<String> sourceFiles = storageIo.getProjectSourceFiles(userId, projectId);
for (String fileId : sourceFiles) {
if (fileId.startsWith(ASSETS_FOLDER + '/')) {
// Assets is a flat folder
assetsNode.addChild(new YoungAndroidAssetNode(StorageUtil.basename(fileId), fileId));
} else if (fileId.startsWith(SRC_FOLDER + '/')) {
// We send form (.scm), blocks (.blk), and yail (.yail) nodes to the ODE client.
YoungAndroidSourceNode sourceNode = null;
if (fileId.endsWith(FORM_PROPERTIES_EXTENSION)) {
sourceNode = new YoungAndroidFormNode(fileId);
} else if (fileId.endsWith(BLOCKLY_SOURCE_EXTENSION)) {
sourceNode = new YoungAndroidBlocksNode(fileId);
} else if (fileId.endsWith(CODEBLOCKS_SOURCE_EXTENSION)) {
String blocklyFileName =
fileId.substring(0, fileId.lastIndexOf(CODEBLOCKS_SOURCE_EXTENSION))
+ BLOCKLY_SOURCE_EXTENSION;
if (!sourceFiles.contains(blocklyFileName)) {
// This is an old project that hasn't been converted yet. Convert
// the blocks file to Blockly format and name. Leave the old
// codeblocks file around for now (for debugging) but don't send it to the client.
String blocklyFileContents = convertCodeblocksToBlockly(userId, projectId, fileId);
storageIo.addSourceFilesToProject(userId, projectId, false, blocklyFileName);
storageIo.uploadFileForce(projectId, blocklyFileName, userId, blocklyFileContents,
StorageUtil.DEFAULT_CHARSET);
sourceNode = new YoungAndroidBlocksNode(blocklyFileName);
}
} else if (fileId.endsWith(YAIL_FILE_EXTENSION)) {
sourceNode = new YoungAndroidYailNode(fileId);
}
if (sourceNode != null) {
String packageName = StorageUtil.getPackageName(sourceNode.getQualifiedName());
ProjectNode packageNode = packagesMap.get(packageName);
if (packageNode == null) {
packageNode = new YoungAndroidPackageNode(packageName, packageNameToPath(packageName));
packagesMap.put(packageName, packageNode);
sourcesNode.addChild(packageNode);
}
packageNode.addChild(sourceNode);
}
}
}
return rootNode;
}
/*
* Convert the contents of the codeblocks file named codeblocksFileId
* to blockly format and return the blockly contents.
*/
private String convertCodeblocksToBlockly(String userId, long projectId,
String codeblocksFileId) {
// TODO(sharon): implement this!
return "";
}
@Override
public long addFile(String userId, long projectId, String fileId) {
if (fileId.endsWith(FORM_PROPERTIES_EXTENSION) ||
fileId.endsWith(BLOCKLY_SOURCE_EXTENSION)) {
// If the file to be added is a form file or a blocks file, add a new form file, a new
// blocks file, and a new yail file (as a placeholder for later code generation)
String qualifiedFormName = YoungAndroidSourceNode.getQualifiedName(fileId);
String formFileName = YoungAndroidFormNode.getFormFileId(qualifiedFormName);
String blocklyFileName = YoungAndroidBlocksNode.getBlocklyFileId(qualifiedFormName);
String yailFileName = YoungAndroidYailNode.getYailFileId(qualifiedFormName);
List<String> sourceFiles = storageIo.getProjectSourceFiles(userId, projectId);
if (!sourceFiles.contains(formFileName) &&
!sourceFiles.contains(blocklyFileName) &&
!sourceFiles.contains(yailFileName)) {
String formFileContents = getInitialFormPropertiesFileContents(qualifiedFormName);
storageIo.addSourceFilesToProject(userId, projectId, false, formFileName);
storageIo.uploadFileForce(projectId, formFileName, userId, formFileContents,
StorageUtil.DEFAULT_CHARSET);
String blocklyFileContents = getInitialBlocklySourceFileContents(qualifiedFormName);
storageIo.addSourceFilesToProject(userId, projectId, false, blocklyFileName);
storageIo.uploadFileForce(projectId, blocklyFileName, userId, blocklyFileContents,
StorageUtil.DEFAULT_CHARSET);
String yailFileContents = ""; // start empty
storageIo.addSourceFilesToProject(userId, projectId, false, yailFileName);
return storageIo.uploadFileForce(projectId, yailFileName, userId, yailFileContents,
StorageUtil.DEFAULT_CHARSET);
} else {
throw new IllegalStateException("One or more files to be added already exists.");
}
} else {
return super.addFile(userId, projectId, fileId);
}
}
@Override
public long deleteFile(String userId, long projectId, String fileId) {
if (fileId.endsWith(FORM_PROPERTIES_EXTENSION) ||
fileId.endsWith(BLOCKLY_SOURCE_EXTENSION)) {
// If the file to be deleted is a form file or a blocks file, delete both the form file
// and the blocks file. Also, if there was a codeblocks file laying around
// for that same form, delete it too (if it doesn't exist the delete
// for it will be a no-op).
String qualifiedFormName = YoungAndroidSourceNode.getQualifiedName(fileId);
String formFileName = YoungAndroidFormNode.getFormFileId(qualifiedFormName);
String blocklyFileName = YoungAndroidBlocksNode.getBlocklyFileId(qualifiedFormName);
String codeblocksFileName = YoungAndroidBlocksNode.getCodeblocksFileId(qualifiedFormName);
String yailFileName = YoungAndroidYailNode.getYailFileId(qualifiedFormName);
storageIo.deleteFile(userId, projectId, formFileName);
storageIo.deleteFile(userId, projectId, blocklyFileName);
storageIo.deleteFile(userId, projectId, codeblocksFileName);
storageIo.deleteFile(userId, projectId, yailFileName);
storageIo.removeSourceFilesFromProject(userId, projectId, true,
formFileName, blocklyFileName, codeblocksFileName, yailFileName);
return storageIo.getProjectDateModified(userId, projectId);
} else {
return super.deleteFile(userId, projectId, fileId);
}
}
/**
* Make a request to the Build Server to build a project. The Build Server will asynchronously
* post the results of the build via the {@link com.google.appinventor.server.ReceiveBuildServlet}
* A later call will need to be made by the client in order to get those results.
*
* @param user the User that owns the {@code projectId}.
* @param projectId project id to be built
* @param nonce random string used to find resulting APK from unauth context
* @param target build target (optional, implementation dependent)
*
* @return an RpcResult reflecting the call to the Build Server
*/
@Override
public RpcResult build(User user, long projectId, String nonce, String target) {
String userId = user.getUserId();
String projectName = storageIo.getProjectName(userId, projectId);
String outputFileDir = BUILD_FOLDER + '/' + target;
// Store the userId and projectId based on the nonce
storageIo.storeNonce(nonce, userId, projectId);
// Delete the existing build output files, if any, so that future attempts to get it won't get
// old versions.
List<String> buildOutputFiles = storageIo.getProjectOutputFiles(userId, projectId);
for (String buildOutputFile : buildOutputFiles) {
storageIo.deleteFile(userId, projectId, buildOutputFile);
}
URL buildServerUrl = null;
ProjectSourceZip zipFile = null;
try {
buildServerUrl = new URL(getBuildServerUrlStr(
user.getUserEmail(),
userId,
projectId,
outputFileDir));
HttpURLConnection connection = (HttpURLConnection) buildServerUrl.openConnection();
connection.setDoOutput(true);
connection.setRequestMethod("POST");
BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(connection.getOutputStream());
FileExporter fileExporter = new FileExporterImpl();
zipFile = fileExporter.exportProjectSourceZip(userId, projectId, false,
/* includeAndroidKeystore */ true,
projectName + ".aia", true);
bufferedOutputStream.write(zipFile.getContent());
bufferedOutputStream.flush();
bufferedOutputStream.close();
int responseCode = 0;
responseCode = connection.getResponseCode();
if (responseCode != HttpURLConnection.HTTP_OK) {
// Put the HTTP response code into the RpcResult so the client code in BuildCommand.java
// can provide an appropriate error message to the user.
// NOTE(lizlooney) - There is some weird bug/problem with HttpURLConnection. When the
// responseCode is 503, connection.getResponseMessage() returns "OK", but it should return
// "Service Unavailable". If I make the request with curl and look at the headers, they
// have the expected error message.
// For now, the moral of the story is: don't use connection.getResponseMessage().
String error = "Build server responded with response code " + responseCode + ".";
try {
String content = readContent(connection.getInputStream());
if (content != null && !content.isEmpty()) {
error += "\n" + content;
}
} catch (IOException e) {
// No content. That's ok.
}
try {
String errorContent = readContent(connection.getErrorStream());
if (errorContent != null && !errorContent.isEmpty()) {
error += "\n" + errorContent;
}
} catch (IOException e) {
// No error content. That's ok.
}
if (responseCode == HttpURLConnection.HTTP_CONFLICT) {
// The build server is not compatible with this App Inventor instance. Log this as severe
// so the owner of the app engine instance will know about it.
LOG.severe(error);
}
return new RpcResult(responseCode, "", StringUtils.escape(error));
}
} catch (MalformedURLException e) {
CrashReport.createAndLogError(LOG, null,
buildErrorMsg("MalformedURLException", buildServerUrl, userId, projectId), e);
return new RpcResult(false, "", e.getMessage());
} catch (IOException e) {
// As of App Engine 1.9.0 we get these when UrlFetch is asked to send too much data
Throwable wrappedException = e;
int zipFileLength = zipFile.getContent().length;
if (zipFileLength >= (5 * 1024 * 1024) /* 5 MB */) {
String lengthMbs = format((zipFileLength * 1.0)/(1024*1024));
wrappedException = new IllegalArgumentException(
"Sorry, can't package projects larger than 5MB."
+ " Yours is " + lengthMbs + "MB.", e);
}
CrashReport.createAndLogError(LOG, null,
buildErrorMsg("IOException", buildServerUrl, userId, projectId), wrappedException);
return new RpcResult(false, "", wrappedException.getMessage());
} catch (EncryptionException e) {
CrashReport.createAndLogError(LOG, null,
buildErrorMsg("EncryptionException", buildServerUrl, userId, projectId), e);
return new RpcResult(false, "", e.getMessage());
} catch (RuntimeException e) {
// In particular, we often see RequestTooLargeException (if the zip is too
// big) and ApiProxyException. There may be others.
Throwable wrappedException = e;
if (e instanceof ApiProxy.RequestTooLargeException && zipFile != null) {
int zipFileLength = zipFile.getContent().length;
if (zipFileLength >= (5 * 1024 * 1024) /* 5 MB */) {
String lengthMbs = format((zipFileLength * 1.0)/(1024*1024));
wrappedException = new IllegalArgumentException(
"Sorry, can't package projects larger than 5MB."
+ " Yours is " + lengthMbs + "MB.", e);
} else {
wrappedException = new IllegalArgumentException(
"Sorry, project was too large to package (" + zipFileLength + " bytes)");
}
}
CrashReport.createAndLogError(LOG, null,
buildErrorMsg("RuntimeException", buildServerUrl, userId, projectId), wrappedException);
return new RpcResult(false, "", wrappedException.getMessage());
}
return new RpcResult(true, "Building " + projectName, "");
}
private String buildErrorMsg(String exceptionName, URL buildURL, String userId, long projectId) {
return "Request to build failed with " + exceptionName + ", user=" + userId
+ ", project=" + projectId + ", build URL is " + buildURL
+ " [" + buildURL.toString().length() + "]";
}
// Note that this is a function rather than just a constant because we assume it will get
// a little more complicated when we want to get the URL from an App Engine config file or
// command line argument.
private String getBuildServerUrlStr(String userName, String userId,
long projectId, String fileName)
throws UnsupportedEncodingException, EncryptionException {
return "http://" + buildServerHost.get() + "/buildserver/build-all-from-zip-async"
+ "?uname=" + URLEncoder.encode(userName, "UTF-8")
+ (sendGitVersion.get()
? "&gitBuildVersion="
+ URLEncoder.encode(GitBuildId.getVersion(), "UTF-8")
: "")
+ "&callback="
+ URLEncoder.encode("http://" + getCurrentHost() + ServerLayout.ODE_BASEURL_NOAUTH
+ ServerLayout.RECEIVE_BUILD_SERVLET + "/"
+ Security.encryptUserAndProjectId(userId, projectId)
+ "/" + fileName,
"UTF-8");
}
private String getCurrentHost() {
if (Server.isProductionServer()) {
if (appengineHost.get()=="") {
String applicationVersionId = SystemProperty.applicationVersion.get();
String applicationId = SystemProperty.applicationId.get();
return applicationVersionId + "." + applicationId + ".appspot.com";
} else {
return appengineHost.get();
}
} else {
// TODO(user): Figure out how to make this more generic
return "localhost:8888";
}
}
/*
* Reads the UTF-8 content from the given input stream.
*/
private static String readContent(InputStream stream) throws IOException {
if (stream != null) {
BufferedReader reader = new BufferedReader(new InputStreamReader(stream, "UTF-8"));
try {
return CharStreams.toString(reader);
} finally {
reader.close();
}
}
return null;
}
/**
* Check if there are any build results available for the given user's project
*
* @param user the User that owns the {@code projectId}.
* @param projectId project id to be built
* @param target build target (optional, implementation dependent)
* @return an RpcResult reflecting the call to the Build Server. The following values may be in
* RpcResult.result:
* 0: Build is done and was successful
* 1: Build is done and was unsuccessful
* 2: Yail generation failed
* -1: Build is not yet done.
*/
@Override
public RpcResult getBuildResult(User user, long projectId, String target) {
String userId = user.getUserId();
String buildOutputFileName = BUILD_FOLDER + '/' + target + '/' + "build.out";
List<String> outputFiles = storageIo.getProjectOutputFiles(userId, projectId);
updateCurrentProgress(user, projectId, target);
RpcResult buildResult = new RpcResult(-1, ""+currentProgress, ""); // Build not finished
for (String outputFile : outputFiles) {
if (buildOutputFileName.equals(outputFile)) {
String outputStr = storageIo.downloadFile(userId, projectId, outputFile, "UTF-8");
try {
JSONObject buildResultJsonObj = new JSONObject(outputStr);
buildResult = new RpcResult(buildResultJsonObj.getInt("result"),
buildResultJsonObj.getString("output"),
buildResultJsonObj.getString("error"),
outputStr);
} catch (JSONException e) {
buildResult = new RpcResult(1, "", "");
}
break;
}
}
return buildResult;
}
/**
 * Asks the Build Server for the current progress of the given user's project
 * build and caches it in the {@code currentProgress} field.
 *
 * <p>All failures are deliberately swallowed: progress reporting is
 * best-effort and must never break the build-status polling path.
 *
 * @param user the User that owns the {@code projectId}.
 * @param projectId project id to be built
 * @param target build target (optional, implementation dependent)
 */
public void updateCurrentProgress(User user, long projectId, String target) {
  try {
    String userId = user.getUserId();
    String outputFileDir = BUILD_FOLDER + '/' + target;
    // (Removed unused locals: projectName lookup and zipFile were never read.)
    URL buildServerUrl = new URL(getBuildServerUrlStr(user.getUserEmail(),
        userId, projectId, outputFileDir));
    HttpURLConnection connection = (HttpURLConnection) buildServerUrl.openConnection();
    try {
      connection.setDoOutput(true);
      connection.setRequestMethod("POST");
      int responseCode = connection.getResponseCode();
      if (responseCode == HttpURLConnection.HTTP_OK) {
        try {
          String content = readContent(connection.getInputStream());
          if (content != null && !content.isEmpty()) {
            LOG.info("The current progress is " + content + "%.");
            currentProgress = Integer.parseInt(content);
          }
        } catch (IOException e) {
          // No content. That's ok.
        }
      }
    } finally {
      // Release the underlying socket; the original code leaked the
      // connection whenever the response was not consumed.
      connection.disconnect();
    }
  } catch (MalformedURLException e) {
    // that's ok, nothing to do
  } catch (IOException e) {
    // that's ok, nothing to do
  } catch (EncryptionException e) {
    // that's ok, nothing to do
  } catch (RuntimeException e) {
    // that's ok, nothing to do; this also covers NumberFormatException
    // when the server returns a non-numeric progress body.
  }
}
/** Formats a floating-point value with at most two decimal places (no trailing zeros). */
private String format(double input) {
  return new DecimalFormat("###.##").format(input);
}
}
| |
/*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package com.pentaho.big.data.bundles.impl.shim.hbase.table;
import com.pentaho.big.data.bundles.impl.shim.hbase.HBaseConnectionWrapper;
import com.pentaho.big.data.bundles.impl.shim.hbase.connectionPool.HBaseConnectionHandle;
import com.pentaho.big.data.bundles.impl.shim.hbase.connectionPool.HBaseConnectionPool;
import com.pentaho.big.data.bundles.impl.shim.hbase.meta.HBaseValueMetaInterfaceFactoryImpl;
import org.junit.Before;
import org.junit.Test;
import org.mockito.stubbing.OngoingStubbing;
import org.pentaho.bigdata.api.hbase.mapping.Mapping;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.hbase.shim.spi.HBaseBytesUtilShim;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@code HBaseTableImpl}.
 *
 * <p>Each table operation is exercised three ways: the happy path, an
 * IOException while obtaining the pooled connection handle, and an
 * IOException while closing the handle. The *HandleClosedWhenException
 * tests additionally verify that the handle is closed even when the
 * underlying HBase call itself throws.
 *
 * Created by bryan on 2/26/16.
 */
public class HBaseTableImplTest {
// Sample row-key bytes used by the keyExists tests.
private final byte[] testBytes = "testbytes".getBytes( Charset.forName( "UTF-8" ) );
private HBaseConnectionPool hBaseConnectionPool;
private HBaseValueMetaInterfaceFactoryImpl
hBaseValueMetaInterfaceFactory;
private HBaseBytesUtilShim hBaseBytesUtilShim;
private String testName;
// Object under test; rebuilt from fresh mocks in setup().
private HBaseTableImpl hBaseTable;
// One IOERunnable per table operation so the shared error-path helpers
// (testIOEGettingHandle / testIOEClosingHandle / testEStillClosesHandle)
// can be reused by every test below.
private final IOERunnable existsRunnable = new IOERunnable() {
@Override public void run() throws IOException {
hBaseTable.exists();
}
};
private final IOERunnable disabledRunnable = new IOERunnable() {
@Override public void run() throws IOException {
hBaseTable.disabled();
}
};
private final IOERunnable availableRunnable = new IOERunnable() {
@Override public void run() throws IOException {
hBaseTable.available();
}
};
private final IOERunnable disableRunnable = new IOERunnable() {
@Override public void run() throws IOException {
hBaseTable.disable();
}
};
private final IOERunnable enableRunnable = new IOERunnable() {
@Override public void run() throws IOException {
hBaseTable.enable();
}
};
private final IOERunnable deleteRunnable = new IOERunnable() {
@Override public void run() throws IOException {
hBaseTable.delete();
}
};
private final IOERunnable createRunnable = new IOERunnable() {
@Override public void run() throws IOException {
hBaseTable.create( new ArrayList<String>(), new Properties() );
}
};
private final IOERunnable getColumnFamiliesRunnable = new IOERunnable() {
@Override public void run() throws IOException {
hBaseTable.getColumnFamilies();
}
};
private final IOERunnable keyExistsRunnable = new IOERunnable() {
@Override public void run() throws IOException {
hBaseTable.keyExists( testBytes );
}
};
private HBaseConnectionHandle hBaseConnectionHandle;
private HBaseConnectionHandle hBaseConnectionHandleNamed;
private HBaseConnectionWrapper hBaseConnectionWrapper;
private Mapping tableMapping;
private LogChannelInterface logChannelInterface;
private VariableSpace variableSpace;
// Rebuilds every mock and the table under test before each test; both
// connection handles hand out the same connection wrapper.
@Before
public void setup() {
hBaseConnectionPool = mock( HBaseConnectionPool.class );
hBaseValueMetaInterfaceFactory = mock( HBaseValueMetaInterfaceFactoryImpl.class );
hBaseBytesUtilShim = mock( HBaseBytesUtilShim.class );
testName = "testName";
hBaseTable =
new HBaseTableImpl( hBaseConnectionPool, hBaseValueMetaInterfaceFactory, hBaseBytesUtilShim, testName );
hBaseConnectionHandle = mock( HBaseConnectionHandle.class );
hBaseConnectionHandleNamed = mock( HBaseConnectionHandle.class );
hBaseConnectionWrapper = mock( HBaseConnectionWrapper.class );
tableMapping = mock( Mapping.class );
logChannelInterface = mock( LogChannelInterface.class );
variableSpace = mock( VariableSpace.class );
when( hBaseConnectionHandle.getConnection() ).thenReturn( hBaseConnectionWrapper );
when( hBaseConnectionHandleNamed.getConnection() ).thenReturn( hBaseConnectionWrapper );
}
@Test
public void testExistsSuccess() throws Exception {
when( hBaseConnectionPool.getConnectionHandle() ).thenReturn( hBaseConnectionHandle );
when( hBaseConnectionWrapper.tableExists( testName ) ).thenReturn( true ).thenReturn( false );
assertTrue( hBaseTable.exists() );
assertFalse( hBaseTable.exists() );
}
@Test( expected = IOException.class )
public void testExistsErrorGettingHandle() throws IOException {
testIOEGettingHandle( existsRunnable );
}
@Test( expected = IOException.class )
public void testExistsErrorClosingHandle() throws IOException {
testIOEClosingHandle( existsRunnable );
}
@Test( expected = IOException.class )
public void testExistsHandleClosedWhenException() throws Exception {
testEStillClosesHandle( when( hBaseConnectionWrapper.tableExists( testName ) ), existsRunnable );
}
@Test
public void testDisabledSuccess() throws Exception {
when( hBaseConnectionPool.getConnectionHandle() ).thenReturn( hBaseConnectionHandle );
when( hBaseConnectionWrapper.isTableDisabled( testName ) ).thenReturn( true ).thenReturn( false );
assertTrue( hBaseTable.disabled() );
assertFalse( hBaseTable.disabled() );
}
@Test( expected = IOException.class )
public void testDisabledErrorGettingHandle() throws IOException {
testIOEGettingHandle( disabledRunnable );
}
@Test( expected = IOException.class )
public void testDisabledErrorClosingHandle() throws IOException {
testIOEClosingHandle( disabledRunnable );
}
@Test( expected = IOException.class )
public void testDisabledHandleClosedWhenException() throws Exception {
testEStillClosesHandle( when( hBaseConnectionWrapper.isTableDisabled( testName ) ), disabledRunnable );
}
@Test
public void testAvailableSuccess() throws Exception {
when( hBaseConnectionPool.getConnectionHandle() ).thenReturn( hBaseConnectionHandle );
when( hBaseConnectionWrapper.isTableAvailable( testName ) ).thenReturn( true ).thenReturn( false );
assertTrue( hBaseTable.available() );
assertFalse( hBaseTable.available() );
}
@Test( expected = IOException.class )
public void testAvailableErrorGettingHandle() throws IOException {
testIOEGettingHandle( availableRunnable );
}
@Test( expected = IOException.class )
public void testAvailableErrorClosingHandle() throws IOException {
testIOEClosingHandle( availableRunnable );
}
@Test( expected = IOException.class )
public void testAvailableHandleClosedWhenException() throws Exception {
testEStillClosesHandle( when( hBaseConnectionWrapper.isTableAvailable( testName ) ), availableRunnable );
}
@Test
public void testDisableSuccess() throws Exception {
when( hBaseConnectionPool.getConnectionHandle() ).thenReturn( hBaseConnectionHandle );
hBaseTable.disable();
verify( hBaseConnectionWrapper ).disableTable( testName );
}
@Test( expected = IOException.class )
public void testDisableErrorGettingHandle() throws IOException {
testIOEGettingHandle( disableRunnable );
}
@Test( expected = IOException.class )
public void testDisableErrorClosingHandle() throws IOException {
testIOEClosingHandle( disableRunnable );
}
// void operations cannot be stubbed via OngoingStubbing, so these use the
// StubberReturn variant: the exception is wired with doThrow afterwards.
@Test( expected = IOException.class )
public void testDisableHandleClosedWhenException() throws Exception {
StubberReturn stubberReturn = testEStillClosesHandle( disableRunnable );
doThrow( stubberReturn.exception ).when( hBaseConnectionWrapper ).disableTable( testName );
stubberReturn.ioeRunnable.run();
}
@Test
public void testEnableSuccess() throws Exception {
when( hBaseConnectionPool.getConnectionHandle() ).thenReturn( hBaseConnectionHandle );
hBaseTable.enable();
verify( hBaseConnectionWrapper ).enableTable( testName );
}
@Test( expected = IOException.class )
public void testEnableErrorGettingHandle() throws IOException {
testIOEGettingHandle( enableRunnable );
}
@Test( expected = IOException.class )
public void testEnableErrorClosingHandle() throws IOException {
testIOEClosingHandle( enableRunnable );
}
@Test( expected = IOException.class )
public void testEnableHandleClosedWhenException() throws Exception {
StubberReturn stubberReturn = testEStillClosesHandle( enableRunnable );
doThrow( stubberReturn.exception ).when( hBaseConnectionWrapper ).enableTable( testName );
stubberReturn.ioeRunnable.run();
}
@Test
public void testDeleteSuccess() throws Exception {
when( hBaseConnectionPool.getConnectionHandle() ).thenReturn( hBaseConnectionHandle );
hBaseTable.delete();
verify( hBaseConnectionWrapper ).deleteTable( testName );
}
@Test( expected = IOException.class )
public void testDeleteErrorGettingHandle() throws IOException {
testIOEGettingHandle( deleteRunnable );
}
@Test( expected = IOException.class )
public void testDeleteErrorClosingHandle() throws IOException {
testIOEClosingHandle( deleteRunnable );
}
@Test( expected = IOException.class )
public void testDeleteHandleClosedWhenException() throws Exception {
StubberReturn stubberReturn = testEStillClosesHandle( deleteRunnable );
doThrow( stubberReturn.exception ).when( hBaseConnectionWrapper ).deleteTable( testName );
stubberReturn.ioeRunnable.run();
}
@Test
public void testCreateSuccess() throws Exception {
when( hBaseConnectionPool.getConnectionHandle() ).thenReturn( hBaseConnectionHandle );
List list = mock( List.class );
Properties props = mock( Properties.class );
hBaseTable.create( list, props );
verify( hBaseConnectionWrapper ).createTable( testName, list, props );
}
@Test( expected = IOException.class )
public void testCreateErrorGettingHandle() throws IOException {
testIOEGettingHandle( createRunnable );
}
@Test( expected = IOException.class )
public void testCreateErrorClosingHandle() throws IOException {
testIOEClosingHandle( createRunnable );
}
@Test( expected = IOException.class )
public void testCreateHandleClosedWhenException() throws Exception {
StubberReturn stubberReturn = testEStillClosesHandle( createRunnable );
doThrow( stubberReturn.exception ).when( hBaseConnectionWrapper )
.createTable( eq( testName ), eq( new ArrayList<String>() ), eq( new Properties() ) );
stubberReturn.ioeRunnable.run();
}
@Test
public void testGetColumnFamiliesSuccess() throws Exception {
when( hBaseConnectionPool.getConnectionHandle() ).thenReturn( hBaseConnectionHandle );
List families = mock( List.class );
when( hBaseConnectionWrapper.getTableFamiles( testName ) ).thenReturn( families );
assertEquals( families, hBaseTable.getColumnFamilies() );
}
@Test( expected = IOException.class )
public void testGetColumnFamiliesErrorGettingHandle() throws IOException {
testIOEGettingHandle( getColumnFamiliesRunnable );
}
@Test( expected = IOException.class )
public void testGetColumnFamiliesErrorClosingHandle() throws IOException {
testIOEClosingHandle( getColumnFamiliesRunnable );
}
@Test( expected = IOException.class )
public void testGetColumnFamiliesHandleClosedWhenException() throws Exception {
testEStillClosesHandle( when( hBaseConnectionWrapper.getTableFamiles( testName ) ), getColumnFamiliesRunnable );
}
// keyExists uses the *named* connection-handle lookup, hence the testName
// argument to the helper overloads below.
@Test
public void testKeyExistsSuccess() throws Exception {
when( hBaseConnectionPool.getConnectionHandle( testName ) ).thenReturn( hBaseConnectionHandle );
when( hBaseConnectionWrapper.sourceTableRowExists( testBytes ) ).thenReturn( true ).thenReturn( false );
assertTrue( hBaseTable.keyExists( testBytes ) );
assertFalse( hBaseTable.keyExists( testBytes ) );
}
@Test( expected = IOException.class )
public void testKeyExistsErrorGettingHandle() throws IOException {
testIOEGettingHandle( keyExistsRunnable, testName );
}
@Test( expected = IOException.class )
public void testKeyExistsErrorClosingHandle() throws IOException {
testIOEClosingHandle( keyExistsRunnable, testName );
}
@Test( expected = IOException.class )
public void testKeyExistsHandleClosedWhenException() throws Exception {
testEStillClosesHandle( testName, when( hBaseConnectionWrapper.sourceTableRowExists( testBytes ) ),
keyExistsRunnable );
}
@Test
public void testCreateWriteOperationManagerSuccessNullBufferSize() throws IOException {
assertNotNull( hBaseTable.createWriteOperationManager( null ) );
verify( hBaseConnectionPool ).getConnectionHandle( eq( testName ), eq( new Properties() ) );
}
@Test
public void testCreateWriteOperationManagerSuccessNotNullBufferSize() throws IOException {
Long writeBufferSize = 10L;
assertNotNull( hBaseTable.createWriteOperationManager( writeBufferSize ) );
Properties properties = new Properties();
properties.setProperty( org.pentaho.hbase.shim.spi.HBaseConnection.HTABLE_WRITE_BUFFER_SIZE_KEY,
writeBufferSize.toString() );
verify( hBaseConnectionPool ).getConnectionHandle( eq( testName ), eq( properties ) );
}
@Test
public void testClose() throws IOException {
hBaseTable.close();
verifyNoMoreInteractions( hBaseConnectionPool, hBaseValueMetaInterfaceFactory, hBaseBytesUtilShim );
}
@Test
public void testCreateScannerBuilderKeyBounds() {
assertTrue( hBaseTable.createScannerBuilder( new byte[] {}, new byte[] {} ) instanceof ResultScannerBuilderImpl );
}
@Test
public void testCreateScannerBuilderFullArgsEmptyKeyStart() throws KettleException {
when( tableMapping.getKeyType() ).thenReturn( Mapping.KeyType.LONG );
String scannerCacheSizeVar = "scannerCacheSizeVar";
String scannerCacheSize = "100";
when( variableSpace.environmentSubstitute( scannerCacheSizeVar ) ).thenReturn( scannerCacheSize );
assertNotNull( hBaseTable.createScannerBuilder( tableMapping, "testConvMask", null, null, scannerCacheSizeVar, logChannelInterface, variableSpace ) );
}
/**
 * Stubs the unnamed connection-handle lookup to throw, runs the operation,
 * and asserts the surfaced IOException wraps the stubbed exception.
 */
private void testIOEGettingHandle( IOERunnable runnable ) throws IOException {
testIOEGettingHandle( runnable, when( hBaseConnectionPool.getConnectionHandle() ) );
}
/** Same as above, but for the named connection-handle lookup. */
private void testIOEGettingHandle( IOERunnable runnable, String name ) throws IOException {
testIOEGettingHandle( runnable, when( hBaseConnectionPool.getConnectionHandle( name ) ) );
}
private void testIOEGettingHandle( IOERunnable runnable, OngoingStubbing<HBaseConnectionHandle> ongoingStubbing )
throws IOException {
IOException ioException = new IOException();
ongoingStubbing.thenThrow( ioException );
try {
runnable.run();
} catch ( IOException e ) {
// The original stubbed exception must be preserved as the cause.
assertEquals( ioException, e.getCause() );
throw e;
}
}
/**
 * Returns a good handle whose close() throws, runs the operation, and
 * asserts the close-failure surfaces as the cause of the thrown IOException.
 */
private void testIOEClosingHandle( IOERunnable runnable ) throws IOException {
testIOEClosingHandle( runnable, when( hBaseConnectionPool.getConnectionHandle() ) );
}
/** Same as above, but for the named connection-handle lookup. */
private void testIOEClosingHandle( IOERunnable runnable, String name ) throws IOException {
testIOEClosingHandle( runnable, when( hBaseConnectionPool.getConnectionHandle( name ) ) );
}
private void testIOEClosingHandle( IOERunnable runnable, OngoingStubbing<HBaseConnectionHandle> ongoingStubbing )
throws IOException {
IOException ioException = new IOException();
ongoingStubbing.thenReturn( hBaseConnectionHandle );
doThrow( ioException ).when( hBaseConnectionHandle ).close();
try {
runnable.run();
} catch ( IOException e ) {
assertEquals( ioException, e.getCause() );
throw e;
}
}
/**
 * Stubs the given wrapper call to throw, then runs the operation and
 * verifies the handle is still closed (see EStillClosesRunnable).
 */
private void testEStillClosesHandle( OngoingStubbing<?> ongoingStubbing, IOERunnable runnable ) throws IOException {
Exception exception = new Exception();
ongoingStubbing.thenThrow( exception );
when( hBaseConnectionPool.getConnectionHandle() ).thenReturn( hBaseConnectionHandle );
new EStillClosesRunnable( runnable, exception ).run();
}
/** Named-handle variant of the overload above. */
private void testEStillClosesHandle( String name, OngoingStubbing<?> ongoingStubbing, IOERunnable runnable )
throws IOException {
Exception exception = new Exception();
ongoingStubbing.thenThrow( exception );
when( hBaseConnectionPool.getConnectionHandle( name ) ).thenReturn( hBaseConnectionHandle );
new EStillClosesRunnable( runnable, exception ).run();
}
/**
 * Variant for void operations: the caller must wire the returned exception
 * with doThrow before invoking the returned runnable.
 */
private StubberReturn testEStillClosesHandle( final IOERunnable runnable ) throws IOException {
final Exception exception = new Exception();
when( hBaseConnectionPool.getConnectionHandle() ).thenReturn( hBaseConnectionHandle );
return new StubberReturn( exception, new EStillClosesRunnable( runnable, exception ) );
}
/** A Runnable whose run() may throw IOException. */
private interface IOERunnable {
void run() throws IOException;
}
/** Pairs a stubbed exception with the runnable that asserts handle cleanup. */
private class StubberReturn {
private final Exception exception;
private final IOERunnable ioeRunnable;
private StubberReturn( Exception exception, IOERunnable ioeRunnable ) {
this.exception = exception;
this.ioeRunnable = ioeRunnable;
}
}
/**
 * Runs the wrapped operation and, when it fails, asserts the expected
 * cause, verifies the connection handle was closed, and rethrows.
 */
private class EStillClosesRunnable implements IOERunnable {
private final IOERunnable runnable;
private final Exception exception;
private EStillClosesRunnable( IOERunnable runnable, Exception exception ) {
this.runnable = runnable;
this.exception = exception;
}
@Override public void run() throws IOException {
try {
runnable.run();
} catch ( IOException e ) {
assertEquals( exception, e.getCause() );
verify( hBaseConnectionHandle ).close();
throw e;
}
}
}
}
| |
/**
* Copyright (C) 2012 KRM Associates, Inc. healtheme@krminc.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.krminc.phr.api.converter.clinical;
import com.krminc.phr.api.converter.UriResolver;
import com.krminc.phr.domain.clinical.Resource;
import com.krminc.phr.domain.clinical.ResourceCredential;
import java.net.URI;
import java.util.Date;
import java.util.List;
import javax.persistence.EntityManager;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.bind.annotation.XmlAttribute;
import javax.ws.rs.core.UriBuilder;
/**
 * JAXB converter that exposes a clinical {@code Resource} entity as XML.
 * Element getters return null when {@code expandLevel} is 0 or less, which
 * suppresses them from the marshalled output (reference-only representation).
 *
 * @author dshaw
 */
@XmlRootElement(name = "resource")
public class ResourceConverter {
private Resource entity;
private URI uri;
// Number of levels of the entity graph to expose; 0 emits only the uri attribute.
private int expandLevel;
/** Creates a new instance of ResourceConverter */
public ResourceConverter() {
entity = new Resource();
}
/**
 * Creates a new instance of ResourceConverter.
 *
 * @param entity associated entity
 * @param uri associated uri
 * @param expandLevel indicates the number of levels the entity graph should be expanded
 * @param isUriExtendable indicates whether the uri can be extended with this entity's id
 */
public ResourceConverter(Resource entity, URI uri, int expandLevel, boolean isUriExtendable) {
this.entity = entity;
this.uri = (isUriExtendable) ? UriBuilder.fromUri(uri).path(entity.getId() + "/").build() : uri;
this.expandLevel = expandLevel;
// getResourceCredentials();
}
/**
 * Creates a new instance of ResourceConverter.
 *
 * @param entity associated entity
 * @param uri associated uri
 * @param expandLevel indicates the number of levels the entity graph should be expanded
 */
public ResourceConverter(Resource entity, URI uri, int expandLevel) {
this(entity, uri, expandLevel, false);
}
/**
 * Getter for id.
 *
 * @return value for id, or null when not expanded
 */
@XmlElement
public Long getId() {
return (expandLevel > 0) ? entity.getId() : null;
}
/**
 * Setter for id.
 *
 * @param value the value to set
 */
public void setId(Long value) {
entity.setId(value);
}
/**
 * Getter for active.
 *
 * @return value for active, or null when not expanded
 */
@XmlElement
public Boolean getActive() {
return (expandLevel > 0) ? entity.getActive() : null;
}
/**
 * Setter for active.
 *
 * @param value the value to set
 */
public void setActive(Boolean value) {
entity.setActive(value);
}
/**
 * Getter for type.
 *
 * @return value for type, or null when not expanded
 */
@XmlElement
public String getType() {
return (expandLevel > 0) ? entity.getType() : null;
}
/**
 * Setter for type.
 *
 * @param value the value to set
 */
public void setType(String value) {
entity.setType(value);
}
/**
 * Getter for displayName.
 *
 * @return value for displayName, or null when not expanded
 */
@XmlElement
public String getDisplayName() {
return (expandLevel > 0) ? entity.getDisplayName() : null;
}
/**
 * Setter for displayName.
 *
 * @param value the value to set
 */
public void setDisplayName(String value) {
entity.setDisplayName(value);
}
/**
 * Getter for resourceLocationPath.
 *
 * @return value for resourceLocationPath, or null when not expanded
 */
@XmlElement
public String getResourceLocationPath() {
return (expandLevel > 0) ? entity.getResourceLocationPath() : null;
}
/**
 * Setter for resourceLocationPath.
 *
 * @param value the value to set
 */
public void setResourceLocationPath(String value) {
entity.setResourceLocationPath(value);
}
/**
 * Getter for resourceNamespace.
 *
 * @return value for resourceNamespace, or null when not expanded
 */
@XmlElement
public String getResourceNamespace() {
return (expandLevel > 0) ? entity.getResourceNamespace() : null;
}
/**
 * Setter for resourceNamespace.
 *
 * @param value the value to set
 */
public void setResourceNamespace(String value) {
entity.setResourceNamespace(value);
}
/**
 * Getter for resourceLocalPart.
 *
 * @return value for resourceLocalPart, or null when not expanded
 */
@XmlElement
public String getResourceLocalPart() {
return (expandLevel > 0) ? entity.getResourceLocalPart() : null;
}
/**
 * Setter for resourceLocalPart.
 *
 * @param value the value to set
 */
public void setResourceLocalPart(String value) {
entity.setResourceLocalPart(value);
}
/**
 * Getter for createDate.
 *
 * @return value for createDate, or null when not expanded
 */
@XmlElement
public Date getCreateDate() {
return (expandLevel > 0) ? entity.getCreateDate() : null;
}
/**
 * Setter for createDate.
 *
 * @param value the value to set
 */
public void setCreateDate(Date value) {
entity.setCreateDate(value);
}
/**
 * Getter for lastUpdate.
 *
 * @return value for lastUpdate, or null when not expanded
 */
@XmlElement
public Date getLastUpdate() {
return (expandLevel > 0) ? entity.getLastUpdate() : null;
}
/**
 * Setter for lastUpdate.
 *
 * @param value the value to set
 */
public void setLastUpdate(Date value) {
entity.setLastUpdate(value);
}
/**
 * Getter for resourceCredentials.
 *
 * NOTE: marshalling of credentials is intentionally disabled (see also the
 * commented-out call in the four-argument constructor).
 *
 * @return value for resourceCredentials
 */
// @XmlElement
// public ResourceCredentialsConverter getResourceCredentials() {
// if (expandLevel > 0) {
// if (entity.getResourceCredentials() != null) {
// return new ResourceCredentialsConverter(entity.getResourceCredentials(), uri.resolve("resourceCredentials/"), expandLevel - 1);
// }
// }
// return null;
// }
/**
 * Setter for resourceCredentials.
 *
 * @param value the value to set
 */
// public void setResourceCredentials(ResourceCredentialsConverter value) {
// entity.setResourceCredentials((value != null) ? new java.util.ArrayList<ResourceCredential>(value.getEntities()) : null);
// }
/**
 * Returns the URI associated with this converter.
 *
 * @return the uri
 */
@XmlAttribute
public URI getUri() {
return uri;
}
/**
 * Sets the URI for this reference converter.
 *
 */
public void setUri(URI uri) {
this.uri = uri;
}
/**
 * Returns the Resource entity, lazily resolving it via UriResolver when it
 * has not been loaded yet (id is null after unmarshalling a bare reference).
 *
 * @return an entity
 */
@XmlTransient
public Resource getEntity() {
if (entity.getId() == null) {
ResourceConverter converter = UriResolver.getInstance().resolve(ResourceConverter.class, uri);
if (converter != null) {
entity = converter.getEntity();
}
}
return entity;
}
/**
 * Returns the resolved Resource entity, replacing each credential with a
 * JPA reference attached to the given EntityManager.
 *
 * NOTE(review): assumes getResourceCredentials() never returns null —
 * confirm; a bare unmarshalled entity would NPE in the loop below.
 *
 * @return an resolved entity
 */
public Resource resolveEntity(EntityManager em) {
List<ResourceCredential> resourceCredentials = entity.getResourceCredentials();
List<ResourceCredential> newresourceCredentials = new java.util.ArrayList<ResourceCredential>();
for (ResourceCredential item : resourceCredentials) {
newresourceCredentials.add(em.getReference(ResourceCredential.class, item.getId()));
}
entity.setResourceCredentials(newresourceCredentials);
return entity;
}
}
| |
package com.planet_ink.coffee_mud.Abilities.Traps;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2003-2016 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * A room trap: once sprung, the exits seal and the walls close in, crushing
 * everyone inside (except the trap's owner and group) until the trap winds
 * down and the walls retract.
 *
 * Fix: {@code invoker()} may be null (e.g. a trap restored without an owner);
 * tick() and spring() previously dereferenced it unconditionally and would
 * throw a NullPointerException. Behavior with a live invoker is unchanged.
 */
public class Trap_CrushingRoom extends StdTrap
{
	@Override
	public String ID()
	{
		return "Trap_CrushingRoom";
	}

	private final static String localizedName = CMLib.lang().L("crushing room");

	@Override
	public String name()
	{
		return localizedName;
	}

	/** This trap can only be placed on rooms. */
	@Override
	protected int canAffectCode()
	{
		return Ability.CAN_ROOMS;
	}

	@Override
	protected int canTargetCode()
	{
		return 0;
	}

	@Override
	protected int trapLevel()
	{
		return 24;
	}

	@Override
	public String requiresToSet()
	{
		return "100 pounds of stone";
	}

	@Override
	public int baseRejuvTime(int level)
	{
		return 16;
	}

	/**
	 * Sets the trap, consuming 100 pounds of the most plentiful rock-type
	 * resource at the setter's location.
	 */
	@Override
	public Trap setTrap(MOB mob, Physical P, int trapBonus, int qualifyingClassLevel, boolean perm)
	{
		if(P==null)
			return null;
		if(mob!=null)
		{
			final Item I=findMostOfMaterial(mob.location(),RawMaterial.MATERIAL_ROCK);
			if(I!=null)
				super.destroyResources(mob.location(),I.material(),100);
		}
		return super.setTrap(mob,P,trapBonus,qualifyingClassLevel,perm);
	}

	@Override
	public List<Item> getTrapComponents()
	{
		final List<Item> V=new Vector<Item>();
		for(int i=0;i<100;i++)
			V.add(CMLib.materials().makeItemResource(RawMaterial.RESOURCE_STONE));
		return V;
	}

	/**
	 * Requires at least 100 pounds of stone on the ground and an indoor room.
	 */
	@Override
	public boolean canSetTrapOn(MOB mob, Physical P)
	{
		if(!super.canSetTrapOn(mob,P))
			return false;
		if(mob!=null)
		{
			final Item I=findMostOfMaterial(mob.location(),RawMaterial.MATERIAL_ROCK);
			if((I==null)
			||(super.findNumberOfResource(mob.location(),I.material())<100))
			{
				mob.tell(L("You'll need to set down at least 100 pounds of stone first."));
				return false;
			}
		}
		if(P instanceof Room)
		{
			final Room R=(Room)P;
			if((R.domainType()&Room.INDOORS)==0)
			{
				if(mob!=null)
					mob.tell(L("You can only set this trap indoors."));
				return false;
			}
		}
		return true;
	}

	/**
	 * While sprung and counting down, blocks all movement into and out of
	 * the trapped room.
	 */
	@Override
	public boolean okMessage(final Environmental myHost, final CMMsg msg)
	{
		if((sprung)
		&&(affected!=null)
		&&(!disabled())
		&&(tickDown>=0))
		{
			if(((msg.targetMinor()==CMMsg.TYP_LEAVE)
			||(msg.targetMinor()==CMMsg.TYP_FLEE))
			&&(msg.amITarget(affected)))
			{
				msg.source().tell(L("The exits are blocked! You can't get out!"));
				return false;
			}
			else
			if((msg.targetMinor()==CMMsg.TYP_ENTER)
			&&(msg.amITarget(affected)))
			{
				msg.source().tell(L("The entry to that room is blocked!"));
				return false;
			}
		}
		return super.okMessage(myHost,msg);
	}

	/**
	 * Drives the sprung trap: warning message while tickDown>13, crushing
	 * damage each tick while 13>=tickDown>4, then the walls retract.
	 */
	@Override
	public boolean tick(Tickable ticking, int tickID)
	{
		if((tickID==Tickable.TICKID_TRAP_RESET)&&(getReset()>0))
		{
			if((sprung)
			&&(affected instanceof Room)
			&&(!disabled())
			&&(tickDown>=0))
			{
				final Room R=(Room)affected;
				if(tickDown>13)
					R.showHappens(CMMsg.MSG_OK_VISUAL,L("The walls start closing in around you!"));
				else
				if(tickDown>4)
				{
					// Cache the invoker once; it may be null for ownerless traps,
					// in which case no damage can be attributed and none is dealt.
					final MOB invokerM=invoker();
					if(invokerM!=null)
					{
						for(int i=0;i<R.numInhabitants();i++)
						{
							final MOB M=R.fetchInhabitant(i);
							if((M!=null)&&(M!=invokerM))
							if(invokerM.mayIFight(M))
							{
								final int damage=CMLib.dice().roll(trapLevel()+abilityCode(),30,1);
								CMLib.combat().postDamage(invokerM,M,this,damage,CMMsg.MASK_MALICIOUS|CMMsg.MASK_ALWAYS|CMMsg.TYP_JUSTICE,Weapon.TYPE_BASHING,L("The crushing walls <DAMAGE> <T-NAME>!"));
							}
						}
					}
				}
				else
				{
					R.showHappens(CMMsg.MSG_OK_VISUAL,L("The walls begin retracting..."));
				}
			}
		}
		return super.tick(ticking,tickID);
	}

	/**
	 * Springs the trap on the target unless the target saves or belongs to
	 * the invoker's group.
	 */
	@Override
	public void spring(MOB target)
	{
		if((target!=invoker())&&(target.location()!=null))
		{
			// Guard against a null invoker before consulting group membership.
			if((doesSaveVsTraps(target))
			||((invoker()!=null)&&(invoker().getGroupMembers(new HashSet<MOB>()).contains(target))))
				target.location().show(target,null,null,CMMsg.MASK_ALWAYS|CMMsg.MSG_NOISE,L("<S-NAME> avoid(s) setting off a trap!"));
			else
			if(target.location().show(target,target,this,CMMsg.MASK_ALWAYS|CMMsg.MSG_NOISE,L("<S-NAME> trigger(s) a trap!")))
			{
				super.spring(target);
				target.location().showHappens(CMMsg.MSG_OK_VISUAL,L("The exits are blocked off! The walls start closing in!"));
			}
		}
	}
}
| |
package org.umlg.sqlg.test.gremlincompile;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.DefaultGraphTraversal;
import org.apache.tinkerpop.gremlin.structure.T;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.umlg.sqlg.structure.SqlgGraph;
import org.umlg.sqlg.test.BaseTest;
import java.util.List;
/**
* Date: 2015/01/01
* Time: 4:38 PM
*/
public class TestGremlinCompileV extends BaseTest {
// Enables Sqlg's distributed-notification mode on Postgres so each test can
// also verify results against the second, replicated graph (sqlgGraph1).
@BeforeClass
public static void beforeClass() {
BaseTest.beforeClass();
if (isPostgres()) {
configuration.addProperty("distributed", true);
}
}
@Test
public void testSimpleOutOut() throws InterruptedException {
Vertex a = this.sqlgGraph.addVertex(T.label, "A", "name", "a");
Vertex b = this.sqlgGraph.addVertex(T.label, "B", "name", "b");
a.addEdge("ab", b);
this.sqlgGraph.tx().commit();
testSimpleOutOut_assert(this.sqlgGraph, a);
// When a second (distributed) graph exists, wait for the change
// notification to propagate and re-run the same assertions against it.
if (this.sqlgGraph1 != null) {
Thread.sleep(SLEEP_TIME);
testSimpleOutOut_assert(this.sqlgGraph1, a);
}
}
// Asserts that a.out() starts as two steps, is compiled down to one on
// execution, and yields exactly the single adjacent vertex.
private void testSimpleOutOut_assert(SqlgGraph sqlgGraph, Vertex a) {
    final DefaultGraphTraversal<Vertex, Vertex> compiled =
            (DefaultGraphTraversal<Vertex, Vertex>) vertexTraversal(sqlgGraph, a).out();
    // Before execution the traversal still holds both steps.
    Assert.assertEquals(2, compiled.getSteps().size());
    final List<Vertex> result = compiled.toList();
    // Execution collapses the traversal into one compiled SQL step.
    Assert.assertEquals(1, compiled.getSteps().size());
    Assert.assertEquals(1, result.size());
}
@Test
public void testOutOut() throws InterruptedException {
Vertex a = this.sqlgGraph.addVertex(T.label, "A", "name", "a");
Vertex b = this.sqlgGraph.addVertex(T.label, "B", "name", "b");
Vertex c = this.sqlgGraph.addVertex(T.label, "C", "nAmE", "c");
Vertex d1 = this.sqlgGraph.addVertex(T.label, "D", "NAME", "d1");
Vertex d2 = this.sqlgGraph.addVertex(T.label, "D", "NAME", "d2");
Vertex e = this.sqlgGraph.addVertex(T.label, "E", "NAME", "e");
a.addEdge("outB", b);
a.addEdge("outE", e);
b.addEdge("outC", c);
b.addEdge("outC", c);
b.addEdge("outD", d1);
b.addEdge("outD", d2);
this.sqlgGraph.tx().commit();
tetOutOut_assert(this.sqlgGraph, a, c, d1, d2);
if (this.sqlgGraph1 != null) {
Thread.sleep(SLEEP_TIME);
tetOutOut_assert(this.sqlgGraph1, a, c, d1, d2);
}
}
private void tetOutOut_assert(SqlgGraph sqlgGraph, Vertex a, Vertex c, Vertex d1, Vertex d2) {
DefaultGraphTraversal<Vertex, Vertex> traversal = (DefaultGraphTraversal<Vertex, Vertex>) vertexTraversal(sqlgGraph, a).out().out();
Assert.assertEquals(3, traversal.getSteps().size());
List<Vertex> vertices = traversal.toList();
Assert.assertEquals(1, traversal.getSteps().size());
Assert.assertEquals(4, vertices.size());
Assert.assertTrue(vertices.contains(c));
Assert.assertTrue(vertices.contains(d1));
Assert.assertTrue(vertices.contains(d2));
int count = 0;
for (Vertex vertex : vertices) {
if (vertex.equals(c)) {
count++;
}
}
Assert.assertEquals(2, count);
Assert.assertEquals("c", vertices.get(vertices.indexOf(c)).value("nAmE"));
Assert.assertEquals("d1", vertices.get(vertices.indexOf(d1)).value("NAME"));
Assert.assertEquals("d2", vertices.get(vertices.indexOf(d2)).value("NAME"));
}
@Test
public void testOutOutWithLabels() throws InterruptedException {
Vertex a = this.sqlgGraph.addVertex(T.label, "A", "name", "a");
Vertex b = this.sqlgGraph.addVertex(T.label, "B", "name", "b");
Vertex c = this.sqlgGraph.addVertex(T.label, "C", "nAmE", "c");
Vertex d1 = this.sqlgGraph.addVertex(T.label, "D", "NAME", "d1");
Vertex d2 = this.sqlgGraph.addVertex(T.label, "D", "NAME", "d2");
Vertex e = this.sqlgGraph.addVertex(T.label, "E", "NAME", "e");
a.addEdge("outB", b);
a.addEdge("outE", e);
b.addEdge("outC", c);
b.addEdge("outC", c);
b.addEdge("outD", d1);
b.addEdge("outD", d2);
this.sqlgGraph.tx().commit();
testOutOutWithLabels_assert(this.sqlgGraph, a, c, d1, d2);
if (this.sqlgGraph1 != null) {
Thread.sleep(SLEEP_TIME);
testOutOutWithLabels_assert(this.sqlgGraph1, a, c, d1, d2);
}
}
private void testOutOutWithLabels_assert(SqlgGraph sqlgGraph, Vertex a, Vertex c, Vertex d1, Vertex d2) {
DefaultGraphTraversal<Vertex, Vertex> traversal = (DefaultGraphTraversal<Vertex, Vertex>) vertexTraversal(sqlgGraph, a)
.out("outB", "outE").out("outC", "outD");
Assert.assertEquals(3, traversal.getSteps().size());
List<Vertex> vertices = traversal.toList();
Assert.assertEquals(1, traversal.getSteps().size());
Assert.assertEquals(4, vertices.size());
Assert.assertTrue(vertices.contains(c));
Assert.assertTrue(vertices.contains(d1));
Assert.assertTrue(vertices.contains(d2));
int count = 0;
for (Vertex vertex : vertices) {
if (vertex.equals(c)) {
count++;
}
}
Assert.assertEquals(2, count);
Assert.assertEquals("c", vertices.get(vertices.indexOf(c)).value("nAmE"));
Assert.assertEquals("d1", vertices.get(vertices.indexOf(d1)).value("NAME"));
Assert.assertEquals("d2", vertices.get(vertices.indexOf(d2)).value("NAME"));
}
@Test
public void testOutOutWithLabels2() throws InterruptedException {
Vertex a = this.sqlgGraph.addVertex(T.label, "A", "name", "a");
Vertex b = this.sqlgGraph.addVertex(T.label, "B", "name", "b");
Vertex c = this.sqlgGraph.addVertex(T.label, "C", "nAmE", "c");
Vertex d1 = this.sqlgGraph.addVertex(T.label, "D", "NAME", "d1");
Vertex d2 = this.sqlgGraph.addVertex(T.label, "D", "NAME", "d2");
Vertex e = this.sqlgGraph.addVertex(T.label, "E", "NAME", "e");
a.addEdge("outB", b);
a.addEdge("outE", e);
b.addEdge("outC", c);
b.addEdge("outC", c);
b.addEdge("outD", d1);
b.addEdge("outD", d2);
this.sqlgGraph.tx().commit();
testOutOutWithLabels2_assert(this.sqlgGraph, a, c);
if (this.sqlgGraph1 != null) {
Thread.sleep(SLEEP_TIME);
testOutOutWithLabels2_assert(this.sqlgGraph1, a, c);
}
}
private void testOutOutWithLabels2_assert(SqlgGraph sqlgGraph, Vertex a, Vertex c) {
DefaultGraphTraversal<Vertex, Vertex> traversal = (DefaultGraphTraversal<Vertex, Vertex>) vertexTraversal(sqlgGraph, a).out("outB").out("outC");
Assert.assertEquals(3, traversal.getSteps().size());
List<Vertex> vertices = traversal.toList();
Assert.assertEquals(1, traversal.getSteps().size());
Assert.assertEquals(2, vertices.size());
Assert.assertTrue(vertices.contains(c));
int count = 0;
for (Vertex vertex : vertices) {
if (vertex.equals(c)) {
count++;
}
}
Assert.assertEquals(2, count);
Assert.assertEquals("c", vertices.get(vertices.indexOf(c)).value("nAmE"));
}
@Test
public void testInIn() throws InterruptedException {
Vertex a = this.sqlgGraph.addVertex(T.label, "A", "name", "a");
Vertex b = this.sqlgGraph.addVertex(T.label, "B", "name", "b");
Vertex c = this.sqlgGraph.addVertex(T.label, "C", "nAmE", "c");
Vertex d1 = this.sqlgGraph.addVertex(T.label, "D", "NAME", "d1");
Vertex d2 = this.sqlgGraph.addVertex(T.label, "D", "NAME", "d2");
Vertex e = this.sqlgGraph.addVertex(T.label, "E", "NAME", "e");
a.addEdge("outB", b);
a.addEdge("outE", e);
b.addEdge("outC", c);
b.addEdge("outC", c);
b.addEdge("outD", d1);
b.addEdge("outD", d2);
this.sqlgGraph.tx().commit();
testInIn_assert(this.sqlgGraph, a, d1);
if (this.sqlgGraph1 != null) {
Thread.sleep(SLEEP_TIME);
testInIn_assert(this.sqlgGraph1, a, d1);
}
}
private void testInIn_assert(SqlgGraph sqlgGraph, Vertex a, Vertex d1) {
DefaultGraphTraversal<Vertex, Long> traversal = (DefaultGraphTraversal<Vertex, Long>) vertexTraversal(sqlgGraph, d1).in().in().count();
Assert.assertEquals(4, traversal.getSteps().size());
Assert.assertEquals(1, traversal.next().intValue());
Assert.assertEquals(3, traversal.getSteps().size());
Assert.assertEquals(a, vertexTraversal(sqlgGraph, d1).in().in().next());
}
@Test
public void testInOutInOut() throws InterruptedException {
Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a");
Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
Vertex b2 = this.sqlgGraph.addVertex(T.label, "B", "name", "b2");
Vertex b3 = this.sqlgGraph.addVertex(T.label, "B", "name", "b3");
Vertex c1 = this.sqlgGraph.addVertex(T.label, "C", "name", "c1");
Vertex c2 = this.sqlgGraph.addVertex(T.label, "C", "name", "c2");
Vertex c3 = this.sqlgGraph.addVertex(T.label, "C", "name", "c3");
a1.addEdge("a_outB", b1);
a1.addEdge("a_outB", b2);
a1.addEdge("a_outB", b3);
c1.addEdge("c_outB", b1);
c2.addEdge("c_outB", b2);
c3.addEdge("c_outB", b3);
this.sqlgGraph.tx().commit();
Assert.assertEquals(6, vertexTraversal(this.sqlgGraph, a1).out().in().count().next().intValue());
Vertex e1 = this.sqlgGraph.addVertex(T.label, "E", "name", "e1");
Vertex e2 = this.sqlgGraph.addVertex(T.label, "E", "name", "e2");
Vertex e3 = this.sqlgGraph.addVertex(T.label, "E", "name", "e3");
Vertex e4 = this.sqlgGraph.addVertex(T.label, "E", "name", "e4");
Vertex e5 = this.sqlgGraph.addVertex(T.label, "E", "name", "e5");
Vertex e6 = this.sqlgGraph.addVertex(T.label, "E", "name", "e6");
Vertex e7 = this.sqlgGraph.addVertex(T.label, "E", "name", "e7");
c1.addEdge("outE", e1);
c2.addEdge("outE", e2);
c2.addEdge("outE", e3);
c2.addEdge("outE", e4);
c3.addEdge("outE", e5);
c3.addEdge("outE", e6);
c3.addEdge("outE", e7);
this.sqlgGraph.tx().commit();
testInOutInOut_assert(this.sqlgGraph, a1);
if (this.sqlgGraph1 != null) {
Thread.sleep(SLEEP_TIME);
testInOutInOut_assert(this.sqlgGraph1, a1);
}
}
private void testInOutInOut_assert(SqlgGraph sqlgGraph, Vertex a1) {
DefaultGraphTraversal<Vertex, Long> traversal = (DefaultGraphTraversal<Vertex, Long>) vertexTraversal(sqlgGraph, a1).out().in().out().count();
Assert.assertEquals(5, traversal.getSteps().size());
Assert.assertEquals(19, traversal.next().intValue());
Assert.assertEquals(3, traversal.getSteps().size());
}
@Test
public void testInOutInOut3() throws InterruptedException {
Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a");
Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
Vertex b2 = this.sqlgGraph.addVertex(T.label, "B", "name", "b2");
Vertex b3 = this.sqlgGraph.addVertex(T.label, "B", "name", "b3");
Vertex c1 = this.sqlgGraph.addVertex(T.label, "C", "name", "c1");
Vertex c2 = this.sqlgGraph.addVertex(T.label, "C", "name", "c2");
Vertex c3 = this.sqlgGraph.addVertex(T.label, "C", "name", "c3");
Vertex e1 = this.sqlgGraph.addVertex(T.label, "E", "name", "e1");
Vertex e2 = this.sqlgGraph.addVertex(T.label, "E", "name", "e2");
Vertex e3 = this.sqlgGraph.addVertex(T.label, "E", "name", "e3");
Vertex e4 = this.sqlgGraph.addVertex(T.label, "E", "name", "e4");
Vertex e5 = this.sqlgGraph.addVertex(T.label, "E", "name", "e5");
Vertex e6 = this.sqlgGraph.addVertex(T.label, "E", "name", "e6");
Vertex e7 = this.sqlgGraph.addVertex(T.label, "E", "name", "e7");
a1.addEdge("a_outB", b1);
a1.addEdge("a_outB", b2);
a1.addEdge("a_outB", b3);
c1.addEdge("c_outB", b1);
c2.addEdge("c_outB", b2);
c3.addEdge("c_outB", b3);
c1.addEdge("outE", e1);
c2.addEdge("outE", e2);
c2.addEdge("outE", e3);
c2.addEdge("outE", e4);
c3.addEdge("outE", e5);
c3.addEdge("outE", e6);
c3.addEdge("outE", e7);
this.sqlgGraph.tx().commit();
testInOutinOut3_assert(this.sqlgGraph, a1);
if (this.sqlgGraph1 != null) {
Thread.sleep(1000);
testInOutinOut3_assert(this.sqlgGraph1, a1);
}
}
private void testInOutinOut3_assert(SqlgGraph sqlgGraph, Vertex a1) {
DefaultGraphTraversal<Vertex, Long> traversal = (DefaultGraphTraversal<Vertex, Long>) sqlgGraph.traversal().V(a1.id()).out().in().out().count();
Assert.assertEquals(5, traversal.getSteps().size());
Assert.assertEquals(19, traversal.next().intValue());
Assert.assertEquals(3, traversal.getSteps().size());
}
@Test
public void testInOutToSelf() throws InterruptedException {
Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a1");
Vertex a2 = this.sqlgGraph.addVertex(T.label, "A", "name", "a2");
Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
a1.addEdge("knows", b1);
b1.addEdge("knownBy", a2);
//and another
Vertex a3 = this.sqlgGraph.addVertex(T.label, "A", "name", "a3");
Vertex a4 = this.sqlgGraph.addVertex(T.label, "A", "name", "a4");
Vertex b2 = this.sqlgGraph.addVertex(T.label, "B", "name", "b2");
a3.addEdge("knows", b2);
b2.addEdge("knownBy", a4);
this.sqlgGraph.tx().commit();
testInOutToSelf_assert(this.sqlgGraph, a1, a2);
if (this.sqlgGraph1 != null) {
Thread.sleep(SLEEP_TIME);
testInOutToSelf_assert(this.sqlgGraph1, a1, a2);
}
}
private void testInOutToSelf_assert(SqlgGraph sqlgGraph, Vertex a1, Vertex a2) {
DefaultGraphTraversal<Vertex, Long> traversal = (DefaultGraphTraversal<Vertex, Long>) vertexTraversal(sqlgGraph, a1).out().out().count();
Assert.assertEquals(4, traversal.getSteps().size());
Assert.assertEquals(1, traversal.next().intValue());
Assert.assertEquals(3, traversal.getSteps().size());
DefaultGraphTraversal<Vertex, Vertex> traversal1 = (DefaultGraphTraversal<Vertex, Vertex>) vertexTraversal(sqlgGraph, a1).out().out();
Assert.assertEquals(3, traversal1.getSteps().size());
Assert.assertEquals(a2, traversal1.next());
Assert.assertEquals(1, traversal1.getSteps().size());
}
@Test
public void testOutOutOutToSelf() throws InterruptedException {
Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a1");
Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
Vertex c1 = this.sqlgGraph.addVertex(T.label, "C", "name", "c1");
Vertex b2 = this.sqlgGraph.addVertex(T.label, "B", "name", "b2");
a1.addEdge("aOutB", b1);
b1.addEdge("bOutC", c1);
c1.addEdge("cOutB", b2);
this.sqlgGraph.tx().commit();
testOutOutoutToSelf_assert(this.sqlgGraph, a1, b2);
if (this.sqlgGraph1 != null) {
Thread.sleep(SLEEP_TIME);
testOutOutoutToSelf_assert(this.sqlgGraph1, a1, b2);
}
}
private void testOutOutoutToSelf_assert(SqlgGraph sqlgGraph, Vertex a1, Vertex b2) {
DefaultGraphTraversal<Vertex, Long> traversal = (DefaultGraphTraversal<Vertex, Long>) vertexTraversal(sqlgGraph, a1).out().out().out().count();
Assert.assertEquals(5, traversal.getSteps().size());
Assert.assertEquals(1, traversal.next().intValue());
Assert.assertEquals(3, traversal.getSteps().size());
DefaultGraphTraversal<Vertex, Vertex> traversal1 = (DefaultGraphTraversal<Vertex, Vertex>) vertexTraversal(sqlgGraph, a1).out().out().out();
Assert.assertEquals(4, traversal1.getSteps().size());
Assert.assertEquals(b2, traversal1.next());
Assert.assertEquals(1, traversal1.getSteps().size());
}
@Test
public void testOutInToSelf() throws InterruptedException {
Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a1");
Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
a1.addEdge("aOutB", b1);
this.sqlgGraph.tx().commit();
Assert.assertEquals(1, vertexTraversal(this.sqlgGraph, a1).out().in().count().next().intValue());
if (this.sqlgGraph1 != null) {
Thread.sleep(SLEEP_TIME);
Assert.assertEquals(1, vertexTraversal(this.sqlgGraph1, a1).out().in().count().next().intValue());
}
}
@Test
public void testInOutInOut2() throws InterruptedException {
Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a");
Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
Vertex b2 = this.sqlgGraph.addVertex(T.label, "B", "name", "b2");
Vertex b3 = this.sqlgGraph.addVertex(T.label, "B", "name", "b3");
a1.addEdge("a_outB", b1);
a1.addEdge("a_outB", b2);
a1.addEdge("a_outB", b3);
this.sqlgGraph.tx().commit();
DefaultGraphTraversal<Vertex, Long> traversal = (DefaultGraphTraversal<Vertex, Long>) vertexTraversal(this.sqlgGraph, a1).out().in().out().count();
Assert.assertEquals(5, traversal.getSteps().size());
Assert.assertEquals(9, traversal.next().intValue());
Assert.assertEquals(3, traversal.getSteps().size());
if (this.sqlgGraph1 != null) {
Thread.sleep(SLEEP_TIME);
DefaultGraphTraversal<Vertex, Long> traversal1 = (DefaultGraphTraversal<Vertex, Long>) vertexTraversal(this.sqlgGraph1, a1).out().in().out().count();
Assert.assertEquals(5, traversal1.getSteps().size());
Assert.assertEquals(9, traversal1.next().intValue());
Assert.assertEquals(3, traversal1.getSteps().size());
}
}
@Test
public void testEmptyTraversal() {
Vertex v1 = this.sqlgGraph.addVertex(T.label, "A");
Vertex v2 = this.sqlgGraph.addVertex(T.label, "B"); // v1.addEdge("ab", v2);
this.sqlgGraph.tx().commit();
vertexTraversal(this.sqlgGraph, v1).out("test");
}
@Test
public void testOutOutToSelf() throws InterruptedException {
Vertex a1 = this.sqlgGraph.addVertex(T.label, "ManagedObject", "name", "a1");
Vertex a2 = this.sqlgGraph.addVertex(T.label, "ManagedObject", "name", "a2");
a1.addEdge("hierarchyParent_hierarchy", a2);
this.sqlgGraph.tx().commit();
testOutOutToSelf_assert(this.sqlgGraph, a1, a2);
if (this.sqlgGraph1 != null) {
Thread.sleep(SLEEP_TIME);
testOutOutToSelf_assert(this.sqlgGraph1, a1, a2);
}
}
private void testOutOutToSelf_assert(SqlgGraph sqlgGraph, Vertex a1, Vertex a2) {
DefaultGraphTraversal<Vertex, Vertex> traversal = (DefaultGraphTraversal<Vertex, Vertex>) vertexTraversal(sqlgGraph, a1).out();
Assert.assertEquals(2, traversal.getSteps().size());
Assert.assertTrue(traversal.hasNext());
Assert.assertEquals(1, traversal.getSteps().size());
DefaultGraphTraversal<Vertex, Vertex> traversal1 = (DefaultGraphTraversal<Vertex, Vertex>) vertexTraversal(sqlgGraph, a2).out();
Assert.assertEquals(2, traversal1.getSteps().size());
Assert.assertFalse(traversal1.hasNext());
Assert.assertEquals(1, traversal1.getSteps().size());
DefaultGraphTraversal<Vertex, Vertex> traversal2 = (DefaultGraphTraversal<Vertex, Vertex>) vertexTraversal(sqlgGraph, a1).in();
Assert.assertEquals(2, traversal2.getSteps().size());
Assert.assertFalse(traversal2.hasNext());
Assert.assertEquals(1, traversal2.getSteps().size());
DefaultGraphTraversal<Vertex, Vertex> traversal3 = (DefaultGraphTraversal<Vertex, Vertex>) vertexTraversal(sqlgGraph, a2).in();
Assert.assertEquals(2, traversal3.getSteps().size());
Assert.assertTrue(traversal3.hasNext());
Assert.assertEquals(1, traversal3.getSteps().size());
}
}
| |
package com.dubboclub.dk.admin.service.impl;
import com.alibaba.dubbo.common.Constants;
import com.alibaba.dubbo.common.URL;
import com.alibaba.dubbo.common.utils.StringUtils;
import com.dubboclub.dk.admin.model.Override;
import com.dubboclub.dk.admin.model.Provider;
import com.dubboclub.dk.admin.service.AbstractService;
import com.dubboclub.dk.admin.service.OverrideService;
import com.dubboclub.dk.admin.service.ProviderService;
import com.dubboclub.dk.admin.sync.util.Pair;
import com.dubboclub.dk.admin.sync.util.SyncUtils;
import com.dubboclub.dk.admin.sync.util.Tool;
import java.util.*;
/**
* Created by bieber on 2015/6/3.
*/
/**
 * Registry-backed implementation of {@link ProviderService}.
 *
 * <p>Note: the model class {@code com.dubboclub.dk.admin.model.Override} shadows
 * {@code java.lang.Override} in this file, which is why the annotation is written
 * fully qualified as {@code @java.lang.Override} throughout.
 */
public class ProviderServiceImpl extends AbstractService implements ProviderService {

    private OverrideService overrideService;

    @java.lang.Override
    public List<Provider> listAllProvider() {
        return filterCategoryData(new ConvertURL2Entity<Provider>() {
            @java.lang.Override
            public Provider convert(Pair<Long, URL> pair) {
                return SyncUtils.url2Provider(pair);
            }
        },Constants.PROVIDERS_CATEGORY);
    }

    @java.lang.Override
    public List<Provider> listProviderByApplication(String appName) {
        return filterCategoryData(new ConvertURL2Entity<Provider>() {
            @java.lang.Override
            public Provider convert(Pair<Long, URL> pair) {
                return SyncUtils.url2Provider(pair);
            }
        },Constants.PROVIDERS_CATEGORY,Constants.APPLICATION_KEY,appName);
    }

    @java.lang.Override
    public List<Provider> listProviderByService(String service) {
        return filterCategoryData(new ConvertURL2Entity<Provider>() {
            // De-duplicates providers by host:port; converting returns null for
            // an address already seen (presumably filterCategoryData drops
            // nulls — TODO confirm).
            List<String> hadContained = new ArrayList<String>();
            @java.lang.Override
            public Provider convert(Pair<Long, URL> pair) {
                if(hadContained.contains(pair.getValue().getHost()+":"+pair.getValue().getPort())){
                    return null;
                }
                hadContained.add(pair.getValue().getHost()+":"+pair.getValue().getPort());
                return SyncUtils.url2Provider(pair);
            }
        },Constants.PROVIDERS_CATEGORY,Constants.INTERFACE_KEY,service);
    }

    @java.lang.Override
    public List<Provider> listProviderByConditions(String... conditions) {
        return filterCategoryData(new ConvertURL2Entity<Provider>() {
            @java.lang.Override
            public Provider convert(Pair<Long, URL> pair) {
                Provider provider = SyncUtils.url2Provider(pair);
                // Dynamic providers get their effective config merged in from
                // any matching overrides.
                if(provider.isDynamic()){
                    return overrideService.configProvider(provider);
                }
                return provider;
            }
        },Constants.PROVIDERS_CATEGORY,conditions);
    }

    @java.lang.Override
    public List<Provider> listProviderByServiceKey(String serviceKey) {
        return listProviderByConditions(Constants.INTERFACE_KEY, Tool.getInterface(serviceKey),Constants.GROUP_KEY,Tool.getGroup(serviceKey),Constants.VERSION_KEY,Tool.getVersion(serviceKey));
    }

    /**
     * Looks up a single provider by registry id, applying override config for
     * dynamic providers. Returns {@code null} when no such provider exists.
     */
    @java.lang.Override
    public Provider getProviderById(long id) {
        URL url = getOneById(Constants.PROVIDERS_CATEGORY,id);
        if(url!=null){
            Provider provider = SyncUtils.url2Provider(new Pair<Long, URL>(id, url));
            if(provider.isDynamic()){
                return overrideService.configProvider(provider);
            }else{
                return provider;
            }
        }
        return null;
    }

    /**
     * Updates a provider. Dynamic providers are updated indirectly by writing
     * an override (and pruning now-stale overrides); static providers are
     * rewritten in place in the registry.
     */
    @java.lang.Override
    public void updateProvider(Provider newProvider) {
        Provider oldProvider = getProviderById(newProvider.getId());
        if(newProvider.isDynamic()){
            Map<String,String> params = Tool.convertParametersMap(newProvider.getParameters());
            Override override = generateDefaultOverride(newProvider);
            // Translate the provider's "enabled" flag into an override
            // "disabled" param (absent enabled => not disabled).
            if(params.containsKey(Constants.ENABLED_KEY)&&!Boolean.parseBoolean(params.get(Constants.ENABLED_KEY))){
                override.setParams(Constants.DISABLED_KEY + "=true");
            }else{
                override.setParams(Constants.DISABLED_KEY + "=false");
            }
            overrideService.add(override);
            // Drop older overrides that now conflict with the one just added.
            List<Override> overrides = overrideService.listByProvider(oldProvider);
            URL editOverrideUrl = override.toUrl();
            for(Override item:overrides){
                URL overrideUrl = item.toUrl();
                if(overrideUrl.getParameter(Constants.DISABLED_KEY,false)!=editOverrideUrl.getParameter(Constants.DISABLED_KEY,false)){
                    overrideService.delete(item.getId());
                }else if(!StringUtils.isEmpty(params.get(Constants.WEIGHT_KEY))&&!params.get(Constants.WEIGHT_KEY).equals(overrideUrl.getParameter(Constants.WEIGHT_KEY))){
                    overrideService.delete(item.getId());
                }
            }
        }else{
            URL newURL = SyncUtils.provider2URL(newProvider);
            update(SyncUtils.provider2URL(oldProvider),newURL);
        }
    }

    /**
     * Builds an override carrying the provider's current weight/anyhost/group/
     * version/owner params, addressed to the provider's host and service.
     * NOTE(review): setParams is invoked several times in sequence — this
     * assumes Override.setParams accumulates "key=value" pairs rather than
     * replacing the previous value; confirm against the model class.
     */
    private Override generateDefaultOverride(Provider provider){
        Map<String,String> params = Tool.convertParametersMap(provider.getParameters());
        Override override = new Override();
        override.setAddress(provider.getAddress());
        override.setService(Tool.getInterface(provider.getServiceKey()));
        override.setEnabled(true);
        if(!StringUtils.isEmpty(params.get(Constants.WEIGHT_KEY))){
            override.setParams(Constants.WEIGHT_KEY+"="+params.get(Constants.WEIGHT_KEY));
        }
        override.setParams(Constants.ANYHOST_KEY+"="+params.get(Constants.ANYHOST_KEY));
        override.setParams(Constants.APPLICATION_KEY+"="+Constants.ANY_VALUE);
        if(!StringUtils.isEmpty(params.get(Constants.GROUP_KEY))){
            override.setParams(Constants.GROUP_KEY+"="+params.get(Constants.GROUP_KEY));
        }
        if(!StringUtils.isEmpty(params.get(Constants.VERSION_KEY))){
            override.setParams(Constants.VERSION_KEY+"="+ params.get(Constants.VERSION_KEY));
        }
        override.setParams("owner="+params.get("owner"));
        return override;
    }

    @java.lang.Override
    public void disable(Long id) {
        Provider provider = getProviderById(id);
        if(provider.isDynamic()){
            URL url = overrideService.configProviderURL(provider);
            url=url.addParameter(Constants.ENABLED_KEY, false);
            updateProvider(SyncUtils.url2Provider(new Pair<Long, URL>(id,url)));
        }else{
            provider.setEnabled(false);
            updateProvider(provider);
        }
    }

    @java.lang.Override
    public void enable(Long id) {
        Provider provider = getProviderById(id);
        if(provider.isDynamic()){
            URL url = overrideService.configProviderURL(provider);
            url=url.addParameter(Constants.ENABLED_KEY, true);
            updateProvider(SyncUtils.url2Provider(new Pair<Long, URL>(id,url)));
        }else{
            provider.setEnabled(true);
            updateProvider(provider);
        }
    }

    @java.lang.Override
    public void halfWeight(Long id) {
        Provider provider = getProviderById(id);
        if(provider.isDynamic()){
            URL url = overrideService.configProviderURL(provider);
            url=url.addParameter(Constants.WEIGHT_KEY, (int)(url.getParameter(Constants.WEIGHT_KEY,Constants.DEFAULT_WEIGHT)/2));
            updateProvider(SyncUtils.url2Provider(new Pair<Long, URL>(id,url)));
        }else{
            provider.setWeight(provider.getWeight()/2);
            updateProvider(provider);
        }
    }

    @java.lang.Override
    public void doubleWeight(Long id) {
        Provider provider = getProviderById(id);
        if(provider.isDynamic()){
            URL url = overrideService.configProviderURL(provider);
            url=url.addParameter(Constants.WEIGHT_KEY, url.getParameter(Constants.WEIGHT_KEY,Constants.DEFAULT_WEIGHT)*2);
            updateProvider(SyncUtils.url2Provider(new Pair<Long, URL>(id,url)));
        }else{
            provider.setWeight(provider.getWeight()*2);
            updateProvider(provider);
        }
    }

    @java.lang.Override
    public void delete(Long id) {
        Provider provider = getProviderById(id);
        // Only statically registered providers can be removed from the
        // registry; dynamic registrations are owned by the provider process.
        if(!provider.isDynamic()){
            delete(SyncUtils.provider2URL(provider));
        }
    }

    @java.lang.Override
    public void copy(Long id) {
        Provider provider = getProviderById(id);
        // The copy is always a disabled, static registration with a fresh
        // timestamp so it does not collide with the original URL.
        if(provider.isDynamic()){
            provider.setDynamic(false);
        }
        provider.setEnabled(false);
        URL url = SyncUtils.provider2URL(provider);
        url=url.addParameter(Constants.TIMESTAMP_KEY,System.currentTimeMillis());
        add(url);
    }

    /** Setter injection point for the collaborating {@link OverrideService}. */
    public void setOverrideService(OverrideService overrideService) {
        this.overrideService = overrideService;
    }
}
| |
package org.smoothbuild.acceptance.lang.assign.nongeneric;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.BLOB;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.BLOB_ARRAY;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.BLOB_ARRAY2;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.BOOL;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.BOOL_ARRAY;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.BOOL_ARRAY2;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.NOTHING;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.NOTHING_ARRAY;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.NOTHING_ARRAY2;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.STRING;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.STRING_ARRAY;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.STRING_ARRAY2;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.STRUCT_WITH_BOOL;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.STRUCT_WITH_BOOL_ARRAY;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.STRUCT_WITH_BOOL_ARRAY2;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.STRUCT_WITH_STRING;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.STRUCT_WITH_STRING_ARRAY;
import static org.smoothbuild.acceptance.lang.assign.spec.TestedType.STRUCT_WITH_STRING_ARRAY2;
import java.io.IOException;
import java.util.stream.Stream;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.smoothbuild.acceptance.AcceptanceTestCase;
import org.smoothbuild.acceptance.lang.assign.spec.TestSpec;
import org.smoothbuild.acceptance.lang.assign.spec.TestedType;
import org.smoothbuild.acceptance.testing.ReportError;
public abstract class AbstractAssignmentTestCase extends AcceptanceTestCase {
@ParameterizedTest
@MethodSource("assignment_test_specs")
public void assignment_is_verified(AssignmentTestSpec testSpec) throws IOException {
    // Build a one-off smooth module for this assignment spec and run it.
    createNativeJar(ReportError.class);
    String script = createTestScript(testSpec);
    createUserModule(script);
    runSmoothList();
    if (!testSpec.allowed) {
        // An illegal assignment must fail and report both type names.
        assertFinishedWithError();
        assertAssignmentError(testSpec.target.name, testSpec.source.name);
    } else {
        assertFinishedWithSuccess();
    }
}
/** Renders the smooth script under test for the given assignment spec. */
protected abstract String createTestScript(AssignmentTestSpec testSpec);
/**
 * Asserts that the failed run reported an assignment error naming the given
 * target and source types.
 */
protected abstract void assertAssignmentError(String targetType, String sourceType);
public static Stream<AssignmentTestSpec> assignment_test_specs() {
return Stream.of(
// Blob
allowedAssignment(BLOB, BLOB),
illegalAssignment(BLOB, BOOL),
allowedAssignment(BLOB, NOTHING),
illegalAssignment(BLOB, STRING),
illegalAssignment(BLOB, STRUCT_WITH_STRING),
illegalAssignment(BLOB, BLOB_ARRAY),
illegalAssignment(BLOB, BOOL_ARRAY),
illegalAssignment(BLOB, NOTHING_ARRAY),
illegalAssignment(BLOB, STRUCT_WITH_STRING_ARRAY),
illegalAssignment(BLOB, STRING_ARRAY),
illegalAssignment(BLOB, BLOB_ARRAY2),
illegalAssignment(BLOB, BOOL_ARRAY2),
illegalAssignment(BLOB, NOTHING_ARRAY2),
illegalAssignment(BLOB, STRUCT_WITH_STRING_ARRAY2),
illegalAssignment(BLOB, STRING_ARRAY2),
// Bool
illegalAssignment(BOOL, BLOB),
allowedAssignment(BOOL, BOOL),
allowedAssignment(BOOL, NOTHING),
illegalAssignment(BOOL, STRING),
illegalAssignment(BOOL, STRUCT_WITH_STRING),
illegalAssignment(BOOL, BLOB_ARRAY),
illegalAssignment(BOOL, BOOL_ARRAY),
illegalAssignment(BOOL, NOTHING_ARRAY),
illegalAssignment(BOOL, STRUCT_WITH_STRING_ARRAY),
illegalAssignment(BOOL, STRING_ARRAY),
illegalAssignment(BOOL, BLOB_ARRAY2),
illegalAssignment(BOOL, BOOL_ARRAY2),
illegalAssignment(BOOL, NOTHING_ARRAY2),
illegalAssignment(BOOL, STRUCT_WITH_STRING_ARRAY2),
illegalAssignment(BOOL, STRING_ARRAY2),
// Nothing
illegalAssignment(NOTHING, BLOB),
illegalAssignment(NOTHING, BOOL),
allowedAssignment(NOTHING, NOTHING),
illegalAssignment(NOTHING, STRING),
illegalAssignment(NOTHING, STRUCT_WITH_STRING),
illegalAssignment(NOTHING, BLOB_ARRAY),
illegalAssignment(NOTHING, BOOL_ARRAY),
illegalAssignment(NOTHING, NOTHING_ARRAY),
illegalAssignment(NOTHING, STRUCT_WITH_STRING_ARRAY),
illegalAssignment(NOTHING, STRING_ARRAY),
illegalAssignment(NOTHING, BLOB_ARRAY2),
illegalAssignment(NOTHING, BOOL_ARRAY2),
illegalAssignment(NOTHING, NOTHING_ARRAY2),
illegalAssignment(NOTHING, STRUCT_WITH_STRING_ARRAY2),
illegalAssignment(NOTHING, STRING_ARRAY2),
// String
illegalAssignment(STRING, BLOB),
illegalAssignment(STRING, BOOL),
allowedAssignment(STRING, NOTHING),
allowedAssignment(STRING, STRING),
allowedAssignment(STRING, STRUCT_WITH_STRING),
illegalAssignment(STRING, BLOB_ARRAY),
illegalAssignment(STRING, BOOL_ARRAY),
illegalAssignment(STRING, NOTHING_ARRAY),
illegalAssignment(STRING, STRUCT_WITH_STRING_ARRAY),
illegalAssignment(STRING, STRING_ARRAY),
illegalAssignment(STRING, BLOB_ARRAY2),
illegalAssignment(STRING, BOOL_ARRAY2),
illegalAssignment(STRING, NOTHING_ARRAY2),
illegalAssignment(STRING, STRUCT_WITH_STRING_ARRAY2),
illegalAssignment(STRING, STRING_ARRAY2),
// Struct
illegalAssignment(STRUCT_WITH_STRING, BLOB),
illegalAssignment(STRUCT_WITH_STRING, BOOL),
allowedAssignment(STRUCT_WITH_STRING, NOTHING),
illegalAssignment(STRUCT_WITH_STRING, STRING),
allowedAssignment(STRUCT_WITH_STRING, STRUCT_WITH_STRING),
illegalAssignment(STRUCT_WITH_STRING, BLOB_ARRAY),
illegalAssignment(STRUCT_WITH_STRING, BOOL_ARRAY),
illegalAssignment(STRUCT_WITH_STRING, NOTHING_ARRAY),
illegalAssignment(STRUCT_WITH_STRING, STRUCT_WITH_STRING_ARRAY),
illegalAssignment(STRUCT_WITH_STRING, STRING_ARRAY),
illegalAssignment(STRUCT_WITH_STRING, BLOB_ARRAY2),
illegalAssignment(STRUCT_WITH_STRING, BOOL_ARRAY2),
illegalAssignment(STRUCT_WITH_STRING, NOTHING_ARRAY2),
illegalAssignment(STRUCT_WITH_STRING, STRUCT_WITH_STRING_ARRAY2),
illegalAssignment(STRUCT_WITH_STRING, STRING_ARRAY2),
illegalAssignment(STRUCT_WITH_STRING, STRUCT_WITH_BOOL),
illegalAssignment(STRUCT_WITH_STRING, STRUCT_WITH_BOOL_ARRAY),
illegalAssignment(STRUCT_WITH_STRING, STRUCT_WITH_BOOL_ARRAY2),
// [Blob]
illegalAssignment(BLOB_ARRAY, BLOB),
illegalAssignment(BLOB_ARRAY, BOOL),
allowedAssignment(BLOB_ARRAY, NOTHING),
illegalAssignment(BLOB_ARRAY, STRING),
illegalAssignment(BLOB_ARRAY, STRUCT_WITH_STRING),
allowedAssignment(BLOB_ARRAY, BLOB_ARRAY),
illegalAssignment(BLOB_ARRAY, BOOL_ARRAY),
allowedAssignment(BLOB_ARRAY, NOTHING_ARRAY),
illegalAssignment(BLOB_ARRAY, STRUCT_WITH_STRING_ARRAY),
illegalAssignment(BLOB_ARRAY, STRING_ARRAY),
illegalAssignment(BLOB_ARRAY, BLOB_ARRAY2),
illegalAssignment(BLOB_ARRAY, BOOL_ARRAY2),
illegalAssignment(BLOB_ARRAY, NOTHING_ARRAY2),
illegalAssignment(BLOB_ARRAY, STRUCT_WITH_STRING_ARRAY2),
illegalAssignment(BLOB_ARRAY, STRING_ARRAY2),
// [Bool]
illegalAssignment(BOOL_ARRAY, BLOB),
illegalAssignment(BOOL_ARRAY, BOOL),
allowedAssignment(BOOL_ARRAY, NOTHING),
illegalAssignment(BOOL_ARRAY, STRING),
illegalAssignment(BOOL_ARRAY, STRUCT_WITH_STRING),
illegalAssignment(BOOL_ARRAY, BLOB_ARRAY),
allowedAssignment(BOOL_ARRAY, BOOL_ARRAY),
allowedAssignment(BOOL_ARRAY, NOTHING_ARRAY),
illegalAssignment(BOOL_ARRAY, STRUCT_WITH_STRING_ARRAY),
illegalAssignment(BOOL_ARRAY, STRING_ARRAY),
illegalAssignment(BOOL_ARRAY, BLOB_ARRAY2),
illegalAssignment(BOOL_ARRAY, BOOL_ARRAY2),
illegalAssignment(BOOL_ARRAY, NOTHING_ARRAY2),
illegalAssignment(BOOL_ARRAY, STRUCT_WITH_STRING_ARRAY2),
illegalAssignment(BOOL_ARRAY, STRING_ARRAY2),
// [Nothing]
illegalAssignment(NOTHING_ARRAY, BLOB),
illegalAssignment(NOTHING_ARRAY, BOOL),
allowedAssignment(NOTHING_ARRAY, NOTHING),
illegalAssignment(NOTHING_ARRAY, STRING),
illegalAssignment(NOTHING_ARRAY, STRUCT_WITH_STRING),
illegalAssignment(NOTHING_ARRAY, BLOB_ARRAY),
illegalAssignment(NOTHING_ARRAY, BOOL_ARRAY),
allowedAssignment(NOTHING_ARRAY, NOTHING_ARRAY),
illegalAssignment(NOTHING_ARRAY, STRUCT_WITH_STRING_ARRAY),
illegalAssignment(NOTHING_ARRAY, STRING_ARRAY),
illegalAssignment(NOTHING_ARRAY, BLOB_ARRAY2),
illegalAssignment(NOTHING_ARRAY, BOOL_ARRAY2),
illegalAssignment(NOTHING_ARRAY, NOTHING_ARRAY2),
illegalAssignment(NOTHING_ARRAY, STRUCT_WITH_STRING_ARRAY2),
illegalAssignment(NOTHING_ARRAY, STRING_ARRAY2),
// [String]
illegalAssignment(STRING_ARRAY, BLOB),
illegalAssignment(STRING_ARRAY, BOOL),
allowedAssignment(STRING_ARRAY, NOTHING),
illegalAssignment(STRING_ARRAY, STRING),
illegalAssignment(STRING_ARRAY, STRUCT_WITH_STRING),
illegalAssignment(STRING_ARRAY, BLOB_ARRAY),
illegalAssignment(STRING_ARRAY, BOOL_ARRAY),
allowedAssignment(STRING_ARRAY, NOTHING_ARRAY),
allowedAssignment(STRING_ARRAY, STRUCT_WITH_STRING_ARRAY),
allowedAssignment(STRING_ARRAY, STRING_ARRAY),
illegalAssignment(STRING_ARRAY, BLOB_ARRAY2),
illegalAssignment(STRING_ARRAY, BOOL_ARRAY2),
illegalAssignment(STRING_ARRAY, NOTHING_ARRAY2),
illegalAssignment(STRING_ARRAY, STRUCT_WITH_STRING_ARRAY2),
illegalAssignment(STRING_ARRAY, STRING_ARRAY2),
// [Struct]
illegalAssignment(STRUCT_WITH_STRING_ARRAY, BLOB),
illegalAssignment(STRUCT_WITH_STRING_ARRAY, BOOL),
allowedAssignment(STRUCT_WITH_STRING_ARRAY, NOTHING),
illegalAssignment(STRUCT_WITH_STRING_ARRAY, STRING),
illegalAssignment(STRUCT_WITH_STRING_ARRAY, STRUCT_WITH_STRING),
illegalAssignment(STRUCT_WITH_STRING_ARRAY, BLOB_ARRAY),
illegalAssignment(STRUCT_WITH_STRING_ARRAY, BOOL_ARRAY),
allowedAssignment(STRUCT_WITH_STRING_ARRAY, NOTHING_ARRAY),
allowedAssignment(STRUCT_WITH_STRING_ARRAY, STRUCT_WITH_STRING_ARRAY),
illegalAssignment(STRUCT_WITH_STRING_ARRAY, STRING_ARRAY),
illegalAssignment(STRUCT_WITH_STRING_ARRAY, BLOB_ARRAY2),
illegalAssignment(STRUCT_WITH_STRING_ARRAY, BOOL_ARRAY2),
illegalAssignment(STRUCT_WITH_STRING_ARRAY, NOTHING_ARRAY2),
illegalAssignment(STRUCT_WITH_STRING_ARRAY, STRUCT_WITH_STRING_ARRAY2),
illegalAssignment(STRUCT_WITH_STRING_ARRAY, STRING_ARRAY2),
illegalAssignment(STRUCT_WITH_STRING_ARRAY, STRUCT_WITH_BOOL),
illegalAssignment(STRUCT_WITH_STRING_ARRAY, STRUCT_WITH_BOOL_ARRAY),
illegalAssignment(STRUCT_WITH_STRING_ARRAY, STRUCT_WITH_BOOL_ARRAY2),
// [[Blob]]
illegalAssignment(BLOB_ARRAY2, BLOB),
illegalAssignment(BLOB_ARRAY2, BOOL),
allowedAssignment(BLOB_ARRAY2, NOTHING),
illegalAssignment(BLOB_ARRAY2, STRING),
illegalAssignment(BLOB_ARRAY2, STRUCT_WITH_STRING),
illegalAssignment(BLOB_ARRAY2, BLOB_ARRAY),
illegalAssignment(BLOB_ARRAY2, BOOL_ARRAY),
allowedAssignment(BLOB_ARRAY2, NOTHING_ARRAY),
illegalAssignment(BLOB_ARRAY2, STRUCT_WITH_STRING_ARRAY),
illegalAssignment(BLOB_ARRAY2, STRING_ARRAY),
allowedAssignment(BLOB_ARRAY2, BLOB_ARRAY2),
illegalAssignment(BLOB_ARRAY2, BOOL_ARRAY2),
allowedAssignment(BLOB_ARRAY2, NOTHING_ARRAY2),
illegalAssignment(BLOB_ARRAY2, STRUCT_WITH_STRING_ARRAY2),
illegalAssignment(BLOB_ARRAY2, STRING_ARRAY2),
// [[Bool]]
illegalAssignment(BOOL_ARRAY2, BLOB),
illegalAssignment(BOOL_ARRAY2, BOOL),
allowedAssignment(BOOL_ARRAY2, NOTHING),
illegalAssignment(BOOL_ARRAY2, STRING),
illegalAssignment(BOOL_ARRAY2, STRUCT_WITH_STRING),
illegalAssignment(BOOL_ARRAY2, BLOB_ARRAY),
illegalAssignment(BOOL_ARRAY2, BOOL_ARRAY),
allowedAssignment(BOOL_ARRAY2, NOTHING_ARRAY),
illegalAssignment(BOOL_ARRAY2, STRUCT_WITH_STRING_ARRAY),
illegalAssignment(BOOL_ARRAY2, STRING_ARRAY),
illegalAssignment(BOOL_ARRAY2, BLOB_ARRAY2),
allowedAssignment(BOOL_ARRAY2, BOOL_ARRAY2),
allowedAssignment(BOOL_ARRAY2, NOTHING_ARRAY2),
illegalAssignment(BOOL_ARRAY2, STRUCT_WITH_STRING_ARRAY2),
illegalAssignment(BOOL_ARRAY2, STRING_ARRAY2),
// [[Nothing]]
illegalAssignment(NOTHING_ARRAY2, BLOB),
illegalAssignment(NOTHING_ARRAY2, BOOL),
allowedAssignment(NOTHING_ARRAY2, NOTHING),
illegalAssignment(NOTHING_ARRAY2, STRING),
illegalAssignment(NOTHING_ARRAY2, STRUCT_WITH_STRING),
illegalAssignment(NOTHING_ARRAY2, BLOB_ARRAY),
illegalAssignment(NOTHING_ARRAY2, BOOL_ARRAY),
allowedAssignment(NOTHING_ARRAY2, NOTHING_ARRAY),
illegalAssignment(NOTHING_ARRAY2, STRUCT_WITH_STRING_ARRAY),
illegalAssignment(NOTHING_ARRAY2, STRING_ARRAY),
illegalAssignment(NOTHING_ARRAY2, BLOB_ARRAY2),
illegalAssignment(NOTHING_ARRAY2, BOOL_ARRAY2),
allowedAssignment(NOTHING_ARRAY2, NOTHING_ARRAY2),
illegalAssignment(NOTHING_ARRAY2, STRUCT_WITH_STRING_ARRAY2),
illegalAssignment(NOTHING_ARRAY2, STRING_ARRAY2),
// [[String]]
illegalAssignment(STRING_ARRAY2, BLOB),
illegalAssignment(STRING_ARRAY2, BOOL),
allowedAssignment(STRING_ARRAY2, NOTHING),
illegalAssignment(STRING_ARRAY2, STRING),
illegalAssignment(STRING_ARRAY2, STRUCT_WITH_STRING),
illegalAssignment(STRING_ARRAY2, BLOB_ARRAY),
illegalAssignment(STRING_ARRAY2, BOOL_ARRAY),
allowedAssignment(STRING_ARRAY2, NOTHING_ARRAY),
illegalAssignment(STRING_ARRAY2, STRUCT_WITH_STRING_ARRAY),
illegalAssignment(STRING_ARRAY2, STRING_ARRAY),
illegalAssignment(STRING_ARRAY2, BLOB_ARRAY2),
illegalAssignment(STRING_ARRAY2, BOOL_ARRAY2),
allowedAssignment(STRING_ARRAY2, NOTHING_ARRAY2),
allowedAssignment(STRING_ARRAY2, STRUCT_WITH_STRING_ARRAY2),
allowedAssignment(STRING_ARRAY2, STRING_ARRAY2),
// [[Struct]]
illegalAssignment(STRUCT_WITH_STRING_ARRAY2, BLOB),
illegalAssignment(STRUCT_WITH_STRING_ARRAY2, BOOL),
allowedAssignment(STRUCT_WITH_STRING_ARRAY2, NOTHING),
illegalAssignment(STRUCT_WITH_STRING_ARRAY2, STRING),
illegalAssignment(STRUCT_WITH_STRING_ARRAY2, STRUCT_WITH_STRING),
illegalAssignment(STRUCT_WITH_STRING_ARRAY2, BLOB_ARRAY),
illegalAssignment(STRUCT_WITH_STRING_ARRAY2, BOOL_ARRAY),
allowedAssignment(STRUCT_WITH_STRING_ARRAY2, NOTHING_ARRAY),
illegalAssignment(STRUCT_WITH_STRING_ARRAY2, STRUCT_WITH_STRING_ARRAY),
illegalAssignment(STRUCT_WITH_STRING_ARRAY2, STRING_ARRAY),
illegalAssignment(STRUCT_WITH_STRING_ARRAY2, BLOB_ARRAY2),
illegalAssignment(STRUCT_WITH_STRING_ARRAY2, BOOL_ARRAY2),
allowedAssignment(STRUCT_WITH_STRING_ARRAY2, NOTHING_ARRAY2),
allowedAssignment(STRUCT_WITH_STRING_ARRAY2, STRUCT_WITH_STRING_ARRAY2),
illegalAssignment(STRUCT_WITH_STRING_ARRAY2, STRING_ARRAY2),
illegalAssignment(STRUCT_WITH_STRING_ARRAY2, STRUCT_WITH_BOOL),
illegalAssignment(STRUCT_WITH_STRING_ARRAY2, STRUCT_WITH_BOOL_ARRAY),
illegalAssignment(STRUCT_WITH_STRING_ARRAY2, STRUCT_WITH_BOOL_ARRAY2)
);
}
/**
 * Creates a spec that expects assigning a {@code source} value to a {@code target}
 * of the given type to be accepted by the type checker.
 *
 * @param target the type being assigned to
 * @param source the type of the assigned value
 * @return a spec marked as allowed
 */
public static AssignmentTestSpec allowedAssignment(TestedType target, TestedType source) {
  final boolean isLegal = true;
  return new AssignmentTestSpec(target, source, isLegal);
}
/**
 * Creates a spec that expects assigning a {@code source} value to a {@code target}
 * of the given type to be rejected by the type checker.
 *
 * @param target the type being assigned to
 * @param source the type of the assigned value
 * @return a spec marked as illegal
 */
public static AssignmentTestSpec illegalAssignment(TestedType target, TestedType source) {
  final boolean isLegal = false;
  return new AssignmentTestSpec(target, source, isLegal);
}
/**
 * A single assignment test case: a (target, source) type pair plus the expected
 * legality of assigning a source-typed value to a target-typed slot.
 */
public static class AssignmentTestSpec extends TestSpec {
  // True when the assignment is expected to type-check, false when it must be rejected.
  public final boolean allowed;

  // Private on purpose: build instances via allowedAssignment(...) / illegalAssignment(...).
  private AssignmentTestSpec(TestedType target, TestedType source, boolean allowed) {
    super(target, source);
    this.allowed = allowed;
  }
}
}
| |
/*
* This file is part of "lunisolar-magma".
*
* (C) Copyright 2014-2022 Lunisolar (http://lunisolar.eu/).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.lunisolar.magma.func.supplier;
import javax.annotation.Nonnull; // NOSONAR
import javax.annotation.Nullable; // NOSONAR
import javax.annotation.concurrent.NotThreadSafe; // NOSONAR
import java.util.Objects; // NOSONAR
import eu.lunisolar.magma.basics.*; // NOSONAR
import eu.lunisolar.magma.basics.builder.*; // NOSONAR
import eu.lunisolar.magma.basics.exceptions.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.aType.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.functional.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.functional.type.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.functional.domain.*; // NOSONAR
import eu.lunisolar.magma.func.IA;
import eu.lunisolar.magma.func.SA;
import eu.lunisolar.magma.func.*; // NOSONAR
import eu.lunisolar.magma.func.tuple.*; // NOSONAR
import java.util.function.*; // NOSONAR
import java.util.concurrent.*; // NOSONAR
import java.util.function.*; // NOSONAR
import java.util.*; // NOSONAR
import java.lang.reflect.*; // NOSONAR
import eu.lunisolar.magma.func.action.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.primitives.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.primitives.bi.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.primitives.obj.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.primitives.tri.*; // NOSONAR
import eu.lunisolar.magma.func.function.*; // NOSONAR
import eu.lunisolar.magma.func.function.conversion.*; // NOSONAR
import eu.lunisolar.magma.func.function.from.*; // NOSONAR
import eu.lunisolar.magma.func.function.to.*; // NOSONAR
import eu.lunisolar.magma.func.operator.binary.*; // NOSONAR
import eu.lunisolar.magma.func.operator.ternary.*; // NOSONAR
import eu.lunisolar.magma.func.operator.unary.*; // NOSONAR
import eu.lunisolar.magma.func.predicate.*; // NOSONAR
import eu.lunisolar.magma.func.supplier.*; // NOSONAR
/**
 * Non-throwing functional interface (lambda) LSrtSupplier for Java 8.
 *
 * Type: supplier
 *
 * Domain (lvl: 0): none
 *
 * Co-domain: short
 *
 * Functional method is {@link #getAsSrtX()} (may throw); callers normally use
 * {@link #getAsSrt()}, which nests checked exceptions as unchecked.
 */
@FunctionalInterface
@SuppressWarnings("UnusedDeclaration")
public interface LSrtSupplier extends MetaSupplier, MetaInterface.NonThrowing, Codomain<aShort>, Domain0 { // NOSONAR

	String DESCRIPTION = "LSrtSupplier: short getAsSrt()";

	/** Non-throwing call: checked exceptions from {@link #getAsSrtX()} are nested and rethrown. */
	default short getAsSrt() {
		try {
			return this.getAsSrtX();
		} catch (Throwable e) { // NOSONAR
			throw Handling.nestCheckedAndThrow(e);
		}
	}

	/**
	 * Implement this, but call getAsSrt()
	 */
	short getAsSrtX() throws Throwable;

	/** Tuple-call adapter: ignores the (empty) argument tuple and delegates to {@link #getAsSrt()}. */
	default short tupleGetAsSrt(LTuple.Void args) {
		return getAsSrt();
	}

	/** Function call that handles exceptions according to the instructions. */
	default short handlingGetAsSrt(HandlingInstructions<Throwable, RuntimeException> handling) {
		try {
			return this.getAsSrtX();
		} catch (Throwable e) { // NOSONAR
			throw Handler.handleOrNest(e, handling);
		}
	}

	/** Returns a supplier that applies the given handling instructions on every call. */
	default LSrtSupplier handling(HandlingInstructions<Throwable, RuntimeException> handling) {
		return () -> handlingGetAsSrt(handling);
	}

	/** Call variant that wraps any failure via {@code factory} with the given message. */
	default short getAsSrt(@Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage) {
		try {
			return this.getAsSrtX();
		} catch (Throwable e) { // NOSONAR
			throw Handling.wrap(e, factory, newMessage);
		}
	}

	/** Call variant that wraps any failure via {@code factory} with a 1-parameter message. */
	default short getAsSrt(@Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1) {
		try {
			return this.getAsSrtX();
		} catch (Throwable e) { // NOSONAR
			throw Handling.wrap(e, factory, newMessage, param1);
		}
	}

	/** Call variant that wraps any failure via {@code factory} with a 2-parameter message. */
	default short getAsSrt(@Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1, @Nullable Object param2) {
		try {
			return this.getAsSrtX();
		} catch (Throwable e) { // NOSONAR
			throw Handling.wrap(e, factory, newMessage, param1, param2);
		}
	}

	/** Call variant that wraps any failure via {@code factory} with a 3-parameter message. */
	default short getAsSrt(@Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1, @Nullable Object param2, @Nullable Object param3) {
		try {
			return this.getAsSrtX();
		} catch (Throwable e) { // NOSONAR
			throw Handling.wrap(e, factory, newMessage, param1, param2, param3);
		}
	}

	/** Returns a supplier that wraps failures with the given factory/message. */
	default LSrtSupplier trying(@Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage) {
		return () -> getAsSrt(factory, newMessage);
	}

	/** Returns a supplier that wraps failures with a 1-parameter message. */
	default LSrtSupplier trying(@Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1) {
		return () -> getAsSrt(factory, newMessage, param1);
	}

	/** Returns a supplier that wraps failures with a 2-parameter message. */
	default LSrtSupplier trying(@Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1, @Nullable Object param2) {
		// FIX: previously passed param1 twice, silently dropping param2 from the produced message.
		return () -> getAsSrt(factory, newMessage, param1, param2);
	}

	/** Returns a supplier that wraps failures with a 3-parameter message. */
	default LSrtSupplier trying(@Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1, @Nullable Object param2, @Nullable Object param3) {
		return () -> getAsSrt(factory, newMessage, param1, param2, param3);
	}

	/** Call variant that wraps any failure via a message-less exception factory. */
	default short getAsSrt(@Nonnull ExWF<RuntimeException> factory) {
		try {
			return this.getAsSrtX();
		} catch (Throwable e) { // NOSONAR
			throw Handling.wrap(e, factory);
		}
	}

	/** Returns a supplier that wraps failures via a message-less exception factory. */
	default LSrtSupplier trying(@Nonnull ExWF<RuntimeException> factory) {
		return () -> getAsSrt(factory);
	}

	/** Call variant that recovers from non-fatal failures by delegating to {@code handler}. */
	default short getAsSrtThen(@Nonnull LToSrtFunction<Throwable> handler) {
		try {
			return this.getAsSrtX();
		} catch (Throwable e) { // NOSONAR
			Handling.handleErrors(e);
			return handler.applyAsSrt(e);
		}
	}

	/** Returns a supplier that recovers from failures via {@code handler}. */
	default LSrtSupplier tryingThen(@Nonnull LToSrtFunction<Throwable> handler) {
		return () -> getAsSrtThen(handler);
	}

	/** Function call that handles exceptions by always nesting checked exceptions and propagating the others as is. */
	default short nestingGetAsSrt() {
		try {
			return this.getAsSrtX();
		} catch (Throwable e) { // NOSONAR
			throw Handling.nestCheckedAndThrow(e);
		}
	}

	/** Function call that handles exceptions by always propagating them as is, even when they are undeclared checked ones. */
	default short shovingGetAsSrt() {
		try {
			return this.getAsSrtX();
		} catch (Throwable e) { // NOSONAR
			throw Handling.shoveIt(e);
		}
	}

	/** Static counterpart of {@link #shovingGetAsSrt()} with a null check on {@code func}. */
	static short shovingGetAsSrt(LSrtSupplier func) {
		Null.nonNullArg(func, "func");
		return func.shovingGetAsSrt();
	}

	/** Static counterpart of {@link #handlingGetAsSrt(HandlingInstructions)}. */
	static short handlingGetAsSrt(LSrtSupplier func, HandlingInstructions<Throwable, RuntimeException> handling) {
		Null.nonNullArg(func, "func");
		return func.handlingGetAsSrt(handling);
	}

	/** Static counterpart of {@link #nestingGetAsSrt()}. */
	static short tryGetAsSrt(LSrtSupplier func) {
		Null.nonNullArg(func, "func");
		return func.nestingGetAsSrt();
	}

	/** Static counterpart of {@link #getAsSrt(ExWMF, String)}. */
	static short tryGetAsSrt(LSrtSupplier func, @Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage) {
		Null.nonNullArg(func, "func");
		return func.getAsSrt(factory, newMessage);
	}

	/** Static counterpart of {@link #getAsSrt(ExWMF, String, Object)}. */
	static short tryGetAsSrt(LSrtSupplier func, @Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1) {
		Null.nonNullArg(func, "func");
		return func.getAsSrt(factory, newMessage, param1);
	}

	/** Static counterpart of {@link #getAsSrt(ExWMF, String, Object, Object)}. */
	static short tryGetAsSrt(LSrtSupplier func, @Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1, @Nullable Object param2) {
		Null.nonNullArg(func, "func");
		return func.getAsSrt(factory, newMessage, param1, param2);
	}

	/** Static counterpart of {@link #getAsSrt(ExWMF, String, Object, Object, Object)}. */
	static short tryGetAsSrt(LSrtSupplier func, @Nonnull ExWMF<RuntimeException> factory, @Nonnull String newMessage, @Nullable Object param1, @Nullable Object param2, @Nullable Object param3) {
		Null.nonNullArg(func, "func");
		return func.getAsSrt(factory, newMessage, param1, param2, param3);
	}

	/** Static counterpart of {@link #getAsSrt(ExWF)}. */
	static short tryGetAsSrt(LSrtSupplier func, @Nonnull ExWF<RuntimeException> factory) {
		Null.nonNullArg(func, "func");
		return func.getAsSrt(factory);
	}

	/** Static counterpart of {@link #getAsSrtThen(LToSrtFunction)}. */
	static short tryGetAsSrtThen(LSrtSupplier func, @Nonnull LToSrtFunction<Throwable> handler) {
		Null.nonNullArg(func, "func");
		return func.getAsSrtThen(handler);
	}

	/** Calls this supplier; on non-fatal failure falls back to {@code failSafe}. */
	default short failSafeGetAsSrt(@Nonnull LSrtSupplier failSafe) {
		try {
			return getAsSrt();
		} catch (Throwable e) { // NOSONAR
			Handling.handleErrors(e);
			return failSafe.getAsSrt();
		}
	}

	/** Static fail-safe call: a null {@code func} also falls back to {@code failSafe}. */
	static short failSafeGetAsSrt(LSrtSupplier func, @Nonnull LSrtSupplier failSafe) {
		Null.nonNullArg(failSafe, "failSafe");
		if (func == null) {
			return failSafe.getAsSrt();
		} else {
			return func.failSafeGetAsSrt(failSafe);
		}
	}

	/** Returns a supplier that applies the static fail-safe call on every invocation. */
	static LSrtSupplier failSafe(LSrtSupplier func, @Nonnull LSrtSupplier failSafe) {
		Null.nonNullArg(failSafe, "failSafe");
		return () -> failSafeGetAsSrt(func, failSafe);
	}

	/** Just to mirror the method: Ensures the result is not null */
	default short nonNullGetAsSrt() {
		return getAsSrt();
	}

	/** Returns description of the functional interface. */
	@Nonnull
	default String functionalInterfaceDescription() {
		return LSrtSupplier.DESCRIPTION;
	}

	/** From-To (inclusive of max_i). Intended to be used with non-capturing lambda. */
	public static void fromTo(int min_i, int max_i, @Nonnull LSrtSupplier func) {
		Null.nonNullArg(func, "func");
		if (min_i <= max_i) {
			for (int i = min_i; i <= max_i; i++) {
				func.getAsSrt();
			}
		} else {
			// Reversed bounds iterate downwards, still inclusive.
			for (int i = min_i; i >= max_i; i--) {
				func.getAsSrt();
			}
		}
	}

	/** From-Till (exclusive of max_i). Intended to be used with non-capturing lambda. */
	public static void fromTill(int min_i, int max_i, @Nonnull LSrtSupplier func) {
		Null.nonNullArg(func, "func");
		if (min_i <= max_i) {
			for (int i = min_i; i < max_i; i++) {
				func.getAsSrt();
			}
		} else {
			for (int i = min_i; i > max_i; i--) {
				func.getAsSrt();
			}
		}
	}

	/** Calls the supplier {@code max_i} times. Intended to be used with non-capturing lambda. */
	public static void times(int max_i, @Nonnull LSrtSupplier func) {
		if (max_i < 0)
			return;
		fromTill(0, max_i, func);
	}

	/** Change function to consumer that ignores output. */
	default LAction toConsumer() {
		return this::getAsSrt;
	}

	/** Calls codomain consumer after main function. */
	default LSrtSupplier afterDo(@Nonnull LSrtConsumer after) {
		Null.nonNullArg(after, "after");
		return () -> {
			final short retval = getAsSrt();
			after.accept(retval);
			return retval;
		};
	}

	/** Creates function that always returns the same value. */
	static LSrtSupplier of(short r) {
		return () -> r;
	}

	/** Convenient method in case lambda expression is ambiguous for the compiler (that might happen for overloaded methods accepting different interfaces). */
	@Nonnull
	static LSrtSupplier srtSup(final @Nonnull LSrtSupplier lambda) {
		Null.nonNullArg(lambda, "lambda");
		return lambda;
	}

	/** Mutable holder used by {@link #recursive(LFunction)} to tie the knot for self-referencing lambdas. */
	final class S implements LSrtSupplier {
		private LSrtSupplier target = null;

		@Override
		public short getAsSrtX() throws Throwable {
			return target.getAsSrtX();
		}
	}

	/** Builds a supplier that may refer to itself via the instance handed to {@code selfLambda}. */
	@Nonnull
	static LSrtSupplier recursive(final @Nonnull LFunction<LSrtSupplier, LSrtSupplier> selfLambda) {
		final S single = new S();
		LSrtSupplier func = selfLambda.apply(single);
		single.target = func;
		return func;
	}

	/** Memento primed with the function's current value; remembers the last value produced. */
	public static M mementoOf(LSrtSupplier function) {
		var initialValue = function.getAsSrt();
		return initializedMementoOf(initialValue, function);
	}

	/** Memento primed with an explicit initial value; each call returns the newest base value. */
	public static M initializedMementoOf(short initialValue, LSrtSupplier function) {
		return memento(initialValue, initialValue, function, (m, x1, x2) -> x2);
	}

	/** Delta supplier using a custom delta function, primed with the function's current value. */
	public static M deltaOf(LSrtSupplier function, LSrtBinaryOperator deltaFunction) {
		var initialValue = function.getAsSrt();
		return initializedDeltaOf(initialValue, function, deltaFunction);
	}

	/** Delta supplier using plain subtraction (x2 - x1), primed with the function's current value. */
	public static M deltaOf(LSrtSupplier function) {
		var initialValue = function.getAsSrt();
		return initializedDeltaOf(initialValue, function, (x1, x2) -> (short) (x2 - x1));
	}

	/** Delta supplier primed with an explicit initial value. */
	public static M initializedDeltaOf(short initialValue, LSrtSupplier function, LSrtBinaryOperator deltaFunction) {
		return memento(initialValue, deltaFunction.applyAsSrt(initialValue, initialValue), function, (m, x1, x2) -> deltaFunction.applyAsSrt(x1, x2));
	}

	/** General memento factory: combines previous and current base values via {@code mementoFunction}. */
	public static M memento(short initialBaseValue, short initialValue, LSrtSupplier baseFunction, LSrtTernaryOperator mementoFunction) {
		return new M(initialBaseValue, initialValue, baseFunction, mementoFunction);
	}

	/**
	 * Implementation that allows to create derivative functions (do not confuse it with math concepts). Very short name is intended to be used with parent (LSrtSupplier.M)
	 */
	@NotThreadSafe
	final class M implements LSrtSupplier {

		private final LSrtSupplier baseFunction;
		private short lastBaseValue;
		private short lastValue;
		private final LSrtTernaryOperator mementoFunction;

		private M(short lastBaseValue, short lastValue, LSrtSupplier baseFunction, LSrtTernaryOperator mementoFunction) {
			this.baseFunction = baseFunction;
			this.lastBaseValue = lastBaseValue;
			this.lastValue = lastValue;
			this.mementoFunction = mementoFunction;
		}

		@Override
		public short getAsSrtX() throws Throwable {
			short x1 = lastBaseValue;
			short x2 = lastBaseValue = baseFunction.getAsSrtX();
			return lastValue = mementoFunction.applyAsSrt(lastValue, x1, x2);
		}

		/** Last value produced by this memento. */
		public short lastValue() {
			return lastValue;
		}

		/** Last raw value produced by the base function. */
		public short lastBaseValue() {
			return lastBaseValue;
		}
	}

	/** Supplier that always throws the exception produced by {@code exF}. */
	@Nonnull
	static LSrtSupplier srtSupThrowing(final @Nonnull ExF<Throwable> exF) {
		Null.nonNullArg(exF, "exF");
		return () -> {
			throw exF.produce();
		};
	}

	/** Supplier that always throws the exception produced by {@code exF} with {@code message}. */
	@Nonnull
	static LSrtSupplier srtSupThrowing(final String message, final @Nonnull ExMF<Throwable> exF) {
		Null.nonNullArg(exF, "exF");
		return () -> {
			throw exF.produce(message);
		};
	}

	/** Immediately calls the given lambda; convenience for inline evaluation. */
	static short call(final @Nonnull LSrtSupplier lambda) {
		Null.nonNullArg(lambda, "lambda");
		return lambda.getAsSrt();
	}

	// <editor-fold desc="wrap">

	// </editor-fold>

	// <editor-fold desc="then (functional)">

	/** Combines two functions together in a order. */
	@Nonnull
	default <V> LSupplier<V> toSup(@Nonnull LSrtFunction<? extends V> after) {
		Null.nonNullArg(after, "after");
		return () -> after.apply(this.getAsSrt());
	}

	/** Combines two functions together in a order. */
	@Nonnull
	default LByteSupplier toByteSup(@Nonnull LSrtToByteFunction after) {
		Null.nonNullArg(after, "after");
		return () -> after.applyAsByte(this.getAsSrt());
	}

	/** Combines two functions together in a order. */
	@Nonnull
	default LSrtSupplier toSrtSup(@Nonnull LSrtUnaryOperator after) {
		Null.nonNullArg(after, "after");
		return () -> after.applyAsSrt(this.getAsSrt());
	}

	/** Combines two functions together in a order. */
	@Nonnull
	default LIntSupplier toIntSup(@Nonnull LSrtToIntFunction after) {
		Null.nonNullArg(after, "after");
		return () -> after.applyAsInt(this.getAsSrt());
	}

	/** Combines two functions together in a order. */
	@Nonnull
	default LLongSupplier toLongSup(@Nonnull LSrtToLongFunction after) {
		Null.nonNullArg(after, "after");
		return () -> after.applyAsLong(this.getAsSrt());
	}

	/** Combines two functions together in a order. */
	@Nonnull
	default LFltSupplier toFltSup(@Nonnull LSrtToFltFunction after) {
		Null.nonNullArg(after, "after");
		return () -> after.applyAsFlt(this.getAsSrt());
	}

	/** Combines two functions together in a order. */
	@Nonnull
	default LDblSupplier toDblSup(@Nonnull LSrtToDblFunction after) {
		Null.nonNullArg(after, "after");
		return () -> after.applyAsDbl(this.getAsSrt());
	}

	/** Combines two functions together in a order. */
	@Nonnull
	default LCharSupplier toCharSup(@Nonnull LSrtToCharFunction after) {
		Null.nonNullArg(after, "after");
		return () -> after.applyAsChar(this.getAsSrt());
	}

	/** Combines two functions together in a order. */
	@Nonnull
	default LBoolSupplier toBoolSup(@Nonnull LSrtPredicate after) {
		Null.nonNullArg(after, "after");
		return () -> after.test(this.getAsSrt());
	}

	// </editor-fold>

	// <editor-fold desc="variant conversions">

	// </editor-fold>

	/** Does nothing (LSrtSupplier) Supplier */
	public static short doNothing() {
		return Function4U.defaultShort;
	}

}
| |
package com.planet_ink.coffee_mud.Abilities.Fighter;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2003-2022 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Fighter skill "CHARGE": rush a target from range (1-2) into melee, gaining a
 * temporary attack/damage/armor buff that lasts until the charger lands a
 * weapon attack (see executeMsg/tick) or the short timed affect expires.
 */
public class Fighter_Charge extends FighterSkill
{
	@Override
	public String ID()
	{
		return "Fighter_Charge";
	}

	// Translated once at class-load time via the localization library.
	private final static String localizedName = CMLib.lang().L("Charge");

	@Override
	public String name()
	{
		return localizedName;
	}

	// Command word(s) players type to invoke this skill.
	private static final String[] triggerStrings = I(new String[] { "CHARGE" });

	@Override
	public int abstractQuality()
	{
		return Ability.QUALITY_MALICIOUS;
	}

	@Override
	public String[] triggerStrings()
	{
		return triggerStrings;
	}

	private final static String localizedStaticDisplay = CMLib.lang().L("(Charging!!)");

	@Override
	public String displayText()
	{
		return localizedStaticDisplay;
	}

	@Override
	protected int canAffectCode()
	{
		return Ability.CAN_MOBS;
	}

	@Override
	protected int canTargetCode()
	{
		return Ability.CAN_MOBS;
	}

	@Override
	public int classificationCode()
	{
		return Ability.ACODE_SKILL | Ability.DOMAIN_ACROBATIC;
	}

	@Override
	public int usageType()
	{
		// Charging costs movement points rather than mana.
		return USAGE_MOVEMENT;
	}

	@Override
	public int minRange()
	{
		// Must start at least 1 step away — cannot charge from melee range.
		return 1;
	}

	@Override
	public int maxRange()
	{
		return adjustedMaxInvokerRange(2);
	}

	// Tunable bonus added to damage in affectPhyStats; set via setAbilityCode.
	protected int code = 0;

	@Override
	public int abilityCode()
	{
		return code;
	}

	@Override
	public void setAbilityCode(final int c)
	{
		code = c;
	}

	// Set once the charger lands a weapon attack; tick() then removes the buff.
	public boolean done = false;

	@Override
	public void executeMsg(final Environmental myHost, final CMMsg msg)
	{
		// End-of-charge trigger: the affected mob performed a weapon attack.
		if((affected!=null)
		&&(affected instanceof MOB)
		&&(msg.amISource((MOB)affected))
		&&(msg.targetMinor()==CMMsg.TYP_WEAPONATTACK))
			done=true;
		super.executeMsg(myHost,msg);
	}

	@Override
	public boolean tick(final Tickable ticking, final int tickID)
	{
		// Remove the charge buff on the next mob tick after an attack landed.
		if(tickID==Tickable.TICKID_MOB)
		{
			if(done)
				unInvoke();
		}
		return super.tick(ticking,tickID);
	}

	@Override
	public void affectPhyStats(final Physical affected, final PhyStats affectableStats)
	{
		super.affectPhyStats(affected,affectableStats);
		// Bonuses scale with the charger's level plus expertise (X-level) ranks.
		final int xlvl=getXLEVELLevel(invoker());
		affectableStats.setAttackAdjustment(affectableStats.attackAdjustment()+(2*(affected.phyStats().level()+xlvl)));
		affectableStats.setDamage(affectableStats.damage()+(affected.phyStats().level())+abilityCode()+xlvl);
		affectableStats.setArmor(affectableStats.armor()+(2*(xlvl+affected.phyStats().level())));
	}

	@Override
	public int castingQuality(final MOB mob, final Physical target)
	{
		// Mirrors the preconditions in invoke(): useless when already in melee,
		// sitting, or riding — so AI should not bother trying it then.
		if((mob!=null)&&(target!=null))
		{
			if((mob.isInCombat())&&(mob.rangeToTarget()<=0))
				return Ability.QUALITY_INDIFFERENT;
			if((CMLib.flags().isSitting(mob))||(mob.riding()!=null))
				return Ability.QUALITY_INDIFFERENT;
		}
		return super.castingQuality(mob,target);
	}

	@Override
	public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel)
	{
		// Captured before super.invoke(), which may itself start combat.
		final boolean notInCombat=!mob.isInCombat();
		final MOB target=this.getTarget(mob,commands,givenTarget);
		if(target==null)
			return false;
		// Cannot charge from melee range (range 0).
		if((mob.isInCombat())
		&&(mob.rangeToTarget()<=0))
		{
			mob.tell(L("You can not charge while in melee!"));
			return false;
		}
		if((CMLib.flags().isSitting(mob))||(mob.riding()!=null))
		{
			mob.tell(L("You must be on your feet to charge!"));
			return false;
		}
		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;

		// now see if it worked
		final boolean success=proficiencyCheck(mob,0,auto);
		if(success)
		{
			final CMMsg msg=CMClass.getMsg(mob,target,this,CMMsg.MASK_MALICIOUS|CMMsg.MSG_ADVANCE,L("^F^<FIGHT^><S-NAME> charge(s) at <T-NAMESELF>!^</FIGHT^>^?"));
			CMLib.color().fixSourceFightColor(msg);
			if(mob.location().okMessage(mob,msg))
			{
				mob.location().send(mob,msg);
				// Only apply the buff if the advance actually engaged this target.
				if(mob.getVictim()==target)
				{
					mob.setRangeToTarget(0);
					target.setRangeToTarget(0);
					// Short (2-tick) self-buff; removed early once an attack lands.
					beneficialAffect(mob,mob,asLevel,2);
					mob.recoverPhyStats();
					if(notInCombat)
					{
						// Opening charge from peace: strike immediately and let
						// tick() clean the buff up right after.
						done=true;
						CMLib.combat().postAttack(mob,target,mob.fetchWieldedItem());
					}
					else
						done=false;
					// NOTE(review): setVictim is only invoked when the victim is already
					// null — presumably setVictim(null) has a range-correcting side effect
					// inside MOB; confirm against MOB.setVictim before simplifying.
					if (mob.getVictim() == null)
						mob.setVictim(null); // correct range
					if (target.getVictim() == null)
						target.setVictim(null); // correct range
				}
			}
		}
		else
			return maliciousFizzle(mob,target,L("<S-NAME> attempt(s) to charge <T-NAME>, but then give(s) up."));

		// return whether it worked
		return success;
	}
}
| |
/*
Copyright 2011 Selenium committers
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium.support.ui;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.openqa.selenium.TimeoutException;
import org.openqa.selenium.WebDriverException;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
* An implementation of the {@link Wait} interface that may have its timeout and polling interval
* configured on the fly.
*
* <p>
* Each FluentWait instance defines the maximum amount of time to wait for a condition, as well as
* the frequency with which to check the condition. Furthermore, the user may configure the wait to
* ignore specific types of exceptions whilst waiting, such as
* {@link org.openqa.selenium.NoSuchElementException NoSuchElementExceptions} when searching for an
* element on the page.
*
* <p>
* Sample usage: <code><pre>
* // Waiting 30 seconds for an element to be present on the page, checking
* // for its presence once every 5 seconds.
* Wait<WebDriver> wait = new FluentWait<WebDriver>(driver)
* .withTimeout(30, SECONDS)
* .pollingEvery(5, SECONDS)
* .ignoring(NoSuchElementException.class);
*
* WebElement foo = wait.until(new Function<WebDriver, WebElement>() {
* public WebElement apply(WebDriver driver) {
* return driver.findElement(By.id("foo"));
* }
* });
* </pre></code>
*
* <p>
* <em>This class makes no thread safety guarantees.</em>
*
* @param <T> The input type for each condition used with this instance.
*/
public class FluentWait<T> implements Wait<T> {
  // Default timeout AND default polling interval (500 ms).
  // NOTE(review): public and non-final — external code could reassign it; consider final.
  public static Duration FIVE_HUNDRED_MILLIS = new Duration(500, MILLISECONDS);

  // Value handed to every condition function/predicate evaluated by until(...).
  private final T input;
  // Time source for the deadline; injectable for testing.
  private final Clock clock;
  // Pause strategy between polls; injectable for testing.
  private final Sleeper sleeper;

  private Duration timeout = FIVE_HUNDRED_MILLIS;
  private Duration interval = FIVE_HUNDRED_MILLIS;
  // Optional extra text appended to the TimeoutException message; null = none.
  private String message = null;

  // Exception types swallowed (and remembered as lastException) during polling.
  private List<Class<? extends Throwable>> ignoredExceptions = Lists.newLinkedList();
/**
* @param input The input value to pass to the evaluated conditions.
*/
public FluentWait(T input) {
this(input, new SystemClock(), Sleeper.SYSTEM_SLEEPER);
}
/**
* @param input The input value to pass to the evaluated conditions.
* @param clock The clock to use when measuring the timeout.
* @param sleeper Used to put the thread to sleep between evaluation loops.
*/
public FluentWait(T input, Clock clock, Sleeper sleeper) {
this.input = checkNotNull(input);
this.clock = checkNotNull(clock);
this.sleeper = checkNotNull(sleeper);
}
/**
* Sets how long to wait for the evaluated condition to be true. The default timeout is
* {@link #FIVE_HUNDRED_MILLIS}.
*
* @param duration The timeout duration.
* @param unit The unit of time.
* @return A self reference.
*/
public FluentWait<T> withTimeout(long duration, TimeUnit unit) {
this.timeout = new Duration(duration, unit);
return this;
}
/**
* Sets the message to be displayed when time expires.
*
* @param message to be appended to default.
* @return A self reference.
*/
public FluentWait<T> withMessage(String message) {
this.message = message;
return this;
}
/**
* Sets how often the condition should be evaluated.
*
* <p>
* In reality, the interval may be greater as the cost of actually evaluating a condition function
* is not factored in. The default polling interval is {@link #FIVE_HUNDRED_MILLIS}.
*
* @param duration The timeout duration.
* @param unit The unit of time.
* @return A self reference.
*/
public FluentWait<T> pollingEvery(long duration, TimeUnit unit) {
this.interval = new Duration(duration, unit);
return this;
}
/**
* Configures this instance to ignore specific types of exceptions while waiting for a condition.
* Any exceptions not whitelisted will be allowed to propagate, terminating the wait.
*
* @param types The types of exceptions to ignore.
* @return A self reference.
*/
public FluentWait<T> ignoreAll(Collection<Class<? extends Throwable>> types) {
ignoredExceptions.addAll(types);
return this;
}
/**
* @see #ignoreAll(Collection)
*/
public FluentWait<T> ignoring(Class<? extends Throwable> exceptionType) {
return this.ignoreAll(ImmutableList.<Class<? extends Throwable>>of(exceptionType));
}
/**
* @see #ignoreAll(Collection)
*/
public FluentWait<T> ignoring(Class<? extends Throwable> firstType,
Class<? extends Throwable> secondType) {
return this.ignoreAll(ImmutableList.<Class<? extends Throwable>>of(firstType, secondType));
}
/**
* Repeatedly applies this instance's input value to the given predicate until the timeout expires
* or the predicate evaluates to true.
*
* @param isTrue The predicate to wait on.
* @throws TimeoutException If the timeout expires.
*/
public void until(final Predicate<T> isTrue) {
until(new Function<T, Boolean>() {
public Boolean apply(T input) {
return isTrue.apply(input);
}
});
}
  /**
   * Repeatedly applies this instance's input value to the given function until one of the following
   * occurs:
   * <ol>
   * <li>the function returns neither null nor false,</li>
   * <li>the function throws an unignored exception,</li>
   * <li>the timeout expires,</li>
   * <li>the current thread is interrupted</li>
   * </ol>
   *
   * @param isTrue the condition function to poll with this instance's input.
   * @param <V> The function's expected return type.
   * @return The function's return value if the function returned something different
   *         from null or false before the timeout expired.
   * @throws TimeoutException If the timeout expires.
   */
  public <V> V until(Function<? super T, V> isTrue) {
    // Compute the absolute deadline once, up front.
    long end = clock.laterBy(timeout.in(MILLISECONDS));
    Throwable lastException = null;
    while (true) {
      try {
        V value = isTrue.apply(input);
        // A Boolean result only counts as success when it is TRUE; any other
        // non-null (non-Boolean) result is returned as-is.
        if (value != null && Boolean.class.equals(value.getClass())) {
          if (Boolean.TRUE.equals(value)) {
            return value;
          }
        } else if (value != null) {
          return value;
        }
      } catch (Throwable e) {
        // Whitelisted exceptions are remembered (for the eventual timeout
        // message) but do not abort the wait; others propagate from here.
        lastException = propagateIfNotIngored(e);
      }
      // Check the timeout after evaluating the function to ensure conditions
      // with a zero timeout can succeed.
      if (!clock.isNowBefore(end)) {
        String toAppend = message == null ?
            " waiting for " + isTrue.toString() : ": " + message;
        String timeoutMessage = String.format("Timed out after %d seconds%s",
            timeout.in(SECONDS), toAppend);
        throw timeoutException(timeoutMessage, lastException);
      }
      try {
        sleeper.sleep(interval);
      } catch (InterruptedException e) {
        // Restore the interrupt flag before surfacing the failure.
        Thread.currentThread().interrupt();
        throw new WebDriverException(e);
      }
    }
  }
  // NOTE(review): method name has a typo ("Ingored" -> "Ignored"); left as-is
  // here because renaming it also requires updating the call site in until().
  /**
   * Returns {@code e} unchanged if it is an instance of one of the ignored
   * exception types; otherwise rethrows it via Throwables.propagate.
   */
  private Throwable propagateIfNotIngored(Throwable e) {
    for (Class<? extends Throwable> ignoredException : ignoredExceptions) {
      if (ignoredException.isInstance(e)) {
        return e;
      }
    }
    // Not whitelisted: propagate immediately, terminating the wait.
    throw Throwables.propagate(e);
  }
  /**
   * Throws a timeout exception. This method may be overridden to throw an exception that is
   * idiomatic for a particular test infrastructure, such as an AssertionError in JUnit4.
   *
   * @param message The timeout message.
   * @param lastException The last exception to be thrown and subsequently suppressed while waiting
   *        on a function; may be null if no exception was seen.
   * @return Nothing will ever be returned; this return type is only specified as a convenience.
   */
  protected RuntimeException timeoutException(String message, Throwable lastException) {
    throw new TimeoutException(message, lastException);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.sql.dialect;
import org.apache.calcite.avatica.util.Casing;
import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.config.Lex;
import org.apache.calcite.config.NullCollation;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexUtil;
import org.apache.calcite.sql.SqlAlienSystemTypeNameSpec;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlDataTypeSpec;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlIntervalLiteral;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlSetOperator;
import org.apache.calcite.sql.SqlSyntax;
import org.apache.calcite.sql.SqlWriter;
import org.apache.calcite.sql.fun.SqlTrimFunction;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.BasicSqlType;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.type.SqlTypeUtil;
import com.google.common.collect.ImmutableList;
import org.checkerframework.checker.nullness.qual.Nullable;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import static java.util.Objects.requireNonNull;
/**
 * A <code>SqlDialect</code> implementation for Google BigQuery's "Standard SQL"
 * dialect.
 */
public class BigQuerySqlDialect extends SqlDialect {
  /** Default context: back-tick identifiers, single-quoted literals with
   * backslash escaping, case-insensitive, NULLs collating low. */
  public static final SqlDialect.Context DEFAULT_CONTEXT = SqlDialect.EMPTY_CONTEXT
      .withDatabaseProduct(SqlDialect.DatabaseProduct.BIG_QUERY)
      .withLiteralQuoteString("'")
      .withLiteralEscapedQuoteString("\\'")
      .withIdentifierQuoteString("`")
      .withNullCollation(NullCollation.LOW)
      .withUnquotedCasing(Casing.UNCHANGED)
      .withQuotedCasing(Casing.UNCHANGED)
      .withCaseSensitive(false);

  public static final SqlDialect DEFAULT = new BigQuerySqlDialect(DEFAULT_CONTEXT);

  /** BigQuery reserved keywords; identifiers that collide with one of these
   * must be quoted even when they are otherwise lexically valid. */
  private static final List<String> RESERVED_KEYWORDS =
      ImmutableList.copyOf(
          Arrays.asList("ALL", "AND", "ANY", "ARRAY", "AS", "ASC",
              "ASSERT_ROWS_MODIFIED", "AT", "BETWEEN", "BY", "CASE", "CAST",
              "COLLATE", "CONTAINS", "CREATE", "CROSS", "CUBE", "CURRENT",
              "DEFAULT", "DEFINE", "DESC", "DISTINCT", "ELSE", "END", "ENUM",
              "ESCAPE", "EXCEPT", "EXCLUDE", "EXISTS", "EXTRACT", "FALSE",
              "FETCH", "FOLLOWING", "FOR", "FROM", "FULL", "GROUP", "GROUPING",
              "GROUPS", "HASH", "HAVING", "IF", "IGNORE", "IN", "INNER",
              "INTERSECT", "INTERVAL", "INTO", "IS", "JOIN", "LATERAL", "LEFT",
              "LIKE", "LIMIT", "LOOKUP", "MERGE", "NATURAL", "NEW", "NO",
              "NOT", "NULL", "NULLS", "OF", "ON", "OR", "ORDER", "OUTER",
              "OVER", "PARTITION", "PRECEDING", "PROTO", "RANGE", "RECURSIVE",
              "RESPECT", "RIGHT", "ROLLUP", "ROWS", "SELECT", "SET", "SOME",
              "STRUCT", "TABLESAMPLE", "THEN", "TO", "TREAT", "TRUE",
              "UNBOUNDED", "UNION", "UNNEST", "USING", "WHEN", "WHERE",
              "WINDOW", "WITH", "WITHIN"));

  /** An unquoted BigQuery identifier must start with a letter and be followed
   * by zero or more letters, digits or _. */
  private static final Pattern IDENTIFIER_REGEX =
      Pattern.compile("[A-Za-z][A-Za-z0-9_]*");

  /** Matches values consisting entirely of whitespace; precompiled so that
   * {@link #unparseTrim} does not recompile the regex on every call. */
  private static final Pattern ALL_WHITESPACE = Pattern.compile("\\s+");

  /** Creates a BigQuerySqlDialect. */
  public BigQuerySqlDialect(SqlDialect.Context context) {
    super(context);
  }

  @Override protected boolean identifierNeedsQuote(String val) {
    // Quote when the identifier is lexically invalid or is a reserved word.
    return !IDENTIFIER_REGEX.matcher(val).matches()
        || RESERVED_KEYWORDS.contains(val.toUpperCase(Locale.ROOT));
  }

  @Override public @Nullable SqlNode emulateNullDirection(SqlNode node,
      boolean nullsFirst, boolean desc) {
    return emulateNullDirectionWithIsNull(node, nullsFirst, desc);
  }

  @Override public boolean supportsImplicitTypeCoercion(RexCall call) {
    // Only allow implicit coercion of non-numeric literals.
    return super.supportsImplicitTypeCoercion(call)
        && RexUtil.isLiteral(call.getOperands().get(0), false)
        && !SqlTypeUtil.isNumeric(call.type);
  }

  @Override public boolean supportsNestedAggregations() {
    return false;
  }

  @Override public boolean supportsAggregateFunctionFilter() {
    return false;
  }

  @Override public SqlParser.Config configureParser(
      SqlParser.Config configBuilder) {
    return super.configureParser(configBuilder)
        .withCharLiteralStyles(Lex.BIG_QUERY.charLiteralStyles);
  }

  @Override public void unparseOffsetFetch(SqlWriter writer, @Nullable SqlNode offset,
      @Nullable SqlNode fetch) {
    unparseFetchUsingLimit(writer, offset, fetch);
  }

  @Override public boolean supportsAliasedValues() {
    return false;
  }

  @Override public void unparseCall(final SqlWriter writer, final SqlCall call, final int leftPrec,
      final int rightPrec) {
    switch (call.getKind()) {
    case POSITION:
      // BigQuery's STRPOS only takes (string, substring). Reject the optional
      // third "start position" operand BEFORE anything is written to the
      // writer, so an unsupported call cannot leave partial SQL behind.
      // (Previously this check ran after both operands were unparsed.)
      if (3 == call.operandCount()) {
        throw new RuntimeException("3rd operand Not Supported for Function STRPOS in Big Query");
      }
      final SqlWriter.Frame frame = writer.startFunCall("STRPOS");
      writer.sep(",");
      // POSITION(x IN y) becomes STRPOS(y, x): operands are swapped.
      call.operand(1).unparse(writer, leftPrec, rightPrec);
      writer.sep(",");
      call.operand(0).unparse(writer, leftPrec, rightPrec);
      writer.endFunCall(frame);
      break;
    case UNION:
      // Bare UNION means UNION DISTINCT in BigQuery; UNION ALL unparses as-is.
      if (((SqlSetOperator) call.getOperator()).isAll()) {
        super.unparseCall(writer, call, leftPrec, rightPrec);
      } else {
        SqlSyntax.BINARY.unparse(writer, UNION_DISTINCT, call, leftPrec,
            rightPrec);
      }
      break;
    case EXCEPT:
      if (((SqlSetOperator) call.getOperator()).isAll()) {
        throw new RuntimeException("BigQuery does not support EXCEPT ALL");
      }
      SqlSyntax.BINARY.unparse(writer, EXCEPT_DISTINCT, call, leftPrec,
          rightPrec);
      break;
    case INTERSECT:
      if (((SqlSetOperator) call.getOperator()).isAll()) {
        throw new RuntimeException("BigQuery does not support INTERSECT ALL");
      }
      SqlSyntax.BINARY.unparse(writer, INTERSECT_DISTINCT, call, leftPrec,
          rightPrec);
      break;
    case TRIM:
      unparseTrim(writer, call, leftPrec, rightPrec);
      break;
    default:
      super.unparseCall(writer, call, leftPrec, rightPrec);
    }
  }

  /** BigQuery interval syntax: INTERVAL int64 time_unit. */
  @Override public void unparseSqlIntervalLiteral(SqlWriter writer,
      SqlIntervalLiteral literal, int leftPrec, int rightPrec) {
    SqlIntervalLiteral.IntervalValue interval =
        literal.getValueAs(SqlIntervalLiteral.IntervalValue.class);
    writer.keyword("INTERVAL");
    if (interval.getSign() == -1) {
      writer.print("-");
    }
    // BigQuery only accepts a plain INT64 literal as the interval value.
    try {
      Long.parseLong(interval.getIntervalLiteral());
    } catch (NumberFormatException e) {
      throw new RuntimeException("Only INT64 is supported as the interval value for BigQuery.");
    }
    writer.literal(interval.getIntervalLiteral());
    unparseSqlIntervalQualifier(writer, interval.getIntervalQualifier(),
        RelDataTypeSystem.DEFAULT);
  }

  @Override public void unparseSqlIntervalQualifier(
      SqlWriter writer, SqlIntervalQualifier qualifier, RelDataTypeSystem typeSystem) {
    final String start = validate(qualifier.timeUnitRange.startUnit).name();
    if (qualifier.timeUnitRange.endUnit == null) {
      writer.keyword(start);
    } else {
      // Ranges such as "DAY TO HOUR" have no BigQuery equivalent.
      throw new RuntimeException("Range time unit is not supported for BigQuery.");
    }
  }

  /**
   * For usage of TRIM, LTRIM and RTRIM in BQ see
   * <a href="https://cloud.google.com/bigquery/docs/reference/standard-sql/functions-and-operators#trim">
   * BQ Trim Function</a>.
   */
  private static void unparseTrim(SqlWriter writer, SqlCall call, int leftPrec,
      int rightPrec) {
    final String operatorName;
    SqlLiteral trimFlag = call.operand(0);
    SqlLiteral valueToTrim = call.operand(1);
    switch (trimFlag.getValueAs(SqlTrimFunction.Flag.class)) {
    case LEADING:
      operatorName = "LTRIM";
      break;
    case TRAILING:
      operatorName = "RTRIM";
      break;
    default:
      // BOTH maps onto plain TRIM.
      operatorName = call.getOperator().getName();
      break;
    }
    final SqlWriter.Frame trimFrame = writer.startFunCall(operatorName);
    call.operand(2).unparse(writer, leftPrec, rightPrec);
    // If the trimmed character is a non-space character, add it to the target SQL.
    // eg: TRIM(BOTH 'A' from 'ABCD'
    // Output Query: TRIM('ABC', 'A')
    String value = requireNonNull(valueToTrim.toValue(), "valueToTrim.toValue()");
    if (!ALL_WHITESPACE.matcher(value).matches()) {
      writer.literal(",");
      call.operand(1).unparse(writer, leftPrec, rightPrec);
    }
    writer.endFunCall(trimFrame);
  }

  /** Returns {@code timeUnit} when BigQuery supports it, otherwise throws. */
  private static TimeUnit validate(TimeUnit timeUnit) {
    switch (timeUnit) {
    case MICROSECOND:
    case MILLISECOND:
    case SECOND:
    case MINUTE:
    case HOUR:
    case DAY:
    case WEEK:
    case MONTH:
    case QUARTER:
    case YEAR:
    case ISOYEAR:
      return timeUnit;
    default:
      throw new RuntimeException("Time unit " + timeUnit + " is not supported for BigQuery.");
    }
  }

  /** {@inheritDoc}
   *
   * <p>BigQuery data type reference:
   * <a href="https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types">
   * BigQuery Standard SQL Data Types</a>.
   */
  @Override public @Nullable SqlNode getCastSpec(final RelDataType type) {
    if (type instanceof BasicSqlType) {
      final SqlTypeName typeName = type.getSqlTypeName();
      switch (typeName) {
      // BigQuery only supports INT64 for integer types.
      case TINYINT:
      case SMALLINT:
      case INTEGER:
      case BIGINT:
        return createSqlDataTypeSpecByName("INT64", typeName);
      // BigQuery only supports FLOAT64(aka. Double) for floating point types.
      case FLOAT:
      case DOUBLE:
        return createSqlDataTypeSpecByName("FLOAT64", typeName);
      case DECIMAL:
        return createSqlDataTypeSpecByName("NUMERIC", typeName);
      case BOOLEAN:
        return createSqlDataTypeSpecByName("BOOL", typeName);
      case CHAR:
      case VARCHAR:
        return createSqlDataTypeSpecByName("STRING", typeName);
      case BINARY:
      case VARBINARY:
        return createSqlDataTypeSpecByName("BYTES", typeName);
      case DATE:
        return createSqlDataTypeSpecByName("DATE", typeName);
      case TIME:
        return createSqlDataTypeSpecByName("TIME", typeName);
      case TIMESTAMP:
        return createSqlDataTypeSpecByName("TIMESTAMP", typeName);
      default:
        break;
      }
    }
    // Non-basic types fall back to the default rendering.
    return super.getCastSpec(type);
  }

  /** Builds a type spec whose rendered name is {@code typeAlias} while the
   * logical type remains {@code typeName}. */
  private static SqlDataTypeSpec createSqlDataTypeSpecByName(String typeAlias,
      SqlTypeName typeName) {
    SqlAlienSystemTypeNameSpec typeNameSpec = new SqlAlienSystemTypeNameSpec(
        typeAlias, typeName, SqlParserPos.ZERO);
    return new SqlDataTypeSpec(typeNameSpec, SqlParserPos.ZERO);
  }

  /**
   * List of BigQuery Specific Operators needed to form Syntactically Correct SQL.
   */
  private static final SqlOperator UNION_DISTINCT = new SqlSetOperator(
      "UNION DISTINCT", SqlKind.UNION, 14, false);

  private static final SqlSetOperator EXCEPT_DISTINCT =
      new SqlSetOperator("EXCEPT DISTINCT", SqlKind.EXCEPT, 14, false);

  private static final SqlSetOperator INTERSECT_DISTINCT =
      new SqlSetOperator("INTERSECT DISTINCT", SqlKind.INTERSECT, 18, false);
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.coprocessor;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.*;
/**
* Tests class {@link org.apache.hadoop.hbase.client.HTableWrapper}
* by invoking its methods and briefly asserting the result is reasonable.
*/
@Category(MediumTests.class)
public class TestHTableWrapper {
private static final HBaseTestingUtility util = new HBaseTestingUtility();
private static final TableName TEST_TABLE = TableName.valueOf("test");
private static final byte[] TEST_FAMILY = Bytes.toBytes("f1");
private static final byte[] ROW_A = Bytes.toBytes("aaa");
private static final byte[] ROW_B = Bytes.toBytes("bbb");
private static final byte[] ROW_C = Bytes.toBytes("ccc");
private static final byte[] ROW_D = Bytes.toBytes("ddd");
private static final byte[] ROW_E = Bytes.toBytes("eee");
private static final byte[] qualifierCol1 = Bytes.toBytes("col1");
private static final byte[] bytes1 = Bytes.toBytes(1);
private static final byte[] bytes2 = Bytes.toBytes(2);
private static final byte[] bytes3 = Bytes.toBytes(3);
private static final byte[] bytes4 = Bytes.toBytes(4);
private static final byte[] bytes5 = Bytes.toBytes(5);
static class DummyRegionObserver extends BaseRegionObserver {
}
private HTableInterface hTableInterface;
private Table table;
@BeforeClass
public static void setupBeforeClass() throws Exception {
util.startMiniCluster();
}
@AfterClass
public static void tearDownAfterClass() throws Exception {
util.shutdownMiniCluster();
}
@Before
public void before() throws Exception {
table = util.createTable(TEST_TABLE, TEST_FAMILY);
Put puta = new Put(ROW_A);
puta.add(TEST_FAMILY, qualifierCol1, bytes1);
table.put(puta);
Put putb = new Put(ROW_B);
putb.add(TEST_FAMILY, qualifierCol1, bytes2);
table.put(putb);
Put putc = new Put(ROW_C);
putc.add(TEST_FAMILY, qualifierCol1, bytes3);
table.put(putc);
}
@After
public void after() throws Exception {
try {
if (table != null) {
table.close();
}
} finally {
util.deleteTable(TEST_TABLE);
}
}
@Test
public void testHTableInterfaceMethods() throws Exception {
Configuration conf = util.getConfiguration();
MasterCoprocessorHost cpHost = util.getMiniHBaseCluster().getMaster().getMasterCoprocessorHost();
Class<?> implClazz = DummyRegionObserver.class;
cpHost.load(implClazz, Coprocessor.PRIORITY_HIGHEST, conf);
CoprocessorEnvironment env = cpHost.findCoprocessorEnvironment(implClazz.getName());
assertEquals(Coprocessor.VERSION, env.getVersion());
assertEquals(VersionInfo.getVersion(), env.getHBaseVersion());
hTableInterface = env.getTable(TEST_TABLE);
checkHTableInterfaceMethods();
cpHost.shutdown(env);
}
private void checkHTableInterfaceMethods() throws Exception {
checkConf();
checkNameAndDescriptor();
checkAutoFlush();
checkBufferSize();
checkExists();
checkGetRowOrBefore();
checkAppend();
checkPutsAndDeletes();
checkCheckAndPut();
checkCheckAndDelete();
checkIncrementColumnValue();
checkIncrement();
checkBatch();
checkCoprocessorService();
checkMutateRow();
checkResultScanner();
hTableInterface.flushCommits();
hTableInterface.close();
}
private void checkConf() {
Configuration confExpected = util.getConfiguration();
Configuration confActual = hTableInterface.getConfiguration();
assertTrue(confExpected == confActual);
}
private void checkNameAndDescriptor() throws IOException {
assertEquals(TEST_TABLE, hTableInterface.getName());
assertEquals(table.getTableDescriptor(), hTableInterface.getTableDescriptor());
}
private void checkAutoFlush() {
boolean initialAutoFlush = hTableInterface.isAutoFlush();
hTableInterface.setAutoFlush(false);
assertFalse(hTableInterface.isAutoFlush());
hTableInterface.setAutoFlush(true);
assertTrue(hTableInterface.isAutoFlush());
hTableInterface.setAutoFlush(initialAutoFlush);
}
private void checkBufferSize() throws IOException {
long initialWriteBufferSize = hTableInterface.getWriteBufferSize();
hTableInterface.setWriteBufferSize(12345L);
assertEquals(12345L, hTableInterface.getWriteBufferSize());
hTableInterface.setWriteBufferSize(initialWriteBufferSize);
}
private void checkExists() throws IOException {
boolean ex = hTableInterface.exists(new Get(ROW_A).addColumn(TEST_FAMILY, qualifierCol1));
assertTrue(ex);
Boolean[] exArray = hTableInterface.exists(Arrays.asList(new Get[] {
new Get(ROW_A).addColumn(TEST_FAMILY, qualifierCol1),
new Get(ROW_B).addColumn(TEST_FAMILY, qualifierCol1),
new Get(ROW_C).addColumn(TEST_FAMILY, qualifierCol1),
new Get(Bytes.toBytes("does not exist")).addColumn(TEST_FAMILY, qualifierCol1), }));
assertArrayEquals(new Boolean[] { Boolean.TRUE, Boolean.TRUE, Boolean.TRUE, Boolean.FALSE },
exArray);
}
@SuppressWarnings("deprecation")
private void checkGetRowOrBefore() throws IOException {
Result rowOrBeforeResult = hTableInterface.getRowOrBefore(ROW_A, TEST_FAMILY);
assertArrayEquals(ROW_A, rowOrBeforeResult.getRow());
}
private void checkAppend() throws IOException {
final byte[] appendValue = Bytes.toBytes("append");
Append append = new Append(qualifierCol1).add(TEST_FAMILY, qualifierCol1, appendValue);
Result appendResult = hTableInterface.append(append);
byte[] appendedRow = appendResult.getRow();
checkRowValue(appendedRow, appendValue);
}
private void checkPutsAndDeletes() throws IOException {
// put:
Put putD = new Put(ROW_D).add(TEST_FAMILY, qualifierCol1, bytes2);
hTableInterface.put(putD);
checkRowValue(ROW_D, bytes2);
// delete:
Delete delete = new Delete(ROW_D);
hTableInterface.delete(delete);
checkRowValue(ROW_D, null);
// multiple puts:
Put[] puts = new Put[] { new Put(ROW_D).add(TEST_FAMILY, qualifierCol1, bytes2),
new Put(ROW_E).add(TEST_FAMILY, qualifierCol1, bytes3) };
hTableInterface.put(Arrays.asList(puts));
checkRowsValues(new byte[][] { ROW_D, ROW_E }, new byte[][] { bytes2, bytes3 });
// multiple deletes:
Delete[] deletes = new Delete[] { new Delete(ROW_D), new Delete(ROW_E) };
hTableInterface.delete(new ArrayList<Delete>(Arrays.asList(deletes)));
checkRowsValues(new byte[][] { ROW_D, ROW_E }, new byte[][] { null, null });
}
private void checkCheckAndPut() throws IOException {
Put putC = new Put(ROW_C).add(TEST_FAMILY, qualifierCol1, bytes5);
assertFalse(hTableInterface.checkAndPut(ROW_C, TEST_FAMILY, qualifierCol1, /* expect */bytes4,
putC/* newValue */));
assertTrue(hTableInterface.checkAndPut(ROW_C, TEST_FAMILY, qualifierCol1, /* expect */bytes3,
putC/* newValue */));
checkRowValue(ROW_C, bytes5);
}
private void checkCheckAndDelete() throws IOException {
Delete delete = new Delete(ROW_C);
assertFalse(hTableInterface.checkAndDelete(ROW_C, TEST_FAMILY, qualifierCol1, bytes4, delete));
assertTrue(hTableInterface.checkAndDelete(ROW_C, TEST_FAMILY, qualifierCol1, bytes5, delete));
checkRowValue(ROW_C, null);
}
private void checkIncrementColumnValue() throws IOException {
hTableInterface.put(new Put(ROW_A).add(TEST_FAMILY, qualifierCol1, Bytes.toBytes(1L)));
checkRowValue(ROW_A, Bytes.toBytes(1L));
final long newVal = hTableInterface
.incrementColumnValue(ROW_A, TEST_FAMILY, qualifierCol1, 10L);
assertEquals(11L, newVal);
checkRowValue(ROW_A, Bytes.toBytes(11L));
final long newVal2 = hTableInterface.incrementColumnValue(ROW_A, TEST_FAMILY, qualifierCol1,
-10L, Durability.SYNC_WAL);
assertEquals(1L, newVal2);
checkRowValue(ROW_A, Bytes.toBytes(1L));
}
private void checkIncrement() throws IOException {
hTableInterface.increment(new Increment(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, -5L));
checkRowValue(ROW_A, Bytes.toBytes(-4L));
}
private void checkBatch() throws IOException, InterruptedException {
Object[] results1 = hTableInterface.batch(Arrays.asList(new Row[] {
new Increment(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, 2L),
new Increment(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, 2L) }));
assertEquals(2, results1.length);
for (Object r2 : results1) {
assertTrue(r2 instanceof Result);
}
checkRowValue(ROW_A, Bytes.toBytes(0L));
Object[] results2 = new Result[2];
hTableInterface.batch(
Arrays.asList(new Row[] { new Increment(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, 2L),
new Increment(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, 2L) }), results2);
for (Object r2 : results2) {
assertTrue(r2 instanceof Result);
}
checkRowValue(ROW_A, Bytes.toBytes(4L));
// with callbacks:
final long[] updateCounter = new long[] { 0L };
Object[] results3 = hTableInterface.batchCallback(
Arrays.asList(new Row[] { new Increment(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, 2L),
new Increment(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, 2L) }),
new Batch.Callback<Result>() {
@Override
public void update(byte[] region, byte[] row, Result result) {
updateCounter[0]++;
}
});
assertEquals(2, updateCounter[0]);
assertEquals(2, results3.length);
for (Object r3 : results3) {
assertTrue(r3 instanceof Result);
}
checkRowValue(ROW_A, Bytes.toBytes(8L));
Object[] results4 = new Result[2];
updateCounter[0] = 0L;
hTableInterface.batchCallback(
Arrays.asList(new Row[] { new Increment(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, 2L),
new Increment(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, 2L) }), results4,
new Batch.Callback<Result>() {
@Override
public void update(byte[] region, byte[] row, Result result) {
updateCounter[0]++;
}
});
assertEquals(2, updateCounter[0]);
for (Object r2 : results4) {
assertTrue(r2 instanceof Result);
}
checkRowValue(ROW_A, Bytes.toBytes(12L));
}
private void checkCoprocessorService() {
CoprocessorRpcChannel crc = hTableInterface.coprocessorService(ROW_A);
assertNotNull(crc);
}
private void checkMutateRow() throws IOException {
Put put = new Put(ROW_A).add(TEST_FAMILY, qualifierCol1, bytes1);
RowMutations rowMutations = new RowMutations(ROW_A);
rowMutations.add(put);
hTableInterface.mutateRow(rowMutations);
checkRowValue(ROW_A, bytes1);
}
private void checkResultScanner() throws IOException {
ResultScanner resultScanner = hTableInterface.getScanner(TEST_FAMILY);
Result[] results = resultScanner.next(10);
assertEquals(3, results.length);
resultScanner = hTableInterface.getScanner(TEST_FAMILY, qualifierCol1);
results = resultScanner.next(10);
assertEquals(3, results.length);
resultScanner = hTableInterface.getScanner(new Scan(ROW_A, ROW_C));
results = resultScanner.next(10);
assertEquals(2, results.length);
}
private void checkRowValue(byte[] row, byte[] expectedValue) throws IOException {
Get get = new Get(row).addColumn(TEST_FAMILY, qualifierCol1);
Result result = hTableInterface.get(get);
byte[] actualValue = result.getValue(TEST_FAMILY, qualifierCol1);
assertArrayEquals(expectedValue, actualValue);
}
private void checkRowsValues(byte[][] rows, byte[][] expectedValues) throws IOException {
if (rows.length != expectedValues.length) {
throw new IllegalArgumentException();
}
Get[] gets = new Get[rows.length];
for (int i = 0; i < gets.length; i++) {
gets[i] = new Get(rows[i]).addColumn(TEST_FAMILY, qualifierCol1);
}
Result[] results = hTableInterface.get(Arrays.asList(gets));
for (int i = 0; i < expectedValues.length; i++) {
byte[] actualValue = results[i].getValue(TEST_FAMILY, qualifierCol1);
assertArrayEquals(expectedValues[i], actualValue);
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.apache.kafka.connect.data;
import org.apache.kafka.connect.errors.DataException;
import org.junit.Test;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
public class StructTest {
    // Schema with one field of each primitive Connect type; exercised by the
    // flat-struct and equality tests.
    private static final Schema FLAT_STRUCT_SCHEMA = SchemaBuilder.struct()
            .field("int8", Schema.INT8_SCHEMA)
            .field("int16", Schema.INT16_SCHEMA)
            .field("int32", Schema.INT32_SCHEMA)
            .field("int64", Schema.INT64_SCHEMA)
            .field("float32", Schema.FLOAT32_SCHEMA)
            .field("float64", Schema.FLOAT64_SCHEMA)
            .field("boolean", Schema.BOOLEAN_SCHEMA)
            .field("string", Schema.STRING_SCHEMA)
            .field("bytes", Schema.BYTES_SCHEMA)
            .build();

    // An array of int8 elements and a map from int32 keys to string values,
    // composed below into NESTED_SCHEMA.
    private static final Schema ARRAY_SCHEMA = SchemaBuilder.array(Schema.INT8_SCHEMA).build();
    private static final Schema MAP_SCHEMA = SchemaBuilder.map(
            Schema.INT32_SCHEMA,
            Schema.STRING_SCHEMA
    ).build();
    private static final Schema NESTED_CHILD_SCHEMA = SchemaBuilder.struct()
            .field("int8", Schema.INT8_SCHEMA)
            .build();
    private static final Schema NESTED_SCHEMA = SchemaBuilder.struct()
            .field("array", ARRAY_SCHEMA)
            .field("map", MAP_SCHEMA)
            .field("nested", NESTED_CHILD_SCHEMA)
            .build();

    // Field schemas for the missing-field validation tests: required,
    // optional, and required-with-default.
    private static final Schema REQUIRED_FIELD_SCHEMA = Schema.INT8_SCHEMA;
    private static final Schema OPTIONAL_FIELD_SCHEMA = SchemaBuilder.int8().optional().build();
    private static final Schema DEFAULT_FIELD_SCHEMA = SchemaBuilder.int8().defaultValue((byte) 0).build();
@Test
public void testFlatStruct() {
Struct struct = new Struct(FLAT_STRUCT_SCHEMA)
.put("int8", (byte) 12)
.put("int16", (short) 12)
.put("int32", 12)
.put("int64", (long) 12)
.put("float32", 12.f)
.put("float64", 12.)
.put("boolean", true)
.put("string", "foobar")
.put("bytes", "foobar".getBytes());
// Test equality, and also the type-specific getters
assertEquals((byte) 12, (byte) struct.getInt8("int8"));
assertEquals((short) 12, (short) struct.getInt16("int16"));
assertEquals(12, (int) struct.getInt32("int32"));
assertEquals((long) 12, (long) struct.getInt64("int64"));
assertEquals((Float) 12.f, struct.getFloat32("float32"));
assertEquals((Double) 12., struct.getFloat64("float64"));
assertEquals(true, struct.getBoolean("boolean"));
assertEquals("foobar", struct.getString("string"));
assertEquals(ByteBuffer.wrap("foobar".getBytes()), ByteBuffer.wrap(struct.getBytes("bytes")));
struct.validate();
}
@Test
public void testComplexStruct() {
List<Byte> array = Arrays.asList((byte) 1, (byte) 2);
Map<Integer, String> map = Collections.singletonMap(1, "string");
Struct struct = new Struct(NESTED_SCHEMA)
.put("array", array)
.put("map", map)
.put("nested", new Struct(NESTED_CHILD_SCHEMA).put("int8", (byte) 12));
// Separate the call to get the array and map to validate the typed get methods work properly
List<Byte> arrayExtracted = struct.getArray("array");
assertEquals(array, arrayExtracted);
Map<Byte, Byte> mapExtracted = struct.getMap("map");
assertEquals(map, mapExtracted);
assertEquals((byte) 12, struct.getStruct("nested").get("int8"));
struct.validate();
}
    // These don't test all the ways validation can fail, just one for each element. See more extensive validation
    // tests in SchemaTest. These are meant to ensure that we are invoking the same code path and that we do deeper
    // inspection than just checking the class of the object
    @Test(expected = DataException.class)
    public void testInvalidFieldType() {
        // A String value is not assignable to an int8 field.
        new Struct(FLAT_STRUCT_SCHEMA).put("int8", "should fail because this is a string, not int8");
    }
    @Test(expected = DataException.class)
    public void testInvalidArrayFieldElements() {
        // Element inspection: the list itself is fine, but it holds Strings, not int8s.
        new Struct(NESTED_SCHEMA).put("array", Arrays.asList("should fail since elements should be int8s"));
    }
    @Test(expected = DataException.class)
    public void testInvalidMapKeyElements() {
        // Key inspection: MAP_SCHEMA declares int32 keys, not Strings.
        new Struct(NESTED_SCHEMA).put("map", Collections.singletonMap("should fail because keys should be int8s", (byte) 12));
    }
/** A nested struct built against the wrong schema must be rejected. */
@Test(expected = DataException.class)
public void testInvalidStructFieldSchema() {
    Struct wrongSchemaChild = new Struct(MAP_SCHEMA);
    new Struct(NESTED_SCHEMA).put("nested", wrongSchemaChild);
}
/**
 * A child struct with the right schema but an invalid value must be rejected
 * (here: none of the child's fields are set — presumably fails deep validation).
 */
@Test(expected = DataException.class)
public void testInvalidStructFieldValue() {
    Struct emptyChild = new Struct(NESTED_CHILD_SCHEMA);
    new Struct(NESTED_SCHEMA).put("nested", emptyChild);
}
@Test(expected = DataException.class)
public void testMissingFieldValidation() {
    // Schema with a single required int8 field; leaving it unset must fail validation.
    Schema schema = SchemaBuilder.struct().field("field", REQUIRED_FIELD_SCHEMA).build();
    new Struct(schema).validate();
}
/** An unset optional field is permitted: validation must not throw. */
@Test
public void testMissingOptionalFieldValidation() {
    Schema schema = SchemaBuilder.struct().field("field", OPTIONAL_FIELD_SCHEMA).build();
    new Struct(schema).validate();
}
/** An unset field backed by a schema default is permitted: validation must not throw. */
@Test
public void testMissingFieldWithDefaultValidation() {
    Schema schema = SchemaBuilder.struct().field("field", DEFAULT_FIELD_SCHEMA).build();
    new Struct(schema).validate();
}
/**
 * Struct equality must be value-based: equal field values (including equal but
 * distinct collection instances) compare equal; any differing field breaks equality.
 */
@Test
public void testEquals() {
    // Flat structs: struct1/struct2 are identical, struct3 differs only in "string".
    Struct struct1 = createFlatStruct("foobar");
    Struct struct2 = createFlatStruct("foobar");
    Struct struct3 = createFlatStruct("mismatching string");
    assertEquals(struct1, struct2);
    assertNotEquals(struct1, struct3);

    // Nested structs: struct2 uses distinct-but-equal collection instances to
    // prove equality is by value, not by reference; struct3 differs everywhere.
    List<Byte> array = Arrays.asList((byte) 1, (byte) 2);
    Map<Integer, String> map = Collections.singletonMap(1, "string");
    struct1 = createNestedStruct(array, map, (byte) 12);
    List<Byte> array2 = Arrays.asList((byte) 1, (byte) 2);
    Map<Integer, String> map2 = Collections.singletonMap(1, "string");
    struct2 = createNestedStruct(array2, map2, (byte) 12);
    List<Byte> array3 = Arrays.asList((byte) 1, (byte) 2, (byte) 3);
    Map<Integer, String> map3 = Collections.singletonMap(2, "string");
    struct3 = createNestedStruct(array3, map3, (byte) 13);
    assertEquals(struct1, struct2);
    assertNotEquals(struct1, struct3);
}

/** Builds a fully populated FLAT_STRUCT_SCHEMA struct; only the "string" field varies. */
private Struct createFlatStruct(String stringValue) {
    return new Struct(FLAT_STRUCT_SCHEMA)
            .put("int8", (byte) 12)
            .put("int16", (short) 12)
            .put("int32", 12)
            .put("int64", (long) 12)
            .put("float32", 12.f)
            .put("float64", 12.)
            .put("boolean", true)
            .put("string", stringValue)
            .put("bytes", ByteBuffer.wrap("foobar".getBytes()));
}

/** Builds a NESTED_SCHEMA struct from the given collections and nested int8 value. */
private Struct createNestedStruct(List<Byte> array, Map<Integer, String> map, byte nestedInt8) {
    return new Struct(NESTED_SCHEMA)
            .put("array", array)
            .put("map", map)
            .put("nested", new Struct(NESTED_CHILD_SCHEMA).put("int8", nestedInt8));
}
}
| |
/*
* Druid - a distributed column store.
* Copyright 2012 - 2015 Metamarkets Group Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.druid.query;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.timeboundary.TimeBoundaryResultValue;
import io.druid.query.timeseries.TimeseriesResultValue;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.joda.time.Period;
import org.junit.Assert;
import org.junit.Test;
import java.util.Arrays;
import java.util.Map;
/**
 * Unit tests for {@code TimewarpOperator}. Based on the assertions below, the
 * operator maps a query's time range back onto a fixed reference interval
 * (offset by whole periods) and shifts the timestamps of the results forward
 * again so they appear in the originally requested range.
 */
public class TimewarpOperatorTest
{
  public static final ImmutableMap<String, Object> CONTEXT = ImmutableMap.of();

  // Operator under test: one-week period over the reference window 2014-01-01/2014-01-15.
  TimewarpOperator<Result<TimeseriesResultValue>> testOperator = new TimewarpOperator<>(
      new Interval(new DateTime("2014-01-01"), new DateTime("2014-01-15")),
      new Period("P1W"),
      new DateTime("2014-01-06") // align on Monday
  );

  /**
   * computeOffset returns the (negative) millisecond offset that maps a
   * timestamp onto the equivalent instant inside the reference window.
   */
  @Test
  public void testComputeOffset() throws Exception
  {
    {
      // 2014-01-23 is exactly two periods (weeks) after 2014-01-09.
      final DateTime t = new DateTime("2014-01-23");
      final DateTime tOffset = new DateTime("2014-01-09");
      Assert.assertEquals(
          new DateTime(tOffset),
          t.plus(testOperator.computeOffset(t.getMillis()))
      );
    }
    {
      final DateTime t = new DateTime("2014-08-02");
      final DateTime tOffset = new DateTime("2014-01-11");
      Assert.assertEquals(
          new DateTime(tOffset),
          t.plus(testOperator.computeOffset(t.getMillis()))
      );
    }
  }

  /**
   * postProcess wraps a runner so that results produced inside the warped
   * (reference) interval come back with timestamps shifted into the original
   * query interval; also covers TimeBoundary results, whose "maxTime" value
   * must be remapped as well.
   */
  @Test
  public void testPostProcess() throws Exception
  {
    QueryRunner<Result<TimeseriesResultValue>> queryRunner = testOperator.postProcess(
        new QueryRunner<Result<TimeseriesResultValue>>()
        {
          // Inner runner fakes results at warped timestamps (2014-01-09/11 map
          // to 2014-07-31 and 2014-08-02 in the outer query's frame).
          @Override
          public Sequence<Result<TimeseriesResultValue>> run(
              Query<Result<TimeseriesResultValue>> query,
              Map<String, Object> responseContext
          )
          {
            return Sequences.simple(
                ImmutableList.of(
                    new Result<>(
                        new DateTime(new DateTime("2014-01-09")),
                        new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 2))
                    ),
                    new Result<>(
                        new DateTime(new DateTime("2014-01-11")),
                        new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 3))
                    ),
                    new Result<>(
                        // A result at the (warped) interval end should be clamped to "now" below.
                        query.getIntervals().get(0).getEnd(),
                        new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 5))
                    )
                )
            );
          }
        },
        new DateTime("2014-08-02").getMillis()
    );
    final Query<Result<TimeseriesResultValue>> query =
        Druids.newTimeseriesQueryBuilder()
              .dataSource("dummy")
              .intervals("2014-07-31/2014-08-05")
              .aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("count")))
              .build();
    Assert.assertEquals(
        Lists.newArrayList(
            new Result<>(
                new DateTime("2014-07-31"),
                new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 2))
            ),
            new Result<>(
                new DateTime("2014-08-02"),
                new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 3))
            ),
            new Result<>(
                new DateTime("2014-08-02"),
                new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 5))
            )
        ),
        Sequences.toList(queryRunner.run(query, CONTEXT), Lists.<Result<TimeseriesResultValue>>newArrayList())
    );

    // Same operator configuration, but for TimeBoundary result values.
    TimewarpOperator<Result<TimeBoundaryResultValue>> timeBoundaryOperator = new TimewarpOperator<>(
        new Interval(new DateTime("2014-01-01"), new DateTime("2014-01-15")),
        new Period("P1W"),
        new DateTime("2014-01-06") // align on Monday
    );
    QueryRunner<Result<TimeBoundaryResultValue>> timeBoundaryRunner = timeBoundaryOperator.postProcess(
        new QueryRunner<Result<TimeBoundaryResultValue>>()
        {
          @Override
          public Sequence<Result<TimeBoundaryResultValue>> run(
              Query<Result<TimeBoundaryResultValue>> query,
              Map<String, Object> responseContext
          )
          {
            return Sequences.simple(
                ImmutableList.of(
                    new Result<>(
                        new DateTime("2014-01-12"),
                        new TimeBoundaryResultValue(
                            ImmutableMap.<String, Object>of(
                                "maxTime",
                                new DateTime("2014-01-12")
                            )
                        )
                    )
                )
            );
          }
        },
        new DateTime("2014-08-02").getMillis()
    );
    final Query<Result<TimeBoundaryResultValue>> timeBoundaryQuery =
        Druids.newTimeBoundaryQueryBuilder()
              .dataSource("dummy")
              .build();
    // Both the result timestamp and the embedded "maxTime" value must be remapped.
    Assert.assertEquals(
        Lists.newArrayList(
            new Result<>(
                new DateTime("2014-08-02"),
                new TimeBoundaryResultValue(ImmutableMap.<String, Object>of("maxTime", new DateTime("2014-08-02")))
            )
        ),
        Sequences.toList(
            timeBoundaryRunner.run(timeBoundaryQuery, CONTEXT),
            Lists.<Result<TimeBoundaryResultValue>>newArrayList()
        )
    );
  }

  /**
   * A query interval entirely after the reference window: per the assertions,
   * both result timestamps collapse to the operator's "now" (2014-08-02).
   */
  @Test
  public void testEmptyFutureInterval() throws Exception
  {
    QueryRunner<Result<TimeseriesResultValue>> queryRunner = testOperator.postProcess(
        new QueryRunner<Result<TimeseriesResultValue>>()
        {
          // Inner runner echoes results at the warped interval's start and end.
          @Override
          public Sequence<Result<TimeseriesResultValue>> run(
              Query<Result<TimeseriesResultValue>> query,
              Map<String, Object> responseContext
          )
          {
            return Sequences.simple(
                ImmutableList.of(
                    new Result<>(
                        query.getIntervals().get(0).getStart(),
                        new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 2))
                    ),
                    new Result<>(
                        query.getIntervals().get(0).getEnd(),
                        new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 3))
                    )
                )
            );
          }
        },
        new DateTime("2014-08-02").getMillis()
    );
    final Query<Result<TimeseriesResultValue>> query =
        Druids.newTimeseriesQueryBuilder()
              .dataSource("dummy")
              .intervals("2014-08-06/2014-08-08")
              .aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("count")))
              .build();
    Assert.assertEquals(
        Lists.newArrayList(
            new Result<>(
                new DateTime("2014-08-02"),
                new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 2))
            ),
            new Result<>(
                new DateTime("2014-08-02"),
                new TimeseriesResultValue(ImmutableMap.<String, Object>of("metric", 3))
            )
        ),
        Sequences.toList(queryRunner.run(query, Maps.<String, Object>newHashMap()), Lists.<Result<TimeseriesResultValue>>newArrayList())
    );
  }
}
| |
package geometry;
import java.util.*;
/**
 * A simple indexed mesh: 3D points keyed by caller-assigned integer ids, plus
 * faces that reference points by id. Several ready-made example shapes are
 * exposed as constants, built by the private factory methods below.
 */
public class Mesh {
    public static final Mesh CUBE = getCube();
    public static final Mesh HEART = getHeart();
    public static final Mesh TETRAHEDRON = getTetrahedron();
    public static final Mesh TORUS = getTorus();
    public static final Mesh TRIANGLE_2D = get2DTriangleWith1Face();
    public static final Mesh COMPLEX_TRIANGLE_2D = get2DTriangleWith4Faces();

    // Point id -> point. Ids are chosen by callers and need not be contiguous
    // (see getTorus, which deliberately skips ids).
    private Map<Integer, Point3D> points;
    private List<Face> faces;

    /** Creates an empty mesh with no points and no faces. */
    public Mesh() {
        points = new HashMap<Integer, Point3D>();
        faces = new ArrayList<Face>();
    }

    /** Returns the internal face list (not a defensive copy). */
    public List<Face> getFaces() {
        return faces;
    }

    /**
     * Adds a face, but only if every point id it references is already present
     * in this mesh; otherwise prints a message to stdout and drops the face.
     */
    public void addFace(Face face) {
        for (Integer point: face.points) {
            if (! points.containsKey(point)) {
                System.out.println("No point with id: '" + point + "' in mesh.");
                return;
            }
        }
        faces.add(face);
    }

    /** Returns the point with the given id, or null if no such point exists. */
    public Point3D getPoint(int id) {
        return points.get(id);
    }

    /** Returns the vector of the point with the given id (NPE for unknown ids). */
    public Vector3D getVector(int id) {
        return points.get(id).getVector();
    }

    /** Resolves each id to its point; unknown ids yield null entries in the result. */
    public List<Point3D> getPoints(Integer[] ids) {
        List<Point3D> outPoints = new ArrayList<Point3D>(ids.length);
        for (int id : ids) {
            outPoints.add(points.get(id));
        }
        return outPoints;
    }

    /** Returns all point ids (a live view of the underlying key set). */
    public Set<Integer> getAllPointsIDs() {
        return points.keySet();
    }

    /** Resolves each id to its point's vector (NPE for unknown ids). */
    public List<Vector3D> getVectors(Integer ... ids) {
        List<Vector3D> outVectors = new ArrayList<Vector3D>(ids.length);
        for (int id : ids) {
            outVectors.add(points.get(id).getVector());
        }
        return outVectors;
    }

    /** List-based convenience overload of {@link #getVectors(Integer...)}. */
    public List<Vector3D> getVectors(List<Integer> ids) {
        return getVectors(ids.toArray(new Integer[ids.size()]));
    }

    /** Returns the number of points in this mesh. */
    public int getPointsSize() {
        return points.size();
    }

    /**
     * Adds a point; ids must be non-negative and unique within the mesh.
     * Invalid additions are reported on stdout and ignored rather than throwing.
     */
    public void addPoint(Point3D point) {
        if (point.getId() < 0) {
            System.out.println("Point ID cannot be < 0. Found: " + point.getId());
            return;
        }
        if (points.containsKey(point.getId())) {
            System.out.println("Mesh already contains point with id: '" + point + "'.");
            return;
        }
        points.put(point.getId(), point);
    }

    /** Unit cube centered at the origin: 8 corners, 6 quad faces. */
    private static Mesh getCube() {
        double[] center = {0.0, 0.0, 0.0};
        double scale = 1.0;
        Mesh cube = new Mesh();
        cube.addPoint(new Point3D(1, -0.5 * scale + center[0], -0.5 * scale + center[1], -0.5 * scale + center[2]));
        cube.addPoint(new Point3D(2, 0.5 * scale + center[0], -0.5 * scale + center[1], -0.5 * scale + center[2]));
        cube.addPoint(new Point3D(3, 0.5 * scale + center[0], 0.5 * scale + center[1], -0.5 * scale + center[2]));
        cube.addPoint(new Point3D(4, -0.5 * scale + center[0], 0.5 * scale + center[1], -0.5 * scale + center[2]));
        cube.addPoint(new Point3D(5, -0.5 * scale + center[0], -0.5 * scale + center[1], 0.5 * scale + center[2]));
        cube.addPoint(new Point3D(6, 0.5 * scale + center[0], -0.5 * scale + center[1], 0.5 * scale + center[2]));
        cube.addPoint(new Point3D(7, 0.5 * scale + center[0], 0.5 * scale + center[1], 0.5 * scale + center[2]));
        cube.addPoint(new Point3D(8, -0.5 * scale + center[0], 0.5 * scale + center[1], 0.5 * scale + center[2]));
        cube.addFace(new Face(cube, 4,3,2,1));
        cube.addFace(new Face(cube, 1,2,6,5));
        cube.addFace(new Face(cube, 2,3,7,6));
        cube.addFace(new Face(cube, 3,4,8,7));
        cube.addFace(new Face(cube, 4,1,5,8));
        cube.addFace(new Face(cube, 5,6,7,8));
        return cube;
    }

    /** Triangle in the z=0 plane as a single face. */
    private static Mesh get2DTriangleWith1Face() {
        double[] center = {0.0, 0.0, 0.0};
        double scale = 1.0;
        Mesh triangle = new Mesh();
        triangle.addPoint(new Point3D(1, -0.5 * scale + center[0], -0.289 * scale + center[1], 0.0 * scale + center[2]));
        triangle.addPoint(new Point3D(2, 0.0 * scale + center[0], 0.577 * scale + center[1], 0.0 * scale + center[2]));
        triangle.addPoint(new Point3D(3, 0.5 * scale + center[0], -0.289 * scale + center[1], 0.0 * scale + center[2]));
        triangle.addFace(new Face(triangle, 3, 2, 1));
        return triangle;
    }

    /** Same triangle outline subdivided into four faces via edge midpoints. */
    private static Mesh get2DTriangleWith4Faces() {
        double[] center = {0.0, 0.0, 0.0};
        double scale = 1.0;
        Mesh triangle = new Mesh();
        triangle.addPoint(new Point3D(1, -0.5 * scale + center[0], -0.289 * scale + center[1], 0.0 * scale + center[2]));
        triangle.addPoint(new Point3D(2, -0.25 * scale + center[0], 0.144 * scale + center[1], 0.0 * scale + center[2]));
        triangle.addPoint(new Point3D(3, 0.0 * scale + center[0], 0.577 * scale + center[1], 0.0 * scale + center[2]));
        triangle.addPoint(new Point3D(4, 0.25 * scale + center[0], 0.144 * scale + center[1], 0.0 * scale + center[2]));
        triangle.addPoint(new Point3D(5, 0.5 * scale + center[0], -0.289 * scale + center[1], 0.0 * scale + center[2]));
        triangle.addPoint(new Point3D(6, 0.0 * scale + center[0], -0.289 * scale + center[1], 0.0 * scale + center[2]));
        triangle.addFace(new Face(triangle, 6, 2, 1));
        triangle.addFace(new Face(triangle, 6, 4, 2));
        triangle.addFace(new Face(triangle, 6, 5, 4));
        triangle.addFace(new Face(triangle, 4, 3, 2));
        return triangle;
    }

    /** Tetrahedron near the origin: 4 points, 4 triangular faces. */
    private static Mesh getTetrahedron() {
        double[] center = {0.0, 0.0, 0.0};
        double scale = 1.0;
        Mesh tetrahedron = new Mesh();
        tetrahedron.addPoint(new Point3D(1, 0.577 * scale + center[0], 0.0 * scale + center[1], -0.204 * scale + center[2]));
        tetrahedron.addPoint(new Point3D(2, -0.289 * scale + center[0], 0.5 * scale + center[1], -0.204 * scale + center[2]));
        tetrahedron.addPoint(new Point3D(3, -0.289 * scale + center[0], -0.5 * scale + center[1], -0.204 * scale + center[2]));
        tetrahedron.addPoint(new Point3D(4, 0.0 * scale + center[0], 0.0 * scale + center[1], 0.612 * scale + center[2]));
        tetrahedron.addFace(new Face(tetrahedron, 3, 2, 1));
        tetrahedron.addFace(new Face(tetrahedron, 2, 4, 1));
        tetrahedron.addFace(new Face(tetrahedron, 3, 4, 2));
        tetrahedron.addFace(new Face(tetrahedron, 1, 4, 3));
        return tetrahedron;
    }

    /**
     * Heart shape extruded along z: two copies of an 8-point outline (points
     * 1-8 at z-offset 0, points 9-16 at z-offset 1*scale) joined by side quads.
     */
    private static Mesh getHeart() {
        Mesh heart = new Mesh();
        double[] center = {-0.5, -0.5, -0.125};
        double scale = 0.25;
        heart.addPoint(new Point3D(1, 0.0 * scale + center[0], 4.0 * scale + center[1], 0.0 * scale + center[2]));
        heart.addPoint(new Point3D(2, 1.0 * scale + center[0], 4.0 * scale + center[1], 0.0 * scale + center[2]));
        heart.addPoint(new Point3D(3, 2.0 * scale + center[0], 3.0 * scale + center[1], 0.0 * scale + center[2]));
        heart.addPoint(new Point3D(4, 3.0 * scale + center[0], 4.0 * scale + center[1], 0.0 * scale + center[2]));
        heart.addPoint(new Point3D(5, 4.0 * scale + center[0], 4.0 * scale + center[1], 0.0 * scale + center[2]));
        heart.addPoint(new Point3D(6, 4.0 * scale + center[0], 2.0 * scale + center[1], 0.0 * scale + center[2]));
        heart.addPoint(new Point3D(7, 2.0 * scale + center[0], 0.0 * scale + center[1], 0.0 * scale + center[2]));
        heart.addPoint(new Point3D(8, 0.0 * scale + center[0], 2.0 * scale + center[1], 0.0 * scale + center[2]));
        heart.addPoint(new Point3D(9, 0.0 * scale + center[0], 4.0 * scale + center[1], 1.0 * scale + center[2]));
        heart.addPoint(new Point3D(10, 1.0 * scale + center[0], 4.0 * scale + center[1], 1.0 * scale + center[2]));
        heart.addPoint(new Point3D(11, 2.0 * scale + center[0], 3.0 * scale + center[1], 1.0 * scale + center[2]));
        heart.addPoint(new Point3D(12, 3.0 * scale + center[0], 4.0 * scale + center[1], 1.0 * scale + center[2]));
        heart.addPoint(new Point3D(13, 4.0 * scale + center[0], 4.0 * scale + center[1], 1.0 * scale + center[2]));
        heart.addPoint(new Point3D(14, 4.0 * scale + center[0], 2.0 * scale + center[1], 1.0 * scale + center[2]));
        heart.addPoint(new Point3D(15, 2.0 * scale + center[0], 0.0 * scale + center[1], 1.0 * scale + center[2]));
        heart.addPoint(new Point3D(16, 0.0 * scale + center[0], 2.0 * scale + center[1], 1.0 * scale + center[2]));
        heart.addFace(new Face(heart, 1, 2, 3, 7, 8));
        heart.addFace(new Face(heart, 3, 4, 5, 6, 7));
        heart.addFace(new Face(heart, 16, 15, 11, 10, 9));
        heart.addFace(new Face(heart, 15, 14, 13, 12, 11));
        heart.addFace(new Face(heart, 10, 2, 1, 9));
        heart.addFace(new Face(heart, 11, 3, 2, 10));
        heart.addFace(new Face(heart, 12, 4, 3, 11));
        heart.addFace(new Face(heart, 13, 5, 4, 12));
        heart.addFace(new Face(heart, 14, 6, 5, 13));
        heart.addFace(new Face(heart, 15, 7, 6, 14));
        heart.addFace(new Face(heart, 16, 8, 7, 15));
        heart.addFace(new Face(heart, 9, 1, 8, 16));
        return heart;
    }

    /**
     * Square torus built from two point rings (ids 2-15 at z-offset 0, ids
     * 18-31 at z-offset -1*scale). Point ids intentionally skip values so the
     * front/back rings keep a constant id offset of 16.
     */
    private static Mesh getTorus() {
        Mesh torus = new Mesh();
        double[] center = {-0.5, -0.5, 0.16};
        double scale = 0.33;
        torus.addPoint(new Point3D(2, 1.0 * scale + center[0], 0.0 * scale + center[1], 0.0 * scale + center[2]));
        torus.addPoint(new Point3D(3, 2.0 * scale + center[0], 0.0 * scale + center[1], 0.0 * scale + center[2]));
        torus.addPoint(new Point3D(5, 0.0 * scale + center[0], 1.0 * scale + center[1], 0.0 * scale + center[2]));
        torus.addPoint(new Point3D(6, 1.0 * scale + center[0], 1.0 * scale + center[1], 0.0 * scale + center[2]));
        torus.addPoint(new Point3D(7, 2.0 * scale + center[0], 1.0 * scale + center[1], 0.0 * scale + center[2]));
        torus.addPoint(new Point3D(8, 3.0 * scale + center[0], 1.0 * scale + center[1], 0.0 * scale + center[2]));
        torus.addPoint(new Point3D(9, 0.0 * scale + center[0], 2.0 * scale + center[1], 0.0 * scale + center[2]));
        torus.addPoint(new Point3D(10, 1.0 * scale + center[0], 2.0 * scale + center[1], 0.0 * scale + center[2]));
        torus.addPoint(new Point3D(11, 2.0 * scale + center[0], 2.0 * scale + center[1], 0.0 * scale + center[2]));
        torus.addPoint(new Point3D(12, 3.0 * scale + center[0], 2.0 * scale + center[1], 0.0 * scale + center[2]));
        torus.addPoint(new Point3D(14, 1.0 * scale + center[0], 3.0 * scale + center[1], 0.0 * scale + center[2]));
        torus.addPoint(new Point3D(15, 2.0 * scale + center[0], 3.0 * scale + center[1], 0.0 * scale + center[2]));
        torus.addPoint(new Point3D(18, 1.0 * scale + center[0], 0.0 * scale + center[1], -1.0 * scale + center[2]));
        torus.addPoint(new Point3D(19, 2.0 * scale + center[0], 0.0 * scale + center[1], -1.0 * scale + center[2]));
        torus.addPoint(new Point3D(21, 0.0 * scale + center[0], 1.0 * scale + center[1], -1.0 * scale + center[2]));
        torus.addPoint(new Point3D(22, 1.0 * scale + center[0], 1.0 * scale + center[1], -1.0 * scale + center[2]));
        torus.addPoint(new Point3D(23, 2.0 * scale + center[0], 1.0 * scale + center[1], -1.0 * scale + center[2]));
        torus.addPoint(new Point3D(24, 3.0 * scale + center[0], 1.0 * scale + center[1], -1.0 * scale + center[2]));
        torus.addPoint(new Point3D(25, 0.0 * scale + center[0], 2.0 * scale + center[1], -1.0 * scale + center[2]));
        torus.addPoint(new Point3D(26, 1.0 * scale + center[0], 2.0 * scale + center[1], -1.0 * scale + center[2]));
        torus.addPoint(new Point3D(27, 2.0 * scale + center[0], 2.0 * scale + center[1], -1.0 * scale + center[2]));
        torus.addPoint(new Point3D(28, 3.0 * scale + center[0], 2.0 * scale + center[1], -1.0 * scale + center[2]));
        torus.addPoint(new Point3D(30, 1.0 * scale + center[0], 3.0 * scale + center[1], -1.0 * scale + center[2]));
        torus.addPoint(new Point3D(31, 2.0 * scale + center[0], 3.0 * scale + center[1], -1.0 * scale + center[2]));
        //FACE (front side, z-offset 0)
        torus.addFace(new Face(torus, 2, 6, 5));
        torus.addFace(new Face(torus, 2, 3, 7, 6));
        torus.addFace(new Face(torus, 3, 8, 7));
        torus.addFace(new Face(torus, 5, 6, 10, 9));
        torus.addFace(new Face(torus, 7, 8, 12, 11));
        torus.addFace(new Face(torus, 9, 10, 14));
        torus.addFace(new Face(torus, 10, 11, 15, 14));
        torus.addFace(new Face(torus, 11, 12, 15));
        //BACK (rear side, z-offset -1*scale)
        torus.addFace(new Face(torus, 21, 22, 18));
        torus.addFace(new Face(torus, 22, 23, 19, 18));
        torus.addFace(new Face(torus, 23, 24, 19));
        torus.addFace(new Face(torus, 25, 26, 22, 21));
        torus.addFace(new Face(torus, 27, 28, 24, 23));
        torus.addFace(new Face(torus, 30, 26, 25));
        torus.addFace(new Face(torus, 30, 31, 27, 26));
        torus.addFace(new Face(torus, 31, 28, 27));
        //OUT (outer rim connecting front and back)
        torus.addFace(new Face(torus, 2, 5, 21, 18));
        torus.addFace(new Face(torus, 18, 19, 3, 2));
        torus.addFace(new Face(torus, 19, 24, 8, 3));
        torus.addFace(new Face(torus, 24, 28, 12, 8));
        torus.addFace(new Face(torus, 28, 31, 15, 12));
        torus.addFace(new Face(torus, 14, 15, 31, 30));
        torus.addFace(new Face(torus, 9, 14, 30, 25));
        torus.addFace(new Face(torus, 5, 9, 25, 21));
        //IN (inner hole walls)
        torus.addFace(new Face(torus, 6, 7, 23, 22));
        torus.addFace(new Face(torus, 7, 11, 27, 23));
        torus.addFace(new Face(torus, 26, 27, 11, 10));
        torus.addFace(new Face(torus, 22, 26, 10, 6));
        return torus;
    }
}
| |
/*
* Copyright 2011 Stefan C. Mueller.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.smurn.jply.util;
import org.smurn.jply.ListProperty;
import java.io.IOException;
import org.smurn.jply.Element;
import org.junit.Test;
import org.smurn.jply.DataType;
import org.smurn.jply.ElementReader;
import org.smurn.jply.ElementType;
import org.smurn.jply.Property;
import static org.mockito.Mockito.*;
import static org.junit.Assert.*;
/**
* Unit tests for {@link NormalGenerator}.
*/
public class NormalGeneratorTest {
/**
 * One counter-clockwise triangle in the z=0 plane: every vertex should end up
 * with the face normal (0, 0, 1).
 */
@Test
public void singleFace() throws IOException {
    ElementType vertexType = new ElementType(
            "vertex",
            new Property("x", DataType.DOUBLE),
            new Property("y", DataType.DOUBLE),
            new Property("z", DataType.DOUBLE),
            new Property("nx", DataType.DOUBLE),
            new Property("ny", DataType.DOUBLE),
            new Property("nz", DataType.DOUBLE));

    // Table of vertex positions; expected elements carry the same positions plus a +z normal.
    double[][] positions = { {0, 0, 0}, {1, 0, 0}, {1, 1, 0} };
    Element[] input = new Element[positions.length];
    Element[] expected = new Element[positions.length];
    for (int i = 0; i < positions.length; i++) {
        input[i] = new Element(vertexType);
        input[i].setDouble("x", positions[i][0]);
        input[i].setDouble("y", positions[i][1]);
        input[i].setDouble("z", positions[i][2]);
        expected[i] = new Element(vertexType);
        expected[i].setDouble("x", positions[i][0]);
        expected[i].setDouble("y", positions[i][1]);
        expected[i].setDouble("z", positions[i][2]);
        expected[i].setDouble("nx", 0);
        expected[i].setDouble("ny", 0);
        expected[i].setDouble("nz", 1);
    }

    ElementType faceType = new ElementType(
            "face",
            new ListProperty(DataType.UCHAR, "vertex_index", DataType.INT));
    Element face = new Element(faceType);
    face.setIntList("vertex_index", new int[]{0, 1, 2});

    ElementReader vertexSource = mock(ElementReader.class);
    when(vertexSource.getElementType()).thenReturn(vertexType);
    when(vertexSource.readElement()).
            thenReturn(input[0]).
            thenReturn(input[1]).
            thenReturn(input[2]).
            thenReturn(null);
    BufferedElementReader vertexReader =
            new BufferedElementReader(vertexSource);

    ElementReader faceReader = mock(ElementReader.class);
    when(faceReader.getElementType()).thenReturn(faceType);
    when(faceReader.readElement()).
            thenReturn(face).
            thenReturn(null);

    new NormalGenerator().generateNormals(vertexReader, faceReader);

    vertexReader.reset();
    assertEquals(expected[0], vertexReader.readElement());
    assertEquals(expected[1], vertexReader.readElement());
    assertEquals(expected[2], vertexReader.readElement());
}
/**
 * Two faces sharing the edge (v0, v2): a triangle in the z=0 plane plus a
 * second triangle folded out of the plane. Vertices on the shared edge should
 * get a blended normal; vertices 1 and 3 keep their single face's normal.
 */
@Test
public void twoFace() throws IOException {
    ElementType vertexType = new ElementType(
            "vertex",
            new Property("x", DataType.DOUBLE),
            new Property("y", DataType.DOUBLE),
            new Property("z", DataType.DOUBLE),
            new Property("nx", DataType.DOUBLE),
            new Property("ny", DataType.DOUBLE),
            new Property("nz", DataType.DOUBLE));
    Element vertex0 = new Element(vertexType);
    vertex0.setDouble("x", 0);
    vertex0.setDouble("y", 0);
    vertex0.setDouble("z", 0);
    Element vertex1 = new Element(vertexType);
    vertex1.setDouble("x", 1);
    vertex1.setDouble("y", 0);
    vertex1.setDouble("z", 0);
    Element vertex2 = new Element(vertexType);
    vertex2.setDouble("x", 1);
    vertex2.setDouble("y", 1);
    vertex2.setDouble("z", 0);
    Element vertex3 = new Element(vertexType);
    vertex3.setDouble("x", 0.5);
    vertex3.setDouble("y", 0.5);
    vertex3.setDouble("z", Math.sqrt(2.0) / 2.0);

    // Shared-edge vertices 0 and 2 average the two face normals (0,0,1) and
    // (1/sqrt2, -1/sqrt2, 0); vertex 1 sees only face0, vertex 3 only face1.
    Element expected0 = new Element(vertexType);
    expected0.setDouble("x", 0);
    expected0.setDouble("y", 0);
    expected0.setDouble("z", 0);
    expected0.setDouble("nx", 0.5);
    expected0.setDouble("ny", -0.5);
    expected0.setDouble("nz", Math.sqrt(0.5));
    Element expected1 = new Element(vertexType);
    expected1.setDouble("x", 1);
    expected1.setDouble("y", 0);
    expected1.setDouble("z", 0);
    expected1.setDouble("nx", 0);
    expected1.setDouble("ny", 0);
    expected1.setDouble("nz", 1);
    Element expected2 = new Element(vertexType);
    expected2.setDouble("x", 1);
    expected2.setDouble("y", 1);
    expected2.setDouble("z", 0);
    expected2.setDouble("nx", 0.5);
    expected2.setDouble("ny", -0.5);
    expected2.setDouble("nz", Math.sqrt(0.5));
    Element expected3 = new Element(vertexType);
    expected3.setDouble("x", 0.5);
    expected3.setDouble("y", 0.5);
    expected3.setDouble("z", Math.sqrt(2.0) / 2.0);
    expected3.setDouble("nx", 1.0 / Math.sqrt(2));
    expected3.setDouble("ny", -1.0 / Math.sqrt(2));
    expected3.setDouble("nz", 0);

    ElementType faceType = new ElementType(
            "face",
            new ListProperty(DataType.UCHAR, "vertex_index", DataType.INT));
    Element face0 = new Element(faceType);
    face0.setIntList("vertex_index", new int[]{0, 1, 2});
    Element face1 = new Element(faceType);
    face1.setIntList("vertex_index", new int[]{0, 2, 3});

    ElementReader vertexReaderMock = mock(ElementReader.class);
    when(vertexReaderMock.getElementType()).thenReturn(vertexType);
    when(vertexReaderMock.readElement()).
            thenReturn(vertex0).
            thenReturn(vertex1).
            thenReturn(vertex2).
            thenReturn(vertex3).
            thenReturn(null);
    BufferedElementReader vertexReader =
            new BufferedElementReader(vertexReaderMock);
    ElementReader faceReader = mock(ElementReader.class);
    when(faceReader.getElementType()).thenReturn(faceType);
    when(faceReader.readElement()).
            thenReturn(face0).
            thenReturn(face1).
            thenReturn(null);

    NormalGenerator target = new NormalGenerator();
    target.generateNormals(vertexReader, faceReader);

    vertexReader.reset();
    assertTrue(expected0.equals(vertexReader.readElement(), 1E-6));
    assertTrue(expected1.equals(vertexReader.readElement(), 1E-6));
    assertTrue(expected2.equals(vertexReader.readElement(), 1E-6));
    // BUG FIX: expected3 was constructed but never asserted, so the normal of
    // the out-of-plane vertex went unverified. Check it as well.
    assertTrue(expected3.equals(vertexReader.readElement(), 1E-6));
}
/**
 * Vertex 0 is shared by a wide-angled face and a narrower one; only the
 * resulting (angle-dependent) normal at vertex 0 is asserted.
 */
@Test
public void wideAngle() throws IOException {
    ElementType vertexType = new ElementType(
            "vertex",
            new Property("x", DataType.DOUBLE),
            new Property("y", DataType.DOUBLE),
            new Property("z", DataType.DOUBLE),
            new Property("nx", DataType.DOUBLE),
            new Property("ny", DataType.DOUBLE),
            new Property("nz", DataType.DOUBLE));

    // Vertex positions; vertex 2 lies at (-1, 1, 0), widening face0's angle at vertex 0.
    double[][] positions = { {0, 0, 0}, {1, 0, 0}, {-1, 1, 0}, {0, 0, 1} };
    Element[] vertices = new Element[positions.length];
    for (int i = 0; i < positions.length; i++) {
        vertices[i] = new Element(vertexType);
        vertices[i].setDouble("x", positions[i][0]);
        vertices[i].setDouble("y", positions[i][1]);
        vertices[i].setDouble("z", positions[i][2]);
    }

    Element expected0 = new Element(vertexType);
    expected0.setDouble("x", 0);
    expected0.setDouble("y", 0);
    expected0.setDouble("z", 0);
    expected0.setDouble("nx", 0);
    expected0.setDouble("ny", 2.0/Math.sqrt(13));
    expected0.setDouble("nz", 3.0/Math.sqrt(13));

    ElementType faceType = new ElementType(
            "face",
            new ListProperty(DataType.UCHAR, "vertex_index", DataType.INT));
    Element face0 = new Element(faceType);
    face0.setIntList("vertex_index", new int[]{0, 1, 2});
    Element face1 = new Element(faceType);
    face1.setIntList("vertex_index", new int[]{0, 3, 1});

    ElementReader vertexSource = mock(ElementReader.class);
    when(vertexSource.getElementType()).thenReturn(vertexType);
    when(vertexSource.readElement()).
            thenReturn(vertices[0]).
            thenReturn(vertices[1]).
            thenReturn(vertices[2]).
            thenReturn(vertices[3]).
            thenReturn(null);
    BufferedElementReader vertexReader =
            new BufferedElementReader(vertexSource);

    ElementReader faceReader = mock(ElementReader.class);
    when(faceReader.getElementType()).thenReturn(faceType);
    when(faceReader.readElement()).
            thenReturn(face0).
            thenReturn(face1).
            thenReturn(null);

    new NormalGenerator().generateNormals(vertexReader, faceReader);

    vertexReader.reset();
    Element actual = vertexReader.readElement();
    assertTrue(expected0.equals(actual, 1E-6));
}
/**
 * Counterpart to wideAngle: vertex 2 sits at (1, 1, 0), so face0 subtends a
 * narrower angle at vertex 0 and the blended normal shifts toward face1's.
 */
@Test
public void narrowAngle() throws IOException {
    ElementType vertexType = new ElementType(
            "vertex",
            new Property("x", DataType.DOUBLE),
            new Property("y", DataType.DOUBLE),
            new Property("z", DataType.DOUBLE),
            new Property("nx", DataType.DOUBLE),
            new Property("ny", DataType.DOUBLE),
            new Property("nz", DataType.DOUBLE));

    double[][] positions = { {0, 0, 0}, {1, 0, 0}, {1, 1, 0}, {0, 0, 1} };
    Element[] vertices = new Element[positions.length];
    for (int i = 0; i < positions.length; i++) {
        vertices[i] = new Element(vertexType);
        vertices[i].setDouble("x", positions[i][0]);
        vertices[i].setDouble("y", positions[i][1]);
        vertices[i].setDouble("z", positions[i][2]);
    }

    Element expected0 = new Element(vertexType);
    expected0.setDouble("x", 0);
    expected0.setDouble("y", 0);
    expected0.setDouble("z", 0);
    expected0.setDouble("nx", 0);
    expected0.setDouble("ny", 2.0/Math.sqrt(5));
    expected0.setDouble("nz", 1.0/Math.sqrt(5));

    ElementType faceType = new ElementType(
            "face",
            new ListProperty(DataType.UCHAR, "vertex_index", DataType.INT));
    Element face0 = new Element(faceType);
    face0.setIntList("vertex_index", new int[]{0, 1, 2});
    Element face1 = new Element(faceType);
    face1.setIntList("vertex_index", new int[]{0, 3, 1});

    ElementReader vertexSource = mock(ElementReader.class);
    when(vertexSource.getElementType()).thenReturn(vertexType);
    when(vertexSource.readElement()).
            thenReturn(vertices[0]).
            thenReturn(vertices[1]).
            thenReturn(vertices[2]).
            thenReturn(vertices[3]).
            thenReturn(null);
    BufferedElementReader vertexReader =
            new BufferedElementReader(vertexSource);

    ElementReader faceReader = mock(ElementReader.class);
    when(faceReader.getElementType()).thenReturn(faceType);
    when(faceReader.readElement()).
            thenReturn(face0).
            thenReturn(face1).
            thenReturn(null);

    new NormalGenerator().generateNormals(vertexReader, faceReader);

    vertexReader.reset();
    Element actual = vertexReader.readElement();
    assertTrue(expected0.equals(actual, 1E-6));
}
@Test
public void clockwise() throws IOException {
    // Vertex layout: position (x, y, z) plus a slot for the generated normal (nx, ny, nz).
    ElementType vertexElementType = new ElementType(
            "vertex",
            new Property("x", DataType.DOUBLE),
            new Property("y", DataType.DOUBLE),
            new Property("z", DataType.DOUBLE),
            new Property("nx", DataType.DOUBLE),
            new Property("ny", DataType.DOUBLE),
            new Property("nz", DataType.DOUBLE));

    // A single triangle lying in the z = 0 plane.
    double[][] positions = {{0, 0, 0}, {1, 0, 0}, {1, 1, 0}};
    Element[] vertices = new Element[positions.length];
    for (int i = 0; i < positions.length; i++) {
        vertices[i] = new Element(vertexElementType);
        vertices[i].setDouble("x", positions[i][0]);
        vertices[i].setDouble("y", positions[i][1]);
        vertices[i].setDouble("z", positions[i][2]);
    }

    // With clockwise winding the normal of this triangle points along -z
    // for every one of its vertices.
    Element[] expectedVertices = new Element[positions.length];
    for (int i = 0; i < positions.length; i++) {
        expectedVertices[i] = new Element(vertexElementType);
        expectedVertices[i].setDouble("x", positions[i][0]);
        expectedVertices[i].setDouble("y", positions[i][1]);
        expectedVertices[i].setDouble("z", positions[i][2]);
        expectedVertices[i].setDouble("nx", 0);
        expectedVertices[i].setDouble("ny", 0);
        expectedVertices[i].setDouble("nz", -1);
    }

    ElementType faceElementType = new ElementType(
            "face",
            new ListProperty(DataType.UCHAR, "vertex_index", DataType.INT));
    Element triangle = new Element(faceElementType);
    triangle.setIntList("vertex_index", new int[]{0, 1, 2});

    // Stub vertex reader: yields the three vertices, then end-of-stream (null).
    ElementReader rawVertexReader = mock(ElementReader.class);
    when(rawVertexReader.getElementType()).thenReturn(vertexElementType);
    when(rawVertexReader.readElement())
            .thenReturn(vertices[0])
            .thenReturn(vertices[1])
            .thenReturn(vertices[2])
            .thenReturn(null);
    BufferedElementReader vertexReader =
            new BufferedElementReader(rawVertexReader);

    // Stub face reader: yields the single triangle, then end-of-stream.
    ElementReader faceReader = mock(ElementReader.class);
    when(faceReader.getElementType()).thenReturn(faceElementType);
    when(faceReader.readElement())
            .thenReturn(triangle)
            .thenReturn(null);

    NormalGenerator target = new NormalGenerator();
    target.setCounterClockwise(false);
    target.generateNormals(vertexReader, faceReader);

    // Rewind the buffered reader and verify each vertex received the expected normal.
    vertexReader.reset();
    for (Element expected : expectedVertices) {
        assertEquals(expected, vertexReader.readElement());
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.lang3;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
* <p>
* Helpers for {@code java.lang.Thread} and {@code java.lang.ThreadGroup}.
* </p>
* <p>
* #ThreadSafe#
* </p>
*
* @see java.lang.Thread
* @see java.lang.ThreadGroup
* @since 3.5
*/
public class ThreadUtils {
/**
 * Return the active thread with the specified id if it belongs to the specified thread group.
 *
 * @param threadId The thread id
 * @param threadGroup The thread group
 * @return The thread which belongs to the specified thread group and whose id matches the specified id.
 * {@code null} is returned if no such thread exists
 * @throws IllegalArgumentException if the specified id is zero or negative or the group is null
 * @throws SecurityException if the current thread cannot access the system thread group, or cannot
 * modify thread groups from this thread's thread group up to the system thread group
 */
public static Thread findThreadById(final long threadId, final ThreadGroup threadGroup) {
if (threadGroup == null) {
throw new IllegalArgumentException("The thread group must not be null");
}
final Thread thread = findThreadById(threadId);
if(thread != null && threadGroup.equals(thread.getThreadGroup())) {
return thread;
}
return null;
}
/**
 * Return the active thread with the specified id if it belongs to a thread group with the specified group name.
 *
 * @param threadId The thread id
 * @param threadGroupName The thread group name
 * @return The thread which belongs to a thread group with the specified group name and whose id matches the specified id.
 * {@code null} is returned if no such thread exists
 * @throws IllegalArgumentException if the specified id is zero or negative or the group name is null
 * @throws SecurityException if the current thread cannot access the system thread group, or cannot
 * modify thread groups from this thread's thread group up to the system thread group
 */
public static Thread findThreadById(final long threadId, final String threadGroupName) {
if (threadGroupName == null) {
throw new IllegalArgumentException("The thread group name must not be null");
}
final Thread thread = findThreadById(threadId);
if(thread != null && thread.getThreadGroup() != null && thread.getThreadGroup().getName().equals(threadGroupName)) {
return thread;
}
return null;
}
/**
 * Return active threads with the specified name if they belong to the specified thread group.
 * The group itself is searched, but not its subgroups.
 *
 * @param threadName The thread name
 * @param threadGroup The thread group
 * @return The threads which belong to the thread group and whose name matches the specified name.
 * An empty collection is returned if no such thread exists. The collection returned is always unmodifiable.
 * @throws IllegalArgumentException if the specified thread name or group is null
 * @throws SecurityException if the current thread cannot access the system thread group, or cannot
 * modify thread groups from this thread's thread group up to the system thread group
 */
public static Collection<Thread> findThreadsByName(final String threadName, final ThreadGroup threadGroup) {
return findThreads(threadGroup, false, new NamePredicate(threadName));
}
/**
 * Return active threads with the specified name if they belong to a thread group with the specified group name.
 *
 * @param threadName The thread name
 * @param threadGroupName The thread group name
 * @return The threads which belong to a thread group with the specified group name and whose name matches
 * the specified name. An empty collection is returned if no such thread exists. The collection returned
 * is always unmodifiable.
 * @throws IllegalArgumentException if the specified thread name or group name is null
 * @throws SecurityException if the current thread cannot access the system thread group, or cannot
 * modify thread groups from this thread's thread group up to the system thread group
 */
public static Collection<Thread> findThreadsByName(final String threadName, final String threadGroupName) {
if (threadName == null) {
throw new IllegalArgumentException("The thread name must not be null");
}
if (threadGroupName == null) {
throw new IllegalArgumentException("The thread group name must not be null");
}
final Collection<ThreadGroup> threadGroups = findThreadGroups(new NamePredicate(threadGroupName));
if(threadGroups.isEmpty()) {
return Collections.emptyList();
}
final Collection<Thread> result = new ArrayList<Thread>();
final NamePredicate threadNamePredicate = new NamePredicate(threadName);
// Several distinct groups may share the same name; search every matching group.
for(final ThreadGroup group : threadGroups) {
result.addAll(findThreads(group, false, threadNamePredicate));
}
return Collections.unmodifiableCollection(result);
}
/**
 * Return active thread groups with the specified group name.
 *
 * @param threadGroupName The thread group name
 * @return the thread groups with the specified group name or an empty collection if no such thread group exists. The collection returned is always unmodifiable.
 * @throws IllegalArgumentException if group name is null
 * @throws SecurityException if the current thread cannot access the system thread group, or cannot
 * modify thread groups from this thread's thread group up to the system thread group
 */
public static Collection<ThreadGroup> findThreadGroupsByName(final String threadGroupName) {
return findThreadGroups(new NamePredicate(threadGroupName));
}
/**
 * Return all active thread groups excluding the system thread group (a thread group is active if it has not been destroyed).
 *
 * @return all thread groups excluding the system thread group. The collection returned is always unmodifiable.
 * @throws SecurityException if the current thread cannot access the system thread group, or cannot
 * modify thread groups from this thread's thread group up to the system thread group
 */
public static Collection<ThreadGroup> getAllThreadGroups() {
return findThreadGroups(ALWAYS_TRUE_PREDICATE);
}
/**
 * Return the system thread group (sometimes also referred to as the "root thread group").
 *
 * @return the system thread group
 * @throws SecurityException if the current thread cannot modify
 * thread groups from this thread's thread group up to the system thread group
 */
public static ThreadGroup getSystemThreadGroup() {
ThreadGroup threadGroup = Thread.currentThread().getThreadGroup();
// The system group is the root of the hierarchy: the only group without a parent.
while(threadGroup.getParent() != null) {
threadGroup = threadGroup.getParent();
}
return threadGroup;
}
/**
 * Return all active threads (a thread is active if it has been started and has not yet died).
 *
 * @return all active threads. The collection returned is always unmodifiable.
 * @throws SecurityException if the current thread cannot access the system thread group, or cannot
 * modify thread groups from this thread's thread group up to the system thread group
 */
public static Collection<Thread> getAllThreads() {
return findThreads(ALWAYS_TRUE_PREDICATE);
}
/**
 * Return active threads with the specified name, searching the whole thread-group hierarchy.
 *
 * @param threadName The thread name
 * @return The threads with the specified name or an empty collection if no such thread exists. The collection returned is always unmodifiable.
 * @throws IllegalArgumentException if the specified name is null
 * @throws SecurityException if the current thread cannot access the system thread group, or cannot
 * modify thread groups from this thread's thread group up to the system thread group
 */
public static Collection<Thread> findThreadsByName(final String threadName) {
return findThreads(new NamePredicate(threadName));
}
/**
 * Return the active thread with the specified id.
 *
 * @param threadId The thread id
 * @return The thread with the specified id or {@code null} if no such thread exists
 * @throws IllegalArgumentException if the specified id is zero or negative
 * @throws SecurityException if the current thread cannot access the system thread group, or cannot
 * modify thread groups from this thread's thread group up to the system thread group
 */
public static Thread findThreadById(final long threadId) {
final Collection<Thread> result = findThreads(new ThreadIdPredicate(threadId));
if(result.isEmpty()) {
return null;
} else {
// Thread ids are unique among live threads, so at most one match is possible.
return result.iterator().next();
}
}
/**
 * <p>
 * ThreadUtils instances should NOT be constructed in standard programming. Instead, the class should be used as
 * {@code ThreadUtils.getAllThreads()}
 * </p>
 * <p>
 * This constructor is public to permit tools that require a JavaBean instance to operate.
 * </p>
 */
public ThreadUtils() {
super();
}
/**
 * A predicate for selecting threads.
 */
//if the minimal Java version for lang becomes 1.8, extend this interface from java.util.function.Predicate
public interface ThreadPredicate /*extends java.util.function.Predicate<Thread>*/{
/**
 * Evaluates this predicate on the given thread.
 * @param thread the thread
 * @return {@code true} if the thread matches the predicate, otherwise {@code false}
 */
boolean test(Thread thread);
}
/**
 * A predicate for selecting threadgroups.
 */
//if the minimal Java version for lang becomes 1.8, extend this interface from java.util.function.Predicate
public interface ThreadGroupPredicate /*extends java.util.function.Predicate<ThreadGroup>*/{
/**
 * Evaluates this predicate on the given threadgroup.
 * @param threadGroup the threadgroup
 * @return {@code true} if the threadGroup matches the predicate, otherwise {@code false}
 */
boolean test(ThreadGroup threadGroup);
}
/**
 * Predicate which always returns true.
 */
public static final AlwaysTruePredicate ALWAYS_TRUE_PREDICATE = new AlwaysTruePredicate();
/**
 * A predicate implementation which always returns true, for both threads and threadgroups.
 */
private final static class AlwaysTruePredicate implements ThreadPredicate, ThreadGroupPredicate{
private AlwaysTruePredicate() {
}
@Override
public boolean test(final ThreadGroup threadGroup) {
return true;
}
@Override
public boolean test(final Thread thread) {
return true;
}
}
/**
 * A predicate implementation which matches a thread or threadgroup name exactly.
 */
public static class NamePredicate implements ThreadPredicate, ThreadGroupPredicate {
private final String name;
/**
 * Predicate constructor
 *
 * @param name thread or threadgroup name
 * @throws IllegalArgumentException if the name is {@code null}
 */
public NamePredicate(final String name) {
super();
if (name == null) {
throw new IllegalArgumentException("The name must not be null");
}
this.name = name;
}
@Override
public boolean test(final ThreadGroup threadGroup) {
return threadGroup != null && threadGroup.getName().equals(name);
}
@Override
public boolean test(final Thread thread) {
return thread != null && thread.getName().equals(name);
}
}
/**
 * A predicate implementation which matches a thread id.
 */
public static class ThreadIdPredicate implements ThreadPredicate {
private final long threadId;
/**
 * Predicate constructor
 *
 * @param threadId the threadId to match
 * @throws IllegalArgumentException if the threadId is zero or negative
 */
public ThreadIdPredicate(final long threadId) {
super();
if (threadId <= 0) {
throw new IllegalArgumentException("The thread id must be greater than zero");
}
this.threadId = threadId;
}
@Override
public boolean test(final Thread thread) {
return thread != null && thread.getId() == threadId;
}
}
/**
 * Select all active threads which match the given predicate, searching the whole hierarchy
 * starting from the system thread group.
 *
 * @param predicate the predicate
 * @return An unmodifiable {@code Collection} of active threads matching the given predicate
 *
 * @throws IllegalArgumentException if the predicate is null
 * @throws SecurityException if the current thread cannot access the system thread group, or cannot
 * modify thread groups from this thread's thread group up to the system thread group
 */
public static Collection<Thread> findThreads(final ThreadPredicate predicate){
return findThreads(getSystemThreadGroup(), true, predicate);
}
/**
 * Select all active threadgroups which match the given predicate, searching the whole hierarchy
 * starting from the system thread group.
 *
 * @param predicate the predicate
 * @return An unmodifiable {@code Collection} of active threadgroups matching the given predicate
 * @throws IllegalArgumentException if the predicate is null
 * @throws SecurityException if the current thread cannot access the system thread group, or cannot
 * modify thread groups from this thread's thread group up to the system thread group
 */
public static Collection<ThreadGroup> findThreadGroups(final ThreadGroupPredicate predicate){
return findThreadGroups(getSystemThreadGroup(), true, predicate);
}
/**
 * Select all active threads which match the given predicate and which belong to the given thread group (or one of its subgroups).
 *
 * @param group the thread group
 * @param recurse if {@code true} then evaluate the predicate recursively on all threads in all subgroups of the given group
 * @param predicate the predicate
 * @return An unmodifiable {@code Collection} of active threads which match the given predicate and which belong to the given thread group
 * @throws IllegalArgumentException if the given group or predicate is null
 * @throws SecurityException if the current thread cannot modify
 * thread groups from this thread's thread group up to the system thread group
 */
public static Collection<Thread> findThreads(final ThreadGroup group, final boolean recurse, final ThreadPredicate predicate) {
if (group == null) {
throw new IllegalArgumentException("The group must not be null");
}
if (predicate == null) {
throw new IllegalArgumentException("The predicate must not be null");
}
// activeCount() is only an estimate; retry with a larger array until enumerate() fits.
int count = group.activeCount();
Thread[] threads;
do {
threads = new Thread[count + (count / 2) + 1]; //slightly grow the array size
count = group.enumerate(threads, recurse);
//return value of enumerate() must be strictly less than the array size according to javadoc
} while (count >= threads.length);
final List<Thread> result = new ArrayList<Thread>(count);
for (int i = 0; i < count; ++i) {
if (predicate.test(threads[i])) {
result.add(threads[i]);
}
}
return Collections.unmodifiableCollection(result);
}
/**
 * Select all active threadgroups which match the given predicate and which are subgroups of the given thread group (or one of its subgroups).
 *
 * @param group the thread group
 * @param recurse if {@code true} then evaluate the predicate recursively on all threadgroups in all subgroups of the given group
 * @param predicate the predicate
 * @return An unmodifiable {@code Collection} of active threadgroups which match the given predicate and which are subgroups of the given thread group
 * @throws IllegalArgumentException if the given group or predicate is null
 * @throws SecurityException if the current thread cannot modify
 * thread groups from this thread's thread group up to the system thread group
 */
public static Collection<ThreadGroup> findThreadGroups(final ThreadGroup group, final boolean recurse, final ThreadGroupPredicate predicate){
if (group == null) {
throw new IllegalArgumentException("The group must not be null");
}
if (predicate == null) {
throw new IllegalArgumentException("The predicate must not be null");
}
// activeGroupCount() is only an estimate; retry with a larger array until enumerate() fits.
int count = group.activeGroupCount();
ThreadGroup[] threadGroups;
do {
threadGroups = new ThreadGroup[count + (count / 2) + 1]; //slightly grow the array size
count = group.enumerate(threadGroups, recurse);
//return value of enumerate() must be strictly less than the array size according to javadoc
} while(count >= threadGroups.length);
final List<ThreadGroup> result = new ArrayList<ThreadGroup>(count);
for(int i = 0; i < count; ++i) {
if(predicate.test(threadGroups[i])) {
result.add(threadGroups[i]);
}
}
return Collections.unmodifiableCollection(result);
}
}
| |
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.bluetooth;
import android.bluetooth.IBluetoothCallback;
import android.os.ParcelUuid;
import android.os.RemoteException;
import android.util.Log;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
* A connected or connecting Bluetooth socket.
*
* <p>The interface for Bluetooth Sockets is similar to that of TCP sockets:
* {@link java.net.Socket} and {@link java.net.ServerSocket}. On the server
* side, use a {@link BluetoothServerSocket} to create a listening server
* socket. When a connection is accepted by the {@link BluetoothServerSocket},
* it will return a new {@link BluetoothSocket} to manage the connection.
* On the client side, use a single {@link BluetoothSocket} to both initiate
* an outgoing connection and to manage the connection.
*
* <p>The most common type of Bluetooth socket is RFCOMM, which is the type
* supported by the Android APIs. RFCOMM is a connection-oriented, streaming
* transport over Bluetooth. It is also known as the Serial Port Profile (SPP).
*
* <p>To create a {@link BluetoothSocket} for connecting to a known device, use
* {@link BluetoothDevice#createRfcommSocketToServiceRecord
* BluetoothDevice.createRfcommSocketToServiceRecord()}.
* Then call {@link #connect()} to attempt a connection to the remote device.
* This call will block until a connection is established or the connection
* fails.
*
* <p>To create a {@link BluetoothSocket} as a server (or "host"), see the
* {@link BluetoothServerSocket} documentation.
*
* <p>Once the socket is connected, whether initiated as a client or accepted
* as a server, open the IO streams by calling {@link #getInputStream} and
* {@link #getOutputStream} in order to retrieve {@link java.io.InputStream}
* and {@link java.io.OutputStream} objects, respectively, which are
* automatically connected to the socket.
*
* <p>{@link BluetoothSocket} is thread
* safe. In particular, {@link #close} will always immediately abort ongoing
* operations and close the socket.
*
* <p class="note"><strong>Note:</strong>
* Requires the {@link android.Manifest.permission#BLUETOOTH} permission.
*
* {@see BluetoothServerSocket}
* {@see java.io.InputStream}
* {@see java.io.OutputStream}
*/
public final class BluetoothSocket implements Closeable {
private static final String TAG = "BluetoothSocket";
/** @hide */
public static final int MAX_RFCOMM_CHANNEL = 30;
/** Keep TYPE_ fields in sync with BluetoothSocket.cpp */
/*package*/ static final int TYPE_RFCOMM = 1;
/*package*/ static final int TYPE_SCO = 2;
/*package*/ static final int TYPE_L2CAP = 3;
/*package*/ static final int EBADFD = 77;
/*package*/ static final int EADDRINUSE = 98;
private final int mType; /* one of TYPE_RFCOMM etc */
private final BluetoothDevice mDevice; /* remote device */
private final String mAddress; /* remote address */
private final boolean mAuth;
private final boolean mEncrypt;
private final BluetoothInputStream mInputStream;
private final BluetoothOutputStream mOutputStream;
private final SdpHelper mSdp;
private int mPort; /* RFCOMM channel or L2CAP psm */
/** prevents all native calls after destroyNative() */
private boolean mClosed;
/** protects mClosed: blocking operations hold the read lock, close() takes the write lock */
private final ReentrantReadWriteLock mLock;
/** used by native code only */
private int mSocketData;
/**
 * Construct a BluetoothSocket.
 * @param type type of socket
 * @param fd fd to use for connected socket, or -1 for a new socket
 * @param auth require the remote device to be authenticated
 * @param encrypt require the connection to be encrypted
 * @param device remote device that this socket can connect to
 * @param port remote port
 * @param uuid SDP uuid
 * @throws IOException On error, for example Bluetooth not available, or
 * insufficient privileges
 */
/*package*/ BluetoothSocket(int type, int fd, boolean auth, boolean encrypt,
BluetoothDevice device, int port, ParcelUuid uuid) throws IOException {
// Validate the channel only when the caller supplied one explicitly:
// no SDP uuid to resolve it and no already-connected fd.
if (type == BluetoothSocket.TYPE_RFCOMM && uuid == null && fd == -1) {
if (port < 1 || port > MAX_RFCOMM_CHANNEL) {
throw new IOException("Invalid RFCOMM channel: " + port);
}
}
if (uuid == null) {
mPort = port;
mSdp = null;
} else {
// Port is unknown until SDP discovery resolves the uuid in connect().
mSdp = new SdpHelper(device, uuid);
mPort = -1;
}
mType = type;
mAuth = auth;
mEncrypt = encrypt;
mDevice = device;
if (device == null) {
mAddress = null;
} else {
mAddress = device.getAddress();
}
if (fd == -1) {
initSocketNative();
} else {
initSocketFromFdNative(fd);
}
mInputStream = new BluetoothInputStream(this);
mOutputStream = new BluetoothOutputStream(this);
mClosed = false;
mLock = new ReentrantReadWriteLock();
}
/**
 * Construct a BluetoothSocket from address. Used by native code.
 * @param type type of socket
 * @param fd fd to use for connected socket, or -1 for a new socket
 * @param auth require the remote device to be authenticated
 * @param encrypt require the connection to be encrypted
 * @param address remote device that this socket can connect to
 * @param port remote port
 * @throws IOException On error, for example Bluetooth not available, or
 * insufficient privileges
 */
private BluetoothSocket(int type, int fd, boolean auth, boolean encrypt, String address,
int port) throws IOException {
this(type, fd, auth, encrypt, new BluetoothDevice(address), port, null);
}
/** @hide */
@Override
protected void finalize() throws Throwable {
try {
close();
} finally {
super.finalize();
}
}
/**
 * Attempt to connect to a remote device.
 * <p>This method will block until a connection is made or the connection
 * fails. If this method returns without an exception then this socket
 * is now connected.
 * <p>{@link #close} can be used to abort this call from another thread.
 * @throws IOException on error, for example connection failure
 */
public void connect() throws IOException {
mLock.readLock().lock();
try {
if (mClosed) throw new IOException("socket closed");
if (mSdp != null) {
// Resolve the RFCOMM channel for the SDP uuid before connecting.
mPort = mSdp.doSdp(); // blocks
}
connectNative(); // blocks
} finally {
mLock.readLock().unlock();
}
}
/**
 * Immediately close this socket, and release all associated resources.
 * <p>Causes blocked calls on this socket in other threads to immediately
 * throw an IOException.
 */
public void close() throws IOException {
// abort blocking operations on the socket
mLock.readLock().lock();
try {
if (mClosed) return;
if (mSdp != null) {
mSdp.cancel();
}
abortNative();
} finally {
mLock.readLock().unlock();
}
// all native calls are guaranteed to immediately return after
// abortNative(), so this lock should immediately acquire
mLock.writeLock().lock();
try {
mClosed = true;
destroyNative();
} finally {
mLock.writeLock().unlock();
}
}
/**
 * Get the remote device this socket is connecting, or connected, to.
 * @return remote device
 */
public BluetoothDevice getRemoteDevice() {
return mDevice;
}
/**
 * Get the input stream associated with this socket.
 * <p>The input stream will be returned even if the socket is not yet
 * connected, but operations on that stream will throw IOException until
 * the associated socket is connected.
 * @return InputStream
 */
public InputStream getInputStream() throws IOException {
return mInputStream;
}
/**
 * Get the output stream associated with this socket.
 * <p>The output stream will be returned even if the socket is not yet
 * connected, but operations on that stream will throw IOException until
 * the associated socket is connected.
 * @return OutputStream
 */
public OutputStream getOutputStream() throws IOException {
return mOutputStream;
}
/**
 * Currently returns unix errno instead of throwing IOException,
 * so that BluetoothAdapter can check the error code for EADDRINUSE
 */
/*package*/ int bindListen() {
mLock.readLock().lock();
try {
if (mClosed) return EBADFD;
return bindListenNative();
} finally {
mLock.readLock().unlock();
}
}
/**
 * Block until an incoming connection is accepted, or the timeout elapses.
 * @param timeout accept timeout (negative values block indefinitely per the native layer - TODO confirm)
 * @throws IOException if the socket is closed or the native accept fails
 */
/*package*/ BluetoothSocket accept(int timeout) throws IOException {
mLock.readLock().lock();
try {
if (mClosed) throw new IOException("socket closed");
return acceptNative(timeout);
} finally {
mLock.readLock().unlock();
}
}
/** Number of bytes available to read without blocking, per the native layer. */
/*package*/ int available() throws IOException {
mLock.readLock().lock();
try {
if (mClosed) throw new IOException("socket closed");
return availableNative();
} finally {
mLock.readLock().unlock();
}
}
/** Read up to length bytes into b at offset; returns the count actually read. */
/*package*/ int read(byte[] b, int offset, int length) throws IOException {
mLock.readLock().lock();
try {
if (mClosed) throw new IOException("socket closed");
return readNative(b, offset, length);
} finally {
mLock.readLock().unlock();
}
}
/** Write length bytes from b at offset; returns the count actually written. */
/*package*/ int write(byte[] b, int offset, int length) throws IOException {
mLock.readLock().lock();
try {
if (mClosed) throw new IOException("socket closed");
return writeNative(b, offset, length);
} finally {
mLock.readLock().unlock();
}
}
private native void initSocketNative() throws IOException;
private native void initSocketFromFdNative(int fd) throws IOException;
private native void connectNative() throws IOException;
private native int bindListenNative();
private native BluetoothSocket acceptNative(int timeout) throws IOException;
private native int availableNative() throws IOException;
private native int readNative(byte[] b, int offset, int length) throws IOException;
private native int writeNative(byte[] b, int offset, int length) throws IOException;
private native void abortNative() throws IOException;
private native void destroyNative() throws IOException;
/**
 * Throws an IOException for given posix errno. Done natively so we can
 * use strerr to convert to string error.
 */
/*package*/ native void throwErrnoNative(int errno) throws IOException;
/**
 * Helper to perform blocking SDP lookup.
 */
private static class SdpHelper extends IBluetoothCallback.Stub {
private final IBluetooth service;
private final ParcelUuid uuid;
private final BluetoothDevice device;
private int channel;
private boolean canceled;
public SdpHelper(BluetoothDevice device, ParcelUuid uuid) {
service = BluetoothDevice.getService();
this.device = device;
this.uuid = uuid;
canceled = false;
}
/**
 * Returns the RFCOMM channel for the UUID, or throws IOException
 * on failure.
 */
public synchronized int doSdp() throws IOException {
if (canceled) throw new IOException("Service discovery canceled");
channel = -1;
boolean inProgress = false;
try {
inProgress = service.fetchRemoteUuids(device.getAddress(), uuid, this);
} catch (RemoteException e) {Log.e(TAG, "", e);}
if (!inProgress) throw new IOException("Unable to start Service Discovery");
try {
/* 12 second timeout as a precaution - onRfcommChannelFound
 * should always occur before the timeout */
wait(12000); // block
} catch (InterruptedException e) {}
// cancel() and onRfcommChannelFound() both notify; distinguish by flags.
if (canceled) throw new IOException("Service discovery canceled");
if (channel < 1) throw new IOException("Service discovery failed");
return channel;
}
/** Object cannot be re-used after calling cancel() */
public synchronized void cancel() {
if (!canceled) {
canceled = true;
channel = -1;
notifyAll(); // unblock
}
}
public synchronized void onRfcommChannelFound(int channel) {
if (!canceled) {
this.channel = channel;
notifyAll(); // unblock
}
}
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.xml.highlighting;
import com.intellij.codeInsight.daemon.HighlightDisplayKey;
import com.intellij.codeInspection.InspectionManager;
import com.intellij.codeInspection.InspectionProfile;
import com.intellij.codeInspection.InspectionProfileEntry;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ex.InspectionToolWrapper;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.Key;
import com.intellij.profile.ProfileChangeAdapter;
import com.intellij.profile.codeInspection.InspectionProjectProfileManager;
import com.intellij.profile.codeInspection.ProjectInspectionProfileManager;
import com.intellij.psi.util.CachedValue;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.psi.util.PsiModificationTracker;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import com.intellij.util.EventDispatcher;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.xml.DomElement;
import com.intellij.util.xml.DomFileElement;
import com.intellij.util.xml.DomUtil;
import com.intellij.util.xml.impl.DomApplicationComponent;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collections;
import java.util.List;
/**
 * Project-level implementation of {@code DomElementAnnotationsManager}.
 *
 * <p>Highlighting results for a DOM file are kept in a {@code DomElementsProblemsHolderImpl}
 * stored as user data on the file's root XML tag. A companion {@code CachedValue<Boolean>} stored
 * on the file acts as a freshness marker: when the cached value drops (PSI out-of-code-block
 * modification, this manager's modification count, or project roots change), the holder is
 * considered outdated and is rebuilt on next access. All holder reads/writes are guarded by the
 * static {@link #LOCK}.
 */
public class DomElementAnnotationsManagerImpl extends DomElementAnnotationsManager {
    // Global lock guarding the per-tag problem holder and its freshness marker.
    public static final Object LOCK = new Object();

    // Holder of accumulated problems, attached to the root XML tag of the file.
    private static final Key<DomElementsProblemsHolderImpl> DOM_PROBLEM_HOLDER_KEY = Key.create("DomProblemHolder");
    // Freshness marker, attached to the XmlFile; when null or out of date, the holder is stale.
    private static final Key<CachedValue<Boolean>> CACHED_VALUE_KEY = Key.create("DomProblemHolderCachedValue");
    private final EventDispatcher<DomHighlightingListener> myDispatcher = EventDispatcher.create(DomHighlightingListener.class);

    // Shared no-op holder returned for invalid/null elements or when nothing has been computed yet.
    private static final DomElementsProblemsHolder EMPTY_PROBLEMS_HOLDER = new DomElementsProblemsHolder() {
        @Override
        @NotNull
        public List<DomElementProblemDescriptor> getProblems(DomElement domElement) {
            return Collections.emptyList();
        }

        @Override
        public List<DomElementProblemDescriptor> getProblems(final DomElement domElement, boolean includeXmlProblems) {
            return Collections.emptyList();
        }

        @Override
        public List<DomElementProblemDescriptor> getProblems(final DomElement domElement,
                                                             final boolean includeXmlProblems,
                                                             final boolean withChildren) {
            return Collections.emptyList();
        }

        @Override
        public List<DomElementProblemDescriptor> getProblems(DomElement domElement,
                                                             final boolean includeXmlProblems,
                                                             final boolean withChildren,
                                                             HighlightSeverity minSeverity) {
            return Collections.emptyList();
        }

        @Override
        public List<DomElementProblemDescriptor> getProblems(DomElement domElement, final boolean withChildren, HighlightSeverity minSeverity) {
            return Collections.emptyList();
        }

        @Override
        public List<DomElementProblemDescriptor> getAllProblems() {
            return Collections.emptyList();
        }

        @Override
        public List<DomElementProblemDescriptor> getAllProblems(@NotNull DomElementsInspection inspection) {
            return Collections.emptyList();
        }

        @Override
        public boolean isInspectionCompleted(@NotNull final DomElementsInspection inspectionClass) {
            return false;
        }
    };

    private final Project myProject;

    public DomElementAnnotationsManagerImpl(@NotNull Project project) {
        myProject = project;
        // Any profile switch or edit invalidates all cached annotations (via the modification count).
        ProjectInspectionProfileManager.getInstance(project).addProfileChangeListener(new ProfileChangeAdapter() {
            @Override
            public void profileActivated(InspectionProfile oldProfile, @Nullable InspectionProfile profile) {
                dropAnnotationsCache();
            }

            @Override
            public void profileChanged(InspectionProfile profile) {
                dropAnnotationsCache();
            }
        }, project);
    }

    @Override
    public void dropAnnotationsCache() {
        // This manager is a dependency of every freshness CachedValue, so bumping the
        // modification count invalidates all holders at once.
        incModificationCount();
    }

    /**
     * Merges the problems collected in {@code annotationHolder} into the file's problem holder
     * (creating it if stale/absent), notifies listeners, and returns the appended descriptors.
     */
    public final List<DomElementProblemDescriptor> appendProblems(@NotNull DomFileElement element, @NotNull DomElementAnnotationHolder annotationHolder, Class<? extends DomElementsInspection> inspectionClass) {
        final DomElementAnnotationHolderImpl holderImpl = (DomElementAnnotationHolderImpl)annotationHolder;
        synchronized (LOCK) {
            final DomElementsProblemsHolderImpl holder = _getOrCreateProblemsHolder(element);
            holder.appendProblems(holderImpl, inspectionClass);
        }
        // Fired outside the lock to avoid invoking listener code while holding LOCK.
        myDispatcher.getMulticaster().highlightingFinished(element);
        return Collections.unmodifiableList(holderImpl);
    }

    // Must be called under LOCK. Returns a throw-away holder when there is no root tag to attach to.
    private DomElementsProblemsHolderImpl _getOrCreateProblemsHolder(final DomFileElement element) {
        DomElementsProblemsHolderImpl holder;
        final DomElement rootElement = element.getRootElement();
        final XmlTag rootTag = rootElement.getXmlTag();
        if (rootTag == null) return new DomElementsProblemsHolderImpl(element);

        holder = rootTag.getUserData(DOM_PROBLEM_HOLDER_KEY);
        if (isHolderOutdated(element.getFile()) || holder == null) {
            holder = new DomElementsProblemsHolderImpl(element);
            rootTag.putUserData(DOM_PROBLEM_HOLDER_KEY, holder);
            // The cached Boolean's value is irrelevant; only its up-to-date-ness matters. Its
            // dependencies (PSI modification tracker, this manager, project roots) define when
            // the holder becomes stale.
            final CachedValue<Boolean> cachedValue = CachedValuesManager.getManager(myProject).createCachedValue(
                () -> new CachedValueProvider.Result<>(Boolean.FALSE, element, PsiModificationTracker.OUT_OF_CODE_BLOCK_MODIFICATION_COUNT,
                                                      this, ProjectRootManager.getInstance(myProject)), false);
            cachedValue.getValue(); // compute once so hasUpToDateValue() starts out true
            element.getFile().putUserData(CACHED_VALUE_KEY, cachedValue);
        }
        return holder;
    }

    public static boolean isHolderUpToDate(DomElement element) {
        synchronized (LOCK) {
            return !isHolderOutdated(DomUtil.getFile(element));
        }
    }

    /** Forces the next access to rebuild the problem holder by dropping the freshness marker. */
    public static void outdateProblemHolder(final DomElement element) {
        synchronized (LOCK) {
            DomUtil.getFile(element).putUserData(CACHED_VALUE_KEY, null);
        }
    }

    private static boolean isHolderOutdated(final XmlFile file) {
        final CachedValue<Boolean> cachedValue = file.getUserData(CACHED_VALUE_KEY);
        return cachedValue == null || !cachedValue.hasUpToDateValue();
    }

    /**
     * Returns the problem holder attached to the element's file, or the shared empty holder when
     * the element is invalid or nothing has been attached. Note: does not check freshness here;
     * callers that care use {@link #isHolderUpToDate}.
     */
    @Override
    @NotNull
    public DomElementsProblemsHolder getProblemHolder(DomElement element) {
        if (element == null || !element.isValid()) return EMPTY_PROBLEMS_HOLDER;
        final DomFileElement<DomElement> fileElement = DomUtil.getFileElement(element);

        synchronized (LOCK) {
            final XmlTag tag = fileElement.getRootElement().getXmlTag();
            if (tag != null) {
                final DomElementsProblemsHolder readyHolder = tag.getUserData(DOM_PROBLEM_HOLDER_KEY);
                if (readyHolder != null) {
                    return readyHolder;
                }
            }
            return EMPTY_PROBLEMS_HOLDER;
        }
    }

    @Override
    @NotNull
    public DomElementsProblemsHolder getCachedProblemHolder(DomElement element) {
        return getProblemHolder(element);
    }

    /** Runs the registered DomElementsAnnotator (if any) for the given root class. */
    public static void annotate(final DomElement element, final DomElementAnnotationHolder holder, final Class rootClass) {
        final DomElementsAnnotator annotator = DomApplicationComponent.getInstance().getAnnotator(rootClass);
        if (annotator != null) {
            annotator.annotate(element, holder);
        }
    }

    @Override
    public List<ProblemDescriptor> createProblemDescriptors(final InspectionManager manager, DomElementProblemDescriptor problemDescriptor) {
        return ContainerUtil.createMaybeSingletonList(DomElementsHighlightingUtil.createProblemDescriptors(manager, problemDescriptor));
    }

    @Override
    public boolean isHighlightingFinished(final DomElement[] domElements) {
        for (final DomElement domElement : domElements) {
            if (getHighlightStatus(domElement) != DomHighlightStatus.INSPECTIONS_FINISHED) {
                return false;
            }
        }
        return true;
    }

    @Override
    public void addHighlightingListener(DomHighlightingListener listener, Disposable parentDisposable) {
        myDispatcher.addListener(listener, parentDisposable);
    }

    @Override
    public DomHighlightingHelper getHighlightingHelper() {
        return DomHighlightingHelperImpl.INSTANCE;
    }

    /**
     * Returns cached problems when the holder is fresh and the inspection already ran;
     * otherwise runs the inspection now and records (and returns) its problems.
     */
    @Override
    @NotNull
    public <T extends DomElement> List<DomElementProblemDescriptor> checkFileElement(@NotNull final DomFileElement<T> domFileElement,
                                                                                    @NotNull final DomElementsInspection<T> inspection,
                                                                                    boolean onTheFly) {
        final DomElementsProblemsHolder problemHolder = getProblemHolder(domFileElement);
        if (isHolderUpToDate(domFileElement) && problemHolder.isInspectionCompleted(inspection)) {
            return problemHolder.getAllProblems(inspection);
        }

        final DomElementAnnotationHolder holder = new DomElementAnnotationHolderImpl(onTheFly);
        inspection.checkFileElement(domFileElement, holder);
        return appendProblems(domFileElement, holder, inspection.getClass());
    }

    /**
     * Collects the DomElementsInspections from the current profile whose DOM classes cover this
     * file's root element type; when {@code enabledOnly}, skips tools disabled for the file.
     */
    public List<DomElementsInspection> getSuitableDomInspections(final DomFileElement fileElement, boolean enabledOnly) {
        Class rootType = fileElement.getRootElementClass();
        final InspectionProfile profile = getInspectionProfile(fileElement);
        final List<DomElementsInspection> inspections = new SmartList<>();
        for (final InspectionToolWrapper toolWrapper : profile.getInspectionTools(fileElement.getFile())) {
            if (!enabledOnly || profile.isToolEnabled(HighlightDisplayKey.find(toolWrapper.getShortName()), fileElement.getFile())) {
                ContainerUtil.addIfNotNull(inspections, getSuitableInspection(toolWrapper.getTool(), rootType));
            }
        }
        return inspections;
    }

    protected InspectionProfile getInspectionProfile(final DomFileElement fileElement) {
        return InspectionProjectProfileManager.getInstance(fileElement.getManager().getProject()).getCurrentProfile();
    }

    @Nullable
    private static DomElementsInspection getSuitableInspection(InspectionProfileEntry entry, Class rootType) {
        if (entry instanceof DomElementsInspection) {
            if (((DomElementsInspection)entry).getDomClasses().contains(rootType)) {
                return (DomElementsInspection) entry;
            }
        }
        return null;
    }

    /**
     * Returns a mock inspection for files that have no dedicated one: an annotating mock when
     * automatic highlighting is enabled, a plain mock when no suitable inspection exists at all,
     * and null otherwise.
     */
    @Nullable public <T extends DomElement> DomElementsInspection<T> getMockInspection(DomFileElement<T> root) {
        if (root.getFileDescription().isAutomaticHighlightingEnabled()) {
            return new MockAnnotatingDomInspection<>(root.getRootElementClass());
        }
        if (getSuitableDomInspections(root, false).isEmpty()) {
            return new MockDomInspection<>(root.getRootElementClass());
        }
        return null;
    }

    private static boolean areInspectionsFinished(DomElementsProblemsHolderImpl holder, final List<DomElementsInspection> suitableInspections) {
        for (final DomElementsInspection inspection : suitableInspections) {
            if (!holder.isInspectionCompleted(inspection)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Reports how far highlighting has progressed for the element's file:
     * NONE when the holder is stale or annotators have not run, ANNOTATORS_FINISHED when the mock
     * (annotator) pass is done, INSPECTIONS_FINISHED when all enabled suitable inspections are done.
     */
    @NotNull
    public DomHighlightStatus getHighlightStatus(final DomElement element) {
        synchronized (LOCK) {
            final DomFileElement<DomElement> root = DomUtil.getFileElement(element);
            if (!isHolderOutdated(root.getFile())) {
                final DomElementsProblemsHolder holder = getProblemHolder(element);
                if (holder instanceof DomElementsProblemsHolderImpl) {
                    DomElementsProblemsHolderImpl holderImpl = (DomElementsProblemsHolderImpl)holder;
                    final List<DomElementsInspection> suitableInspections = getSuitableDomInspections(root, true);
                    final DomElementsInspection mockInspection = getMockInspection(root);
                    final boolean annotatorsFinished = mockInspection == null || holderImpl.isInspectionCompleted(mockInspection);
                    final boolean inspectionsFinished = areInspectionsFinished(holderImpl, suitableInspections);
                    if (annotatorsFinished) {
                        if (suitableInspections.isEmpty() || inspectionsFinished) return DomHighlightStatus.INSPECTIONS_FINISHED;
                        return DomHighlightStatus.ANNOTATORS_FINISHED;
                    }
                }
            }
            return DomHighlightStatus.NONE;
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.validate;
import com.google.common.base.Charsets;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.IndexMissingException;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
import org.hamcrest.Matcher;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.ISODateTimeFormat;
import org.junit.Test;
import java.io.IOException;
import static org.elasticsearch.index.query.QueryBuilders.queryString;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.*;
/**
 * Integration tests for the indices "validate query" API: checks the valid/invalid verdict for
 * well-formed and malformed query sources, and — with {@code setExplain(true)} — the rewritten
 * Lucene query string reported per index.
 */
@ClusterScope(randomDynamicTemplates = false)
public class SimpleValidateQueryTests extends ElasticsearchIntegrationTest {

    /** Valid/invalid verdicts for raw sources and query-string queries against typed fields. */
    @Test
    public void simpleValidateQuery() throws Exception {
        createIndex("test");
        ensureGreen();
        client().admin().indices().preparePutMapping("test").setType("type1")
                .setSource(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                        .startObject("foo").field("type", "string").endObject()
                        .startObject("bar").field("type", "integer").endObject()
                        .endObject().endObject().endObject())
                .execute().actionGet();
        refresh();

        // Non-JSON source is rejected outright.
        assertThat(client().admin().indices().prepareValidateQuery("test").setSource("foo".getBytes(Charsets.UTF_8)).execute().actionGet().isValid(), equalTo(false));
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryString("_id:1")).execute().actionGet().isValid(), equalTo(true));
        // Malformed field syntax fails validation.
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryString("_i:d:1")).execute().actionGet().isValid(), equalTo(false));

        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryString("foo:1")).execute().actionGet().isValid(), equalTo(true));
        // Non-numeric value against an integer field fails.
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryString("bar:hey")).execute().actionGet().isValid(), equalTo(false));

        // Unknown fields are still syntactically valid.
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryString("nonexistent:hello")).execute().actionGet().isValid(), equalTo(true));

        // Dangling boolean operator fails to parse.
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryString("foo:1 AND")).execute().actionGet().isValid(), equalTo(false));
    }

    /** Wraps a filter string in "cache(...)" when the cluster runs with a filter cache enabled. */
    private static String filter(String uncachedFilter) {
        String filter = uncachedFilter;
        if (cluster().hasFilterCache()) {
            filter = "cache(" + filter + ")";
        }
        return filter;
    }

    /** Verifies the explain output (rewritten Lucene query) for a variety of query/filter shapes. */
    @Test
    public void explainValidateQuery() throws Exception {
        createIndex("test");
        ensureGreen();
        client().admin().indices().preparePutMapping("test").setType("type1")
                .setSource(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                        .startObject("foo").field("type", "string").endObject()
                        .startObject("bar").field("type", "integer").endObject()
                        .startObject("baz").field("type", "string").field("analyzer", "snowball").endObject()
                        .startObject("pin").startObject("properties").startObject("location").field("type", "geo_point").endObject().endObject().endObject()
                        .endObject().endObject().endObject())
                .execute().actionGet();
        client().admin().indices().preparePutMapping("test").setType("child-type")
                .setSource(XContentFactory.jsonBuilder().startObject().startObject("child-type")
                        .startObject("_parent").field("type", "type1").endObject()
                        .startObject("properties")
                        .startObject("foo").field("type", "string").endObject()
                        .endObject()
                        .endObject().endObject())
                .execute().actionGet();
        refresh();

        // An unparseable source yields an error and no explanation.
        ValidateQueryResponse response;
        response = client().admin().indices().prepareValidateQuery("test")
                .setSource("foo".getBytes(Charsets.UTF_8))
                .setExplain(true)
                .execute().actionGet();
        assertThat(response.isValid(), equalTo(false));
        assertThat(response.getQueryExplanation().size(), equalTo(1));
        assertThat(response.getQueryExplanation().get(0).getError(), containsString("Failed to parse"));
        assertThat(response.getQueryExplanation().get(0).getExplanation(), nullValue());

        // Every explanation below is wrapped by the _type filter added when a type is requested.
        final String typeFilter = filter("_type:type1");
        assertExplanation(QueryBuilders.queryString("_id:1"), equalTo("filtered(ConstantScore(_uid:type1#1))->" + typeFilter));

        assertExplanation(QueryBuilders.idsQuery("type1").addIds("1").addIds("2"),
                equalTo("filtered(ConstantScore(_uid:type1#1 _uid:type1#2))->" + typeFilter));

        assertExplanation(QueryBuilders.queryString("foo"), equalTo("filtered(_all:foo)->" + typeFilter));

        assertExplanation(QueryBuilders.filteredQuery(
                QueryBuilders.termQuery("foo", "1"),
                FilterBuilders.orFilter(
                        FilterBuilders.termFilter("bar", "2"),
                        FilterBuilders.termFilter("baz", "3")
                )
        ), equalTo("filtered(filtered(foo:1)->" + filter("bar:[2 TO 2]") + " " + filter("baz:3") + ")->" + typeFilter));

        assertExplanation(QueryBuilders.filteredQuery(
                QueryBuilders.termQuery("foo", "1"),
                FilterBuilders.orFilter(
                        FilterBuilders.termFilter("bar", "2")
                )
        ), equalTo("filtered(filtered(foo:1)->" + filter("bar:[2 TO 2]") + ")->" + typeFilter));

        assertExplanation(QueryBuilders.filteredQuery(
                QueryBuilders.matchAllQuery(),
                FilterBuilders.geoPolygonFilter("pin.location")
                        .addPoint(40, -70)
                        .addPoint(30, -80)
                        .addPoint(20, -90)
                        .addPoint(40, -70) // closing polygon
        ), equalTo("filtered(ConstantScore(GeoPolygonFilter(pin.location, [[40.0, -70.0], [30.0, -80.0], [20.0, -90.0], [40.0, -70.0]])))->" + typeFilter));

        assertExplanation(QueryBuilders.constantScoreQuery(FilterBuilders.geoBoundingBoxFilter("pin.location")
                .topLeft(40, -80)
                .bottomRight(20, -70)
        ), equalTo("filtered(ConstantScore(GeoBoundingBoxFilter(pin.location, [40.0, -80.0], [20.0, -70.0])))->" + typeFilter));

        // NOTE(review): the original contained this exact assertion twice in a row; the
        // duplicate was removed.
        assertExplanation(QueryBuilders.constantScoreQuery(FilterBuilders.geoDistanceFilter("pin.location")
                .lat(10).lon(20).distance(15, DistanceUnit.DEFAULT).geoDistance(GeoDistance.PLANE)
        ), equalTo("filtered(ConstantScore(GeoDistanceFilter(pin.location, PLANE, 15.0, 10.0, 20.0)))->" + typeFilter));

        assertExplanation(QueryBuilders.constantScoreQuery(FilterBuilders.geoDistanceRangeFilter("pin.location")
                .lat(10).lon(20).from("15m").to("25m").geoDistance(GeoDistance.PLANE)
        ), equalTo("filtered(ConstantScore(GeoDistanceRangeFilter(pin.location, PLANE, [15.0 - 25.0], 10.0, 20.0)))->" + typeFilter));

        assertExplanation(QueryBuilders.constantScoreQuery(FilterBuilders.geoDistanceRangeFilter("pin.location")
                .lat(10).lon(20).from("15miles").to("25miles").geoDistance(GeoDistance.PLANE)
        ), equalTo("filtered(ConstantScore(GeoDistanceRangeFilter(pin.location, PLANE, [" + DistanceUnit.DEFAULT.convert(15.0, DistanceUnit.MILES) + " - " + DistanceUnit.DEFAULT.convert(25.0, DistanceUnit.MILES) + "], 10.0, 20.0)))->" + typeFilter));

        assertExplanation(QueryBuilders.filteredQuery(
                QueryBuilders.termQuery("foo", "1"),
                FilterBuilders.andFilter(
                        FilterBuilders.termFilter("bar", "2"),
                        FilterBuilders.termFilter("baz", "3")
                )
        ), equalTo("filtered(filtered(foo:1)->+" + filter("bar:[2 TO 2]") + " +" + filter("baz:3") + ")->" + typeFilter));

        assertExplanation(QueryBuilders.constantScoreQuery(FilterBuilders.termsFilter("foo", "1", "2", "3")),
                equalTo("filtered(ConstantScore(" + filter("foo:1 foo:2 foo:3") + "))->" + typeFilter));

        assertExplanation(QueryBuilders.constantScoreQuery(FilterBuilders.notFilter(FilterBuilders.termFilter("foo", "bar"))),
                equalTo("filtered(ConstantScore(NotFilter(" + filter("foo:bar") + ")))->" + typeFilter));

        assertExplanation(QueryBuilders.filteredQuery(
                QueryBuilders.termQuery("foo", "1"),
                FilterBuilders.hasChildFilter(
                        "child-type",
                        QueryBuilders.matchQuery("foo", "1")
                )
        ), equalTo("filtered(filtered(foo:1)->CustomQueryWrappingFilter(child_filter[child-type/type1](filtered(foo:1)->" + filter("_type:child-type") + ")))->" + typeFilter));

        assertExplanation(QueryBuilders.filteredQuery(
                QueryBuilders.termQuery("foo", "1"),
                FilterBuilders.scriptFilter("true")
        ), equalTo("filtered(filtered(foo:1)->ScriptFilter(true))->" + typeFilter));
    }

    /** Same explain behavior must hold when the request goes through each node's client. */
    @Test
    public void explainValidateQueryTwoNodes() throws IOException {
        createIndex("test");
        ensureGreen();
        client().admin().indices().preparePutMapping("test").setType("type1")
                .setSource(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                        .startObject("foo").field("type", "string").endObject()
                        .startObject("bar").field("type", "integer").endObject()
                        .startObject("baz").field("type", "string").field("analyzer", "snowball").endObject()
                        .startObject("pin").startObject("properties").startObject("location").field("type", "geo_point").endObject().endObject().endObject()
                        .endObject().endObject().endObject())
                .execute().actionGet();
        refresh();

        for (Client client : internalCluster()) {
            ValidateQueryResponse response = client.admin().indices().prepareValidateQuery("test")
                    .setSource("foo".getBytes(Charsets.UTF_8))
                    .setExplain(true)
                    .execute().actionGet();
            assertThat(response.isValid(), equalTo(false));
            assertThat(response.getQueryExplanation().size(), equalTo(1));
            assertThat(response.getQueryExplanation().get(0).getError(), containsString("Failed to parse"));
            assertThat(response.getQueryExplanation().get(0).getExplanation(), nullValue());
        }

        for (Client client : internalCluster()) {
            ValidateQueryResponse response = client.admin().indices().prepareValidateQuery("test")
                    .setQuery(QueryBuilders.queryString("foo"))
                    .setExplain(true)
                    .execute().actionGet();
            assertThat(response.isValid(), equalTo(true));
            assertThat(response.getQueryExplanation().size(), equalTo(1));
            assertThat(response.getQueryExplanation().get(0).getExplanation(), equalTo("_all:foo"));
            assertThat(response.getQueryExplanation().get(0).getError(), nullValue());
        }
    }

    /** Date-math ("now-2M/d") in a query string is resolved to concrete millisecond bounds. */
    @Test //https://github.com/elasticsearch/elasticsearch/issues/3629
    public void explainDateRangeInQueryString() {
        // Single shard so exactly one explanation comes back.
        assertAcked(prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder()
                .put(indexSettings())
                .put("index.number_of_shards", 1)));

        String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1));
        String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1));

        client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get();

        refresh();

        ValidateQueryResponse response = client().admin().indices().prepareValidateQuery()
                .setQuery(queryString("past:[now-2M/d TO now/d]")).setExplain(true).get();

        assertNoFailures(response);
        assertThat(response.getQueryExplanation().size(), equalTo(1));
        assertThat(response.getQueryExplanation().get(0).getError(), nullValue());
        DateTime twoMonthsAgo = new DateTime(DateTimeZone.UTC).minusMonths(2).withTimeAtStartOfDay();
        DateTime now = new DateTime(DateTimeZone.UTC).plusDays(1).withTimeAtStartOfDay();
        assertThat(response.getQueryExplanation().get(0).getExplanation(),
                equalTo("past:[" + twoMonthsAgo.getMillis() + " TO " + now.getMillis() + "]"));
        assertThat(response.isValid(), equalTo(true));
    }

    /** With no indices at all, the request fails with IndexMissingException. */
    @Test(expected = IndexMissingException.class)
    public void validateEmptyCluster() {
        client().admin().indices().prepareValidateQuery().get();
    }

    /** A validate request with no query defaults to match_all. */
    @Test
    public void explainNoQuery() {
        createIndex("test");
        ensureGreen();

        ValidateQueryResponse validateQueryResponse = client().admin().indices().prepareValidateQuery().setExplain(true).get();
        assertThat(validateQueryResponse.isValid(), equalTo(true));
        assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
        assertThat(validateQueryResponse.getQueryExplanation().get(0).getIndex(), equalTo("test"));
        assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), equalTo("ConstantScore(*:*)"));
    }

    /** Querying through a filtered alias folds the alias filter into the explanation. */
    @Test
    public void explainFilteredAlias() {
        assertAcked(prepareCreate("test")
                .addMapping("test", "field", "type=string")
                .addAlias(new Alias("alias").filter(FilterBuilders.termFilter("field", "value1"))));
        ensureGreen();

        ValidateQueryResponse validateQueryResponse = client().admin().indices().prepareValidateQuery("alias")
                .setQuery(QueryBuilders.matchAllQuery()).setExplain(true).get();
        assertThat(validateQueryResponse.isValid(), equalTo(true));
        assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
        assertThat(validateQueryResponse.getQueryExplanation().get(0).getIndex(), equalTo("test"));
        assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:value1"));
    }

    /** match_phrase_prefix explanations, including synonym-stacked tokens at and before the prefix. */
    @Test
    public void explainMatchPhrasePrefix() {
        assertAcked(prepareCreate("test").setSettings(
                ImmutableSettings.settingsBuilder().put(indexSettings())
                        .put("index.analysis.filter.syns.type", "synonym")
                        .putArray("index.analysis.filter.syns.synonyms", "one,two")
                        .put("index.analysis.analyzer.syns.tokenizer", "standard")
                        .putArray("index.analysis.analyzer.syns.filter", "syns")
        ).addMapping("test", "field", "type=string,analyzer=syns"));
        ensureGreen();

        ValidateQueryResponse validateQueryResponse = client().admin().indices().prepareValidateQuery("test")
                .setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "foo")).setExplain(true).get();
        assertThat(validateQueryResponse.isValid(), equalTo(true));
        assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
        assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"foo*\""));

        validateQueryResponse = client().admin().indices().prepareValidateQuery("test")
                .setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "foo bar")).setExplain(true).get();
        assertThat(validateQueryResponse.isValid(), equalTo(true));
        assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
        assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"foo bar*\""));

        // Stacked tokens: "one" expands via the synonym filter to (one two).
        validateQueryResponse = client().admin().indices().prepareValidateQuery("test")
                .setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "one bar")).setExplain(true).get();
        assertThat(validateQueryResponse.isValid(), equalTo(true));
        assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
        assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"(one two) bar*\""));

        validateQueryResponse = client().admin().indices().prepareValidateQuery("test")
                .setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "foo one")).setExplain(true).get();
        assertThat(validateQueryResponse.isValid(), equalTo(true));
        assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
        assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"foo (one* two*)\""));
    }

    /** Extra top-level properties before the "query" element make the request invalid. */
    @Test
    public void irrelevantPropertiesBeforeQuery() throws IOException {
        createIndex("test");
        ensureGreen();
        refresh();

        assertThat(client().admin().indices().prepareValidateQuery("test").setSource(new BytesArray("{\"foo\": \"bar\", \"query\": {\"term\" : { \"user\" : \"kimchy\" }}}")).get().isValid(), equalTo(false));
    }

    /** Extra top-level properties after the "query" element make the request invalid. */
    @Test
    public void irrelevantPropertiesAfterQuery() throws IOException {
        createIndex("test");
        ensureGreen();
        refresh();

        assertThat(client().admin().indices().prepareValidateQuery("test").setSource(new BytesArray("{\"query\": {\"term\" : { \"user\" : \"kimchy\" }}, \"foo\": \"bar\"}")).get().isValid(), equalTo(false));
    }

    /** Runs an explain-validate for the given query on index "test"/type1 and checks the explanation. */
    private void assertExplanation(QueryBuilder queryBuilder, Matcher<String> matcher) {
        ValidateQueryResponse response = client().admin().indices().prepareValidateQuery("test")
                .setTypes("type1")
                .setQuery(queryBuilder)
                .setExplain(true)
                .execute().actionGet();
        assertThat(response.getQueryExplanation().size(), equalTo(1));
        assertThat(response.getQueryExplanation().get(0).getError(), nullValue());
        assertThat(response.getQueryExplanation().get(0).getExplanation(), matcher);
        assertThat(response.isValid(), equalTo(true));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.testsuites;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.ignite.IgniteSystemProperties;
import org.apache.ignite.cache.IgniteCacheEntryProcessorSequentialCallTest;
import org.apache.ignite.cache.IgniteWarmupClosureSelfTest;
import org.apache.ignite.cache.store.CacheStoreReadFromBackupTest;
import org.apache.ignite.cache.store.GridCacheBalancingStoreSelfTest;
import org.apache.ignite.cache.store.GridStoreLoadCacheTest;
import org.apache.ignite.cache.store.StoreResourceInjectionSelfTest;
import org.apache.ignite.cache.store.jdbc.CacheJdbcPojoStoreTest;
import org.apache.ignite.cache.store.jdbc.GridCacheJdbcBlobStoreSelfTest;
import org.apache.ignite.cache.store.jdbc.JdbcTypesDefaultTransformerTest;
import org.apache.ignite.internal.managers.communication.IgniteCommunicationBalanceMultipleConnectionsTest;
import org.apache.ignite.internal.managers.communication.IgniteCommunicationBalancePairedConnectionsTest;
import org.apache.ignite.internal.managers.communication.IgniteCommunicationBalanceTest;
import org.apache.ignite.internal.managers.communication.IgniteCommunicationSslBalanceTest;
import org.apache.ignite.internal.managers.communication.IgniteIoTestMessagesTest;
import org.apache.ignite.internal.managers.communication.IgniteVariousConnectionNumberTest;
import org.apache.ignite.internal.processors.cache.CacheAffinityCallSelfTest;
import org.apache.ignite.internal.processors.cache.CacheDeferredDeleteQueueTest;
import org.apache.ignite.internal.processors.cache.CacheDeferredDeleteSanitySelfTest;
import org.apache.ignite.internal.processors.cache.CacheMvccTxFastFinishTest;
import org.apache.ignite.internal.processors.cache.CacheTxFastFinishTest;
import org.apache.ignite.internal.processors.cache.DataStorageConfigurationValidationTest;
import org.apache.ignite.internal.processors.cache.GridCacheAffinityApiSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheAffinityMapperSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheAffinityRoutingSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheAsyncOperationsLimitSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheConcurrentMapSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheConfigurationConsistencySelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheConfigurationValidationSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheLifecycleAwareSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheMissingCommitVersionSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheMvccManagerSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheMvccMultiThreadedUpdateSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheMvccPartitionedSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheMvccSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheOffHeapAtomicMultiThreadedUpdateSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheOffHeapMultiThreadedUpdateSelfTest;
import org.apache.ignite.internal.processors.cache.GridCachePartitionedLocalStoreSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheReplicatedLocalStoreSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheStopSelfTest;
import org.apache.ignite.internal.processors.cache.GridCacheTcpClientDiscoveryMultiThreadedTest;
import org.apache.ignite.internal.processors.cache.GridDataStorageConfigurationConsistencySelfTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheAtomicInvokeTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheAtomicLocalInvokeTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheAtomicLocalWithStoreInvokeTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheAtomicNearEnabledInvokeTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheAtomicStopBusySelfTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheAtomicWithStoreInvokeTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheEntryListenerAtomicLocalTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheEntryListenerAtomicReplicatedTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheEntryListenerAtomicTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheEntryProcessorCallTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheManyAsyncOperationsTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheMvccTxInvokeTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheMvccTxNearEnabledInvokeTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheTxInvokeTest;
import org.apache.ignite.internal.processors.cache.IgniteCacheTxNearEnabledInvokeTest;
import org.apache.ignite.internal.processors.cache.IgniteClientAffinityAssignmentSelfTest;
import org.apache.ignite.internal.processors.cache.IgniteIncompleteCacheObjectSelfTest;
import org.apache.ignite.internal.processors.cache.binary.CacheKeepBinaryWithInterceptorTest;
import org.apache.ignite.internal.processors.cache.binary.distributed.dht.GridCacheAffinityRoutingBinarySelfTest;
import org.apache.ignite.internal.processors.cache.binary.distributed.dht.GridCacheAtomicPartitionedOnlyBinaryDataStreamerMultiNodeSelfTest;
import org.apache.ignite.internal.processors.cache.binary.distributed.dht.GridCacheAtomicPartitionedOnlyBinaryDataStreamerMultithreadedSelfTest;
import org.apache.ignite.internal.processors.cache.binary.distributed.dht.GridCacheAtomicPartitionedOnlyBinaryMultiNodeSelfTest;
import org.apache.ignite.internal.processors.cache.binary.distributed.dht.GridCacheAtomicPartitionedOnlyBinaryMultithreadedSelfTest;
import org.apache.ignite.internal.processors.cache.binary.distributed.dht.GridCacheBinariesNearPartitionedByteArrayValuesSelfTest;
import org.apache.ignite.internal.processors.cache.binary.distributed.dht.GridCacheBinariesPartitionedOnlyByteArrayValuesSelfTest;
import org.apache.ignite.internal.processors.cache.context.IgniteCacheAtomicExecutionContextTest;
import org.apache.ignite.internal.processors.cache.context.IgniteCacheContinuousExecutionContextTest;
import org.apache.ignite.internal.processors.cache.context.IgniteCacheIsolatedExecutionContextTest;
import org.apache.ignite.internal.processors.cache.context.IgniteCacheP2PDisableExecutionContextTest;
import org.apache.ignite.internal.processors.cache.context.IgniteCachePrivateExecutionContextTest;
import org.apache.ignite.internal.processors.cache.context.IgniteCacheReplicatedExecutionContextTest;
import org.apache.ignite.internal.processors.cache.context.IgniteCacheSharedExecutionContextTest;
import org.apache.ignite.internal.processors.cache.distributed.CacheAtomicNearUpdateTopologyChangeTest;
import org.apache.ignite.internal.processors.cache.distributed.GridCacheClientModesTcpClientDiscoveryAbstractTest;
import org.apache.ignite.internal.processors.cache.distributed.IgniteCacheAtomicMessageRecovery10ConnectionsTest;
import org.apache.ignite.internal.processors.cache.distributed.IgniteCacheAtomicMessageRecoveryPairedConnectionsTest;
import org.apache.ignite.internal.processors.cache.distributed.IgniteCacheAtomicMessageRecoveryTest;
import org.apache.ignite.internal.processors.cache.distributed.IgniteCacheConnectionRecovery10ConnectionsTest;
import org.apache.ignite.internal.processors.cache.distributed.IgniteCacheConnectionRecoveryTest;
import org.apache.ignite.internal.processors.cache.distributed.IgniteCacheMessageRecoveryIdleConnectionTest;
import org.apache.ignite.internal.processors.cache.distributed.IgniteCacheMessageWriteTimeoutTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCacheAtomicNearCacheSelfTest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridCachePartitionsStateValidatorSelfTest;
import org.apache.ignite.internal.processors.cache.expiry.IgniteCacheAtomicLocalExpiryPolicyTest;
import org.apache.ignite.internal.processors.cache.query.continuous.CacheEntryProcessorExternalizableFailedTest;
import org.apache.ignite.internal.processors.cache.query.continuous.CacheEntryProcessorNonSerializableTest;
import org.apache.ignite.testframework.junits.DynamicSuite;
import org.junit.runner.RunWith;
/**
 * Test suite.
 *
 * Runs the standard binary cache test suite ({@link IgniteBinaryCacheTestSuite}) with
 * MVCC mode forced on, excluding tests that are not applicable to MVCC (atomic caches,
 * non-transactional tests, tests with dedicated MVCC clones) and appending the MVCC
 * clone classes at the end.
 */
@RunWith(DynamicSuite.class)
public class IgniteCacheMvccTestSuite1 {
    /**
     * @return IgniteCache test suite.
     */
    public static List<Class<?>> suite() {
        // Force every cache started by the delegated suite into MVCC concurrency mode.
        System.setProperty(IgniteSystemProperties.IGNITE_FORCE_MVCC_MODE_IN_TESTS, "true");

        Set<Class> ignoredTests = new HashSet<>();

        // Skip classes that already contains Mvcc tests
        ignoredTests.add(CacheKeepBinaryWithInterceptorTest.class);
        ignoredTests.add(CacheEntryProcessorNonSerializableTest.class);
        ignoredTests.add(CacheEntryProcessorExternalizableFailedTest.class);
        ignoredTests.add(IgniteCacheEntryProcessorSequentialCallTest.class);
        ignoredTests.add(IgniteCacheEntryProcessorCallTest.class);
        ignoredTests.add(GridCacheConfigurationConsistencySelfTest.class);
        ignoredTests.add(IgniteCacheMessageRecoveryIdleConnectionTest.class);
        ignoredTests.add(IgniteCacheConnectionRecoveryTest.class);
        ignoredTests.add(IgniteCacheConnectionRecovery10ConnectionsTest.class);
        ignoredTests.add(CacheDeferredDeleteSanitySelfTest.class);
        ignoredTests.add(CacheDeferredDeleteQueueTest.class);
        ignoredTests.add(GridCacheStopSelfTest.class);
        ignoredTests.add(GridCacheBinariesNearPartitionedByteArrayValuesSelfTest.class);
        ignoredTests.add(GridCacheBinariesPartitionedOnlyByteArrayValuesSelfTest.class);

        // Atomic caches.
        ignoredTests.add(IgniteCacheEntryListenerAtomicTest.class);
        ignoredTests.add(IgniteCacheEntryListenerAtomicReplicatedTest.class);
        ignoredTests.add(IgniteCacheEntryListenerAtomicLocalTest.class);
        ignoredTests.add(IgniteCacheAtomicLocalExpiryPolicyTest.class);
        ignoredTests.add(IgniteCacheAtomicInvokeTest.class);
        ignoredTests.add(IgniteCacheAtomicNearEnabledInvokeTest.class);
        ignoredTests.add(IgniteCacheAtomicWithStoreInvokeTest.class);
        ignoredTests.add(IgniteCacheAtomicLocalInvokeTest.class);
        ignoredTests.add(IgniteCacheAtomicLocalWithStoreInvokeTest.class);
        ignoredTests.add(GridCachePartitionedLocalStoreSelfTest.class);
        ignoredTests.add(GridCacheReplicatedLocalStoreSelfTest.class);
        ignoredTests.add(CacheStoreReadFromBackupTest.class);
        ignoredTests.add(IgniteCacheAtomicExecutionContextTest.class);
        ignoredTests.add(IgniteCacheReplicatedExecutionContextTest.class);
        ignoredTests.add(IgniteCacheContinuousExecutionContextTest.class);
        ignoredTests.add(IgniteCacheIsolatedExecutionContextTest.class);
        ignoredTests.add(IgniteCacheP2PDisableExecutionContextTest.class);
        ignoredTests.add(IgniteCachePrivateExecutionContextTest.class);
        ignoredTests.add(IgniteCacheSharedExecutionContextTest.class);
        ignoredTests.add(IgniteCacheAtomicStopBusySelfTest.class);
        ignoredTests.add(GridCacheAtomicNearCacheSelfTest.class);
        ignoredTests.add(CacheAtomicNearUpdateTopologyChangeTest.class);
        ignoredTests.add(GridCacheOffHeapAtomicMultiThreadedUpdateSelfTest.class);
        ignoredTests.add(IgniteCacheAtomicMessageRecoveryTest.class);
        ignoredTests.add(IgniteCacheAtomicMessageRecoveryPairedConnectionsTest.class);
        ignoredTests.add(IgniteCacheAtomicMessageRecovery10ConnectionsTest.class);
        ignoredTests.add(GridCacheClientModesTcpClientDiscoveryAbstractTest.CaseNearPartitionedAtomic.class);
        ignoredTests.add(GridCacheClientModesTcpClientDiscoveryAbstractTest.CaseNearReplicatedAtomic.class);
        ignoredTests.add(GridCacheClientModesTcpClientDiscoveryAbstractTest.CaseClientPartitionedAtomic.class);
        ignoredTests.add(GridCacheClientModesTcpClientDiscoveryAbstractTest.CaseClientReplicatedAtomic.class);
        ignoredTests.add(GridCacheAtomicPartitionedOnlyBinaryDataStreamerMultiNodeSelfTest.class);
        ignoredTests.add(GridCacheAtomicPartitionedOnlyBinaryDataStreamerMultithreadedSelfTest.class);
        ignoredTests.add(GridCacheAtomicPartitionedOnlyBinaryMultiNodeSelfTest.class);
        ignoredTests.add(GridCacheAtomicPartitionedOnlyBinaryMultithreadedSelfTest.class);

        // Irrelevant tests.
        ignoredTests.add(GridCacheMvccSelfTest.class); // This is about MvccCandidate, but not TxSnapshot.
        ignoredTests.add(GridCacheMvccPartitionedSelfTest.class); // This is about MvccCandidate, but not TxSnapshot.
        ignoredTests.add(GridCacheMvccManagerSelfTest.class); // This is about MvccCandidate, but not TxSnapshot.
        ignoredTests.add(GridCacheMissingCommitVersionSelfTest.class); // Mvcc tx states resides in TxLog.

        // Other non-Tx test.
        ignoredTests.add(GridCacheAffinityRoutingSelfTest.class);
        ignoredTests.add(GridCacheAffinityRoutingBinarySelfTest.class);
        ignoredTests.add(IgniteClientAffinityAssignmentSelfTest.class);
        ignoredTests.add(GridCacheConcurrentMapSelfTest.class);
        ignoredTests.add(CacheAffinityCallSelfTest.class);
        ignoredTests.add(GridCacheAffinityMapperSelfTest.class);
        ignoredTests.add(GridCacheAffinityApiSelfTest.class);
        ignoredTests.add(GridCacheConfigurationValidationSelfTest.class);
        ignoredTests.add(GridDataStorageConfigurationConsistencySelfTest.class);
        ignoredTests.add(DataStorageConfigurationValidationTest.class);
        ignoredTests.add(JdbcTypesDefaultTransformerTest.class);
        ignoredTests.add(GridCacheJdbcBlobStoreSelfTest.class);
        ignoredTests.add(CacheJdbcPojoStoreTest.class);
        ignoredTests.add(GridCacheBalancingStoreSelfTest.class);
        ignoredTests.add(GridStoreLoadCacheTest.class);
        ignoredTests.add(IgniteWarmupClosureSelfTest.class);
        ignoredTests.add(StoreResourceInjectionSelfTest.class);
        ignoredTests.add(GridCacheAsyncOperationsLimitSelfTest.class);
        ignoredTests.add(IgniteCacheManyAsyncOperationsTest.class);
        ignoredTests.add(GridCacheLifecycleAwareSelfTest.class);
        ignoredTests.add(IgniteCacheMessageWriteTimeoutTest.class);
        ignoredTests.add(GridCachePartitionsStateValidatorSelfTest.class);
        ignoredTests.add(IgniteVariousConnectionNumberTest.class);
        ignoredTests.add(IgniteIncompleteCacheObjectSelfTest.class);
        ignoredTests.add(IgniteCommunicationBalanceTest.class);
        ignoredTests.add(IgniteCommunicationBalancePairedConnectionsTest.class);
        ignoredTests.add(IgniteCommunicationBalanceMultipleConnectionsTest.class);
        ignoredTests.add(IgniteCommunicationSslBalanceTest.class);
        ignoredTests.add(IgniteIoTestMessagesTest.class);
        ignoredTests.add(GridCacheTcpClientDiscoveryMultiThreadedTest.class);

        // Skip classes which Mvcc implementations are added in this method below.
        ignoredTests.add(GridCacheOffHeapMultiThreadedUpdateSelfTest.class); // See GridCacheMvccMultiThreadedUpdateSelfTest.
        ignoredTests.add(CacheTxFastFinishTest.class); // See CacheMvccTxFastFinishTest.
        ignoredTests.add(IgniteCacheTxInvokeTest.class); // See IgniteCacheMvccTxInvokeTest.
        ignoredTests.add(IgniteCacheTxNearEnabledInvokeTest.class); // See IgniteCacheMvccTxNearEnabledInvokeTest.

        // Delegate to the standard suite with the exclusions above applied.
        List<Class<?>> suite = new ArrayList<>(IgniteBinaryCacheTestSuite.suite(ignoredTests));

        // Add Mvcc clones.
        suite.add(GridCacheMvccMultiThreadedUpdateSelfTest.class);
        suite.add(CacheMvccTxFastFinishTest.class);
        suite.add(IgniteCacheMvccTxInvokeTest.class);
        suite.add(IgniteCacheMvccTxNearEnabledInvokeTest.class);

        return suite;
    }
}
| |
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.docproc.jdisc;
import com.google.inject.Inject;
import com.yahoo.component.chain.Chain;
import com.yahoo.component.chain.model.ChainsModel;
import com.yahoo.component.provider.ComponentRegistry;
import com.yahoo.concurrent.DaemonThreadFactory;
import com.yahoo.config.docproc.DocprocConfig;
import com.yahoo.config.docproc.SchemamappingConfig;
import com.yahoo.container.core.ChainsConfig;
import com.yahoo.container.core.document.ContainerDocumentConfig;
import com.yahoo.docproc.AbstractConcreteDocumentFactory;
import com.yahoo.docproc.CallStack;
import com.yahoo.docproc.DocprocService;
import com.yahoo.docproc.DocumentProcessor;
import com.yahoo.docproc.jdisc.messagebus.MbusRequestContext;
import com.yahoo.docproc.proxy.SchemaMap;
import com.yahoo.document.DocumentTypeManager;
import com.yahoo.document.config.DocumentmanagerConfig;
import com.yahoo.jdisc.Metric;
import com.yahoo.jdisc.Request;
import com.yahoo.jdisc.handler.AbstractRequestHandler;
import com.yahoo.jdisc.handler.ContentChannel;
import com.yahoo.jdisc.handler.ResponseHandler;
import com.yahoo.messagebus.jdisc.MbusRequest;
import com.yahoo.processing.execution.chain.ChainRegistry;
import java.util.TimerTask;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import java.util.logging.Level;
import static com.yahoo.component.chain.ChainsConfigurer.prepareChainRegistry;
import static com.yahoo.component.chain.model.ChainsModelBuilder.buildFromConfig;
/**
 * JDisc request handler that routes incoming document operation requests (currently
 * only message bus requests, see {@link #handleRequest}) through a configured
 * document processing (docproc) chain. For each configured chain a
 * {@link DocprocService} is created and registered; incoming requests are wrapped
 * in a {@link DocumentProcessingTask} and submitted to that service's executor.
 * Tasks that must be retried later are re-scheduled via {@link #submit(DocumentProcessingTask, long)}.
 *
 * @author Einar M R Rosenvinge
 */
public class DocumentProcessingHandler extends AbstractRequestHandler {

    private static final Logger log = Logger.getLogger(DocumentProcessingHandler.class.getName());
    @SuppressWarnings("removal") // TODO Vespa 8: remove
    private final ComponentRegistry<DocprocService> docprocServiceRegistry;
    private final ComponentRegistry<AbstractConcreteDocumentFactory> docFactoryRegistry;
    private final ChainRegistry<DocumentProcessor> chainRegistry = new ChainRegistry<>();
    // Executes tasks whose processing was deferred (see LaterTimerTask / Progress.LATER).
    private final ScheduledThreadPoolExecutor laterExecutor =
            new ScheduledThreadPoolExecutor(2, new DaemonThreadFactory("docproc-later-"));
    private final ContainerDocumentConfig containerDocConfig;
    private final DocumentTypeManager documentTypeManager;

    /**
     * Master constructor: builds the chain registry from the chains model and registers
     * one in-service {@link DocprocService} per configured chain.
     */
    @SuppressWarnings("removal") // TODO Vespa 8: remove
    private DocumentProcessingHandler(ComponentRegistry<DocprocService> docprocServiceRegistry,
                                      ComponentRegistry<DocumentProcessor> documentProcessorComponentRegistry,
                                      ComponentRegistry<AbstractConcreteDocumentFactory> docFactoryRegistry,
                                      int numThreads,
                                      DocumentTypeManager documentTypeManager,
                                      ChainsModel chainsModel, SchemaMap schemaMap,
                                      Metric metric,
                                      ContainerDocumentConfig containerDocConfig) {
        this.docprocServiceRegistry = docprocServiceRegistry;
        this.docFactoryRegistry = docFactoryRegistry;
        this.containerDocConfig = containerDocConfig;
        this.documentTypeManager = documentTypeManager;
        DocprocService.schemaMap = schemaMap;
        // Deferred tasks should not outlive this handler once destroy() shuts the executor down.
        laterExecutor.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
        laterExecutor.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);

        if (chainsModel != null) {
            prepareChainRegistry(chainRegistry, chainsModel, documentProcessorComponentRegistry);

            for (Chain<DocumentProcessor> chain : chainRegistry.allComponents()) {
                log.config("Setting up call stack for chain " + chain.getId());
                DocprocService service = new DocprocService(chain.getId(), convertToCallStack(chain, metric), documentTypeManager, computeNumThreads(numThreads));
                service.setInService(true);
                docprocServiceRegistry.register(service.getId(), service);
            }
        }
    }

    // Non-positive maxThreads means "use one thread per available processor".
    private static int computeNumThreads(int maxThreads) {
        return (maxThreads > 0) ? maxThreads : Runtime.getRuntime().availableProcessors();
    }

    /** Convenience constructor unpacking a {@link DocumentProcessingHandlerParameters} bundle. */
    @SuppressWarnings("removal") // TODO Vespa 8: remove
    DocumentProcessingHandler(ComponentRegistry<DocprocService> docprocServiceRegistry,
                              ComponentRegistry<DocumentProcessor> documentProcessorComponentRegistry,
                              ComponentRegistry<AbstractConcreteDocumentFactory> docFactoryRegistry,
                              DocumentProcessingHandlerParameters params) {
        this(docprocServiceRegistry, documentProcessorComponentRegistry, docFactoryRegistry,
             params.getMaxNumThreads(),
             params.getDocumentTypeManager(), params.getChainsModel(), params.getSchemaMap(),
             params.getMetric(),
             params.getContainerDocConfig());
    }

    /** Injected constructor: assembles parameters from config objects. */
    @Inject
    public DocumentProcessingHandler(ComponentRegistry<DocumentProcessor> documentProcessorComponentRegistry,
                                     ComponentRegistry<AbstractConcreteDocumentFactory> docFactoryRegistry,
                                     ChainsConfig chainsConfig,
                                     SchemamappingConfig mappingConfig,
                                     DocumentTypeManager documentTypeManager,
                                     DocprocConfig docprocConfig,
                                     ContainerDocumentConfig containerDocConfig,
                                     Metric metric) {
        this(new ComponentRegistry<>(),
             documentProcessorComponentRegistry, docFactoryRegistry,
             new DocumentProcessingHandlerParameters()
                     .setMaxNumThreads(docprocConfig.numthreads())
                     .setDocumentTypeManager(documentTypeManager)
                     .setChainsModel(buildFromConfig(chainsConfig)).setSchemaMap(configureMapping(mappingConfig))
                     .setMetric(metric)
                     .setContainerDocumentConfig(containerDocConfig));
        docprocServiceRegistry.freeze();
    }

    /** Shuts down the deferred-task executor and deconstructs all registered services. */
    @Override
    @SuppressWarnings("removal") // TODO Vespa 8: remove
    protected void destroy() {
        laterExecutor.shutdown();
        docprocServiceRegistry.allComponents().forEach(docprocService -> docprocService.deconstruct());
    }

    @SuppressWarnings("removal") // TODO Vespa 8: remove
    public ComponentRegistry<DocprocService> getDocprocServiceRegistry() {
        return docprocServiceRegistry;
    }

    public ChainRegistry<DocumentProcessor> getChains() {
        return chainRegistry;
    }

    /** Builds a {@link SchemaMap} from the schema mapping config. */
    private static SchemaMap configureMapping(SchemamappingConfig mappingConfig) {
        SchemaMap map = new SchemaMap();
        map.configure(mappingConfig);
        return map;
    }

    /** Converts a chain to a call stack, applying each processor's schema field mapping. */
    @SuppressWarnings("removal") // TODO Vespa 8: remove
    private static CallStack convertToCallStack(Chain<DocumentProcessor> chain, Metric metric) {
        CallStack stack = new CallStack(chain.getId().stringValue(), metric);
        for (DocumentProcessor processor : chain.components()) {
            processor.getFieldMap().putAll(DocprocService.schemaMap.chainMap(chain.getId().stringValue(), processor.getId().stringValue()));
            stack.addLast(processor);
        }
        return stack;
    }

    /**
     * Wraps a message bus request in a processing task and submits it to the service
     * named by the request; skips processing when the chain is empty or the request
     * is not processable. Always returns null (no content channel).
     */
    @Override
    @SuppressWarnings("removal") // TODO Vespa 8: remove
    public ContentChannel handleRequest(Request request, ResponseHandler handler) {
        RequestContext requestContext;
        if (request instanceof MbusRequest) {
            requestContext = new MbusRequestContext((MbusRequest) request, handler, docprocServiceRegistry, docFactoryRegistry, containerDocConfig);
        } else {
            //Other types can be added here in the future
            throw new IllegalArgumentException("Request type not supported: " + request);
        }

        if (!requestContext.isProcessable()) {
            requestContext.skip();
            return null;
        }

        String serviceName = requestContext.getServiceName();
        DocprocService service = docprocServiceRegistry.getComponent(serviceName);
        // No need to enqueue a task if the docproc chain is empty, just forward requestContext
        if (service == null) {
            log.log(Level.SEVERE, "DocprocService for session '" + serviceName +
                                  "' not found, returning request '" + requestContext + "'.");
            requestContext.processingFailed(RequestContext.ErrorCode.ERROR_PROCESSING_FAILURE,
                                            "DocprocService " + serviceName + " not found.");
            return null;
        } else if (service.getExecutor().getCallStack().size() == 0) {
            //call stack was empty, just forward message
            requestContext.skip();
            return null;
        }

        DocumentProcessingTask task = new DocumentProcessingTask(requestContext, this, service, service.getThreadPoolExecutor());
        task.submit();
        return null;
    }

    /** Re-schedules a deferred task to be submitted after {@code delay} ms. */
    void submit(DocumentProcessingTask task, long delay) {
        LaterTimerTask timerTask = new LaterTimerTask(task, delay);
        laterExecutor.schedule(timerTask, delay, TimeUnit.MILLISECONDS);
    }

    /** Runnable that re-submits a processing task after its LATER delay has elapsed. */
    private static class LaterTimerTask extends TimerTask {
        private final DocumentProcessingTask processingTask;
        private final long delay;

        private LaterTimerTask(DocumentProcessingTask processingTask, long delay) {
            this.delay = delay;
            log.log(Level.FINE, () -> "Enqueueing in " + delay + " ms due to Progress.LATER: " + processingTask);
            this.processingTask = processingTask;
        }

        @Override
        public void run() {
            log.log(Level.FINE, () -> "Submitting after having waited " + delay + " ms in LATER queue: " + processingTask);
            processingTask.submit();
        }
    }

    public DocumentTypeManager getDocumentTypeManager() {
        return documentTypeManager;
    }

}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver.wal;
import java.io.IOException;
import java.util.ArrayList;
import java.util.NavigableSet;
import java.util.TreeSet;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
import com.google.protobuf.TextFormat;
/**
 * Static helper methods for working with HLog (write-ahead log) files and
 * directories: file name validation, directory naming, recovered-edits handling
 * and server name extraction from log paths.
 */
public class HLogUtil {
  static final Log LOG = LogFactory.getLog(HLogUtil.class);

  /**
   * Pattern used to validate a HLog file name
   */
  private static final Pattern pattern =
      Pattern.compile(".*\\.\\d*("+HLog.META_HLOG_FILE_EXTN+")*");

  /**
   * @param filename
   *          name of the file to validate
   * @return <tt>true</tt> if the filename matches an HLog, <tt>false</tt>
   *         otherwise
   */
  public static boolean validateHLogFilename(String filename) {
    return pattern.matcher(filename).matches();
  }

  /**
   * Construct the HLog directory name
   *
   * @param serverName
   *          Server name formatted as described in {@link ServerName}
   * @return the relative HLog directory name, e.g.
   *         <code>.logs/1.example.org,60030,12345</code> if
   *         <code>serverName</code> passed is
   *         <code>1.example.org,60030,12345</code>
   */
  public static String getHLogDirectoryName(final String serverName) {
    StringBuilder dirName = new StringBuilder(HConstants.HREGION_LOGDIR_NAME);
    dirName.append("/");
    dirName.append(serverName);
    return dirName.toString();
  }

  /**
   * @param regiondir
   *          This regions directory in the filesystem.
   * @return The directory that holds recovered edits files for the region
   *         <code>regiondir</code>
   */
  public static Path getRegionDirRecoveredEditsDir(final Path regiondir) {
    return new Path(regiondir, HConstants.RECOVERED_EDITS_DIR);
  }

  /**
   * Move aside a bad edits file.
   *
   * @param fs
   * @param edits
   *          Edits file to move aside.
   * @return The name of the moved aside file.
   * @throws IOException
   */
  public static Path moveAsideBadEditsFile(final FileSystem fs, final Path edits)
      throws IOException {
    // Use EnvironmentEdgeManager (not System.currentTimeMillis) for the timestamp
    // suffix, consistent with writeCompactionMarker and so tests with an injected
    // clock see deterministic names.
    Path moveAsideName = new Path(edits.getParent(), edits.getName() + "."
        + EnvironmentEdgeManager.currentTimeMillis());
    if (!fs.rename(edits, moveAsideName)) {
      LOG.warn("Rename failed from " + edits + " to " + moveAsideName);
    }
    return moveAsideName;
  }

  /**
   * @param path
   *          - the path to analyze. Expected format, if it's in hlog directory:
   *          / [base directory for hbase] / hbase / .logs / ServerName /
   *          logfile
   * @return null if it's not a log file. Returns the ServerName of the region
   *         server that created this log file otherwise.
   */
  public static ServerName getServerNameFromHLogDirectoryName(
      Configuration conf, String path) throws IOException {
    if (path == null
        || path.length() <= HConstants.HREGION_LOGDIR_NAME.length()) {
      return null;
    }

    if (conf == null) {
      throw new IllegalArgumentException("parameter conf must be set");
    }

    final String rootDir = conf.get(HConstants.HBASE_DIR);
    if (rootDir == null || rootDir.isEmpty()) {
      throw new IllegalArgumentException(HConstants.HBASE_DIR
          + " key not found in conf.");
    }

    // Build the expected "<root>/.logs/" prefix; anything outside it is not a log path.
    final StringBuilder startPathSB = new StringBuilder(rootDir);
    if (!rootDir.endsWith("/"))
      startPathSB.append('/');
    startPathSB.append(HConstants.HREGION_LOGDIR_NAME);
    if (!HConstants.HREGION_LOGDIR_NAME.endsWith("/"))
      startPathSB.append('/');
    final String startPath = startPathSB.toString();

    String fullPath;
    try {
      fullPath = FileSystem.get(conf).makeQualified(new Path(path)).toString();
    } catch (IllegalArgumentException e) {
      LOG.info("Call to makeQualified failed on " + path + " " + e.getMessage());
      return null;
    }

    if (!fullPath.startsWith(startPath)) {
      return null;
    }

    final String serverNameAndFile = fullPath.substring(startPath.length());

    // "a,0,0" is the shortest possible ServerName; anything shorter before the
    // first '/' cannot be "<ServerName>/<file>".
    if (serverNameAndFile.indexOf('/') < "a,0,0".length()) {
      // Either it's a file (not a directory) or it's not a ServerName format
      return null;
    }

    Path p = new Path(path);
    return getServerNameFromHLogDirectoryName(p);
  }

  /**
   * This function returns region server name from a log file name which is in either format:
   * hdfs://&lt;name node&gt;/hbase/.logs/&lt;server name&gt;-splitting/... or hdfs://&lt;name
   * node&gt;/hbase/.logs/&lt;server name&gt;/...
   * @param logFile
   * @return null if the passed in logFile isn't a valid HLog file path
   */
  public static ServerName getServerNameFromHLogDirectoryName(Path logFile) {
    Path logDir = logFile.getParent();
    String logDirName = logDir.getName();
    // logFile may itself be the server directory rather than a file inside it.
    if (logDirName.equals(HConstants.HREGION_LOGDIR_NAME)) {
      logDir = logFile;
      logDirName = logDir.getName();
    }
    ServerName serverName = null;
    // Strip the "-splitting" suffix added while the log is being split.
    if (logDirName.endsWith(HLog.SPLITTING_EXT)) {
      logDirName = logDirName.substring(0, logDirName.length() - HLog.SPLITTING_EXT.length());
    }
    try {
      serverName = ServerName.parseServerName(logDirName);
    } catch (IllegalArgumentException ex) {
      serverName = null;
      LOG.warn("Cannot parse a server name from path=" + logFile + "; " + ex.getMessage());
    }
    if (serverName != null && serverName.getStartcode() < 0) {
      LOG.warn("Invalid log file path=" + logFile);
      return null;
    }
    return serverName;
  }

  /**
   * Returns sorted set of edit files made by wal-log splitter, excluding files
   * with '.temp' suffix.
   *
   * @param fs
   * @param regiondir
   * @return Files in passed <code>regiondir</code> as a sorted set.
   * @throws IOException
   */
  public static NavigableSet<Path> getSplitEditFilesSorted(final FileSystem fs,
      final Path regiondir) throws IOException {
    NavigableSet<Path> filesSorted = new TreeSet<Path>();
    Path editsdir = HLogUtil.getRegionDirRecoveredEditsDir(regiondir);
    if (!fs.exists(editsdir))
      return filesSorted;
    FileStatus[] files = FSUtils.listStatus(fs, editsdir, new PathFilter() {
      @Override
      public boolean accept(Path p) {
        boolean result = false;
        try {
          // Return files and only files that match the editfile names pattern.
          // There can be other files in this directory other than edit files.
          // In particular, on error, we'll move aside the bad edit file giving
          // it a timestamp suffix. See moveAsideBadEditsFile.
          Matcher m = HLog.EDITFILES_NAME_PATTERN.matcher(p.getName());
          result = fs.isFile(p) && m.matches();
          // Skip the file whose name ends with RECOVERED_LOG_TMPFILE_SUFFIX,
          // because it means splithlog thread is writting this file.
          if (p.getName().endsWith(HLog.RECOVERED_LOG_TMPFILE_SUFFIX)) {
            result = false;
          }
        } catch (IOException e) {
          LOG.warn("Failed isFile check on " + p);
        }
        return result;
      }
    });
    if (files == null)
      return filesSorted;
    for (FileStatus status : files) {
      filesSorted.add(status.getPath());
    }
    return filesSorted;
  }

  /** @return true if <code>p</code> names a meta HLog file. */
  public static boolean isMetaFile(Path p) {
    return isMetaFile(p.getName());
  }

  /** @return true if <code>p</code> is non-null and carries the meta HLog extension. */
  public static boolean isMetaFile(String p) {
    // Collapse the former if/return-true/return-false into a single boolean expression.
    return p != null && p.endsWith(HLog.META_HLOG_FILE_EXTN);
  }

  /**
   * Write the marker that a compaction has succeeded and is about to be committed.
   * This provides info to the HMaster to allow it to recover the compaction if
   * this regionserver dies in the middle (This part is not yet implemented). It also prevents
   * the compaction from finishing if this regionserver has already lost its lease on the log.
   * @param sequenceId Used by HLog to get sequence Id for the waledit.
   */
  public static void writeCompactionMarker(HLog log, HTableDescriptor htd, HRegionInfo info,
      final CompactionDescriptor c, AtomicLong sequenceId) throws IOException {
    WALEdit e = WALEdit.createCompaction(c);
    long now = EnvironmentEdgeManager.currentTimeMillis();
    TableName tn = TableName.valueOf(c.getTableName().toByteArray());
    log.appendNoSync(info, tn, e, new ArrayList<UUID>(), now, htd, sequenceId, false,
        HConstants.NO_NONCE, HConstants.NO_NONCE);
    log.sync();
    if (LOG.isTraceEnabled()) {
      LOG.trace("Appended compaction marker " + TextFormat.shortDebugString(c));
    }
  }
}
| |
package org.apache.helix.integration;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.StringWriter;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListSet;
import org.apache.helix.Criteria;
import org.apache.helix.HelixDataAccessor;
import org.apache.helix.HelixManager;
import org.apache.helix.InstanceType;
import org.apache.helix.NotificationContext;
import org.apache.helix.PropertyKey;
import org.apache.helix.PropertyType;
import org.apache.helix.ZNRecord;
import org.apache.helix.PropertyKey.Builder;
import org.apache.helix.manager.zk.DefaultSchedulerMessageHandlerFactory;
import org.apache.helix.messaging.AsyncCallback;
import org.apache.helix.messaging.handling.HelixTaskResult;
import org.apache.helix.messaging.handling.MessageHandler;
import org.apache.helix.messaging.handling.MessageHandlerFactory;
import org.apache.helix.model.Message;
import org.apache.helix.model.Message.MessageState;
import org.apache.helix.model.Message.MessageType;
import org.apache.helix.monitoring.ZKPathDataDumpTask;
import org.apache.helix.util.HelixUtil;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.SerializationConfig;
import org.testng.Assert;
import org.testng.annotations.Test;
public class TestSchedulerMessage extends ZkStandAloneCMTestBaseWithPropertyServerCheck
{
  // Shared handler factory registered on every participant; its handlers record,
  // per partition, which instances processed a message (see _results).
  TestMessagingHandlerFactory _factory = new TestMessagingHandlerFactory();
public static class TestMessagingHandlerFactory implements
MessageHandlerFactory
{
public Map<String, Set<String>> _results = new ConcurrentHashMap<String, Set<String>>();
@Override
public MessageHandler createHandler(Message message,
NotificationContext context)
{
return new TestMessagingHandler(message, context);
}
@Override
public String getMessageType()
{
return "TestParticipant";
}
@Override
public void reset()
{
// TODO Auto-generated method stub
}
public class TestMessagingHandler extends MessageHandler
{
public TestMessagingHandler(Message message, NotificationContext context)
{
super(message, context);
// TODO Auto-generated constructor stub
}
@Override
public HelixTaskResult handleMessage() throws InterruptedException
{
HelixTaskResult result = new HelixTaskResult();
result.setSuccess(true);
String destName = _message.getTgtName();
synchronized (_results)
{
if (!_results.containsKey(_message.getPartitionName()))
{
_results.put(_message.getPartitionName(),
new ConcurrentSkipListSet<String>());
}
}
_results.get(_message.getPartitionName()).add(destName);
return result;
}
@Override
public void onError(Exception e, ErrorCode code, ErrorType type)
{
// TODO Auto-generated method stub
}
}
}
@Test()
public void TestSchedulerMsg() throws Exception
{
_factory._results.clear();
HelixManager manager = null;
for (int i = 0; i < NODE_NR; i++)
{
String hostDest = "localhost_" + (START_PORT + i);
_startCMResultMap.get(hostDest)._manager.getMessagingService()
.registerMessageHandlerFactory(_factory.getMessageType(), _factory);
manager = _startCMResultMap.get(hostDest)._manager;
}
Message schedulerMessage = new Message(MessageType.SCHEDULER_MSG + "", UUID
.randomUUID().toString());
schedulerMessage.setTgtSessionId("*");
schedulerMessage.setTgtName("CONTROLLER");
// TODO: change it to "ADMIN" ?
schedulerMessage.setSrcName("CONTROLLER");
// Template for the individual message sent to each participant
Message msg = new Message(_factory.getMessageType(), "Template");
msg.setTgtSessionId("*");
msg.setMsgState(MessageState.NEW);
// Criteria to send individual messages
Criteria cr = new Criteria();
cr.setInstanceName("localhost_%");
cr.setRecipientInstanceType(InstanceType.PARTICIPANT);
cr.setSessionSpecific(false);
cr.setResource("%");
cr.setPartition("%");
ObjectMapper mapper = new ObjectMapper();
SerializationConfig serializationConfig = mapper.getSerializationConfig();
serializationConfig.set(SerializationConfig.Feature.INDENT_OUTPUT, true);
StringWriter sw = new StringWriter();
mapper.writeValue(sw, cr);
String crString = sw.toString();
schedulerMessage.getRecord().setSimpleField("Criteria", crString);
schedulerMessage.getRecord().setMapField("MessageTemplate",
msg.getRecord().getSimpleFields());
schedulerMessage.getRecord().setSimpleField("TIMEOUT", "-1");
HelixDataAccessor helixDataAccessor = manager.getHelixDataAccessor();
Builder keyBuilder = helixDataAccessor.keyBuilder();
helixDataAccessor.createProperty(
keyBuilder.controllerMessage(schedulerMessage.getMsgId()),
schedulerMessage);
Thread.sleep(15000);
Assert.assertEquals(_PARTITIONS, _factory._results.size());
PropertyKey controllerTaskStatus = keyBuilder.controllerTaskStatus(
MessageType.SCHEDULER_MSG.toString(), schedulerMessage.getMsgId());
int messageResultCount = 0;
for(int i = 0; i < 10; i++)
{
ZNRecord statusUpdate = helixDataAccessor.getProperty(controllerTaskStatus)
.getRecord();
Assert.assertTrue(statusUpdate.getMapField("SentMessageCount")
.get("MessageCount").equals("" + (_PARTITIONS * 3)));
for(String key : statusUpdate.getMapFields().keySet())
{
if(key.startsWith("MessageResult "))
{
messageResultCount ++;
}
}
if(messageResultCount == _PARTITIONS * 3)
{
break;
}
else
{
Thread.sleep(2000);
}
}
Assert.assertEquals(messageResultCount, _PARTITIONS * 3);
int count = 0;
for (Set<String> val : _factory._results.values())
{
count += val.size();
}
Assert.assertEquals(count, _PARTITIONS * 3);
// test the ZkPathDataDumpTask
String controllerStatusPath = HelixUtil.getControllerPropertyPath(manager.getClusterName(),
PropertyType.STATUSUPDATES_CONTROLLER);
List<String> subPaths = _zkClient.getChildren(controllerStatusPath);
Assert.assertTrue(subPaths.size() > 0);
for(String subPath : subPaths)
{
String nextPath = controllerStatusPath + "/" + subPath;
List<String> subsubPaths = _zkClient.getChildren(nextPath);
Assert.assertTrue(subsubPaths.size() > 0);
}
String instanceStatusPath = HelixUtil.getInstancePropertyPath(manager.getClusterName(), "localhost_" + (START_PORT),
PropertyType.STATUSUPDATES);
subPaths = _zkClient.getChildren(instanceStatusPath);
Assert.assertTrue(subPaths.size() > 0);
for(String subPath : subPaths)
{
String nextPath = instanceStatusPath + "/" + subPath;
List<String> subsubPaths = _zkClient.getChildren(nextPath);
Assert.assertTrue(subsubPaths.size() > 0);
for(String subsubPath : subsubPaths)
{
String nextnextPath = nextPath + "/" + subsubPath;
Assert.assertTrue(_zkClient.getChildren(nextnextPath).size() > 0);
}
}
ZKPathDataDumpTask dumpTask = new ZKPathDataDumpTask(manager, _zkClient, 0);
dumpTask.run();
subPaths = _zkClient.getChildren(controllerStatusPath);
Assert.assertTrue(subPaths.size() > 0);
for(String subPath : subPaths)
{
String nextPath = controllerStatusPath + "/" + subPath;
List<String> subsubPaths = _zkClient.getChildren(nextPath);
Assert.assertTrue(subsubPaths.size() == 0);
}
subPaths = _zkClient.getChildren(instanceStatusPath);
Assert.assertTrue(subPaths.size() > 0);
for(String subPath : subPaths)
{
String nextPath = instanceStatusPath + "/" + subPath;
List<String> subsubPaths = _zkClient.getChildren(nextPath);
Assert.assertTrue(subsubPaths.size() > 0);
for(String subsubPath : subsubPaths)
{
String nextnextPath = nextPath + "/" + subsubPath;
Assert.assertTrue(_zkClient.getChildren(nextnextPath).size() == 0);
}
}
}
@Test()
public void TestSchedulerMsg2() throws Exception
{
_factory._results.clear();
HelixManager manager = null;
for (int i = 0; i < NODE_NR; i++)
{
String hostDest = "localhost_" + (START_PORT + i);
_startCMResultMap.get(hostDest)._manager.getMessagingService()
.registerMessageHandlerFactory(_factory.getMessageType(), _factory);
manager = _startCMResultMap.get(hostDest)._manager;
}
Message schedulerMessage = new Message(MessageType.SCHEDULER_MSG + "", UUID
.randomUUID().toString());
schedulerMessage.setTgtSessionId("*");
schedulerMessage.setTgtName("CONTROLLER");
// TODO: change it to "ADMIN" ?
schedulerMessage.setSrcName("CONTROLLER");
// Template for the individual message sent to each participant
Message msg = new Message(_factory.getMessageType(), "Template");
msg.setTgtSessionId("*");
msg.setMsgState(MessageState.NEW);
// Criteria to send individual messages
Criteria cr = new Criteria();
cr.setInstanceName("localhost_%");
cr.setRecipientInstanceType(InstanceType.PARTICIPANT);
cr.setSessionSpecific(false);
cr.setResource("%");
cr.setPartition("%");
ObjectMapper mapper = new ObjectMapper();
SerializationConfig serializationConfig = mapper.getSerializationConfig();
serializationConfig.set(SerializationConfig.Feature.INDENT_OUTPUT, true);
StringWriter sw = new StringWriter();
mapper.writeValue(sw, cr);
String crString = sw.toString();
schedulerMessage.getRecord().setSimpleField("Criteria", crString);
schedulerMessage.getRecord().setMapField("MessageTemplate",
msg.getRecord().getSimpleFields());
schedulerMessage.getRecord().setSimpleField("TIMEOUT", "-1");
schedulerMessage.getRecord().setSimpleField("WAIT_ALL", "true");
Criteria cr2 = new Criteria();
cr2.setRecipientInstanceType(InstanceType.CONTROLLER);
cr2.setInstanceName("*");
cr2.setSessionSpecific(false);
class MockAsyncCallback extends AsyncCallback
{
Message _message;
public MockAsyncCallback()
{
}
@Override
public void onTimeOut()
{
// TODO Auto-generated method stub
}
@Override
public void onReplyMessage(Message message)
{
_message = message;
}
}
MockAsyncCallback callback = new MockAsyncCallback();
manager.getMessagingService().sendAndWait(cr2, schedulerMessage, callback, -1);
String msgId = callback._message.getResultMap().get(DefaultSchedulerMessageHandlerFactory.SCHEDULER_MSG_ID);
HelixDataAccessor helixDataAccessor = manager.getHelixDataAccessor();
Builder keyBuilder = helixDataAccessor.keyBuilder();
Assert.assertEquals(_PARTITIONS, _factory._results.size());
PropertyKey controllerTaskStatus = keyBuilder.controllerTaskStatus(
MessageType.SCHEDULER_MSG.toString(), msgId);
ZNRecord statusUpdate = helixDataAccessor.getProperty(controllerTaskStatus)
.getRecord();
Assert.assertTrue(statusUpdate.getMapField("SentMessageCount")
.get("MessageCount").equals("" + (_PARTITIONS * 3)));
int messageResultCount = 0;
for(String key : statusUpdate.getMapFields().keySet())
{
if(key.startsWith("MessageResult "))
{
messageResultCount ++;
}
}
Assert.assertEquals(messageResultCount, _PARTITIONS * 3);
int count = 0;
for (Set<String> val : _factory._results.values())
{
count += val.size();
}
Assert.assertEquals(count, _PARTITIONS * 3);
}
@Test()
public void TestSchedulerZeroMsg() throws Exception
{
TestMessagingHandlerFactory factory = new TestMessagingHandlerFactory();
HelixManager manager = null;
for (int i = 0; i < NODE_NR; i++)
{
String hostDest = "localhost_" + (START_PORT + i);
_startCMResultMap.get(hostDest)._manager.getMessagingService()
.registerMessageHandlerFactory(factory.getMessageType(), factory);
manager = _startCMResultMap.get(hostDest)._manager;
}
Message schedulerMessage = new Message(MessageType.SCHEDULER_MSG + "", UUID
.randomUUID().toString());
schedulerMessage.setTgtSessionId("*");
schedulerMessage.setTgtName("CONTROLLER");
// TODO: change it to "ADMIN" ?
schedulerMessage.setSrcName("CONTROLLER");
// Template for the individual message sent to each participant
Message msg = new Message(factory.getMessageType(), "Template");
msg.setTgtSessionId("*");
msg.setMsgState(MessageState.NEW);
// Criteria to send individual messages
Criteria cr = new Criteria();
cr.setInstanceName("localhost_DOESNOTEXIST");
cr.setRecipientInstanceType(InstanceType.PARTICIPANT);
cr.setSessionSpecific(false);
cr.setResource("%");
cr.setPartition("%");
ObjectMapper mapper = new ObjectMapper();
SerializationConfig serializationConfig = mapper.getSerializationConfig();
serializationConfig.set(SerializationConfig.Feature.INDENT_OUTPUT, true);
StringWriter sw = new StringWriter();
mapper.writeValue(sw, cr);
String crString = sw.toString();
schedulerMessage.getRecord().setSimpleField("Criteria", crString);
schedulerMessage.getRecord().setMapField("MessageTemplate",
msg.getRecord().getSimpleFields());
schedulerMessage.getRecord().setSimpleField("TIMEOUT", "-1");
HelixDataAccessor helixDataAccessor = manager.getHelixDataAccessor();
Builder keyBuilder = helixDataAccessor.keyBuilder();
PropertyKey controllerMessageKey = keyBuilder
.controllerMessage(schedulerMessage.getMsgId());
helixDataAccessor.setProperty(controllerMessageKey, schedulerMessage);
Thread.sleep(3000);
Assert.assertEquals(0, factory._results.size());
PropertyKey controllerTaskStatus = keyBuilder.controllerTaskStatus(
MessageType.SCHEDULER_MSG.toString(), schedulerMessage.getMsgId());
ZNRecord statusUpdate = helixDataAccessor.getProperty(controllerTaskStatus)
.getRecord();
Assert.assertTrue(statusUpdate.getMapField("SentMessageCount")
.get("MessageCount").equals("0"));
int count = 0;
for (Set<String> val : factory._results.values())
{
count += val.size();
}
Assert.assertEquals(count, 0);
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v9/services/campaign_audience_view_service.proto
package com.google.ads.googleads.v9.services;
/**
 * <pre>
 * Request message for [CampaignAudienceViewService.GetCampaignAudienceView][google.ads.googleads.v9.services.CampaignAudienceViewService.GetCampaignAudienceView].
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v9.services.GetCampaignAudienceViewRequest}
 */
// NOTE(review): protoc-generated code — do not hand-modify logic; regenerate
// from campaign_audience_view_service.proto instead.
public final class GetCampaignAudienceViewRequest extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v9.services.GetCampaignAudienceViewRequest)
    GetCampaignAudienceViewRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use GetCampaignAudienceViewRequest.newBuilder() to construct.
  private GetCampaignAudienceViewRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private GetCampaignAudienceViewRequest() {
    resourceName_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new GetCampaignAudienceViewRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor invoked by PARSER (tag 10 = field 1,
  // length-delimited string); unrecognized tags land in unknownFields.
  private GetCampaignAudienceViewRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: {
            java.lang.String s = input.readStringRequireUtf8();

            resourceName_ = s;
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v9.services.CampaignAudienceViewServiceProto.internal_static_google_ads_googleads_v9_services_GetCampaignAudienceViewRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v9.services.CampaignAudienceViewServiceProto.internal_static_google_ads_googleads_v9_services_GetCampaignAudienceViewRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest.class, com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest.Builder.class);
  }

  public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; decoded lazily and cached by the
  // accessors below (standard protobuf lazy-string pattern).
  private volatile java.lang.Object resourceName_;
  /**
   * <pre>
   * Required. The resource name of the campaign audience view to fetch.
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   * @return The resourceName.
   */
  @java.lang.Override
  public java.lang.String getResourceName() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so later calls skip the UTF-8 conversion.
      resourceName_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. The resource name of the campaign audience view to fetch.
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   * @return The bytes for resourceName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getResourceNameBytes() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      resourceName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // -1 = not computed yet, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest other = (com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest) obj;

    if (!getResourceName()
        .equals(other.getResourceName())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getResourceName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Request message for [CampaignAudienceViewService.GetCampaignAudienceView][google.ads.googleads.v9.services.CampaignAudienceViewService.GetCampaignAudienceView].
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v9.services.GetCampaignAudienceViewRequest}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.services.GetCampaignAudienceViewRequest)
      com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v9.services.CampaignAudienceViewServiceProto.internal_static_google_ads_googleads_v9_services_GetCampaignAudienceViewRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v9.services.CampaignAudienceViewServiceProto.internal_static_google_ads_googleads_v9_services_GetCampaignAudienceViewRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest.class, com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest.Builder.class);
    }

    // Construct using com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      resourceName_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v9.services.CampaignAudienceViewServiceProto.internal_static_google_ads_googleads_v9_services_GetCampaignAudienceViewRequest_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest getDefaultInstanceForType() {
      return com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest build() {
      com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest buildPartial() {
      com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest result = new com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest(this);
      result.resourceName_ = resourceName_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest) {
        return mergeFrom((com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest other) {
      if (other == com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest.getDefaultInstance()) return this;
      // Per proto3 merge semantics, only a non-default (non-empty) scalar
      // from `other` overwrites this builder's value.
      if (!other.getResourceName().isEmpty()) {
        resourceName_ = other.resourceName_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then rethrow as IO.
        parsedMessage = (com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object resourceName_ = "";
    /**
     * <pre>
     * Required. The resource name of the campaign audience view to fetch.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return The resourceName.
     */
    public java.lang.String getResourceName() {
      java.lang.Object ref = resourceName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        resourceName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Required. The resource name of the campaign audience view to fetch.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for resourceName.
     */
    public com.google.protobuf.ByteString
        getResourceNameBytes() {
      java.lang.Object ref = resourceName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        resourceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Required. The resource name of the campaign audience view to fetch.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @param value The resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceName(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }

      resourceName_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The resource name of the campaign audience view to fetch.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearResourceName() {

      resourceName_ = getDefaultInstance().getResourceName();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The resource name of the campaign audience view to fetch.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);

      resourceName_ = value;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.services.GetCampaignAudienceViewRequest)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v9.services.GetCampaignAudienceViewRequest)
  private static final com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest();
  }

  public static com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<GetCampaignAudienceViewRequest>
      PARSER = new com.google.protobuf.AbstractParser<GetCampaignAudienceViewRequest>() {
    @java.lang.Override
    public GetCampaignAudienceViewRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new GetCampaignAudienceViewRequest(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<GetCampaignAudienceViewRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<GetCampaignAudienceViewRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v9.services.GetCampaignAudienceViewRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
| |
package de.unistuttgart.quadrama.io.tei;
import static de.unistuttgart.quadrama.io.core.DramaIOUtil.select2Annotation;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.apache.uima.UimaContext;
import org.apache.uima.collection.CollectionException;
import org.apache.uima.fit.descriptor.ConfigurationParameter;
import org.apache.uima.fit.util.JCasUtil;
import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.cas.FSArray;
import org.apache.uima.jcas.cas.StringArray;
import org.apache.uima.resource.ResourceInitializationException;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import de.unistuttgart.ims.drama.api.Act;
import de.unistuttgart.ims.drama.api.ActHeading;
import de.unistuttgart.ims.drama.api.Author;
import de.unistuttgart.ims.drama.api.CastFigure;
import de.unistuttgart.ims.drama.api.Drama;
import de.unistuttgart.ims.drama.api.Figure;
import de.unistuttgart.ims.drama.api.Scene;
import de.unistuttgart.ims.drama.api.SceneHeading;
import de.unistuttgart.ims.drama.api.Speaker;
import de.unistuttgart.ims.drama.api.Speech;
import de.unistuttgart.ims.drama.api.StageDirection;
import de.unistuttgart.ims.drama.api.Translator;
import de.unistuttgart.ims.drama.api.Utterance;
import de.unistuttgart.ims.uima.io.xml.ArrayUtil;
import de.unistuttgart.ims.uima.io.xml.GenericXmlReader;
import de.unistuttgart.ims.uima.io.xml.type.XMLElement;
import de.unistuttgart.ims.uimautil.AnnotationUtil;
import de.unistuttgart.quadrama.io.core.AbstractDramaUrlReader;
public class CoreTeiReader extends AbstractDramaUrlReader {
public static final String PARAM_STRICT = "strict";
@ConfigurationParameter(name = PARAM_STRICT, mandatory = false, defaultValue = "false")
boolean strict = false;
@Override
public void initialize(UimaContext context) throws ResourceInitializationException {
super.initialize(context);
}
@Override
public void getNext(final JCas jcas, InputStream file, Drama drama) throws IOException, CollectionException {
GenericXmlReader<Drama> gxr = new GenericXmlReader<Drama>(Drama.class);
gxr.setTextRootSelector("TEI > text");
gxr.setPreserveWhitespace(false);
gxr.addGlobalRule("fileDesc > publicationStmt > idno[type=quadramaX]", (d, e) -> d.setDocumentId(e.text()));
gxr.addGlobalRule("fileDesc > titleStmt > title", (d, e) -> d.setDocumentTitle(e.text()));
gxr.addGlobalRule("fileDesc > titleStmt > author", (d, e) -> {
Author a = new Author(jcas);
a.setName(e.text());
if (e.hasAttr("key") && e.attr("key").startsWith("pnd")) {
a.setPnd(e.attr("key").substring(3));
}
a.addToIndexes();
});
gxr.addGlobalRule("fileDesc > titleStmt > editor[role=translator]", (d, e) -> {
Translator a = new Translator(jcas);
a.setName(e.text());
if (e.hasAttr("key") && e.attr("key").startsWith("pnd")) {
a.setPnd(e.attr("key").substring(3));
}
a.addToIndexes();
});
if (getLanguage().equals(LANGUAGE_UNSPECIFIED))
gxr.addGlobalRule("profileDesc > langUsage > language",
(d, e) -> jcas.setDocumentLanguage(e.attr("ident")));
gxr.addGlobalRule("profileDesc > particDesc > listPerson > person", CastFigure.class, (cf, e) -> {
cf.setXmlId(ArrayUtil.toStringArray(jcas, e.attr("xml:id")));
if (e.hasText())
cf.setNames(ArrayUtil.toStringArray(jcas, e.text()));
else
cf.setNames(ArrayUtil.toStringArray(jcas));
if (cf.getNames().size() > 0)
cf.setDisplayName(cf.getNames(0));
else
cf.setDisplayName(cf.getXmlId(0));
});
// segmentation
gxr.addRule("div[type=act]", Act.class, (a, e) -> a.setRegular(true));
gxr.addRule("div[type=act] > head", ActHeading.class);
gxr.addRule("div[type=scene]", Scene.class, (a, e) -> a.setRegular(true));
gxr.addRule("div[type=scene] > head", SceneHeading.class);
gxr.addRule("speaker", Speaker.class);
gxr.addRule("stage", StageDirection.class);
gxr.addRule("l", Speech.class);
gxr.addRule("p", Speech.class);
gxr.addRule("sp", Utterance.class, (u, e) -> {
Collection<Speaker> speakers = JCasUtil.selectCovered(Speaker.class, u);
for (Speaker sp : speakers) {
String[] whos = e.attr("who").split(" ");
sp.setXmlId(new StringArray(jcas, whos.length));
sp.setCastFigure(new FSArray(jcas, whos.length));
for (int i = 0; i < whos.length; i++) {
String xmlid = whos[i].substring(1);
sp.setXmlId(i, xmlid);
if (gxr.exists(xmlid)) {
sp.setCastFigure(i, (CastFigure) gxr.getAnnotation(xmlid).getValue());
u.setCastFigure((CastFigure) gxr.getAnnotation(xmlid).getValue());
}
}
}
});
gxr.read(jcas, file);
try {
AnnotationUtil.trim(new ArrayList<Figure>(JCasUtil.select(jcas, Figure.class)));
} catch (ArrayIndexOutOfBoundsException e) {
// catch silently
// TODO: fix https://github.com/nilsreiter/uima-util/issues/13
}
try {
AnnotationUtil.trim(new ArrayList<Speech>(JCasUtil.select(jcas, Speech.class)));
} catch (ArrayIndexOutOfBoundsException e) {
// catch silently
// TODO: fix https://github.com/nilsreiter/uima-util/issues/13
}
try {
AnnotationUtil.trim(new ArrayList<Utterance>(JCasUtil.select(jcas, Utterance.class)));
} catch (ArrayIndexOutOfBoundsException e) {
// catch silently
// TODO: fix https://github.com/nilsreiter/uima-util/issues/13
}
try {
AnnotationUtil.trim(new ArrayList<Scene>(JCasUtil.select(jcas, Scene.class)));
} catch (ArrayIndexOutOfBoundsException e) {
// catch silently
// TODO: fix https://github.com/nilsreiter/uima-util/issues/13
}
try {
AnnotationUtil.trim(new ArrayList<Act>(JCasUtil.select(jcas, Act.class)));
} catch (ArrayIndexOutOfBoundsException e) {
// catch silently
// TODO: fix https://github.com/nilsreiter/uima-util/issues/13
}
}
@Deprecated
public static void readCast(JCas jcas, Drama drama, Document doc) {
Map<String, CastFigure> idFigureMap = new HashMap<String, CastFigure>();
Elements castEntries = doc.select("profileDesc > particDesc > listPerson > person");
// castEntries.addAll(doc.select("profileDesc > particDesc > listPerson
// > personGrp"));
FSArray castListArray = new FSArray(jcas, castEntries.size());
for (int i = 0; i < castEntries.size(); i++) {
Element castEntry = castEntries.get(i);
String id = castEntry.attr("xml:id");
StringArray arr = new StringArray(jcas, 1);
arr.set(0, castEntry.text());
CastFigure figure = new CastFigure(jcas);
figure.setXmlId(new StringArray(jcas, 1));
figure.setXmlId(0, id);
figure.setNames(arr);
figure.addToIndexes();
idFigureMap.put(id, figure);
castListArray.set(i, figure);
}
drama.setCastList(castListArray);
for (Speaker speaker : JCasUtil.select(jcas, Speaker.class)) {
speaker.setCastFigure(new FSArray(jcas, speaker.getXmlId().size()));
for (int i = 0; i < speaker.getXmlId().size(); i++)
speaker.setCastFigure(i, idFigureMap.get(speaker.getXmlId(i)));
}
}
@Deprecated
public static void readActs(JCas jcas, Element root, Map<String, XMLElement> map, boolean strict) {
for (Act a : select2Annotation(jcas, root, map, "div[type=act]", Act.class, null)) {
a.setRegular(true);
}
select2Annotation(jcas, root, map, "div[type=act] > head", ActHeading.class, null);
}
/**
* Detect scenes. The following things are checked:
* <ol>
* <li>if they are explicitly marked with <code>type=scnee</code>, we take them
* and return.</li>
* <li>if Act annotations do exist in the JCas, we search for divs that have
* head annotations.</li>
* </ol>
*
* @param jcas
* @param root
* @param map
*/
@Deprecated
public static void readScenes(JCas jcas, Element root, Map<String, XMLElement> map, boolean strict) {
select2Annotation(jcas, root, map, "div[type=scene]", Scene.class, null);
select2Annotation(jcas, root, map, "div[type=scene] > head", SceneHeading.class, null);
for (Scene scene : JCasUtil.select(jcas, Scene.class))
scene.setRegular(true);
}
@Deprecated
public static void readActsAndScenes(JCas jcas, Element root, Map<String, XMLElement> map, boolean strict) {
readActs(jcas, root, map, strict);
readScenes(jcas, root, map, strict);
}
}
| |
package org.switchyard.console.client;
/**
* Interface to represent the messages contained in resource bundle:
* 'src/main/java/org/switchyard/console/client/Messages.properties'.
*
* This interface is maintained manually because the i18n goal does not handle
* number formats (parameters are string). Also, GWT does not support integer
* formats. To maintain, run mvn gwt:i18n. This will produce a new Messages
* class in target/generated-sources/gwt. Merge the newly generated file into
* this file.
*/
public interface Messages extends com.google.gwt.i18n.client.Messages {
/**
* Translated "Details...".
*
* @return translated "Details..."
*/
@DefaultMessage("Details...")
@Key("button_details")
String button_details();
/**
* Translated "View Configuration...".
*
* @return translated "View Configuration..."
*/
@DefaultMessage("View Configuration...")
@Key("button_viewConfiguration")
String button_viewConfiguration();
/**
* Translated "View Details...".
*
* @return translated "View Details..."
*/
@DefaultMessage("View Details...")
@Key("button_viewDetails")
String button_viewDetails();
/**
* Translated "&lt;inherited&gt;".
*
* @return translated "&lt;inherited&gt;"
*/
@DefaultMessage("<inherited>")
@Key("constant_inherited")
String constant_inherited();
/**
* Translated "Unknown".
*
* @return translated "Unknown"
*/
@DefaultMessage("Unknown")
@Key("constant_unknown")
String constant_unknown();
/**
* Translated "Displays details for a specific application. Select an application to see its implementation details.".
*
* @return translated "Displays details for a specific application. Select an application to see its implementation details."
*/
@DefaultMessage("Displays details for a specific application. Select an application to see its implementation details.")
@Key("description_applicationDetails")
String description_applicationDetails();
/**
* Translated "Displays a list of deployed SwitchYard applications. Select an application to see more details.".
*
* @return translated "Displays a list of deployed SwitchYard applications. Select an application to see more details."
*/
@DefaultMessage("Displays a list of deployed SwitchYard applications. Select an application to see more details.")
@Key("description_applications")
String description_applications();
/**
* Translated "Displays all artifacts referenced throughout the system, along with the applications referencing a specific artifact.".
*
* @return translated "Displays all artifacts referenced throughout the system, along with the applications referencing a specific artifact."
*/
@DefaultMessage("Displays all artifacts referenced throughout the system, along with the applications referencing a specific artifact.")
@Key("description_artifactReferences")
String description_artifactReferences();
/**
* Translated "Displays message metrics for a selected reference.".
*
* @return translated "Displays message metrics for a selected reference."
*/
@DefaultMessage("Displays message metrics for a selected reference.")
@Key("description_referenceMetrics")
String description_referenceMetrics();
/**
* Translated "Displays details for the selected service.".
*
* @return translated "Displays details for the selected service."
*/
@DefaultMessage("Displays details for the selected service.")
@Key("description_serviceDetails")
String description_serviceDetails();
/**
* Translated "Displays message metrics for a selected service.".
*
* @return translated "Displays message metrics for a selected service."
*/
@DefaultMessage("Displays message metrics for a selected service.")
@Key("description_serviceMetrics")
String description_serviceMetrics();
/**
* Translated "Displays a list of deployed SwitchYard references. Select a reference to see more details.".
*
* @return translated "Displays a list of deployed SwitchYard references. Select a reference to see more details."
*/
@DefaultMessage("Displays a list of deployed SwitchYard references. Select a reference to see more details.")
@Key("description_switchYardReferences")
String description_switchYardReferences();
/**
* Translated "Displays details about the SwitchYard runtime.".
*
* @return translated "Displays details about the SwitchYard runtime."
*/
@DefaultMessage("Displays details about the SwitchYard runtime.")
@Key("description_switchYardRuntime")
String description_switchYardRuntime();
/**
* Translated "Displays a list of deployed SwitchYard services. Select a service to see more details.".
*
* @return translated "Displays a list of deployed SwitchYard services. Select a service to see more details."
*/
@DefaultMessage("Displays a list of deployed SwitchYard services. Select a service to see more details.")
@Key("description_switchYardServices")
String description_switchYardServices();
/**
* Translated "Displays message metrics for the SwitchYard subsystem.".
*
* @return translated "Displays message metrics for the SwitchYard subsystem."
*/
@DefaultMessage("Displays message metrics for the SwitchYard subsystem.")
@Key("description_systemMetrics")
String description_systemMetrics();
/**
* Translated "Could not load all reference metrics.".
*
* @return translated "Could not load all reference metrics."
*/
@DefaultMessage("Could not load all reference metrics.")
@Key("error_allReferenceMetricsLoad")
String error_allReferenceMetricsLoad();
/**
* Translated "Could not load all service metrics.".
*
* @return translated "Could not load all service metrics."
*/
@DefaultMessage("Could not load all service metrics.")
@Key("error_allServiceMetricsLoad")
String error_allServiceMetricsLoad();
/**
* Translated "Could not load information for application: {0}".
* @param arg0 application name
* @return translated "Could not load information for application: {0}"
*/
@DefaultMessage("Could not load information for application: {0}")
@Key("error_applicationLoad")
String error_applicationLoad(String arg0);
/**
* Translated "Could not load artifact references.".
*
* @return translated "Could not load artifact references."
*/
@DefaultMessage("Could not load artifact references.")
@Key("error_artifactsLoad")
String error_artifactsLoad();
/**
* Translated "Comparison column specified, but no baseline set!".
*
* @return translated "Comparison column specified, but no baseline set!"
*/
@DefaultMessage("Comparison column specified, but no baseline set!")
@Key("error_comparisonColumnWithoutBaseline")
String error_comparisonColumnWithoutBaseline();
/**
* Translated "Could not load information for component: {0}".
* @param arg0 component name
* @return translated "Could not load information for component: {0}"
*/
@DefaultMessage("Could not load information for component: {0}")
@Key("error_componentLoad")
String error_componentLoad(String arg0);
/**
* Translated "Illegal baseline index {0,number} on number of samples {1,number}.".
* @param arg0 baseline index
* @param arg1 sample index
* @return translated "Illegal baseline index {0,number} on number of samples {1,number}."
*/
@DefaultMessage("Illegal baseline index {0,number} on number of samples {1,number}.")
@Key("error_illegalBaselineIndex")
String error_illegalBaselineIndex(int arg0, int arg1);
/**
* Translated "Metric value at index {0,number} is null".
* @param arg0 index
* @return translated "Metric value at index {0,number} is null"
*/
@DefaultMessage("Metric value at index {0,number} is null")
@Key("error_metricIsNullAtIndex")
String error_metricIsNullAtIndex(int arg0);
/**
* Translated "Cannot reveal application details. No application specified.".
*
* @return translated "Cannot reveal application details. No application specified."
*/
@DefaultMessage("Cannot reveal application details. No application specified.")
@Key("error_navigateToApplication")
String error_navigateToApplication();
/**
* Translated "Cannot reveal reference details. No reference or application specified.".
*
* @return translated "Cannot reveal reference details. No reference or application specified."
*/
@DefaultMessage("Cannot reveal reference details. No reference or application specified.")
@Key("error_navigateToReference")
String error_navigateToReference();
/**
* Translated "Cannot reveal service details. No service or application specified.".
*
* @return translated "Cannot reveal service details. No service or application specified."
*/
@DefaultMessage("Cannot reveal service details. No service or application specified.")
@Key("error_navigateToService")
String error_navigateToService();
/**
* Translated "Could not load information for reference: {0} from application: {1}".
* @param arg0 reference name
* @param arg1 application name
* @return translated "Could not load information for reference: {0} from application: {1}"
*/
@DefaultMessage("Could not load information for reference: {0} from application: {1}")
@Key("error_referenceLoad")
String error_referenceLoad(String arg0, String arg1);
/**
* Translated "Failure resetting metrics for {0}: {1}".
* @param arg0 entity name
* @param arg1 error message
* @return translated "Failure resetting metrics for {0}: {1}"
*/
@DefaultMessage("Failure resetting metrics for {0}: {1}")
@Key("error_resetObjectMetrics")
String error_resetObjectMetrics(String arg0, String arg1);
/**
* Translated "Failure resetting system metrics: {0}".
* @param arg0 error message
* @return translated "Failure resetting system metrics: {0}"
*/
@DefaultMessage("Failure resetting system metrics: {0}")
@Key("error_resetSystemMetrics")
String error_resetSystemMetrics(String arg0);
/**
* Translated "Could not load information for service: {0} from application: {1}".
* @param arg0 service name
* @param arg1 application name
* @return translated "Could not load information for service: {0} from application: {1}"
*/
@DefaultMessage("Could not load information for service: {0} from application: {1}")
@Key("error_serviceLoad")
String error_serviceLoad(String arg0, String arg1);
/**
* Translated "Could not load metrics for service: {0}".
* @param arg0 service name
* @return translated "Could not load metrics for service: {0}"
*/
@DefaultMessage("Could not load metrics for service: {0}")
@Key("error_serviceMetricsLoad")
String error_serviceMetricsLoad(String arg0);
/**
* Translated "Failure setting property: {0}".
* @param arg0 property name
* @return translated "Failure setting property: {0}"
*/
@DefaultMessage("Failure setting property: {0}")
@Key("error_setProperty")
String error_setProperty(String arg0);
/**
* Translated "Failure starting gateway for {0}: {1}".
* @param arg0 gateway name
* @param arg1 error message
* @return translated "Failure starting gateway for {0}: {1}"
*/
@DefaultMessage("Failure starting gateway for {0}: {1}")
@Key("error_startGateway")
String error_startGateway(String arg0, String arg1);
/**
* Translated "Failure stopping gateway for {0}: {1}".
* @param arg0 gateway name
* @param arg1 error message
* @return translated "Failure stopping gateway for {0}: {1}"
*/
@DefaultMessage("Failure stopping gateway for {0}: {1}")
@Key("error_stopGateway")
String error_stopGateway(String arg0, String arg1);
/**
* Translated "Could not load metrics for system".
*
* @return translated "Could not load metrics for system"
*/
@DefaultMessage("Could not load metrics for system")
@Key("error_systemMetricsLoad")
String error_systemMetricsLoad();
/**
* Translated "Unknown error".
*
* @return translated "Unknown error"
*/
@DefaultMessage("Unknown error")
@Key("error_unknown")
String error_unknown();
/**
* Translated "Failure updating throttling details for {0}: {1}".
* @param arg0 service name
* @param arg1 error message
* @return translated "Failure updating throttling details for {0}: {1}"
*/
@DefaultMessage("Failure updating throttling details for {0}: {1}")
@Key("error_updateThrottling")
String error_updateThrottling(String arg0, String arg1);
/**
* Translated "Actual".
*
* @return translated "Actual"
*/
@DefaultMessage("Actual")
@Key("label_actual")
String label_actual();
/**
* Translated "Application".
*
* @return translated "Application"
*/
@DefaultMessage("Application")
@Key("label_application")
String label_application();
/**
* Translated "Application Details".
*
* @return translated "Application Details"
*/
@DefaultMessage("Application Details")
@Key("label_applicationDetails")
String label_applicationDetails();
/**
* Translated "Application Name".
*
* @return translated "Application Name"
*/
@DefaultMessage("Application Name")
@Key("label_applicationName")
String label_applicationName();
/**
* Translated "Application Namespace".
*
* @return translated "Application Namespace"
*/
@DefaultMessage("Application Namespace")
@Key("label_applicationNamespace")
String label_applicationNamespace();
/**
* Translated "Applications".
*
* @return translated "Applications"
*/
@DefaultMessage("Applications")
@Key("label_applications")
String label_applications();
/**
* Translated "Applications Using Artifact".
*
* @return translated "Applications Using Artifact"
*/
@DefaultMessage("Applications Using Artifact")
@Key("label_applicationsUsingArtifacts")
String label_applicationsUsingArtifacts();
/**
* Translated "Artifact References".
*
* @return translated "Artifact References"
*/
@DefaultMessage("Artifact References")
@Key("label_artifactReferences")
String label_artifactReferences();
/**
* Translated "Artifacts".
*
* @return translated "Artifacts"
*/
@DefaultMessage("Artifacts")
@Key("label_artifacts")
String label_artifacts();
/**
* Translated "Average Processing Time".
*
* @return translated "Average Processing Time"
*/
@DefaultMessage("Average Processing Time")
@Key("label_averageProcessingTime")
String label_averageProcessingTime();
/**
* Translated "Average Time".
*
* @return translated "Average Time"
*/
@DefaultMessage("Average Time")
@Key("label_averageTime")
String label_averageTime();
/**
* Translated "Component Details".
*
* @return translated "Component Details"
*/
@DefaultMessage("Component Details")
@Key("label_componentDetails")
String label_componentDetails();
/**
* Translated "Component Services".
*
* @return translated "Component Services"
*/
@DefaultMessage("Component Services")
@Key("label_componentServices")
String label_componentServices();
/**
* Translated "Configuration".
*
* @return translated "Configuration"
*/
@DefaultMessage("Configuration")
@Key("label_configuration")
String label_configuration();
/**
* Translated "Core Runtime".
*
* @return translated "Core Runtime"
*/
@DefaultMessage("Core Runtime")
@Key("label_coreRuntime")
String label_coreRuntime();
/**
* Translated "Details".
*
* @return translated "Details"
*/
@DefaultMessage("Details")
@Key("label_details")
String label_details();
/**
* Translated "Enabled".
*
* @return translated "Enabled"
*/
@DefaultMessage("Enabled")
@Key("label_enabled")
String label_enabled();
/**
* Translated "Fault Count".
*
* @return translated "Fault Count"
*/
@DefaultMessage("Fault Count")
@Key("label_faultCount")
String label_faultCount();
/**
* Translated "Fault %".
*
* @return translated "Fault %"
*/
@DefaultMessage("Fault %")
@Key("label_faultPercent")
String label_faultPercent();
/**
* Translated "From".
*
* @return translated "From"
*/
@DefaultMessage("From")
@Key("label_from")
String label_from();
/**
* Translated "Gateway Configuration".
*
* @return translated "Gateway Configuration"
*/
@DefaultMessage("Gateway Configuration")
@Key("label_gatewayConfiguration")
String label_gatewayConfiguration();
/**
* Translated "Gateway Metrics".
*
* @return translated "Gateway Metrics"
*/
@DefaultMessage("Gateway Metrics")
@Key("label_gatewayMetrics")
String label_gatewayMetrics();
/**
* Translated "Gateways".
*
* @return translated "Gateways"
*/
@DefaultMessage("Gateways")
@Key("label_gateways")
String label_gateways();
/**
* Translated "Implementation".
*
* @return translated "Implementation"
*/
@DefaultMessage("Implementation")
@Key("label_implementation")
String label_implementation();
/**
* Translated "Implementation Details".
*
* @return translated "Implementation Details"
*/
@DefaultMessage("Implementation Details")
@Key("label_implementationDetails")
String label_implementationDetails();
/**
* Translated "{0} implementation".
* @param arg0 implementation name
* @return translated "{0} implementation"
*/
@DefaultMessage("{0} implementation")
@Key("label_implementationInstance")
String label_implementationInstance(String arg0);
/**
* Translated "Installed Components".
*
* @return translated "Installed Components"
*/
@DefaultMessage("Installed Components")
@Key("label_installedComponents")
String label_installedComponents();
/**
* Translated "Interface".
*
* @return translated "Interface"
*/
@DefaultMessage("Interface")
@Key("label_interface")
String label_interface();
/**
* Translated "Max. Processing Time".
*
* @return translated "Max. Processing Time"
*/
@DefaultMessage("Max. Processing Time")
@Key("label_maxProcessingTime")
String label_maxProcessingTime();
/**
* Translated "Maximum Requests".
*
* @return translated "Maximum Requests"
*/
@DefaultMessage("Maximum Requests")
@Key("label_maximumRequests")
String label_maximumRequests();
/**
* Translated "Message Count".
*
* @return translated "Message Count"
*/
@DefaultMessage("Message Count")
@Key("label_messageCount")
String label_messageCount();
/**
* Translated "Message Counts".
*
* @return translated "Message Counts"
*/
@DefaultMessage("Message Counts")
@Key("label_messageCounts")
String label_messageCounts();
/**
* Translated "Metric".
*
* @return translated "Metric"
*/
@DefaultMessage("Metric")
@Key("label_metric")
String label_metric();
/**
* Translated "Min. Processing Time".
*
* @return translated "Min. Processing Time"
*/
@DefaultMessage("Min. Processing Time")
@Key("label_minProcessingTime")
String label_minProcessingTime();
/**
* Translated "Name".
*
* @return translated "Name"
*/
@DefaultMessage("Name")
@Key("label_name")
String label_name();
/**
* Translated "Namespace".
*
* @return translated "Namespace"
*/
@DefaultMessage("Namespace")
@Key("label_namespace")
String label_namespace();
/**
* Translated "Operation Metrics".
*
* @return translated "Operation Metrics"
*/
@DefaultMessage("Operation Metrics")
@Key("label_operationMetrics")
String label_operationMetrics();
/**
* Translated "Processing Times".
*
* @return translated "Processing Times"
*/
@DefaultMessage("Processing Times")
@Key("label_processingTimes")
String label_processingTimes();
/**
* Translated "Promoted Service".
*
* @return translated "Promoted Service"
*/
@DefaultMessage("Promoted Service")
@Key("label_promotedService")
String label_promotedService();
/**
* Translated "Properties".
*
* @return translated "Properties"
*/
@DefaultMessage("Properties")
@Key("label_properties")
String label_properties();
/**
* Translated "Raw Configuration".
*
* @return translated "Raw Configuration"
*/
@DefaultMessage("Raw Configuration")
@Key("label_rawConfiguration")
String label_rawConfiguration();
/**
* Translated "Reference Details".
*
* @return translated "Reference Details"
*/
@DefaultMessage("Reference Details")
@Key("label_referenceDetails")
String label_referenceDetails();
/**
* Translated "Reference Message Metrics".
*
* @return translated "Reference Message Metrics"
*/
@DefaultMessage("Reference Message Metrics")
@Key("label_referenceMessageMetrics")
String label_referenceMessageMetrics();
/**
* Translated "Reference Metrics".
*
* @return translated "Reference Metrics"
*/
@DefaultMessage("Reference Metrics")
@Key("label_referenceMetrics")
String label_referenceMetrics();
/**
* Translated "Referenced Service Metrics".
*
* @return translated "Referenced Service Metrics"
*/
@DefaultMessage("Referenced Service Metrics")
@Key("label_referencedServiceMetrics")
String label_referencedServiceMetrics();
/**
* Translated "References".
*
* @return translated "References"
*/
@DefaultMessage("References")
@Key("label_references")
String label_references();
/**
* Translated "Referencing Applications".
*
* @return translated "Referencing Applications"
*/
@DefaultMessage("Referencing Applications")
@Key("label_referencingApplications")
String label_referencingApplications();
/**
* Translated "Reset".
*
* @return translated "Reset"
*/
@DefaultMessage("Reset")
@Key("label_reset")
String label_reset();
/**
* Translated "Reset All Metrics".
*
* @return translated "Reset All Metrics"
*/
@DefaultMessage("Reset All Metrics")
@Key("label_resetAllMetrics")
String label_resetAllMetrics();
/**
* Translated "Reset Metrics".
*
* @return translated "Reset Metrics"
*/
@DefaultMessage("Reset Metrics")
@Key("label_resetMetrics")
String label_resetMetrics();
/**
* Translated "Runtime Details".
*
* @return translated "Runtime Details"
*/
@DefaultMessage("Runtime Details")
@Key("label_runtimeDetails")
String label_runtimeDetails();
/**
* Translated "Service Details".
*
* @return translated "Service Details"
*/
@DefaultMessage("Service Details")
@Key("label_serviceDetails")
String label_serviceDetails();
/**
* Translated "Service Message Metrics".
*
* @return translated "Service Message Metrics"
*/
@DefaultMessage("Service Message Metrics")
@Key("label_serviceMessageMetrics")
String label_serviceMessageMetrics();
/**
* Translated "Service Metrics".
*
* @return translated "Service Metrics"
*/
@DefaultMessage("Service Metrics")
@Key("label_serviceMetrics")
String label_serviceMetrics();
/**
* Translated "Service Operation Metrics".
*
* @return translated "Service Operation Metrics"
*/
@DefaultMessage("Service Operation Metrics")
@Key("label_serviceOperationMetrics")
String label_serviceOperationMetrics();
/**
* Translated "Services".
*
* @return translated "Services"
*/
@DefaultMessage("Services")
@Key("label_services")
String label_services();
/**
* Translated "Start".
*
* @return translated "Start"
*/
@DefaultMessage("Start")
@Key("label_start")
String label_start();
/**
* Translated "Start/Stop".
*
* @return translated "Start/Stop"
*/
@DefaultMessage("Start/Stop")
@Key("label_startStop")
String label_startStop();
/**
* Translated "Status".
*
* @return translated "Status"
*/
@DefaultMessage("Status")
@Key("label_status")
String label_status();
/**
* Translated "Stop".
*
* @return translated "Stop"
*/
@DefaultMessage("Stop")
@Key("label_stop")
String label_stop();
/**
* Translated "Success Count".
*
* @return translated "Success Count"
*/
@DefaultMessage("Success Count")
@Key("label_successCount")
String label_successCount();
/**
* Translated "SwitchYard Applications".
*
* @return translated "SwitchYard Applications"
*/
@DefaultMessage("SwitchYard Applications")
@Key("label_switchYardApplications")
String label_switchYardApplications();
/**
* Translated "SwitchYard Artifact References".
*
* @return translated "SwitchYard Artifact References"
*/
@DefaultMessage("SwitchYard Artifact References")
@Key("label_switchYardArtifactReferences")
String label_switchYardArtifactReferences();
/**
* Translated "SwitchYard Message Metrics".
*
* @return translated "SwitchYard Message Metrics"
*/
@DefaultMessage("SwitchYard Message Metrics")
@Key("label_switchYardMessageMetrics")
String label_switchYardMessageMetrics();
/**
* Translated "SwitchYard References".
*
* @return translated "SwitchYard References"
*/
@DefaultMessage("SwitchYard References")
@Key("label_switchYardReferences")
String label_switchYardReferences();
/**
* Translated "SwitchYard Runtime".
*
* @return translated "SwitchYard Runtime"
*/
@DefaultMessage("SwitchYard Runtime")
@Key("label_switchYardRuntime")
String label_switchYardRuntime();
/**
* Translated "SwitchYard Runtime Details".
*
* @return translated "SwitchYard Runtime Details"
*/
@DefaultMessage("SwitchYard Runtime Details")
@Key("label_switchYardRuntimeDetails")
String label_switchYardRuntimeDetails();
/**
* Translated "SwitchYard Services".
*
* @return translated "SwitchYard Services"
*/
@DefaultMessage("SwitchYard Services")
@Key("label_switchYardServices")
String label_switchYardServices();
/**
* Translated "System".
*
* @return translated "System"
*/
@DefaultMessage("System")
@Key("label_system")
String label_system();
/**
* Translated "System Message Metrics".
*
* @return translated "System Message Metrics"
*/
@DefaultMessage("System Message Metrics")
@Key("label_systemMessageMetrics")
String label_systemMessageMetrics();
/**
* Translated "Target Namespace".
*
* @return translated "Target Namespace"
*/
@DefaultMessage("Target Namespace")
@Key("label_targetNamespace")
String label_targetNamespace();
/**
* Translated "Throttling".
*
* @return translated "Throttling"
*/
@DefaultMessage("Throttling")
@Key("label_throttling")
String label_throttling();
/**
* Translated "Time %".
*
* @return translated "Time %"
*/
@DefaultMessage("Time %")
@Key("label_timePercent")
String label_timePercent();
/**
* Translated "Time Period (millis)".
*
* @return translated "Time Period (millis)"
*/
@DefaultMessage("Time Period (millis)")
@Key("label_timePeriod")
String label_timePeriod();
/**
* Translated "To".
*
* @return translated "To"
*/
@DefaultMessage("To")
@Key("label_to")
String label_to();
/**
* Translated "Total Count".
*
* @return translated "Total Count"
*/
@DefaultMessage("Total Count")
@Key("label_totalCount")
String label_totalCount();
/**
* Translated "Total Processing Time".
*
* @return translated "Total Processing Time"
*/
@DefaultMessage("Total Processing Time")
@Key("label_totalProcessingTime")
String label_totalProcessingTime();
/**
* Translated "Transformers".
*
* @return translated "Transformers"
*/
@DefaultMessage("Transformers")
@Key("label_transformers")
String label_transformers();
/**
* Translated "Type".
*
* @return translated "Type"
*/
@DefaultMessage("Type")
@Key("label_type")
String label_type();
  /**
   * Translated "URL".
   *
   * @return the localized text bound to key {@code label_url}
   *         ("URL" in the default locale)
   */
  @DefaultMessage("URL")
  @Key("label_url")
  String label_url();
  /**
   * Translated "Validators".
   *
   * @return the localized text bound to key {@code label_validators}
   *         ("Validators" in the default locale)
   */
  @DefaultMessage("Validators")
  @Key("label_validators")
  String label_validators();
  /**
   * Translated "Version".
   *
   * @return the localized text bound to key {@code label_version}
   *         ("Version" in the default locale)
   */
  @DefaultMessage("Version")
  @Key("label_version")
  String label_version();
}
| |
package cz.tomaskypta.tools.langtool.exporting;
import java.io.*;
import java.util.*;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.lang3.StringUtils;
import org.apache.poi.hssf.usermodel.*;
import org.apache.poi.hssf.util.HSSFColor;
import org.apache.poi.ss.util.CellRangeAddress;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
public class ToolExport {
private static final String DIR_VALUES = "values";
private static final String[] POTENTIAL_RES_DIRS = new String[]{"res", "src/main/res"};
private DocumentBuilder builder;
private File outExcelFile;
private String project;
private Map<String, Integer> keysIndex;
private PrintStream out;
private ExportConfig mConfig;
private Set<String> sAllowedFiles = new HashSet<String>();
{
sAllowedFiles.add("strings.xml");
}
public ToolExport(PrintStream out) throws ParserConfigurationException {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
builder = dbf.newDocumentBuilder();
this.out = out == null ? System.out : out;
}
public static void run(ExportConfig config) throws SAXException,
IOException, ParserConfigurationException {
run(null, config);
}
public static void run(PrintStream out, ExportConfig config) throws SAXException, IOException, ParserConfigurationException {
ToolExport tool = new ToolExport(out);
if (StringUtils.isEmpty(config.inputExportProject)) {
tool.out.println("Cannot export, missing config");
return;
}
File project = new File(config.inputExportProject);
if (StringUtils.isEmpty(config.outputFile)) {
config.outputFile = "exported_strings_" + System.currentTimeMillis() + ".xls";
}
tool.outExcelFile = new File(config.outputFile);
tool.project = project.getName();
tool.mConfig = config;
tool.sAllowedFiles.addAll(config.additionalResources);
tool.export(project);
}
private void export(File project) throws SAXException, IOException {
File res = findResourceDir(project);
if (res == null) {
System.err.println("Cannot find resource directory.");
return;
}
for (File dir : res.listFiles()) {
if (!dir.isDirectory() || !dir.getName().startsWith(DIR_VALUES)) {
continue;
}
String dirName = dir.getName();
if (dirName.equals(DIR_VALUES)) {
keysIndex = exportDefLang(dir);
} else {
int index = dirName.indexOf('-');
if (index == -1)
continue;
String lang = dirName.substring(index + 1);
exportLang(lang, dir);
}
}
}
private File findResourceDir(File project) {
List<File> availableResDirs = new LinkedList<File>();
for (String potentialResDir : POTENTIAL_RES_DIRS) {
File res = new File(project, potentialResDir);
if (res.exists()) {
availableResDirs.add(res);
}
}
if (!availableResDirs.isEmpty()) {
return availableResDirs.get(0);
}
return null;
}
private void exportLang(String lang, File valueDir) throws IOException, SAXException {
for (String fileName : sAllowedFiles) {
File stringFile = new File(valueDir, fileName);
if (!stringFile.exists()) {
continue;
}
exportLangToExcel(project, lang, stringFile, getStrings(stringFile), outExcelFile, keysIndex);
}
}
private Map<String, Integer> exportDefLang(File valueDir) throws IOException, SAXException {
Map<String, Integer> keys = new HashMap<String, Integer>();
HSSFWorkbook wb = new HSSFWorkbook();
HSSFSheet sheet;
sheet = wb.createSheet(project);
int rowIndex = 0;
sheet.createRow(rowIndex++);
createTilte(wb, sheet);
addLang2Tilte(wb, sheet, "default");
sheet.createFreezePane(1, 1);
FileOutputStream outFile = new FileOutputStream(outExcelFile);
wb.write(outFile);
outFile.close();
for (String fileName : sAllowedFiles) {
File stringFile = new File(valueDir, fileName);
if (!stringFile.exists()) {
continue;
}
keys.putAll(exportDefLangToExcel(rowIndex, project, stringFile, getStrings(stringFile), outExcelFile));
}
return keys;
}
private NodeList getStrings(File f) throws SAXException, IOException {
Document dom = builder.parse(f);
return dom.getDocumentElement().getChildNodes();
}
private static HSSFCellStyle createTilteStyle(HSSFWorkbook wb) {
HSSFFont bold = wb.createFont();
bold.setBoldweight(HSSFFont.BOLDWEIGHT_BOLD);
HSSFCellStyle style = wb.createCellStyle();
style.setFont(bold);
style.setFillForegroundColor(HSSFColor.GREY_25_PERCENT.index);
style.setFillPattern(HSSFCellStyle.SOLID_FOREGROUND);
style.setAlignment(HSSFCellStyle.ALIGN_CENTER);
style.setWrapText(true);
return style;
}
private static HSSFCellStyle createCommentStyle(HSSFWorkbook wb) {
HSSFFont commentFont = wb.createFont();
commentFont.setColor(HSSFColor.GREEN.index);
commentFont.setItalic(true);
commentFont.setFontHeightInPoints((short)12);
HSSFCellStyle commentStyle = wb.createCellStyle();
commentStyle.setFont(commentFont);
return commentStyle;
}
private static HSSFCellStyle createPlurarStyle(HSSFWorkbook wb) {
HSSFFont commentFont = wb.createFont();
commentFont.setColor(HSSFColor.GREY_50_PERCENT.index);
commentFont.setItalic(true);
commentFont.setFontHeightInPoints((short)12);
HSSFCellStyle commentStyle = wb.createCellStyle();
commentStyle.setFont(commentFont);
return commentStyle;
}
private static HSSFCellStyle createKeyStyle(HSSFWorkbook wb) {
HSSFFont bold = wb.createFont();
bold.setBoldweight(HSSFFont.BOLDWEIGHT_BOLD);
bold.setFontHeightInPoints((short)11);
HSSFCellStyle keyStyle = wb.createCellStyle();
keyStyle.setFont(bold);
return keyStyle;
}
private static HSSFCellStyle createTextStyle(HSSFWorkbook wb) {
HSSFFont plain = wb.createFont();
plain.setFontHeightInPoints((short)12);
HSSFCellStyle textStyle = wb.createCellStyle();
textStyle.setFont(plain);
return textStyle;
}
private static HSSFCellStyle createMissedStyle(HSSFWorkbook wb) {
HSSFCellStyle style = wb.createCellStyle();
style.setFillForegroundColor(HSSFColor.RED.index);
style.setFillPattern(HSSFCellStyle.SOLID_FOREGROUND);
return style;
}
private static void createTilte(HSSFWorkbook wb, HSSFSheet sheet) {
HSSFRow titleRow = sheet.getRow(0);
HSSFCell cell = titleRow.createCell(0);
cell.setCellStyle(createTilteStyle(wb));
cell.setCellValue("KEY");
sheet.setColumnWidth(cell.getColumnIndex(), (40 * 256));
}
private static void addLang2Tilte(HSSFWorkbook wb, HSSFSheet sheet, String lang) {
HSSFRow titleRow = sheet.getRow(0);
HSSFCell lastCell = titleRow.getCell((int)titleRow.getLastCellNum() - 1);
if (lang.equals(lastCell.getStringCellValue())) {
// language column already exists
return;
}
HSSFCell cell = titleRow.createCell((int)titleRow.getLastCellNum());
cell.setCellStyle(createTilteStyle(wb));
cell.setCellValue(lang);
sheet.setColumnWidth(cell.getColumnIndex(), (60 * 256));
}
private Map<String, Integer> exportDefLangToExcel(int rowIndex, String project, File src, NodeList strings, File f) throws FileNotFoundException, IOException {
out.println();
out.println("Start processing DEFAULT language " + src.getName());
Map<String, Integer> keys = new HashMap<String, Integer>();
HSSFWorkbook wb = new HSSFWorkbook(new FileInputStream(f));
HSSFCellStyle commentStyle = createCommentStyle(wb);
HSSFCellStyle plurarStyle = createPlurarStyle(wb);
HSSFCellStyle keyStyle = createKeyStyle(wb);
HSSFCellStyle textStyle = createTextStyle(wb);
HSSFSheet sheet = wb.getSheet(project);
for (int i = 0; i < strings.getLength(); i++) {
Node item = strings.item(i);
if (item.getNodeType() == Node.TEXT_NODE) {
}
if (item.getNodeType() == Node.COMMENT_NODE) {
HSSFRow row = sheet.createRow(rowIndex++);
HSSFCell cell = row.createCell(0);
cell.setCellValue(String.format("/** %s **/", item.getTextContent()));
cell.setCellStyle(commentStyle);
sheet.addMergedRegion(new CellRangeAddress(row.getRowNum(), row.getRowNum(), 0, 255));
}
if ("string".equals(item.getNodeName())) {
Node translatable = item.getAttributes().getNamedItem("translatable");
if (translatable != null && "false".equals(translatable.getNodeValue())) {
continue;
}
String key = item.getAttributes().getNamedItem("name").getNodeValue();
if (mConfig.isIgnoredKey(key)) {
continue;
}
keys.put(key, rowIndex);
HSSFRow row = sheet.createRow(rowIndex++);
HSSFCell cell = row.createCell(0);
cell.setCellValue(key);
cell.setCellStyle(keyStyle);
cell = row.createCell(1);
cell.setCellStyle(textStyle);
cell.setCellValue(item.getTextContent());
} else if ("plurals".equals(item.getNodeName())) {
String key = item.getAttributes().getNamedItem("name").getNodeValue();
if (mConfig.isIgnoredKey(key)) {
continue;
}
String plurarName = key;
HSSFRow row = sheet.createRow(rowIndex++);
HSSFCell cell = row.createCell(0);
cell.setCellValue(String.format("//plurals: %s", plurarName));
cell.setCellStyle(plurarStyle);
NodeList items = item.getChildNodes();
for (int j = 0; j < items.getLength(); j++) {
Node plurarItem = items.item(j);
if ("item".equals(plurarItem.getNodeName())) {
String itemKey = plurarName + "#" + plurarItem.getAttributes().getNamedItem("quantity").getNodeValue();
keys.put(itemKey, rowIndex);
HSSFRow itemRow = sheet.createRow(rowIndex++);
HSSFCell itemCell = itemRow.createCell(0);
itemCell.setCellValue(itemKey);
itemCell.setCellStyle(keyStyle);
itemCell = itemRow.createCell(1);
itemCell.setCellStyle(textStyle);
itemCell.setCellValue(plurarItem.getTextContent());
}
}
} else if ("string-array".equals(item.getNodeName())) {
String key = item.getAttributes().getNamedItem("name").getNodeValue();
if (mConfig.isIgnoredKey(key)) {
continue;
}
NodeList arrayItems = item.getChildNodes();
for (int j = 0, k = 0; j < arrayItems.getLength(); j++) {
Node arrayItem = arrayItems.item(j);
if ("item".equals(arrayItem.getNodeName())) {
String itemKey = key + "[" + k++ + "]";
keys.put(itemKey, rowIndex);
HSSFRow itemRow = sheet.createRow(rowIndex++);
HSSFCell itemCell = itemRow.createCell(0);
itemCell.setCellValue(itemKey);
itemCell.setCellStyle(keyStyle);
itemCell = itemRow.createCell(1);
itemCell.setCellStyle(textStyle);
itemCell.setCellValue(arrayItem.getTextContent());
}
}
}
}
FileOutputStream outFile = new FileOutputStream(f);
wb.write(outFile);
outFile.close();
out.println("DEFAULT language was precessed");
return keys;
}
private void exportLangToExcel(String project, String lang, File src, NodeList strings, File f, Map<String, Integer> keysIndex) throws FileNotFoundException, IOException {
out.println();
out.println(String.format("Start processing: '%s'", lang) + " " + src.getName());
Set<String> missedKeys = new HashSet<String>(keysIndex.keySet());
HSSFWorkbook wb = new HSSFWorkbook(new FileInputStream(f));
HSSFCellStyle textStyle = createTextStyle(wb);
HSSFSheet sheet = wb.getSheet(project);
addLang2Tilte(wb, sheet, lang);
HSSFRow titleRow = sheet.getRow(0);
int lastColumnIdx = (int)titleRow.getLastCellNum() - 1;
for (int i = 0; i < strings.getLength(); i++) {
Node item = strings.item(i);
if ("string".equals(item.getNodeName())) {
Node translatable = item.getAttributes().getNamedItem("translatable");
if (translatable != null && "false".equals(translatable.getNodeValue())) {
continue;
}
String key = item.getAttributes().getNamedItem("name").getNodeValue();
Integer index = keysIndex.get(key);
if (index == null) {
out.println("\t" + key + " - row does not exist");
continue;
}
missedKeys.remove(key);
HSSFRow row = sheet.getRow(index);
HSSFCell cell = row.createCell(lastColumnIdx);
cell.setCellValue(item.getTextContent());
cell.setCellStyle(textStyle);
} else if ("plurals".equals(item.getNodeName())) {
String key = item.getAttributes().getNamedItem("name").getNodeValue();
String plurarName = key;
NodeList items = item.getChildNodes();
for (int j = 0; j < items.getLength(); j++) {
Node plurarItem = items.item(j);
if ("item".equals(plurarItem.getNodeName())) {
key = plurarName + "#" + plurarItem.getAttributes().getNamedItem("quantity").getNodeValue();
Integer index = keysIndex.get(key);
if (index == null) {
out.println("\t" + key + " - row does not exist");
continue;
}
missedKeys.remove(key);
HSSFRow row = sheet.getRow(index);
HSSFCell cell = row.createCell(lastColumnIdx);
cell.setCellValue(plurarItem.getTextContent());
cell.setCellStyle(textStyle);
}
}
} else if ("string-array".equals(item.getNodeName())) {
String key = item.getAttributes().getNamedItem("name").getNodeValue();
NodeList arrayItems = item.getChildNodes();
for (int j = 0, k = 0; j < arrayItems.getLength(); j++) {
Node arrayItem = arrayItems.item(j);
if ("item".equals(arrayItem.getNodeName())) {
String itemKey = key + "[" + k++ + "]";
Integer rowIndex = keysIndex.get(itemKey);
if (rowIndex == null) {
out.println("\t" + key + " - row does not exist");
continue;
}
missedKeys.remove(key);
HSSFRow itemRow = sheet.getRow(rowIndex);
HSSFCell cell = itemRow.createCell(lastColumnIdx);
cell.setCellValue(arrayItem.getTextContent());
cell.setCellStyle(textStyle);
}
}
}
}
HSSFCellStyle missedStyle = createMissedStyle(wb);
if (!missedKeys.isEmpty()) {
out.println(" MISSED KEYS:");
}
for (String missedKey : missedKeys) {
out.println("\t" + missedKey);
Integer index = keysIndex.get(missedKey);
HSSFRow row = sheet.getRow(index);
HSSFCell cell = row.createCell((int)row.getLastCellNum());
cell.setCellStyle(missedStyle);
}
FileOutputStream outStream = new FileOutputStream(f);
wb.write(outStream);
outStream.close();
if (missedKeys.isEmpty()) {
out.println(String.format("'%s' was processed", lang));
} else {
out.println(String.format("'%s' was processed with MISSED KEYS - %d", lang, missedKeys.size()));
}
}
}
| |
/********************************************************************
* Copyright 2010 the University of New Mexico.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
********************************************************************/
package org.unc.hive.services.rs;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.xml.namespace.QName;
import org.apache.log4j.Logger;
import edu.unc.ils.mrc.hive.api.SKOSConcept;
import edu.unc.ils.mrc.hive.api.SKOSScheme;
import edu.unc.ils.mrc.hive.api.SKOSSearcher;
import edu.unc.ils.mrc.hive.api.SKOSServer;
import edu.unc.ils.mrc.hive.api.SKOSTagger;
/**
 * The ConceptsResource class utilizes the SKOSConcept, SKOSSearcher,
 * and SKOSTagger interfaces to provide JAX-RS based RESTful access
 * to SKOSConcept objects and their attributes, and to query a scheme
 * to find matching concepts.
 *
 * The basic URI pattern for all methods in the class is
 * 'schemes/{schemeName}/concepts'.
 *
 * Many, though not all, of the methods return "application/xml" output,
 * so this is the default Content-Type. Other methods return "text/plain"
 * and these use the @Produces annotation to override the default setting.
 *
 * @author dcosta
 */
@Produces("application/xml")
@Path("schemes/{schemeName}/concepts")
public class ConceptsResource {

  /*
   * Class fields
   */

  private static final String XML_DECLARATION =
    "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n";

  /*
   * Instance fields
   */

  private Logger logger = Logger.getLogger(ConceptsResource.class);

  /*
   * Class methods
   */

  /**
   * Utility method to transform a list of SKOSConcept objects into XML.
   *
   * NOTE(review): values are embedded without XML escaping; this is only safe
   * while getSKOSFormat() yields well-formed XML — confirm upstream.
   *
   * @param skosConcepts the list of SKOSConcept objects; null entries are skipped
   * @return xmlString the XML string
   */
  public static String conceptListToXML(List<SKOSConcept> skosConcepts) {
    StringBuilder xmlStringBuilder = new StringBuilder(XML_DECLARATION);
    xmlStringBuilder.append("<SKOSConcepts>\n");
    for (SKOSConcept skosConcept : skosConcepts) {
      if (skosConcept != null) {
        xmlStringBuilder.append("<SKOSConcept>\n");
        xmlStringBuilder.append(skosConcept.getSKOSFormat()).append("\n");
        xmlStringBuilder.append("</SKOSConcept>\n");
      }
    }
    xmlStringBuilder.append("</SKOSConcepts>");
    return xmlStringBuilder.toString();
  }

  /**
   * Utility method to transform a map of String keys and QName values
   * into XML. Entries with null QName values are skipped.
   *
   * @param treeMap the map of String keys (preferred labels) and QName values
   * @return xmlString the XML string
   */
  public static String conceptTreeMapToXML(TreeMap<String, QName> treeMap) {
    StringBuilder xmlStringBuilder = new StringBuilder(XML_DECLARATION);
    xmlStringBuilder.append("<concepts>\n");
    // iterate entries rather than keySet+get: one lookup per entry
    for (Map.Entry<String, QName> entry : treeMap.entrySet()) {
      QName qName = entry.getValue();
      if (qName != null) {
        xmlStringBuilder.append("<concept>\n");
        xmlStringBuilder.append(" <prefLabel>" + entry.getKey() + "</prefLabel>\n");
        xmlStringBuilder.append(" <QName>\n");
        xmlStringBuilder.append("  <namespaceURI>" + qName.getNamespaceURI() + "</namespaceURI>\n");
        xmlStringBuilder.append("  <prefix>" + qName.getPrefix() + "</prefix>\n");
        xmlStringBuilder.append("  <localPart>" + qName.getLocalPart() + "</localPart>\n");
        xmlStringBuilder.append("  <string>" + qName.toString() + "</string>\n");
        xmlStringBuilder.append(" </QName>\n");
        xmlStringBuilder.append("</concept>\n");
      }
    }
    xmlStringBuilder.append("</concepts>");
    return xmlStringBuilder.toString();
  }

  /**
   * Utility method to transform a QName to XML.
   *
   * @param qName the QName object to be transformed; may be null
   * @return xmlString the XML string, or "" when qName is null
   */
  public static String qNameToXML(QName qName) {
    String xmlString = "";
    if (qName != null) {
      StringBuilder xmlStringBuilder = new StringBuilder(XML_DECLARATION);
      xmlStringBuilder.append(" <QName>\n");
      xmlStringBuilder.append("  <namespaceURI>" + qName.getNamespaceURI() + "</namespaceURI>\n");
      xmlStringBuilder.append("  <prefix>" + qName.getPrefix() + "</prefix>\n");
      xmlStringBuilder.append("  <localPart>" + qName.getLocalPart() + "</localPart>\n");
      xmlStringBuilder.append("  <string>" + qName.toString() + "</string>\n");
      xmlStringBuilder.append(" </QName>");
      xmlString = xmlStringBuilder.toString();
    }
    return xmlString;
  }

  /*
   * Constructors
   */

  /*
   * Instance methods
   */

  /**
   * Modifies the schemaURI to make it compatible with the format expected
   * by the SKOSSearcher object, e.g. "lter" -> "lter#".
   *
   * @param schemaURI the original schema URI; may be null
   * @param schemeName the scheme name, e.g. "lter" (currently unused; kept for
   *                   interface compatibility)
   * @return the modified schema URI, or schemaURI unchanged when it is null or
   *         already ends with '#'
   */
  static String modifySchemaURI(String schemaURI, String schemeName) {
    // Fix: the original constructed new StringBuffer(schemaURI) before the null
    // check, so a null argument threw NullPointerException and the check was dead.
    if (schemaURI == null || schemaURI.endsWith("#")) {
      return schemaURI;
    }
    return schemaURI + "#";
  }

  /**
   * Resolves a scheme name to its trimmed, '#'-terminated schema URI,
   * or null when the scheme is unknown.
   */
  private String resolveSchemaURI(String schemeName) {
    SchemesResource schemesResource = new SchemesResource();
    String schemaURI = schemesResource.getSchemaURI(schemeName);
    if (schemaURI == null) {
      return null;
    }
    return modifySchemaURI(schemaURI.trim(), schemeName);
  }

  /**
   * Looks up a concept by scheme name and localPart id. Returns null when the
   * scheme cannot be resolved, the searcher is unavailable, or no concept matches.
   */
  private SKOSConcept findConcept(String schemeName, String localPart) {
    String schemaURI = resolveSchemaURI(schemeName);
    if (schemaURI != null) {
      SKOSSearcher skosSearcher = ConfigurationListener.getSKOSSearcher();
      if (skosSearcher != null) {
        return skosSearcher.searchConceptByURI(schemaURI, localPart);
      }
    }
    return null;
  }

  /**
   * Gets a list of alternate labels using the SKOSConcept.getAltLabels()
   * method.
   *
   * @param schemeName the scheme name, e.g. "nbii"
   * @param localPart id of the concept to be accessed, e.g. "285"
   * @return a list of alternate labels, one per line
   */
  @GET
  @Path("{localPart}/altLabels")
  @Produces("text/plain")
  public String getAltLabels(@PathParam("schemeName") String schemeName,
                             @PathParam("localPart") String localPart) {
    StringBuilder altLabels = new StringBuilder();
    SKOSConcept skosConcept = findConcept(schemeName, localPart);
    if (skosConcept != null) {
      for (String altLabel : skosConcept.getAltLabels()) {
        altLabels.append(altLabel).append("\n");
      }
    }
    return altLabels.toString().trim();
  }

  /**
   * Gets an XML representation of the list of broader concepts corresponding
   * to a given concept.
   *
   * @param schemeName the scheme name, e.g. "nbii"
   * @param localPart id of the concept to be accessed, e.g. "285"
   * @return an XML string representing the list of broader concepts
   */
  @GET
  @Path("{localPart}/broaders")
  public String getBroaders(@PathParam("schemeName") String schemeName,
                            @PathParam("localPart") String localPart) {
    String broadersXML = "";
    SKOSConcept skosConcept = findConcept(schemeName, localPart);
    if (skosConcept != null) {
      broadersXML = conceptTreeMapToXML(skosConcept.getBroaders());
    }
    return broadersXML;
  }

  /**
   * Gets an XML representation of the list of child concepts corresponding
   * to a given concept.
   *
   * @param schemeName the scheme name, e.g. "nbii"
   * @param localPart id of the concept to be accessed, e.g. "285"
   * @return an XML string representing the list of child concepts
   */
  @GET
  @Path("{localPart}/children")
  public String getChildren(@PathParam("schemeName") String schemeName,
                            @PathParam("localPart") String localPart) {
    String childrenXML = "";
    String schemaURI = resolveSchemaURI(schemeName);
    if (schemaURI != null) {
      SKOSSearcher skosSearcher = ConfigurationListener.getSKOSSearcher();
      if (skosSearcher != null) {
        TreeMap<String, QName> skosConceptMap =
          skosSearcher.searchChildrenByURI(schemaURI, localPart);
        if (skosConceptMap != null) {
          childrenXML = conceptTreeMapToXML(skosConceptMap);
        }
      }
    }
    return childrenXML;
  }

  /**
   * Gets an XML representation of the list of narrower concepts corresponding
   * to a given concept.
   *
   * @param schemeName the scheme name, e.g. "nbii"
   * @param localPart id of the concept to be accessed, e.g. "285"
   * @return an XML string representing the list of narrower concepts
   */
  @GET
  @Path("{localPart}/narrowers")
  public String getNarrowers(@PathParam("schemeName") String schemeName,
                             @PathParam("localPart") String localPart) {
    String narrowersXML = "";
    SKOSConcept skosConcept = findConcept(schemeName, localPart);
    if (skosConcept != null) {
      narrowersXML = conceptTreeMapToXML(skosConcept.getNarrowers());
    }
    return narrowersXML;
  }

  /**
   * Gets the preferred label value for a given concept.
   *
   * @param schemeName the scheme name, e.g. "nbii"
   * @param localPart id of the concept to be accessed, e.g. "285"
   * @return the preferred label string, or null when the concept is not found
   */
  @GET
  @Path("{localPart}/prefLabel")
  @Produces("text/plain")
  public String getPrefLabel(@PathParam("schemeName") String schemeName,
                             @PathParam("localPart") String localPart) {
    String prefLabel = null;
    SKOSConcept skosConcept = findConcept(schemeName, localPart);
    if (skosConcept != null) {
      prefLabel = skosConcept.getPrefLabel();
    }
    return prefLabel;
  }

  /**
   * Gets the list of preferred labels for a given scheme.
   *
   * @param schemeName the scheme name, e.g. "nbii"
   * @return a list of preferred labels, one per line
   */
  @GET
  @Path("prefLabels")
  @Produces("text/plain")
  public String getPrefLabels(@PathParam("schemeName") String schemeName) {
    StringBuilder prefLabelsBuilder = new StringBuilder();
    SKOSServer skosServer = ConfigurationListener.getSKOSServer();
    if (schemeName != null && skosServer != null) {
      TreeMap<String, SKOSScheme> skosSchemes = skosServer.getSKOSSchemas();
      for (Map.Entry<String, SKOSScheme> entry : skosSchemes.entrySet()) {
        // scheme names are matched case-insensitively
        if (entry.getKey().equalsIgnoreCase(schemeName)) {
          SKOSScheme skosScheme = entry.getValue();
          if (skosScheme != null) {
            Map<String, QName> alphaIndex = skosScheme.getAlphaIndex();
            if (alphaIndex != null) {
              for (String prefLabel : alphaIndex.keySet()) {
                prefLabelsBuilder.append(prefLabel).append("\n");
              }
            }
          }
        }
      }
    }
    return prefLabelsBuilder.toString().trim();
  }

  /**
   * Gets the list of preferred labels for a given scheme where each
   * label starts with the specified letters.
   *
   * @param schemeName the scheme name, e.g. "nbii"
   * @param startLetters the start letters of the labels to be returned
   * @return a list of preferred labels, one per line
   */
  @GET
  @Path("prefLabels/{startLetters}")
  @Produces("text/plain")
  public String getPrefLabelsStartLetters(
      @PathParam("schemeName") String schemeName,
      @PathParam("startLetters") String startLetters
  ) {
    StringBuilder prefLabelsBuilder = new StringBuilder();
    SKOSServer skosServer = ConfigurationListener.getSKOSServer();
    if (schemeName != null && startLetters != null && skosServer != null) {
      TreeMap<String, SKOSScheme> skosSchemes = skosServer.getSKOSSchemas();
      for (Map.Entry<String, SKOSScheme> entry : skosSchemes.entrySet()) {
        if (entry.getKey().equalsIgnoreCase(schemeName)) {
          SKOSScheme skosScheme = entry.getValue();
          if (skosScheme != null) {
            TreeMap<String, QName> alphaIndex =
              skosScheme.getSubAlphaIndex(startLetters);
            if (alphaIndex != null) {
              for (String prefLabel : alphaIndex.keySet()) {
                prefLabelsBuilder.append(prefLabel).append("\n");
              }
            }
          }
        }
      }
    }
    return prefLabelsBuilder.toString().trim();
  }

  /**
   * Gets an XML representation of the QName for a given concept.
   *
   * @param schemeName the scheme name, e.g. "nbii"
   * @param localPart id of the concept to be accessed, e.g. "285"
   * @return an XML string representing the QName of the concept
   */
  @GET
  @Path("{localPart}/QName")
  public String getQName(@PathParam("schemeName") String schemeName,
                         @PathParam("localPart") String localPart) {
    String qNameXML = "";
    SKOSConcept skosConcept = findConcept(schemeName, localPart);
    if (skosConcept != null) {
      qNameXML = qNameToXML(skosConcept.getQName());
    }
    return qNameXML.trim();
  }

  /**
   * Gets an XML representation of the list of related concepts corresponding
   * to a given concept.
   *
   * @param schemeName the scheme name, e.g. "nbii"
   * @param localPart id of the concept to be accessed, e.g. "285"
   * @return an XML string representing the list of related concepts
   */
  @GET
  @Path("{localPart}/relateds")
  public String getRelateds(@PathParam("schemeName") String schemeName,
                            @PathParam("localPart") String localPart) {
    String relatedXML = "";
    SKOSConcept skosConcept = findConcept(schemeName, localPart);
    if (skosConcept != null) {
      relatedXML = conceptTreeMapToXML(skosConcept.getRelated());
    }
    return relatedXML;
  }

  /**
   * Gets the SKOS representation for a given concept based on its localPart
   * identifier.
   *
   * @param schemeName the scheme name, e.g. "nbii"
   * @param localPart id of the concept to be accessed, e.g. "285"
   * @return the concept's SKOS representation in XML format
   */
  @GET
  @Path("{localPart}/SKOSFormat")
  public String getSKOSFormat(@PathParam("schemeName") String schemeName,
                              @PathParam("localPart") String localPart) {
    StringBuilder xmlStringBuilder = new StringBuilder(XML_DECLARATION);
    SKOSConcept skosConcept = findConcept(schemeName, localPart);
    if (skosConcept != null) {
      xmlStringBuilder.append(skosConcept.getSKOSFormat());
    }
    return xmlStringBuilder.toString();
  }

  /**
   * Gets the SKOS representation for a given concept based on its preferred
   * label.
   *
   * @param schemeName the scheme name, e.g. "nbii"
   * @param prefLabel the preferred label, e.g. "Eruptions"
   * @return the concept's SKOS representation in XML format
   */
  @GET
  @Path("concept/SKOSFormat")
  public String getConceptFromPrefLabel(
      @PathParam("schemeName") String schemeName,
      @QueryParam("prefLabel") String prefLabel) {
    StringBuilder xmlStringBuilder = new StringBuilder(XML_DECLARATION);
    if (prefLabel != null) {
      String schemaURI = resolveSchemaURI(schemeName);
      if (schemaURI != null) {
        SKOSSearcher skosSearcher = ConfigurationListener.getSKOSSearcher();
        if (skosSearcher != null) {
          List<SKOSConcept> skosConcepts =
            skosSearcher.searchConceptByKeyword(prefLabel);
          for (SKOSConcept skosConcept : skosConcepts) {
            // keep only exact (case-insensitive) label matches within this scheme
            if (prefLabel.equalsIgnoreCase(skosConcept.getPrefLabel())) {
              QName qName = skosConcept.getQName();
              if (schemaURI.equals(qName.getNamespaceURI())) {
                xmlStringBuilder.append(skosConcept.getSKOSFormat());
              }
            }
          }
        }
      }
    }
    return xmlStringBuilder.toString();
  }

  /**
   * Gets a list of concepts that match a given keyword.
   *
   * @param schemeName the scheme name, e.g. "nbii"
   * @param keyword the keyword to be matched to the concepts
   * @return a list of concepts in XML format, or null when the scheme cannot be
   *         resolved or no keyword was supplied (yields HTTP 204)
   */
  @GET
  @Path("SKOSFormat")
  public String searchConceptsByKeyword(
      @PathParam("schemeName") String schemeName,
      @QueryParam("keyword") String keyword) {
    String xmlString = null;
    String schemaURI = resolveSchemaURI(schemeName);
    if (schemaURI != null && keyword != null) {
      SKOSSearcher skosSearcher = ConfigurationListener.getSKOSSearcher();
      if (skosSearcher != null) {
        List<SKOSConcept> skosConcepts = skosSearcher.searchConceptByKeyword(keyword);
        // Filter matching concepts based on the specified schemeName
        List<SKOSConcept> schemeConcepts = new ArrayList<SKOSConcept>();
        for (SKOSConcept skosConcept : skosConcepts) {
          QName qName = skosConcept.getQName();
          if (schemaURI.equals(qName.getNamespaceURI())) {
            schemeConcepts.add(skosConcept);
          }
        }
        xmlString = conceptListToXML(schemeConcepts);
      }
    }
    return xmlString;
  }

  /**
   * Analyzes a document to search for tags that match a given vocabulary
   * and returns the matching tags as a list of vocabulary concepts.
   *
   * @param schemeName the scheme name, e.g. "nbii"
   * @param algorithm the tagging algorithm to use
   * @param file the file to be analyzed; deleted after processing
   * @return a list of SKOS concepts in XML format
   */
  @PUT
  @Path("tags/SKOSFormat")
  public String tagDocument(
      @PathParam("schemeName") String schemeName,
      @QueryParam("algorithm") String algorithm,
      File file)
  {
    String xmlString = "";
    if (file != null) {
      String inputFilePath = file.getAbsolutePath();
      logger.debug("inputFilePath: " + inputFilePath);
      List<String> vocabularyList = new ArrayList<String>();
      vocabularyList.add(schemeName);
      SKOSSearcher skosSearcher = ConfigurationListener.getSKOSSearcher();
      SKOSTagger skosTagger = ConfigurationListener.getSKOSTagger(algorithm);
      if (skosSearcher != null && skosTagger != null) {
        List<SKOSConcept> skosConcepts =
          skosTagger.getTags(inputFilePath, vocabularyList, skosSearcher, 10);
        xmlString = conceptListToXML(skosConcepts);
      }
      // best effort cleanup of the uploaded temp file; failure is non-fatal
      file.delete();
    }
    return xmlString;
  }

}
| |
/*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.plugin.api.logging;
import com.thoughtworks.go.plugin.internal.api.LoggingService;
import java.lang.reflect.Field;
/**
* Logger for use by plugin developers.
*
* @author Go Team
* @see <a href="https://developer.gocd.org/current/writing_go_plugins/go_plugins_basics.html" target="_blank">Go Plugin Documentation</a>
*/
public class Logger {
    // Resolved once at construction time and never reassigned.
    private final String pluginId;
    // Injected by the plugin framework via initialize(); until then every
    // logging call falls back to System.out (debug/info) or System.err (warn/error).
    private static LoggingService loggingService;
    private final String loggerName;

    /**
     * Creates a logger for the given class, resolving the owning plugin's id
     * reflectively from its DefaultGoPluginActivator. Falls back to the id
     * "UNKNOWN" when the activator class cannot be loaded.
     *
     * @param loggerClass the class after which the logger is named.
     * @return a logger scoped to the class and its plugin.
     */
    public static Logger getLoggerFor(Class loggerClass) {
        String id;
        try {
            Class<?> defaultGoPluginActivator = loggerClass.getClassLoader().loadClass("com.thoughtworks.go.plugin.activation.DefaultGoPluginActivator");
            id = (String) getStaticField(defaultGoPluginActivator, "pluginId");
        } catch (Exception e) {
            id = "UNKNOWN";
            System.err.println("Could not find pluginId for logger: " + loggerClass.toString());
        }
        return getLoggerFor(loggerClass, id);
    }

    /**
     * Creates a logger for the given class with an explicitly supplied plugin id.
     *
     * @param loggerClass the class after which the logger is named.
     * @param pluginId the id of the plugin the logger belongs to.
     * @return a logger scoped to the class and plugin id.
     */
    public static Logger getLoggerFor(Class loggerClass, String pluginId) {
        return new Logger(loggerClass.getName(), pluginId);
    }

    /**
     * Installs the framework-provided logging backend used by all loggers.
     *
     * @param loggingService the logging backend supplied by the Go server.
     */
    public static void initialize(LoggingService loggingService) {
        Logger.loggingService = loggingService;
    }

    private Logger(String loggerName, String pluginId) {
        this.loggerName = loggerName;
        this.pluginId = pluginId;
    }

    /**
     * Messages to be logged in debug mode.
     *
     * @param message a string containing the message to be logged.
     */
    public void debug(String message) {
        if (loggingService == null) {
            System.out.println(message);
            return;
        }
        loggingService.debug(pluginId, loggerName, message);
    }

    /**
     * Messages to be logged in debug mode.
     *
     * @param message a string containing the message to be logged.
     * @param throwable the exception to log alongside the message.
     */
    public void debug(String message, Throwable throwable) {
        if (loggingService == null) {
            System.out.println(message);
            return;
        }
        loggingService.debug(pluginId, loggerName, message, throwable);
    }

    /**
     * Messages to be logged in debug mode according to the specified format
     * and argument.
     *
     * <p>This form avoids unnecessary object creation when the logger
     * is disabled for the DEBUG level. </p>
     *
     * @param message the format string.
     * @param arg the argument
     */
    public void debug(String message, Object arg) {
        if (loggingService == null) {
            System.out.println(message);
            return;
        }
        loggingService.debug(pluginId, loggerName, message, arg);
    }

    /**
     * Messages to be logged in debug mode according to the specified format
     * and arguments.
     *
     * <p>This form avoids unnecessary object creation when the logger
     * is disabled for the DEBUG level. </p>
     *
     * @param message the format string.
     * @param arg1 the first argument
     * @param arg2 the second argument
     */
    public void debug(String message, Object arg1, Object arg2) {
        if (loggingService == null) {
            System.out.println(message);
            return;
        }
        loggingService.debug(pluginId, loggerName, message, arg1, arg2);
    }

    /**
     * Messages to be logged in debug mode according to the specified format
     * and arguments.
     *
     * <p>This form avoids unnecessary object creation when the logger
     * is disabled for the DEBUG level. </p>
     *
     * @param message the format string.
     * @param arguments a list of 3 or more arguments
     */
    public void debug(String message, Object... arguments) {
        if (loggingService == null) {
            System.out.println(message);
            return;
        }
        loggingService.debug(pluginId, loggerName, message, arguments);
    }

    /**
     * Messages to be logged in info mode.
     *
     * @param message a string containing the message to be logged.
     */
    public void info(String message) {
        if (loggingService == null) {
            System.out.println(message);
            return;
        }
        loggingService.info(pluginId, loggerName, message);
    }

    /**
     * Messages to be logged in info mode.
     *
     * @param message a string containing the message to be logged.
     * @param throwable the exception to log alongside the message.
     */
    public void info(String message, Throwable throwable) {
        if (loggingService == null) {
            System.out.println(message);
            return;
        }
        loggingService.info(pluginId, loggerName, message, throwable);
    }

    /**
     * Messages to be logged in info mode according to the specified format
     * and argument.
     *
     * <p>This form avoids unnecessary object creation when the logger
     * is disabled for the INFO level. </p>
     *
     * @param message the format string.
     * @param arg the argument
     */
    public void info(String message, Object arg) {
        if (loggingService == null) {
            System.out.println(message);
            return;
        }
        loggingService.info(pluginId, loggerName, message, arg);
    }

    /**
     * Messages to be logged in info mode according to the specified format
     * and arguments.
     *
     * <p>This form avoids unnecessary object creation when the logger
     * is disabled for the INFO level. </p>
     *
     * @param message the format string.
     * @param arg1 the first argument
     * @param arg2 the second argument
     */
    public void info(String message, Object arg1, Object arg2) {
        if (loggingService == null) {
            System.out.println(message);
            return;
        }
        loggingService.info(pluginId, loggerName, message, arg1, arg2);
    }

    /**
     * Messages to be logged in info mode according to the specified format
     * and arguments.
     *
     * <p>This form avoids unnecessary object creation when the logger
     * is disabled for the INFO level. </p>
     *
     * @param message the format string.
     * @param arguments a list of 3 or more arguments
     */
    public void info(String message, Object... arguments) {
        if (loggingService == null) {
            System.out.println(message);
            return;
        }
        loggingService.info(pluginId, loggerName, message, arguments);
    }

    /**
     * Messages to be logged in warn mode.
     *
     * @param message a string containing the message to be logged.
     */
    public void warn(String message) {
        if (loggingService == null) {
            System.err.println(message);
            return;
        }
        loggingService.warn(pluginId, loggerName, message);
    }

    /**
     * Messages to be logged in warn mode.
     *
     * @param message a string containing the message to be logged.
     * @param throwable the exception to log alongside the message.
     */
    public void warn(String message, Throwable throwable) {
        if (loggingService == null) {
            System.err.println(message);
            return;
        }
        loggingService.warn(pluginId, loggerName, message, throwable);
    }

    /**
     * Messages to be logged in warn mode according to the specified format
     * and argument.
     *
     * <p>This form avoids unnecessary object creation when the logger
     * is disabled for the WARN level. </p>
     *
     * @param message the format string.
     * @param arg the argument
     */
    public void warn(String message, Object arg) {
        // Fallback goes to System.err for consistency with warn(String):
        // the formatted overloads previously (and inconsistently) used System.out.
        if (loggingService == null) {
            System.err.println(message);
            return;
        }
        loggingService.warn(pluginId, loggerName, message, arg);
    }

    /**
     * Messages to be logged in warn mode according to the specified format
     * and arguments.
     *
     * <p>This form avoids unnecessary object creation when the logger
     * is disabled for the WARN level. </p>
     *
     * @param message the format string.
     * @param arg1 the first argument
     * @param arg2 the second argument
     */
    public void warn(String message, Object arg1, Object arg2) {
        if (loggingService == null) {
            System.err.println(message);
            return;
        }
        loggingService.warn(pluginId, loggerName, message, arg1, arg2);
    }

    /**
     * Messages to be logged in warn mode according to the specified format
     * and arguments.
     *
     * <p>This form avoids unnecessary object creation when the logger
     * is disabled for the WARN level. </p>
     *
     * @param message the format string.
     * @param arguments a list of 3 or more arguments
     */
    public void warn(String message, Object... arguments) {
        if (loggingService == null) {
            System.err.println(message);
            return;
        }
        loggingService.warn(pluginId, loggerName, message, arguments);
    }

    /**
     * Messages to be logged in error mode.
     *
     * @param message a string containing the message to be logged.
     */
    public void error(String message) {
        if (loggingService == null) {
            System.err.println(message);
            return;
        }
        loggingService.error(pluginId, loggerName, message);
    }

    /**
     * Messages to be logged in error mode.
     *
     * @param message a string containing the message to be logged.
     * @param throwable the exception to log alongside the message.
     */
    public void error(String message, Throwable throwable) {
        if (loggingService == null) {
            System.err.println(message);
            return;
        }
        loggingService.error(pluginId, loggerName, message, throwable);
    }

    /**
     * Messages to be logged in error mode according to the specified format
     * and argument.
     *
     * <p>This form avoids unnecessary object creation when the logger
     * is disabled for the ERROR level. </p>
     *
     * @param message the format string.
     * @param arg the argument
     */
    public void error(String message, Object arg) {
        // Fallback goes to System.err for consistency with error(String):
        // the formatted overloads previously (and inconsistently) used System.out.
        if (loggingService == null) {
            System.err.println(message);
            return;
        }
        loggingService.error(pluginId, loggerName, message, arg);
    }

    /**
     * Messages to be logged in error mode according to the specified format
     * and arguments.
     *
     * <p>This form avoids unnecessary object creation when the logger
     * is disabled for the ERROR level. </p>
     *
     * @param message the format string.
     * @param arg1 the first argument
     * @param arg2 the second argument
     */
    public void error(String message, Object arg1, Object arg2) {
        if (loggingService == null) {
            System.err.println(message);
            return;
        }
        loggingService.error(pluginId, loggerName, message, arg1, arg2);
    }

    /**
     * Messages to be logged in error mode according to the specified format
     * and arguments.
     *
     * <p>This form avoids unnecessary object creation when the logger
     * is disabled for the ERROR level. </p>
     *
     * @param message the format string.
     * @param arguments a list of 3 or more arguments
     */
    public void error(String message, Object... arguments) {
        if (loggingService == null) {
            System.err.println(message);
            return;
        }
        loggingService.error(pluginId, loggerName, message, arguments);
    }

    /**
     * Reads a static field reflectively; used to pull the pluginId constant
     * off the plugin's activator class, which lives in a different classloader.
     */
    private static Object getStaticField(Class kls, String name) {
        try {
            Field field = kls.getDeclaredField(name);
            field.setAccessible(true);
            return field.get(null);
        } catch (Exception e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }
    }
}
| |
/*
* Copyright 2002-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.izforge.izpack.util.file.types;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.Enumeration;
import java.util.Vector;
import java.util.logging.Logger;
import com.izforge.izpack.util.file.DirectoryScanner;
import com.izforge.izpack.util.file.FileScanner;
import com.izforge.izpack.util.file.types.selectors.*;
/**
* Class that holds an implicit patternset and supports nested
* patternsets and creates a DirectoryScanner using these patterns.
* <p>Common base class for DirSet and FileSet.</p>
*/
public class FileSet extends DataType
        implements Cloneable, SelectorContainer
{
    private static final Logger logger = Logger.getLogger(FileSet.class.getName());

    // These three collections are created once and only mutated in place,
    // so they can safely be final.
    private final PatternSet defaultPatterns = new PatternSet();
    private final Vector<PatternSet> additionalPatterns = new Vector<PatternSet>();
    private final Vector<FileSelector> selectors = new Vector<FileSelector>();

    private File dir;
    private boolean useDefaultExcludes = true;
    private boolean isCaseSensitive = true;
    private boolean followSymlinks = true;

    /**
     * Construct a new <code>FileSet</code>.
     */
    public FileSet()
    {
        super();
    }

    /**
     * Sets the base-directory for this instance.
     *
     * @param dir the directory's <code>File</code> instance.
     */
    public void setDir(File dir) throws Exception
    {
        this.dir = dir;
    }

    /**
     * Retrieves the base-directory for this instance.
     *
     * @return <code>File</code>.
     */
    public File getDir()
    {
        return dir;
    }

    /**
     * Add a name entry to the include list.
     *
     * @return <code>PatternSet.NameEntry</code>.
     */
    public PatternSet.NameEntry createInclude()
    {
        return defaultPatterns.createInclude();
    }

    /**
     * Add a name entry to the exclude list.
     *
     * @return <code>PatternSet.NameEntry</code>.
     */
    public PatternSet.NameEntry createExclude()
    {
        return defaultPatterns.createExclude();
    }

    /**
     * Creates a single file fileset.
     *
     * @param file the single <code>File</code> included in this
     *             <code>AbstractFileSet</code>.
     * @throws FileNotFoundException if the file does not exist.
     */
    public void setFile(File file) throws Exception
    {
        if (!file.exists())
        {
            throw new FileNotFoundException("File " + file + " not found");
        }
        setDir(file.getParentFile());
        createInclude().setName(file.getName());
    }

    /**
     * Appends <code>includes</code> to the current list of include
     * patterns.
     * <p>Patterns may be separated by a comma or a space.</p>
     *
     * @param includes the <code>String</code> containing the include patterns.
     */
    public void setIncludes(String includes)
    {
        defaultPatterns.setIncludes(includes);
    }

    /**
     * Appends <code>excludes</code> to the current list of exclude
     * patterns.
     * <p/>
     * <p>Patterns may be separated by a comma or a space.</p>
     *
     * @param excludes the <code>String</code> containing the exclude patterns.
     */
    public void setExcludes(String excludes)
    {
        defaultPatterns.setExcludes(excludes);
    }

    /**
     * Sets whether default exclusions should be used or not.
     *
     * @param useDefaultExcludes <code>boolean</code>.
     */
    public void setDefaultexcludes(boolean useDefaultExcludes)
    {
        this.useDefaultExcludes = useDefaultExcludes;
    }

    /**
     * Whether default exclusions should be used or not.
     */
    public boolean getDefaultexcludes()
    {
        return useDefaultExcludes;
    }

    /**
     * Sets case sensitivity of the file system.
     *
     * @param isCaseSensitive <code>boolean</code>.
     */
    public void setCaseSensitive(boolean isCaseSensitive)
    {
        this.isCaseSensitive = isCaseSensitive;
    }

    /**
     * Sets whether or not symbolic links should be followed.
     *
     * @param followSymlinks whether or not symbolic links should be followed.
     */
    public void setFollowSymlinks(boolean followSymlinks)
    {
        this.followSymlinks = followSymlinks;
    }

    /**
     * Find out if the fileset wants to follow symbolic links.
     *
     * @return <code>boolean</code> indicating whether symbolic links
     *         should be followed.
     * @since Ant 1.6
     */
    public boolean isFollowSymlinks()
    {
        return followSymlinks;
    }

    /**
     * Returns the directory scanner needed to access the files to process.
     *
     * @return a <code>DirectoryScanner</code> instance, already scanned.
     * @throws Exception if no base directory is set, or it does not exist,
     *                   or it is not a directory.
     */
    public DirectoryScanner getDirectoryScanner() throws Exception
    {
        if (dir == null)
        {
            throw new Exception("No directory specified for fileset");
        }
        if (!dir.exists())
        {
            throw new Exception(dir.getAbsolutePath() + " not found.");
        }
        if (!dir.isDirectory())
        {
            throw new Exception(dir.getAbsolutePath()
                    + " is not a directory.");
        }
        DirectoryScanner ds = new DirectoryScanner();
        setupDirectoryScanner(ds);
        ds.setFollowSymlinks(followSymlinks);
        ds.scan();
        return ds;
    }

    /**
     * Set up the specified directory scanner against the specified project.
     *
     * @param ds a <code>FileScanner</code> instance.
     * @throws IllegalArgumentException if {@code ds} is null.
     */
    public void setupDirectoryScanner(FileScanner ds)
    {
        if (ds == null)
        {
            throw new IllegalArgumentException("ds cannot be null");
        }
        ds.setBasedir(dir);
        // Merge any nested patternsets into the implicit one. NOTE(review):
        // this merge happens on every call, so invoking this method twice on
        // the same instance appends the additional patterns twice — verify
        // callers only set up a scanner once per fileset.
        for (PatternSet patternSet : additionalPatterns)
        {
            defaultPatterns.append(patternSet);
        }
        logger.fine("Fileset setup scanner in dir " + dir
                + " with " + defaultPatterns);
        ds.setIncludes(defaultPatterns.getIncludePatterns());
        ds.setExcludes(defaultPatterns.getExcludePatterns());
        if (ds instanceof SelectorScanner)
        {
            SelectorScanner ss = (SelectorScanner) ds;
            ss.setSelectors(getSelectors());
        }
        if (useDefaultExcludes)
        {
            ds.addDefaultExcludes();
        }
        ds.setCaseSensitive(isCaseSensitive);
    }

    // SelectorContainer methods

    /**
     * Indicates whether there are any selectors here.
     *
     * @return whether any selectors are in this container.
     */
    @Override
    public boolean hasSelectors()
    {
        return !(selectors.isEmpty());
    }

    /**
     * Indicates whether there are any patterns here.
     *
     * @return whether any patterns are in this container.
     */
    public boolean hasPatterns()
    {
        if (defaultPatterns.hasPatterns())
        {
            return true;
        }
        for (PatternSet ps : additionalPatterns)
        {
            if (ps.hasPatterns())
            {
                return true;
            }
        }
        return false;
    }

    /**
     * Gives the count of the number of selectors in this container.
     *
     * @return the number of selectors in this container as an <code>int</code>.
     */
    @Override
    public int selectorCount()
    {
        return selectors.size();
    }

    /**
     * Returns the set of selectors as an array.
     *
     * @return a <code>FileSelector[]</code> of the selectors in this container.
     */
    @Override
    public FileSelector[] getSelectors()
    {
        return selectors.toArray(new FileSelector[selectors.size()]);
    }

    /**
     * Returns an enumerator for accessing the set of selectors.
     *
     * @return an <code>Enumeration</code> of selectors.
     */
    @Override
    public Enumeration<FileSelector> selectorElements()
    {
        return selectors.elements();
    }

    /**
     * Add a new selector into this container.
     *
     * @param selector the new <code>FileSelector</code> to add.
     */
    @Override
    public void appendSelector(FileSelector selector)
    {
        selectors.addElement(selector);
    }

    /* Methods below all add specific selectors */

    /**
     * Add an "And" selector entry on the selector list.
     *
     * @param selector the <code>AndSelector</code> to add.
     */
    @Override
    public void addAnd(AndSelector selector)
    {
        appendSelector(selector);
    }

    /**
     * Add an "Or" selector entry on the selector list.
     *
     * @param selector the <code>OrSelector</code> to add.
     */
    @Override
    public void addOr(OrSelector selector)
    {
        appendSelector(selector);
    }

    /**
     * Add a "Not" selector entry on the selector list.
     *
     * @param selector the <code>NotSelector</code> to add.
     */
    @Override
    public void addNot(NotSelector selector)
    {
        appendSelector(selector);
    }

    /**
     * Add a "None" selector entry on the selector list.
     *
     * @param selector the <code>NoneSelector</code> to add.
     */
    @Override
    public void addNone(NoneSelector selector)
    {
        appendSelector(selector);
    }

    /**
     * Add a majority selector entry on the selector list.
     *
     * @param selector the <code>MajoritySelector</code> to add.
     */
    @Override
    public void addMajority(MajoritySelector selector)
    {
        appendSelector(selector);
    }

    /**
     * Add a selector date entry on the selector list.
     *
     * @param selector the <code>DateSelector</code> to add.
     */
    @Override
    public void addDate(DateSelector selector)
    {
        appendSelector(selector);
    }

    /**
     * Add a selector size entry on the selector list.
     *
     * @param selector the <code>SizeSelector</code> to add.
     */
    @Override
    public void addSize(SizeSelector selector)
    {
        appendSelector(selector);
    }

    /**
     * Add a DifferentSelector entry on the selector list.
     *
     * @param selector the <code>DifferentSelector</code> to add.
     */
    @Override
    public void addDifferent(DifferentSelector selector)
    {
        appendSelector(selector);
    }

    /**
     * Add a selector filename entry on the selector list.
     *
     * @param selector the <code>FilenameSelector</code> to add.
     */
    @Override
    public void addFilename(FilenameSelector selector)
    {
        appendSelector(selector);
    }

    /**
     * Add a selector type entry on the selector list.
     *
     * @param selector the <code>TypeSelector</code> to add.
     */
    @Override
    public void addType(TypeSelector selector)
    {
        appendSelector(selector);
    }

    /**
     * Add an extended selector entry on the selector list.
     *
     * @param selector the <code>ExtendSelector</code> to add.
     */
    @Override
    public void addCustom(ExtendSelector selector)
    {
        appendSelector(selector);
    }

    /**
     * Add a contains selector entry on the selector list.
     *
     * @param selector the <code>ContainsSelector</code> to add.
     */
    @Override
    public void addContains(ContainsSelector selector)
    {
        appendSelector(selector);
    }

    /**
     * Add a present selector entry on the selector list.
     *
     * @param selector the <code>PresentSelector</code> to add.
     */
    @Override
    public void addPresent(PresentSelector selector)
    {
        appendSelector(selector);
    }

    /**
     * Add a depth selector entry on the selector list.
     *
     * @param selector the <code>DepthSelector</code> to add.
     */
    @Override
    public void addDepth(DepthSelector selector)
    {
        appendSelector(selector);
    }

    /**
     * Add a depends selector entry on the selector list.
     *
     * @param selector the <code>DependSelector</code> to add.
     */
    @Override
    public void addDepend(DependSelector selector)
    {
        appendSelector(selector);
    }

    /**
     * Add a regular expression selector entry on the selector list.
     *
     * @param selector the <code>ContainsRegexpSelector</code> to add.
     */
    @Override
    public void addContainsRegexp(ContainsRegexpSelector selector)
    {
        appendSelector(selector);
    }

    /**
     * Add an arbitrary selector.
     *
     * @param selector the <code>FileSelector</code> to add.
     */
    @Override
    public void add(FileSelector selector)
    {
        appendSelector(selector);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package groovy.util;
import groovy.lang.*;
import org.codehaus.groovy.runtime.*;
import org.codehaus.groovy.runtime.memoize.LRUCache;
import org.codehaus.groovy.runtime.typehandling.GroovyCastException;
import org.codehaus.groovy.transform.trait.Traits;
import java.lang.ref.WeakReference;
import java.lang.reflect.Modifier;
import java.util.*;
/**
* Classes to generate 'Proxy' objects which implement interfaces,
* maps of closures and/or extend classes/delegates.
*
* @author Paul King
* @author Guillaume Laforge
* @author Cedric Champeau
*/
public class ProxyGenerator {
    private static final Class[] EMPTY_INTERFACE_ARRAY = new Class[0];
    private static final Map<Object,Object> EMPTY_CLOSURE_MAP = Collections.emptyMap();
    private static final Set<String> EMPTY_KEYSET = Collections.emptySet();

    public static final ProxyGenerator INSTANCE = new ProxyGenerator();

    static {
        // wrap the standard MetaClass with the delegate so that static calls
        // on ProxyGenerator are routed through the shared INSTANCE
        setMetaClass(GroovySystem.getMetaClassRegistry().getMetaClass(ProxyGenerator.class));
    }

    // Optional classloader override; null means "use the base/delegate class's loader".
    private ClassLoader override = null;
    private boolean debug = false;
    private boolean emptyMethods = false;

    /**
     * The adapter cache is used to cache proxy classes. When, for example, a call like:
     * map as MyClass is found, then a lookup is made into the cache to find if a suitable
     * adapter already exists. If so, then the class is reused, instead of generating a
     * new class.
     */
    private final LRUCache adapterCache = new LRUCache(16);

    public boolean getDebug() {
        return debug;
    }

    /**
     * Instructs <code>ProxyGenerator</code> to dump generated Groovy
     * source code to standard output during construction. This is useful
     * for debugging purposes but should be turned off in production.
     *
     * @param debug true if you want generated source to be printed
     */
    public void setDebug(boolean debug) {
        this.debug = debug;
    }

    public boolean getEmptyMethods() {
        return emptyMethods;
    }

    /**
     * Changes generated methods to have empty implementations.
     * <p>
     * Methods in generated aggregates not supplied in a closures map or
     * base class are given 'default' implementations. The implementation
     * will normally throw an <code>UnsupportedOperationException</code>
     * but setting this boolean will leave it empty.
     *
     * @param emptyMethods true if you want generated methods to be empty
     */
    public void setEmptyMethods(boolean emptyMethods) {
        this.emptyMethods = emptyMethods;
    }

    public ClassLoader getOverride() {
        return override;
    }

    public void setOverride(ClassLoader override) {
        this.override = override;
    }

    // --- instantiateAggregate* overloads: convenience entry points that all
    // funnel into instantiateAggregate(Map, List, Class, Object[]) below. ---

    public GroovyObject instantiateAggregateFromBaseClass(Class clazz) {
        return instantiateAggregateFromBaseClass((Map) null, clazz);
    }

    public GroovyObject instantiateAggregateFromBaseClass(Map map, Class clazz) {
        return instantiateAggregateFromBaseClass(map, clazz, null);
    }

    public GroovyObject instantiateAggregateFromBaseClass(Closure cl, Class clazz) {
        // "*" maps the single closure to every method of the generated proxy
        Map<String, Closure> m = new HashMap<String, Closure>();
        m.put("*", cl);
        return instantiateAggregateFromBaseClass(m, clazz, null);
    }

    public GroovyObject instantiateAggregateFromBaseClass(Class clazz, Object[] constructorArgs) {
        return instantiateAggregate(null, null, clazz, constructorArgs);
    }

    public GroovyObject instantiateAggregateFromBaseClass(Map map, Class clazz, Object[] constructorArgs) {
        return instantiateAggregate(map, null, clazz, constructorArgs);
    }

    public GroovyObject instantiateAggregateFromInterface(Class clazz) {
        return instantiateAggregateFromInterface(null, clazz);
    }

    public GroovyObject instantiateAggregateFromInterface(Map map, Class clazz) {
        List<Class> interfaces = new ArrayList<Class>();
        interfaces.add(clazz);
        return instantiateAggregate(map, interfaces);
    }

    public GroovyObject instantiateAggregate(List<Class> interfaces) {
        return instantiateAggregate(null, interfaces);
    }

    public GroovyObject instantiateAggregate(Map closureMap, List<Class> interfaces) {
        return instantiateAggregate(closureMap, interfaces, null);
    }

    public GroovyObject instantiateAggregate(Map closureMap, List<Class> interfaces, Class clazz) {
        return instantiateAggregate(closureMap, interfaces, clazz, null);
    }

    /**
     * Creates a proxy implementing the given interfaces and/or extending the
     * given base class, with behavior supplied by the closure map. Generated
     * adapter classes are cached in {@link #adapterCache} keyed on base class,
     * interfaces, closure-key set, and the emptyMethods flag.
     */
    @SuppressWarnings("unchecked")
    public GroovyObject instantiateAggregate(Map closureMap, List<Class> interfaces, Class clazz, Object[] constructorArgs) {
        if (clazz!=null && Modifier.isFinal(clazz.getModifiers())) {
            throw new GroovyCastException("Cannot coerce a map to class "+clazz.getName()+" because it is a final class");
        }
        Map<Object,Object> map = closureMap!=null?closureMap: EMPTY_CLOSURE_MAP;
        Class[] intfs = interfaces!=null? interfaces.toArray(new Class[interfaces.size()]): EMPTY_INTERFACE_ARRAY;
        // Base class defaults to the first interface, then to Object.
        Class base = clazz;
        if (base==null) {
            if (intfs.length>0) {
                base=intfs[0];
            } else {
                base = Object.class;
            }
        }
        // The cache key only needs the closure-map *keys* (method names), not the closures.
        Set<String> keys = map==EMPTY_CLOSURE_MAP?EMPTY_KEYSET:new HashSet<String>();
        for (Object o : map.keySet()) {
            keys.add(o.toString());
        }
        CacheKey key = new CacheKey(base, Object.class, keys, intfs, emptyMethods, false);
        ProxyGeneratorAdapter adapter = (ProxyGeneratorAdapter) adapterCache.get(key);
        if (adapter==null) {
            adapter = new ProxyGeneratorAdapter(map, base, intfs, base.getClassLoader(), emptyMethods, null);
            adapterCache.put(key, adapter);
        }
        return adapter.proxy(map, constructorArgs);
    }

    // --- instantiateDelegate* overloads: proxies that forward to a delegate object. ---

    public GroovyObject instantiateDelegate(Object delegate) {
        return instantiateDelegate(null, delegate);
    }

    public GroovyObject instantiateDelegate(List<Class> interfaces, Object delegate) {
        return instantiateDelegate(null, interfaces, delegate);
    }

    public GroovyObject instantiateDelegate(Map closureMap, List<Class> interfaces, Object delegate) {
        return instantiateDelegateWithBaseClass(closureMap, interfaces, delegate, null);
    }

    public GroovyObject instantiateDelegateWithBaseClass(Map closureMap, List<Class> interfaces, Object delegate) {
        return instantiateDelegateWithBaseClass(closureMap, interfaces, delegate, delegate.getClass());
    }

    public GroovyObject instantiateDelegateWithBaseClass(Map closureMap, List<Class> interfaces, Object delegate, Class baseClass) {
        return instantiateDelegateWithBaseClass(closureMap, interfaces, delegate, baseClass, null);
    }

    /**
     * Creates a proxy with a delegate object.
     *
     * @param closureMap the closure for methods not handled by the delegate
     * @param interfaces interfaces to be implemented
     * @param delegate the delegate object
     * @param baseClass the base class
     * @param name the name of the proxy, unused, but kept for compatibility with previous versions of Groovy.
     * @return a proxy object implementing the specified interfaces, and delegating to the provided object
     */
    @SuppressWarnings("unchecked")
    public GroovyObject instantiateDelegateWithBaseClass(Map closureMap, List<Class> interfaces, Object delegate, Class baseClass, String name) {
        Map<Object,Object> map = closureMap!=null?closureMap: EMPTY_CLOSURE_MAP;
        Class[] intfs = interfaces!=null? interfaces.toArray(new Class[interfaces.size()]): EMPTY_INTERFACE_ARRAY;
        Class base = baseClass;
        if (base==null) {
            if (intfs.length>0) {
                base=intfs[0];
            } else {
                base = Object.class;
            }
        }
        Set<String> keys = map==EMPTY_CLOSURE_MAP?EMPTY_KEYSET:new HashSet<String>();
        for (Object o : map.keySet()) {
            keys.add(o.toString());
        }
        // Unlike the aggregate path, the delegate's class participates in the
        // cache key (and useDelegate=true), so delegating and non-delegating
        // adapters never collide in the cache.
        CacheKey key = new CacheKey(base, delegate.getClass(), keys, intfs, emptyMethods, true);
        ProxyGeneratorAdapter adapter = (ProxyGeneratorAdapter) adapterCache.get(key);
        if (adapter==null) {
            adapter = new ProxyGeneratorAdapter(map, base, intfs, delegate.getClass().getClassLoader(), emptyMethods, delegate.getClass());
            adapterCache.put(key, adapter);
        }
        return adapter.delegatingProxy(delegate, map, (Object[])null);
    }

    // Replaces ProxyGenerator's meta class so static method invocations are
    // redirected to the shared INSTANCE (Groovy-level convenience).
    private static void setMetaClass(final MetaClass metaClass) {
        final MetaClass newMetaClass = new DelegatingMetaClass(metaClass) {
            @Override
            public Object invokeStaticMethod(Object object, String methodName, Object[] arguments) {
                return InvokerHelper.invokeMethod(INSTANCE, methodName, arguments);
            }
        };
        GroovySystem.getMetaClassRegistry().setMetaClass(ProxyGenerator.class, newMetaClass);
    }

    /**
     * Cache key identifying a generated adapter: base class, delegate class,
     * closure-key set, (sorted) interfaces, and the emptyMethods/useDelegate flags.
     * Classes are held via weak references so cached adapters do not pin
     * classloaders in memory.
     */
    private static final class CacheKey {
        private static final Comparator<Class> INTERFACE_COMPARATOR = new Comparator<Class>() {
            public int compare(final Class o1, final Class o2) {
                // Traits order *must* be preserved
                // See GROOVY-7285
                if (Traits.isTrait(o1)) return -1;
                if (Traits.isTrait(o2)) return 1;
                return o1.getName().compareTo(o2.getName());
            }
        };
        private final boolean emptyMethods;
        private final boolean useDelegate;
        private final Set<String> methods;
        private final ClassReference delegateClass;
        private final ClassReference baseClass;
        private final ClassReference[] interfaces;

        private CacheKey(final Class baseClass, final Class delegateClass, final Set<String> methods, final Class[] interfaces, final boolean emptyMethods, final boolean useDelegate) {
            this.useDelegate = useDelegate;
            this.baseClass = new ClassReference(baseClass);
            this.delegateClass = new ClassReference(delegateClass);
            this.emptyMethods = emptyMethods;
            this.interfaces = interfaces == null ? null : new ClassReference[interfaces.length];
            if (interfaces != null) {
                // Sort a copy so equal interface sets produce equal keys,
                // without mutating the caller's array.
                Class[] interfacesCopy = new Class[interfaces.length];
                System.arraycopy(interfaces, 0, interfacesCopy, 0, interfaces.length);
                Arrays.sort(interfacesCopy, INTERFACE_COMPARATOR);
                for (int i = 0; i < interfacesCopy.length; i++) {
                    Class anInterface = interfacesCopy[i];
                    this.interfaces[i] = new ClassReference(anInterface);
                }
            }
            this.methods = methods;
        }

        @Override
        public boolean equals(final Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            final CacheKey cacheKey = (CacheKey) o;
            if (emptyMethods != cacheKey.emptyMethods) return false;
            if (useDelegate != cacheKey.useDelegate) return false;
            if (baseClass != null ? !baseClass.equals(cacheKey.baseClass) : cacheKey.baseClass != null) return false;
            if (delegateClass != null ? !delegateClass.equals(cacheKey.delegateClass) : cacheKey.delegateClass != null) return false;
            if (!Arrays.equals(interfaces, cacheKey.interfaces)) return false;
            if (methods != null ? !methods.equals(cacheKey.methods) : cacheKey.methods != null) return false;
            return true;
        }

        @Override
        public int hashCode() {
            int result = (emptyMethods ? 1 : 0);
            result = 31 * result + (useDelegate ? 1 : 0);
            result = 31 * result + (methods != null ? methods.hashCode() : 0);
            result = 31 * result + (baseClass != null ? baseClass.hashCode() : 0);
            result = 31 * result + (delegateClass != null ? delegateClass.hashCode() : 0);
            result = 31 * result + (interfaces != null ? Arrays.hashCode(interfaces) : 0);
            return result;
        }

        /**
         * A weak reference which delegates equals and hashcode to the referent.
         */
        private static class ClassReference extends WeakReference<Class> {
            public ClassReference(Class referent) {
                super(referent);
            }

            @Override
            public boolean equals(final Object o) {
                if (this == o) return true;
                if (o == null || getClass() != o.getClass()) return false;
                Class thisClass = this.get();
                ClassReference that = (ClassReference) o;
                // A cleared (garbage-collected) referent never equals anything,
                // so stale keys simply fall out of the LRU cache.
                if (thisClass == null) return false;
                return thisClass.equals(that.get());
            }

            @Override
            public int hashCode() {
                Class thisClass = this.get();
                if (thisClass==null) return 0;
                return thisClass.hashCode();
            }
        }
    }
}
| |
/*
* Copyright 2004-2005 Alexey Efimov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.images.editor.impl;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.*;
import org.intellij.images.editor.ImageDocument;
import org.intellij.images.editor.ImageEditor;
import org.intellij.images.editor.ImageZoomModel;
import org.intellij.images.fileTypes.ImageFileTypeManager;
import org.intellij.images.options.*;
import org.intellij.images.thumbnail.actionSystem.ThumbnailViewActions;
import org.intellij.images.ui.ImageComponent;
import org.intellij.images.vfs.IfsUtil;
import javax.swing.*;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
/**
* Image viewer implementation.
*
* @author <a href="mailto:aefimov.box@gmail.com">Alexey Efimov</a>
*/
/**
 * Image viewer implementation.
 *
 * <p>Wraps an {@code ImageEditorUI} component, keeps its document in sync with the
 * backing {@link VirtualFile}, and reacts to options changes. Acts as a VFS
 * listener (via {@code VirtualFileAdapter}) so the image is reloaded — or the
 * editor closed — when the underlying file changes.
 *
 * @author <a href="mailto:aefimov.box@gmail.com">Alexey Efimov</a>
 */
final class ImageEditorImpl extends VirtualFileAdapter implements ImageEditor {
    // Re-applies chessboard/grid settings whenever the global options change.
    private final PropertyChangeListener optionsChangeListener = new OptionsChangeListener();
    private final Project project;
    private final VirtualFile file;
    private final ImageEditorUI editorUI;
    // Set once in dispose(); queried via isDisposed().
    private boolean disposed;
    ImageEditorImpl(Project project, VirtualFile file) {
        this.project = project;
        this.file = file;
        // Options
        Options options = OptionsManager.getInstance().getOptions();
        editorUI = new ImageEditorUI(this, options.getEditorOptions());
        // Both registrations below are undone in dispose().
        options.addPropertyChangeListener(optionsChangeListener);
        VirtualFileManager.getInstance().addVirtualFileListener(this);
        setValue(file);
    }
    /**
     * Loads the image from {@code file} into the document and applies initial
     * zoom. Any failure while decoding clears the document instead of raising.
     */
    private void setValue(VirtualFile file) {
        ImageDocument document = editorUI.getImageComponent().getDocument();
        try {
            BufferedImage previousImage = document.getValue();
            BufferedImage image = IfsUtil.getImage(file);
            document.setValue(image);
            document.setFormat(IfsUtil.getFormat(file));
            ImageZoomModel zoomModel = getZoomModel();
            // Only (re)apply zoom on first load or while the user hasn't zoomed manually.
            if (image != null && (previousImage == null || !zoomModel.isZoomLevelChanged())) {
                // Set smart zooming behaviour on open
                Options options = OptionsManager.getInstance().getOptions();
                ZoomOptions zoomOptions = options.getEditorOptions().getZoomOptions();
                // Open as actual size
                zoomModel.setZoomFactor(1.0d);
                if (zoomOptions.isSmartZooming()) {
                    Dimension prefferedSize = zoomOptions.getPrefferedSize();
                    if (prefferedSize.width > image.getWidth() && prefferedSize.height > image.getHeight()) {
                        // Resize to preffered size
                        // Calculate zoom factor
                        double factor = (prefferedSize.getWidth() / (double) image.getWidth() + prefferedSize.getHeight() / (double) image.getHeight()) / 2.0d;
                        // NOTE(review): Math.ceil rounds the averaged factor up, which can
                        // zoom the image slightly beyond the preferred size — confirm intended.
                        zoomModel.setZoomFactor(Math.ceil(factor));
                    }
                }
            }
        } catch (Exception e) {
            // Error loading image file
            // NOTE(review): a null file argument also lands here via an exception
            // from IfsUtil — the document is simply cleared; verify that is the
            // intended "no image" path.
            document.setValue(null);
        }
    }
    /** An editor is valid while its document actually holds a decoded image. */
    public boolean isValid() {
        ImageDocument document = editorUI.getImageComponent().getDocument();
        return document.getValue() != null;
    }
    public JComponent getComponent() {
        return editorUI;
    }
    public JComponent getContentComponent() {
        return editorUI.getImageComponent();
    }
    public VirtualFile getFile() {
        return file;
    }
    public Project getProject() {
        return project;
    }
    public ImageDocument getDocument() {
        return editorUI.getImageComponent().getDocument();
    }
    public void setTransparencyChessboardVisible(boolean visible) {
        editorUI.getImageComponent().setTransparencyChessboardVisible(visible);
        editorUI.repaint();
    }
    public boolean isTransparencyChessboardVisible() {
        return editorUI.getImageComponent().isTransparencyChessboardVisible();
    }
    public boolean isEnabledForActionPlace(String place) {
        // Disable for thumbnails action
        return !ThumbnailViewActions.ACTION_PLACE.equals(place);
    }
    public void setGridVisible(boolean visible) {
        editorUI.getImageComponent().setGridVisible(visible);
        editorUI.repaint();
    }
    public boolean isGridVisible() {
        return editorUI.getImageComponent().isGridVisible();
    }
    public boolean isDisposed() {
        return disposed;
    }
    public ImageZoomModel getZoomModel() {
        return editorUI.getZoomModel();
    }
    /** Unregisters both listeners added in the constructor and releases the UI. */
    public void dispose() {
        Options options = OptionsManager.getInstance().getOptions();
        options.removePropertyChangeListener(optionsChangeListener);
        editorUI.dispose();
        VirtualFileManager.getInstance().removeVirtualFileListener(this);
        disposed = true;
    }
    /** VFS callback: reload on rename/property change, close if no longer an image. */
    public void propertyChanged(VirtualFilePropertyEvent event) {
        super.propertyChanged(event);
        if (file.equals(event.getFile())) {
            // Change document
            file.refresh(true, false, new Runnable() {
                public void run() {
                    if (ImageFileTypeManager.getInstance().isImage(file)) {
                        setValue(file);
                    } else {
                        setValue(null);
                        // Close editor
                        FileEditorManager editorManager = FileEditorManager.getInstance(project);
                        editorManager.closeFile(file);
                    }
                }
            });
        }
    }
    /** VFS callback: reload the document when the file contents change on disk. */
    public void contentsChanged(VirtualFileEvent event) {
        super.contentsChanged(event);
        if (file.equals(event.getFile())) {
            // Change document
            file.refresh(true, false, new Runnable() {
                public void run() {
                    setValue(file);
                }
            });
        }
    }
    /** Pushes updated chessboard/grid options into the image component. */
    private class OptionsChangeListener implements PropertyChangeListener {
        public void propertyChange(PropertyChangeEvent evt) {
            Options options = (Options) evt.getSource();
            EditorOptions editorOptions = options.getEditorOptions();
            TransparencyChessboardOptions chessboardOptions = editorOptions.getTransparencyChessboardOptions();
            GridOptions gridOptions = editorOptions.getGridOptions();
            ImageComponent imageComponent = editorUI.getImageComponent();
            imageComponent.setTransparencyChessboardCellSize(chessboardOptions.getCellSize());
            imageComponent.setTransparencyChessboardWhiteColor(chessboardOptions.getWhiteColor());
            imageComponent.setTransparencyChessboardBlankColor(chessboardOptions.getBlackColor());
            imageComponent.setGridLineZoomFactor(gridOptions.getLineZoomFactor());
            imageComponent.setGridLineSpan(gridOptions.getLineSpan());
            imageComponent.setGridLineColor(gridOptions.getLineColor());
        }
    }
}
| |
// Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.client.changes;
import com.google.gerrit.client.Dispatcher;
import com.google.gerrit.client.FormatUtil;
import com.google.gerrit.client.Gerrit;
import com.google.gerrit.client.account.AccountInfo;
import com.google.gerrit.client.rpc.GerritCallback;
import com.google.gerrit.client.ui.CommentPanel;
import com.google.gerrit.client.ui.ComplexDisclosurePanel;
import com.google.gerrit.client.ui.ExpandAllCommand;
import com.google.gerrit.client.ui.LinkMenuBar;
import com.google.gerrit.client.ui.NeedsSignInKeyCommand;
import com.google.gerrit.client.ui.Screen;
import com.google.gerrit.common.data.AccountInfoCache;
import com.google.gerrit.common.data.ChangeDetail;
import com.google.gerrit.common.data.ChangeInfo;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.Change.Status;
import com.google.gerrit.reviewdb.client.ChangeMessage;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.KeyPressEvent;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.i18n.client.LocaleInfo;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.DisclosurePanel;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Grid;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.InlineLabel;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.ListBox;
import com.google.gwt.user.client.ui.Panel;
import com.google.gwtexpui.globalkey.client.GlobalKey;
import com.google.gwtexpui.globalkey.client.KeyCommand;
import com.google.gwtexpui.globalkey.client.KeyCommandSet;
import java.sql.Timestamp;
import java.util.List;
/**
 * Screen showing a single Gerrit change: description, approvals, included-in
 * info, dependencies, patch sets, and the comment thread. Refreshes itself via
 * {@code ChangeDetailCache} and re-renders in {@link #onValueChange}.
 */
public class ChangeScreen extends Screen
    implements ValueChangeHandler<ChangeDetail> {
    private final Change.Id changeId;
    // Patch set to auto-activate on open; null when opened for the change itself.
    private final PatchSet.Id openPatchSetId;
    private ChangeDetailCache detailCache;
    private com.google.gerrit.client.changes.ChangeInfo changeInfo;
    private ChangeDescriptionBlock descriptionBlock;
    private ApprovalTable approvals;
    private IncludedInTable includedInTable;
    private DisclosurePanel includedInPanel;
    private ComplexDisclosurePanel dependenciesPanel;
    private ChangeTable dependencies;
    private ChangeTable.Section dependsOn;
    private ChangeTable.Section neededBy;
    private PatchSetsBlock patchSetsBlock;
    private Panel comments;
    private KeyCommandSet keysNavigation;
    private KeyCommandSet keysAction;
    private HandlerRegistration regNavigation;
    private HandlerRegistration regAction;
    private Grid patchesGrid;
    private ListBox patchesList;
    /**
     * The change id for which the old version history is valid.
     */
    private static Change.Id currentChangeId;
    /**
     * Which patch set id is the diff base.
     */
    // NOTE(review): static, so the diff base survives screen instances; it is
    // discarded when a different change is displayed (discardDiffBaseIfNotApplicable).
    private static PatchSet.Id diffBaseId;
    public ChangeScreen(final Change.Id toShow) {
        changeId = toShow;
        openPatchSetId = null;
    }
    public ChangeScreen(final PatchSet.Id toShow) {
        changeId = toShow.getParentKey();
        openPatchSetId = toShow;
    }
    public ChangeScreen(final ChangeInfo c) {
        this(c.getId());
    }
    @Override
    protected void onLoad() {
        super.onLoad();
        // Triggers onValueChange once fresh detail arrives.
        detailCache.refresh();
    }
    @Override
    protected void onUnload() {
        // Drop global key bindings registered in registerKeys().
        if (regNavigation != null) {
            regNavigation.removeHandler();
            regNavigation = null;
        }
        if (regAction != null) {
            regAction.removeHandler();
            regAction = null;
        }
        super.onUnload();
    }
    @Override
    public void registerKeys() {
        super.registerKeys();
        regNavigation = GlobalKey.add(this, keysNavigation);
        regAction = GlobalKey.add(this, keysAction);
        // Jump straight to the requested patch set when one was given.
        if (openPatchSetId != null) {
            patchSetsBlock.activate(openPatchSetId);
        }
    }
    /** Builds all widgets and key bindings; data is filled in later by display(). */
    @Override
    protected void onInitUI() {
        super.onInitUI();
        ChangeCache cache = ChangeCache.get(changeId);
        detailCache = cache.getChangeDetailCache();
        detailCache.addValueChangeHandler(this);
        addStyleName(Gerrit.RESOURCES.css().changeScreen());
        addStyleName(Gerrit.RESOURCES.css().screenNoHeader());
        keysNavigation = new KeyCommandSet(Gerrit.C.sectionNavigation());
        keysAction = new KeyCommandSet(Gerrit.C.sectionActions());
        keysNavigation.add(new UpToListKeyCommand(0, 'u', Util.C.upToChangeList()));
        keysNavigation.add(new ExpandCollapseDependencySectionKeyCommand(0, 'd', Util.C.expandCollapseDependencies()));
        // Publishing comments requires sign-in, so only bind 'r' for signed-in users.
        if (Gerrit.isSignedIn()) {
            keysAction.add(new PublishCommentsKeyCommand(0, 'r', Util.C
                .keyPublishComments()));
        }
        descriptionBlock = new ChangeDescriptionBlock(keysAction);
        add(descriptionBlock);
        approvals = new ApprovalTable();
        add(approvals);
        includedInPanel = new DisclosurePanel(Util.C.changeScreenIncludedIn());
        includedInTable = new IncludedInTable(changeId);
        includedInPanel.setContent(includedInTable);
        add(includedInPanel);
        dependencies = new ChangeTable() {
            {
                table.setWidth("auto");
            }
        };
        dependsOn = new ChangeTable.Section(Util.C.changeScreenDependsOn());
        // Mark outdated or abandoned dependencies visually.
        dependsOn.setChangeRowFormatter(new ChangeTable.ChangeRowFormatter() {
            @Override
            public String getRowStyle(ChangeInfo c) {
                if (! c.isLatest() || Change.Status.ABANDONED.equals(c.getStatus())) {
                    return Gerrit.RESOURCES.css().outdated();
                }
                return null;
            }
            @Override
            public String getDisplayText(final ChangeInfo c, final String displayText) {
                if (! c.isLatest()) {
                    return displayText + " [OUTDATED]";
                }
                return displayText;
            }
        });
        neededBy = new ChangeTable.Section(Util.C.changeScreenNeededBy());
        dependencies.addSection(dependsOn);
        dependencies.addSection(neededBy);
        dependenciesPanel = new ComplexDisclosurePanel(
            Util.C.changeScreenDependencies(), false);
        dependenciesPanel.setContent(dependencies);
        add(dependenciesPanel);
        // Selector for the old version used as diff base; index 0 means "no base".
        patchesList = new ListBox();
        patchesList.addChangeHandler(new ChangeHandler() {
            @Override
            public void onChange(ChangeEvent event) {
                final int index = patchesList.getSelectedIndex();
                final String selectedPatchSet = patchesList.getValue(index);
                if (index == 0) {
                    diffBaseId = null;
                } else {
                    diffBaseId = PatchSet.Id.parse(selectedPatchSet);
                }
                if (patchSetsBlock != null) {
                    patchSetsBlock.refresh(diffBaseId);
                }
            }
        });
        patchesGrid = new Grid(1, 2);
        patchesGrid.setStyleName(Gerrit.RESOURCES.css().selectPatchSetOldVersion());
        patchesGrid.setText(0, 0, Util.C.oldVersionHistory());
        patchesGrid.setWidget(0, 1, patchesList);
        add(patchesGrid);
        patchSetsBlock = new PatchSetsBlock();
        add(patchSetsBlock);
        comments = new FlowPanel();
        comments.setStyleName(Gerrit.RESOURCES.css().changeComments());
        add(comments);
    }
    /** Sets the browser title to "Change NNN: subject", honoring RTL locales. */
    private void displayTitle(final Change.Key changeId, final String subject) {
        final StringBuilder titleBuf = new StringBuilder();
        if (LocaleInfo.getCurrentLocale().isRTL()) {
            if (subject != null) {
                titleBuf.append(subject);
                titleBuf.append(" :");
            }
            titleBuf.append(Util.M.changeScreenTitleId(changeId.abbreviate()));
        } else {
            titleBuf.append(Util.M.changeScreenTitleId(changeId.abbreviate()));
            if (subject != null) {
                titleBuf.append(": ");
                titleBuf.append(subject);
            }
        }
        setPageTitle(titleBuf.toString());
        setHeaderVisible(false);
    }
    @Override
    public void onValueChange(final ValueChangeEvent<ChangeDetail> event) {
        if (isAttached()) {
            // Until this screen is fully migrated to the new API, this call must be
            // sequential, because we can't start an async get at the source of every
            // call that might trigger a value change.
            ChangeApi.detail(event.getValue().getChange().getId().get(),
                new GerritCallback<com.google.gerrit.client.changes.ChangeInfo>() {
                    @Override
                    public void onSuccess(
                        com.google.gerrit.client.changes.ChangeInfo result) {
                        changeInfo = result;
                        display(event.getValue());
                    }
                });
        }
    }
    /** Renders the full change detail into the widgets built by onInitUI(). */
    private void display(final ChangeDetail detail) {
        displayTitle(detail.getChange().getKey(), detail.getChange().getSubject());
        discardDiffBaseIfNotApplicable(detail.getChange().getId());
        if (Status.MERGED == detail.getChange().getStatus()) {
            includedInPanel.setVisible(true);
            includedInPanel.addOpenHandler(includedInTable);
        } else {
            includedInPanel.setVisible(false);
        }
        dependencies.setAccountInfoCache(detail.getAccounts());
        descriptionBlock.display(detail.getChange(),
            detail.isStarred(),
            detail.canEditCommitMessage(),
            detail.getCurrentPatchSetDetail().getInfo(),
            detail.getAccounts(), detail.getSubmitTypeRecord());
        dependsOn.display(detail.getDependsOn());
        neededBy.display(detail.getNeededBy());
        approvals.display(changeInfo);
        patchesList.clear();
        // Merge commits diff against the auto-merge; others against the base.
        if (detail.getCurrentPatchSetDetail().getInfo().getParents().size() > 1) {
            patchesList.addItem(Util.C.autoMerge());
        } else {
            patchesList.addItem(Util.C.baseDiffItem());
        }
        for (PatchSet pId : detail.getPatchSets()) {
            if (patchesList != null) {
                patchesList.addItem(Util.M.patchSetHeader(pId.getPatchSetId()), pId
                    .getId().toString());
            }
        }
        if (diffBaseId != null && patchesList != null) {
            patchesList.setSelectedIndex(diffBaseId.get());
        }
        patchSetsBlock.display(detail, diffBaseId);
        addComments(detail);
        // If any dependency change is still open, or is outdated,
        // or the change is needed by a change that is new or submitted,
        // show our dependency list.
        //
        boolean depsOpen = false;
        int outdated = 0;
        if (!detail.getChange().getStatus().isClosed()) {
            // NOTE: this local intentionally shadows the dependsOn field above.
            final List<ChangeInfo> dependsOn = detail.getDependsOn();
            if (dependsOn != null) {
                for (final ChangeInfo ci : dependsOn) {
                    if (!ci.isLatest()) {
                        depsOpen = true;
                        outdated++;
                    } else if (ci.getStatus() != Change.Status.MERGED) {
                        depsOpen = true;
                    }
                }
            }
        }
        // NOTE: this local intentionally shadows the neededBy field above.
        final List<ChangeInfo> neededBy = detail.getNeededBy();
        if (neededBy != null) {
            for (final ChangeInfo ci : neededBy) {
                if ((ci.getStatus() == Change.Status.NEW) ||
                    (ci.getStatus() == Change.Status.SUBMITTED) ||
                    (ci.getStatus() == Change.Status.DRAFT)) {
                    depsOpen = true;
                }
            }
        }
        dependenciesPanel.setOpen(depsOpen);
        dependenciesPanel.getHeader().clear();
        if (outdated > 0) {
            dependenciesPanel.getHeader().add(new InlineLabel(
                Util.M.outdatedHeader(outdated)));
        }
        if (!isCurrentView()) {
            display();
        }
        patchSetsBlock.setRegisterKeys(true);
    }
    /** Clears the remembered diff base when switching to a different change. */
    private static void discardDiffBaseIfNotApplicable(final Change.Id toShow) {
        if (currentChangeId != null && !currentChangeId.equals(toShow)) {
            diffBaseId = null;
        }
        currentChangeId = toShow;
    }
    /** Rebuilds the comment thread; messages older than a week start collapsed. */
    private void addComments(final ChangeDetail detail) {
        comments.clear();
        final AccountInfoCache accts = detail.getAccounts();
        final List<ChangeMessage> msgList = detail.getMessages();
        HorizontalPanel title = new HorizontalPanel();
        title.setWidth("100%");
        title.add(new Label(Util.C.changeScreenComments()));
        if (msgList.size() > 1) {
            title.add(messagesMenuBar());
        }
        title.setStyleName(Gerrit.RESOURCES.css().blockHeader());
        comments.add(title);
        // "Recent" cutoff: seven days, in milliseconds.
        final long AGE = 7 * 24 * 60 * 60 * 1000L;
        final Timestamp aged = new Timestamp(System.currentTimeMillis() - AGE);
        for (int i = 0; i < msgList.size(); i++) {
            final ChangeMessage msg = msgList.get(i);
            AccountInfo author;
            if (msg.getAuthor() != null) {
                author = FormatUtil.asInfo(accts.get(msg.getAuthor()));
            } else {
                author = AccountInfo.create(0, Util.C.messageNoAuthor(), null);
            }
            boolean isRecent;
            if (i == msgList.size() - 1) {
                isRecent = true;
            } else {
                // TODO Instead of opening messages by strict age, do it by "unread"?
                isRecent = msg.getWrittenOn().after(aged);
            }
            final CommentPanel cp =
                new CommentPanel(author, msg.getWrittenOn(), msg.getMessage());
            cp.setRecent(isRecent);
            cp.addStyleName(Gerrit.RESOURCES.css().commentPanelBorder());
            // The newest message is always expanded.
            if (i == msgList.size() - 1) {
                cp.addStyleName(Gerrit.RESOURCES.css().commentPanelLast());
                cp.setOpen(true);
            }
            comments.add(cp);
        }
        final Button b = new Button(Util.C.changeScreenAddComment());
        b.addClickHandler(new ClickHandler() {
            @Override
            public void onClick(final ClickEvent event) {
                PatchSet.Id currentPatchSetId = patchSetsBlock.getCurrentPatchSet().getId();
                Gerrit.display(Dispatcher.toPublish(currentPatchSetId));
            }
        });
        comments.add(b);
        comments.setVisible(msgList.size() > 0);
    }
    /** Menu with expand-recent / expand-all / collapse-all for the comment thread. */
    private LinkMenuBar messagesMenuBar() {
        final Panel c = comments;
        final LinkMenuBar menuBar = new LinkMenuBar();
        menuBar.addItem(Util.C.messageExpandRecent(), new ExpandAllCommand(c, true) {
            @Override
            protected void expand(final CommentPanel w) {
                w.setOpen(w.isRecent());
            }
        });
        menuBar.addItem(Util.C.messageExpandAll(), new ExpandAllCommand(c, true));
        menuBar.addItem(Util.C.messageCollapseAll(), new ExpandAllCommand(c, false));
        menuBar.addStyleName(Gerrit.RESOURCES.css().commentPanelMenuBar());
        return menuBar;
    }
    /** 'u': navigate back to the change list the user came from. */
    public class UpToListKeyCommand extends KeyCommand {
        public UpToListKeyCommand(int mask, char key, String help) {
            super(mask, key, help);
        }
        @Override
        public void onKeyPress(final KeyPressEvent event) {
            Gerrit.displayLastChangeList();
        }
    }
    /** 'd': toggle the dependencies disclosure panel. */
    public class ExpandCollapseDependencySectionKeyCommand extends KeyCommand {
        public ExpandCollapseDependencySectionKeyCommand(int mask, char key, String help) {
            super(mask, key, help);
        }
        @Override
        public void onKeyPress(KeyPressEvent event) {
            dependenciesPanel.setOpen(!dependenciesPanel.isOpen());
        }
    }
    /** 'r': open the publish-comments screen for the current patch set (sign-in required). */
    public class PublishCommentsKeyCommand extends NeedsSignInKeyCommand {
        public PublishCommentsKeyCommand(int mask, char key, String help) {
            super(mask, key, help);
        }
        @Override
        public void onKeyPress(final KeyPressEvent event) {
            PatchSet.Id currentPatchSetId = patchSetsBlock.getCurrentPatchSet().getId();
            Gerrit.display(Dispatcher.toPublish(currentPatchSetId));
        }
    }
}
| |
package apincer.android.uamp;
import android.app.Application;
import android.content.Context;
import android.content.Intent;
import android.util.Log;
import com.github.moduth.blockcanary.BlockCanary;
import com.github.moduth.blockcanary.BlockCanaryContext;
import com.github.moduth.blockcanary.internal.BlockInfo;
import com.squareup.leakcanary.LeakCanary;
import org.greenrobot.greendao.database.Database;
import java.io.File;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import apincer.android.uamp.model.DaoMaster;
import apincer.android.uamp.model.DaoSession;
import apincer.android.uamp.utils.LogHelper;
import timber.log.Timber;
import static timber.log.Timber.DebugTree;
/**
 * Application entry point: starts the music service, opens the greenDAO session,
 * and installs LeakCanary / BlockCanary diagnostics plus Timber logging.
 */
public class MusixMateApp extends Application {
    // Held so their verbosity can be lowered once in onCreate().
    private static Logger jAudioTaggerLogger1 = Logger.getLogger("org.jaudiotagger.audio");
    private static Logger jAudioTaggerLogger2 = Logger.getLogger("org.jaudiotagger");
    // Process-wide DAO session, created once in onCreate().
    private static DaoSession daoSession;
    @Override public void onCreate() {
        super.onCreate();
        // FIX: per the LeakCanary contract, the analyzer-process check must run
        // before ANY app initialization. The original started MusicService and
        // built the DAO session first, so the heap-analysis process also spun
        // up the service and database.
        if (LeakCanary.isInAnalyzerProcess(this)) {
            // This process is dedicated to LeakCanary for heap analysis.
            // You should not init your app in this process.
            return;
        }
        LeakCanary.install(this);
        // start service
        Intent serviceIntent = new Intent(this, MusicService.class);
        startService(serviceIntent);
        // Open (or create) the app database and expose a shared session.
        DaoMaster.DevOpenHelper helper = new DaoMaster.DevOpenHelper(this, "uampdb");
        Database db = helper.getWritableDb();
        daoSession = new DaoMaster(db).newSession();
        BlockCanary.install(this, new BlockCanaryContext() {
            /**
             * Implement in your project.
             *
             * @return Qualifier which can specify this installation, like version + flavor.
             */
            public String provideQualifier() {
                return "unknown";
            }
            /**
             * Implement in your project.
             *
             * @return user id
             */
            public String provideUid() {
                return "uid";
            }
            /**
             * Network type
             *
             * @return {@link String} like 2G, 3G, 4G, wifi, etc.
             */
            public String provideNetworkType() {
                return "unknown";
            }
            /**
             * Config monitor duration, after this time BlockCanary will stop, use
             * with {@code BlockCanary}'s isMonitorDurationEnd
             *
             * @return monitor last duration (in hour)
             */
            public int provideMonitorDuration() {
                return -1;  // -1: monitor indefinitely
            }
            /**
             * Config block threshold (in millis), dispatch over this duration is regarded as a BLOCK. You may set it
             * from performance of device.
             *
             * @return threshold in mills
             */
            public int provideBlockThreshold() {
                return 1000;
            }
            /**
             * Thread stack dump interval, use when block happens, BlockCanary will dump on main thread
             * stack according to current sample cycle.
             * <p>
             * Because the implementation mechanism of Looper, real dump interval would be longer than
             * the period specified here (especially when cpu is busier).
             * </p>
             *
             * @return dump interval (in millis)
             */
            public int provideDumpInterval() {
                return provideBlockThreshold();
            }
            /**
             * Path to save log, like "/blockcanary/", will save to sdcard if can.
             *
             * @return path of log files
             */
            public String providePath() {
                return "/blockcanary/";
            }
            /**
             * If need notification to notice block.
             *
             * @return true if need, else if not need.
             */
            public boolean displayNotification() {
                return true;
            }
            /**
             * Implement in your project, bundle files into a zip file.
             *
             * @param src files before compress
             * @param dest files compressed
             * @return true if compression is successful
             */
            public boolean zip(File[] src, File dest) {
                return false;
            }
            /**
             * Implement in your project, bundled log files.
             *
             * @param zippedFile zipped file
             */
            public void upload(File zippedFile) {
                throw new UnsupportedOperationException();
            }
            /**
             * Packages that developer concern, by default it uses process name,
             * put high priority one in pre-order.
             *
             * @return null if simply concern only package with process name.
             */
            public List<String> concernPackages() {
                return null;
            }
            /**
             * Filter stack without any in concern package, used with @{code concernPackages}.
             *
             * @return true if filter, false it not.
             */
            public boolean filterNonConcernStack() {
                return false;
            }
            /**
             * Provide white list, entry in white list will not be shown in ui list.
             *
             * @return return null if you don't need white-list filter.
             */
            public List<String> provideWhiteList() {
                LinkedList<String> whiteList = new LinkedList<>();
                whiteList.add("org.chromium");
                return whiteList;
            }
            /**
             * Whether to delete files whose stack is in white list, used with white-list.
             *
             * @return true if delete, false it not.
             */
            public boolean deleteFilesInWhiteList() {
                return true;
            }
            /**
             * Block interceptor, developer may provide their own actions.
             */
            public void onBlock(Context context, BlockInfo blockInfo) {
            }
        }).start();
        // TURN OFF log for J audio tagger
        jAudioTaggerLogger1.setLevel(Level.WARNING);
        jAudioTaggerLogger2.setLevel(Level.WARNING);
        /*
        TagOptionSingleton.getInstance().setPadNumbers(true);
        TagOptionSingleton.getInstance().setLyrics3Save(true);
        TagOptionSingleton.getInstance().setID3V2Version(ID3V2Version.ID3_V23);
        TagOptionSingleton.getInstance().setWriteMp3GenresAsText(true);
        TagOptionSingleton.getInstance().setWriteMp4GenresAsText(true);
        // TagOptionSingleton.getInstance().setResetTextEncodingForExistingFrames(true);
        TagOptionSingleton.getInstance().setId3v1Save(true);
        TagOptionSingleton.getInstance().setLyrics3Save(true);
        TagOptionSingleton.getInstance().setWriteChunkSize(2097152);
        //TagOptionSingleton.getInstance().setRemoveTrailingTerminatorOnWrite(true);
        TagOptionSingleton.getInstance().setId3v2PaddingWillShorten(true);
        */
        // Verbose logging in debug builds; file-based crash reporting in release.
        if (BuildConfig.DEBUG) {
            Timber.plant(new DebugTree());
        } else {
            Timber.plant(new CrashReportingTree());
        }
    }
    /** A tree which logs important information for crash reporting. */
    private static class CrashReportingTree extends Timber.Tree {
        @Override protected void log(int priority, String tag, String message, Throwable t) {
            // FIX: the original ignored priority and message and always logged under
            // a copy-pasted "GLIDE" tag, writing only the throwable's stack trace
            // (an empty string when t == null). Record the actual tag and message,
            // append the stack trace when present, and skip noisy levels.
            if (priority == Log.VERBOSE || priority == Log.DEBUG) {
                return;
            }
            String text = (t == null)
                ? message
                : message + "\n" + Log.getStackTraceString(t);
            LogHelper.logToFile(tag != null ? tag : "APP", text);
        }
    }
    /** @return the shared DAO session created in {@link #onCreate()}; null before then. */
    public static DaoSession getDaoSession() {
        return daoSession;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.ddl.table.create.show;
import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedMap;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.StatsSetupConst;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.SkewedInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.ql.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.ddl.DDLUtils;
import org.apache.hadoop.hive.ql.ddl.table.create.CreateTableOperation;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.util.DirectionUtils;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
import org.apache.hive.common.util.HiveStringUtils;
import org.stringtemplate.v4.ST;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
/**
* Operation process showing the creation of a table.
*/
public class ShowCreateTableOperation extends DDLOperation<ShowCreateTableDesc> {
// Attribute names referenced by the StringTemplate templates below; each one
// names a slot that is filled in getCreateViewCommand/getCreateTableCommand.
private static final String EXTERNAL = "external";
private static final String TEMPORARY = "temporary";
private static final String DATABASE_NAME = "databaseName";
private static final String TABLE_NAME = "tableName";
private static final String LIST_COLUMNS = "columns";
private static final String COMMENT = "comment";
private static final String PARTITIONS = "partitions";
private static final String BUCKETS = "buckets";
private static final String SKEWED = "skewedinfo";
private static final String ROW_FORMAT = "row_format";
private static final String LOCATION_BLOCK = "location_block";
private static final String LOCATION = "location";
private static final String PROPERTIES = "properties";
/** Creates the operation that renders a SHOW CREATE TABLE statement for {@code desc}. */
public ShowCreateTableOperation(DDLOperationContext context, ShowCreateTableDesc desc) {
    super(context, desc);
}
/**
 * Writes the CREATE TABLE / CREATE VIEW statement for the target table to the
 * result file.
 *
 * @return 0 on success, 1 when the result file could not be written
 * @throws HiveException on any non-I/O failure (e.g. metastore errors)
 */
@Override
public int execute() throws HiveException {
    // get the create table statement for the table and populate the output
    try (DataOutputStream outStream = DDLUtils.getOutputStream(new Path(desc.getResFile()), context)) {
        Table table = context.getDb().getTable(desc.getDatabaseName(), desc.getTableName());
        // Views and base tables render through different templates.
        String command = table.isView() ? getCreateViewCommand(table, desc.isRelative())
            : getCreateTableCommand(table, desc.isRelative());
        outStream.write(command.getBytes(StandardCharsets.UTF_8));
        return 0;
    } catch (IOException e) {
        // Write failures are reported as a non-zero status; the cause goes to the log.
        LOG.info("Show create table failed", e);
        return 1;
    } catch (Exception e) {
        throw new HiveException(e);
    }
}
private static final String CREATE_VIEW_TEMPLATE =
"CREATE VIEW <if(" + DATABASE_NAME + ")>`<" + DATABASE_NAME + ">`.<endif>`<" + TABLE_NAME + ">` AS <SQL>";
private String getCreateViewCommand(Table table, boolean isRelative) {
ST command = new ST(CREATE_VIEW_TEMPLATE);
if (!isRelative) {
command.add(DATABASE_NAME, table.getDbName());
}
command.add(TABLE_NAME, table.getTableName());
command.add("SQL", table.getViewExpandedText());
return command.render();
}
private static final String CREATE_TABLE_TEMPLATE =
"CREATE <" + TEMPORARY + "><" + EXTERNAL + ">TABLE <if(" + DATABASE_NAME + ")>`<" + DATABASE_NAME + ">`.<endif>"
+ "`<" + TABLE_NAME + ">`(\n" +
"<" + LIST_COLUMNS + ">)\n" +
"<" + COMMENT + ">\n" +
"<" + PARTITIONS + ">\n" +
"<" + BUCKETS + ">\n" +
"<" + SKEWED + ">\n" +
"<" + ROW_FORMAT + ">\n" +
"<" + LOCATION_BLOCK + ">" +
"TBLPROPERTIES (\n" +
"<" + PROPERTIES + ">)\n";
private String getCreateTableCommand(Table table, boolean isRelative) {
ST command = new ST(CREATE_TABLE_TEMPLATE);
if (!isRelative) {
command.add(DATABASE_NAME, table.getDbName());
}
command.add(TABLE_NAME, table.getTableName());
command.add(TEMPORARY, getTemporary(table));
command.add(EXTERNAL, getExternal(table));
command.add(LIST_COLUMNS, getColumns(table));
command.add(COMMENT, getComment(table));
command.add(PARTITIONS, getPartitions(table));
command.add(BUCKETS, getBuckets(table));
command.add(SKEWED, getSkewed(table));
command.add(ROW_FORMAT, getRowFormat(table));
command.add(LOCATION_BLOCK, getLocationBlock(table));
command.add(PROPERTIES, getProperties(table));
return command.render();
}
private String getTemporary(Table table) {
return table.isTemporary() ? "TEMPORARY " : "";
}
private String getExternal(Table table) {
return table.getTableType() == TableType.EXTERNAL_TABLE ? "EXTERNAL " : "";
}
private String getColumns(Table table) {
List<String> columnDescs = new ArrayList<String>();
for (FieldSchema column : table.getCols()) {
String columnType = formatType(TypeInfoUtils.getTypeInfoFromTypeString(column.getType()));
String columnDesc = " `" + column.getName() + "` " + columnType;
if (column.getComment() != null) {
columnDesc += " COMMENT '" + HiveStringUtils.escapeHiveCommand(column.getComment()) + "'";
}
columnDescs.add(columnDesc);
}
return StringUtils.join(columnDescs, ", \n");
}
/** Struct fields are identifiers, need to be put between ``. */
private String formatType(TypeInfo typeInfo) {
switch (typeInfo.getCategory()) {
case PRIMITIVE:
return typeInfo.getTypeName();
case STRUCT:
StringBuilder structFormattedType = new StringBuilder();
StructTypeInfo structTypeInfo = (StructTypeInfo)typeInfo;
for (int i = 0; i < structTypeInfo.getAllStructFieldNames().size(); i++) {
if (structFormattedType.length() != 0) {
structFormattedType.append(", ");
}
String structElementName = structTypeInfo.getAllStructFieldNames().get(i);
String structElementType = formatType(structTypeInfo.getAllStructFieldTypeInfos().get(i));
structFormattedType.append("`" + structElementName + "`:" + structElementType);
}
return "struct<" + structFormattedType.toString() + ">";
case LIST:
ListTypeInfo listTypeInfo = (ListTypeInfo)typeInfo;
String elementType = formatType(listTypeInfo.getListElementTypeInfo());
return "array<" + elementType + ">";
case MAP:
MapTypeInfo mapTypeInfo = (MapTypeInfo)typeInfo;
String keyTypeInfo = mapTypeInfo.getMapKeyTypeInfo().getTypeName();
String valueTypeInfo = formatType(mapTypeInfo.getMapValueTypeInfo());
return "map<" + keyTypeInfo + "," + valueTypeInfo + ">";
case UNION:
StringBuilder unionFormattedType = new StringBuilder();
UnionTypeInfo unionTypeInfo = (UnionTypeInfo)typeInfo;
for (TypeInfo unionElementTypeInfo : unionTypeInfo.getAllUnionObjectTypeInfos()) {
if (unionFormattedType.length() != 0) {
unionFormattedType.append(", ");
}
String unionElementType = formatType(unionElementTypeInfo);
unionFormattedType.append(unionElementType);
}
return "uniontype<" + unionFormattedType.toString() + ">";
default:
throw new RuntimeException("Unknown type: " + typeInfo.getCategory());
}
}
private String getComment(Table table) {
String comment = table.getProperty("comment");
return (comment != null) ? "COMMENT '" + HiveStringUtils.escapeHiveCommand(comment) + "'" : "";
}
private String getPartitions(Table table) {
List<FieldSchema> partitionKeys = table.getPartitionKeys();
if (partitionKeys.isEmpty()) {
return "";
}
List<String> partitionDescs = new ArrayList<String>();
for (FieldSchema partitionKey : partitionKeys) {
String partitionDesc = " `" + partitionKey.getName() + "` " + partitionKey.getType();
if (partitionKey.getComment() != null) {
partitionDesc += " COMMENT '" + HiveStringUtils.escapeHiveCommand(partitionKey.getComment()) + "'";
}
partitionDescs.add(partitionDesc);
}
return "PARTITIONED BY ( \n" + StringUtils.join(partitionDescs, ", \n") + ")";
}
private String getBuckets(Table table) {
List<String> bucketCols = table.getBucketCols();
if (bucketCols.isEmpty()) {
return "";
}
String buckets = "CLUSTERED BY ( \n " + StringUtils.join(bucketCols, ", \n ") + ") \n";
List<Order> sortColumns = table.getSortCols();
if (!sortColumns.isEmpty()) {
List<String> sortKeys = new ArrayList<String>();
for (Order sortColumn : sortColumns) {
String sortKeyDesc = " " + sortColumn.getCol() + " " + DirectionUtils.codeToText(sortColumn.getOrder());
sortKeys.add(sortKeyDesc);
}
buckets += "SORTED BY ( \n" + StringUtils.join(sortKeys, ", \n") + ") \n";
}
buckets += "INTO " + table.getNumBuckets() + " BUCKETS";
return buckets;
}
private String getSkewed(Table table) {
SkewedInfo skewedInfo = table.getSkewedInfo();
if (skewedInfo == null || skewedInfo.getSkewedColNames().isEmpty()) {
return "";
}
List<String> columnValuesList = new ArrayList<String>();
for (List<String> columnValues : skewedInfo.getSkewedColValues()) {
columnValuesList.add("('" + StringUtils.join(columnValues, "','") + "')");
}
String skewed =
"SKEWED BY (" + StringUtils.join(skewedInfo.getSkewedColNames(), ",") + ")\n" +
" ON (" + StringUtils.join(columnValuesList, ",") + ")";
if (table.isStoredAsSubDirectories()) {
skewed += "\n STORED AS DIRECTORIES";
}
return skewed;
}
private String getRowFormat(Table table) {
StringBuilder rowFormat = new StringBuilder();
StorageDescriptor sd = table.getTTable().getSd();
SerDeInfo serdeInfo = sd.getSerdeInfo();
rowFormat
.append("ROW FORMAT SERDE \n")
.append(" '" + HiveStringUtils.escapeHiveCommand(serdeInfo.getSerializationLib()) + "' \n");
Map<String, String> serdeParams = serdeInfo.getParameters();
if (table.getStorageHandler() == null) {
// If serialization.format property has the default value, it will not to be included in SERDE properties
if (Warehouse.DEFAULT_SERIALIZATION_FORMAT.equals(serdeParams.get(serdeConstants.SERIALIZATION_FORMAT))) {
serdeParams.remove(serdeConstants.SERIALIZATION_FORMAT);
}
if (!serdeParams.isEmpty()) {
appendSerdeParams(rowFormat, serdeParams);
rowFormat.append(" \n");
}
rowFormat
.append("STORED AS INPUTFORMAT \n '" + HiveStringUtils.escapeHiveCommand(sd.getInputFormat()) + "' \n")
.append("OUTPUTFORMAT \n '" + HiveStringUtils.escapeHiveCommand(sd.getOutputFormat()) + "'");
} else {
String metaTableStorage = table.getParameters().get(META_TABLE_STORAGE);
rowFormat.append("STORED BY \n '" + HiveStringUtils.escapeHiveCommand(metaTableStorage) + "' \n");
if (!serdeParams.isEmpty()) {
appendSerdeParams(rowFormat, serdeInfo.getParameters());
}
}
return rowFormat.toString();
}
public static void appendSerdeParams(StringBuilder builder, Map<String, String> serdeParams) {
SortedMap<String, String> sortedSerdeParams = new TreeMap<String, String>(serdeParams);
List<String> serdeCols = new ArrayList<String>();
for (Entry<String, String> entry : sortedSerdeParams.entrySet()) {
serdeCols.add(" '" + entry.getKey() + "'='" +
HiveStringUtils.escapeUnicode(HiveStringUtils.escapeHiveCommand(entry.getValue())) + "'");
}
builder
.append("WITH SERDEPROPERTIES ( \n")
.append(StringUtils.join(serdeCols, ", \n"))
.append(')');
}
private static final String CREATE_TABLE_TEMPLATE_LOCATION =
"LOCATION\n" +
"<" + LOCATION + ">\n";
private String getLocationBlock(Table table) {
if (!CreateTableOperation.doesTableNeedLocation(table)) {
return "";
}
ST locationBlock = new ST(CREATE_TABLE_TEMPLATE_LOCATION);
StorageDescriptor sd = table.getTTable().getSd();
locationBlock.add(LOCATION, " '" + HiveStringUtils.escapeHiveCommand(sd.getLocation()) + "'");
return locationBlock.render();
}
private static final Set<String> PROPERTIES_TO_IGNORE_AT_TBLPROPERTIES = Sets.union(
ImmutableSet.<String>of("TEMPORARY", "EXTERNAL", "comment", "SORTBUCKETCOLSPREFIX", META_TABLE_STORAGE),
new HashSet<String>(StatsSetupConst.TABLE_PARAMS_STATS_KEYS));
private String getProperties(Table table) {
return DDLUtils.propertiesToString(table.getParameters(), PROPERTIES_TO_IGNORE_AT_TBLPROPERTIES);
}
}
| |
/*
* Copyright (C) 2014 Michell Bak
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nmj.nmjmanager;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.database.Cursor;
import android.os.Bundle;
import android.support.v4.content.LocalBroadcastManager;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import com.nmj.base.NMJActivity;
import com.nmj.db.DbAdapterSources;
import com.nmj.functions.FileSource;
import com.nmj.utils.TypefaceUtils;
import java.util.ArrayList;
/**
 * Activity that lists the configured movie and TV-show file sources and lets
 * the user remove them. The list reloads itself whenever a
 * "NMJManager-filesource-change" local broadcast is received.
 */
public class FileSources extends NMJActivity {
  // Shared sources database adapter owned by the application singleton.
  private DbAdapterSources mDatabase = NMJManagerApplication.getSourcesAdapter();
  // Flattened list model: section headers plus one item per file source.
  private ArrayList<FileSourceListItem> mItems = new ArrayList<FileSourceListItem>();
  private ListView mListView;
  private LinearLayout mEmptyView;
  private ListAdapter mAdapter;

  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    mEmptyView = (LinearLayout) findViewById(R.id.noFileSources);
    mListView = (ListView) findViewById(R.id.listView1);
    mAdapter = new ListAdapter();
    mListView.setAdapter(mAdapter);
    // Shown automatically by ListView whenever the adapter is empty.
    mListView.setEmptyView(mEmptyView);
    loadSources();
    // Reload the list when another component reports a file-source change.
    LocalBroadcastManager.getInstance(this).registerReceiver(mMessageReceiver, new IntentFilter("NMJManager-filesource-change"));
  }

  @Override
  protected int getLayoutResource() {
    return R.layout.filesources_layout;
  }

  private BroadcastReceiver mMessageReceiver = new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
      loadSources();
    }
  };

  @Override
  public void onStart() {
    super.onStart();
    getSupportActionBar().setDisplayHomeAsUpEnabled(true);
  }

  @Override
  protected void onDestroy() {
    super.onDestroy();
    // Unregister to avoid leaking the receiver past the activity's lifetime.
    LocalBroadcastManager.getInstance(this).unregisterReceiver(mMessageReceiver);
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
    case android.R.id.home:
      onBackPressed();
      break;
    case R.id.add_source:
      addFileSource();
      break;
    }
    return super.onOptionsItemSelected(item);
  }

  // Currently a no-op: the add-source flow is disabled (see commented-out intent below).
  private void addFileSource() {
    //Intent i = new Intent(this, AddFileSource.class);
    //startActivity(i);
  }

  /**
   * Rebuilds mItems from the database: a "Movies" header followed by all movie
   * sources, then a "TV shows" header followed by all show sources.
   */
  private void loadSources() {
    ArrayList<FileSource> sources = new ArrayList<FileSource>();
    boolean hasMovies = false, hasShows = false;
    // Fetch all movie sources and add them to the array
    Cursor cursor = mDatabase.fetchAllSources();
    try {
      while (cursor.moveToNext()) {
        FileSource fs = new FileSource(
            cursor.getLong(cursor.getColumnIndex(DbAdapterSources.KEY_ROWID)),
            cursor.getString(cursor.getColumnIndex(DbAdapterSources.KEY_FILEPATH)),
            cursor.getInt(cursor.getColumnIndex(DbAdapterSources.KEY_FILESOURCE_TYPE)),
            cursor.getString(cursor.getColumnIndex(DbAdapterSources.KEY_USER)),
            cursor.getString(cursor.getColumnIndex(DbAdapterSources.KEY_PASSWORD)),
            cursor.getString(cursor.getColumnIndex(DbAdapterSources.KEY_DOMAIN)),
            cursor.getString(cursor.getColumnIndex(DbAdapterSources.KEY_TYPE))
        );
        sources.add(fs);
        if (fs.isMovie())
          hasMovies = true;
        if (!fs.isMovie())
          hasShows = true;
      }
    } catch (Exception ignored) {
      // NOTE(review): cursor/DB errors are deliberately swallowed here, leaving the list
      // partially populated — confirm this best-effort behavior is intended.
    } finally {
      cursor.close();
    }
    mItems.clear();
    if (hasMovies) {
      // Header row, then all movie sources.
      mItems.add(new FileSourceListItem(null, getString(R.string.chooserMovies), true));
      for (int i = 0; i < sources.size(); i++) {
        if (sources.get(i).isMovie())
          mItems.add(new FileSourceListItem(sources.get(i), sources.get(i).getTitle(), false));
      }
    }
    if (hasShows) {
      // Header row, then all TV-show sources.
      mItems.add(new FileSourceListItem(null, getString(R.string.chooserTVShows), true));
      for (int i = 0; i < sources.size(); i++) {
        if (!sources.get(i).isMovie())
          mItems.add(new FileSourceListItem(sources.get(i), sources.get(i).getTitle(), false));
      }
    }
    mAdapter.notifyDataSetChanged();
  }

  /**
   * Deletes the source shown at list position {@code id} and refreshes the list.
   * Must only be called for non-header positions (headers have a null FileSource).
   */
  public void removeSelectedSource(int id) {
    mDatabase.deleteSource(mItems.get(id).getFileSource().getRowId());
    loadSources();
  }

  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
    getMenuInflater().inflate(R.menu.filebrowser, menu);
    return true;
  }

  /** Adapter over mItems with two view types: section header (0) and source row (1). */
  public class ListAdapter extends BaseAdapter {
    private LayoutInflater mInflater;

    public ListAdapter() {
      mInflater = (LayoutInflater) getApplicationContext().getSystemService(Context.LAYOUT_INFLATER_SERVICE);
    }

    @Override
    public int getCount() {
      return mItems.size();
    }

    @Override
    public Object getItem(int position) {
      // Items are accessed through the enclosing activity's mItems, not through the adapter.
      return null;
    }

    @Override
    public long getItemId(int position) {
      return 0;
    }

    @Override
    public int getViewTypeCount() {
      return 2;
    }

    @Override
    public int getItemViewType(int position) {
      return mItems.get(position).isHeader() ? 0 : 1;
    }

    @Override
    public boolean isEnabled(int position) {
      // Headers are not clickable.
      return !mItems.get(position).isHeader();
    }

    // NOTE(review): convertView is never reused — a new view is inflated on every call,
    // which defeats ListView recycling. Consider the standard convertView-null check.
    public View getView(final int position, View convertView, ViewGroup parent) {
      if (mItems.get(position).isHeader()) {
        convertView = mInflater.inflate(R.layout.file_source_list_header, parent, false);
        TextView title = (TextView) convertView.findViewById(R.id.title);
        title.setText(mItems.get(position).getTitle());
        title.setTypeface(TypefaceUtils.getRobotoMedium(getApplicationContext()));
      } else {
        convertView = mInflater.inflate(R.layout.filesource_list, parent, false);
        ((TextView) convertView.findViewById(R.id.txtListTitle)).setText(mItems.get(position).getTitle());
        ((TextView) convertView.findViewById(R.id.txtListTitle)).setTypeface(TypefaceUtils.getRobotoCondensedRegular(getApplicationContext()));
        ((TextView) convertView.findViewById(R.id.txtListPlot)).setText(mItems.get(position).getFileSource().getFilepath());
        ((TextView) convertView.findViewById(R.id.txtListPlot)).setTypeface(TypefaceUtils.getRobotoLight(getApplicationContext()));
        ((ImageView) convertView.findViewById(R.id.traktIcon)).setImageResource(mItems.get(position).getFileSource().isMovie() ? R.drawable.ic_movie_white_24dp : R.drawable.ic_tv_white_24dp);
        convertView.findViewById(R.id.imageView2).setOnClickListener(new OnClickListener() {
          @Override
          public void onClick(View v) {
            removeSelectedSource(position);
          }
        });
      }
      return convertView;
    }
  }

  /** Immutable list-row model: either a section header (null source) or a file source. */
  private class FileSourceListItem {
    private FileSource mFileSource;
    private String mTitle;
    private boolean mHeader;

    public FileSourceListItem(FileSource fileSource, String title, boolean header) {
      mFileSource = fileSource;
      mTitle = title;
      mHeader = header;
    }

    public FileSource getFileSource() {
      return mFileSource;
    }

    public String getTitle() {
      return mTitle;
    }

    public boolean isHeader() {
      return mHeader;
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.cache.store.hibernate;
import java.sql.Connection;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import javax.naming.NamingException;
import javax.naming.Reference;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteException;
import org.apache.ignite.Ignition;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.hibernate.Cache;
import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.hibernate.SessionBuilder;
import org.hibernate.SessionFactory;
import org.hibernate.StatelessSession;
import org.hibernate.StatelessSessionBuilder;
import org.hibernate.TypeHelper;
import org.hibernate.boot.spi.SessionFactoryOptions;
import org.hibernate.engine.spi.FilterDefinition;
import org.hibernate.metadata.ClassMetadata;
import org.hibernate.metadata.CollectionMetadata;
import org.hibernate.stat.Statistics;
import org.junit.Test;
/**
* Test for Cache jdbc blob store factory.
*/
public class CacheHibernateStoreFactorySelfTest extends GridCommonAbstractTest {
/** Cache name. */
private static final String CACHE_NAME = "test";
/** */
static final String MODULE_PATH = "modules/hibernate-5.1/";
/**
* @throws Exception If failed.
*/
@Test
public void testCacheConfiguration() throws Exception {
try (Ignite ignite1 = startGrid(0)) {
IgniteCache<Integer, String> cache1 = ignite1.getOrCreateCache(cacheConfiguration());
checkStore(cache1);
}
}
/**
* @throws Exception If failed.
*/
@Test
public void testXmlConfiguration() throws Exception {
try (Ignite ignite = Ignition.start(MODULE_PATH + "/src/test/config/factory-cache.xml")) {
try(Ignite ignite1 = Ignition.start(MODULE_PATH + "/src/test/config/factory-cache1.xml")) {
checkStore(ignite.<Integer, String>cache(CACHE_NAME), DummySessionFactoryExt.class);
checkStore(ignite1.<Integer, String>cache(CACHE_NAME), DummySessionFactory.class);
}
}
}
/**
* @throws Exception If failed.
*/
@Test
public void testIncorrectBeanConfiguration() throws Exception {
GridTestUtils.assertThrows(log, new Callable<Object>() {
@Override public Object call() throws Exception {
try (Ignite ignite =
Ignition.start(MODULE_PATH + "/src/test/config/factory-incorrect-store-cache.xml")) {
ignite.cache(CACHE_NAME).getConfiguration(CacheConfiguration.class).
getCacheStoreFactory().create();
}
return null;
}
}, IgniteException.class, "Failed to load bean in application context");
}
/**
* @return Cache configuration with store.
*/
private CacheConfiguration<Integer, String> cacheConfiguration() {
CacheConfiguration<Integer, String> cfg = new CacheConfiguration<>(DEFAULT_CACHE_NAME);
CacheHibernateBlobStoreFactory<Integer, String> factory = new CacheHibernateBlobStoreFactory();
factory.setHibernateConfigurationPath("/org/apache/ignite/cache/store/hibernate/hibernate.cfg.xml");
cfg.setCacheStoreFactory(factory);
return cfg;
}
/**
* @param cache Ignite cache.
* @param dataSrcClass Data source class.
* @throws Exception If store parameters is not the same as in configuration xml.
*/
private void checkStore(IgniteCache<Integer, String> cache, Class<?> dataSrcClass) throws Exception {
CacheHibernateBlobStore store = (CacheHibernateBlobStore)cache
.getConfiguration(CacheConfiguration.class).getCacheStoreFactory().create();
assertEquals(dataSrcClass,
GridTestUtils.getFieldValue(store, CacheHibernateBlobStore.class, "sesFactory").getClass());
}
/**
* @param cache Ignite cache.
* @throws Exception If store parameters is not the same as in configuration xml.
*/
private void checkStore(IgniteCache<Integer, String> cache) throws Exception {
CacheHibernateBlobStore store = (CacheHibernateBlobStore)cache.getConfiguration(CacheConfiguration.class)
.getCacheStoreFactory().create();
assertEquals("/org/apache/ignite/cache/store/hibernate/hibernate.cfg.xml",
GridTestUtils.getFieldValue(store, CacheHibernateBlobStore.class, "hibernateCfgPath"));
}
/**
*
*/
public static class DummySessionFactoryExt extends DummySessionFactory {
/** */
public DummySessionFactoryExt() {
// No-op.
}
}
/**
*
*/
public static class DummySessionFactory implements SessionFactory {
/** {@inheritDoc} */
@Override public SessionFactoryOptions getSessionFactoryOptions() {
return null;
}
/** {@inheritDoc} */
@Override public SessionBuilder withOptions() {
return null;
}
/** {@inheritDoc} */
@Override public Session openSession() throws HibernateException {
return null;
}
/** {@inheritDoc} */
@Override public Session getCurrentSession() throws HibernateException {
return null;
}
/** {@inheritDoc} */
@Override public StatelessSessionBuilder withStatelessOptions() {
return null;
}
/** {@inheritDoc} */
@Override public StatelessSession openStatelessSession() {
return null;
}
/** {@inheritDoc} */
@Override public StatelessSession openStatelessSession(Connection conn) {
return null;
}
/** {@inheritDoc} */
@Override public ClassMetadata getClassMetadata(Class entityCls) {
return null;
}
/** {@inheritDoc} */
@Override public ClassMetadata getClassMetadata(String entityName) {
return null;
}
/** {@inheritDoc} */
@Override public CollectionMetadata getCollectionMetadata(String roleName) {
return null;
}
/** {@inheritDoc} */
@Override public Map<String, ClassMetadata> getAllClassMetadata() {
return null;
}
/** {@inheritDoc} */
@Override public Map getAllCollectionMetadata() {
return null;
}
/** {@inheritDoc} */
@Override public Statistics getStatistics() {
return null;
}
/** {@inheritDoc} */
@Override public void close() throws HibernateException {
}
/** {@inheritDoc} */
@Override public boolean isClosed() {
return false;
}
/** {@inheritDoc} */
@Override public Cache getCache() {
return null;
}
/** {@inheritDoc} */
@Override public Set getDefinedFilterNames() {
return null;
}
/** {@inheritDoc} */
@Override public FilterDefinition getFilterDefinition(String filterName) throws HibernateException {
return null;
}
/** {@inheritDoc} */
@Override public boolean containsFetchProfileDefinition(String name) {
return false;
}
/** {@inheritDoc} */
@Override public TypeHelper getTypeHelper() {
return null;
}
/** {@inheritDoc} */
@Override public Reference getReference() throws NamingException {
return null;
}
}
}
| |
/**
* Copyright (c) 2015-2019 YCSB contributors. All rights reserved.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package site.ycsb.db.voltdb.test;
import static org.junit.Assert.*;
import static org.junit.Assume.assumeNoException;
import site.ycsb.ByteIterator;
import site.ycsb.DBException;
import site.ycsb.Status;
import site.ycsb.StringByteIterator;
import site.ycsb.db.voltdb.ConnectionHelper;
import site.ycsb.db.voltdb.VoltClient4;
import org.junit.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.Vector;
import java.util.Properties;
/**
* Test harness for YCSB / VoltDB. Note that not much happens if VoltDB isn't
* visible.
*
*/
public class VoltDBClientTest {
  // YCSB user table name and field layout used by all tests in this class.
  private static final String TABLE_NAME = "USERTABLE";
  // NOTE(review): FIELD_LENGTH is not referenced by the visible tests — confirm it is still needed.
  private static final int FIELD_LENGTH = 32;
  private static final String FIELD_PREFIX = "FIELD";
  private static final int NUM_FIELDS = 3;
  // Keys used by the individual test methods; all are wiped in setup/teardown.
  private static final String INSERT_TEST_KEY = "InsertReadTest";
  private static final String INSERT_DELETE_AND_READ_TEST_KEY = "InsertDeleteReadTest";
  private static final String UPDATE_TEST_KEY = "UpdateTest";
  private static final String NON_EXISTENT_KEY = "NonExistTest";
  private static final String SCAN_KEY_PREFIX = "ScanKey_";
  private static final int SCAN_RECORD_COUNT = 5000;
  private static final String[] TEST_DATA_KEYS = { INSERT_TEST_KEY, INSERT_DELETE_AND_READ_TEST_KEY, UPDATE_TEST_KEY };
  // Shared client; null / haveDb==false when no usable VoltDB instance is reachable.
  private static VoltClient4 voltClient = null;
  private static boolean haveDb = false;
  /**
   * Connects to VoltDB if one is reachable and clears any leftover test data.
   * When no usable VoltDB is found, haveDb stays false and every test skips
   * itself via Assume; a half-working listener triggers assumeNoException.
   */
  @BeforeClass
  public static void setup() {
    Properties p = new Properties();
    // NOTE(review): p is freshly created, so these getProperty calls always return the
    // defaults; if external overrides were intended, System.getProperty would be needed — confirm.
    String servers = p.getProperty("voltdb.servers", "localhost");
    String user = p.getProperty("voltdb.user", "");
    String password = p.getProperty("voltdb.password", "");
    String strLimit = p.getProperty("voltdb.ratelimit", "70000");
    p.setProperty("voltdb.servers", servers);
    p.setProperty("voltdb.user", user);
    p.setProperty("voltdb.password", password);
    p.setProperty("voltdb.ratelimit", strLimit);
    try {
      voltClient = new VoltClient4();
      voltClient.setProperties(p);
      // Only init the client when something is listening on the expected host/port.
      if (ConnectionHelper.checkDBServers(servers)) {
        voltClient.init();
        haveDb = true;
        removeExistingData();
      }
    } catch (Exception e) {
      // The call to checkDBServers above looks for activity on
      // the ip and port we expect VoltDB to be on. If we get to this
      // line it's because 'something' is running on localhost:21212,
      // but whatever it is, it isn't a happy copy of VoltDB.
      assumeNoException("Something was running on VoltDB's port but it wasn't a usable copy of VoltDB", e);
    }
  }
private static void removeExistingData() {
try {
for (int i = 0; i < TEST_DATA_KEYS.length; i++) {
voltClient.delete(TABLE_NAME, TEST_DATA_KEYS[i]);
}
for (int i = 0; i < SCAN_RECORD_COUNT; i++) {
voltClient.delete(TABLE_NAME, SCAN_KEY_PREFIX + i);
}
} catch (Exception e) {
Logger logger = LoggerFactory.getLogger(VoltDBClientTest.class);
logger.error("Error while calling 'removeExistingData()'", e);
fail("Failed removeExistingData");
}
}
@AfterClass
public static void teardown() {
try {
if (voltClient != null && haveDb) {
removeExistingData();
voltClient.cleanup();
}
} catch (DBException e) {
e.printStackTrace();
}
}
  /** Intentionally empty: no per-test state is needed; setup()/teardown() manage the database. */
  @Before
  public void prepareTest() {
  }
private boolean compareContents(HashMap<String, ByteIterator> inMsg, Map<String, ByteIterator> outMsg) {
if (inMsg == null) {
return false;
}
if (outMsg == null) {
return false;
}
if (inMsg.size() != outMsg.size()) {
return false;
}
@SuppressWarnings("rawtypes")
Iterator it = inMsg.entrySet().iterator();
while (it.hasNext()) {
@SuppressWarnings("rawtypes")
Map.Entry pair = (Map.Entry) it.next();
String key = (String) pair.getKey();
ByteIterator inPayload = inMsg.get(key);
inPayload.reset();
ByteIterator outPayload = outMsg.get(key);
outPayload.reset();
if (inPayload.bytesLeft() != outPayload.bytesLeft()) {
return false;
}
while (inPayload.hasNext()) {
byte inByte = inPayload.nextByte();
byte outByte = outPayload.nextByte();
if (inByte != outByte) {
return false;
}
}
it.remove();
}
return true;
}
  /** Inserts a row, reads it back, and verifies the returned payload matches what was written. */
  @Test
  public void insertAndReadTest() {
    Assume.assumeTrue(haveDb);
    try {
      // Create some test data
      final String insertKey = INSERT_TEST_KEY;
      final Set<String> columns = getColumnNameMap();
      // Insert row
      HashMap<String, ByteIterator> insertMap = new HashMap<String, ByteIterator>();
      for (int i = 0; i < NUM_FIELDS; i++) {
        insertMap.put(FIELD_PREFIX + i, new StringByteIterator(buildDeterministicValue(insertKey, FIELD_PREFIX + i)));
      }
      voltClient.insert(TABLE_NAME, insertKey, insertMap);
      // Create a object to put retrieved row in...
      Map<String, ByteIterator> testResult = new HashMap<String, ByteIterator>();
      // Read row...
      Status s = voltClient.read(TABLE_NAME, insertKey, columns, testResult);
      if (!s.equals(Status.OK)) {
        fail("Didn't get OK on read.");
      }
      if (!compareContents(insertMap, testResult)) {
        fail("Returned data not the same as inserted data");
      }
    } catch (Exception e) {
      e.printStackTrace();
      fail("Failed insertTest");
    }
  }
  /**
   * Inserts a row, verifies it reads back correctly, deletes it, then verifies
   * a subsequent read returns no data.
   */
  @Test
  public void insertDeleteAndReadTest() {
    Assume.assumeTrue(haveDb);
    try {
      // Create some test data
      final String insertKey = INSERT_DELETE_AND_READ_TEST_KEY;
      final Set<String> columns = getColumnNameMap();
      // Insert row
      HashMap<String, ByteIterator> insertMap = new HashMap<String, ByteIterator>();
      for (int i = 0; i < NUM_FIELDS; i++) {
        insertMap.put(FIELD_PREFIX + i, new StringByteIterator(buildDeterministicValue(insertKey, FIELD_PREFIX + i)));
      }
      voltClient.insert(TABLE_NAME, insertKey, insertMap);
      // Create a object to put retrieved row in...
      Map<String, ByteIterator> testResult = new HashMap<String, ByteIterator>();
      // Read row...
      Status s = voltClient.read(TABLE_NAME, insertKey, columns, testResult);
      if (!s.equals(Status.OK)) {
        fail("Didn't get OK on read.");
      }
      if (!compareContents(insertMap, testResult)) {
        fail("Returned data not the same as inserted data");
      }
      voltClient.delete(TABLE_NAME, insertKey);
      // Create another object to put retrieved row in...
      Map<String, ByteIterator> testResultAfterDelete = new HashMap<String, ByteIterator>();
      // Read row...
      voltClient.read(TABLE_NAME, insertKey, columns, testResultAfterDelete);
      // The deleted row must not come back.
      if (testResultAfterDelete.size() > 0) {
        fail("testResultAfterDelete has value.");
      }
    } catch (Exception e) {
      e.printStackTrace();
      fail("Failed insertDeleteAndReadTest");
    }
  }
  /**
   * Verifies that reading a key that was never inserted returns no data.
   * NOTE(review): despite the name, this test only reads — it never calls delete(); confirm intent.
   */
  @Test
  public void deleteNonExistentRecordTest() {
    Assume.assumeTrue(haveDb);
    try {
      // Create some test data
      final String insertKey = NON_EXISTENT_KEY;
      final Set<String> columns = getColumnNameMap();
      // Create a object to put retrieved row in...
      Map<String, ByteIterator> testResult = new HashMap<String, ByteIterator>();
      // Read row...
      voltClient.read(TABLE_NAME, insertKey, columns, testResult);
      if (testResult.size() > 0) {
        fail("testResult.size() > 0.");
      }
    } catch (Exception e) {
      e.printStackTrace();
      fail("Failed deleteNonExistentRecordTest");
    }
  }
@Test
public void scanReadTest() {
  Assume.assumeTrue(haveDb);
  try {
    // Insert SCAN_RECORD_COUNT rows of predictable data so the scans below have
    // known contents and known boundaries.
    for (int z = 0; z < SCAN_RECORD_COUNT; z++) {
      final String insertKey = SCAN_KEY_PREFIX + z;
      HashMap<String, ByteIterator> insertMap = new HashMap<String, ByteIterator>();
      for (int i = 0; i < NUM_FIELDS; i++) {
        insertMap.put(FIELD_PREFIX + i, new StringByteIterator("Data for " + SCAN_KEY_PREFIX + z + " element " + i));
      }
      voltClient.insert(TABLE_NAME, insertKey, insertMap);
    }
    final String firstInsertKey = SCAN_KEY_PREFIX + 0;
    final String lastInsertKey = SCAN_KEY_PREFIX + (SCAN_RECORD_COUNT - 1);
    final String beyondLastInsertKey = SCAN_KEY_PREFIX + (SCAN_RECORD_COUNT + 1);
    final String oneHundredFromEndInsertKey = SCAN_KEY_PREFIX + (SCAN_RECORD_COUNT - 101);
    // BUG FIX: was (SCAN_RECORD_COUNT - 101), a copy-paste of the line above. The
    // "50 from end" start key must leave exactly 50 records, so that a request of
    // 100 below yields the expected 50 rows ending at lastInsertKey.
    final String fiftyFromEndInsertKey = SCAN_KEY_PREFIX + (SCAN_RECORD_COUNT - 50);
    // test non existent records
    singleScanReadTest(NON_EXISTENT_KEY, 1000, 0, NON_EXISTENT_KEY);
    // test single record
    singleScanReadTest(firstInsertKey, 1, 1, firstInsertKey);
    // test scan of SCAN_RECORD_COUNT records
    singleScanReadTest(firstInsertKey, SCAN_RECORD_COUNT, SCAN_RECORD_COUNT, lastInsertKey);
    // test single record in middle
    singleScanReadTest(oneHundredFromEndInsertKey, 1, 1, oneHundredFromEndInsertKey);
    // test request of 100 starting 50 from end.
    singleScanReadTest(fiftyFromEndInsertKey, 100, 50, lastInsertKey);
    // test request of 100 starting beyond the end
    singleScanReadTest(beyondLastInsertKey, 100, 0, lastInsertKey);
  } catch (Exception e) {
    e.printStackTrace();
    fail("Failed scanReadTest");
  }
}
/**
 * Issues one scan and checks the outcome.
 *
 * @param startKey key the scan starts from
 * @param requestedCount number of records asked for in the scan call
 * @param expectedCount number of records the scan is expected to return
 * @param lastKey last key expected in the range (used for diagnostics only)
 */
private void singleScanReadTest(String startKey, int requestedCount, int expectedCount, String lastKey) {
  Assume.assumeTrue(haveDb);
  try {
    final Set<String> columns = getColumnNameMap();
    // Create a object to put retrieved rows in...
    Vector<HashMap<String, ByteIterator>> testResult = new Vector<HashMap<String, ByteIterator>>();
    // BUG FIX: the scan was issued with 'expectedCount' records, which made the
    // requested/expected distinction meaningless ('requestedCount' was never used).
    // Ask for 'requestedCount' rows, then verify 'expectedCount' rows came back.
    Status s = voltClient.scan(TABLE_NAME, startKey, requestedCount, columns, testResult);
    if (!s.equals(Status.OK)) {
      fail("Didn't get OK on read.");
    }
    if (testResult.size() != expectedCount) {
      fail("Failed singleScanReadTest " + startKey + " " + expectedCount + " " + lastKey);
    }
  } catch (Exception e) {
    e.printStackTrace();
    fail("Failed singleScanReadTest " + startKey + ". Asked for " + requestedCount + ", expected " + expectedCount
        + " lastkey=" + lastKey);
  }
}
@Test
public void updateTest() {
  Assume.assumeTrue(haveDb);
  try {
    // Insert a row of deterministic data under the update-test key.
    final String insertKey = UPDATE_TEST_KEY;
    final HashMap<String, ByteIterator> rowData = new HashMap<String, ByteIterator>();
    for (int field = 0; field < NUM_FIELDS; field++) {
      final String fieldName = FIELD_PREFIX + field;
      rowData.put(fieldName, new StringByteIterator(buildDeterministicValue(insertKey, fieldName)));
    }
    voltClient.insert(TABLE_NAME, insertKey, rowData);
    // Overwrite every field with new content, then push the change.
    for (int field = 0; field < NUM_FIELDS; field++) {
      rowData.put(FIELD_PREFIX + field, new StringByteIterator(FIELD_PREFIX + field + " has changed"));
    }
    voltClient.update(TABLE_NAME, insertKey, rowData);
    // Read the row back and verify that the update took effect.
    final Set<String> columns = getColumnNameMap();
    final Map<String, ByteIterator> testResult = new HashMap<String, ByteIterator>();
    final Status s = voltClient.read(TABLE_NAME, insertKey, columns, testResult);
    if (!s.equals(Status.OK)) {
      fail("Didn't get OK on read.");
    }
    if (!compareContents(rowData, testResult)) {
      fail("Returned data not the same as inserted data");
    }
  } catch (Exception e) {
    e.printStackTrace();
    fail("Failed updateTest");
  }
}
/**
 * Builds the set of column names ({@code field0 .. fieldN-1}) queried by the
 * read and scan calls in this test class.
 *
 * @return a freshly allocated, mutable set of column names
 */
private Set<String> getColumnNameMap() {
  final Set<String> columnNames = new HashSet<String>();
  for (int field = 0; field < NUM_FIELDS; field++) {
    columnNames.add(FIELD_PREFIX + field);
  }
  return columnNames;
}
/*
 * This is a copy of buildDeterministicValue() from
 * core:site.ycsb.workloads.CoreWorkload.java. That method is neither
 * public nor static so we need a copy.
 */
private String buildDeterministicValue(String key, String fieldkey) {
  // Seed with "key:fieldkey", then pad with ':' followed by the hash of the
  // content so far until FIELD_LENGTH characters are available, and trim.
  final StringBuilder value = new StringBuilder(FIELD_LENGTH);
  value.append(key).append(':').append(fieldkey);
  while (value.length() < FIELD_LENGTH) {
    // NOTE: the hash is computed after the ':' is appended, which must match
    // CoreWorkload exactly for the values to compare equal.
    value.append(':').append(value.toString().hashCode());
  }
  value.setLength(FIELD_LENGTH);
  return value.toString();
}
}
| |
package imj.apps;
import static imj.apps.ExtractRegions.loadLods;
import static java.util.Arrays.binarySearch;
import static multij.tools.Tools.baseName;
import static multij.tools.Tools.unchecked;
import static multij.tools.Tools.usedMemory;
import imj.Image;
import imj.apps.modules.Annotations;
import imj.apps.modules.Annotations.Annotation;
import imj.apps.modules.Annotations.Annotation.Region;
import imj.apps.modules.RegionOfInterest;
import imj.apps.modules.ShowActions.UseAnnotationAsROI;
import imj.apps.modules.Sieve;
import imj.apps.modules.SimpleSieve;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import multij.context.Context;
import multij.tools.CommandLineArgumentsParser;
import multij.tools.IllegalInstantiationException;
import multij.tools.TicToc;
/**
 * Command-line tool that generates ROC (receiver operating characteristic) data
 * for an image: a sieve computes a region of interest per level of detail, which
 * is compared pixel-by-pixel against annotated reference regions, producing one
 * row of tp/fp/tn/fn counts per class.
 *
 * @author codistmonk (creation 2013-03-13)
 */
public final class GenerateROCData {

    private GenerateROCData() {
        throw new IllegalInstantiationException();
    }

    /**
     * Entry point; reads the image id, annotations file, sieve class, sieve
     * parameters and class-id list from the command line, then generates ROC data.
     *
     * @param commandLineArguments
     * <br>Must not be null
     * @throws Exception If an error occurs
     */
    @SuppressWarnings("unchecked")
    public static final void main(final String[] commandLineArguments) throws Exception {
        final CommandLineArgumentsParser arguments = new CommandLineArgumentsParser(commandLineArguments);
        final String imageId = arguments.get("file", "");
        final int[] forceLods = arguments.get("lods");
        final Class<? extends Sieve> sieveClass =
                (Class<? extends Sieve>) Class.forName(arguments.get("sieve", SimpleSieve.class.getName()));
        final Sieve sieve = sieveClass.getConstructor(Context.class).newInstance(new Context());
        final String classIdFilePath = arguments.get("classIds", "");

        System.out.println("sieve: " + sieve);

        // Allow each sieve parameter to be overridden from the command line.
        for (final Map.Entry<String, String> entry : sieve.getParameters().entrySet()) {
            entry.setValue(arguments.get(entry.getKey(), entry.getValue()));
            System.out.println("parameter: " + entry);
        }

        final String annotationsId = arguments.get("annotations", baseName(imageId) + ".xml");

        generateROCData(imageId, annotationsId, forceLods, sieve, readClassIds(classIdFilePath), new ROCRowGenerator.Default());
    }

    /**
     * Reads one class id per line from the specified file.
     *
     * @param classIdFilePath path of the text file listing class ids
     * @return the class ids, in file order
     */
    public static final List<String> readClassIds(final String classIdFilePath) {
        final List<String> result = new ArrayList<String>();
        // BUG FIX: the Scanner was never closed, leaking the underlying file
        // handle; close it in a finally block.
        Scanner scanner = null;

        try {
            scanner = new Scanner(new File(classIdFilePath));

            while (scanner.hasNext()) {
                result.add(scanner.nextLine());
            }
        } catch (final FileNotFoundException exception) {
            throw unchecked(exception);
        } finally {
            if (scanner != null) {
                scanner.close();
            }
        }

        return result;
    }

    /**
     * Generates ROC data for every level of detail of the image (or only the
     * forced ones, when {@code forceLods} is nonempty).
     *
     * @param imageId id of the image to process
     * @param annotationsId id of the XML annotations file
     * @param forceLods lods to restrict processing to; empty means "all".
     *        NOTE(review): {@code binarySearch} assumes this array is sorted —
     *        confirm the "lods" argument is always provided in ascending order.
     * @param sieve sieve used to compute the candidate region of interest
     * @param classIds class ids, in reference-index order
     * @param generator strategy producing one ROC row per class
     */
    public static final void generateROCData(final String imageId,
            final String annotationsId, final int[] forceLods, final Sieve sieve,
            final List<String> classIds, final ROCRowGenerator generator) {
        final List<Image> lods = loadLods(imageId);
        final Annotations annotations = Annotations.fromXML(annotationsId);
        final String fileName = new File(imageId).getName();

        for (int lod = 0; lod < lods.size(); ++lod) {
            // Skip lods not explicitly requested, when a restriction was given.
            if (forceLods.length != 0 && binarySearch(forceLods, lod) < 0) {
                continue;
            }

            final TicToc timer = new TicToc();

            System.out.println("Processing lod " + lod + "... (" + new Date(timer.tic()) + ")");

            final Image image = lods.get(lod);
            final int rowCount = image.getRowCount();
            final int columnCount = image.getColumnCount();

            System.out.println("Initializing references... (" + new Date(timer.tic()) + ")");

            final List<RegionOfInterest> references = generateReferences(annotations, classIds, lod, rowCount, columnCount);

            System.out.println("Initializing reference done (time:" + timer.toc() + " memory:" + usedMemory() + ")");
            System.out.println("Generating data... (" + new Date(timer.tic()) + ")");

            sieve.initialize();
            generator.generateROCRow(fileName, image, lod, references, sieve);

            System.out.println("Generating data done (time:" + timer.toc() + " memory:" + usedMemory() + ")");
            System.out.println("Processing lod " + lod + " done (time:" + timer.getTotalTime() + " memory:" + usedMemory() + ")");
        }
    }

    /**
     * Builds one reference region of interest per class id, rasterizing the
     * annotations whose user object matches a class id. Entries for classes with
     * no matching annotation are left {@code null}.
     *
     * @param annotations source annotations
     * @param classIds class ids; a class's index is its position in this list
     * @param lod level of detail the regions are rasterized at
     * @param rowCount image height in pixels
     * @param columnCount image width in pixels
     * @return a list indexed by class, possibly containing {@code null} entries
     */
    public static final List<RegionOfInterest> generateReferences(final Annotations annotations, final List<String> classIds,
            final int lod, final int rowCount, final int columnCount) {
        final List<RegionOfInterest> result = new ArrayList<RegionOfInterest>(classIds.size());
        final Collection<Region> allRegions = UseAnnotationAsROI.collectAllRegions(annotations);

        for (final Annotation annotation : annotations.getAnnotations()) {
            final int classIndex = classIds.indexOf(annotation.getUserObject());

            // Ignore annotations whose class is not in the requested list.
            if (classIndex < 0) {
                continue;
            }

            // Grow the result list on demand so it can be indexed by class.
            while (result.size() <= classIndex) {
                result.add(null);
            }

            if (result.get(classIndex) == null) {
                result.set(classIndex, RegionOfInterest.newInstance(rowCount, columnCount));
            }

            UseAnnotationAsROI.set(result.get(classIndex), lod, annotation.getRegions(), allRegions);
        }

        return result;
    }

    /**
     * Strategy producing ROC output for one image/lod.
     *
     * @author codistmonk (creation 2013-03-13)
     */
    public static abstract interface ROCRowGenerator {

        public abstract void generateROCRow(String fileName, Image image, int lod,
                List<RegionOfInterest> references, Sieve sieve);

        /**
         * Default implementation: applies the sieve once, then prints one row of
         * tp/fp/tn/fn pixel counts per class to standard output.
         *
         * @author codistmonk (creation 2013-03-13)
         */
        public static final class Default implements ROCRowGenerator {

            @Override
            public final void generateROCRow(final String fileName, final Image image, final int lod,
                    final List<RegionOfInterest> references, final Sieve sieve) {
                final TicToc timer = new TicToc();

                // BUG FIX: log message typo, was "Appling sieve...".
                System.out.println("Applying sieve... (" + new Date(timer.tic()) + ")");

                final int rowCount = image.getRowCount();
                final int columnCount = image.getColumnCount();
                final RegionOfInterest computed = RegionOfInterest.newInstance(rowCount, columnCount);

                sieve.setROI(computed, image);

                System.out.println("Applying sieve done (time:" + timer.toc() + " memory:" + usedMemory() + ")");
                System.out.println("Collecting data... (" + new Date(timer.tic()) + ")");

                for (int classIndex = 0; classIndex < references.size(); ++classIndex) {
                    final RegionOfInterest reference = references.get(classIndex);

                    if (reference == null) {
                        continue;
                    }

                    // Confusion-matrix counts for this class.
                    int truePositives = 0;
                    int falsePositives = 0;
                    int trueNegatives = 0;
                    int falseNegatives = 0;
                    final int pixelCount = rowCount * columnCount;

                    for (int pixel = 0; pixel < pixelCount; ++pixel) {
                        if (computed.get(pixel)) {
                            if (reference.get(pixel)) {
                                ++truePositives;
                            } else {
                                ++falsePositives;
                            }
                        } else {
                            if (reference.get(pixel)) {
                                ++falseNegatives;
                            } else {
                                ++trueNegatives;
                            }
                        }
                    }

                    // Header part of the row names every column, including the
                    // sieve parameters, so the output is self-describing.
                    final StringBuilder row = new StringBuilder();

                    row.append("file_lod_class_");

                    for (final String parameterName : sieve.getParameters().keySet()) {
                        row.append(parameterName).append('_');
                    }

                    final char separator = ' ';

                    row.append("tp_fp_tn_fn: ")
                        .append(fileName).append(separator)
                        .append(lod).append(separator)
                        .append(classIndex).append(separator);

                    for (final String parameterValue : sieve.getParameters().values()) {
                        row.append(parameterValue).append(separator);
                    }

                    row
                        .append(truePositives).append(separator)
                        .append(falsePositives).append(separator)
                        .append(trueNegatives).append(separator)
                        .append(falseNegatives);

                    System.out.println(row);
                }

                System.out.println("Collecting data done (" + new Date(timer.tic()) + ")");
            }

        }

    }

}
| |
import javafx.application.Application;
import javafx.stage.Stage;
import javafx.scene.ImageCursor;
import javafx.scene.Scene;
import javafx.scene.Group;
import javafx.scene.canvas.Canvas;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.control.Alert;
import javafx.scene.control.Alert.AlertType;
import javafx.scene.image.Image;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import javafx.animation.AnimationTimer;
import javafx.scene.shape.*;
/**
 * Two-player JavaFX game. Game states (gameStatus):
 * 1 = race to Earth; 2 = blue abducted first (escape phase); 3 = red abducted
 * first (escape phase); 4 = destroyed by asteroid; 5 = escaped (victory).
 */
public class GameAsn extends Application {
    public static void main(String[] args) {
        launch(args);
    }

    public int gameStatus = 1;

    public void start(Stage theStage) {
        theStage.setTitle("AnimationTimer Example");
        Group root = new Group();
        Scene theScene = new Scene(root);
        theStage.setScene(theScene);
        Canvas canvas = new Canvas(512, 512);
        root.getChildren().add(canvas);
        GraphicsContext gc = canvas.getGraphicsContext2D();

        // Sprites. Remember to move them into the src folder!
        Image earth = new Image("earth.png");
        Image sun = new Image("sun.png");
        Image space = new Image("space.png");
        Image ufoBlue = new Image("ufo_blue.png");
        Image ufoRed = new Image("ufo_red.png");
        Image asteroid = new Image("asteroid.png");

        // Collision shapes; UFOs start near opposite corners.
        Rectangle ufoBlueData = new Rectangle(10, 10, 20, 20);
        Rectangle ufoRedData = new Rectangle(460, 460, 20, 20);
        Circle earthData = new Circle(300, 300, 22);
        Circle sunData = new Circle(260, 260, 50);
        Circle asteroid1Data = new Circle(40, 80, 22);
        Circle asteroid2Data = new Circle(80, 80, 22);
        // Only for testing
        // root.getChildren().add(earthData);
        // root.getChildren().add(sunData);

        final long startNanoTime = System.nanoTime(); // 60 times per second

        // WASD moves the blue UFO, arrow keys move the red UFO, 10px per press.
        theScene.addEventFilter(KeyEvent.ANY, keyEvent -> {
            if (keyEvent.getCode() == KeyCode.W) {
                ufoBlueData.setY(ufoBlueData.getY() - 10);
            }
            if (keyEvent.getCode() == KeyCode.A) {
                ufoBlueData.setX(ufoBlueData.getX() - 10);
            }
            if (keyEvent.getCode() == KeyCode.S) {
                ufoBlueData.setY(ufoBlueData.getY() + 10);
            }
            if (keyEvent.getCode() == KeyCode.D) {
                ufoBlueData.setX(ufoBlueData.getX() + 10);
            }
            if (keyEvent.getCode() == KeyCode.UP) {
                ufoRedData.setY(ufoRedData.getY() - 10);
            }
            if (keyEvent.getCode() == KeyCode.LEFT) {
                ufoRedData.setX(ufoRedData.getX() - 10);
            }
            if (keyEvent.getCode() == KeyCode.DOWN) {
                ufoRedData.setY(ufoRedData.getY() + 10);
            }
            if (keyEvent.getCode() == KeyCode.RIGHT) {
                ufoRedData.setX(ufoRedData.getX() + 10);
            }
        });

        new AnimationTimer() {
            public void handle(long currentNanoTime) {
                double t = (currentNanoTime - startNanoTime) / 500000000.0;

                // Orbit positions for Earth and the two asteroids.
                double earthX = 232 + 128 * Math.cos(t) * 0.7;
                double earthY = 232 + 128 * Math.sin(t) * 0.8;
                double asteroid1X = 232 + 200 * Math.cos(t) * -1;
                double asteroid2X = 232 + 200 * Math.cos(t);
                double asteroid1Y = 232 + 200 * Math.sin(t) * -1;
                double asteroid2Y = 232 + 200 * Math.sin(t);

                // Clear the canvas
                gc.clearRect(0, 0, 512, 512);
                // NOTE(review): these per-frame nudges offset the freshly computed
                // orbit positions by one pixel; presumably intentional drift — confirm.
                earthX++;
                asteroid1X++;
                asteroid2Y++;

                gc.drawImage(space, 0, 0); // Draw the background
                gc.drawImage(sun, 196, 196);
                // Earth is shown during the race; asteroids during the escape phase.
                if (gameStatus == 1) {
                    gc.drawImage(earth, earthX, earthY);
                } else if (gameStatus == 2 || gameStatus == 3) {
                    gc.drawImage(asteroid, asteroid1X, asteroid1Y);
                    gc.drawImage(asteroid, asteroid2X, asteroid2Y);
                }

                // Move the collision shapes behind the sprites.
                earthData.setCenterX(earthX + 24);
                earthData.setCenterY(earthY + 24);
                asteroid1Data.setCenterX(asteroid1X + 24);
                asteroid1Data.setCenterY(asteroid1Y + 24);
                asteroid2Data.setCenterX(asteroid2X + 24);
                asteroid2Data.setCenterY(asteroid2Y + 24);

                // BUG FIX: both conditions used '||' (always true, so both UFOs
                // were always drawn); '&&' hides a UFO once its game is over.
                if (gameStatus != 3 && gameStatus != 5)
                    gc.drawImage(ufoBlue, ufoBlueData.getX(), ufoBlueData.getY());
                if (gameStatus != 2 && gameStatus != 5)
                    gc.drawImage(ufoRed, ufoRedData.getX(), ufoRedData.getY());

                Alert alert = new Alert(AlertType.INFORMATION);
                alert.setTitle("Information Dialog");
                final String fail = "Mission failed!";

                if (gameStatus == 1) {
                    // First UFO to touch Earth triggers the escape phase.
                    if (ufoBlueData.intersects(earthData.getBoundsInLocal())) {
                        this.stop(); // Stop the timer while the dialog is up
                        gameStatus = 2;
                        alert.setHeaderText("Abduction!");
                        alert.setContentText("You captured a human and destroyed Earth, now escape the solar system!");
                        // With more time, the player who did not reach Earth first would be able to shoot down asteroids with the mouse to help the other player
                        Image cursor = new Image("reticleRed.png");
                        root.setCursor(new ImageCursor(cursor, cursor.getWidth() / 2, cursor.getHeight() / 2));
                        alert.showAndWait();
                        this.start();
                    }
                    if (ufoRedData.intersects(earthData.getBoundsInLocal())) {
                        this.stop();
                        gameStatus = 3;
                        alert.setHeaderText("Abduction!");
                        alert.setContentText("You captured a human and destroyed Earth, now escape the solar system!");
                        Image cursor = new Image("reticleBlue.png");
                        root.setCursor(new ImageCursor(cursor, cursor.getWidth() / 2, cursor.getHeight() / 2));
                        alert.showAndWait();
                        this.start();
                    }
                }

                // BUG FIX: this block only ran for gameStatus == 2, so asteroid
                // collisions and the escape check never applied when the red vessel
                // abducted first (status 3) — which is why "this stopped working for
                // the red vessel". Run it for both escape states, matching the
                // asteroid drawing condition above.
                if (gameStatus == 2 || gameStatus == 3) {
                    // BUG FIX: the blue check tested ufoRedData against asteroid2
                    // (copy-paste); both operands now test the blue vessel, as the
                    // failure message indicates.
                    if (ufoBlueData.intersects(asteroid1Data.getBoundsInLocal()) || ufoBlueData.intersects(asteroid2Data.getBoundsInLocal())) {
                        gameStatus = 4;
                        alert.setHeaderText(fail);
                        alert.setContentText("Lost contact with blue vessel\nCrew presumed dead");
                        alert.showAndWait();
                        System.exit(0);
                    }
                    if (ufoRedData.intersects(asteroid1Data.getBoundsInLocal()) || ufoRedData.intersects(asteroid2Data.getBoundsInLocal())) {
                        gameStatus = 4;
                        alert.setHeaderText(fail);
                        alert.setContentText("Lost contact with red vessel\nCrew presumed dead");
                        alert.showAndWait();
                        System.exit(0);
                    }
                    // Leaving the stage bounds counts as escaping the solar system.
                    if (!ufoBlueData.intersects(canvas.getBoundsInLocal()) && gameStatus != 5)
                    {
                        gameStatus = 5;
                        alert.setHeaderText("Victory!");
                        alert.setContentText("You escaped!");
                        alert.showAndWait();
                        System.exit(0);
                    }
                    if (!ufoRedData.intersects(canvas.getBoundsInLocal()) && gameStatus != 5)
                    {
                        gameStatus = 5;
                        alert.setHeaderText("Victory!");
                        alert.setContentText("You escaped!");
                        alert.showAndWait();
                        System.exit(0);
                    }
                }

                // Touching the Sun is always fatal, in every game state.
                if (ufoBlueData.intersects(sunData.getBoundsInLocal())) {
                    this.stop();
                    alert.setHeaderText(fail);
                    alert.setContentText("Lost contact with blue vessel\nCrew presumed dead");
                    alert.showAndWait();
                    System.exit(0);
                }
                if (ufoRedData.intersects(sunData.getBoundsInLocal())) {
                    this.stop();
                    alert.setHeaderText(fail);
                    alert.setContentText("Lost contact with red vessel\nCrew presumed dead");
                    alert.showAndWait();
                    System.exit(0);
                }
                // UFO-on-UFO collision destroys both vessels.
                if (ufoBlueData.intersects(ufoRedData.getBoundsInLocal())) {
                    this.stop();
                    alert.setHeaderText(fail);
                    alert.setContentText("Lost contact with space vessels\nCrew presumed dead");
                    alert.showAndWait();
                    System.exit(0);
                }
            }
        }.start();
        theStage.show();
    }
}
| |
package njfbrowser.misc;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
/**
 * Opens a URL in the system browser. The platform is detected once in the static
 * initializer ({@code jvm}: 0 = MRJ 2.0, 1 = MRJ 2.1, 2 = Windows NT,
 * 3 = Windows 9x, -1 = other/Unix). Mac support uses reflection so the class
 * still loads on platforms without the Apple classes.
 */
public class BrowserLauncher {
    /**
     * Resolves, via reflection, the Apple classes/methods needed on the current
     * Mac JVM. Returns false (and records {@code errorMessage}) on any failure.
     */
    private static boolean loadClasses() {
        switch (jvm) {
        default:
            break;

        case 0: // MRJ 2.0: raw AppleEvent plumbing
            try {
                Class class1 = Class.forName("com.apple.MacOS.AETarget");
                Class macOSErrorClass = Class.forName("com.apple.MacOS.MacOSError");
                Class class2 = Class.forName("com.apple.MacOS.OSUtils");
                Class class3 = Class.forName("com.apple.MacOS.AppleEvent");
                Class class4 = Class.forName("com.apple.MacOS.ae");
                Class aeDescClass = Class.forName("com.apple.MacOS.AEDesc");
                aeTargetConstructor = class1.getDeclaredConstructor(Integer.TYPE);
                appleEventConstructor = class3.getDeclaredConstructor(Integer.TYPE, Integer.TYPE, class1, Integer.TYPE, Integer.TYPE);
                aeDescConstructor = aeDescClass.getDeclaredConstructor(String.class);
                makeOSType = class2.getDeclaredMethod("makeOSType", String.class);
                putParameter = class3.getDeclaredMethod("putParameter", Integer.TYPE, aeDescClass);
                sendNoReply = class3.getDeclaredMethod("sendNoReply");
                Field field1 = class4.getDeclaredField("keyDirectObject");
                keyDirectObject = (Integer) field1.get(null);
                Field field2 = class3.getDeclaredField("kAutoGenerateReturnID");
                kAutoGenerateReturnID = (Integer) field2.get(null);
                Field field3 = class3.getDeclaredField("kAnyTransactionID");
                kAnyTransactionID = (Integer) field3.get(null);
                break;
            } catch (ClassNotFoundException classnotfoundexception) {
                errorMessage = classnotfoundexception.getMessage();
                return false;
            } catch (NoSuchMethodException nosuchmethodexception) {
                errorMessage = nosuchmethodexception.getMessage();
                return false;
            } catch (NoSuchFieldException nosuchfieldexception) {
                errorMessage = nosuchfieldexception.getMessage();
                return false;
            } catch (IllegalAccessException illegalaccessexception) {
                errorMessage = illegalaccessexception.getMessage();
            }
            // Reached only via the IllegalAccessException handler above.
            return false;

        case 1: // MRJ 2.1: MRJFileUtils helpers
            try {
                Class mrjFileUtilsClass = Class.forName("com.apple.mrj.MRJFileUtils");
                Class mrjOSTypeClass = Class.forName("com.apple.mrj.MRJOSType");
                Field field = mrjFileUtilsClass.getDeclaredField("kSystemFolderType");
                kSystemFolderType = field.get(null);
                findFolder = mrjFileUtilsClass.getDeclaredMethod("findFolder", mrjOSTypeClass);
                getFileType = mrjFileUtilsClass.getDeclaredMethod("getFileType", File.class);
                break;
            } catch (ClassNotFoundException classnotfoundexception1) {
                errorMessage = classnotfoundexception1.getMessage();
                return false;
            } catch (NoSuchFieldException nosuchfieldexception1) {
                errorMessage = nosuchfieldexception1.getMessage();
                return false;
            } catch (NoSuchMethodException nosuchmethodexception1) {
                errorMessage = nosuchmethodexception1.getMessage();
                return false;
            } catch (SecurityException securityexception) {
                errorMessage = securityexception.getMessage();
                return false;
            } catch (IllegalAccessException illegalaccessexception1) {
                errorMessage = illegalaccessexception1.getMessage();
            }
            // Reached only via the IllegalAccessException handler above.
            return false;
        }
        return true;
    }

    /** Not instantiable; all functionality is static. */
    private BrowserLauncher() {
    }

    /**
     * Opens the given URL in the platform browser.
     *
     * @param s the URL to open
     * @throws IOException if the platform was not recognized at load time, the
     *         browser could not be located, or launching it failed
     */
    public static void openURL(String s)
            throws IOException {
        if (!loadedWithoutErrors)
            throw new IOException("Exception in finding browser: " + errorMessage);
        Object obj = locateBrowser();
        if (obj == null)
            throw new IOException("Unable to locate browser: " + errorMessage);
        switch (jvm) {
        case 0: // MRJ 2.0: send a GURL AppleEvent to the Finder
            try {
                try {
                    Object obj1 = aeDescConstructor.newInstance(s);
                    putParameter.invoke(obj, keyDirectObject, obj1);
                    sendNoReply.invoke(obj);
                } catch (InvocationTargetException invocationtargetexception) {
                    throw new IOException("InvocationTargetException while creating AEDesc: " + invocationtargetexception.getMessage());
                } catch (IllegalAccessException illegalaccessexception) {
                    throw new IOException("IllegalAccessException while building AppleEvent: " + illegalaccessexception.getMessage());
                } catch (InstantiationException instantiationexception) {
                    throw new IOException("InstantiationException while creating AEDesc: " + instantiationexception.getMessage());
                }
                return;
            } finally {
                obj = null; // decompiler artifact; harmless
            }

        case 1: // MRJ 2.1: exec the Finder with the URL as argument
            Runtime.getRuntime().exec(new String[]{
                    (String) obj, s
            });
            return;

        case 2: // Windows NT
        case 3: // Windows 9x: "cmd /c start <url>" (or command.com)
            Runtime.getRuntime().exec(new String[]{
                    (String) obj, FIRST_WINDOWS_PARAMETER, SECOND_WINDOWS_PARAMETER, s
            });
            return;

        case -1: // Unix: try to reuse a running Netscape, else start a new one
            Process process = Runtime.getRuntime().exec(obj + NETSCAPE_OPEN_PARAMETER_START + s + NETSCAPE_OPEN_PARAMETER_END);
            try {
                int i = process.waitFor();
                if (i != 0) {
                    // The -remote call failed; launch a fresh browser instance.
                    Runtime.getRuntime().exec(new String[]{
                            (String) obj, s
                    });
                    return;
                }
            } catch (InterruptedException interruptedexception) {
                throw new IOException("InterruptedException while launching browser: " + interruptedexception.getMessage());
            }
            break;

        default:
            Runtime.getRuntime().exec(new String[]{
                    (String) obj, s
            });
            return;
        }
    }

    /**
     * Locates (and caches in {@code browser}) the platform-specific launcher:
     * an AppleEvent object on MRJ 2.0, the Finder path on MRJ 2.1, a shell on
     * Windows, or "netscape" elsewhere. Returns null on failure, with
     * {@code errorMessage} set.
     */
    private static Object locateBrowser() {
        if (browser != null)
            return browser;
        switch (jvm) {
        case 0: // MRJ 2.0: build a GURL AppleEvent targeting the Finder
            try {
                Integer integer = (Integer) makeOSType.invoke(null, FINDER_CREATOR);
                Object obj = aeTargetConstructor.newInstance(integer);
                Integer integer1 = (Integer) makeOSType.invoke(null, GURL_EVENT);
                Object obj1 = appleEventConstructor.newInstance(integer1, integer1, obj, kAutoGenerateReturnID, kAnyTransactionID);
                return obj1;
            } catch (IllegalAccessException illegalaccessexception) {
                browser = null;
                errorMessage = illegalaccessexception.getMessage();
                return browser;
            } catch (InstantiationException instantiationexception) {
                browser = null;
                errorMessage = instantiationexception.getMessage();
                return browser;
            } catch (InvocationTargetException invocationtargetexception) {
                browser = null;
                errorMessage = invocationtargetexception.getMessage();
                return browser;
            }

        case 1: // MRJ 2.1: scan the system folder for the Finder executable
            File file;
            try {
                file = (File) findFolder.invoke(null, kSystemFolderType);
            } catch (IllegalArgumentException illegalargumentexception) {
                browser = null;
                errorMessage = illegalargumentexception.getMessage();
                return browser;
            } catch (IllegalAccessException illegalaccessexception1) {
                browser = null;
                errorMessage = illegalaccessexception1.getMessage();
                return browser;
            } catch (InvocationTargetException invocationtargetexception1) {
                browser = null;
                errorMessage = invocationtargetexception1.getTargetException().getClass() + ": " + invocationtargetexception1.getTargetException().getMessage();
                return browser;
            }
            String as[] = file.list();
            for (int i = 0; i < as.length; i++)
                try {
                    File file1 = new File(file, as[i]);
                    if (file1.isFile()) {
                        Object obj2 = getFileType.invoke(null, file1);
                        if (FINDER_TYPE.equals(obj2.toString())) {
                            browser = file1.toString();
                            return browser;
                        }
                    }
                } catch (IllegalArgumentException illegalargumentexception1) {
                    // BUG FIX: was the self-assignment "browser = browser;"; clear
                    // the cache like every sibling handler does.
                    browser = null;
                    errorMessage = illegalargumentexception1.getMessage();
                    return null;
                } catch (IllegalAccessException illegalaccessexception2) {
                    browser = null;
                    errorMessage = illegalaccessexception2.getMessage();
                    return browser;
                } catch (InvocationTargetException invocationtargetexception2) {
                    browser = null;
                    errorMessage = invocationtargetexception2.getTargetException().getClass() + ": " + invocationtargetexception2.getTargetException().getMessage();
                    return browser;
                }
            browser = null;
            break;

        case 2: // Windows NT
            browser = "cmd.exe";
            break;

        case 3: // Windows 9x
            browser = "command.com";
            break;

        case -1:
        default:
            browser = "netscape";
            break;
        }
        return browser;
    }

    // Platform id detected in the static initializer (see constants below).
    private static int jvm;
    // Cached launcher: a String command or an AppleEvent object.
    private static Object browser;
    // False when platform detection or class loading failed.
    private static boolean loadedWithoutErrors;
    // Reflection handles for the Apple classes (Mac platforms only).
    private static Constructor aeTargetConstructor;
    private static Constructor appleEventConstructor;
    private static Constructor aeDescConstructor;
    private static Method findFolder;
    private static Method getFileType;
    private static Method makeOSType;
    private static Method putParameter;
    private static Method sendNoReply;
    private static Object kSystemFolderType;
    private static Integer keyDirectObject;
    private static Integer kAutoGenerateReturnID;
    private static Integer kAnyTransactionID;
    private static final int MRJ_2_0 = 0;
    private static final int MRJ_2_1 = 1;
    private static final int WINDOWS_NT = 2;
    private static final int WINDOWS_9x = 3;
    private static final int OTHER = -1;
    private static final String FINDER_TYPE = "FNDR";
    private static final String FINDER_CREATOR = "MACS";
    private static final String GURL_EVENT = "GURL";
    private static final String FIRST_WINDOWS_PARAMETER = "/c";
    private static final String SECOND_WINDOWS_PARAMETER = "start";
    private static final String NETSCAPE_OPEN_PARAMETER_START = " -remote 'openURL(";
    private static final String NETSCAPE_OPEN_PARAMETER_END = ")'";
    // Human-readable reason for the last failure, reported by openURL.
    private static String errorMessage;

    static {
        loadedWithoutErrors = true;
        String s = System.getProperty("os.name");
        if ("Mac OS".equals(s)) {
            // Distinguish MRJ 2.0 from 2.1+ via the mrj.version property.
            String s1 = System.getProperty("mrj.version");
            String s2 = s1.substring(0, 3);
            try {
                double d = Double.valueOf(s2).doubleValue();
                if (d == 2D)
                    jvm = 0;
                else if (d >= 2.1000000000000001D) {
                    jvm = 1;
                } else {
                    loadedWithoutErrors = false;
                    errorMessage = "Unsupported MRJ version: " + d;
                }
            } catch (NumberFormatException _ex) {
                loadedWithoutErrors = false;
                errorMessage = "Invalid MRJ version: " + s1;
            }
        } else if (s.startsWith("Windows")) {
            if (s.indexOf("9") != -1)
                jvm = 3;
            else
                jvm = 2;
        } else {
            jvm = -1;
        }
        if (loadedWithoutErrors)
            loadedWithoutErrors = loadClasses();
    }
}
| |
/*
* Copyright 2013 Anton Karmanov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.antkar.syn.internal.bnf;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.antkar.syn.TokenDescriptor;
import org.antkar.syn.internal.Checks;
import org.antkar.syn.internal.CommonUtil;
/**
 * BNF grammar: an immutable container for the start nonterminals, all reachable
 * nonterminals, all token descriptors, and the combined list of grammar elements,
 * with element/nonterminal indices verified to match list positions.
 */
public final class BnfGrammar {
    private final List<BnfNonterminal> startNonterminals;
    private final List<BnfNonterminal> nonterminals;
    private final List<TokenDescriptor> tokens;
    private final List<BnfElement> elements;

    /**
     * Constructs a BNF grammar.
     *
     * @param startNonterminals the list of start nonterminals. Must contain at least one element.
     * @param nonterminalsCol the list of all nonterminals used in the grammar.
     * @param terminalsCol the list of all terminals used in the grammar.
     */
    public BnfGrammar(
            List<BnfNonterminal> startNonterminals,
            Collection<BnfNonterminal> nonterminalsCol,
            Collection<BnfTerminal> terminalsCol)
    {
        Checks.notNull(startNonterminals);

        this.startNonterminals = CommonUtil.unmodifiableListCopy(startNonterminals);
        this.elements = calcElementsList(nonterminalsCol, terminalsCol);
        // Must run after 'elements' is set: it checks element indices against that list.
        verifyElementIndicies();
        nonterminals = calcNonterminals(nonterminalsCol);
        tokens = calcTokens(terminalsCol);
    }

    /**
     * Returns start nonterminals of this grammar.
     */
    public List<BnfNonterminal> getStartNonterminals() {
        return startNonterminals;
    }

    /**
     * Returns the list of all nonterminals of this grammar.
     */
    public List<BnfNonterminal> getNonterminals() {
        return nonterminals;
    }

    /**
     * Returns the list of all tokens defined in this grammar.
     */
    public List<TokenDescriptor> getTokens() {
        return tokens;
    }

    /**
     * Returns the list of all grammar elements of this grammar.
     */
    public List<BnfElement> getElements() {
        return elements;
    }

    /**
     * Prints this grammar to the specified print stream. For debug purposes.
     */
    void print(PrintStream out) {
        for (BnfNonterminal nt : nonterminals) {
            nt.print(out);
        }
    }

    /**
     * Verifies whether element indices are correct.
     */
    private void verifyElementIndicies() {
        //Begin with start nonterminals, then go through all elements reachable from them.
        //This approach is more reliable than iterating over the collection of nonterminals passed to the
        //constructor, because it will detect a problem if a nonterminal is used in a grammar rule, but
        //is not included into the collection.
        Set<BnfNonterminal> ntSet = new HashSet<>(startNonterminals);
        List<BnfNonterminal> ntList = new ArrayList<>(startNonterminals);

        //Go through all nonterminals in the list. The list grows while it is being
        //iterated (breadth-first traversal), so size() is re-read each step.
        for (int pos = 0; pos < ntList.size(); ++pos) {
            BnfNonterminal nonterminal = ntList.get(pos);

            //Go through all elements used in this nonterminal's productions.
            for (BnfProduction production : nonterminal.getProductions()) {
                for (BnfElement element : production.getElements()) {
                    //This method may add other nonterminals to the list.
                    verifyElementIndex(ntList, ntSet, element);
                }
            }
        }
    }

    /**
     * Verifies that the index of the specified element is correct. If the element is a nonterminal, it is
     * added to the list of nonterminals (unless already seen).
     */
    private void verifyElementIndex(
            List<BnfNonterminal> ntList,
            Set<BnfNonterminal> ntSet,
            BnfElement element)
    {
        //Verify the index: the element must sit at its own index in 'elements'.
        int elIndex = element.getElementIndex();
        Checks.state(element == elements.get(elIndex));

        if (element instanceof BnfNonterminal) {
            //Add the nonterminal to the list.
            BnfNonterminal subNonterminal = (BnfNonterminal) element;
            if (!ntSet.contains(subNonterminal)) {
                ntSet.add(subNonterminal);
                ntList.add(subNonterminal);
            }
        }
    }

    /**
     * Creates a combined list of grammar elements from terminal and nonterminal collections.
     * Verifies indices.
     */
    private static List<BnfElement> calcElementsList(
            Collection<BnfNonterminal> nts,
            Collection<BnfTerminal> trs)
    {
        //Create a combined list.
        List<BnfElement> elList = new ArrayList<>(nts.size() + trs.size());
        elList.addAll(nts);
        elList.addAll(trs);

        //Sort the list according to elements' indices.
        Collections.sort(elList, ELEMENT_COMPARATOR);

        //Check that indices correspond to positions in the list.
        for (int i = 0, n = elList.size(); i < n; ++i) {
            int index = elList.get(i).getElementIndex();
            if (index != i) {
                throw new IllegalStateException("Invalid element index: " + i + ", " + index);
            }
        }

        return Collections.unmodifiableList(elList);
    }

    /**
     * Sorts a list of nonterminals and verifies their indices.
     */
    private static void sortNonterminals(List<BnfNonterminal> nonterminals) {
        Collections.sort(nonterminals, NONTERMINAL_COMPARATOR);
        for (int i = 0, n = nonterminals.size(); i < n; ++i) {
            int index = nonterminals.get(i).getIndex();
            if (index != i) {
                throw new IllegalStateException("Invalid nonterminal index: " + i + ", " + index);
            }
        }
    }

    /**
     * Creates an ordered list of nonterminals. Verifies indices.
     */
    private static List<BnfNonterminal> calcNonterminals(Collection<BnfNonterminal> nonterminalsCol) {
        List<BnfNonterminal> tmpNonterminals = new ArrayList<>(nonterminalsCol);
        sortNonterminals(tmpNonterminals);
        return Collections.unmodifiableList(tmpNonterminals);
    }

    /**
     * Creates an ordered list of tokens.
     */
    private static List<TokenDescriptor> calcTokens(Collection<BnfTerminal> terminals) {
        List<BnfTerminal> tmpTerminals = new ArrayList<>(terminals);

        //Sort terminals by indices in order to make the behavior definite.
        Collections.sort(tmpTerminals, ELEMENT_COMPARATOR);

        //Create the list of token descriptors, in terminal-index order.
        List<TokenDescriptor> tokens = new ArrayList<>();
        for (BnfTerminal terminal : tmpTerminals) {
            TokenDescriptor tokenDescriptor = terminal.getTokenDescriptor();
            tokens.add(tokenDescriptor);
        }

        List<TokenDescriptor> result = Collections.unmodifiableList(tokens);
        return result;
    }

    /**
     * Element comparator. Compares grammar elements by their element indices.
     */
    private static final Comparator<BnfElement> ELEMENT_COMPARATOR =
            new Comparator<BnfElement>()
    {
        @Override
        public int compare(BnfElement o1, BnfElement o2) {
            return o1.getElementIndex() - o2.getElementIndex();
        }
    };

    /**
     * Nonterminal comparator. Compares nonterminal indices.
     */
    private static final Comparator<BnfNonterminal> NONTERMINAL_COMPARATOR =
            new Comparator<BnfNonterminal>()
    {
        @Override
        public int compare(BnfNonterminal o1, BnfNonterminal o2) {
            return o1.getIndex() - o2.getIndex();
        }
    };
}
| |
/*
* Copyright 2016 "Henry Tao <hi@henrytao.me>"
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.henrytao.downloadmanager;
import android.net.Uri;
import android.support.annotation.NonNull;
import java.util.Objects;
import java.util.UUID;
import me.henrytao.downloadmanager.internal.Precondition;
/**
* Created by henrytao on 12/12/16.
*/
/**
 * Immutable description of a download: the source {@link Uri}, destination and temporary
 * locations, a retry budget and optional user-facing metadata. Instances are created with
 * {@link Builder} and submitted via {@link #enqueue()}.
 * <p>
 * Created by henrytao on 12/12/16.
 */
public class Request {

  private static final int DEFAULT_RETRY = 30;

  private final String mDescription;

  private final Uri mDestUri;

  private final int mRetry;

  private final String mTag;

  private final Uri mTempUri;

  private final String mTitle;

  private final Uri mUri;

  // Id assigned by the DownloadManager on enqueue; -1 while not yet enqueued.
  private long mId;

  private Request(Uri uri, String tag, String title, String description, Uri destUri, Uri tempUri, int retry) {
    mId = -1;
    mTag = tag;
    mUri = uri;
    mTitle = title;
    mDescription = description;
    mDestUri = destUri;
    mTempUri = tempUri;
    mRetry = retry;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    Request request = (Request) o;
    // Objects.equals covers the nullable title/description fields; the remaining fields
    // go through Precondition checks in Builder.build() before construction.
    return mRetry == request.mRetry
        && mId == request.mId
        && Objects.equals(mDescription, request.mDescription)
        && mDestUri.equals(request.mDestUri)
        && mTag.equals(request.mTag)
        && mTempUri.equals(request.mTempUri)
        && Objects.equals(mTitle, request.mTitle)
        && mUri.equals(request.mUri);
  }

  @Override
  public int hashCode() {
    // Same 31-based accumulation (and therefore the same hash values) as the previous
    // hand-rolled version; Objects.hashCode(null) is 0, matching the old ternaries.
    int result = Objects.hashCode(mDescription);
    result = 31 * result + mDestUri.hashCode();
    result = 31 * result + mRetry;
    result = 31 * result + mTag.hashCode();
    result = 31 * result + mTempUri.hashCode();
    result = 31 * result + Objects.hashCode(mTitle);
    result = 31 * result + mUri.hashCode();
    result = 31 * result + (int) (mId ^ (mId >>> 32));
    return result;
  }

  /** Submits this request to the {@link DownloadManager} and returns the assigned id. */
  public long enqueue() {
    return DownloadManager.getInstance().enqueue(this);
  }

  /** Optional user-facing description; may be null. */
  public String getDescription() {
    return mDescription;
  }

  /** Final location for the downloaded file. */
  public Uri getDestUri() {
    return mDestUri;
  }

  /** Id assigned on enqueue, or -1 if this request was never enqueued. */
  public long getId() {
    return mId;
  }

  // Package-private on purpose: only the download manager machinery assigns ids.
  void setId(long id) {
    mId = id;
  }

  /** Retry budget for this download. */
  public int getRetry() {
    return mRetry;
  }

  /** Caller-supplied tag; never null (Builder defaults it to the empty string). */
  public String getTag() {
    return mTag;
  }

  /** Location used while the download is in progress. */
  public Uri getTempUri() {
    return mTempUri;
  }

  /** Optional user-facing title; may be null. */
  public String getTitle() {
    return mTitle;
  }

  /** Source to download from. */
  public Uri getUri() {
    return mUri;
  }

  /** Whether this request has been handed an id (i.e. was enqueued). */
  public boolean isEnqueued() {
    return mId >= 0;
  }

  /**
   * Fluent builder for {@link Request}. Destination and temporary locations may be given
   * either as complete URIs or as a directory ({@code path}) plus file name; a missing
   * destination file name defaults to the last segment of the source URI and a missing
   * temporary file name defaults to a random UUID.
   */
  public static final class Builder {

    private final Uri mUri;

    private String mDescription;

    private String mDestFilename;

    private Uri mDestPath;

    private Uri mDestUri;

    private int mRetry;

    private String mTag;

    private String mTempFilename;

    private Uri mTempPath;

    private Uri mTempUri;

    private String mTitle;

    public Builder(@NonNull Uri uri) {
      mUri = uri;
      mRetry = DEFAULT_RETRY;
    }

    /**
     * Validates the configured values and creates the immutable {@link Request}.
     * Missing required values are rejected by the Precondition checks below.
     */
    public Request build() {
      mDestFilename = Precondition.checkNotEmpty(mDestFilename, mUri.getLastPathSegment());
      mTempFilename = Precondition.checkNotEmpty(mTempFilename, UUID.randomUUID().toString());
      return new Request(
          Precondition.checkNotNull(mUri),
          Precondition.checkNotEmpty(mTag, ""),
          mTitle,
          mDescription,
          Precondition.checkNotNull(mDestUri, Uri.withAppendedPath(Precondition.checkNotNull(mDestPath), mDestFilename)),
          Precondition.checkNotNull(mTempUri, Uri.withAppendedPath(Precondition.checkNotNull(mTempPath), mTempFilename)),
          mRetry
      );
    }

    public Builder setDescription(String description) {
      mDescription = description;
      return this;
    }

    public Builder setDestFilename(String destFilename) {
      mDestFilename = destFilename;
      return this;
    }

    public Builder setDestPath(Uri destPath) {
      mDestPath = destPath;
      return this;
    }

    public Builder setDestUri(Uri destUri) {
      mDestUri = destUri;
      return this;
    }

    public Builder setRetry(int count) {
      mRetry = count;
      return this;
    }

    public Builder setTag(String tag) {
      mTag = tag;
      return this;
    }

    public Builder setTempFilename(String tempFilename) {
      mTempFilename = tempFilename;
      return this;
    }

    public Builder setTempPath(Uri tempPath) {
      mTempPath = tempPath;
      return this;
    }

    public Builder setTempUri(Uri tempUri) {
      mTempUri = tempUri;
      return this;
    }

    public Builder setTitle(String title) {
      mTitle = title;
      return this;
    }
  }
}
| |
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android.toolchain.ndk.impl;
import static org.junit.Assert.assertFalse;
import com.facebook.buck.android.AndroidBuckConfig;
import com.facebook.buck.android.relinker.Symbols;
import com.facebook.buck.android.toolchain.ndk.AndroidNdk;
import com.facebook.buck.android.toolchain.ndk.NdkCxxPlatform;
import com.facebook.buck.android.toolchain.ndk.NdkCxxPlatformCompiler;
import com.facebook.buck.config.FakeBuckConfig;
import com.facebook.buck.cxx.toolchain.CxxPlatformUtils;
import com.facebook.buck.io.ExecutableFinder;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.log.Logger;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.rules.keys.config.TestRuleKeyConfigurationFactory;
import com.facebook.buck.testutil.TestConsole;
import com.facebook.buck.testutil.integration.ZipInspector;
import com.facebook.buck.toolchain.ToolchainCreationContext;
import com.facebook.buck.toolchain.impl.ToolchainProviderBuilder;
import com.facebook.buck.util.DefaultProcessExecutor;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.ProcessExecutor;
import com.facebook.buck.util.environment.Platform;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Optional;
import java.util.zip.ZipFile;
import org.tukaani.xz.XZInputStream;
/**
 * Test-only helpers for detecting an Android NDK toolchain and for extracting and
 * inspecting native-library symbols from built APKs.
 */
public class AndroidNdkHelper {

  private static final Logger LOG = Logger.get(AndroidNdkHelper.class);

  // Static utility holder; never instantiated.
  private AndroidNdkHelper() {}

  /** Android buck config backed by an empty FakeBuckConfig for the detected host platform. */
  public static final AndroidBuckConfig DEFAULT_CONFIG =
      new AndroidBuckConfig(FakeBuckConfig.builder().build(), Platform.detect());

  /**
   * Tries to build an {@link AndroidNdk} toolchain from the current process environment.
   * Detection failure is logged as a warning and reported as an empty Optional.
   */
  public static Optional<AndroidNdk> detectAndroidNdk(ProjectFilesystem filesystem) {
    Optional<AndroidNdk> androidNdk;
    try {
      androidNdk =
          new AndroidNdkFactory()
              .createToolchain(
                  new ToolchainProviderBuilder().build(),
                  ToolchainCreationContext.of(
                      ImmutableMap.copyOf(System.getenv()),
                      FakeBuckConfig.builder().build(),
                      filesystem,
                      new DefaultProcessExecutor(new TestConsole()),
                      new ExecutableFinder(),
                      TestRuleKeyConfigurationFactory.create()));
    } catch (HumanReadableException e) {
      LOG.warn(e, "Cannot detect Android NDK");
      androidNdk = Optional.empty();
    }
    return androidNdk;
  }

  /**
   * Returns the first NDK C++ platform configured for the detected NDK.
   *
   * NOTE(review): both Optional.get() calls below are unchecked and will throw
   * NoSuchElementException when no NDK (or no version file) is present; the test
   * environment is assumed to provide one — confirm before reusing elsewhere.
   */
  public static NdkCxxPlatform getNdkCxxPlatform(ProjectFilesystem filesystem)
      throws IOException, InterruptedException {
    // TODO(cjhopman): is this really the simplest way to get the objdump tool?
    Optional<AndroidNdk> androidNdk = detectAndroidNdk(filesystem);
    Path ndkPath = androidNdk.get().getNdkRootPath();
    String ndkVersion = AndroidNdkResolver.findNdkVersionFromDirectory(ndkPath).get();
    String gccVersion = NdkCxxPlatforms.getDefaultGccVersionForNdk(ndkVersion);
    ImmutableCollection<NdkCxxPlatform> platforms =
        NdkCxxPlatforms.getPlatforms(
            CxxPlatformUtils.DEFAULT_CONFIG,
            AndroidNdkHelper.DEFAULT_CONFIG,
            filesystem,
            ndkPath,
            NdkCxxPlatformCompiler.builder()
                .setType(NdkCxxPlatforms.DEFAULT_COMPILER_TYPE)
                .setVersion(gccVersion)
                .setGccVersion(gccVersion)
                .build(),
            NdkCxxPlatforms.DEFAULT_CXX_RUNTIME,
            NdkCxxPlatforms.DEFAULT_TARGET_APP_PLATFORM,
            NdkCxxPlatforms.DEFAULT_CPU_ABIS,
            Platform.detect())
            .values();
    assertFalse(platforms.isEmpty());
    return platforms.iterator().next();
  }

  /**
   * Extracts the single named entry from a zip archive into {@code tmpDir} and returns
   * the extracted file's path.
   *
   * NOTE(review): ZipFile.getEntry returns null for a missing entry, which would surface
   * here as a NullPointerException; callers pre-check existence via ZipInspector.
   */
  private static Path unzip(Path tmpDir, Path zipPath, String name) throws IOException {
    File nameFile = new File(name);
    Path outPath = tmpDir.resolve(zipPath.getFileName() + "_" + nameFile.getName());
    try (ZipFile zipFile = new ZipFile(zipPath.toFile())) {
      Files.copy(
          zipFile.getInputStream(zipFile.getEntry(name)),
          outPath,
          StandardCopyOption.REPLACE_EXISTING);
      return outPath;
    }
  }

  /** Extracts native libraries from APKs and reads their symbol tables via objdump. */
  public static class SymbolGetter {

    private final ProcessExecutor executor;
    private final Path tmpDir;
    private final Tool objdump;
    private final SourcePathResolver resolver;

    public SymbolGetter(
        ProcessExecutor executor, Path tmpDir, Tool objdump, SourcePathResolver resolver) {
      this.executor = executor;
      this.tmpDir = tmpDir;
      this.objdump = objdump;
      this.resolver = resolver;
    }

    // Asserts the library exists inside the APK, then extracts it into the temp dir.
    private Path unpack(Path apkPath, String libName) throws IOException {
      new ZipInspector(apkPath).assertFileExists(libName);
      return unzip(tmpDir, apkPath, libName);
    }

    // Skips exactly `bytes` bytes of the stream by reading them into a scratch buffer.
    // NOTE(review): a single InputStream.read() may legitimately return fewer bytes than
    // requested even before EOF; a read loop would be more robust — confirm stream type.
    private void advanceStream(InputStream stream, int bytes) throws IOException {
      byte[] buf = new byte[bytes];
      int read = stream.read(buf, 0, bytes);
      if (read != bytes) {
        throw new IOException("unable to read " + bytes + " bytes");
      }
    }

    /** Extracts {@code libName} from the APK and returns its dynamic symbol table. */
    public Symbols getDynamicSymbols(Path apkPath, String libName)
        throws IOException, InterruptedException {
      Path lib = unpack(apkPath, libName);
      return Symbols.getDynamicSymbols(executor, objdump, resolver, lib);
    }

    /** Extracts {@code libName} from the APK and returns its regular symbol table. */
    public Symbols getNormalSymbols(Path apkPath, String libName)
        throws IOException, InterruptedException {
      Path lib = unpack(apkPath, libName);
      return Symbols.getNormalSymbols(executor, objdump, resolver, lib);
    }

    /** Reads the dynamic symbol table directly from a shared object on disk. */
    public Symbols getDynamicSymbolsFromFile(Path sharedObject)
        throws IOException, InterruptedException {
      return Symbols.getDynamicSymbols(executor, objdump, resolver, sharedObject);
    }

    /** Reads the regular symbol table directly from a shared object on disk. */
    public Symbols getNormalSymbolsFromFile(Path sharedObject)
        throws IOException, InterruptedException {
      return Symbols.getNormalSymbols(executor, objdump, resolver, sharedObject);
    }

    /**
     * Extracts {@code libName} from an xz-compressed multi-library bundle inside the APK.
     * The metadata entry lists one "&lt;path&gt; &lt;size&gt;" line per library; the
     * decompressed stream is advanced past preceding libraries until the requested one is
     * reached, which is then written to a temp file and inspected.
     */
    public Symbols getXzsSymbols(Path apkPath, String libName, String xzsName, String metadataName)
        throws IOException, InterruptedException {
      Path xzs = unpack(apkPath, xzsName);
      Path metadata = unpack(apkPath, metadataName);
      Path lib = tmpDir.resolve(libName);
      try (BufferedReader metadataReader = new BufferedReader(new FileReader(metadata.toFile()))) {
        try (XZInputStream xzInput =
            new XZInputStream(new FileInputStream(xzs.toFile()), -1, false)) {
          String line = metadataReader.readLine();
          while (line != null) {
            String[] tokens = line.split(" ");
            File metadataFile = new File(tokens[0]);
            if (metadataFile.getName().equals(libName)) {
              break;
            }
            // Not the library we want: skip its compressed payload in the stream.
            advanceStream(xzInput, Integer.parseInt(tokens[1]));
            line = metadataReader.readLine();
          }
          Files.copy(xzInput, lib, StandardCopyOption.REPLACE_EXISTING);
        }
      }
      return Symbols.getDynamicSymbols(executor, objdump, resolver, lib);
    }

    /** Returns both the dynamic symbols and the DT_NEEDED entries of the library. */
    public SymbolsAndDtNeeded getSymbolsAndDtNeeded(Path apkPath, String libName)
        throws IOException, InterruptedException {
      Path lib = unpack(apkPath, libName);
      Symbols symbols = Symbols.getDynamicSymbols(executor, objdump, resolver, lib);
      ImmutableSet<String> dtNeeded = Symbols.getDtNeeded(executor, objdump, resolver, lib);
      return new SymbolsAndDtNeeded(symbols, dtNeeded);
    }
  }

  /** Value pair of a library's symbols and its DT_NEEDED dependency names. */
  public static class SymbolsAndDtNeeded {

    public final Symbols symbols;
    public final ImmutableSet<String> dtNeeded;

    private SymbolsAndDtNeeded(Symbols symbols, ImmutableSet<String> dtNeeded) {
      this.symbols = symbols;
      this.dtNeeded = dtNeeded;
    }
  }
}
| |
/*
* Druid - a distributed column store.
* Copyright 2012 - 2015 Metamarkets Group Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.druid.query;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ListenableFuture;
import com.metamx.common.UOE;
import com.metamx.common.guava.Sequence;
import io.druid.granularity.QueryGranularity;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;
import io.druid.query.aggregation.JavaScriptAggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory;
import io.druid.query.aggregation.hyperloglog.HyperUniqueFinalizingPostAggregator;
import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import io.druid.query.aggregation.post.ArithmeticPostAggregator;
import io.druid.query.aggregation.post.ConstantPostAggregator;
import io.druid.query.aggregation.post.FieldAccessPostAggregator;
import io.druid.query.spec.MultipleIntervalSegmentSpec;
import io.druid.query.spec.QuerySegmentSpec;
import io.druid.segment.IncrementalIndexSegment;
import io.druid.segment.QueryableIndex;
import io.druid.segment.QueryableIndexSegment;
import io.druid.segment.Segment;
import io.druid.segment.TestIndex;
import io.druid.segment.incremental.IncrementalIndex;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
 * Shared fixtures and factory helpers for Druid query-runner unit tests: canonical
 * aggregators, post-aggregators, segment specs and expected values over the test index.
 */
public class QueryRunnerTestHelper
{
  /** Query watcher that ignores every registered query. */
  public static final QueryWatcher NOOP_QUERYWATCHER = new QueryWatcher()
  {
    @Override
    public void registerQuery(Query query, ListenableFuture future)
    {
    }
  };
  public static final String segmentId = "testSegment";
  public static final String dataSource = "testing";
  // Union of four references to the same test table.
  public static final UnionDataSource unionDataSource = new UnionDataSource(
      Lists.transform(
          Lists.newArrayList(dataSource, dataSource, dataSource, dataSource), new Function<String, TableDataSource>()
          {
            @Nullable
            @Override
            public TableDataSource apply(@Nullable String input)
            {
              return new TableDataSource(input);
            }
          }
      )
  );
  public static final QueryGranularity dayGran = QueryGranularity.DAY;
  public static final QueryGranularity allGran = QueryGranularity.ALL;
  // Dimension and metric names used throughout the test index.
  public static final String marketDimension = "market";
  public static final String qualityDimension = "quality";
  public static final String placementDimension = "placement";
  public static final String placementishDimension = "placementish";
  public static final String indexMetric = "index";
  public static final String uniqueMetric = "uniques";
  public static final String addRowsIndexConstantMetric = "addRowsIndexConstant";
  // NOTE(review): non-final public static field — mutable by any test; confirm intentional.
  public static String dependentPostAggMetric = "dependentPostAgg";
  public static final CountAggregatorFactory rowsCount = new CountAggregatorFactory("rows");
  public static final LongSumAggregatorFactory indexLongSum = new LongSumAggregatorFactory("index", "index");
  public static final LongSumAggregatorFactory __timeLongSum = new LongSumAggregatorFactory("sumtime", "__time");
  public static final DoubleSumAggregatorFactory indexDoubleSum = new DoubleSumAggregatorFactory("index", "index");
  // Shared JavaScript snippets for the JS aggregators below.
  public static final String JS_COMBINE_A_PLUS_B = "function combine(a, b) { return a + b; }";
  public static final String JS_RESET_0 = "function reset() { return 0; }";
  public static final JavaScriptAggregatorFactory jsIndexSumIfPlacementishA = new JavaScriptAggregatorFactory(
      "nindex",
      Arrays.asList("placementish", "index"),
      "function aggregate(current, a, b) { if ((Array.isArray(a) && a.indexOf('a') > -1) || a === 'a') { return current + b; } else { return current; } }",
      JS_RESET_0,
      JS_COMBINE_A_PLUS_B
  );
  public static final JavaScriptAggregatorFactory jsCountIfTimeGreaterThan = new JavaScriptAggregatorFactory(
      "ntimestamps",
      Arrays.asList("__time"),
      "function aggregate(current, t) { if (t > " +
      new DateTime("2011-04-01T12:00:00Z").getMillis() +
      ") { return current + 1; } else { return current; } }",
      JS_RESET_0,
      JS_COMBINE_A_PLUS_B
  );
  public static final JavaScriptAggregatorFactory jsPlacementishCount = new JavaScriptAggregatorFactory(
      "pishcount",
      Arrays.asList("placementish", "index"),
      "function aggregate(current, a) { if (Array.isArray(a)) { return current + a.length; } else if (typeof a === 'string') { return current + 1; } else { return current; } }",
      JS_RESET_0,
      JS_COMBINE_A_PLUS_B
  );
  public static final HyperUniquesAggregatorFactory qualityUniques = new HyperUniquesAggregatorFactory(
      "uniques",
      "quality_uniques"
  );
  public static final CardinalityAggregatorFactory qualityCardinality = new CardinalityAggregatorFactory(
      "cardinality",
      Arrays.asList("quality"),
      false
  );
  public static final ConstantPostAggregator constant = new ConstantPostAggregator("const", 1L);
  public static final FieldAccessPostAggregator rowsPostAgg = new FieldAccessPostAggregator("rows", "rows");
  public static final FieldAccessPostAggregator indexPostAgg = new FieldAccessPostAggregator("index", "index");
  public static final ArithmeticPostAggregator addRowsIndexConstant =
      new ArithmeticPostAggregator(
          addRowsIndexConstantMetric, "+", Lists.newArrayList(constant, rowsPostAgg, indexPostAgg)
      );
  // dependent on the addRowsIndexConstant postAgg above
  public static final ArithmeticPostAggregator dependentPostAgg = new ArithmeticPostAggregator(
      dependentPostAggMetric,
      "+",
      Lists.newArrayList(
          constant,
          new FieldAccessPostAggregator(addRowsIndexConstantMetric, addRowsIndexConstantMetric),
          new FieldAccessPostAggregator("rows", "rows")
      )
  );
  public static final String hyperUniqueFinalizingPostAggMetric = "hyperUniqueFinalizingPostAggMetric";
  public static ArithmeticPostAggregator hyperUniqueFinalizingPostAgg = new ArithmeticPostAggregator(
      hyperUniqueFinalizingPostAggMetric,
      "+",
      Lists.newArrayList(new HyperUniqueFinalizingPostAggregator(uniqueMetric, uniqueMetric), new ConstantPostAggregator(null, 1))
  );
  public static final List<AggregatorFactory> commonAggregators = Arrays.asList(
      rowsCount,
      indexDoubleSum,
      qualityUniques
  );
  // Expected HyperLogLog cardinality estimates for the test data.
  public static final double UNIQUES_9 = 9.019833517963864;
  public static final double UNIQUES_2 = 2.000977198748901d;
  public static final double UNIQUES_1 = 1.0002442201269182d;
  // Expected per-day "index" sums over the full test interval.
  public static final String[] expectedFullOnIndexValues = new String[]{
      "4500.0", "6077.949111938477", "4922.488838195801", "5726.140853881836", "4698.468170166016",
      "4651.030891418457", "4398.145851135254", "4596.068244934082", "4434.630561828613", "0.0",
      "6162.801361083984", "5590.292701721191", "4994.298484802246", "5179.679672241211", "6288.556800842285",
      "6025.663551330566", "5772.855537414551", "5346.517524719238", "5497.331253051758", "5909.684387207031",
      "5862.711364746094", "5958.373008728027", "5224.882194519043", "5456.789611816406", "5456.095397949219",
      "4642.481948852539", "5023.572692871094", "5155.821723937988", "5350.3723220825195", "5236.997489929199",
      "4910.097717285156", "4507.608840942383", "4659.80500793457", "5354.878845214844", "4945.796455383301",
      "6459.080368041992", "4390.493583679199", "6545.758262634277", "6922.801231384277", "6023.452911376953",
      "6812.107475280762", "6368.713348388672", "6381.748748779297", "5631.245086669922", "4976.192253112793",
      "6541.463027954102", "5983.8513107299805", "5967.189498901367", "5567.139289855957", "4863.5944747924805",
      "4681.164360046387", "6122.321441650391", "5410.308860778809", "4846.676376342773", "5333.872688293457",
      "5013.053741455078", "4836.85563659668", "5264.486434936523", "4581.821243286133", "4680.233596801758",
      "4771.363662719727", "5038.354717254639", "4816.808464050293", "4684.095504760742", "5023.663467407227",
      "5889.72257232666", "4984.973915100098", "5664.220512390137", "5572.653915405273", "5537.123138427734",
      "5980.422874450684", "6243.834693908691", "5372.147285461426", "5690.728981018066", "5827.796455383301",
      "6141.0769119262695", "6082.3237228393555", "5678.771339416504", "6814.467971801758", "6626.151596069336",
      "5833.2095947265625", "4679.222328186035", "5367.9403076171875", "5410.445640563965", "5689.197135925293",
      "5240.5018310546875", "4790.912239074707", "4992.670921325684", "4796.888023376465", "5479.439590454102",
      "5506.567192077637", "4743.144546508789", "4913.282669067383", "4723.869743347168"
  };
  // The test data contains no rows for this day.
  public static final DateTime skippedDay = new DateTime("2011-01-21T00:00:00.000Z");
  // Canonical query intervals used across tests.
  public static final QuerySegmentSpec firstToThird = new MultipleIntervalSegmentSpec(
      Arrays.asList(new Interval("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z"))
  );
  public static final QuerySegmentSpec secondOnly = new MultipleIntervalSegmentSpec(
      Arrays.asList(new Interval("2011-04-02T00:00:00.000Z/P1D"))
  );
  public static final QuerySegmentSpec fullOnInterval = new MultipleIntervalSegmentSpec(
      Arrays.asList(new Interval("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z"))
  );
  public static final QuerySegmentSpec emptyInterval = new MultipleIntervalSegmentSpec(
      Arrays.asList(new Interval("2020-04-02T00:00:00.000Z/P1D"))
  );
  /**
   * Wraps each element of {@code in} into a single-element Object[] so the iterable can
   * feed a JUnit parameterized-test constructor. The transformation is lazy.
   */
  public static Iterable<Object[]> transformToConstructionFeeder(Iterable<?> in)
  {
    return Iterables.transform(
        in, new Function<Object, Object[]>()
        {
          @Nullable
          @Override
          public Object[] apply(@Nullable Object input)
          {
            return new Object[]{input};
          }
        }
    );
  }
  /**
   * Builds one query runner per standard test segment: on-heap realtime, mmapped,
   * merged realtime, and off-heap realtime.
   *
   * @param factory the factory producing the underlying runners
   * @return one decorated runner per test segment
   */
  public static <T, QueryType extends Query<T>> List<QueryRunner<T>> makeQueryRunners(
      QueryRunnerFactory<T, QueryType> factory
  )
      throws IOException
  {
    final IncrementalIndex rtIndex = TestIndex.getIncrementalTestIndex(false);
    final QueryableIndex mMappedTestIndex = TestIndex.getMMappedTestIndex();
    final QueryableIndex mergedRealtimeIndex = TestIndex.mergedRealtimeIndex();
    final IncrementalIndex rtIndexOffheap = TestIndex.getIncrementalTestIndex(true);
    return ImmutableList.of(
        makeQueryRunner(factory, new IncrementalIndexSegment(rtIndex, segmentId)),
        makeQueryRunner(factory, new QueryableIndexSegment(segmentId, mMappedTestIndex)),
        makeQueryRunner(factory, new QueryableIndexSegment(segmentId, mergedRealtimeIndex)),
        makeQueryRunner(factory, new IncrementalIndexSegment(rtIndexOffheap, segmentId))
    );
  }
  /**
   * Builds union query runners over the four standard test segments, packaged as JUnit
   * parameterized-constructor rows (one Object[] per runner).
   */
  @SuppressWarnings("unchecked")
  public static Collection<?> makeUnionQueryRunners(
      QueryRunnerFactory factory,
      DataSource unionDataSource
  )
      throws IOException
  {
    final IncrementalIndex rtIndex = TestIndex.getIncrementalTestIndex(false);
    final QueryableIndex mMappedTestIndex = TestIndex.getMMappedTestIndex();
    final QueryableIndex mergedRealtimeIndex = TestIndex.mergedRealtimeIndex();
    final IncrementalIndex rtIndexOffheap = TestIndex.getIncrementalTestIndex(true);
    return Arrays.asList(
        new Object[][]{
            {
                makeUnionQueryRunner(factory, new IncrementalIndexSegment(rtIndex, segmentId), unionDataSource)
            },
            {
                makeUnionQueryRunner(factory, new QueryableIndexSegment(segmentId, mMappedTestIndex), unionDataSource)
            },
            {
                makeUnionQueryRunner(
                    factory,
                    new QueryableIndexSegment(segmentId, mergedRealtimeIndex),
                    unionDataSource
                )
            },
            {
                makeUnionQueryRunner(factory, new IncrementalIndexSegment(rtIndexOffheap, segmentId), unionDataSource)
            }
        }
    );
  }
  /**
   * Iterate through the iterables in a synchronous manner and return each step as an Object[]
   *
   * NOTE(review): hasNext() is true only while EVERY input iterator has another element,
   * so inputs of unequal length are silently truncated to the shortest one.
   *
   * @param in The iterables to step through. (effectively columns)
   * @return An iterable of Object[] containing the "rows" of the input (effectively rows)
   */
  public static Iterable<Object[]> transformToConstructionFeeder(Iterable<?>... in)
  {
    if (in == null) {
      return ImmutableList.<Object[]>of();
    }
    final List<Iterable<?>> iterables = Arrays.asList(in);
    final int length = in.length;
    final List<Iterator<?>> iterators = new ArrayList<>(in.length);
    for (Iterable<?> iterable : iterables) {
      iterators.add(iterable.iterator());
    }
    return new Iterable<Object[]>()
    {
      @Override
      public Iterator<Object[]> iterator()
      {
        return new Iterator<Object[]>()
        {
          @Override
          public boolean hasNext()
          {
            // A full "row" exists only when all column iterators still have elements.
            int hasMore = 0;
            for (Iterator<?> it : iterators) {
              if (it.hasNext()) {
                ++hasMore;
              }
            }
            return hasMore == length;
          }
          @Override
          public Object[] next()
          {
            final ArrayList<Object> list = new ArrayList<Object>(length);
            for (Iterator<?> it : iterators) {
              list.add(it.next());
            }
            return list.toArray();
          }
          @Override
          public void remove()
          {
            throw new UOE("Remove not supported");
          }
        };
      }
    };
  }
  /**
   * Wraps a runner for the given segment in BySegment + FinalizeResults decorators.
   */
  public static <T, QueryType extends Query<T>> QueryRunner<T> makeQueryRunner(
      QueryRunnerFactory<T, QueryType> factory,
      Segment adapter
  )
  {
    return new FinalizeResultsQueryRunner<T>(
        new BySegmentQueryRunner<T>(
            segmentId, adapter.getDataInterval().getStart(),
            factory.createRunner(adapter)
        ),
        // Unchecked cast: the factory's toolchest generic is wider than Query<T>.
        (QueryToolChest<T, Query<T>>)factory.getToolchest()
    );
  }
  /**
   * Builds a union runner that executes once per table name in {@code unionDataSource},
   * then merges and finalizes the results. Note: every table name maps onto the SAME
   * underlying segment adapter.
   */
  public static <T> QueryRunner<T> makeUnionQueryRunner(
      final QueryRunnerFactory<T, Query<T>> factory,
      final Segment adapter,
      final DataSource unionDataSource
  )
  {
    return new FinalizeResultsQueryRunner<T>(
        factory.getToolchest().postMergeQueryDecoration(
            factory.getToolchest().mergeResults(
                new UnionQueryRunner<T>(
                    Iterables.transform(
                        unionDataSource.getNames(), new Function<String, QueryRunner>()
                        {
                          @Nullable
                          @Override
                          public QueryRunner apply(@Nullable String input)
                          {
                            return new BySegmentQueryRunner<T>(
                                segmentId, adapter.getDataInterval().getStart(),
                                factory.createRunner(adapter)
                            );
                          }
                        }
                    ),
                    factory.getToolchest()
                )
            )
        ),
        factory.getToolchest()
    );
  }
  /**
   * Returns a chunking decorator that performs no chunking: queries are passed straight
   * through to the delegate runner.
   */
  public static IntervalChunkingQueryRunnerDecorator NoopIntervalChunkingQueryRunnerDecorator()
  {
    return new IntervalChunkingQueryRunnerDecorator(null, null, null) {
      @Override
      public <T> QueryRunner<T> decorate(final QueryRunner<T> delegate,
          QueryToolChest<T, ? extends Query<T>> toolChest) {
        return new QueryRunner<T>() {
          @Override
          public Sequence<T> run(Query<T> query, Map<String, Object> responseContext)
          {
            // Pass-through: no interval chunking applied.
            return delegate.run(query, responseContext);
          }
        };
      }
    };
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.configuration2.builder;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.configuration2.FileBasedConfiguration;
import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.configuration2.XMLPropertiesConfiguration;
import org.apache.commons.configuration2.event.ConfigurationEvent;
import org.apache.commons.configuration2.ex.ConfigurationException;
import org.apache.commons.configuration2.io.FileHandler;
import org.apache.commons.lang3.ClassUtils;
import org.apache.commons.lang3.StringUtils;
/**
* <p>
* A specialized {@code ConfigurationBuilder} implementation which can handle
* configurations read from a {@link FileHandler}.
* </p>
* <p>
* This class extends its base class by the support of a
* {@link FileBasedBuilderParametersImpl} object, and especially of the
* {@link FileHandler} contained in this object. When the builder creates a new
* object the resulting {@code Configuration} instance is associated with the
* {@code FileHandler}. If the {@code FileHandler} has a location set, the
* {@code Configuration} is directly loaded from this location.
* </p>
* <p>
* The {@code FileHandler} is kept by this builder and can be queried later on.
* It can be used for instance to save the current {@code Configuration} after
* it was modified. Some care has to be taken when changing the location of the
* {@code FileHandler}: The new location is recorded and also survives an
* invocation of the {@code resetResult()} method. However, when the builder's
* initialization parameters are reset by calling {@code resetParameters()} the
* location is reset, too.
* </p>
*
* @version $Id$
* @since 2.0
* @param <T> the concrete type of {@code Configuration} objects created by this
* builder
*/
public class FileBasedConfigurationBuilder<T extends FileBasedConfiguration>
        extends BasicConfigurationBuilder<T>
{
    /**
     * A map for storing default encodings for specific configuration classes.
     * NOTE(review): presumably backed by a ConcurrentHashMap (see the ConcurrentHashMap
     * import and initializeDefaultEncodings()) — confirm for thread-safety guarantees.
     */
    private static final Map<Class<?>, String> DEFAULT_ENCODINGS =
            initializeDefaultEncodings();

    /** Stores the FileHandler associated with the current configuration. */
    private FileHandler currentFileHandler;

    /** A specialized listener for the auto save mechanism. */
    private AutoSaveListener autoSaveListener;

    /** A flag whether the builder's parameters were reset. */
    private boolean resetParameters;
    /**
     * Creates a new instance of {@code FileBasedConfigurationBuilder} which
     * produces result objects of the specified class.
     *
     * @param resCls the result class (must not be <b>null</b>)
     * @throws IllegalArgumentException if the result class is <b>null</b>
     */
    public FileBasedConfigurationBuilder(Class<? extends T> resCls)
    {
        super(resCls);
    }

    /**
     * Creates a new instance of {@code FileBasedConfigurationBuilder} which
     * produces result objects of the specified class and sets initialization
     * parameters.
     *
     * @param resCls the result class (must not be <b>null</b>)
     * @param params a map with initialization parameters
     * @throws IllegalArgumentException if the result class is <b>null</b>
     */
    public FileBasedConfigurationBuilder(Class<? extends T> resCls,
            Map<String, Object> params)
    {
        super(resCls, params);
    }

    /**
     * Creates a new instance of {@code FileBasedConfigurationBuilder} which
     * produces result objects of the specified class and sets initialization
     * parameters and the <em>allowFailOnInit</em> flag.
     *
     * @param resCls the result class (must not be <b>null</b>)
     * @param params a map with initialization parameters
     * @param allowFailOnInit the <em>allowFailOnInit</em> flag
     * @throws IllegalArgumentException if the result class is <b>null</b>
     */
    public FileBasedConfigurationBuilder(Class<? extends T> resCls,
            Map<String, Object> params, boolean allowFailOnInit)
    {
        super(resCls, params, allowFailOnInit);
    }
/**
* Returns the default encoding for the specified configuration class. If an
* encoding has been set for the specified class (or one of its super
* classes), it is returned. Otherwise, result is <b>null</b>.
*
* @param configClass the configuration class in question
* @return the default encoding for this class (may be <b>null</b>)
*/
public static String getDefaultEncoding(Class<?> configClass)
{
String enc = DEFAULT_ENCODINGS.get(configClass);
if (enc != null || configClass == null)
{
return enc;
}
List<Class<?>> superclasses =
ClassUtils.getAllSuperclasses(configClass);
for (Class<?> cls : superclasses)
{
enc = DEFAULT_ENCODINGS.get(cls);
if (enc != null)
{
return enc;
}
}
List<Class<?>> interfaces = ClassUtils.getAllInterfaces(configClass);
for (Class<?> cls : interfaces)
{
enc = DEFAULT_ENCODINGS.get(cls);
if (enc != null)
{
return enc;
}
}
return null;
}
/**
* Sets a default encoding for a specific configuration class. This encoding
* is used if an instance of this configuration class is to be created and
* no encoding has been set in the parameters object for this builder. The
* encoding passed here not only applies to the specified class but also to
* its sub classes. If the encoding is <b>null</b>, it is removed.
*
* @param configClass the name of the configuration class (must not be
* <b>null</b>)
* @param encoding the default encoding for this class
* @throws IllegalArgumentException if the class is <b>null</b>
*/
public static void setDefaultEncoding(Class<?> configClass, String encoding)
{
if (configClass == null)
{
throw new IllegalArgumentException(
"Configuration class must not be null!");
}
if (encoding == null)
{
DEFAULT_ENCODINGS.remove(configClass);
}
else
{
DEFAULT_ENCODINGS.put(configClass, encoding);
}
}
/**
* {@inheritDoc} This method is overridden here to change the result type.
*/
@Override
public FileBasedConfigurationBuilder<T> configure(
BuilderParameters... params)
{
super.configure(params);
return this;
}
/**
* Returns the {@code FileHandler} associated with this builder. If already
* a result object has been created, this {@code FileHandler} can be used to
* save it. Otherwise, the {@code FileHandler} from the initialization
* parameters is returned (which is not associated with a {@code FileBased}
* object). Result is never <b>null</b>.
*
* @return the {@code FileHandler} associated with this builder
*/
public synchronized FileHandler getFileHandler()
{
return (currentFileHandler != null) ? currentFileHandler
: fetchFileHandlerFromParameters();
}
/**
* {@inheritDoc} This implementation just records the fact that new
* parameters have been set. This means that the next time a result object
* is created, the {@code FileHandler} has to be initialized from
* initialization parameters rather than reusing the existing one.
*/
@Override
public synchronized BasicConfigurationBuilder<T> setParameters(
Map<String, Object> params)
{
super.setParameters(params);
resetParameters = true;
return this;
}
/**
* Convenience method which saves the associated configuration. This method
* expects that the managed configuration has already been created and that
* a valid file location is available in the current {@code FileHandler}.
* The file handler is then used to store the configuration.
*
* @throws ConfigurationException if an error occurs
*/
public void save() throws ConfigurationException
{
getFileHandler().save();
}
/**
* Returns a flag whether auto save mode is currently active.
*
* @return <b>true</b> if auto save is enabled, <b>false</b> otherwise
*/
public synchronized boolean isAutoSave()
{
return autoSaveListener != null;
}
/**
* Enables or disables auto save mode. If auto save mode is enabled, every
* update of the managed configuration causes it to be saved automatically;
* so changes are directly written to disk.
*
* @param enabled <b>true</b> if auto save mode is to be enabled,
* <b>false</b> otherwise
*/
public synchronized void setAutoSave(boolean enabled)
{
if (enabled)
{
installAutoSaveListener();
}
else
{
removeAutoSaveListener();
}
}
/**
* {@inheritDoc} This implementation deals with the creation and
* initialization of a {@code FileHandler} associated with the new result
* object.
*/
@Override
protected void initResultInstance(T obj) throws ConfigurationException
{
super.initResultInstance(obj);
FileHandler srcHandler =
(currentFileHandler != null && !resetParameters) ? currentFileHandler
: fetchFileHandlerFromParameters();
currentFileHandler = new FileHandler(obj, srcHandler);
if (autoSaveListener != null)
{
autoSaveListener.updateFileHandler(currentFileHandler);
}
initFileHandler(currentFileHandler);
resetParameters = false;
}
/**
* Initializes the new current {@code FileHandler}. When a new result object
* is created, a new {@code FileHandler} is created, too, and associated
* with the result object. This new handler is passed to this method. If a
* location is defined, the result object is loaded from this location.
* Note: This method is called from a synchronized block.
*
* @param handler the new current {@code FileHandler}
* @throws ConfigurationException if an error occurs
*/
protected void initFileHandler(FileHandler handler)
throws ConfigurationException
{
initEncoding(handler);
if (handler.isLocationDefined())
{
handler.locate();
handler.load();
}
}
/**
* Obtains the {@code FileHandler} from this builder's parameters. If no
* {@code FileBasedBuilderParametersImpl} object is found in this builder's
* parameters, a new one is created now and stored. This makes it possible
* to change the location of the associated file even if no parameters
* object was provided.
*
* @return the {@code FileHandler} from initialization parameters
*/
private FileHandler fetchFileHandlerFromParameters()
{
FileBasedBuilderParametersImpl fileParams =
FileBasedBuilderParametersImpl.fromParameters(getParameters(),
false);
if (fileParams == null)
{
fileParams = new FileBasedBuilderParametersImpl();
addParameters(fileParams.getParameters());
}
return fileParams.getFileHandler();
}
/**
* Installs the listener for the auto save mechanism if it is not yet
* active.
*/
private void installAutoSaveListener()
{
if (autoSaveListener == null)
{
autoSaveListener = new AutoSaveListener(this);
addEventListener(ConfigurationEvent.ANY, autoSaveListener);
autoSaveListener.updateFileHandler(getFileHandler());
}
}
/**
* Removes the listener for the auto save mechanism if it is currently
* active.
*/
private void removeAutoSaveListener()
{
if (autoSaveListener != null)
{
removeEventListener(ConfigurationEvent.ANY, autoSaveListener);
autoSaveListener.updateFileHandler(null);
autoSaveListener = null;
}
}
/**
* Initializes the encoding of the specified file handler. If already an
* encoding is set, it is used. Otherwise, the default encoding for the
* result configuration class is obtained and set.
*
* @param handler the handler to be initialized
*/
private void initEncoding(FileHandler handler)
{
if (StringUtils.isEmpty(handler.getEncoding()))
{
String encoding = getDefaultEncoding(getResultClass());
if (encoding != null)
{
handler.setEncoding(encoding);
}
}
}
/**
* Creates a map with default encodings for configuration classes and
* populates it with default entries.
*
* @return the map with default encodings
*/
private static Map<Class<?>, String> initializeDefaultEncodings()
{
Map<Class<?>, String> enc = new ConcurrentHashMap<Class<?>, String>();
enc.put(PropertiesConfiguration.class,
PropertiesConfiguration.DEFAULT_ENCODING);
enc.put(XMLPropertiesConfiguration.class,
XMLPropertiesConfiguration.DEFAULT_ENCODING);
return enc;
}
}
| |
/*
* JIPS - JIPS Image Processing Software
* Copyright (C) 2006 - 2017 Markus Karnik (markus.karnik@gmail.com)
*
* This file is licensed to you under the MIT license.
* See the LICENSE file in the project root for more information.
*
*/
package de.karnik.jips.gui.desk.connector;
import de.karnik.jips.CommonFunctions;
import de.karnik.jips.JIPSExceptionHandler;
import de.karnik.jips.ProjectUIListener;
import de.karnik.jips.common.JIPSException;
import de.karnik.jips.common.config.JIPSVariables;
import javax.swing.*;
import javax.swing.event.MouseInputListener;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.util.Vector;
/**
 * Visual representation of a connection between an output connector and an
 * input connector. The connection is drawn as three line segments (horizontal,
 * vertical, horizontal) and can be selected with the mouse.
 */
public class BaseConnectionUI extends JComponent implements MouseInputListener {
    private static final long serialVersionUID = 9220864492693429348L;
    private Color outerConnectionColor = null;
    private Color innerConnectionColor = null;
    private Color selectedInnerConnectionColor = null;
    private JIPSVariables vars;
    /** Stroke for the outer (border) line of the connection. */
    private BasicStroke bs1 = null;
    /** Stroke for the inner (fill) line of the connection. */
    private BasicStroke bs2 = null;
    private boolean selected = false;
    private Vector<ProjectUIListener> projectUIListeners = new Vector<ProjectUIListener>();
    /**
     * Connector with the output.
     */
    private BaseConnectorUI output = null;
    /**
     * Connector with the input.
     */
    private BaseConnectorUI input = null;
    /** Start point of the connection, in this component's coordinates. */
    private Point start = new Point( 0,0 );
    /** End point of the connection, in this component's coordinates. */
    private Point end = new Point( 0,0 );
    /** Horizontal length of the first segment; recomputed on every paint. */
    private int length = 0;
    /**
     * This class is uninstantiable with the default constructor.
     */
    private BaseConnectionUI() {}
    /**
     * Creates a connection between the given output and input connectors and
     * registers itself as mouse listener.
     *
     * @param output the source connector
     * @param input the target connector
     * @throws JIPSException if the configuration (colors) cannot be read
     */
    public BaseConnectionUI( BaseConnectorUI output, BaseConnectorUI input ) throws JIPSException {
        this();
        vars = JIPSVariables.getInstance();
        this.output = output;
        this.input = input;
        outerConnectionColor = vars.getColor( "connection_color_outer" );
        innerConnectionColor = vars.getColor( "connection_color_inner" );
        selectedInnerConnectionColor = vars.getColor( "select_color" );
        bs1 = new BasicStroke( 4 );
        bs2 = new BasicStroke( 2 );
        addMouseListener( this );
        addMouseMotionListener( this );
        this.setOpaque( false );
        calcConnectionBounds();
    }
    public void removeProjectUIListeners(ProjectUIListener listener ) {
        projectUIListeners.remove( listener );
    }
    public void addProjectUIListeners(ProjectUIListener listener ) {
        projectUIListeners.add( listener );
    }
    public boolean isSelected() {
        return selected;
    }
    /**
     * Sets the selection state and repaints so the inner line color changes.
     *
     * @param selected the new selection state
     */
    public void setSelected(boolean selected ) {
        this.selected = selected;
        repaint();
    }
    /**
     * Recomputes this component's bounds from the absolute positions of the
     * two connectors and updates the segment start/end points accordingly.
     */
    private void calcConnectionBounds() {
        int connectorWidth = input.getWidth();
        int connectorHeight = input.getHeight();
        int width = input.getAbsX() - output.getAbsX();
        int height = input.getAbsY() - output.getAbsY();
        if( width < 0 ) width *= -1;
        if( height < 0 ) height *= -1;
        width += connectorWidth;
        height += connectorHeight;
        int x = 0;
        int y = 0;
        // Choose the leftmost connector as the component's x origin and set
        // the segment endpoints so the line runs between connector centers.
        if( CommonFunctions.compare( input.getAbsX(), output.getAbsX() ) < 1 ) {
            x = input.getAbsX();
            start.x = connectorWidth / 2;
            end.x = getWidth() - ( connectorWidth / 2 );
        } else {
            x = output.getAbsX();
            start.x = getWidth() - ( connectorWidth / 2 );
            end.x = connectorWidth / 2;
        }
        if( CommonFunctions.compare( input.getAbsY(), output.getAbsY() ) < 1 ) {
            y = input.getAbsY();
            start.y = connectorHeight / 2;
            end.y = getHeight() - ( connectorHeight / 2 );
        } else {
            y = output.getAbsY();
            start.y = getHeight() - ( connectorHeight / 2 );
            end.y = connectorHeight / 2;
        }
        setBounds( x - ( connectorWidth / 2 ), y - ( connectorHeight / 2 ), width, height);
    }
    public void paintComponent( Graphics g ) {
        // NOTE(review): recomputing bounds (setBounds) during paint can
        // trigger extra layout/repaint cycles — confirm this is intended.
        calcConnectionBounds();
        Graphics2D g2 = ( Graphics2D )g;
        Stroke bs = g2.getStroke();
        length = ( end.x - start.x ) / 2;
        // draw the thick outer line first, then the thinner inner line on top
        g2.setStroke( bs1 );
        g2.setColor( outerConnectionColor );
        drawLine( g2 );
        g2.setStroke( bs2 );
        if( isSelected() )
            g2.setColor( selectedInnerConnectionColor );
        else
            g2.setColor( innerConnectionColor );
        drawLine( g2 );
        g2.setStroke( bs );
    }
    /**
     * Draws the connection as three segments: horizontal from the start to the
     * midpoint, vertical at the midpoint, horizontal from the midpoint to the
     * end.
     *
     * @param g2 the graphics context
     */
    private void drawLine( Graphics2D g2 ) {
        g2.drawLine( start.x, start.y, start.x + length, start.y );
        g2.drawLine( start.x + length, start.y, start.x + length, end.y );
        g2.drawLine( start.x + length, end.y, end.x, end.y );
    }
    /**
     * Returns whether {@code search} lies strictly between {@code first} and
     * {@code second} (in either order; both bounds exclusive).
     */
    private boolean isBetween( int first, int second, int search ) {
        if( search < first && search > second )
            return true;
        if( search > first && search < second )
            return true;
        return false;
    }
    /**
     * Returns whether {@code search} lies within a band around the segment
     * from {@code p1} to {@code p2}, widened by half the connector size.
     *
     * @param horizontal <b>true</b> for a horizontal segment, <b>false</b>
     *        for a vertical one
     */
    private boolean isBetween( Point p1, Point p2, Point search, boolean horizontal ) {
        int connectorWidth = input.getWidth();
        int connectorHeight = input.getHeight();
        if( horizontal ) {
            if( isBetween( p1.x, p2.x, search.x )
                && isBetween( p1.y + ( connectorHeight / 2 ), p2.y - ( connectorHeight / 2 ), search.y ) )
                return true;
        } else {
            if( isBetween( p1.y, p2.y, search.y )
                && isBetween( p1.x + ( connectorWidth / 2 ), p2.x - ( connectorWidth / 2 ), search.x ) )
                return true;
        }
        return false;
    }
    /**
     * Returns whether the point (x, y) hits one of the three line segments.
     */
    private boolean intersects( int x, int y ) {
        Point search = new Point( x, y );
        if( isBetween( start, new Point( start.x + length, start.y ) , search, true ) )
            return true;
        if( isBetween( new Point( start.x + length, start.y ), new Point( start.x + length, end.y ), search, false ) )
            return true;
        if( isBetween( new Point( start.x + length, end.y ), end, search, true ) )
            return true;
        return false;
    }
    public Rectangle getFirstRect() {
        return getRect( 0 );
    }
    public Rectangle getSecondRect() {
        return getRect( 1 );
    }
    public Rectangle getThirdRect() {
        return getRect( 2 );
    }
    /**
     * Returns the bounding rectangle for one of the three line segments.
     * <p>
     * BUG FIX: the original switch had no {@code break} statements, so every
     * case fell through and all three getters returned the case-2 bounds.
     *
     * @param what the segment index (0, 1 or 2)
     * @return the bounding rectangle (empty for unknown indices)
     */
    private Rectangle getRect( int what ) {
        Rectangle rect = new Rectangle();
        int connectorHeight = input.getHeight();
        switch( what ) {
            case 0:
                rect.setBounds( start.x, start.y - ( connectorHeight / 2 ), length, connectorHeight );
                break;
            case 1:
                // NOTE(review): identical to case 0 — the second (vertical)
                // segment probably needs its own bounds; confirm against
                // drawLine().
                rect.setBounds( start.x, start.y - ( connectorHeight / 2 ), length, connectorHeight );
                break;
            case 2:
                rect.setBounds( start.x + length, end.y + ( connectorHeight / 2 ), length, connectorHeight );
                break;
        }
        return rect;
    }
    @Override
    public void mouseClicked( MouseEvent e ) {
        try {
            if( e.getButton() == MouseEvent.BUTTON1 ) {
                int x = e.getX();
                int y = e.getY();
                // without CTRL, a click first clears every other selection
                if( !e.isControlDown() )
                    for(int i = 0; i < projectUIListeners.size(); i++ )
                        projectUIListeners.get( i ).deselectAll();
                if( intersects( x, y ) )
                    setSelected( !isSelected() );
            }
        } catch ( JIPSException je ) {
            JIPSExceptionHandler.handleException( je );
        }
    }
    @Override
    public void mouseEntered(MouseEvent e) {
        // not used
    }
    @Override
    public void mouseExited(MouseEvent e) {
        // not used
    }
    @Override
    public void mousePressed(MouseEvent e) {
        // not used
    }
    @Override
    public void mouseReleased(MouseEvent e) {
        // not used
    }
    @Override
    public void mouseDragged(MouseEvent e) {
        // not used
    }
    @Override
    public void mouseMoved(MouseEvent e) {
        // not used
    }
}
| |
/*
* Copyright 2013-2014 Richard M. Hightower
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* __________ _____ __ .__
* \______ \ ____ ____ ____ /\ / \ _____ | | _|__| ____ ____
* | | _// _ \ / _ \ / \ \/ / \ / \\__ \ | |/ / |/ \ / ___\
* | | ( <_> | <_> ) | \ /\ / Y \/ __ \| <| | | \/ /_/ >
* |______ /\____/ \____/|___| / \/ \____|__ (____ /__|_ \__|___| /\___ /
* \/ \/ \/ \/ \/ \//_____/
* ____. ___________ _____ ______________.___.
* | |____ ___ _______ \_ _____/ / _ \ / _____/\__ | |
* | \__ \\ \/ /\__ \ | __)_ / /_\ \ \_____ \ / | |
* /\__| |/ __ \\ / / __ \_ | \/ | \/ \ \____ |
* \________(____ /\_/ (____ / /_______ /\____|__ /_______ / / ______|
* \/ \/ \/ \/ \/ \/
*/
package org.boon.core;
import com.sun.management.UnixOperatingSystemMXBean;
import org.boon.Exceptions;
import org.boon.IO;
import org.boon.Lists;
import org.boon.Str;
import org.boon.core.reflection.Annotations;
import org.boon.core.reflection.Reflection;
import org.boon.core.timer.TimeKeeper;
import org.boon.core.timer.TimeKeeperBasic;
import org.boon.json.JsonParserFactory;
import org.boon.logging.Logging;
import java.io.File;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.file.Path;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicReference;
/**
 * Static utility facade over system properties, environment variables, JVM
 * version detection and JMX management beans.
 * <p>
 * Property lookups ({@code sysProp*}) consult, in order: the cached system
 * properties, the cached environment, and the environment again under the
 * under_bar_cased form of the key.
 */
public class Sys {
    /** Snapshot of the JVM system properties, taken at class-load time. */
    public static ConcurrentHashMap <Object, Object> systemProperties = new ConcurrentHashMap<>(System.getProperties());
    /** Snapshot of the process environment, taken at class-load time. */
    public static ConcurrentHashMap <String, String> env = new ConcurrentHashMap<>(System.getenv());
    private final static boolean isWindows = System.getProperty ( "os.name" ).contains ( "Windows" );
    private final static boolean inContainer;
    private final static boolean is1_7OorLater;
    private final static int buildNumber;
    private final static BigDecimal version;
    private final static boolean is1_7;
    private final static boolean is1_8;
    /** Sentinel meaning "return null instead of converting the default". */
    public final static Object DEFAULT_NULL_NOT_EMPTY = new Object();

    static {
        // Parse "java.version" (e.g. "1.7.0_45") into a version number and,
        // when present, the build number after the underscore.
        BigDecimal v = new BigDecimal ( "-1" );
        int b = -1;
        String sversion = System.getProperty ( "java.version" );
        if ( sversion.indexOf ( "_" ) != -1 ) {
            final String[] split = sversion.split ( "_" );
            try {
                String ver = split [0];
                if (ver.startsWith ( "1.8" )) {
                    v = new BigDecimal ("1.8" );
                }
                if (ver.startsWith ( "1.7" )) {
                    v = new BigDecimal ("1.7" );
                }
                if (ver.startsWith ( "1.6" )) {
                    v = new BigDecimal ("1.6" );
                }
                if (ver.startsWith ( "1.5" )) {
                    v = new BigDecimal ("1.5" );
                }
                if (ver.startsWith ( "1.9" )) {
                    v = new BigDecimal ("1.9" );
                }
                String build = split[ 1 ];
                if (build.endsWith("-ea")) {
                    // early-access builds carry an "-ea" suffix
                    build = build.substring(0, build.length() - 3);
                }
                b = Integer.parseInt ( build );
            } catch ( Exception ex ) {
                ex.printStackTrace ();
                System.err.println ( "Unable to determine build number or version" );
            }
        } else if ("1.8.0".equals(sversion) || "1.8.0-ea".equals(sversion)) {
            b = -1;
            v = new BigDecimal("1.8");
        } else {
            try {
                v = new BigDecimal(sversion);
                b = -1;
            }catch (Exception ex) {
                if (sversion.startsWith ( "1.7" )) {
                    v = new BigDecimal ("1.7" );
                } else if (sversion.startsWith ( "1.8" )) {
                    v = new BigDecimal ("1.8" );
                } else {
                    v = new BigDecimal ("-1.0" );
                }
            }
        }
        buildNumber = b;
        version = v;
        is1_7OorLater = version.compareTo ( new BigDecimal ( "1.7" )) >=0;
        is1_7 = version.compareTo ( new BigDecimal ( "1.7" ))==0;
        is1_8 = version.compareTo ( new BigDecimal ( "1.8" ))==0;
    }

    public static void println ( String message ) {
        System.out.println ( message );
    }

    public static void print ( String message ) {
        System.out.print ( message );
    }

    public static boolean isWindows () {
        return isWindows;
    }

    public static boolean is1_7OrLater () {
        return is1_7OorLater;
    }

    public static boolean is1_7() {
        return is1_7;
    }

    public static boolean is1_8() {
        return is1_8;
    }

    public static int buildNumber () {
        return buildNumber;
    }

    public static char windowsPathSeparator () {
        return '\\';
    }

    final static AtomicReference<TimeKeeper> timer = new AtomicReference<TimeKeeper> ( new TimeKeeperBasic () );

    public static TimeKeeper timer () {
        return timer.get ();
    }

    public static long time () {
        return timer.get ().time ();
    }

    static {
        // Container detection can be forced either way via system properties.
        boolean _inContainer;
        boolean forceInContainer = Boolean.parseBoolean ( System.getProperty ( "org.boon.forceInContainer", "false" ) );
        boolean forceNoContainer = Boolean.parseBoolean ( System.getProperty ( "org.boon.forceNoContainer", "false" ) );
        if ( forceNoContainer ) {
            _inContainer = false;
        } else if ( forceInContainer ) {
            _inContainer = true;
        } else {
            _inContainer = detectContainer ();
        }
        inContainer = _inContainer;
    }

    /**
     * Detects a servlet or EJB container by probing for well-known classes.
     *
     * @return true if servlet or EJB API classes are on the classpath
     */
    private static boolean detectContainer () {
        boolean _inContainer;
        try {
            Class.forName ( "javax.servlet.http.HttpServlet" );
            _inContainer = true;
        } catch ( ClassNotFoundException e ) {
            _inContainer = false;
        }
        if ( !_inContainer ) {
            try {
                Class.forName ( "javax.ejb.EJBContext" );
                _inContainer = true;
            } catch ( ClassNotFoundException e ) {
                _inContainer = false;
            }
        }
        return _inContainer;
    }

    public static boolean inContainer () {
        return inContainer;
    }

    /* Everything that has a cache you need to hold on to, should use this so they can
     * all be stuffed into application context of web-app or ear if you use Java EE. */
    public static Object contextToHold () {
        return Lists.list ( Reflection.contextToHold (),
                Annotations.contextToHold (),
                Logging.contextToHold() );
    }

    /**
     * Looks up a raw string value for {@code key}: first in the cached system
     * properties, then in the cached environment under the key itself, and
     * finally in the environment under the under_bar_cased form of the key.
     *
     * @param key the property / environment key
     * @return the raw value, or null if the key is not found anywhere
     */
    private static String getPropertyOrEnv(String key) {
        String property = (String) systemProperties.get(key);
        if (property == null) {
            property = env.get(key);
        }
        if (property == null) {
            property = env.get(Str.underBarCase(key));
        }
        return property;
    }

    /**
     * Returns the value of the first of the given keys that resolves to a
     * non-null value, or null if none do. Default conversion is suppressed.
     */
    public static String sysPropMultipleKeys(String... keys) {
        for (String key : keys) {
            String value = _sysProp(key, DEFAULT_NULL_NOT_EMPTY);
            if (value != null) {
                return value;
            }
        }
        return null;
    }

    public static String sysProp(String key) {
        return _sysProp(key, null);
    }

    public static String sysPropDefaultNull(String key) {
        return _sysProp(key, DEFAULT_NULL_NOT_EMPTY);
    }

    /**
     * Checks for the key under system property.
     * Then checks it as an environment variable.
     * (Looks up in env using straight key and performing underBarCase on it.)
     * Then converts defaultValue into a string.
     * @param key the property / environment key
     * @param defaultValue value converted to String when the key is not found
     * @return the resolved value, or the converted default
     */
    public static String sysProp(String key, Object defaultValue) {
        return _sysProp(key, defaultValue);
    }

    private static String _sysProp(String key, Object defaultValue) {
        String property = getPropertyOrEnv(key);
        if (property == null && defaultValue != DEFAULT_NULL_NOT_EMPTY) {
            property = Conversions.toString(defaultValue);
        }
        return property;
    }

    public static boolean sysPropBoolean(String key) {
        return sysProp(key, false);
    }

    public static boolean sysProp(String key, boolean defaultValue) {
        String property = getPropertyOrEnv(key);
        return (property == null) ? defaultValue : Conversions.toBoolean(property);
    }

    public static int sysPropInt(String key) {
        return sysProp(key, -1);
    }

    public static int sysProp(String key, int defaultValue) {
        String property = getPropertyOrEnv(key);
        return (property == null) ? defaultValue : Conversions.toInt(property);
    }

    public static File sysProp(String key, File defaultValue) {
        String property = getPropertyOrEnv(key);
        return (property == null) ? defaultValue : new File(property);
    }

    public static Path sysProp(String key, Path defaultValue) {
        String property = getPropertyOrEnv(key);
        return (property == null) ? defaultValue : IO.path(property);
    }

    /**
     * NOTE(review): declared to return {@code int} although the name says
     * "Long"; the int literal {@code -1} also binds this call to the
     * {@code sysProp(String, int)} overload. Kept as-is because changing the
     * return type would break existing callers.
     */
    public static int sysPropLong(String key) {
        return sysProp(key, -1);
    }

    public static long sysProp(String key, long defaultValue) {
        String property = getPropertyOrEnv(key);
        return (property == null) ? defaultValue : Conversions.toLong(property);
    }

    public static short sysPropShort(String key) {
        return sysProp(key, (short) -1);
    }

    public static short sysProp(String key, short defaultValue) {
        String property = getPropertyOrEnv(key);
        return (property == null) ? defaultValue : Conversions.toShort(property);
    }

    public static byte sysPropByte(String key) {
        return sysProp(key, (byte) -1);
    }

    public static byte sysProp(String key, byte defaultValue) {
        String property = getPropertyOrEnv(key);
        return (property == null) ? defaultValue : Conversions.toByte(property);
    }

    public static BigDecimal sysPropBigDecimal(String key) {
        return sysPropBigDecima(key, (BigDecimal) null);
    }

    /**
     * NOTE(review): method name is missing the trailing "l"
     * ("sysPropBigDecima"); kept because renaming would break callers.
     */
    public static BigDecimal sysPropBigDecima(String key, BigDecimal defaultValue) {
        String property = getPropertyOrEnv(key);
        return (property == null) ? defaultValue : Conversions.toBigDecimal(property);
    }

    public static BigInteger sysPropBigInteger(String key) {
        return sysPropBigInteger(key, (BigInteger) null);
    }

    public static BigInteger sysPropBigInteger(String key, BigInteger defaultValue) {
        String property = getPropertyOrEnv(key);
        return (property == null) ? defaultValue : Conversions.toBigInteger(property);
    }

    public static <T extends Enum> T sysPropEnum(Class<T> cls, String key) {
        return sysProp(cls, key, null);
    }

    public static <T extends Enum> T sysProp(Class<T> cls, String key, T defaultValue) {
        String property = getPropertyOrEnv(key);
        return (property == null) ? defaultValue : Conversions.toEnum(cls, property);
    }

    /**
     * Stores the value (converted to String) as a system property in the
     * cached map.
     *
     * @return the previous value for the key, or null
     */
    public static String putSysProp(String key, Object value) {
        return (String) systemProperties.put(key, Conversions.toString(value));
    }

    public static boolean hasSysProp(String propertyName) {
        return systemProperties.containsKey(propertyName);
    }

    /**
     * Sleeps for the given duration in milliseconds; clears the interrupt
     * status if interrupted.
     */
    public static void sleep(long duration) {
        try {
            Thread.sleep(duration);
        } catch (InterruptedException e) {
            Thread.interrupted();
        }
    }

    public static int availableProcessors() {
        return Runtime.getRuntime().availableProcessors();
    }

    public static long freeMemory() {
        return Runtime.getRuntime().freeMemory();
    }

    public static long totalMemory() {
        return Runtime.getRuntime().totalMemory();
    }

    public static long maxMemory() {
        return Runtime.getRuntime().maxMemory();
    }

    static boolean _oracleJVMAndUnix = false;

    static {
        // The com.sun MXBean is only available on Oracle/OpenJDK-style JVMs.
        try {
            Class.forName("com.sun.management.UnixOperatingSystemMXBean");
            _oracleJVMAndUnix = true;
        } catch (ClassNotFoundException e) {
            _oracleJVMAndUnix = false;
        }
    }

    private final static boolean oracleJVMAndUnix = _oracleJVMAndUnix;

    /**
     * Returns the OS MXBean as a {@code UnixOperatingSystemMXBean}, or null
     * when the com.sun management extension is not available.
     */
    private static UnixOperatingSystemMXBean unixMxBean() {
        return oracleJVMAndUnix
                ? (UnixOperatingSystemMXBean) ManagementFactory.getOperatingSystemMXBean()
                : null;
    }

    public static List<GarbageCollectorMXBean> gc() {
        return ManagementFactory.getGarbageCollectorMXBeans();
    }

    public static double loadAverage() {
        return ManagementFactory.getOperatingSystemMXBean().getSystemLoadAverage();
    }

    /** @return the max file descriptor count, or -1 if unavailable */
    public static long maxFileDescriptorCount() {
        UnixOperatingSystemMXBean unix = unixMxBean();
        return (unix == null) ? -1 : unix.getMaxFileDescriptorCount();
    }

    /** @return the open file descriptor count, or -1 if unavailable */
    public static long openFileDescriptorCount() {
        UnixOperatingSystemMXBean unix = unixMxBean();
        return (unix == null) ? -1 : unix.getOpenFileDescriptorCount();
    }

    /** @return the committed virtual memory size, or -1 if unavailable */
    public static long committedVirtualMemorySize() {
        UnixOperatingSystemMXBean unix = unixMxBean();
        return (unix == null) ? -1 : unix.getCommittedVirtualMemorySize();
    }

    /** @return the total swap space size, or -1 if unavailable */
    public static long totalSwapSpaceSize() {
        UnixOperatingSystemMXBean unix = unixMxBean();
        return (unix == null) ? -1 : unix.getTotalSwapSpaceSize();
    }

    /** @return the free swap space size, or -1 if unavailable */
    public static long freeSwapSpaceSize() {
        UnixOperatingSystemMXBean unix = unixMxBean();
        return (unix == null) ? -1 : unix.getFreeSwapSpaceSize();
    }

    /** @return the process CPU time, or -1 if unavailable */
    public static long processCpuTime() {
        UnixOperatingSystemMXBean unix = unixMxBean();
        return (unix == null) ? -1 : unix.getProcessCpuTime();
    }

    /** @return the free physical memory size, or -1 if unavailable */
    public static long freePhysicalMemorySize() {
        UnixOperatingSystemMXBean unix = unixMxBean();
        return (unix == null) ? -1 : unix.getFreePhysicalMemorySize();
    }

    /** @return the total physical memory size, or -1 if unavailable */
    public static long totalPhysicalMemorySize() {
        UnixOperatingSystemMXBean unix = unixMxBean();
        return (unix == null) ? -1 : unix.getTotalPhysicalMemorySize();
    }

    /** @return the system CPU load, or -1 if unavailable */
    public static double systemCpuLoad() {
        UnixOperatingSystemMXBean unix = unixMxBean();
        return (unix == null) ? -1 : unix.getSystemCpuLoad();
    }

    /** @return the process CPU load, or -1 if unavailable */
    public static double processCpuLoad() {
        UnixOperatingSystemMXBean unix = unixMxBean();
        return (unix == null) ? -1 : unix.getProcessCpuLoad();
    }

    public static long uptime() {
        return ManagementFactory.getRuntimeMXBean().getUptime();
    }

    public static long startTime() {
        return ManagementFactory.getRuntimeMXBean().getStartTime();
    }

    public static int pendingFinalizationCount() {
        return ManagementFactory.getMemoryMXBean().getObjectPendingFinalizationCount();
    }

    public static MemoryUsage heapMemoryUsage() {
        return ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
    }

    public static MemoryUsage nonHeapMemoryUsage() {
        return ManagementFactory.getMemoryMXBean().getNonHeapMemoryUsage();
    }

    public static int threadPeakCount() {
        return ManagementFactory.getThreadMXBean().getPeakThreadCount();
    }

    public static int threadCount() {
        return ManagementFactory.getThreadMXBean().getThreadCount();
    }

    public static long threadsStarted() {
        return ManagementFactory.getThreadMXBean().getTotalStartedThreadCount();
    }

    public static long threadCPUTime() {
        return ManagementFactory.getThreadMXBean().getCurrentThreadCpuTime();
    }

    public static long threadUserTime() {
        return ManagementFactory.getThreadMXBean().getCurrentThreadUserTime();
    }

    public static int threadDaemonCount() {
        return ManagementFactory.getThreadMXBean().getDaemonThreadCount();
    }

    /**
     * Loads an instance of {@code clazz} from the first existing JSON file in
     * {@code fileLocations}; falls back to a default-constructed instance when
     * no file exists. Returns null when parsing or instantiation fails (the
     * error is routed through {@link Exceptions#handle}).
     *
     * @param clazz the target type
     * @param fileLocations candidate file paths, checked in order
     * @return the parsed or default instance, or null on failure
     */
    public static <T> T loadFromFileLocation(Class<T> clazz, String... fileLocations) {
        for (String fileLocation : fileLocations) {
            if (fileLocation != null && IO.exists(fileLocation)) {
                try {
                    return new JsonParserFactory().create().parseFile(clazz, fileLocation);
                }
                catch (Exception ex) {
                    ex.printStackTrace();
                    Exceptions.handle(ex, "Unable to read file from ", fileLocation);
                    return null;
                }
            }
        }
        try {
            return clazz.newInstance();
        }
        catch (InstantiationException | IllegalAccessException e) {
            Exceptions.handle(e, "Unable to create instance of " + clazz.getName());
            return null;
        }
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.action;
import org.apache.lucene.util.Accountable;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.cluster.routing.IndexRouting;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
import java.util.Locale;
import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM;
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
/**
 * Generic interface grouping ActionRequest implementations that perform writes to a single document.
 * Action requests implementing this can be part of {@link org.elasticsearch.action.bulk.BulkRequest}.
 */
public interface DocWriteRequest<T> extends IndicesRequest, Accountable {
// Request parameter key ("require_alias"): when set, the write must target an existing
// alias, which also disallows index auto-creation for this individual request.
String REQUIRE_ALIAS = "require_alias";
/**
 * Set the index for this request
 * @return the Request
 */
T index(String index);
/**
 * Get the index that this request operates on
 * @return the index
 */
String index();
/**
 * Get the id of the document for this request
 * @return the id
 */
String id();
/**
 * Get the options for this request
 * @return the indices options
 */
@Override
IndicesOptions indicesOptions();
/**
 * Set the routing for this request
 * @return the Request
 */
T routing(String routing);
/**
 * Get the routing for this request
 * @return the Routing
 */
String routing();
/**
 * Get the document version for this request
 * @return the document version
 */
long version();
/**
 * Sets the version, which will perform the operation only if a matching
 * version exists and no changes happened on the doc since then.
 */
T version(long version);
/**
 * Get the document version type for this request
 * @return the document version type
 */
VersionType versionType();
/**
 * Sets the versioning type. Defaults to {@link VersionType#INTERNAL}.
 */
T versionType(VersionType versionType);
/**
 * Only perform this request if the document's last modification was assigned the given
 * sequence number. Must be used in combination with {@link #setIfPrimaryTerm(long)}.
 *
 * If the document's last modification was assigned a different sequence number a
 * {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
 */
T setIfSeqNo(long seqNo);
/**
 * Only perform this request if the document's last modification was assigned the given
 * primary term. Must be used in combination with {@link #setIfSeqNo(long)}.
 *
 * If the document's last modification was assigned a different term a
 * {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
 */
T setIfPrimaryTerm(long term);
/**
 * If set, only perform this request if the document's last modification was assigned this
 * sequence number. If it was assigned a different sequence number a
 * {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
 */
long ifSeqNo();
/**
 * If set, only perform this request if the document's last modification was assigned this
 * primary term.
 *
 * If it was assigned a different term a
 * {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
 */
long ifPrimaryTerm();
/**
 * Get the requested document operation type of the request
 * @return the operation type {@link OpType}
 */
OpType opType();
/**
 * Should this request override specifically require the destination to be an alias?
 * @return boolean flag, when true specifically requires an alias
 */
boolean isRequireAlias();
/**
 * Finalize the request before executing or routing it.
 */
void process();
/**
 * Pick the appropriate shard id to receive this request.
 */
int route(IndexRouting indexRouting);
/**
 * Requested operation type to perform on the document.
 *
 * NOTE(review): the numeric ids here (INDEX=0, CREATE=1, UPDATE=2, DELETE=3) are
 * distinct from the request-type bytes written by
 * {@link #writeDocumentRequest(StreamOutput, DocWriteRequest)} (0=index, 1=delete,
 * 2=update) — do not conflate the two encodings.
 */
enum OpType {
/**
 * Index the source. If there is an existing document with the id, it will
 * be replaced.
 */
INDEX(0),
/**
 * Creates the resource. Simply adds it to the index; if there is an existing
 * document with the id, it won't be replaced.
 */
CREATE(1),
/** Updates a document */
UPDATE(2),
/** Deletes a document */
DELETE(3);
// Stable wire/serialization id of this op type.
private final byte op;
// Cached lowercase name (Locale.ROOT, so locale-independent).
private final String lowercase;
OpType(int op) {
this.op = (byte) op;
this.lowercase = this.toString().toLowerCase(Locale.ROOT);
}
public byte getId() {
return op;
}
public String getLowercase() {
return lowercase;
}
// Inverse of getId(); rejects unknown ids rather than defaulting.
public static OpType fromId(byte id) {
switch (id) {
case 0:
return INDEX;
case 1:
return CREATE;
case 2:
return UPDATE;
case 3:
return DELETE;
default:
throw new IllegalArgumentException("Unknown opType: [" + id + "]");
}
}
// Case-insensitive (Locale.ROOT) lookup by name; rejects unknown names.
public static OpType fromString(String sOpType) {
String lowerCase = sOpType.toLowerCase(Locale.ROOT);
for (OpType opType : OpType.values()) {
if (opType.getLowercase().equals(lowerCase)) {
return opType;
}
}
throw new IllegalArgumentException("Unknown opType: [" + sOpType + "]");
}
}
/**
 * Read a document write (index/delete/update) request.
 *
 * The leading type byte selects the concrete request class:
 * 0 = {@link IndexRequest}, 1 = {@link DeleteRequest}, 2 = {@link UpdateRequest}.
 * Must stay in sync with {@link #writeDocumentRequest(StreamOutput, DocWriteRequest)}.
 *
 * @param shardId shard id of the request. {@code null} when reading as part of a {@link org.elasticsearch.action.bulk.BulkRequest}
 *                that does not have a unique shard id.
 */
static DocWriteRequest<?> readDocumentRequest(@Nullable ShardId shardId, StreamInput in) throws IOException {
byte type = in.readByte();
DocWriteRequest<?> docWriteRequest;
if (type == 0) {
docWriteRequest = new IndexRequest(shardId, in);
} else if (type == 1) {
docWriteRequest = new DeleteRequest(shardId, in);
} else if (type == 2) {
docWriteRequest = new UpdateRequest(shardId, in);
} else {
throw new IllegalStateException("invalid request type [" + type + " ]");
}
return docWriteRequest;
}
/** Write a document write (index/delete/update) request, tagged with the same type byte
 *  that {@link #readDocumentRequest} expects (0=index, 1=delete, 2=update). */
static void writeDocumentRequest(StreamOutput out, DocWriteRequest<?> request) throws IOException {
if (request instanceof IndexRequest) {
out.writeByte((byte) 0);
((IndexRequest) request).writeTo(out);
} else if (request instanceof DeleteRequest) {
out.writeByte((byte) 1);
((DeleteRequest) request).writeTo(out);
} else if (request instanceof UpdateRequest) {
out.writeByte((byte) 2);
((UpdateRequest) request).writeTo(out);
} else {
throw new IllegalStateException("invalid request [" + request.getClass().getSimpleName() + " ]");
}
}
/** Write a document write (index/delete/update) request without shard id, using the
 *  thin serialization variant of each request; type bytes match {@link #writeDocumentRequest}. */
static void writeDocumentRequestThin(StreamOutput out, DocWriteRequest<?> request) throws IOException {
if (request instanceof IndexRequest) {
out.writeByte((byte) 0);
((IndexRequest) request).writeThin(out);
} else if (request instanceof DeleteRequest) {
out.writeByte((byte) 1);
((DeleteRequest) request).writeThin(out);
} else if (request instanceof UpdateRequest) {
out.writeByte((byte) 2);
((UpdateRequest) request).writeThin(out);
} else {
throw new IllegalStateException("invalid request [" + request.getClass().getSimpleName() + " ]");
}
}
/**
 * Validates the compare-and-set (seq_no/primary_term) parameters of a write request,
 * accumulating any problems onto {@code validationException}.
 *
 * @return the (possibly newly created) validation exception, or the input value when
 *         all checks pass
 */
static ActionRequestValidationException validateSeqNoBasedCASParams(
DocWriteRequest<?> request,
ActionRequestValidationException validationException
) {
final long version = request.version();
final VersionType versionType = request.versionType();
// The version value must be legal for the chosen version type.
if (versionType.validateVersionForWrites(version) == false) {
validationException = addValidationError(
"illegal version value [" + version + "] for version type [" + versionType.name() + "]",
validationException
);
}
// Internal versioning may not carry an explicit version for optimistic concurrency control.
if (versionType == VersionType.INTERNAL && version != Versions.MATCH_ANY && version != Versions.MATCH_DELETED) {
validationException = addValidationError(
"internal versioning can not be used for optimistic concurrency control. "
+ "Please use `if_seq_no` and `if_primary_term` instead",
validationException
);
}
// if_seq_no-based CAS is mutually exclusive with explicit versioning.
if (request.ifSeqNo() != UNASSIGNED_SEQ_NO && (versionType != VersionType.INTERNAL || version != Versions.MATCH_ANY)) {
validationException = addValidationError("compare and write operations can not use versioning", validationException);
}
// if_seq_no and if_primary_term must be set together (both or neither).
if (request.ifPrimaryTerm() == UNASSIGNED_PRIMARY_TERM && request.ifSeqNo() != UNASSIGNED_SEQ_NO) {
validationException = addValidationError("ifSeqNo is set, but primary term is [0]", validationException);
}
if (request.ifPrimaryTerm() != UNASSIGNED_PRIMARY_TERM && request.ifSeqNo() == UNASSIGNED_SEQ_NO) {
validationException = addValidationError(
"ifSeqNo is unassigned, but primary term is [" + request.ifPrimaryTerm() + "]",
validationException
);
}
return validationException;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.segment;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Throwables.getStackTraceAsString;
import static com.google.common.collect.Maps.newHashMap;
import static com.google.common.collect.Sets.newHashSet;
import static java.lang.String.valueOf;
import static java.util.Collections.singletonMap;
import static java.util.regex.Pattern.compile;
import static org.apache.jackrabbit.oak.commons.IOUtils.closeQuietly;
import static org.apache.jackrabbit.oak.plugins.segment.SegmentId.isDataSegmentId;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringReader;
import java.util.Date;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.UUID;
import java.util.regex.Pattern;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.json.JsonObject;
import org.apache.jackrabbit.oak.commons.json.JsopTokenizer;
import org.apache.jackrabbit.oak.plugins.segment.file.FileStore.ReadOnlyStore;
/**
 * Utility class for parsing a file store's segment graph and exporting it
 * in the Guess GDF format.
 */
public final class SegmentGraph {
// Static utility holder; never instantiated.
private SegmentGraph() { }
/**
 * Visitor for receiving call backs while traversing the
 * segment graph.
 */
public interface SegmentGraphVisitor {
/**
 * A call to this method indicates that the {@code from} segment
 * references the {@code to} segment. Or if {@code to} is {@code null}
 * that the {@code from} has no references.
 *
 * @param from  the referencing segment
 * @param to    the referenced segment, or {@code null} when {@code from} has no references
 */
void accept(@Nonnull UUID from, @CheckForNull UUID to);
}
/**
 * A simple graph representation for a graph with node of type {@code T}.
 */
public static class Graph<T> {
/** The vertices of this graph */
private final Set<T> vertices = newHashSet();
/** The edges of this graph. The multiset counts parallel edges between the same node pair. */
private final Map<T, Multiset<T>> edges = newHashMap();
private void addVertex(T vertex) {
vertices.add(vertex);
}
// Record an edge; the per-source multiset is created lazily on first use.
private void addEdge(T from, T to) {
Multiset<T> tos = edges.get(from);
if (tos == null) {
tos = HashMultiset.create();
edges.put(from, tos);
}
tos.add(to);
}
/**
 * @return the vertices of this graph
 */
public Iterable<T> vertices() {
return vertices;
}
/**
 * @param vertex
 * @return {@code true} iff this graph contains {@code vertex}
 */
public boolean containsVertex(T vertex) {
return vertices.contains(vertex);
}
/**
 * @return the edges of this graph
 */
public Set<Entry<T, Multiset<T>>> edges() {
return edges.entrySet();
}
/**
 * @param from
 * @return the edges from {@code from} or {@code null} if none.
 */
public Multiset<T> getEdge(T from) {
return edges.get(from);
}
}
/**
 * Write the segment graph of a file store to a stream.
 * <p>
 * The graph is written in
 * <a href="https://gephi.github.io/users/supported-graph-formats/gdf-format/">the Guess GDF format</a>,
 * which is easily imported into <a href="https://gephi.github.io/">Gephi</a>.
 * As GDF only supports integers but the segment time stamps are encoded as long
 * the {@code epoch} argument is used as a negative offset translating all timestamps
 * into a valid int range.
 *
 * @param fileStore file store to graph
 * @param out stream to write the graph to
 * @param epoch epoch (in milliseconds)
 * @param pattern regular expression specifying inclusion of nodes or {@code null}
 * for all nodes.
 * @throws Exception
 */
public static void writeSegmentGraph(
@Nonnull ReadOnlyStore fileStore,
@Nonnull OutputStream out,
@Nonnull Date epoch,
@CheckForNull String pattern) throws Exception {
checkNotNull(epoch);
PrintWriter writer = new PrintWriter(checkNotNull(out));
try {
SegmentNodeState root = checkNotNull(fileStore).getHead();
// No pattern means every segment is included.
Predicate<UUID> filter = pattern == null
? Predicates.<UUID>alwaysTrue()
: createRegExpFilter(pattern, fileStore.getTracker());
Graph<UUID> segmentGraph = parseSegmentGraph(fileStore, filter);
// The head graph marks which nodes/edges are reachable from the current head state.
Graph<UUID> headGraph = parseHeadGraph(root.getRecordId());
// GDF node section: 8 columns per row (see writeNode for the row layout).
writer.write("nodedef>name VARCHAR, label VARCHAR, type VARCHAR, wid VARCHAR, gc INT, t INT, size INT, head BOOLEAN\n");
for (UUID segment : segmentGraph.vertices()) {
writeNode(segment, writer, headGraph.containsVertex(segment), epoch, fileStore.getTracker());
}
// GDF edge section; self-loops are suppressed.
writer.write("edgedef>node1 VARCHAR, node2 VARCHAR, head BOOLEAN\n");
for (Entry<UUID, Multiset<UUID>> edge : segmentGraph.edges()) {
UUID from = edge.getKey();
for (UUID to : edge.getValue()) {
if (!from.equals(to)) {
Multiset<UUID> he = headGraph.getEdge(from);
boolean inHead = he != null && he.contains(to);
writer.write(from + "," + to + "," + inHead + "\n");
}
}
}
} finally {
writer.close();
}
}
/**
 * Create a regular expression based inclusion filter for segment.
 *
 * @param pattern regular expression specifying inclusion of nodes.
 * @param tracker the segment tracker of the store acting upon.
 * @return a predicate matching the pattern against each segment's info string
 */
public static Predicate<UUID> createRegExpFilter(
@Nonnull String pattern,
@Nonnull final SegmentTracker tracker) {
final Pattern regExp = compile(checkNotNull(pattern));
checkNotNull(tracker);
return new Predicate<UUID>() {
@Override
public boolean apply(UUID segment) {
try {
String info = getSegmentInfo(segment, tracker);
// Segments without an info string (e.g. bulk segments) match against "NULL".
if (info == null) {
info = "NULL";
}
return regExp.matcher(info).matches();
} catch (Exception e) {
// Best effort: an unreadable segment is reported and excluded, not fatal.
System.err.println("Error accessing segment " + segment + ": " + e);
return false;
}
}
};
}
/**
 * Parse the segment graph of a file store.
 *
 * @param fileStore file store to parse
 * @param filter inclusion criteria for vertices and edges. An edge is only included if
 * both its source and target vertex are included.
 * @return the segment graph rooted as the segment containing the head node
 * state of {@code fileStore}.
 * @throws IOException
 */
@Nonnull
public static Graph<UUID> parseSegmentGraph(
@Nonnull ReadOnlyStore fileStore,
@Nonnull Predicate<UUID> filter) throws IOException {
SegmentNodeState root = checkNotNull(fileStore).getHead();
HashSet<UUID> roots = newHashSet(root.getRecordId().asUUID());
// Identity mapping: vertices of the result are the segment UUIDs themselves.
return parseSegmentGraph(fileStore, roots, filter, Functions.<UUID>identity());
}
/**
 * Write the gc generation graph of a file store to a stream.
 * <p>
 * The graph is written in
 * <a href="https://gephi.github.io/users/supported-graph-formats/gdf-format/">the Guess GDF format</a>,
 * which is easily imported into <a href="https://gephi.github.io/">Gephi</a>.
 *
 * @param fileStore file store to graph
 * @param out stream to write the graph to
 * @throws Exception
 */
public static void writeGCGraph(@Nonnull ReadOnlyStore fileStore, @Nonnull OutputStream out)
throws Exception {
PrintWriter writer = new PrintWriter(checkNotNull(out));
try {
Graph<String> gcGraph = parseGCGraph(checkNotNull(fileStore));
writer.write("nodedef>name VARCHAR\n");
for (String gen : gcGraph.vertices()) {
writer.write(gen + "\n");
}
writer.write("edgedef>node1 VARCHAR, node2 VARCHAR, weight INT\n");
for (Entry<String, Multiset<String>> edge : gcGraph.edges()) {
String from = edge.getKey();
Multiset<String> tos = edge.getValue();
// The multiset multiplicity becomes the GDF edge weight.
for (String to : tos.elementSet()) {
if (!from.equals(to) && !to.isEmpty()) {
writer.write(from + "," + to + "," + tos.count(to) + "\n");
}
}
}
} finally {
writer.close();
}
}
/**
 * Parse the gc generation graph of a file store.
 *
 * @param fileStore file store to parse
 * @return the gc generation graph rooted ad the segment containing the head node
 * state of {@code fileStore}.
 * @throws IOException
 */
@Nonnull
public static Graph<String> parseGCGraph(@Nonnull final ReadOnlyStore fileStore)
throws IOException {
SegmentNodeState root = checkNotNull(fileStore).getHead();
HashSet<UUID> roots = newHashSet(root.getRecordId().asUUID());
// Collapse each segment to its gc generation label; the resulting graph vertices are
// generation names ("Error" / "bulk" / "null" for the degenerate cases below).
return parseSegmentGraph(fileStore, roots, Predicates.<UUID>alwaysTrue(), new Function<UUID, String>() {
@Override @Nullable
public String apply(UUID segmentId) {
Map<String, String> info = getSegmentInfoMap(segmentId, fileStore.getTracker());
if (info != null) {
String error = info.get("error");
if (error != null) {
// Segment could not be read; bucket under a single "Error" node.
return "Error";
} else {
return info.get("gc");
}
} else if (!isDataSegmentId(segmentId.getLeastSignificantBits())) {
// Bulk segments carry no info map at all.
return "bulk";
} else {
return "null";
}
}
});
}
/**
 * Parse the segment graph of a file store starting with a given set of root segments.
 * The full segment graph is mapped through the passed {@code map} to the
 * graph returned by this function.
 *
 * @param fileStore file store to parse
 * @param roots the initial set of segments
 * @param map map defining an homomorphism from the segment graph into the returned graph
 * @param filter inclusion criteria for vertices and edges. An edge is only included if
 * both its source and target vertex are included.
 * @return the segment graph of {@code fileStore} rooted at {@code roots} and mapped
 * by {@code map}
 * @throws IOException
 */
@Nonnull
public static <T> Graph<T> parseSegmentGraph(
@Nonnull final ReadOnlyStore fileStore,
@Nonnull Set<UUID> roots,
@Nonnull final Predicate<UUID> filter,
@Nonnull final Function<UUID, T> map) throws IOException {
final Graph<T> graph = new Graph<T>();
checkNotNull(filter);
checkNotNull(map);
checkNotNull(fileStore).traverseSegmentGraph(checkNotNull(roots),
new SegmentGraphVisitor() {
@Override
public void accept(@Nonnull UUID from, @CheckForNull UUID to) {
T fromT = null;
T toT = null;
if (filter.apply(from)) {
fromT = map.apply(from);
graph.addVertex(fromT);
}
if (to != null && filter.apply(to)) {
toT = map.apply(to);
graph.addVertex(toT);
}
// An edge is only added when BOTH endpoints passed the filter.
if (fromT != null && toT != null) {
graph.addEdge(fromT, toT);
}
}
});
return graph;
}
/**
 * Parse the head graph. The head graph is the sub graph of the segment
 * graph containing the {@code root}.
 * @param root
 * @return the head graph of {@code root}.
 */
@Nonnull
public static Graph<UUID> parseHeadGraph(@Nonnull RecordId root) {
final Graph<UUID> graph = new Graph<UUID>();
try {
// Anonymous parser: every record-to-record reference encountered while walking
// the node tree is converted to a segment-level edge in {@code graph}.
new SegmentParser() {
private void addEdge(RecordId from, RecordId to) {
graph.addVertex(from.asUUID());
graph.addVertex(to.asUUID());
graph.addEdge(from.asUUID(), to.asUUID());
}
@Override
protected void onNode(RecordId parentId, RecordId nodeId) {
super.onNode(parentId, nodeId);
addEdge(parentId, nodeId);
}
@Override
protected void onTemplate(RecordId parentId, RecordId templateId) {
super.onTemplate(parentId, templateId);
addEdge(parentId, templateId);
}
@Override
protected void onMap(RecordId parentId, RecordId mapId, MapRecord map) {
super.onMap(parentId, mapId, map);
addEdge(parentId, mapId);
}
@Override
protected void onMapDiff(RecordId parentId, RecordId mapId, MapRecord map) {
super.onMapDiff(parentId, mapId, map);
addEdge(parentId, mapId);
}
@Override
protected void onMapLeaf(RecordId parentId, RecordId mapId, MapRecord map) {
super.onMapLeaf(parentId, mapId, map);
addEdge(parentId, mapId);
}
@Override
protected void onMapBranch(RecordId parentId, RecordId mapId, MapRecord map) {
super.onMapBranch(parentId, mapId, map);
addEdge(parentId, mapId);
}
@Override
protected void onProperty(RecordId parentId, RecordId propertyId, PropertyTemplate template) {
super.onProperty(parentId, propertyId, template);
addEdge(parentId, propertyId);
}
@Override
protected void onValue(RecordId parentId, RecordId valueId, Type<?> type) {
super.onValue(parentId, valueId, type);
addEdge(parentId, valueId);
}
@Override
protected void onBlob(RecordId parentId, RecordId blobId) {
super.onBlob(parentId, blobId);
addEdge(parentId, blobId);
}
@Override
protected void onString(RecordId parentId, RecordId stringId) {
super.onString(parentId, stringId);
addEdge(parentId, stringId);
}
@Override
protected void onList(RecordId parentId, RecordId listId, int count) {
super.onList(parentId, listId, count);
addEdge(parentId, listId);
}
@Override
protected void onListBucket(RecordId parentId, RecordId listId, int index, int count, int capacity) {
super.onListBucket(parentId, listId, index, count, capacity);
addEdge(parentId, listId);
}
}.parseNode(checkNotNull(root));
} catch (SegmentNotFoundException e) {
// Best effort: report and return whatever partial graph was built so far.
System.err.println("Error head graph parsing: " + e);
}
return graph;
}
// Emit one GDF node row. Data rows have 8 fields matching the nodedef header
// (name,label,type,wid,gc,t,size,head).
// NOTE(review): the bulk and error rows below emit only 7 fields (the size column
// is missing) against the 8-column header — verify downstream GDF readers tolerate
// short rows before relying on column positions.
private static void writeNode(UUID node, PrintWriter writer, boolean inHead, Date epoch, SegmentTracker tracker) {
Map<String, String> sInfo = getSegmentInfoMap(node, tracker);
if (!sInfo.containsKey("t")) {
// No timestamp: treat as a bulk segment.
writer.write(node + ",b,bulk,b,-1,-1," + inHead + "\n");
} else {
String error = sInfo.get("error");
if (error != null) {
writer.write(node +
"," + firstLine(error) +
",error,e,-1,-1," + inHead + "\n");
} else {
long t = asLong(sInfo.get("t"));
// Shift the long timestamp by the epoch so it fits into a GDF int column.
long ts = t - epoch.getTime();
checkArgument(ts >= Integer.MIN_VALUE && ts <= Integer.MAX_VALUE,
"Time stamp (" + new Date(t) + ") not in epoch (" +
new Date(epoch.getTime() + Integer.MIN_VALUE) + " - " +
new Date(epoch.getTime() + Integer.MAX_VALUE) + ")");
writer.write(node +
"," + sInfo.get("sno") +
",data" +
"," + sInfo.get("wid") +
"," + sInfo.get("gc") +
"," + ts +
"," + sInfo.get("size") +
"," + inHead + "\n");
}
}
}
// First line of a (possibly multi-line) string; used to keep error labels on one GDF row.
// Falls back to the full string if reading fails.
private static String firstLine(String string) {
BufferedReader reader = new BufferedReader(new StringReader(string));
try {
return reader.readLine();
} catch (IOException e) {
return string;
} finally {
closeQuietly(reader);
}
}
// Parse a decimal string into a long; throws NumberFormatException on malformed input.
private static long asLong(String string) {
return Long.valueOf(string);
}
private static Map<String, String> getSegmentInfoMap(UUID segment, SegmentTracker tracker) {
return new SegmentInfo(segment, tracker).getInfoMap();
}
private static String getSegmentInfo(UUID segment, SegmentTracker tracker) {
return new SegmentInfo(segment, tracker).getInfo();
}
// Lazily-resolving view of a single segment's metadata (JSON info string + size).
private static class SegmentInfo {
private final UUID uuid;
private final SegmentTracker tracker;
// Resolved lazily by getSegmentId(); null until first use.
private SegmentId id;
SegmentInfo(UUID uuid, SegmentTracker tracker) {
this.uuid = uuid;
this.tracker = tracker;
}
boolean isData() {
return isDataSegmentId(uuid.getLeastSignificantBits());
}
SegmentId getSegmentId() {
if (id == null) {
id = tracker.getSegmentId(
uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());
}
return id;
}
int getSize() {
return getSegmentId().getSegment().size();
}
// Raw JSON info string of a data segment; null for bulk segments.
String getInfo() {
if (isData()) {
return getSegmentId().getSegment().getSegmentInfo();
} else {
return null;
}
}
// Info string parsed into a map, augmented with the segment size.
// A missing segment yields a single-entry map under the "error" key.
Map<String, String> getInfoMap() {
try {
Map<String, String> infoMap = newHashMap();
String info = getInfo();
if (info != null) {
JsopTokenizer tokenizer = new JsopTokenizer(info);
tokenizer.read('{');
infoMap.putAll(JsonObject.create(tokenizer).getProperties());
}
infoMap.put("size", valueOf(getSize()));
return infoMap;
} catch (SegmentNotFoundException e) {
return singletonMap("error", getStackTraceAsString(e));
}
}
}
}
| |
// Part of SourceAFIS for Java: https://sourceafis.machinezoo.com/java
package com.machinezoo.sourceafis.engine.primitives;
import java.util.*;
/**
 * A {@link List} implementation backed by a {@link CircularArray}.
 * <p>
 * Equality, hashing and string form follow the {@link List} contract.
 * {@link #subList(int, int)} and {@link #toArray(Object[])} are unsupported,
 * as are the mutating operations of the iterators returned here.
 */
public class CircularList<T> implements List<T> {
    /** Backing storage for the elements. */
    private final CircularArray inner = new CircularArray(16);
    @Override
    public boolean add(T item) {
        // Appending is just a positional insert at the end.
        add(inner.size, item);
        return true;
    }
    @Override
    public void add(int index, T item) {
        // Open one slot at the given position, then fill it.
        inner.insert(index, 1);
        inner.set(index, item);
    }
    @Override
    public boolean addAll(Collection<? extends T> collection) {
        // Delegates to the positional bulk insert at the tail.
        return addAll(size(), collection);
    }
    @Override
    public boolean addAll(int index, Collection<? extends T> collection) {
        // Open all slots at once, then fill them in iteration order.
        inner.insert(index, collection.size());
        int slot = index;
        for (T item : collection)
            inner.set(slot++, item);
        return !collection.isEmpty();
    }
    @Override
    public void clear() {
        inner.remove(0, inner.size);
    }
    @Override
    public boolean contains(Object item) {
        return indexOf(item) >= 0;
    }
    @Override
    public boolean containsAll(Collection<?> collection) {
        for (Object required : collection)
            if (!contains(required))
                return false;
        return true;
    }
    @Override
    public boolean equals(Object obj) {
        // Per the List contract: equal to any List with equal elements in order.
        if (!(obj instanceof List<?>))
            return false;
        List<?> that = (List<?>)obj;
        int count = size();
        if (that.size() != count)
            return false;
        for (int at = 0; at < count; ++at)
            if (!Objects.equals(get(at), that.get(at)))
                return false;
        return true;
    }
    @SuppressWarnings("unchecked")
    @Override
    public T get(int index) {
        return (T)inner.get(index);
    }
    @Override
    public int hashCode() {
        // Standard List hash: 31-based polynomial over element hashes.
        int result = 1;
        for (int at = 0; at < size(); ++at)
            result = 31 * result + Objects.hashCode(inner.get(at));
        return result;
    }
    @Override
    public int indexOf(Object item) {
        int count = size();
        for (int at = 0; at < count; ++at)
            if (Objects.equals(get(at), item))
                return at;
        return -1;
    }
    @Override
    public boolean isEmpty() {
        return size() == 0;
    }
    @Override
    public Iterator<T> iterator() {
        return new ArrayIterator();
    }
    @Override
    public int lastIndexOf(Object item) {
        for (int at = size() - 1; at >= 0; --at)
            if (Objects.equals(get(at), item))
                return at;
        return -1;
    }
    @Override
    public ListIterator<T> listIterator() {
        return new ArrayIterator();
    }
    @Override
    public ListIterator<T> listIterator(int index) {
        // Reject out-of-range start positions before handing out the iterator.
        inner.validateCursorIndex(index);
        ArrayIterator cursor = new ArrayIterator();
        cursor.position = index;
        return cursor;
    }
    @Override
    public T remove(int index) {
        T removed = get(index);
        inner.remove(index, 1);
        return removed;
    }
    @Override
    public boolean remove(Object item) {
        int at = indexOf(item);
        if (at < 0)
            return false;
        remove(at);
        return true;
    }
    @Override
    public boolean removeAll(Collection<?> collection) {
        boolean modified = false;
        for (Object item : collection)
            if (remove(item))
                modified = true;
        return modified;
    }
    @Override
    public boolean retainAll(Collection<?> collection) {
        // Walk backwards so removals do not shift yet-unvisited indexes.
        boolean modified = false;
        for (int at = size() - 1; at >= 0; --at)
            if (!collection.contains(get(at))) {
                remove(at);
                modified = true;
            }
        return modified;
    }
    @Override
    public T set(int index, T element) {
        T replaced = get(index);
        inner.set(index, element);
        return replaced;
    }
    @Override
    public int size() {
        return inner.size;
    }
    @Override
    public List<T> subList(int fromIndex, int toIndex) {
        throw new UnsupportedOperationException();
    }
    @Override
    public Object[] toArray() {
        int count = size();
        Object[] copy = new Object[count];
        for (int at = 0; at < count; ++at)
            copy[at] = get(at);
        return copy;
    }
    @Override
    public <U> U[] toArray(U[] array) {
        throw new UnsupportedOperationException();
    }
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder();
        text.append("[");
        String separator = "";
        for (int at = 0; at < inner.size; ++at) {
            text.append(separator);
            text.append(Objects.toString(inner.get(at)));
            separator = ", ";
        }
        text.append("]");
        return text.toString();
    }
    /** Read-only cursor over the list; structural mutation via the iterator is unsupported. */
    private class ArrayIterator implements ListIterator<T> {
        // Cursor sits between elements; next() returns the element at this index.
        int position = 0;
        @Override
        public void add(T e) {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean hasNext() {
            return position < size();
        }
        @Override
        public T next() {
            if (position >= size())
                throw new NoSuchElementException();
            return get(position++);
        }
        @Override
        public int nextIndex() {
            return position;
        }
        @Override
        public boolean hasPrevious() {
            return position > 0;
        }
        @Override
        public T previous() {
            if (position <= 0)
                throw new NoSuchElementException();
            return get(--position);
        }
        @Override
        public int previousIndex() {
            return position - 1;
        }
        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
        @Override
        public void set(T e) {
            throw new UnsupportedOperationException();
        }
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.clouddirectory.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/clouddirectory-2017-01-11/UpdateFacet" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateFacetRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
// Mutable request state; only attributeUpdates is defensively copied (by its setter).
/**
 * <p>
 * The Amazon Resource Name (ARN) that is associated with the <a>Facet</a>. For more information, see <a>arns</a>.
 * </p>
 */
private String schemaArn;
/**
 * <p>
 * The name of the facet.
 * </p>
 */
private String name;
/**
 * <p>
 * List of attributes that need to be updated in a given schema <a>Facet</a>. Each attribute is followed by
 * <code>AttributeAction</code>, which specifies the type of update operation to perform.
 * </p>
 */
private java.util.List<FacetAttributeUpdate> attributeUpdates;
/**
 * <p>
 * The object type that is associated with the facet. See <a>CreateFacetRequest$ObjectType</a> for more details.
 * </p>
 */
private String objectType;
/**
 * <p>
 * The Amazon Resource Name (ARN) that is associated with the <a>Facet</a>. For more information, see <a>arns</a>.
 * </p>
 *
 * @param schemaArn
 *        The Amazon Resource Name (ARN) that is associated with the <a>Facet</a>. For more information, see
 *        <a>arns</a>. Stored as-is; no validation is performed here.
 */
public void setSchemaArn(String schemaArn) {
this.schemaArn = schemaArn;
}
/**
 * <p>
 * The Amazon Resource Name (ARN) that is associated with the <a>Facet</a>. For more information, see <a>arns</a>.
 * </p>
 *
 * @return The Amazon Resource Name (ARN) that is associated with the <a>Facet</a>, or {@code null} if unset. For
 *         more information, see <a>arns</a>.
 */
public String getSchemaArn() {
return this.schemaArn;
}
/**
 * <p>
 * The Amazon Resource Name (ARN) that is associated with the <a>Facet</a>. For more information, see <a>arns</a>.
 * </p>
 * Delegates to {@link #setSchemaArn(String)} and returns {@code this} for fluent chaining.
 *
 * @param schemaArn
 *        The Amazon Resource Name (ARN) that is associated with the <a>Facet</a>. For more information, see
 *        <a>arns</a>.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public UpdateFacetRequest withSchemaArn(String schemaArn) {
setSchemaArn(schemaArn);
return this;
}
/**
 * <p>
 * The name of the facet.
 * </p>
 *
 * @param name
 *        The name of the facet. Stored as-is; no validation is performed here.
 */
public void setName(String name) {
this.name = name;
}
/**
 * <p>
 * The name of the facet.
 * </p>
 *
 * @return The name of the facet, or {@code null} if unset.
 */
public String getName() {
return this.name;
}
    /**
     * <p>
     * The name of the facet.
     * </p>
     *
     * @param name
     *        The name of the facet.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateFacetRequest withName(String name) {
        setName(name);
        return this;
    }
    /**
     * <p>
     * List of attributes that need to be updated in a given schema <a>Facet</a>. Each attribute is followed by
     * <code>AttributeAction</code>, which specifies the type of update operation to perform.
     * </p>
     *
     * @return List of attributes that need to be updated in a given schema <a>Facet</a>. Each attribute is followed by
     *         <code>AttributeAction</code>, which specifies the type of update operation to perform.
     */
    // NOTE(review): returns the live internal list (no defensive copy), so callers can
    // mutate this request's state through the returned reference.
    public java.util.List<FacetAttributeUpdate> getAttributeUpdates() {
        return attributeUpdates;
    }
/**
* <p>
* List of attributes that need to be updated in a given schema <a>Facet</a>. Each attribute is followed by
* <code>AttributeAction</code>, which specifies the type of update operation to perform.
* </p>
*
* @param attributeUpdates
* List of attributes that need to be updated in a given schema <a>Facet</a>. Each attribute is followed by
* <code>AttributeAction</code>, which specifies the type of update operation to perform.
*/
public void setAttributeUpdates(java.util.Collection<FacetAttributeUpdate> attributeUpdates) {
if (attributeUpdates == null) {
this.attributeUpdates = null;
return;
}
this.attributeUpdates = new java.util.ArrayList<FacetAttributeUpdate>(attributeUpdates);
}
/**
* <p>
* List of attributes that need to be updated in a given schema <a>Facet</a>. Each attribute is followed by
* <code>AttributeAction</code>, which specifies the type of update operation to perform.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setAttributeUpdates(java.util.Collection)} or {@link #withAttributeUpdates(java.util.Collection)} if you
* want to override the existing values.
* </p>
*
* @param attributeUpdates
* List of attributes that need to be updated in a given schema <a>Facet</a>. Each attribute is followed by
* <code>AttributeAction</code>, which specifies the type of update operation to perform.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public UpdateFacetRequest withAttributeUpdates(FacetAttributeUpdate... attributeUpdates) {
if (this.attributeUpdates == null) {
setAttributeUpdates(new java.util.ArrayList<FacetAttributeUpdate>(attributeUpdates.length));
}
for (FacetAttributeUpdate ele : attributeUpdates) {
this.attributeUpdates.add(ele);
}
return this;
}
    /**
     * <p>
     * List of attributes that need to be updated in a given schema <a>Facet</a>. Each attribute is followed by
     * <code>AttributeAction</code>, which specifies the type of update operation to perform.
     * </p>
     *
     * @param attributeUpdates
     *        List of attributes that need to be updated in a given schema <a>Facet</a>. Each attribute is followed by
     *        <code>AttributeAction</code>, which specifies the type of update operation to perform. Unlike the varargs
     *        overload, this replaces (does not append to) any existing values.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateFacetRequest withAttributeUpdates(java.util.Collection<FacetAttributeUpdate> attributeUpdates) {
        setAttributeUpdates(attributeUpdates);
        return this;
    }
    /**
     * <p>
     * The object type that is associated with the facet. See <a>CreateFacetRequest$ObjectType</a> for more details.
     * </p>
     *
     * @param objectType
     *        The object type that is associated with the facet. See <a>CreateFacetRequest$ObjectType</a> for more
     *        details.
     * @see ObjectType
     */
    public void setObjectType(String objectType) {
        this.objectType = objectType;
    }
    /**
     * <p>
     * The object type that is associated with the facet. See <a>CreateFacetRequest$ObjectType</a> for more details.
     * </p>
     *
     * @return The object type that is associated with the facet, or null if not set. See
     *         <a>CreateFacetRequest$ObjectType</a> for more details.
     * @see ObjectType
     */
    public String getObjectType() {
        return this.objectType;
    }
    /**
     * <p>
     * The object type that is associated with the facet. See <a>CreateFacetRequest$ObjectType</a> for more details.
     * </p>
     *
     * @param objectType
     *        The object type that is associated with the facet. See <a>CreateFacetRequest$ObjectType</a> for more
     *        details.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ObjectType
     */
    public UpdateFacetRequest withObjectType(String objectType) {
        setObjectType(objectType);
        return this;
    }
    /**
     * <p>
     * The object type that is associated with the facet. See <a>CreateFacetRequest$ObjectType</a> for more details.
     * </p>
     *
     * @param objectType
     *        The object type that is associated with the facet. See <a>CreateFacetRequest$ObjectType</a> for more
     *        details. Must not be null (a null value causes a NullPointerException downstream).
     * @see ObjectType
     */
    public void setObjectType(ObjectType objectType) {
        // Delegates to the enum-accepting fluent setter, which stores objectType.toString().
        withObjectType(objectType);
    }
    /**
     * <p>
     * The object type that is associated with the facet. See <a>CreateFacetRequest$ObjectType</a> for more details.
     * </p>
     *
     * @param objectType
     *        The object type that is associated with the facet. See <a>CreateFacetRequest$ObjectType</a> for more
     *        details. Must not be null: toString() is invoked on it, so a null argument throws
     *        NullPointerException.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ObjectType
     */
    public UpdateFacetRequest withObjectType(ObjectType objectType) {
        this.objectType = objectType.toString();
        return this;
    }
    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    // Only non-null fields are emitted. Note: each field except the last appends a
    // trailing comma, so if ObjectType is null while an earlier field is set, the
    // result ends with ",}" — this matches the generated-SDK convention.
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getSchemaArn() != null)
            sb.append("SchemaArn: ").append(getSchemaArn()).append(",");
        if (getName() != null)
            sb.append("Name: ").append(getName()).append(",");
        if (getAttributeUpdates() != null)
            sb.append("AttributeUpdates: ").append(getAttributeUpdates()).append(",");
        if (getObjectType() != null)
            sb.append("ObjectType: ").append(getObjectType());
        sb.append("}");
        return sb.toString();
    }
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof UpdateFacetRequest == false)
return false;
UpdateFacetRequest other = (UpdateFacetRequest) obj;
if (other.getSchemaArn() == null ^ this.getSchemaArn() == null)
return false;
if (other.getSchemaArn() != null && other.getSchemaArn().equals(this.getSchemaArn()) == false)
return false;
if (other.getName() == null ^ this.getName() == null)
return false;
if (other.getName() != null && other.getName().equals(this.getName()) == false)
return false;
if (other.getAttributeUpdates() == null ^ this.getAttributeUpdates() == null)
return false;
if (other.getAttributeUpdates() != null && other.getAttributeUpdates().equals(this.getAttributeUpdates()) == false)
return false;
if (other.getObjectType() == null ^ this.getObjectType() == null)
return false;
if (other.getObjectType() != null && other.getObjectType().equals(this.getObjectType()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getSchemaArn() == null) ? 0 : getSchemaArn().hashCode());
hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
hashCode = prime * hashCode + ((getAttributeUpdates() == null) ? 0 : getAttributeUpdates().hashCode());
hashCode = prime * hashCode + ((getObjectType() == null) ? 0 : getObjectType().hashCode());
return hashCode;
}
    // Covariant clone delegating to the superclass implementation (superclass not visible
    // in this chunk — presumably a field-level shallow copy; confirm against the base request class).
    @Override
    public UpdateFacetRequest clone() {
        return (UpdateFacetRequest) super.clone();
    }
}
| |
/*
* Copyright (c) 1994, 2004, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.tools.java;
/**
* This class represents an Java class declaration. It refers
* to either a binary or source definition.
*
* ClassDefinitions are loaded on demand, this means that
* class declarations are late bound. The definition of the
* class is obtained in stages. The status field describes
* the state of the class definition:
*
* CS_UNDEFINED - the definition is not yet loaded
* CS_UNDECIDED - a binary definition is loaded, but it is
* still unclear if the source definition need to
* be loaded
* CS_BINARY - the binary class is loaded
* CS_PARSED - the class is loaded from the source file, the
* type information is available, but the class has
* not yet been compiled.
* CS_CHECKED - the class is loaded from the source file and has
* been type-checked.
* CS_COMPILED - the class has been type checked, compiled,
* and written out.
* CS_NOTFOUND - no class definition could be found
*
* WARNING: The contents of this source file are not part of any
* supported API. Code that depends on them does so at its own risk:
* they are subject to change or removal without notice.
*/
public final
class ClassDeclaration implements Constants {
    // Load status of this declaration; one of the CS_* constants described in the
    // class comment above (CS_UNDEFINED .. CS_NOTFOUND).
    int status;

    // The class type; also carries the fully qualified class name.
    Type type;

    // The loaded definition, or null while the class is still undefined.
    ClassDefinition definition;

    /**
     * Constructor. Creates an undefined declaration for the named class.
     */
    public ClassDeclaration(Identifier name) {
        this.type = Type.tClass(name);
    }

    /**
     * Get the status of the class (a CS_* constant).
     */
    public int getStatus() {
        return status;
    }

    /**
     * Get the name of the class.
     */
    public Identifier getName() {
        return type.getClassName();
    }

    /**
     * Get the type of the class.
     */
    public Type getType() {
        return type;
    }

    /**
     * Check if the class is defined, i.e. a binary or source definition
     * has been loaded (CS_BINARY, CS_PARSED, CS_CHECKED or CS_COMPILED).
     */
    public boolean isDefined() {
        switch (status) {
          case CS_BINARY:
          case CS_PARSED:
          case CS_CHECKED:
          case CS_COMPILED:
            return true;
        }
        return false;
    }

    /**
     * Get the definition of this class. Returns null if
     * the class is not yet defined.
     */
    public ClassDefinition getClassDefinition() {
        return definition;
    }

    /**
     * This is a flag for use by getClassDefinition(env). It is
     * used to mark that a class has been successfully looked up
     * by that method before.
     */
    private boolean found = false;

    /**
     * Get the definition of this class; if the class is not
     * yet defined, load the definition. Loading a class may
     * throw various exceptions. A basicCheck() is performed on
     * definitions the first time they are returned (except for
     * classes nested inside a block, which are checked with their
     * surrounding member).
     *
     * @throws ClassNotFound if no definition can be located
     */
    public ClassDefinition getClassDefinition(Environment env)
                                                throws ClassNotFound {
        if (tracing) env.dtEvent("getClassDefinition: " +
                                 getName() + ", status " + getStatus());

        // The majority of calls to getClassDefinition() are duplicates.
        // This check makes them fast.  It also allows us to avoid
        // duplicate, useless calls to basicCheck().  In the future it
        // would be good to add an additional status value, CS_BASICCHECKED.
        if (found) {
            return definition;
        }

        // Loop until loadDefinition() advances the status to a terminal state.
        for(;;) {
            switch (status) {
              case CS_UNDEFINED:
              case CS_UNDECIDED:
              case CS_SOURCE:
                env.loadDefinition(this);
                break;

              case CS_BINARY:
              case CS_PARSED:
                //+FIX FOR BUGID 4056065
                //definition.basicCheck(env);
                if (!definition.isInsideLocal()) {
                    // Classes inside a block, including anonymous classes,
                    // are checked when their surrounding member is checked.
                    definition.basicCheck(env);
                }
                //-FIX FOR BUGID 4056065
                found = true;
                return definition;

              case CS_CHECKED:
              case CS_COMPILED:
                found = true;
                return definition;

              default:
                throw new ClassNotFound(getName());
            }
        }
    }

    /**
     * Get the definition of this class; if the class is not
     * yet defined, load the definition. Loading a class may
     * throw various exceptions. Performs no basicCheck() on this
     * class.
     *
     * @throws ClassNotFound if no definition can be located
     */
    public ClassDefinition getClassDefinitionNoCheck(Environment env) throws ClassNotFound {
        // Trace message fixed to name this method (was a copy-paste of
        // getClassDefinition's message).
        if (tracing) env.dtEvent("getClassDefinitionNoCheck: " +
                                 getName() + ", status " + getStatus());
        for(;;) {
            switch (status) {
              case CS_UNDEFINED:
              case CS_UNDECIDED:
              case CS_SOURCE:
                env.loadDefinition(this);
                break;

              case CS_BINARY:
              case CS_PARSED:
              case CS_CHECKED:
              case CS_COMPILED:
                return definition;

              default:
                throw new ClassNotFound(getName());
            }
        }
    }

    /**
     * Set the class definition and its new status.
     *
     * @throws CompilerError if the definition's name does not match this
     *         declaration's name
     */
    public void setDefinition(ClassDefinition definition, int status) {

        // Sanity checks.

        // The name of the definition should match that of the declaration.
        if ((definition != null) && !getName().equals(definition.getName())) {
            throw new CompilerError("setDefinition: name mismatch: " +
                                    this + ", " + definition);
        }

        // The status states can be considered ordered in the same
        // manner as their numerical values. We expect classes to
        // progress through a sequence of monotonically increasing
        // states. NOTE: There are currently exceptions to this rule
        // which are believed to be legitimate.  In particular, a
        // class may be checked more than once, though we believe that
        // this is unnecessary and may be avoided.
        /*-----------------*
        if (status <= this.status) {
            System.out.println("STATUS REGRESSION: " +
                               this + " FROM " + this.status + " TO " + status);
        }
        *------------------*/

        this.definition = definition;
        this.status = status;
    }

    /**
     * Equality. Two declarations are equal iff their types are equal.
     */
    public boolean equals(Object obj) {
        if (obj instanceof ClassDeclaration) {
            return type.equals(((ClassDeclaration)obj).type);
        }
        return false;
    }

    /**
     * Hash code, added so that the equals/hashCode contract holds:
     * equality is defined by the type, so hash on the type as well.
     * (Previously the identity hash from Object was inherited, which
     * broke hashed collections keyed on declarations.)
     */
    public int hashCode() {
        return type.hashCode();
    }

    /**
     * toString
     */
    public String toString() {
        String name = getName().toString();
        String type = "type ";
        String nested = getName().isInner() ? "nested " : "";
        if (getClassDefinition() != null) {
            if (getClassDefinition().isInterface()) {
                type = "interface ";
            } else {
                type = "class ";
            }
            if (!getClassDefinition().isTopLevel()) {
                nested = "inner ";
                if (getClassDefinition().isLocal()) {
                    nested = "local ";
                    if (!getClassDefinition().isAnonymous()) {
                        name = getClassDefinition().getLocalName() +
                            " (" + name + ")";
                    }
                }
            }
        }
        return nested + type + name;
    }
}
| |
/*
* Copyright 2015-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.bgp.controller.impl;
import com.google.common.base.MoreObjects;
import com.google.common.base.Preconditions;
import org.jboss.netty.channel.Channel;
import org.onlab.packet.Ip4Address;
import org.onlab.packet.IpAddress;
import org.onosproject.bgp.controller.BgpController;
import org.onosproject.bgp.controller.BgpLocalRib;
import org.onosproject.bgp.controller.BgpPeer;
import org.onosproject.bgp.controller.BgpSessionInfo;
import org.onosproject.bgpio.exceptions.BgpParseException;
import org.onosproject.bgpio.protocol.BgpEvpnNlri;
import org.onosproject.bgpio.protocol.BgpFactories;
import org.onosproject.bgpio.protocol.BgpFactory;
import org.onosproject.bgpio.protocol.BgpLSNlri;
import org.onosproject.bgpio.protocol.BgpMessage;
import org.onosproject.bgpio.protocol.flowspec.BgpFlowSpecNlri;
import org.onosproject.bgpio.protocol.flowspec.BgpFlowSpecRouteKey;
import org.onosproject.bgpio.protocol.linkstate.BgpLinkLsNlriVer4;
import org.onosproject.bgpio.protocol.linkstate.BgpNodeLSNlriVer4;
import org.onosproject.bgpio.protocol.linkstate.BgpPrefixIPv4LSNlriVer4;
import org.onosproject.bgpio.protocol.linkstate.PathAttrNlriDetails;
import org.onosproject.bgpio.types.AsPath;
import org.onosproject.bgpio.types.As4Path;
import org.onosproject.bgpio.types.BgpExtendedCommunity;
import org.onosproject.bgpio.types.BgpValueType;
import org.onosproject.bgpio.types.LocalPref;
import org.onosproject.bgpio.types.Med;
import org.onosproject.bgpio.types.MpReachNlri;
import org.onosproject.bgpio.types.MpUnReachNlri;
import org.onosproject.bgpio.types.MultiProtocolExtnCapabilityTlv;
import org.onosproject.bgpio.types.Origin;
import org.onosproject.bgpio.types.attr.WideCommunity;
import org.onosproject.bgpio.types.RpdCapabilityTlv;
import org.onosproject.bgpio.util.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.concurrent.RejectedExecutionException;
/**
* BGPPeerImpl implements BGPPeer, maintains peer information and store updates
* in RIB .
*/
public class BgpPeerImpl implements BgpPeer {

    protected final Logger log = LoggerFactory.getLogger(BgpPeerImpl.class);

    private static final String SHUTDOWN_MSG = "Worker has already been shutdown";

    private BgpController bgpController;
    private Channel channel;
    protected String channelId;
    private boolean connected;
    protected boolean isHandShakeComplete = false;
    private BgpSessionInfo sessionInfo;
    private BgpPacketStatsImpl pktStats;
    private BgpLocalRib bgplocalRib;
    private BgpLocalRib bgplocalRibVpn;
    private AdjRibIn adjRib;
    private VpnAdjRibIn vpnAdjRib;

    /**
     * Return the adjacency RIB-IN.
     *
     * @return adjRib the adjacency RIB-IN
     */
    public AdjRibIn adjacencyRib() {
        return adjRib;
    }

    /**
     * Return the adjacency RIB-IN with VPN.
     *
     * @return vpnAdjRib the adjacency RIB-IN with VPN
     */
    public VpnAdjRibIn vpnAdjacencyRib() {
        return vpnAdjRib;
    }

    @Override
    public BgpSessionInfo sessionInfo() {
        return sessionInfo;
    }

    /**
     * Initialize peer.
     *
     * @param bgpController controller instance
     * @param sessionInfo bgp session info
     * @param pktStats packet statistics
     */
    public BgpPeerImpl(BgpController bgpController, BgpSessionInfo sessionInfo,
                       BgpPacketStatsImpl pktStats) {
        this.bgpController = bgpController;
        this.sessionInfo = sessionInfo;
        this.pktStats = pktStats;
        this.bgplocalRib = bgpController.bgpLocalRib();
        this.bgplocalRibVpn = bgpController.bgpLocalRibVpn();
        this.adjRib = new AdjRibIn();
        this.vpnAdjRib = new VpnAdjRibIn();
    }

    /**
     * Check if peer supports a capability, by scanning the capabilities
     * advertised by the remote peer during session setup.
     *
     * @param type capability type
     * @param afi address family identifier
     * @param sAfi subsequent address family identifier
     * @return true if capability is supported, otherwise false
     */
    public final boolean isCapabilitySupported(short type, short afi,
                                               byte sAfi) {

        List<BgpValueType> capability = sessionInfo.remoteBgpCapability();
        ListIterator<BgpValueType> listIterator = capability.listIterator();

        while (listIterator.hasNext()) {
            BgpValueType tlv = listIterator.next();

            if (tlv.getType() == type) {
                if (tlv.getType() == MultiProtocolExtnCapabilityTlv.TYPE) {
                    MultiProtocolExtnCapabilityTlv temp = (MultiProtocolExtnCapabilityTlv) tlv;
                    if ((temp.getAfi() == afi) && (temp.getSafi() == sAfi)) {
                        return true;
                    }
                } else if (tlv.getType() == RpdCapabilityTlv.TYPE) {
                    RpdCapabilityTlv temp = (RpdCapabilityTlv) tlv;
                    if ((temp.getAfi() == afi) && (temp.getSafi() == sAfi)) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    /**
     * Send flow specification update message to peer.
     *
     * @param operType operation type (ADD maps to MP_REACH, DELETE to MP_UNREACH)
     * @param routeKey flow rule key
     * @param flowSpec flow specification details
     * @param wideCommunity for route policy; may be null, in which case it is omitted
     */
    public final void sendFlowSpecUpdateMessageToPeer(OperationType operType,
                                                      BgpFlowSpecRouteKey routeKey,
                                                      BgpFlowSpecNlri flowSpec,
                                                      WideCommunity wideCommunity) {

        List<BgpValueType> attributesList = new LinkedList<>();
        byte sessionType = sessionInfo.isIbgpSession() ? (byte) 0 : (byte) 1;
        byte sAfi = Constants.SAFI_FLOWSPEC_VALUE;

        boolean isFsCapabilitySet = isCapabilitySupported(MultiProtocolExtnCapabilityTlv.TYPE,
                                                          Constants.AFI_FLOWSPEC_VALUE,
                                                          Constants.SAFI_FLOWSPEC_VALUE);

        boolean isVpnFsCapabilitySet = isCapabilitySupported(MultiProtocolExtnCapabilityTlv.TYPE,
                                                             Constants.AFI_FLOWSPEC_VALUE,
                                                             Constants.VPN_SAFI_FLOWSPEC_VALUE);

        boolean isRpdCapabilitySet = isCapabilitySupported(RpdCapabilityTlv.TYPE,
                                                           Constants.AFI_FLOWSPEC_RPD_VALUE,
                                                           Constants.SAFI_FLOWSPEC_RPD_VALUE);

        boolean isVpnRpdCapabilitySet = isCapabilitySupported(RpdCapabilityTlv.TYPE,
                                                              Constants.AFI_FLOWSPEC_RPD_VALUE,
                                                              Constants.VPN_SAFI_FLOWSPEC_RDP_VALUE);

        if ((!isFsCapabilitySet) && (!isVpnFsCapabilitySet)
                && (!isRpdCapabilitySet) && (!isVpnRpdCapabilitySet)) {
            // BUGFIX: message previously had no {} placeholder, so the peer
            // address argument was silently dropped by SLF4J.
            log.debug("Peer {} does not support BGP flow spec capability",
                      channel.getRemoteAddress());
            return;
        }

        if (isVpnFsCapabilitySet) {
            sAfi = Constants.VPN_SAFI_FLOWSPEC_VALUE;
        } else if (isVpnRpdCapabilitySet) {
            sAfi = Constants.VPN_SAFI_FLOWSPEC_RDP_VALUE;
        }
        attributesList.add(new Origin((byte) 0));

        if (sessionType != 0) {
            // EBGP: prepend our own AS number, using 4-byte paths when the
            // large-AS capability is configured.
            if (!bgpController.getConfig().getLargeASCapability()) {
                List<Short> aspathSet = new ArrayList<>();
                List<Short> aspathSeq = new ArrayList<>();
                aspathSeq.add((short) bgpController.getConfig().getAsNumber());

                AsPath asPath = new AsPath(aspathSet, aspathSeq);
                attributesList.add(asPath);
            } else {
                List<Integer> aspathSet = new ArrayList<>();
                List<Integer> aspathSeq = new ArrayList<>();
                aspathSeq.add(bgpController.getConfig().getAsNumber());

                As4Path as4Path = new As4Path(aspathSet, aspathSeq);
                attributesList.add(as4Path);
            }
            attributesList.add(new Med(0));
        } else {
            // IBGP: empty AS path plus LOCAL_PREF.
            attributesList.add(new AsPath());
            attributesList.add(new Med(0));
            attributesList.add(new LocalPref(100));
        }

        attributesList.add(new BgpExtendedCommunity(flowSpec.fsActionTlv()));
        if (wideCommunity != null) {
            attributesList.add(wideCommunity);
        }

        if (operType == OperationType.ADD) {
            attributesList
                    .add(new MpReachNlri(flowSpec, Constants.AFI_FLOWSPEC_VALUE,
                                         sAfi));
        } else if (operType == OperationType.DELETE) {
            attributesList
                    .add(new MpUnReachNlri(flowSpec,
                                           Constants.AFI_FLOWSPEC_VALUE, sAfi));
        }

        BgpMessage msg = Controller.getBgpMessageFactory4()
                .updateMessageBuilder().setBgpPathAttributes(attributesList)
                .build();

        log.debug("Sending Flow spec Update message to {}",
                  channel.getRemoteAddress());
        channel.write(Collections.singletonList(msg));
    }

    @Override
    public void updateFlowSpec(OperationType operType,
                               BgpFlowSpecRouteKey routeKey,
                               BgpFlowSpecNlri flowSpec,
                               WideCommunity wideCommunity) {
        Preconditions
                .checkNotNull(operType,
                              "flow specification operation type cannot be null");
        Preconditions.checkNotNull(routeKey,
                                   "flow specification prefix cannot be null");
        Preconditions.checkNotNull(flowSpec,
                                   "flow specification details cannot be null");
        Preconditions.checkNotNull(flowSpec.fsActionTlv(),
                                   "flow specification action cannot be null");
        sendFlowSpecUpdateMessageToPeer(operType, routeKey, flowSpec,
                                        wideCommunity);
    }

    @Override
    public void updateEvpn(OperationType operType, Ip4Address nextHop,
                           List<BgpValueType> extCommunit,
                           List<BgpEvpnNlri> eVpnComponents) {
        Preconditions.checkNotNull(operType, "Operation type cannot be null");
        Preconditions.checkNotNull(eVpnComponents, "Evpn nlri cannot be null");
        Preconditions.checkNotNull(nextHop, "Next hop cannot be null");
        sendEvpnUpdateMessageToPeer(operType, nextHop, extCommunit, eVpnComponents);
    }

    /**
     * Build and send an EVPN update message, provided the peer advertised the
     * EVPN multiprotocol capability.
     */
    private void sendEvpnUpdateMessageToPeer(OperationType operType,
                                             Ip4Address nextHop,
                                             List<BgpValueType> extCommunit,
                                             List<BgpEvpnNlri> eVpnComponents) {
        List<BgpValueType> attributesList = new LinkedList<>();
        byte sessionType = sessionInfo.isIbgpSession() ? (byte) 0 : (byte) 1;
        short afi = Constants.AFI_EVPN_VALUE;
        byte safi = Constants.SAFI_EVPN_VALUE;

        boolean isEvpnCapabilitySet = isCapabilitySupported(MultiProtocolExtnCapabilityTlv.TYPE,
                                                            afi, safi);
        if (!isEvpnCapabilitySet) {
            // BUGFIX: message previously had no {} placeholder, so the peer
            // address argument was silently dropped by SLF4J.
            log.debug("Peer {} does not support BGP Evpn capability",
                      channel.getRemoteAddress());
            return;
        }

        attributesList.add(new Origin((byte) 0));

        if (sessionType != 0) {
            // EBGP: prepend our own AS number, using 4-byte paths when the
            // large-AS capability is configured.
            if (!bgpController.getConfig().getLargeASCapability()) {
                List<Short> aspathSet = new ArrayList<>();
                List<Short> aspathSeq = new ArrayList<>();
                aspathSeq.add((short) bgpController.getConfig().getAsNumber());
                AsPath asPath = new AsPath(aspathSet, aspathSeq);
                attributesList.add(asPath);
            } else {
                List<Integer> aspathSet = new ArrayList<>();
                List<Integer> aspathSeq = new ArrayList<>();
                aspathSeq.add(bgpController.getConfig().getAsNumber());
                As4Path as4Path = new As4Path(aspathSet, aspathSeq);
                attributesList.add(as4Path);
            }
        } else {
            attributesList.add(new AsPath());
        }

        attributesList.add(new BgpExtendedCommunity(extCommunit));

        if (operType == OperationType.ADD) {
            attributesList
                    .add(new MpReachNlri(eVpnComponents, afi, safi, nextHop));
        } else if (operType == OperationType.DELETE) {
            attributesList.add(new MpUnReachNlri(eVpnComponents, afi, safi));
        }

        BgpMessage msg = Controller.getBgpMessageFactory4()
                .updateMessageBuilder().setBgpPathAttributes(attributesList)
                .build();
        channel.write(Collections.singletonList(msg));
    }

    @Override
    public void buildAdjRibIn(List<BgpValueType> pathAttr)
            throws BgpParseException {
        ListIterator<BgpValueType> iterator = pathAttr.listIterator();
        while (iterator.hasNext()) {
            BgpValueType attr = iterator.next();
            if (attr instanceof MpReachNlri) {
                MpReachNlri mpReachNlri = (MpReachNlri) attr;
                switch (mpReachNlri.getNlriDetailsType()) {
                case LINK_STATE:
                    List<BgpLSNlri> lsNlri = mpReachNlri.bgpLSNlri();
                    callAdd(this, lsNlri, pathAttr);
                    break;
                case EVPN:
                    // EVPN NLRIs are parsed but not yet pushed into a RIB —
                    // TODO: wire EVPN reachability into the adjacency RIB.
                    List<BgpEvpnNlri> evpnNlri = mpReachNlri.bgpEvpnNlri();
                    break;
                default:
                    break;
                }
            }
            if (attr instanceof MpUnReachNlri) {
                // BUGFIX: this was previously cast to MpReachNlri, which
                // guaranteed a ClassCastException for every withdraw update.
                MpUnReachNlri mpUnReachNlri = (MpUnReachNlri) attr;
                switch (mpUnReachNlri.getNlriDetailsType()) {
                case LINK_STATE:
                    List<BgpLSNlri> lsNlri = mpUnReachNlri.bgpLSNlri();
                    callRemove(this, lsNlri);
                    break;
                case EVPN:
                    // EVPN withdraws are parsed but not yet removed from a RIB —
                    // TODO: wire EVPN unreachability into the adjacency RIB.
                    List<BgpEvpnNlri> evpnNlri = mpUnReachNlri.bgpEvpnNlri();
                    break;
                default:
                    break;
                }
            }
        }
    }

    /**
     * Updates NLRI identifier node in a tree separately based on afi and safi.
     * VPN routes go to the VPN adjacency/local RIBs keyed by route
     * distinguisher; all others go to the plain RIBs.
     *
     * @param peerImpl BGP peer instance
     * @param nlri MpReachNlri path attribute
     * @param pathAttr list of BGP path attributes
     * @throws BgpParseException throws exception
     */
    public void callAdd(BgpPeerImpl peerImpl, List<BgpLSNlri> nlri,
                        List<BgpValueType> pathAttr)
            throws BgpParseException {
        ListIterator<BgpLSNlri> listIterator = nlri.listIterator();
        while (listIterator.hasNext()) {
            BgpLSNlri nlriInfo = listIterator.next();
            if (nlriInfo instanceof BgpNodeLSNlriVer4) {
                PathAttrNlriDetails details = setPathAttrDetails(nlriInfo,
                                                                 pathAttr);
                if (!((BgpNodeLSNlriVer4) nlriInfo).isVpnPresent()) {
                    adjRib.add(nlriInfo, details);
                    bgplocalRib.add(sessionInfo(), nlriInfo, details);
                } else {
                    vpnAdjRib.addVpn(nlriInfo, details,
                                     ((BgpNodeLSNlriVer4) nlriInfo)
                                             .getRouteDistinguisher());
                    bgplocalRibVpn.add(sessionInfo(), nlriInfo, details,
                                       ((BgpNodeLSNlriVer4) nlriInfo)
                                               .getRouteDistinguisher());
                }
            } else if (nlriInfo instanceof BgpLinkLsNlriVer4) {
                PathAttrNlriDetails details = setPathAttrDetails(nlriInfo,
                                                                 pathAttr);
                if (!((BgpLinkLsNlriVer4) nlriInfo).isVpnPresent()) {
                    adjRib.add(nlriInfo, details);
                    bgplocalRib.add(sessionInfo(), nlriInfo, details);
                } else {
                    vpnAdjRib.addVpn(nlriInfo, details,
                                     ((BgpLinkLsNlriVer4) nlriInfo)
                                             .getRouteDistinguisher());
                    bgplocalRibVpn.add(sessionInfo(), nlriInfo, details,
                                       ((BgpLinkLsNlriVer4) nlriInfo)
                                               .getRouteDistinguisher());
                }
            } else if (nlriInfo instanceof BgpPrefixIPv4LSNlriVer4) {
                PathAttrNlriDetails details = setPathAttrDetails(nlriInfo,
                                                                 pathAttr);
                if (!((BgpPrefixIPv4LSNlriVer4) nlriInfo).isVpnPresent()) {
                    adjRib.add(nlriInfo, details);
                    bgplocalRib.add(sessionInfo(), nlriInfo, details);
                } else {
                    vpnAdjRib.addVpn(nlriInfo, details,
                                     ((BgpPrefixIPv4LSNlriVer4) nlriInfo)
                                             .getRouteDistinguisher());
                    bgplocalRibVpn.add(sessionInfo(), nlriInfo, details,
                                       ((BgpPrefixIPv4LSNlriVer4) nlriInfo)
                                               .getRouteDistinguisher());
                }
            }
        }
    }

    /**
     * Sets BGP path attribute and NLRI details.
     *
     * @param nlriInfo MpReachNlri path attribute
     * @param pathAttr list of BGP path attributes
     * @return details object of PathAttrNlriDetails
     * @throws BgpParseException throw exception
     */
    public PathAttrNlriDetails setPathAttrDetails(BgpLSNlri nlriInfo,
                                                  List<BgpValueType> pathAttr)
            throws BgpParseException {
        PathAttrNlriDetails details = new PathAttrNlriDetails();
        details.setProtocolID(nlriInfo.getProtocolId());
        details.setIdentifier(nlriInfo.getIdentifier());
        details.setPathAttribute(pathAttr);
        return details;
    }

    /**
     * Removes NLRI identifier node in a tree separately based on afi and safi.
     * Mirrors callAdd(): VPN routes are removed from the VPN RIBs keyed by
     * route distinguisher; all others from the plain RIBs.
     *
     * @param peerImpl BGP peer instance
     * @param nlri NLRI information
     * @throws BgpParseException BGP parse exception
     */
    public void callRemove(BgpPeerImpl peerImpl, List<BgpLSNlri> nlri)
            throws BgpParseException {
        ListIterator<BgpLSNlri> listIterator = nlri.listIterator();
        while (listIterator.hasNext()) {
            BgpLSNlri nlriInfo = listIterator.next();
            if (nlriInfo instanceof BgpNodeLSNlriVer4) {
                if (!((BgpNodeLSNlriVer4) nlriInfo).isVpnPresent()) {
                    adjRib.remove(nlriInfo);
                    bgplocalRib.delete(nlriInfo);
                } else {
                    vpnAdjRib.removeVpn(nlriInfo, ((BgpNodeLSNlriVer4) nlriInfo)
                            .getRouteDistinguisher());
                    bgplocalRibVpn.delete(nlriInfo,
                                          ((BgpNodeLSNlriVer4) nlriInfo)
                                                  .getRouteDistinguisher());
                }
            } else if (nlriInfo instanceof BgpLinkLsNlriVer4) {
                if (!((BgpLinkLsNlriVer4) nlriInfo).isVpnPresent()) {
                    adjRib.remove(nlriInfo);
                    bgplocalRib.delete(nlriInfo);
                } else {
                    vpnAdjRib.removeVpn(nlriInfo, ((BgpLinkLsNlriVer4) nlriInfo)
                            .getRouteDistinguisher());
                    bgplocalRibVpn.delete(nlriInfo,
                                          ((BgpLinkLsNlriVer4) nlriInfo)
                                                  .getRouteDistinguisher());
                }
            } else if (nlriInfo instanceof BgpPrefixIPv4LSNlriVer4) {
                if (!((BgpPrefixIPv4LSNlriVer4) nlriInfo).isVpnPresent()) {
                    adjRib.remove(nlriInfo);
                    bgplocalRib.delete(nlriInfo);
                } else {
                    vpnAdjRib.removeVpn(nlriInfo,
                                        ((BgpPrefixIPv4LSNlriVer4) nlriInfo)
                                                .getRouteDistinguisher());
                    bgplocalRibVpn.delete(nlriInfo,
                                          ((BgpPrefixIPv4LSNlriVer4) nlriInfo)
                                                  .getRouteDistinguisher());
                }
            }
        }
    }

    /**
     * Return the adjacency RIB-IN.
     *
     * @return adjRib the adjacency RIB-IN
     */
    public AdjRibIn adjRib() {
        return adjRib;
    }

    /**
     * Return the adjacency RIB-IN with VPN.
     *
     * @return vpnAdjRib the adjacency RIB-IN with VPN
     */
    public VpnAdjRibIn vpnAdjRib() {
        return vpnAdjRib;
    }

    /**
     * Update localRIB on peer disconnect.
     *
     * @throws BgpParseException while updating local RIB
     */
    public void updateLocalRibOnPeerDisconnect() throws BgpParseException {
        BgpLocalRibImpl localRib = (BgpLocalRibImpl) bgplocalRib;
        BgpLocalRibImpl localRibVpn = (BgpLocalRibImpl) bgplocalRibVpn;

        localRib.localRibUpdate(adjacencyRib());
        localRibVpn.localRibUpdate(vpnAdjacencyRib());
    }

    // ************************
    // Channel related
    // ************************

    @Override
    public final void disconnectPeer() {
        this.channel.close();
    }

    @Override
    public final void sendMessage(BgpMessage m) {
        log.debug("Sending message to {}", channel.getRemoteAddress());

        try {
            channel.write(Collections.singletonList(m));
            this.pktStats.addOutPacket();
        } catch (RejectedExecutionException e) {
            log.warn(e.getMessage());
            // Only tolerate the expected worker-shutdown rejection; anything
            // else is rethrown.
            if (!e.getMessage().contains(SHUTDOWN_MSG)) {
                throw e;
            }
        }
    }

    @Override
    public final void sendMessage(List<BgpMessage> msgs) {
        try {
            channel.write(msgs);
            this.pktStats.addOutPacket(msgs.size());
        } catch (RejectedExecutionException e) {
            log.warn(e.getMessage());
            if (!e.getMessage().contains(SHUTDOWN_MSG)) {
                throw e;
            }
        }
    }

    @Override
    public final boolean isConnected() {
        return this.connected;
    }

    @Override
    public final void setConnected(boolean connected) {
        this.connected = connected;
    }

    @Override
    public final void setChannel(Channel channel) {
        this.channel = channel;
        // Derive a human-readable channel id ("ip:port", IPv6 bracketed).
        final SocketAddress address = channel.getRemoteAddress();
        if (address instanceof InetSocketAddress) {
            final InetSocketAddress inetAddress = (InetSocketAddress) address;
            final IpAddress ipAddress = IpAddress
                    .valueOf(inetAddress.getAddress());
            if (ipAddress.isIp4()) {
                channelId = ipAddress.toString() + ':' + inetAddress.getPort();
            } else {
                channelId = '[' + ipAddress.toString() + "]:"
                        + inetAddress.getPort();
            }
        }
    }

    @Override
    public final Channel getChannel() {
        return this.channel;
    }

    @Override
    public String channelId() {
        return channelId;
    }

    @Override
    public BgpFactory factory() {
        return BgpFactories.getFactory(sessionInfo.remoteBgpVersion());
    }

    @Override
    public boolean isHandshakeComplete() {
        return isHandShakeComplete;
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(getClass()).omitNullValues()
                .add("channel", channelId())
                .add("BgpId", sessionInfo().remoteBgpId()).toString();
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi,
* Erik Ramfelt, Seiji Sogabe, Martin Eigenbrodt, Alan Harder
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.model;
import com.infradna.tool.bridge_method_injector.BridgeMethodsAdded;
import com.infradna.tool.bridge_method_injector.WithBridgeMethods;
import hudson.Extension;
import hudson.Util;
import hudson.model.Descriptor.FormException;
import hudson.util.CaseInsensitiveComparator;
import hudson.util.DescribableList;
import hudson.util.FormValidation;
import hudson.views.ListViewColumn;
import hudson.views.ViewJobFilter;
import jenkins.model.Jenkins;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
/**
* Displays {@link Job}s in a flat list view.
*
* @author Kohsuke Kawaguchi
*/
public class ListView extends View implements Saveable {
    /**
     * List of job names. This is what gets serialized.
     */
    // NOTE(review): this TreeSet is mutated from both synchronized methods
    // (doCreateItem, onJobRenamed) and unsynchronized ones (add, contains) --
    // presumably guarded by coarser locking elsewhere; confirm before relying
    // on thread-safety.
    /*package*/ final SortedSet<String> jobNames = new TreeSet<String>(CaseInsensitiveComparator.INSTANCE);
    // Optional filters applied to the item list after the name/regex selection.
    private DescribableList<ViewJobFilter, Descriptor<ViewJobFilter>> jobFilters;
    // Columns displayed by this view; may be null on instances deserialized
    // from older configurations until initColumns() runs.
    private DescribableList<ListViewColumn, Descriptor<ListViewColumn>> columns;
    /**
     * Include regex string.
     */
    private String includeRegex;
    /**
     * Compiled include pattern from the includeRegex string.
     */
    private transient Pattern includePattern;
    /**
     * Filter by enabled/disabled status of jobs.
     * Null for no filter, true for enabled-only, false for disabled-only.
     */
    private Boolean statusFilter;
    @DataBoundConstructor
    public ListView(String name) {
        super(name);
        initColumns();
        initJobFilters();
    }
    public ListView(String name, ViewGroup owner) {
        this(name);
        this.owner = owner;
    }
    /**
     * Restores transient state after deserialization: recompiles the include
     * pattern and backfills columns/filters introduced in newer versions.
     */
    private Object readResolve() {
        if(includeRegex!=null)
            includePattern = Pattern.compile(includeRegex);
        initColumns();
        initJobFilters();
        return this;
    }
    // Lazily creates the default column list for views saved before columns existed.
    protected void initColumns() {
        if (columns == null)
            columns = new DescribableList<ListViewColumn, Descriptor<ListViewColumn>>(this,ListViewColumn.createDefaultInitialColumnList());
    }
    // Lazily creates an empty filter list for views saved before job filters existed.
    protected void initJobFilters() {
        if (jobFilters == null)
            jobFilters = new DescribableList<ViewJobFilter, Descriptor<ViewJobFilter>>(this);
    }
    /**
     * Used to determine if we want to display the Add button.
     */
    public boolean hasJobFilterExtensions() {
        return !ViewJobFilter.all().isEmpty();
    }
    public DescribableList<ViewJobFilter, Descriptor<ViewJobFilter>> getJobFilters() {
        return jobFilters;
    }
    public DescribableList<ListViewColumn, Descriptor<ListViewColumn>> getColumns() {
        return columns;
    }
    /**
     * Returns a read-only view of all {@link Job}s in this view.
     *
     * <p>
     * This method returns a separate copy each time to avoid
     * concurrent modification issue.
     */
    public synchronized List<TopLevelItem> getItems() {
        // Start with the explicitly selected names, then add any regex matches.
        SortedSet<String> names = new TreeSet<String>(jobNames);
        if (includePattern != null) {
            for (Item item : getOwnerItemGroup().getItems()) {
                String itemName = item.getName();
                if (includePattern.matcher(itemName).matches()) {
                    names.add(itemName);
                }
            }
        }
        List<TopLevelItem> items = new ArrayList<TopLevelItem>(names.size());
        for (String n : names) {
            TopLevelItem item = getOwnerItemGroup().getItem(n);
            // Add if no status filter or filter matches enabled/disabled status:
            // (XOR: statusFilter==TRUE keeps enabled projects, FALSE keeps disabled
            // ones; items that are not AbstractProject are never status-filtered.)
            if(item!=null && (statusFilter == null || !(item instanceof AbstractProject)
                              || ((AbstractProject)item).isDisabled() ^ statusFilter))
                items.add(item);
        }
        // check the filters
        Iterable<ViewJobFilter> jobFilters = getJobFilters();
        List<TopLevelItem> allItems = new ArrayList<TopLevelItem>(getOwnerItemGroup().getItems());
        for (ViewJobFilter jobFilter: jobFilters) {
            items = jobFilter.filter(items, allItems, this);
        }
        // for sanity, trim off duplicates
        items = new ArrayList<TopLevelItem>(new LinkedHashSet<TopLevelItem>(items));
        return items;
    }
    // True when the item was explicitly selected (regex matches are not recorded here).
    public boolean contains(TopLevelItem item) {
        return jobNames.contains(item.getName());
    }
    /**
     * Adds the given item to this view.
     *
     * @since 1.389
     */
    public void add(TopLevelItem item) throws IOException {
        jobNames.add(item.getName());
        save();
    }
    public String getIncludeRegex() {
        return includeRegex;
    }
    /**
     * Filter by enabled/disabled status of jobs.
     * Null for no filter, true for enabled-only, false for disabled-only.
     */
    public Boolean getStatusFilter() {
        return statusFilter;
    }
    /**
     * Creates a new item in the owning item group and, on success, adds it to
     * this view and persists the owner. Returns null when the owner is not a
     * {@link ModifiableItemGroup}.
     */
    public synchronized Item doCreateItem(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
        ItemGroup<? extends TopLevelItem> ig = getOwnerItemGroup();
        if (ig instanceof ModifiableItemGroup) {
            TopLevelItem item = ((ModifiableItemGroup<? extends TopLevelItem>)ig).doCreateItem(req, rsp);
            if(item!=null) {
                jobNames.add(item.getName());
                owner.save();
            }
            return item;
        }
        return null;
    }
    // Keeps the selected-name set in sync on rename; a null newName (deletion)
    // simply drops the old entry.
    @Override
    public synchronized void onJobRenamed(Item item, String oldName, String newName) {
        if(jobNames.remove(oldName) && newName!=null)
            jobNames.add(newName);
    }
    /**
     * Handles the configuration submission.
     *
     * Load view-specific properties here.
     */
    @Override
    protected void submit(StaplerRequest req) throws ServletException, FormException, IOException {
        // Rebuild the explicit selection from the submitted per-item checkboxes.
        jobNames.clear();
        for (TopLevelItem item : getOwnerItemGroup().getItems()) {
            if(req.getParameter(item.getName())!=null)
                jobNames.add(item.getName());
        }
        if (req.getParameter("useincluderegex") != null) {
            includeRegex = Util.nullify(req.getParameter("includeRegex"));
            if (includeRegex == null)
                includePattern = null;
            else
                includePattern = Pattern.compile(includeRegex);
        } else {
            includeRegex = null;
            includePattern = null;
        }
        if (columns == null) {
            columns = new DescribableList<ListViewColumn,Descriptor<ListViewColumn>>(this);
        }
        columns.rebuildHetero(req, req.getSubmittedForm(), ListViewColumn.all(), "columns");
        if (jobFilters == null) {
            jobFilters = new DescribableList<ViewJobFilter,Descriptor<ViewJobFilter>>(this);
        }
        jobFilters.rebuildHetero(req, req.getSubmittedForm(), ViewJobFilter.all(), "jobFilters");
        // "1" selects enabled-only; any other non-empty value selects
        // disabled-only; empty/missing means no status filtering.
        String filter = Util.fixEmpty(req.getParameter("statusFilter"));
        statusFilter = filter != null ? "1".equals(filter) : null;
    }
    @Extension
    public static final class DescriptorImpl extends ViewDescriptor {
        public String getDisplayName() {
            return Messages.ListView_DisplayName();
        }
        /**
         * Checks if the include regular expression is valid.
         */
        public FormValidation doCheckIncludeRegex( @QueryParameter String value ) throws IOException, ServletException, InterruptedException {
            String v = Util.fixEmpty(value);
            if (v != null) {
                try {
                    Pattern.compile(v);
                } catch (PatternSyntaxException pse) {
                    return FormValidation.error(pse.getMessage());
                }
            }
            return FormValidation.ok();
        }
    }
    /**
     * @deprecated as of 1.391
     * Use {@link ListViewColumn#createDefaultInitialColumnList()}
     */
    public static List<ListViewColumn> getDefaultColumns() {
        return ListViewColumn.createDefaultInitialColumnList();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.replication.regionserver;
import java.io.Closeable;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.math.BigInteger;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncClusterConnection;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.token.FsDelegationToken;
import org.apache.hadoop.hbase.tool.BulkLoadHFiles;
import org.apache.hadoop.hbase.tool.BulkLoadHFiles.LoadQueueItem;
import org.apache.hadoop.hbase.tool.BulkLoadHFilesTool;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
 * It is used for replicating HFile entries. It will first copy all the hfiles in parallel to a
 * local staging directory and then it will use {@link BulkLoadHFiles} to prepare a collection of
 * {@link LoadQueueItem} which will finally be loaded (replicated) into the table of this cluster.
 * Call {@link #close()} when done.
 */
@InterfaceAudience.Private
public class HFileReplicator implements Closeable {
  /** Maximum number of threads to allow in pool to copy hfiles during replication */
  public static final String REPLICATION_BULKLOAD_COPY_MAXTHREADS_KEY =
      "hbase.replication.bulkload.copy.maxthreads";
  public static final int REPLICATION_BULKLOAD_COPY_MAXTHREADS_DEFAULT = 10;
  /** Number of hfiles to copy per thread during replication */
  public static final String REPLICATION_BULKLOAD_COPY_HFILES_PERTHREAD_KEY =
      "hbase.replication.bulkload.copy.hfiles.perthread";
  public static final int REPLICATION_BULKLOAD_COPY_HFILES_PERTHREAD_DEFAULT = 10;
  private static final Logger LOG = LoggerFactory.getLogger(HFileReplicator.class);
  // Replaces the namespace separator ":" when building staging directory names.
  private static final String UNDERSCORE = "_";
  private final static FsPermission PERM_ALL_ACCESS = FsPermission.valueOf("-rwxrwxrwx");
  // Configuration of the source (peer) cluster the hfiles are copied from.
  private Configuration sourceClusterConf;
  private String sourceBaseNamespaceDirPath;
  private String sourceHFileArchiveDirPath;
  // table name -> list of (family, hfile relative paths) pairs to replicate.
  private Map<String, List<Pair<byte[], List<String>>>> bulkLoadHFileMap;
  // File system of this (sink) cluster, where hfiles are staged and loaded.
  private FileSystem sinkFs;
  private FsDelegationToken fsDelegationToken;
  private UserProvider userProvider;
  private Configuration conf;
  private AsyncClusterConnection connection;
  // Root staging dir on the sink cluster; per-table random subdirs are created under it.
  private Path hbaseStagingDir;
  // Bounded pool used to copy hfiles in parallel; shut down by close().
  private ThreadPoolExecutor exec;
  private int maxCopyThreads;
  private int copiesPerThread;
  private List<String> sourceClusterIds;
  /**
   * Builds a replicator for one batch of bulk-loaded hfiles.
   *
   * @param sourceClusterConf configuration of the source cluster
   * @param sourceBaseNamespaceDirPath base namespace dir on the source cluster
   * @param sourceHFileArchiveDirPath archive dir on the source cluster,
   *          used as a fallback when an hfile has already been archived
   * @param tableQueueMap table name to (family, hfile paths) pairs to replicate
   * @param conf sink cluster configuration
   * @param connection sink cluster connection used for the bulk load
   * @param sourceClusterIds cluster ids that already processed this bulk load
   */
  public HFileReplicator(Configuration sourceClusterConf,
      String sourceBaseNamespaceDirPath, String sourceHFileArchiveDirPath,
      Map<String, List<Pair<byte[], List<String>>>> tableQueueMap, Configuration conf,
      AsyncClusterConnection connection, List<String> sourceClusterIds) throws IOException {
    this.sourceClusterConf = sourceClusterConf;
    this.sourceBaseNamespaceDirPath = sourceBaseNamespaceDirPath;
    this.sourceHFileArchiveDirPath = sourceHFileArchiveDirPath;
    this.bulkLoadHFileMap = tableQueueMap;
    this.conf = conf;
    this.connection = connection;
    this.sourceClusterIds = sourceClusterIds;
    userProvider = UserProvider.instantiate(conf);
    fsDelegationToken = new FsDelegationToken(userProvider, "renewer");
    this.hbaseStagingDir =
      new Path(CommonFSUtils.getRootDir(conf), HConstants.BULKLOAD_STAGING_DIR_NAME);
    this.maxCopyThreads =
        this.conf.getInt(REPLICATION_BULKLOAD_COPY_MAXTHREADS_KEY,
          REPLICATION_BULKLOAD_COPY_MAXTHREADS_DEFAULT);
    this.exec = Threads.getBoundedCachedThreadPool(maxCopyThreads, 60, TimeUnit.SECONDS,
      new ThreadFactoryBuilder().setDaemon(true)
        .setNameFormat("HFileReplicationCopier-%1$d-" + this.sourceBaseNamespaceDirPath).
          build());
    this.copiesPerThread =
        conf.getInt(REPLICATION_BULKLOAD_COPY_HFILES_PERTHREAD_KEY,
          REPLICATION_BULKLOAD_COPY_HFILES_PERTHREAD_DEFAULT);
    sinkFs = FileSystem.get(conf);
  }
  // Stops the copier pool. Idempotent; also invoked from
  // copyHFilesToStagingDir()'s finally block.
  @Override
  public void close() throws IOException {
    if (this.exec != null) {
      this.exec.shutdown();
    }
  }
  /**
   * Copies all hfiles into a local staging directory, then bulk loads them
   * table by table into this (sink) cluster.
   *
   * NOTE(review): when a table's staging dir yields an empty queue this method
   * returns immediately, which skips the remaining tables and their cleanup --
   * confirm whether a 'continue' was intended instead of 'return null'.
   */
  public Void replicate() throws IOException {
    // Copy all the hfiles to the local file system
    Map<String, Path> tableStagingDirsMap = copyHFilesToStagingDir();
    int maxRetries = conf.getInt(HConstants.BULKLOAD_MAX_RETRIES_NUMBER, 10);
    for (Entry<String, Path> tableStagingDir : tableStagingDirsMap.entrySet()) {
      String tableNameString = tableStagingDir.getKey();
      Path stagingDir = tableStagingDir.getValue();
      TableName tableName = TableName.valueOf(tableNameString);
      // Prepare collection of queue of hfiles to be loaded(replicated)
      Deque<LoadQueueItem> queue = new LinkedList<>();
      BulkLoadHFilesTool.prepareHFileQueue(conf, connection, tableName, stagingDir, queue, false,
        false);
      if (queue.isEmpty()) {
        LOG.warn("Did not find any files to replicate in directory {}", stagingDir.toUri());
        return null;
      }
      // Token is needed so the bulk load can read the staged files as the hbase user.
      fsDelegationToken.acquireDelegationToken(sinkFs);
      try {
        doBulkLoad(conf, tableName, stagingDir, queue, maxRetries);
      } finally {
        cleanup(stagingDir);
      }
    }
    return null;
  }
  /**
   * Bulk loads the queued hfiles into the given table, retrying until the
   * queue drains or maxRetries is hit. A maxRetries of 0 retries forever.
   */
  private void doBulkLoad(Configuration conf, TableName tableName, Path stagingDir,
      Deque<LoadQueueItem> queue, int maxRetries) throws IOException {
    BulkLoadHFilesTool loader = new BulkLoadHFilesTool(conf);
    // Set the staging directory which will be used by BulkLoadHFilesTool for loading the data
    loader.setBulkToken(stagingDir.toString());
    //updating list of cluster ids where this bulkload event has already been processed
    loader.setClusterIds(sourceClusterIds);
    // loadHFileQueue removes successfully loaded items; anything left is retried.
    for (int count = 0; !queue.isEmpty(); count++) {
      if (count != 0) {
        LOG.warn("Error replicating HFiles; retry={} with {} remaining.", count, queue.size());
      }
      if (maxRetries != 0 && count >= maxRetries) {
        throw new IOException("Retry attempted " + count + " times without completing, bailing.");
      }
      // Try bulk load
      loader.loadHFileQueue(connection, tableName, queue, false);
    }
  }
  // Best-effort post-load cleanup: releases the delegation token and removes
  // the table's staging directory (failures are only logged).
  private void cleanup(Path stagingDir) {
    // Release the file system delegation token
    fsDelegationToken.releaseDelegationToken();
    // Delete the staging directory
    if (stagingDir != null) {
      try {
        sinkFs.delete(stagingDir, true);
      } catch (IOException e) {
        LOG.warn("Failed to delete the staging directory " + stagingDir, e);
      }
    }
    // Do not close the file system
  }
  /**
   * Copies every hfile of every table to a per-table staging directory on the
   * sink file system, parallelized in chunks of {@code copiesPerThread}.
   *
   * NOTE(review): the finally block closes the source FS and shuts down
   * {@code exec}, so this replicator instance is effectively single-use.
   *
   * @return map of table name to its staging directory
   */
  private Map<String, Path> copyHFilesToStagingDir() throws IOException {
    Map<String, Path> mapOfCopiedHFiles = new HashMap<>();
    Pair<byte[], List<String>> familyHFilePathsPair;
    List<String> hfilePaths;
    byte[] family;
    Path familyStagingDir;
    int familyHFilePathsPairsListSize;
    int totalNoOfHFiles;
    List<Pair<byte[], List<String>>> familyHFilePathsPairsList;
    FileSystem sourceFs = null;
    try {
      Path sourceClusterPath = new Path(sourceBaseNamespaceDirPath);
      /*
       * Path#getFileSystem will by default get the FS from cache. If both source and sink cluster
       * has same FS name service then it will return peer cluster FS. To avoid this we explicitly
       * disable the loading of FS from cache, so that a new FS is created with source cluster
       * configuration.
       */
      String sourceScheme = sourceClusterPath.toUri().getScheme();
      String disableCacheName =
          String.format("fs.%s.impl.disable.cache", new Object[] { sourceScheme });
      sourceClusterConf.setBoolean(disableCacheName, true);
      sourceFs = sourceClusterPath.getFileSystem(sourceClusterConf);
      User user = userProvider.getCurrent();
      // For each table name in the map
      for (Entry<String, List<Pair<byte[], List<String>>>> tableEntry : bulkLoadHFileMap
          .entrySet()) {
        String tableName = tableEntry.getKey();
        // Create staging directory for each table
        Path stagingDir =
            createStagingDir(hbaseStagingDir, user, TableName.valueOf(tableName));
        familyHFilePathsPairsList = tableEntry.getValue();
        familyHFilePathsPairsListSize = familyHFilePathsPairsList.size();
        // For each list of family hfile paths pair in the table
        for (int i = 0; i < familyHFilePathsPairsListSize; i++) {
          familyHFilePathsPair = familyHFilePathsPairsList.get(i);
          family = familyHFilePathsPair.getFirst();
          hfilePaths = familyHFilePathsPair.getSecond();
          familyStagingDir = new Path(stagingDir, Bytes.toString(family));
          totalNoOfHFiles = hfilePaths.size();
          // For each list of hfile paths for the family
          List<Future<Void>> futures = new ArrayList<>();
          Callable<Void> c;
          Future<Void> future;
          int currentCopied = 0;
          // Copy the hfiles parallely
          while (totalNoOfHFiles > currentCopied + this.copiesPerThread) {
            c =
                new Copier(sourceFs, familyStagingDir, hfilePaths.subList(currentCopied,
                  currentCopied + this.copiesPerThread));
            future = exec.submit(c);
            futures.add(future);
            currentCopied += this.copiesPerThread;
          }
          // Submit the remainder (< copiesPerThread files) as a final task.
          int remaining = totalNoOfHFiles - currentCopied;
          if (remaining > 0) {
            c =
                new Copier(sourceFs, familyStagingDir, hfilePaths.subList(currentCopied,
                  currentCopied + remaining));
            future = exec.submit(c);
            futures.add(future);
          }
          // Wait for all copies of this family; any failure aborts the whole
          // batch so the source cluster retries the replication.
          for (Future<Void> f : futures) {
            try {
              f.get();
            } catch (InterruptedException e) {
              InterruptedIOException iioe =
                  new InterruptedIOException(
                      "Failed to copy HFiles to local file system. This will be retried again "
                          + "by the source cluster.");
              iioe.initCause(e);
              throw iioe;
            } catch (ExecutionException e) {
              throw new IOException("Failed to copy HFiles to local file system. This will "
                  + "be retried again by the source cluster.", e);
            }
          }
        }
        // Add the staging directory to this table. Staging directory contains all the hfiles
        // belonging to this table
        mapOfCopiedHFiles.put(tableName, stagingDir);
      }
      return mapOfCopiedHFiles;
    } finally {
      if (sourceFs != null) {
        sourceFs.close();
      }
      if(exec != null) {
        exec.shutdown();
      }
    }
  }
  // Builds a unique staging dir name: <user>__<table>__<random>, with ":" in
  // the table name replaced so the name is a valid single path component.
  private Path createStagingDir(Path baseDir, User user, TableName tableName) throws IOException {
    String tblName = tableName.getNameAsString().replace(":", UNDERSCORE);
    int RANDOM_WIDTH = 320;
    int RANDOM_RADIX = 32;
    String doubleUnderScore = UNDERSCORE + UNDERSCORE;
    String randomDir = user.getShortName() + doubleUnderScore + tblName + doubleUnderScore
        + (new BigInteger(RANDOM_WIDTH, new SecureRandom()).toString(RANDOM_RADIX));
    return createStagingDir(baseDir, user, randomDir);
  }
  // Creates the directory on the sink FS with world-accessible permissions so
  // the region servers can read the staged files during the bulk load.
  private Path createStagingDir(Path baseDir, User user, String randomDir) throws IOException {
    Path p = new Path(baseDir, randomDir);
    sinkFs.mkdirs(p, PERM_ALL_ACCESS);
    sinkFs.setPermission(p, PERM_ALL_ACCESS);
    return p;
  }
  /**
   * This class will copy the given hfiles from the given source file system to the given local file
   * system staging directory.
   */
  private class Copier implements Callable<Void> {
    private FileSystem sourceFs;
    private Path stagingDir;
    private List<String> hfiles;
    public Copier(FileSystem sourceFs, final Path stagingDir, final List<String> hfiles)
        throws IOException {
      this.sourceFs = sourceFs;
      this.stagingDir = stagingDir;
      this.hfiles = hfiles;
    }
    // Copies each hfile from the source namespace dir, falling back to the
    // source archive dir; an hfile missing from both is skipped with a log.
    @Override
    public Void call() throws IOException {
      Path sourceHFilePath;
      Path localHFilePath;
      int totalHFiles = hfiles.size();
      for (int i = 0; i < totalHFiles; i++) {
        sourceHFilePath = new Path(sourceBaseNamespaceDirPath, hfiles.get(i));
        localHFilePath = new Path(stagingDir, sourceHFilePath.getName());
        try {
          FileUtil.copy(sourceFs, sourceHFilePath, sinkFs, localHFilePath, false, conf);
          // If any other exception other than FNFE then we will fail the replication requests and
          // source will retry to replicate these data.
        } catch (FileNotFoundException e) {
          LOG.info("Failed to copy hfile from " + sourceHFilePath + " to " + localHFilePath
              + ". Trying to copy from hfile archive directory.",
            e);
          sourceHFilePath = new Path(sourceHFileArchiveDirPath, hfiles.get(i));
          try {
            FileUtil.copy(sourceFs, sourceHFilePath, sinkFs, localHFilePath, false, conf);
          } catch (FileNotFoundException e1) {
            // This will mean that the hfile does not exists any where in source cluster FS. So we
            // cannot do anything here just log and continue.
            LOG.debug("Failed to copy hfile from " + sourceHFilePath + " to " + localHFilePath
                + ". Hence ignoring this hfile from replication..",
              e1);
            continue;
          }
        }
        sinkFs.setPermission(localHFilePath, PERM_ALL_ACCESS);
      }
      return null;
    }
  }
}
| |
/*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.service;
import com.google.common.collect.ImmutableMap;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.elastic.ClusterProfile;
import com.thoughtworks.go.config.elastic.ElasticProfile;
import com.thoughtworks.go.config.exceptions.StageNotFoundException;
import com.thoughtworks.go.config.materials.MaterialConfigs;
import com.thoughtworks.go.domain.*;
import com.thoughtworks.go.domain.buildcause.BuildCause;
import com.thoughtworks.go.helper.*;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.util.Clock;
import com.thoughtworks.go.util.GoConstants;
import com.thoughtworks.go.util.TimeProvider;
import com.thoughtworks.go.utils.Timeout;
import org.joda.time.DateTime;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.*;
import static com.thoughtworks.go.domain.JobResult.Passed;
import static com.thoughtworks.go.domain.JobResult.Unknown;
import static com.thoughtworks.go.domain.JobState.Completed;
import static com.thoughtworks.go.domain.JobState.Scheduled;
import static com.thoughtworks.go.helper.ModificationsMother.modifyOneFile;
import static com.thoughtworks.go.util.DataStructureUtils.a;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.fail;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
class InstanceFactoryTest {
private InstanceFactory instanceFactory;
private Clock clock;
@BeforeEach
void setUp() throws Exception {
instanceFactory = new InstanceFactory();
this.clock = mock(Clock.class);
}
@Test
void shouldSetTheConfigVersionOnSchedulingAStage() {
PipelineConfig pipelineConfig = PipelineConfigMother.createPipelineConfig("foo-pipeline", "foo-stage", "foo-job");
DefaultSchedulingContext schedulingContext = new DefaultSchedulingContext("loser");
String md5 = "foo-md5";
Stage actualStage = instanceFactory.createStageInstance(pipelineConfig, new CaseInsensitiveString("foo-stage"), schedulingContext, md5, clock);
assertThat(actualStage.getConfigVersion(), is(md5));
}
@Test
void shouldThrowStageNotFoundExceptionWhenStageDoesNotExist() {
PipelineConfig pipelineConfig = new PipelineConfig(new CaseInsensitiveString("cruise"), new MaterialConfigs(), new StageConfig(new CaseInsensitiveString("first"), new JobConfigs()));
try {
instanceFactory.createStageInstance(pipelineConfig, new CaseInsensitiveString("doesNotExist"), new DefaultSchedulingContext(), "md5", clock);
fail("Found the stage doesNotExist but, well, it doesn't");
} catch (StageNotFoundException expected) {
assertThat(expected.getMessage(), is("Stage 'doesNotExist' not found in pipeline 'cruise'"));
}
}
@Test
void shouldCreateAStageInstanceThroughInstanceFactory() {
PipelineConfig pipelineConfig = new PipelineConfig(new CaseInsensitiveString("cruise"), new MaterialConfigs(),
new StageConfig(new CaseInsensitiveString("first"), new JobConfigs(new JobConfig("job1"), new JobConfig("job2"))));
Stage actualStage = instanceFactory.createStageInstance(pipelineConfig, new CaseInsensitiveString("first"), new DefaultSchedulingContext(), "md5", clock);
JobInstances jobInstances = new JobInstances();
jobInstances.add(new JobInstance("job1", clock));
jobInstances.add(new JobInstance("job2", clock));
Stage expectedStage = new Stage("first", jobInstances, "Unknown", null, Approval.SUCCESS, clock);
assertThat(actualStage, is(expectedStage));
}
@Test
void shouldCreatePipelineInstanceWithEnvironmentVariablesOverriddenAccordingToScope() {
StageConfig stageConfig = StageConfigMother.custom("stage", "foo", "bar");
JobConfig fooConfig = stageConfig.jobConfigByConfigName(new CaseInsensitiveString("foo"));
fooConfig.addVariable("foo", "foo");
JobConfig barConfig = stageConfig.jobConfigByConfigName(new CaseInsensitiveString("bar"));
barConfig.addVariable("foo", "bar");
MaterialConfigs materialConfigs = MaterialConfigsMother.defaultMaterialConfigs();
PipelineConfig pipelineConfig = new PipelineConfig(new CaseInsensitiveString("pipeline"), materialConfigs, stageConfig);
DefaultSchedulingContext context = new DefaultSchedulingContext("anonymous");
Pipeline instance = instanceFactory.createPipelineInstance(pipelineConfig, ModificationsMother.forceBuild(pipelineConfig), context, "some-md5", new TimeProvider());
assertThat(instance.findStage("stage").findJob("foo").getPlan().getVariables(), is(new EnvironmentVariables(asList(new EnvironmentVariable("foo", "foo")))));
assertThat(instance.findStage("stage").findJob("bar").getPlan().getVariables(), is(new EnvironmentVariables(asList(new EnvironmentVariable("foo", "bar")))));
}
@Test
void shouldOverridePipelineEnvironmentVariablesFromBuildCauseForLabel() {
StageConfig stageConfig = StageConfigMother.custom("stage", "foo", "bar");
MaterialConfigs materialConfigs = MaterialConfigsMother.defaultMaterialConfigs();
DefaultSchedulingContext context = new DefaultSchedulingContext("anonymous");
PipelineConfig pipelineConfig = new PipelineConfig(new CaseInsensitiveString("pipeline"), materialConfigs, stageConfig);
pipelineConfig.addEnvironmentVariable("VAR", "value");
pipelineConfig.setLabelTemplate("${ENV:VAR}");
BuildCause buildCause = ModificationsMother.forceBuild(pipelineConfig);
EnvironmentVariables overriddenVars = buildCause.getVariables();
overriddenVars.add("VAR", "overriddenValue");
buildCause.setVariables(overriddenVars);
Pipeline instance = instanceFactory.createPipelineInstance(pipelineConfig, buildCause, context, "some-md5", new TimeProvider());
instance.updateCounter(1);
assertThat(instance.getLabel(), is("overriddenValue"));
}
@Test
void shouldSchedulePipelineWithFirstStage() {
StageConfig stageOneConfig = StageConfigMother.stageConfig("dev", BuildPlanMother.withBuildPlans("functional", "unit"));
StageConfig stageTwoConfig = StageConfigMother.stageConfig("qa", BuildPlanMother.withBuildPlans("suiteOne", "suiteTwo"));
MaterialConfigs materialConfigs = MaterialConfigsMother.defaultMaterialConfigs();
PipelineConfig pipelineConfig = new PipelineConfig(new CaseInsensitiveString("mingle"), materialConfigs, stageOneConfig, stageTwoConfig);
BuildCause buildCause = BuildCause.createManualForced(modifyOneFile(pipelineConfig), Username.ANONYMOUS);
Pipeline pipeline = instanceFactory.createPipelineInstance(pipelineConfig, buildCause, new DefaultSchedulingContext("test"), "some-md5", new TimeProvider());
assertThat(pipeline.getName(), is("mingle"));
assertThat(pipeline.getStages().size(), is(1));
assertThat(pipeline.getStages().get(0).getName(), is("dev"));
assertThat(pipeline.getStages().get(0).getJobInstances().get(0).getName(), is("functional"));
}
@Test
void shouldSetAutoApprovalOnStageInstance() {
StageConfig stageConfig = StageConfigMother.custom("test", Approval.automaticApproval());
Stage instance = instanceFactory.createStageInstance(stageConfig, new DefaultSchedulingContext("anyone"), "md5", new TimeProvider());
assertThat(instance.getApprovalType(), is(GoConstants.APPROVAL_SUCCESS));
}
@Test
void shouldSetManualApprovalOnStageInstance() {
StageConfig stageConfig = StageConfigMother.custom("test", Approval.manualApproval());
Stage instance = instanceFactory.createStageInstance(stageConfig, new DefaultSchedulingContext("anyone"), "md5", new TimeProvider());
assertThat(instance.getApprovalType(), is(GoConstants.APPROVAL_MANUAL));
}
@Test
void shouldSetFetchMaterialsFlagOnStageInstance() {
StageConfig stageConfig = StageConfigMother.custom("test", Approval.automaticApproval());
stageConfig.setFetchMaterials(false);
Stage instance = instanceFactory.createStageInstance(stageConfig, new DefaultSchedulingContext("anyone"), "md5", new TimeProvider());
assertThat(instance.shouldFetchMaterials(), is(false));
}
    @Test
    void shouldClear_DatabaseIds_State_and_Result_ForJobObjectHierarchy() {
        // Rerun only "rails": it must be reset to a brand-new job, while "java"
        // is copied over with its ids and state intact.
        Date old = new DateTime().minusDays(2).toDate();
        JobInstance rails = jobInstance(old, "rails", 7, 10);
        JobInstance java = jobInstance(old, "java", 12, 22);
        Stage stage = stage(9, rails, java);
        assertThat(stage.hasRerunJobs(), is(false));
        Stage newStage = instanceFactory.createStageForRerunOfJobs(stage, a("rails"), new DefaultSchedulingContext("loser", new Agents()), StageConfigMother.custom("dev", "rails", "java"),
                new TimeProvider(), "md5");
        // The original stage must not be mutated by the rerun.
        assertThat(stage.hasRerunJobs(), is(false));
        assertThat(newStage.getId(), is(-1l));
        assertThat(newStage.getJobInstances().size(), is(2));
        assertThat(newStage.isLatestRun(), is(true));
        JobInstance newRails = newStage.getJobInstances().getByName("rails");
        assertNewJob(old, newRails);
        JobInstance newJava = newStage.getJobInstances().getByName("java");
        assertCopiedJob(newJava, 12l);
    }
// A rerun-of-a-rerun should still reference the ORIGINAL stage counter and the
// ORIGINAL job ids, not those of the intermediate copies.
@Test
void should_MaintainRerunOfReferences_InCaseOfMultipleCopyForRerunOperations() {
Date old = new DateTime().minusDays(2).toDate();
JobInstance rails = jobInstance(old, "rails", 7, 10);
JobInstance java = jobInstance(old, "java", 12, 22);
Stage stage = stage(9, rails, java);
stage.setCounter(2);
Stage newStage = instanceFactory.createStageForRerunOfJobs(stage, a("rails"), new DefaultSchedulingContext("loser", new Agents()), StageConfigMother.custom("dev", "rails", "java"),
new TimeProvider(), "md5");
newStage.setCounter(3);
assertThat(newStage.getId(), is(-1l));
assertThat(newStage.getJobInstances().size(), is(2));
assertThat(newStage.isLatestRun(), is(true));
// rerunOfCounter records the counter (2) of the stage the rerun came from.
assertThat(newStage.getRerunOfCounter(), is(2));
JobInstance newJava = newStage.getJobInstances().getByName("java");
assertCopiedJob(newJava, 12l);
//set id, to assert if original ends up pointing to copied job's id
newJava.setId(18l);
newStage = instanceFactory.createStageForRerunOfJobs(newStage, a("rails"), new DefaultSchedulingContext("loser", new Agents()), StageConfigMother.custom("dev", "rails", "java"),
new TimeProvider(), "md5");
newStage.setCounter(4);
assertThat(newStage.getId(), is(-1l));
assertThat(newStage.getJobInstances().size(), is(2));
assertThat(newStage.isLatestRun(), is(true));
// Still 2: the second rerun keeps the original rerun-source counter, not 3.
assertThat(newStage.getRerunOfCounter(), is(2));
newJava = newStage.getJobInstances().getByName("java");
// The second copy must still reference the original id 12, not the copy's 18.
assertCopiedJob(newJava, 12l);
}
// When a stage is cloned for job rerun, the clone must record the md5 passed in
// ("latest"), not the config version of the original run.
@Test
void shouldCloneStageForGivenJobsWithLatestMd5() {
TimeProvider timeProvider = new TimeProvider() {
@Override
public Date currentTime() {
return new Date();
}
// Only currentTime() is exercised by this path; the remaining methods fail fast
// if unexpectedly called.
@Override
public DateTime currentDateTime() {
throw new UnsupportedOperationException("Not implemented");
}
@Override
public DateTime timeoutTime(Timeout timeout) {
throw new UnsupportedOperationException("Not implemented");
}
};
JobInstance firstJob = new JobInstance("first-job", timeProvider);
JobInstance secondJob = new JobInstance("second-job", timeProvider);
JobInstances jobInstances = new JobInstances(firstJob, secondJob);
Stage stage = StageMother.custom("test", jobInstances);
Stage clonedStage = instanceFactory.createStageForRerunOfJobs(stage, asList("first-job"), new DefaultSchedulingContext("loser", new Agents()),
StageConfigMother.custom("test", "first-job", "second-job"),
new TimeProvider(),
"latest");
assertThat(clonedStage.getConfigVersion(), is("latest"));
}
@Test
void shouldAddEnvironmentVariablesPresentInTheScheduleContextToJobPlan() {
    // Variables overridden on the scheduling context must surface on the job plan.
    JobConfig jobConfig = new JobConfig("foo");
    EnvironmentVariablesConfig contextVariables = new EnvironmentVariablesConfig();
    contextVariables.add("blahVar", "blahVal");
    SchedulingContext schedulingContext = new DefaultSchedulingContext("Loser").overrideEnvironmentVariables(contextVariables);
    JobPlan plan = instanceFactory.createJobPlan(jobConfig, schedulingContext);
    assertThat(plan.getVariables(), hasItem(new EnvironmentVariable("blahVar", "blahVal")));
}
@Test
void shouldOverrideEnvironmentVariablesPresentInTheScheduleContextToJobPlan() {
    // Job-level variables win over context-level ones with the same name; the
    // plan carries the union of both sets.
    EnvironmentVariablesConfig jobVariables = new EnvironmentVariablesConfig();
    jobVariables.add("blahVar", "blahVal");
    jobVariables.add("differentVar", "differentVal");
    JobConfig jobConfig = new JobConfig("foo");
    jobConfig.setVariables(jobVariables);
    EnvironmentVariablesConfig contextVariables = new EnvironmentVariablesConfig();
    contextVariables.add("blahVar", "originalVal");
    contextVariables.add("secondVar", "secondVal");
    SchedulingContext schedulingContext = new DefaultSchedulingContext().overrideEnvironmentVariables(contextVariables);
    JobPlan plan = instanceFactory.createJobPlan(jobConfig, schedulingContext);
    assertThat(plan.getVariables().size(), is(3));
    // Job config's "blahVal" overrides the context's "originalVal".
    assertThat(plan.getVariables(), hasItem(new EnvironmentVariable("blahVar", "blahVal")));
    assertThat(plan.getVariables(), hasItem(new EnvironmentVariable("secondVar", "secondVal")));
    assertThat(plan.getVariables(), hasItem(new EnvironmentVariable("differentVar", "differentVal")));
}
@Test
void shouldAddEnvironmentVariablesToJobPlan() {
    // Variables declared on the job config itself must surface on the plan even
    // without any context-level overrides.
    EnvironmentVariablesConfig jobVariables = new EnvironmentVariablesConfig();
    jobVariables.add("blahVar", "blahVal");
    JobConfig jobConfig = new JobConfig("foo");
    jobConfig.setVariables(jobVariables);
    SchedulingContext schedulingContext = new DefaultSchedulingContext();
    JobPlan plan = instanceFactory.createJobPlan(jobConfig, schedulingContext);
    assertThat(plan.getVariables(), hasItem(new EnvironmentVariable("blahVar", "blahVal")));
}
@Test
void shouldCreateJobPlan() {
    // A plan built from a bare job config should equal a default plan with the
    // same resources/artifacts, no persisted id (-1), and empty variables.
    ResourceConfigs resources = new ResourceConfigs();
    ArtifactTypeConfigs artifactConfigs = new ArtifactTypeConfigs();
    JobConfig jobConfig = new JobConfig(new CaseInsensitiveString("test"), resources, artifactConfigs);
    JobPlan actualPlan = instanceFactory.createJobPlan(jobConfig, new DefaultSchedulingContext());
    JobPlan expectedPlan = new DefaultJobPlan(new Resources(resources), ArtifactPlan.toArtifactPlans(artifactConfigs), -1, new JobIdentifier(), null, new EnvironmentVariables(), new EnvironmentVariables(), null, null);
    assertThat(actualPlan, is(expectedPlan));
}
@Test
void shouldAddElasticProfileOnJobPlan() {
    // The elastic profile referenced by the job config must be resolved from the
    // scheduling context; no cluster profile is attached when none is supplied.
    ElasticProfile elasticProfile = new ElasticProfile("id", "prod-cluster");
    DefaultSchedulingContext schedulingContext = new DefaultSchedulingContext("foo", new Agents(), ImmutableMap.of("id", elasticProfile));
    JobConfig jobConfig = new JobConfig(new CaseInsensitiveString("test"), null, new ArtifactTypeConfigs());
    jobConfig.setElasticProfileId("id");
    JobPlan plan = instanceFactory.createJobPlan(jobConfig, schedulingContext);
    assertThat(plan.getElasticProfile(), is(elasticProfile));
    assertNull(plan.getClusterProfile());
}
@Test
void shouldAddElasticProfileAndClusterProfileOnJobPlan() {
    // Both the elastic profile and its cluster profile must be resolved from the
    // scheduling context and attached to the plan.
    ElasticProfile elasticProfile = new ElasticProfile("id", "clusterId");
    ClusterProfile clusterProfile = new ClusterProfile("clusterId", "pluginId");
    DefaultSchedulingContext schedulingContext = new DefaultSchedulingContext("foo", new Agents(), ImmutableMap.of("id", elasticProfile), ImmutableMap.of("clusterId", clusterProfile));
    JobConfig jobConfig = new JobConfig(new CaseInsensitiveString("test"), null, new ArtifactTypeConfigs());
    jobConfig.setElasticProfileId("id");
    JobPlan plan = instanceFactory.createJobPlan(jobConfig, schedulingContext);
    assertThat(plan.getElasticProfile(), is(elasticProfile));
    assertThat(plan.getClusterProfile(), is(clusterProfile));
}
@Test
void shouldReturnBuildInstance() {
    // A freshly created job instance carries the configured name, starts out in
    // the Scheduled state, and has a scheduled date stamped on it.
    JobConfig jobConfig = new JobConfig(new CaseInsensitiveString("test"), null, new ArtifactTypeConfigs());
    RunOnAllAgents.CounterBasedJobNameGenerator nameGenerator = new RunOnAllAgents.CounterBasedJobNameGenerator(CaseInsensitiveString.str(jobConfig.name()));
    JobInstances jobs = instanceFactory.createJobInstance(new CaseInsensitiveString("stage_foo"), jobConfig, new DefaultSchedulingContext(), new TimeProvider(), nameGenerator);
    JobInstance firstJob = jobs.first();
    assertThat(jobConfig.name(), is(new CaseInsensitiveString(firstJob.getName())));
    assertThat(firstJob.getState(), is(JobState.Scheduled));
    assertThat(firstJob.getScheduledDate(), is(notNullValue()));
}
// The factory must pick the right name generator per run type: runOnAllAgents
// jobs get "-runOnAll-<n>" names, runInstanceCount jobs get "-runInstance-<n>"
// names, and plain jobs keep their configured name.
@Test
void shouldUseRightNameGenerator() {
StageConfig stageConfig = StageConfigMother.custom("dev", "rails", "java", "html");
JobConfig railsConfig = stageConfig.getJobs().getJob(new CaseInsensitiveString("rails"));
railsConfig.setRunOnAllAgents(true);
railsConfig.addResourceConfig("foobar");
JobConfig javaConfig = stageConfig.getJobs().getJob(new CaseInsensitiveString("java"));
javaConfig.setRunInstanceCount(2);
// Only agent1 and agent2 carry the "foobar" resource, so "rails" runs on two agents.
Agent agent1 = new Agent("abcd1234", "host", "127.0.0.2", singletonList("foobar"));
Agent agent2 = new Agent("1234abcd", "ghost", "192.168.1.2", asList("baz", "foobar"));
Agent agent3 = new Agent("7890abdc", "lost", "10.4.3.55", singletonList("crapyagent"));
DefaultSchedulingContext schedulingContext = new DefaultSchedulingContext("loser", new Agents(agent1, agent2, agent3));
Stage stageInstance = instanceFactory.createStageInstance(stageConfig, schedulingContext, "md5", clock);
JobInstances jobInstances = stageInstance.getJobInstances();
// 2 (rails, one per matching agent) + 2 (java, run-multiple) + 1 (html) = 5.
assertThat(jobInstances.size(), is(5));
assertRunOnAllAgentsJobInstance(jobInstances.get(0), "rails-runOnAll-1");
assertRunOnAllAgentsJobInstance(jobInstances.get(1), "rails-runOnAll-2");
assertRunMultipleJobInstance(jobInstances.get(2), "java-runInstance-1");
assertRunMultipleJobInstance(jobInstances.get(3), "java-runInstance-2");
assertSimpleJobInstance(jobInstances.get(4), "html");
}
/*
 * Single-instance job scheduling tests (no runOnAllAgents, no runInstanceCount).
 */
// A job with neither runOnAllAgents nor runInstanceCount set must schedule
// exactly one instance with no agent pre-assigned.
@Test
void shouldCreateASingleJobIfRunOnAllAgentsIsFalse() {
JobConfig jobConfig = new JobConfig("foo");
// NOTE(review): this mocked context is configured but never passed to the
// factory below (a fresh DefaultSchedulingContext is used instead) — the
// stubbing appears to be dead setup; confirm and consider removing it.
SchedulingContext context = mock(SchedulingContext.class);
when(context.getEnvironmentVariablesConfig()).thenReturn(new EnvironmentVariablesConfig());
when(context.overrideEnvironmentVariables(any(EnvironmentVariablesConfig.class))).thenReturn(context);
RunOnAllAgents.CounterBasedJobNameGenerator jobNameGenerator = new RunOnAllAgents.CounterBasedJobNameGenerator(CaseInsensitiveString.str(jobConfig.name()));
JobInstances jobs = instanceFactory.createJobInstance(new CaseInsensitiveString("someStage"), jobConfig, new DefaultSchedulingContext(), new TimeProvider(), jobNameGenerator);
assertThat(jobs.toArray(), hasItemInArray(hasProperty("name", is("foo"))));
assertThat(jobs.toArray(), hasItemInArray(hasProperty("agentUuid", nullValue())));
assertThat(jobs.toArray(), hasItemInArray(hasProperty("runOnAllAgents", is(false))));
assertThat(jobs.size(), is(1));
}
// Rerunning a job whose config has been removed from the stage must fail with
// CannotRerunJobException and schedule nothing.
@Test
void shouldNotRerun_WhenJobConfigDoesNotExistAnymore_ForSingleInstanceJob() {
Date old = new DateTime().minusDays(2).toDate();
JobInstance rails = jobInstance(old, "rails", 7, 10);
JobInstance java = jobInstance(old, "java", 12, 22);
Stage stage = stage(9, rails, java);
Stage newStage = null;
CannotRerunJobException exception = null;
try {
// The stage config used for the rerun only contains "java" — "rails" is gone.
newStage = instanceFactory.createStageForRerunOfJobs(stage, a("rails"), new DefaultSchedulingContext("loser", new Agents()), StageConfigMother.custom("dev", "java"), new TimeProvider(),
"md5");
fail("should not schedule when job config does not exist anymore");
} catch (CannotRerunJobException e) {
exception = e;
}
assertThat(exception.getJobName(), is("rails"));
assertThat(newStage, is(nullValue()));
}
@Test
void shouldClearAgentAssignment_ForSingleInstanceJobType() {
    // On rerun, the selected job loses its agent assignment while jobs that are
    // merely copied keep theirs.
    Date twoDaysAgo = new DateTime().minusDays(2).toDate();
    JobInstance railsJob = jobInstance(twoDaysAgo, "rails", 7, 10);
    JobInstance javaJob = jobInstance(twoDaysAgo, "java", 12, 22);
    Stage originalStage = stage(9, railsJob, javaJob);
    Stage rerunStage = instanceFactory.createStageForRerunOfJobs(originalStage, a("rails"), new DefaultSchedulingContext("loser", new Agents()), StageConfigMother.custom("dev", "rails", "java"),
            new TimeProvider(), "md5");
    assertThat(rerunStage.getJobInstances().getByName("rails").getAgentUuid(), is(nullValue()));
    assertThat(rerunStage.getJobInstances().getByName("java").getAgentUuid(), is(not(nullValue())));
}
// A job originally scheduled as a single instance cannot be rerun after its
// config is switched to 'run multiple instance'.
@Test
void shouldNotRerun_WhenJobConfigIsChangedToRunMultipleInstance_ForSingleJobInstance() {
Date old = new DateTime().minusDays(2).toDate();
StageConfig stageConfig = StageConfigMother.custom("dev", "rails", "java");
JobConfig railsConfig = stageConfig.getJobs().getJob(new CaseInsensitiveString("rails"));
DefaultSchedulingContext schedulingContext = new DefaultSchedulingContext("loser", new Agents());
JobInstances jobs = instanceFactory.createJobInstance(new CaseInsensitiveString("dev"), railsConfig, schedulingContext, new TimeProvider(), null);
Stage stage = createStageInstance(old, jobs);
Stage newStage = null;
// Flip the run type after the original instance was created.
railsConfig.setRunInstanceCount(10);
CannotRerunJobException exception = null;
try {
newStage = instanceFactory.createStageForRerunOfJobs(stage, a("rails"), schedulingContext, stageConfig, new TimeProvider(), "md5");
fail("should not schedule since job config changed to run multiple instance");
} catch (CannotRerunJobException e) {
exception = e;
}
assertThat(exception.getJobName(), is("rails"));
assertThat(exception.getInformation(), is("Run configuration for job has been changed to 'run multiple instance'."));
assertThat(newStage, is(nullValue()));
}
/*
 * runOnAllAgents job scheduling tests (one instance per matching agent).
 */
@Test
void shouldCreateAJobForEachAgentIfRunOnAllAgentsIsTrue() {
    // With runOnAllAgents=true, one job instance must be scheduled per matching
    // agent, each named with an incrementing "-runOnAll-<n>" suffix (see
    // shouldUseRightNameGenerator for the same naming convention).
    Agents agents = new Agents();
    agents.add(new Agent("uuid1"));
    agents.add(new Agent("uuid2"));
    JobConfig jobConfig = new JobConfig("foo");
    jobConfig.setRunOnAllAgents(true);
    SchedulingContext context = mock(SchedulingContext.class);
    when(context.getApprovedBy()).thenReturn("chris");
    when(context.findAgentsMatching(new ResourceConfigs())).thenReturn(agents);
    when(context.getEnvironmentVariablesConfig()).thenReturn(new EnvironmentVariablesConfig());
    when(context.overrideEnvironmentVariables(any(EnvironmentVariablesConfig.class))).thenReturn(context);
    RunOnAllAgents.CounterBasedJobNameGenerator jobNameGenerator = new RunOnAllAgents.CounterBasedJobNameGenerator(CaseInsensitiveString.str(jobConfig.name()));
    JobInstances jobs = instanceFactory.createJobInstance(new CaseInsensitiveString("stageName"), jobConfig, context, new TimeProvider(), jobNameGenerator);
    assertThat(jobs.toArray(), hasItemInArray(hasProperty("name", is("foo-runOnAll-1"))));
    assertThat(jobs.toArray(), hasItemInArray(hasProperty("agentUuid", is("uuid1"))));
    assertThat(jobs.toArray(), hasItemInArray(hasProperty("runOnAllAgents", is(true))));
    // Fixed: this previously asserted "foo-runOnAll-1" a second time; the second
    // instance must carry the next counter value from the name generator.
    assertThat(jobs.toArray(), hasItemInArray(hasProperty("name", is("foo-runOnAll-2"))));
    assertThat(jobs.toArray(), hasItemInArray(hasProperty("agentUuid", is("uuid2"))));
    assertThat(jobs.toArray(), hasItemInArray(hasProperty("runOnAllAgents", is(true))));
    assertThat(jobs.size(), is(2));
}
// Scheduling a runOnAllAgents job when no agents match must fail with a
// descriptive message naming the job and stage.
@Test
void shouldFailWhenDoesNotFindAnyMatchingAgents() {
JobConfig jobConfig = new JobConfig("foo");
jobConfig.setRunOnAllAgents(true);
// NOTE(review): the mocked context below is never passed to the factory (a
// fresh DefaultSchedulingContext — which has no agents — is used instead); the
// stubbing looks like dead setup. Confirm and consider cleaning it up.
SchedulingContext context = mock(SchedulingContext.class);
when(context.getApprovedBy()).thenReturn("chris");
when(context.findAgentsMatching(new ResourceConfigs())).thenReturn(new ArrayList<>());
when(context.getEnvironmentVariablesConfig()).thenReturn(new EnvironmentVariablesConfig());
when(context.overrideEnvironmentVariables(any(EnvironmentVariablesConfig.class))).thenReturn(context);
try {
RunOnAllAgents.CounterBasedJobNameGenerator jobNameGenerator = new RunOnAllAgents.CounterBasedJobNameGenerator(CaseInsensitiveString.str(jobConfig.name()));
instanceFactory.createJobInstance(new CaseInsensitiveString("myStage"), jobConfig, new DefaultSchedulingContext(), new TimeProvider(), jobNameGenerator);
fail("should have failed as no agents matched");
} catch (Exception e) {
assertThat(e.getMessage(), is("Could not find matching agents to run job [foo] of stage [myStage]."));
}
}
@Test
void shouldFailWhenNoAgentsmatchAJob() {
    // Scheduling a whole stage that contains a runOnAllAgents job, in a context
    // with no agents at all, must fail with a descriptive message.
    DefaultSchedulingContext schedulingContext = new DefaultSchedulingContext("raghu/vinay", new Agents());
    JobConfig fooJob = new JobConfig(new CaseInsensitiveString("foo"), new ResourceConfigs(), new ArtifactTypeConfigs());
    fooJob.setRunOnAllAgents(true);
    JobConfig barJob = new JobConfig(new CaseInsensitiveString("bar"), new ResourceConfigs(), new ArtifactTypeConfigs());
    StageConfig stageConfig = new StageConfig(new CaseInsensitiveString("blah-stage"), new JobConfigs(fooJob, barJob));
    try {
        instanceFactory.createStageInstance(stageConfig, schedulingContext, "md5", new TimeProvider());
        fail("expected exception but not thrown");
    } catch (Exception e) {
        assertThat(e.getMessage(), is("Could not find matching agents to run job [foo] of stage [blah-stage]."));
    }
}
// Two runOnAll instances of the same job cannot both be selected for rerun once
// the job config no longer has runOnAllAgents set — that would schedule two
// instances of what is now a single-instance job.
@Test
void shouldBomb_ForRerun_OfASingleInstanceJobType_WhichWasEarlierRunOnAll_WithTwoRunOnAllInstancesSelectedForRerun() {
Date old = new DateTime().minusDays(2).toDate();
StageConfig stageConfig = StageConfigMother.custom("dev", "rails", "java");
JobConfig railsConfig = stageConfig.getJobs().getJob(new CaseInsensitiveString("rails"));
railsConfig.setRunOnAllAgents(true);
railsConfig.addResourceConfig("foobar");
// agent1 and agent2 match the "foobar" resource; agent3 does not.
Agent agent1 = new Agent("abcd1234", "host", "127.0.0.2", singletonList("foobar"));
Agent agent2 = new Agent("1234abcd", "ghost", "192.168.1.2", asList("baz", "foobar"));
Agent agent3 = new Agent("7890abdc", "lost", "10.4.3.55", singletonList("crapyagent"));
DefaultSchedulingContext schedulingContext = new DefaultSchedulingContext("loser", new Agents(agent1, agent2, agent3));
RunOnAllAgents.CounterBasedJobNameGenerator jobNameGenerator = new RunOnAllAgents.CounterBasedJobNameGenerator(CaseInsensitiveString.str(railsConfig.name()));
JobInstances jobs = instanceFactory.createJobInstance(new CaseInsensitiveString("dev"), railsConfig, schedulingContext, new TimeProvider(), jobNameGenerator);
Stage stage = createStageInstance(old, jobs);
// Config is switched back to single-instance before the rerun is requested.
railsConfig.setRunOnAllAgents(false);
try {
instanceFactory.createStageForRerunOfJobs(stage, a("rails-runOnAll-1", "rails-runOnAll-2"), schedulingContext, stageConfig, new TimeProvider(), "md5");
fail("should have failed when multiple run on all agents jobs are selected when job-config does not have run on all flag anymore");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), is("Cannot schedule multiple instances of job named 'rails'."));
}
}
// When a formerly runOnAll job is rerun after its config reverts to single
// instance, the rerun job is renamed back to "rails" but keeps the agent it
// originally ran on; the non-selected runOnAll instance and "java" are copies.
@Test
void should_NOT_ClearAgentAssignment_ForRerun_OfASingleInstanceJobType_WhichWasEarlierRunOnAll() {
Date old = new DateTime().minusDays(2).toDate();
StageConfig stageConfig = StageConfigMother.custom("dev", "rails", "java");
JobConfig railsConfig = stageConfig.getJobs().getJob(new CaseInsensitiveString("rails"));
railsConfig.setRunOnAllAgents(true);
railsConfig.addResourceConfig("foobar");
// agent1 and agent2 match the "foobar" resource; agent3 does not.
Agent agent1 = new Agent("abcd1234", "host", "127.0.0.2", singletonList("foobar"));
Agent agent2 = new Agent("1234abcd", "ghost", "192.168.1.2", asList("baz", "foobar"));
Agent agent3 = new Agent("7890abdc", "lost", "10.4.3.55", singletonList("crapyagent"));
DefaultSchedulingContext schedulingContext = new DefaultSchedulingContext("loser", new Agents(agent1, agent2, agent3));
RunOnAllAgents.CounterBasedJobNameGenerator jobNameGenerator = new RunOnAllAgents.CounterBasedJobNameGenerator(CaseInsensitiveString.str(railsConfig.name()));
JobInstances jobs = instanceFactory.createJobInstance(new CaseInsensitiveString("dev"), railsConfig, schedulingContext, new TimeProvider(), jobNameGenerator);
Stage stage = createStageInstance(old, jobs);
railsConfig.setRunOnAllAgents(false);
Stage newStage = instanceFactory.createStageForRerunOfJobs(stage, a("rails-runOnAll-1"), schedulingContext, stageConfig, new TimeProvider(), "md5");
assertThat(newStage.getJobInstances().size(), is(3));
JobInstance newRailsJob = newStage.getJobInstances().getByName("rails");
assertNewJob(old, newRailsJob);
// Rerun keeps the agent of the instance it was rerun from.
assertThat(newRailsJob.getAgentUuid(), is("abcd1234"));
JobInstance copiedRailsJob = newStage.getJobInstances().getByName("rails-runOnAll-2");
// 102 = id createStageInstance assigned to the second job (ids start at 101).
assertCopiedJob(copiedRailsJob, 102l);
assertThat(copiedRailsJob.getAgentUuid(), is("1234abcd"));
JobInstance copiedJavaJob = newStage.getJobInstances().getByName("java");
assertCopiedJob(copiedJavaJob, 12l);
assertThat(copiedJavaJob.getAgentUuid(), is(not(nullValue())));
}
// Rerunning a job whose config is now runOnAllAgents expands the rerun into one
// fresh instance per matching agent (each assigned to that agent), while the
// non-selected "java" job is copied and keeps its original agent.
@Test
void shouldClearAgentAssignment_ForRunOnAllAgentsJobType() {
Date old = new DateTime().minusDays(2).toDate();
JobInstance rails = jobInstance(old, "rails", 7, 10);
JobInstance java = jobInstance(old, "java", 12, 22);
Stage stage = stage(9, rails, java);
StageConfig stageConfig = StageConfigMother.custom("dev", "rails", "java");
JobConfig railsConfig = stageConfig.getJobs().getJob(new CaseInsensitiveString("rails"));
railsConfig.setRunOnAllAgents(true);
railsConfig.addResourceConfig("foobar");
// agent1 and agent2 match the "foobar" resource; agent3 does not.
Agent agent1 = new Agent("abcd1234", "host", "127.0.0.2", singletonList("foobar"));
Agent agent2 = new Agent("1234abcd", "ghost", "192.168.1.2", asList("baz", "foobar"));
Agent agent3 = new Agent("7890abdc", "lost", "10.4.3.55", singletonList("crapyagent"));
DefaultSchedulingContext context = new DefaultSchedulingContext("loser", new Agents(agent1, agent2, agent3));
Stage newStage = instanceFactory.createStageForRerunOfJobs(stage, a("rails"), context, stageConfig, new TimeProvider(), "md5");
assertThat(newStage.getJobInstances().size(), is(3));
JobInstance newRailsFirstJob = newStage.getJobInstances().getByName("rails-runOnAll-1");
assertNewJob(old, newRailsFirstJob);
assertThat(newRailsFirstJob.getAgentUuid(), is("abcd1234"));
JobInstance newRailsSecondJob = newStage.getJobInstances().getByName("rails-runOnAll-2");
assertNewJob(old, newRailsSecondJob);
assertThat(newRailsSecondJob.getAgentUuid(), is("1234abcd"));
JobInstance copiedJavaJob = newStage.getJobInstances().getByName("java");
assertCopiedJob(copiedJavaJob, 12l);
assertThat(copiedJavaJob.getAgentUuid(), is(not(nullValue())));
}
// Rerun of a runOnAll instance must fail with CannotRerunJobException when the
// job's config has been removed from the stage entirely.
@Test
void shouldNotRerun_WhenJobConfigDoesNotExistAnymore_ForRunOnAllAgentsJobInstance() {
Date old = new DateTime().minusDays(2).toDate();
StageConfig stageConfig = StageConfigMother.custom("dev", "rails", "java");
JobConfig railsConfig = stageConfig.getJobs().getJob(new CaseInsensitiveString("rails"));
railsConfig.setRunOnAllAgents(true);
railsConfig.addResourceConfig("foobar");
// agent1 and agent2 match the "foobar" resource; agent3 does not.
Agent agent1 = new Agent("abcd1234", "host", "127.0.0.2", singletonList("foobar"));
Agent agent2 = new Agent("1234abcd", "ghost", "192.168.1.2", asList("baz", "foobar"));
Agent agent3 = new Agent("7890abdc", "lost", "10.4.3.55", singletonList("crapyagent"));
DefaultSchedulingContext schedulingContext = new DefaultSchedulingContext("loser", new Agents(agent1, agent2, agent3));
RunOnAllAgents.CounterBasedJobNameGenerator jobNameGenerator = new RunOnAllAgents.CounterBasedJobNameGenerator(CaseInsensitiveString.str(railsConfig.name()));
JobInstances jobs = instanceFactory.createJobInstance(new CaseInsensitiveString("dev"), railsConfig, schedulingContext, new TimeProvider(), jobNameGenerator);
Stage stage = createStageInstance(old, jobs);
Stage newStage = null;
CannotRerunJobException exception = null;
try {
// The rerun config only contains "java" — "rails" no longer exists.
newStage = instanceFactory.createStageForRerunOfJobs(stage, a("rails-runOnAll-1"), new DefaultSchedulingContext("loser", new Agents()), StageConfigMother.custom("dev", "java"),
new TimeProvider(), "md5");
fail("should not schedule when job config does not exist anymore");
} catch (CannotRerunJobException e) {
exception = e;
}
assertThat(exception.getJobName(), is("rails"));
assertThat(newStage, is(nullValue()));
}
// Rerun of a runOnAll instance must fail when the job's config has since been
// switched to 'run multiple instance'.
@Test
void shouldNotRerun_WhenJobConfigIsChangedToRunMultipleInstance_ForRunOnAllAgentsJobInstance() {
Date old = new DateTime().minusDays(2).toDate();
StageConfig stageConfig = StageConfigMother.custom("dev", "rails", "java");
JobConfig railsConfig = stageConfig.getJobs().getJob(new CaseInsensitiveString("rails"));
railsConfig.setRunOnAllAgents(true);
railsConfig.addResourceConfig("foobar");
// agent1 and agent2 match the "foobar" resource; agent3 does not.
Agent agent1 = new Agent("abcd1234", "host", "127.0.0.2", singletonList("foobar"));
Agent agent2 = new Agent("1234abcd", "ghost", "192.168.1.2", asList("baz", "foobar"));
Agent agent3 = new Agent("7890abdc", "lost", "10.4.3.55", singletonList("crapyagent"));
DefaultSchedulingContext schedulingContext = new DefaultSchedulingContext("loser", new Agents(agent1, agent2, agent3));
RunOnAllAgents.CounterBasedJobNameGenerator jobNameGenerator = new RunOnAllAgents.CounterBasedJobNameGenerator(CaseInsensitiveString.str(railsConfig.name()));
JobInstances jobs = instanceFactory.createJobInstance(new CaseInsensitiveString("dev"), railsConfig, schedulingContext, new TimeProvider(), jobNameGenerator);
Stage stage = createStageInstance(old, jobs);
Stage newStage = null;
// Flip the run type from run-on-all to run-multiple before the rerun.
railsConfig.setRunOnAllAgents(false);
railsConfig.setRunInstanceCount(10);
CannotRerunJobException exception = null;
try {
newStage = instanceFactory.createStageForRerunOfJobs(stage, a("rails-runOnAll-1"), schedulingContext, stageConfig, new TimeProvider(), "md5");
fail("should not schedule since job config changed to run multiple instance");
} catch (CannotRerunJobException e) {
exception = e;
}
assertThat(exception.getJobName(), is("rails"));
assertThat(exception.getInformation(), is("Run configuration for job has been changed to 'run multiple instance'."));
assertThat(newStage, is(nullValue()));
}
/*
 * runInstanceCount ("run multiple instance") job scheduling tests.
 */
// runInstanceCount=3 should create three sequentially named instances, each
// exposing its index and the total via the GO_JOB_RUN_INDEX and
// GO_JOB_RUN_COUNT environment variables.
@Test
void shouldCreateJobInstancesCorrectly_RunMultipleInstance() {
Date old = new DateTime().minusDays(2).toDate();
StageConfig stageConfig = StageConfigMother.custom("dev", "rails", "java");
JobConfig railsConfig = stageConfig.getJobs().getJob(new CaseInsensitiveString("rails"));
railsConfig.setRunInstanceCount(3);
DefaultSchedulingContext schedulingContext = new DefaultSchedulingContext("loser", new Agents());
RunMultipleInstance.CounterBasedJobNameGenerator jobNameGenerator = new RunMultipleInstance.CounterBasedJobNameGenerator(CaseInsensitiveString.str(railsConfig.name()));
JobInstances jobs = instanceFactory.createJobInstance(new CaseInsensitiveString("dev"), railsConfig, schedulingContext, new TimeProvider(), jobNameGenerator);
assertThat(jobs.get(0).getName(), is("rails-runInstance-1"));
assertEnvironmentVariable(jobs.get(0), 0, "GO_JOB_RUN_INDEX", "1");
assertEnvironmentVariable(jobs.get(0), 1, "GO_JOB_RUN_COUNT", "3");
assertThat(jobs.get(1).getName(), is("rails-runInstance-2"));
assertEnvironmentVariable(jobs.get(1), 0, "GO_JOB_RUN_INDEX", "2");
assertEnvironmentVariable(jobs.get(1), 1, "GO_JOB_RUN_COUNT", "3");
assertThat(jobs.get(2).getName(), is("rails-runInstance-3"));
assertEnvironmentVariable(jobs.get(2), 0, "GO_JOB_RUN_INDEX", "3");
assertEnvironmentVariable(jobs.get(2), 1, "GO_JOB_RUN_COUNT", "3");
Stage stage = createStageInstance(old, jobs);
JobInstances jobInstances = stage.getJobInstances();
// 3 rails instances plus the "java" job appended by createStageInstance.
assertThat(jobInstances.size(), is(4));
assertRunMultipleJobInstance(jobInstances.get(0), "rails-runInstance-1");
assertRunMultipleJobInstance(jobInstances.get(1), "rails-runInstance-2");
assertRunMultipleJobInstance(jobInstances.get(2), "rails-runInstance-3");
assertThat(jobInstances.get(3).getName(), is("java"));
}
// On rerun of two of the three run-multiple instances, the untouched instance
// and "java" come first (as copies) and the rerun instances are appended after
// them, keeping their original names and GO_JOB_RUN_* values.
@Test
void shouldCreateJobInstancesCorrectly_RunMultipleInstance_Rerun() {
Date old = new DateTime().minusDays(2).toDate();
StageConfig stageConfig = StageConfigMother.custom("dev", "rails", "java");
JobConfig railsConfig = stageConfig.getJobs().getJob(new CaseInsensitiveString("rails"));
railsConfig.setRunInstanceCount(3);
DefaultSchedulingContext schedulingContext = new DefaultSchedulingContext("loser", new Agents());
RunMultipleInstance.CounterBasedJobNameGenerator jobNameGenerator = new RunMultipleInstance.CounterBasedJobNameGenerator(CaseInsensitiveString.str(railsConfig.name()));
JobInstances jobs = instanceFactory.createJobInstance(new CaseInsensitiveString("dev"), railsConfig, schedulingContext, new TimeProvider(), jobNameGenerator);
assertThat(jobs.get(0).getName(), is("rails-runInstance-1"));
assertEnvironmentVariable(jobs.get(0), 0, "GO_JOB_RUN_INDEX", "1");
assertEnvironmentVariable(jobs.get(0), 1, "GO_JOB_RUN_COUNT", "3");
assertThat(jobs.get(1).getName(), is("rails-runInstance-2"));
assertEnvironmentVariable(jobs.get(1), 0, "GO_JOB_RUN_INDEX", "2");
assertEnvironmentVariable(jobs.get(1), 1, "GO_JOB_RUN_COUNT", "3");
assertThat(jobs.get(2).getName(), is("rails-runInstance-3"));
assertEnvironmentVariable(jobs.get(2), 0, "GO_JOB_RUN_INDEX", "3");
assertEnvironmentVariable(jobs.get(2), 1, "GO_JOB_RUN_COUNT", "3");
Stage stage = createStageInstance(old, jobs);
Stage stageForRerun = instanceFactory.createStageForRerunOfJobs(stage, a("rails-runInstance-1", "rails-runInstance-2"), schedulingContext, stageConfig, clock, "md5");
JobInstances jobsForRerun = stageForRerun.getJobInstances();
// Index 0: the instance that was NOT selected for rerun (a copy).
assertThat(jobsForRerun.get(0).getName(), is("rails-runInstance-3"));
assertEnvironmentVariable(jobsForRerun.get(0), 0, "GO_JOB_RUN_INDEX", "3");
assertEnvironmentVariable(jobsForRerun.get(0), 1, "GO_JOB_RUN_COUNT", "3");
// Indexes 2 and 3: the rerun instances, appended after the copies.
assertThat(jobsForRerun.get(2).getName(), is("rails-runInstance-1"));
assertEnvironmentVariable(jobsForRerun.get(2), 0, "GO_JOB_RUN_INDEX", "1");
assertEnvironmentVariable(jobsForRerun.get(2), 1, "GO_JOB_RUN_COUNT", "3");
assertThat(jobsForRerun.get(3).getName(), is("rails-runInstance-2"));
assertEnvironmentVariable(jobsForRerun.get(3), 0, "GO_JOB_RUN_INDEX", "2");
assertEnvironmentVariable(jobsForRerun.get(3), 1, "GO_JOB_RUN_COUNT", "3");
assertThat(jobsForRerun.size(), is(4));
assertRunMultipleJobInstance(jobsForRerun.get(0), "rails-runInstance-3");
assertThat(jobsForRerun.get(1).getName(), is("java"));
assertReRunMultipleJobInstance(jobsForRerun.get(2), "rails-runInstance-1");
assertReRunMultipleJobInstance(jobsForRerun.get(3), "rails-runInstance-2");
}
// Rerun of a run-multiple instance must fail with CannotRerunJobException when
// the job's config no longer exists in the stage.
@Test
void shouldNotRerun_WhenJobConfigDoesNotExistAnymore_ForRunMultipleInstance() {
Date old = new DateTime().minusDays(2).toDate();
StageConfig stageConfig = StageConfigMother.custom("dev", "rails", "java");
JobConfig railsConfig = stageConfig.getJobs().getJob(new CaseInsensitiveString("rails"));
railsConfig.setRunInstanceCount(3);
DefaultSchedulingContext schedulingContext = new DefaultSchedulingContext("loser", new Agents());
RunMultipleInstance.CounterBasedJobNameGenerator jobNameGenerator = new RunMultipleInstance.CounterBasedJobNameGenerator(CaseInsensitiveString.str(railsConfig.name()));
JobInstances jobs = instanceFactory.createJobInstance(new CaseInsensitiveString("dev"), railsConfig, schedulingContext, new TimeProvider(), jobNameGenerator);
Stage stage = createStageInstance(old, jobs);
Stage newStage = null;
CannotRerunJobException exception = null;
try {
// The rerun config only contains "java" — "rails" no longer exists.
newStage = instanceFactory.createStageForRerunOfJobs(stage, a("rails-runInstance-1"), schedulingContext, StageConfigMother.custom("dev", "java"),
new TimeProvider(), "md5");
fail("should not schedule when job config does not exist anymore");
} catch (CannotRerunJobException e) {
exception = e;
}
assertThat(exception.getJobName(), is("rails"));
assertThat(exception.getInformation(), is("Configuration for job doesn't exist."));
assertThat(newStage, is(nullValue()));
}
// Rerun of a run-multiple instance must fail when its run type changed — first
// checked against 'run on all agents', then against plain single-instance
// ('simple') — with a message naming the new run type each time.
@Test
void shouldNotRerun_WhenJobRunConfigIsChanged_ForRunMultipleInstance() {
Date old = new DateTime().minusDays(2).toDate();
StageConfig stageConfig = StageConfigMother.custom("dev", "rails", "java");
JobConfig railsConfig = stageConfig.getJobs().getJob(new CaseInsensitiveString("rails"));
railsConfig.setRunInstanceCount(3);
DefaultSchedulingContext schedulingContext = new DefaultSchedulingContext("loser", new Agents());
RunMultipleInstance.CounterBasedJobNameGenerator jobNameGenerator = new RunMultipleInstance.CounterBasedJobNameGenerator(CaseInsensitiveString.str(railsConfig.name()));
JobInstances jobs = instanceFactory.createJobInstance(new CaseInsensitiveString("dev"), railsConfig, schedulingContext, new TimeProvider(), jobNameGenerator);
Stage stage = createStageInstance(old, jobs);
Stage newStage = null;
// Case 1: run type changed to run-on-all-agents.
railsConfig.setRunOnAllAgents(true);
railsConfig.setRunInstanceCount(0);
CannotRerunJobException exception = null;
try {
newStage = instanceFactory.createStageForRerunOfJobs(stage, a("rails-runInstance-1"), schedulingContext, stageConfig, new TimeProvider(), "md5");
fail("should not schedule since job config changed to run multiple instance");
} catch (CannotRerunJobException e) {
exception = e;
}
assertThat(exception.getJobName(), is("rails"));
assertThat(exception.getInformation(), is("Run configuration for job has been changed to 'run on all agents'."));
assertThat(newStage, is(nullValue()));
// Case 2: run type changed to plain single-instance ('simple').
railsConfig.setRunOnAllAgents(false);
try {
newStage = instanceFactory.createStageForRerunOfJobs(stage, a("rails-runInstance-1"), schedulingContext, stageConfig, new TimeProvider(), "md5");
fail("should not schedule since job config changed to run multiple instance");
} catch (CannotRerunJobException e) {
exception = e;
}
assertThat(exception.getJobName(), is("rails"));
assertThat(exception.getInformation(), is("Run configuration for job has been changed to 'simple'."));
assertThat(newStage, is(nullValue()));
}
// Builds a "dev" stage containing the given jobs and assigns it the supplied
// database id.
private Stage stage(long id, JobInstance... jobs) {
    Stage result = new Stage("dev", new JobInstances(jobs), "anonymous", null, "manual", new TimeProvider());
    result.setId(id);
    return result;
}
// Marks every given job as passed (assigning sequential ids from 101 upward),
// appends an old completed "java" job, and wraps them all in a stage with id 9.
private Stage createStageInstance(Date old, JobInstances jobs) {
    int jobId = 100;
    for (JobInstance job : jobs) {
        jobId++;
        passJob(new Date(), jobId, jobId * 10, job);
    }
    jobs.add(jobInstance(old, "java", 12, 22));
    return stage(9, jobs.toArray(new JobInstance[0]));
}
private JobInstance jobInstance(final Date date, final String jobName, final int id, int transitionIdStart) {
JobInstance jobInstance = new JobInstance(jobName, new TimeProvider() {
@Override
public Date currentTime() {
return date;
}
});
jobInstance.setAgentUuid(UUID.randomUUID().toString());
return passJob(date, id, transitionIdStart, jobInstance);
}
private JobInstance passJob(Date date, int id, int transitionIdStart, JobInstance jobInstance) {
jobInstance.setId(id);
jobInstance.changeState(JobState.Completed, date);
for (JobStateTransition jobStateTransition : jobInstance.getTransitions()) {
jobStateTransition.setId(++transitionIdStart);
}
jobInstance.setResult(JobResult.Passed);
return jobInstance;
}
    // Verifies that newJava is an unsaved copy (not a rerun) of an original job:
    // result/state are carried over and it points back at the original job id.
    private void assertCopiedJob(JobInstance newJava, final long originalId) {
        // Unsaved entities have id -1 until persisted.
        assertThat(newJava.getId(), is(-1l));
        assertThat(newJava.getTransitions().isEmpty(), is(false));
        assertThat(newJava.getResult(), is(Passed));
        assertThat(newJava.getState(), is(Completed));
        // Copied transitions are also unsaved (id -1).
        assertThat(newJava.getTransitions().byState(Scheduled).getId(), is(-1l));
        assertThat(newJava.getTransitions().byState(Completed).getId(), is(-1l));
        assertThat(newJava.getOriginalJobId(), is(originalId));
        assertThat(newJava.isRerun(), is(false));
        assertThat(newJava.isCopy(), is(true));
    }
private void assertNewJob(Date old, JobInstance newRails) {
JobStateTransition newSchedulingTransition = assertNewJob(newRails);
assertThat(newSchedulingTransition.getStateChangeTime().after(old), is(true));
}
private JobStateTransition assertNewJob(JobInstance newRails) {
assertThat(newRails.getId(), is(-1l));
assertThat(newRails.getTransitions().size(), is(1));
JobStateTransition newSchedulingTransition = newRails.getTransitions().byState(JobState.Scheduled);
assertThat(newSchedulingTransition.getId(), is(-1l));
assertThat(newRails.getResult(), is(Unknown));
assertThat(newRails.getState(), is(Scheduled));
assertThat(newRails.isRerun(), is(true));
return newSchedulingTransition;
}
    // Asserts the job is a plain job: neither run-on-all-agents nor
    // run-multiple-instance, and not a rerun.
    private void assertSimpleJobInstance(JobInstance jobInstance, String jobName) {
        assertThat(jobInstance.getName(), is(jobName));
        assertThat(jobInstance.isRunOnAllAgents(), is(false));
        assertThat(jobInstance.isRunMultipleInstance(), is(false));
        assertThat(jobInstance.isRerun(), is(false));
    }
    // Asserts the job is a run-on-all-agents instance (not multi-instance, not a rerun).
    private void assertRunOnAllAgentsJobInstance(JobInstance jobInstance, String jobName) {
        assertThat(jobInstance.getName(), is(jobName));
        assertThat(jobInstance.isRunOnAllAgents(), is(true));
        assertThat(jobInstance.isRunMultipleInstance(), is(false));
        assertThat(jobInstance.isRerun(), is(false));
    }
    // Asserts the job is a run-multiple-instance job (not run-on-all-agents, not a rerun).
    private void assertRunMultipleJobInstance(JobInstance jobInstance, String jobName) {
        assertThat(jobInstance.getName(), is(jobName));
        assertThat(jobInstance.isRunMultipleInstance(), is(true));
        assertThat(jobInstance.isRunOnAllAgents(), is(false));
        assertThat(jobInstance.isRerun(), is(false));
    }
    // Asserts the job is a RERUN of a run-multiple-instance job.
    private void assertReRunMultipleJobInstance(JobInstance jobInstance, String jobName) {
        assertThat(jobInstance.getName(), is(jobName));
        assertThat(jobInstance.isRunMultipleInstance(), is(true));
        assertThat(jobInstance.isRunOnAllAgents(), is(false));
        assertThat(jobInstance.isRerun(), is(true));
    }
    // Asserts the job plan's environment variable at 'index' has the given name and value.
    private void assertEnvironmentVariable(JobInstance jobInstance, int index, String name, String value) {
        assertThat(jobInstance.getPlan().getVariables().get(index).getName(), is(name));
        assertThat(jobInstance.getPlan().getVariables().get(index).getValue(), is(value));
    }
}
| |
/* Copyright (c) 2012 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kivy.billing;
import android.app.Activity;
import android.app.PendingIntent;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.IntentSender.SendIntentException;
import android.content.ServiceConnection;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.RemoteException;
import android.text.TextUtils;
import android.util.Log;
import com.android.vending.billing.IInAppBillingService;
import org.json.JSONException;
import java.util.ArrayList;
import java.util.List;
/**
* Provides convenience methods for in-app billing. Create one instance of this
* class for your application and use it to process in-app billing operations.
* It provides synchronous (blocking) and asynchronous (non-blocking) methods for
* many common in-app billing operations, as well as automatic signature
* verification.
*
* After instantiating, you must perform setup in order to start using the object.
* To perform setup, call the {@link #startSetup} method and provide a listener;
* that listener will be notified when setup is complete, after which (and not before)
* you may call other methods.
*
* After setup is complete, you may query whether the user owns a given item or
* not by calling {@link #isOwned}, get all items owned with {@link #getOwnedSkus},
* get an item's price with {@link #getPrice}, amongst others (see documentation
* for specific methods).
*
* Please notice that the object will only have knowledge about owned items; it
* will not automatically have information (such as price, description) for items
* that are not owned by the user, because the server will not automatically
* provide those. In order to query information for an item that's not owned
* (such as to display the price to the user before a purchase), you should first
* bring the item's sku to the object's knowledge by calling {@link #addSku}
* and then perform an inventory refresh by calling {@link #refreshInventory()}
* or its corresponding asynchronous version {@link #refreshInventoryAsync}.
*
* If you know the skus of all the items that you can possibly be interested in,
* you can call {@link #addSku} for those items before {@link #startSetup}, and
* that way all the information about them will be available from the start,
* with no need to refresh the inventory later.
*
* When you are done with this object, don't forget to call {@link #dispose}
* to ensure proper cleanup. This object holds a binding to the in-app billing
* service, which will leak unless you dispose of it correctly. If you created
* the object on an Activity's onCreate method, then the recommended
* place to dispose of it is the Activity's onDestroy method.
*
* A note about threading: When using this object from a background thread, you may
* call the blocking versions of methods; when using from a UI thread, call
* only the asynchronous versions and handle the results via callbacks.
* Also, notice that you can only call one asynchronous operation at a time;
* attempting to start a second asynchronous operation while the first one
* has not yet completed will result in an exception being thrown.
*
* @author Bruno Oliveira (Google)
*
*/
public class IabHelper {
    // Is debug logging enabled?
    boolean mDebugLog = false;
    // Tag used for LogCat output (configurable via enableDebugLogging).
    String mDebugTag = "IabHelper";
    // Is setup done?
    public boolean mSetupDone = false;
    // Is an asynchronous operation in progress?
    // (only one at a time can be in progress)
    public boolean mAsyncInProgress = false;
    // (for logging/debugging)
    // if mAsyncInProgress == true, what asynchronous operation is in progress?
    String mAsyncOperation = "";
    // Context we were passed during initialization
    Context mContext;
    // Connection to the service
    IInAppBillingService mService;
    ServiceConnection mServiceConn;
    // The request code used to launch purchase flow
    int mRequestCode;
    // Public key for verifying signature, in base64 encoding
    String mSignatureBase64 = null;
    // Billing response codes (as returned by the in-app billing service)
    public static final int BILLING_RESPONSE_RESULT_OK = 0;
    public static final int BILLING_RESPONSE_RESULT_USER_CANCELED = 1;
    public static final int BILLING_RESPONSE_RESULT_BILLING_UNAVAILABLE = 3;
    public static final int BILLING_RESPONSE_RESULT_ITEM_UNAVAILABLE = 4;
    public static final int BILLING_RESPONSE_RESULT_DEVELOPER_ERROR = 5;
    public static final int BILLING_RESPONSE_RESULT_ERROR = 6;
    public static final int BILLING_RESPONSE_RESULT_ITEM_ALREADY_OWNED = 7;
    public static final int BILLING_RESPONSE_RESULT_ITEM_NOT_OWNED = 8;
    // IAB Helper error codes (negative, counting down from IABHELPER_ERROR_BASE,
    // to avoid colliding with the service's own response codes above)
    public static final int IABHELPER_ERROR_BASE = -1000;
    public static final int IABHELPER_REMOTE_EXCEPTION = -1001;
    public static final int IABHELPER_BAD_RESPONSE = -1002;
    public static final int IABHELPER_VERIFICATION_FAILED = -1003;
    public static final int IABHELPER_SEND_INTENT_FAILED = -1004;
    public static final int IABHELPER_USER_CANCELLED = -1005;
    public static final int IABHELPER_UNKNOWN_PURCHASE_RESPONSE = -1006;
    public static final int IABHELPER_MISSING_TOKEN = -1007;
    public static final int IABHELPER_UNKNOWN_ERROR = -1008;
    // Keys for the responses from InAppBillingService
    public static final String RESPONSE_CODE = "RESPONSE_CODE";
    public static final String RESPONSE_GET_SKU_DETAILS_LIST = "DETAILS_LIST";
    public static final String RESPONSE_BUY_INTENT = "BUY_INTENT";
    public static final String RESPONSE_INAPP_PURCHASE_DATA = "INAPP_PURCHASE_DATA";
    public static final String RESPONSE_INAPP_SIGNATURE = "INAPP_DATA_SIGNATURE";
    public static final String RESPONSE_INAPP_ITEM_LIST = "INAPP_PURCHASE_ITEM_LIST";
    public static final String RESPONSE_INAPP_PURCHASE_DATA_LIST = "INAPP_PURCHASE_DATA_LIST";
    public static final String RESPONSE_INAPP_SIGNATURE_LIST = "INAPP_DATA_SIGNATURE_LIST";
    public static final String INAPP_CONTINUATION_TOKEN = "INAPP_CONTINUATION_TOKEN";
    // Item type: in-app item
    public static final String ITEM_TYPE_INAPP = "inapp";
    // some fields on the getSkuDetails response bundle
    public static final String GET_SKU_DETAILS_ITEM_LIST = "ITEM_ID_LIST";
    public static final String GET_SKU_DETAILS_ITEM_TYPE_LIST = "ITEM_TYPE_LIST";
/**
* Creates an instance. After creation, it will not yet be ready to use. You must perform
* setup by calling {@link #startSetup} and wait for setup to complete. This constructor does not
* block and is safe to call from a UI thread.
*
* @param ctx Your application or Activity context. Needed to bind to the in-app billing service.
* @param base64PublicKey Your application's public key, encoded in base64.
* This is used for verification of purchase signatures. You can find your app's base64-encoded
* public key in your application's page on Google Play Developer Console. Note that this
* is NOT your "developer public key".
*/
public IabHelper(Context ctx, String base64PublicKey) {
mContext = ctx.getApplicationContext();
mSignatureBase64 = base64PublicKey;
logDebug("IAB helper created.");
}
/**
* Enables or disable debug logging through LogCat.
*/
public void enableDebugLogging(boolean enable, String tag) {
mDebugLog = enable;
mDebugTag = tag;
}
    // Enables or disables debug logging, keeping the current LogCat tag.
    public void enableDebugLogging(boolean enable) {
        mDebugLog = enable;
    }
/**
* Callback for setup process. This listener's {@link #onIabSetupFinished} method is called
* when the setup process is complete.
*/
    // Implemented by callers of startSetup; invoked exactly when setup completes.
    public interface OnIabSetupFinishedListener {
        /**
         * Called to notify that setup is complete.
         *
         * @param result The result of the setup process.
         */
        public void onIabSetupFinished(IabResult result);
    }
/**
* Starts the setup process. This will start up the setup process asynchronously.
* You will be notified through the listener when the setup process is complete.
* This method is safe to call from a UI thread.
*
* @param listener The listener to notify when the setup process is complete.
*/
public void startSetup(final OnIabSetupFinishedListener listener) {
// If already set up, can't do it again.
if (mSetupDone) throw new IllegalStateException("IAB helper is already set up.");
// Connection to IAB service
logDebug("Starting in-app billing setup.");
mServiceConn = new ServiceConnection() {
@Override
public void onServiceDisconnected(ComponentName name) {
logDebug("Billing service disconnected.");
mService = null;
}
@Override
public void onServiceConnected(ComponentName name, IBinder service) {
logDebug("Billing service connected.");
mService = IInAppBillingService.Stub.asInterface(service);
String packageName = mContext.getPackageName();
try {
logDebug("Checking for in-app billing 3 support.");
int response = mService.isBillingSupported(3, packageName, ITEM_TYPE_INAPP);
if (response != BILLING_RESPONSE_RESULT_OK) {
if (listener != null) listener.onIabSetupFinished(new IabResult(response,
"Error checking for billing v3 support."));
return;
}
logDebug("In-app billing version 3 supported for " + packageName);
mSetupDone = true;
}
catch (RemoteException e) {
if (listener != null) {
listener.onIabSetupFinished(new IabResult(IABHELPER_REMOTE_EXCEPTION,
"RemoteException while setting up in-app billing."));
}
e.printStackTrace();
}
if (listener != null) {
listener.onIabSetupFinished(new IabResult(BILLING_RESPONSE_RESULT_OK, "Setup successful."));
}
}
};
mContext.bindService(new Intent("com.android.vending.billing.InAppBillingService.BIND"),
mServiceConn, Context.BIND_AUTO_CREATE);
}
/**
* Dispose of object, releasing resources. It's very important to call this
* method when you are done with this object. It will release any resources
* used by it such as service connections. Naturally, once the object is
* disposed of, it can't be used again.
*/
public void dispose() {
logDebug("Disposing.");
mSetupDone = false;
if (mServiceConn != null) {
logDebug("Unbinding from service.");
if (mContext != null) mContext.unbindService(mServiceConn);
mServiceConn = null;
mService = null;
mPurchaseListener = null;
}
}
/**
* Callback that notifies when a purchase is finished.
*/
    // Implemented by callers of launchPurchaseFlow; invoked from handleActivityResult.
    public interface OnIabPurchaseFinishedListener {
        /**
         * Called to notify that an in-app purchase finished. If the purchase was successful,
         * then the sku parameter specifies which item was purchased. If the purchase failed,
         * the sku and extraData parameters may or may not be null, depending on how far the purchase
         * process went.
         *
         * @param result The result of the purchase.
         * @param info The purchase information (null if purchase failed)
         */
        public void onIabPurchaseFinished(IabResult result, Purchase info);
    }
    // The listener registered on launchPurchaseFlow, which we have to call back when
    // the purchase finishes (cleared again in dispose()).
    OnIabPurchaseFinishedListener mPurchaseListener;
/**
* Same as calling {@link #launchPurchaseFlow(Activity, String, int, OnIabPurchaseFinishedListener, String)}
* with null as extraData.
*/
    // Convenience overload: launches the purchase flow with empty developer payload.
    public void launchPurchaseFlow(Activity act, String sku, int requestCode, OnIabPurchaseFinishedListener listener) {
        launchPurchaseFlow(act, sku, requestCode, listener, "");
    }
/**
* Initiate the UI flow for an in-app purchase. Call this method to initiate an in-app purchase,
* which will involve bringing up the Google Play screen. The calling activity will be paused while
* the user interacts with Google Play, and the result will be delivered via the activity's
* {@link android.app.Activity#onActivityResult} method, at which point you must call
* this object's {@link #handleActivityResult} method to continue the purchase flow. This method
* MUST be called from the UI thread of the Activity.
*
* @param act The calling activity.
* @param sku The sku of the item to purchase.
* @param requestCode A request code (to differentiate from other responses --
* as in {@link android.app.Activity#startActivityForResult}).
* @param listener The listener to notify when the purchase process finishes
* @param extraData Extra data (developer payload), which will be returned with the purchase data
* when the purchase completes. This extra data will be permanently bound to that purchase
* and will always be returned when the purchase is queried.
*/
public void launchPurchaseFlow(Activity act, String sku, int requestCode, OnIabPurchaseFinishedListener listener, String extraData) {
checkSetupDone("launchPurchaseFlow");
flagStartAsync("launchPurchaseFlow");
IabResult result;
try {
logDebug("Constructing buy intent for " + sku);
Bundle buyIntentBundle = mService.getBuyIntent(3, mContext.getPackageName(), sku, ITEM_TYPE_INAPP, extraData);
int response = getResponseCodeFromBundle(buyIntentBundle);
if (response != BILLING_RESPONSE_RESULT_OK) {
logError("Unable to buy item, Error response: " + getResponseDesc(response));
result = new IabResult(response, "Unable to buy item");
if (listener != null) listener.onIabPurchaseFinished(result, null);
}
PendingIntent pendingIntent = buyIntentBundle.getParcelable(RESPONSE_BUY_INTENT);
logDebug("Launching buy intent for " + sku + ". Request code: " + requestCode);
mRequestCode = requestCode;
mPurchaseListener = listener;
act.startIntentSenderForResult(pendingIntent.getIntentSender(),
requestCode, new Intent(),
Integer.valueOf(0), Integer.valueOf(0),
Integer.valueOf(0));
}
catch (SendIntentException e) {
logError("SendIntentException while launching purchase flow for sku " + sku);
e.printStackTrace();
result = new IabResult(IABHELPER_SEND_INTENT_FAILED, "Failed to send intent.");
if (listener != null) listener.onIabPurchaseFinished(result, null);
}
catch (RemoteException e) {
logError("RemoteException while launching purchase flow for sku " + sku);
e.printStackTrace();
result = new IabResult(IABHELPER_REMOTE_EXCEPTION, "Remote exception while starting purchase flow");
if (listener != null) listener.onIabPurchaseFinished(result, null);
}
}
/**
* Handles an activity result that's part of the purchase flow in in-app billing. If you
* are calling {@link #launchPurchaseFlow}, then you must call this method from your
* Activity's {@link android.app.Activity@onActivityResult} method. This method
* MUST be called from the UI thread of the Activity.
*
* @param requestCode The requestCode as you received it.
* @param resultCode The resultCode as you received it.
* @param data The data (Intent) as you received it.
* @return Returns true if the result was related to a purchase flow and was handled;
* false if the result was not related to a purchase, in which case you should
* handle it normally.
*/
    /**
     * Handles an activity result that's part of the purchase flow. Call from your
     * Activity's onActivityResult when using {@link #launchPurchaseFlow}. MUST be
     * called from the UI thread.
     *
     * NOTE(review): unlike Google's reference IabHelper, this variant returns true
     * ONLY on a fully verified successful purchase; every failure path (and the
     * non-OK/canceled paths) returns false — confirm callers expect this.
     *
     * @param requestCode The requestCode as you received it.
     * @param resultCode The resultCode as you received it.
     * @param data The data (Intent) as you received it.
     * @return true if the result was a verified successful purchase handled here;
     *         false otherwise (including when the request code is not ours).
     */
    public boolean handleActivityResult(int requestCode, int resultCode, Intent data) {
        IabResult result;
        // Not our request code: let the caller handle it.
        if (requestCode != mRequestCode) return false;
        checkSetupDone("handleActivityResult");
        // end of async purchase operation
        flagEndAsync();
        if (data == null) {
            logError("Null data in IAB activity result.");
            result = new IabResult(IABHELPER_BAD_RESPONSE, "Null data in IAB result");
            if (mPurchaseListener != null) mPurchaseListener.onIabPurchaseFinished(result, null);
            return false;
        }
        int responseCode = getResponseCodeFromIntent(data);
        String purchaseData = data.getStringExtra(RESPONSE_INAPP_PURCHASE_DATA);
        String dataSignature = data.getStringExtra(RESPONSE_INAPP_SIGNATURE);
        if (resultCode == Activity.RESULT_OK && responseCode == BILLING_RESPONSE_RESULT_OK) {
            logDebug("Successful resultcode from purchase activity.");
            logDebug("Purchase data: " + purchaseData);
            logDebug("Data signature: " + dataSignature);
            logDebug("Extras: " + data.getExtras());
            if (purchaseData == null || dataSignature == null) {
                logError("BUG: either purchaseData or dataSignature is null.");
                logDebug("Extras: " + data.getExtras().toString());
                result = new IabResult(IABHELPER_UNKNOWN_ERROR, "IAB returned null purchaseData or dataSignature");
                if (mPurchaseListener != null) mPurchaseListener.onIabPurchaseFinished(result, null);
                return false;
            }
            Purchase purchase = null;
            try {
                // Parse the purchase JSON, then verify its signature before trusting it.
                purchase = new Purchase(purchaseData, dataSignature);
                String sku = purchase.getSku();
                // Verify signature
                if (!Security.verifyPurchase(mSignatureBase64, purchaseData, dataSignature)) {
                    logError("Purchase signature verification FAILED for sku " + sku);
                    result = new IabResult(IABHELPER_VERIFICATION_FAILED, "Signature verification failed for sku " + sku);
                    if (mPurchaseListener != null) mPurchaseListener.onIabPurchaseFinished(result, purchase);
                    return false;
                }
                logDebug("Purchase signature successfully verified.");
            }
            catch (JSONException e) {
                logError("Failed to parse purchase data.");
                e.printStackTrace();
                result = new IabResult(IABHELPER_BAD_RESPONSE, "Failed to parse purchase data.");
                if (mPurchaseListener != null) mPurchaseListener.onIabPurchaseFinished(result, null);
                return false;
            }
            if (mPurchaseListener != null) {
                // NOTE(review): this log message was left by a previous author and
                // suggests the success callback is suspect — confirm its intent.
                logDebug("Everything okay, but if we call mPurchaseListener.onIabPurchaseFinished, it will explode");
                mPurchaseListener.onIabPurchaseFinished(new IabResult(BILLING_RESPONSE_RESULT_OK, "Success"), purchase);
                return true;
            }
        }
        else if (resultCode == Activity.RESULT_OK) {
            // result code was OK, but in-app billing response was not OK.
            logDebug("Result code was OK but in-app billing response was not OK: " + getResponseDesc(responseCode));
            if (mPurchaseListener != null) {
                result = new IabResult(responseCode, "Problem purchashing item.");
                mPurchaseListener.onIabPurchaseFinished(result, null);
            }
        }
        else if (resultCode == Activity.RESULT_CANCELED) {
            logDebug("Purchase canceled - Response: " + getResponseDesc(responseCode));
            result = new IabResult(IABHELPER_USER_CANCELLED, "User canceled.");
            if (mPurchaseListener != null) mPurchaseListener.onIabPurchaseFinished(result, null);
            return false;
        }
        else {
            logError("Purchase failed. Result code: " + Integer.toString(resultCode));
            // + ". Response: " + getResponseDesc(responseCode));
            result = new IabResult(IABHELPER_UNKNOWN_PURCHASE_RESPONSE, "Unknown purchase response.");
            if (mPurchaseListener != null) mPurchaseListener.onIabPurchaseFinished(result, null);
            return false;
        }
        return false;
    }
/**
* Queries the inventory. This will query all owned items from the server, as well as
* information on additional skus, if specified. This method may block or take long to execute.
* Do not call from a UI thread. For that, use the non-blocking version {@link #refreshInventoryAsync}.
*
* @param querySkuDetails if true, SKU details (price, description, etc) will be queried as well
* as purchase information.
* @param moreSkus additional skus to query information on, regardless of ownership. Ignored
* if null or if querySkuDetails is false.
* @throws IabException if a problem occurs while refreshing the inventory.
*/
public Inventory queryInventory(boolean querySkuDetails, List<String> moreSkus) throws IabException {
checkSetupDone("queryInventory");
try {
Inventory inv = new Inventory();
int r = queryPurchases(inv);
if (r != BILLING_RESPONSE_RESULT_OK) {
throw new IabException(r, "Error refreshing inventory (querying owned items).");
}
if (querySkuDetails) {
r = querySkuDetails(inv, moreSkus);
if (r != BILLING_RESPONSE_RESULT_OK) {
throw new IabException(r, "Error refreshing inventory (querying prices of items).");
}
}
return inv;
}
catch (RemoteException e) {
throw new IabException(IABHELPER_REMOTE_EXCEPTION, "Remote exception while refreshing inventory.", e);
}
catch (JSONException e) {
throw new IabException(IABHELPER_BAD_RESPONSE, "Error parsing JSON response while refreshing inventory.", e);
}
}
/**
* Listener that notifies when an inventory query operation completes.
*/
    // Implemented by callers of queryInventoryAsync; invoked on the caller's Handler.
    public interface QueryInventoryFinishedListener {
        /**
         * Called to notify that an inventory query operation completed.
         *
         * @param result The result of the operation.
         * @param inv The inventory.
         */
        public void onQueryInventoryFinished(IabResult result, Inventory inv);
    }
/**
* Asynchronous wrapper for inventory query. This will perform an inventory
* query as described in {@link #queryInventory}, but will do so asynchronously
* and call back the specified listener upon completion. This method is safe to
* call from a UI thread.
*
* @param querySkuDetails as in {@link #queryInventory}
* @param moreSkus as in {@link #queryInventory}
* @param listener The listener to notify when the refresh operation completes.
*/
public void queryInventoryAsync(final boolean querySkuDetails,
final List<String> moreSkus,
final QueryInventoryFinishedListener listener) {
final Handler handler = new Handler();
checkSetupDone("queryInventory");
flagStartAsync("refresh inventory");
(new Thread(new Runnable() {
public void run() {
IabResult result = new IabResult(BILLING_RESPONSE_RESULT_OK, "Inventory refresh successful.");
Inventory inv = null;
try {
inv = queryInventory(querySkuDetails, moreSkus);
}
catch (IabException ex) {
result = ex.getResult();
}
flagEndAsync();
final IabResult result_f = result;
final Inventory inv_f = inv;
handler.post(new Runnable() {
public void run() {
listener.onQueryInventoryFinished(result_f, inv_f);
}
});
}
})).start();
}
    // Convenience overload: queries SKU details, no additional SKUs.
    public void queryInventoryAsync(QueryInventoryFinishedListener listener) {
        queryInventoryAsync(true, null, listener);
    }
    // Convenience overload: no additional SKUs.
    public void queryInventoryAsync(boolean querySkuDetails, QueryInventoryFinishedListener listener) {
        queryInventoryAsync(querySkuDetails, null, listener);
    }
/**
* Consumes a given in-app product. Consuming can only be done on an item
* that's owned, and as a result of consumption, the user will no longer own it.
* This method may block or take long to return. Do not call from the UI thread.
* For that, see {@link #consumeAsync}.
*
* @param itemInfo The PurchaseInfo that represents the item to consume.
* @throws IabException if there is a problem during consumption.
*/
void consume(Purchase itemInfo) throws IabException {
checkSetupDone("consume");
try {
String token = itemInfo.getToken();
String sku = itemInfo.getSku();
if (token == null || token.equals("")) {
logError("Can't consume "+ sku + ". No token.");
throw new IabException(IABHELPER_MISSING_TOKEN, "PurchaseInfo is missing token for sku: "
+ sku + " " + itemInfo);
}
logDebug("Consuming sku: " + sku + ", token: " + token);
int response = mService.consumePurchase(3, mContext.getPackageName(), token);
if (response == BILLING_RESPONSE_RESULT_OK) {
logDebug("Successfully consumed sku: " + sku);
}
else {
logDebug("Error consuming consuming sku " + sku + ". " + getResponseDesc(response));
throw new IabException(response, "Error consuming sku " + sku);
}
}
catch (RemoteException e) {
throw new IabException(IABHELPER_REMOTE_EXCEPTION, "Remote exception while consuming. PurchaseInfo: " + itemInfo, e);
}
}
/**
* Callback that notifies when a consumption operation finishes.
*/
    // Implemented by callers of the single-item consumeAsync overload.
    public interface OnConsumeFinishedListener {
        /**
         * Called to notify that a consumption has finished.
         *
         * @param purchase The purchase that was (or was to be) consumed.
         * @param result The result of the consumption operation.
         */
        public void onConsumeFinished(Purchase purchase, IabResult result);
    }
/**
* Callback that notifies when a multi-item consumption operation finishes.
*/
    // Implemented by callers of the multi-item consumeAsync overload.
    public interface OnConsumeMultiFinishedListener {
        /**
         * Called to notify that a consumption of multiple items has finished.
         *
         * @param purchases The purchases that were (or were to be) consumed.
         * @param results The results of each consumption operation, corresponding to each
         *     sku.
         */
        public void onConsumeMultiFinished(List<Purchase> purchases, List<IabResult> results);
    }
/**
* Asynchronous wrapper to item consumption. Works like {@link #consume}, but
* performs the consumption in the background and notifies completion through
* the provided listener. This method is safe to call from a UI thread.
*
* @param purchase The purchase to be consumed.
* @param listener The listener to notify when the consumption operation finishes.
*/
public void consumeAsync(Purchase purchase, OnConsumeFinishedListener listener) {
checkSetupDone("consume");
List<Purchase> purchases = new ArrayList<Purchase>();
purchases.add(purchase);
consumeAsyncInternal(purchases, listener, null);
}
/**
* Same as {@link consumeAsync}, but for multiple items at once.
* @param purchases The list of PurchaseInfo objects representing the purchases to consume.
* @param listener The listener to notify when the consumption operation finishes.
*/
    public void consumeAsync(List<Purchase> purchases, OnConsumeMultiFinishedListener listener) {
        checkSetupDone("consume");
        // Multi-item variant: pass the multi listener, no single-item listener.
        consumeAsyncInternal(purchases, null, listener);
    }
/**
* Returns a human-readable description for the given response code.
*
* @param code The response code
* @return A human-readable string explaining the result code.
* It also includes the result code numerically.
*/
public static String getResponseDesc(int code) {
String[] iab_msgs = ("0:OK/1:User Canceled/2:Unknown/" +
"3:Billing Unavailable/4:Item unavailable/" +
"5:Developer Error/6:Error/7:Item Already Owned/" +
"8:Item not owned").split("/");
String[] iabhelper_msgs = ("0:OK/-1001:Remote exception during initialization/" +
"-1002:Bad response received/" +
"-1003:Purchase signature verification failed/" +
"-1004:Send intent failed/" +
"-1005:User cancelled/" +
"-1006:Unknown purchase response/" +
"-1007:Missing token/" +
"-1008:Unknown error").split("/");
if (code <= IABHELPER_ERROR_BASE) {
int index = IABHELPER_ERROR_BASE - code;
if (index >= 0 && index < iabhelper_msgs.length) return iabhelper_msgs[index];
else return String.valueOf(code) + ":Unknown IAB Helper Error";
}
else if (code < 0 || code >= iab_msgs.length)
return String.valueOf(code) + ":Unknown";
else
return iab_msgs[code];
}
// Checks that setup was done; if not, throws an exception.
void checkSetupDone(String operation) {
if (!mSetupDone) {
logError("Illegal state for operation (" + operation + "): IAB helper is not set up.");
throw new IllegalStateException("IAB helper is not set up. Can't perform operation: " + operation);
}
}
// Workaround to bug where sometimes response codes come as Long instead of Integer
int getResponseCodeFromBundle(Bundle b) {
Object o = b.get(RESPONSE_CODE);
if (o == null) {
logDebug("Bundle with null response code, assuming OK (known issue)");
return BILLING_RESPONSE_RESULT_OK;
}
else if (o instanceof Integer) return ((Integer)o).intValue();
else if (o instanceof Long) return (int)((Long)o).longValue();
else {
logError("Unexpected type for bundle response code.");
logError(o.getClass().getName());
throw new RuntimeException("Unexpected type for bundle response code: " + o.getClass().getName());
}
}
// Workaround to bug where sometimes response codes come as Long instead of Integer
int getResponseCodeFromIntent(Intent i) {
Object o = i.getExtras().get(RESPONSE_CODE);
if (o == null) {
logError("Intent with no response code, assuming OK (known issue)");
return BILLING_RESPONSE_RESULT_OK;
}
else if (o instanceof Integer) return ((Integer)o).intValue();
else if (o instanceof Long) return (int)((Long)o).longValue();
else {
logError("Unexpected type for intent response code.");
logError(o.getClass().getName());
throw new RuntimeException("Unexpected type for intent response code: " + o.getClass().getName());
}
}
void flagStartAsync(String operation) {
if (mAsyncInProgress) throw new IllegalStateException("Can't start async operation (" +
operation + ") because another async operation(" + mAsyncOperation + ") is in progress.");
mAsyncOperation = operation;
mAsyncInProgress = true;
logDebug("Starting async operation: " + operation);
}
    // Marks the end of the current asynchronous operation so a new one may start.
    public void flagEndAsync() {
        // Log before clearing so the operation name still appears in the message.
        logDebug("Ending async operation: " + mAsyncOperation);
        mAsyncOperation = "";
        mAsyncInProgress = false;
    }
/**
 * Queries all owned in-app purchases, following continuation tokens until
 * every page has been read, and records signature-verified purchases into
 * the given inventory.
 *
 * @param inv inventory to populate with owned purchases
 * @return BILLING_RESPONSE_RESULT_OK on success, a billing error code if the
 *         service call failed, IABHELPER_BAD_RESPONSE on a malformed bundle,
 *         or IABHELPER_VERIFICATION_FAILED if any signature check failed
 * @throws JSONException   if purchase JSON cannot be parsed
 * @throws RemoteException if the billing service call fails
 */
int queryPurchases(Inventory inv) throws JSONException, RemoteException {
    // Query purchases
    logDebug("Querying owned items...");
    logDebug("Package name: " + mContext.getPackageName());
    boolean verificationFailed = false;
    String continueToken = null;
    do {
        logDebug("Calling getPurchases with continuation token: " + continueToken);
        Bundle ownedItems = mService.getPurchases(3, mContext.getPackageName(),
                ITEM_TYPE_INAPP, continueToken);
        int response = getResponseCodeFromBundle(ownedItems);
        logDebug("Owned items response: " + String.valueOf(response));
        if (response != BILLING_RESPONSE_RESULT_OK) {
            logDebug("getPurchases() failed: " + getResponseDesc(response));
            return response;
        }
        if (!ownedItems.containsKey(RESPONSE_INAPP_ITEM_LIST)
                || !ownedItems.containsKey(RESPONSE_INAPP_PURCHASE_DATA_LIST)
                || !ownedItems.containsKey(RESPONSE_INAPP_SIGNATURE_LIST)) {
            logError("Bundle returned from getPurchases() doesn't contain required fields.");
            return IABHELPER_BAD_RESPONSE;
        }
        ArrayList<String> ownedSkus = ownedItems.getStringArrayList(
                RESPONSE_INAPP_ITEM_LIST);
        ArrayList<String> purchaseDataList = ownedItems.getStringArrayList(
                RESPONSE_INAPP_PURCHASE_DATA_LIST);
        ArrayList<String> signatureList = ownedItems.getStringArrayList(
                RESPONSE_INAPP_SIGNATURE_LIST);
        // Fix: a malformed response could carry null lists (wrong extra type) or
        // lists of different lengths; that previously crashed with an
        // IndexOutOfBoundsException instead of reporting a bad response.
        if (ownedSkus == null || purchaseDataList == null || signatureList == null
                || ownedSkus.size() != purchaseDataList.size()
                || signatureList.size() != purchaseDataList.size()) {
            logError("Bundle returned from getPurchases() contains inconsistent lists.");
            return IABHELPER_BAD_RESPONSE;
        }
        for (int i = 0; i < purchaseDataList.size(); ++i) {
            String purchaseData = purchaseDataList.get(i);
            String signature = signatureList.get(i);
            String sku = ownedSkus.get(i);
            if (Security.verifyPurchase(mSignatureBase64, purchaseData, signature)) {
                logDebug("Sku is owned: " + sku);
                Purchase purchase = new Purchase(purchaseData, signature);
                if (TextUtils.isEmpty(purchase.getToken())) {
                    logWarn("BUG: empty/null token!");
                    logDebug("Purchase data: " + purchaseData);
                }
                // Record ownership and token
                inv.addPurchase(purchase);
            }
            else {
                logWarn("Purchase signature verification **FAILED**. Not adding item.");
                logDebug("  Purchase data: " + purchaseData);
                logDebug("  Signature: " + signature);
                verificationFailed = true;
            }
        }
        continueToken = ownedItems.getString(INAPP_CONTINUATION_TOKEN);
        logDebug("Continuation token: " + continueToken);
    } while (!TextUtils.isEmpty(continueToken));
    return verificationFailed ? IABHELPER_VERIFICATION_FAILED : BILLING_RESPONSE_RESULT_OK;
}
/**
 * Queries SKU details for all owned SKUs plus any additional requested SKUs,
 * and records them into the inventory.
 *
 * Fix: getSkuDetails() only honors up to 20 SKUs per call and silently drops
 * the rest, so the SKU list is queried in batches of 20. Duplicate SKUs are
 * also skipped to avoid redundant service round-trips.
 *
 * @param inv      inventory to populate with SKU details
 * @param moreSkus additional SKUs to query beyond the owned ones; may be null
 * @return BILLING_RESPONSE_RESULT_OK on success, a billing error code, or
 *         IABHELPER_BAD_RESPONSE on a malformed bundle
 * @throws RemoteException if the billing service call fails
 * @throws JSONException   if SKU detail JSON cannot be parsed
 */
int querySkuDetails(Inventory inv, List<String> moreSkus) throws RemoteException, JSONException {
    logDebug("Querying SKU details.");
    ArrayList<String> skuList = new ArrayList<String>();
    skuList.addAll(inv.getAllOwnedSkus());
    if (moreSkus != null) {
        for (String sku : moreSkus) {
            if (!skuList.contains(sku)) skuList.add(sku); // avoid duplicate queries
        }
    }
    if (skuList.size() == 0) {
        logDebug("queryPrices: nothing to do because there are no SKUs.");
        return BILLING_RESPONSE_RESULT_OK;
    }
    // The service caps each request at 20 SKUs; query in batches.
    final int batchSize = 20;
    for (int start = 0; start < skuList.size(); start += batchSize) {
        int end = Math.min(start + batchSize, skuList.size());
        ArrayList<String> batch = new ArrayList<String>(skuList.subList(start, end));
        Bundle querySkus = new Bundle();
        querySkus.putStringArrayList(GET_SKU_DETAILS_ITEM_LIST, batch);
        Bundle skuDetails = mService.getSkuDetails(3, mContext.getPackageName(),
                ITEM_TYPE_INAPP, querySkus);
        if (!skuDetails.containsKey(RESPONSE_GET_SKU_DETAILS_LIST)) {
            int response = getResponseCodeFromBundle(skuDetails);
            if (response != BILLING_RESPONSE_RESULT_OK) {
                logDebug("getSkuDetails() failed: " + getResponseDesc(response));
                return response;
            }
            else {
                logError("getSkuDetails() returned a bundle with neither an error nor a detail list.");
                return IABHELPER_BAD_RESPONSE;
            }
        }
        ArrayList<String> responseList = skuDetails.getStringArrayList(
                RESPONSE_GET_SKU_DETAILS_LIST);
        for (String thisResponse : responseList) {
            SkuDetails d = new SkuDetails(thisResponse);
            logDebug("Got sku details: " + d);
            inv.addSkuDetails(d);
        }
    }
    return BILLING_RESPONSE_RESULT_OK;
}
/**
 * Consumes the given purchases on a worker thread and delivers results back
 * on the calling thread (via a Handler bound to the caller's looper).
 *
 * @param purchases      purchases to consume
 * @param singleListener if non-null, notified with the first purchase/result
 * @param multiListener  if non-null, notified with all purchases/results
 */
void consumeAsyncInternal(final List<Purchase> purchases,
                          final OnConsumeFinishedListener singleListener,
                          final OnConsumeMultiFinishedListener multiListener) {
    final Handler handler = new Handler();
    flagStartAsync("consume");
    (new Thread(new Runnable() {
        public void run() {
            final List<IabResult> results = new ArrayList<IabResult>();
            for (Purchase purchase : purchases) {
                try {
                    consume(purchase);
                    results.add(new IabResult(BILLING_RESPONSE_RESULT_OK, "Successful consume of sku " + purchase.getSku()));
                }
                catch (IabException ex) {
                    results.add(ex.getResult());
                }
            }
            flagEndAsync();
            // Fix: guard against an empty purchase list, which previously threw
            // IndexOutOfBoundsException on results.get(0)/purchases.get(0).
            if (singleListener != null && !purchases.isEmpty() && !results.isEmpty()) {
                handler.post(new Runnable() {
                    public void run() {
                        singleListener.onConsumeFinished(purchases.get(0), results.get(0));
                    }
                });
            }
            if (multiListener != null) {
                handler.post(new Runnable() {
                    public void run() {
                        multiListener.onConsumeMultiFinished(purchases, results);
                    }
                });
            }
        }
    })).start();
}
/** Logs a debug message, but only when debug logging is enabled. */
void logDebug(String msg) {
    if (!mDebugLog) return;
    Log.d(mDebugTag, msg);
}
/** Logs an error message; always emitted, regardless of mDebugLog. */
void logError(String msg) {
    Log.e(mDebugTag, "In-app billing error: " + msg);
}
/** Logs a warning message; always emitted, regardless of mDebugLog. */
void logWarn(String msg) {
    Log.w(mDebugTag, "In-app billing warning: " + msg);
}
}
| |
/*******************************************************************************
* Copyright 2018 Klaus Pfeiffer - klaus@allpiper.com
* <p/>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.jfastnet.processors;
import com.jfastnet.*;
import com.jfastnet.messages.Message;
import com.jfastnet.messages.RequestSeqIdsMessage;
import com.jfastnet.util.NullsafeHashMap;
import lombok.Getter;
import lombok.Setter;
import lombok.experimental.Accessors;
import lombok.extern.slf4j.Slf4j;
import java.util.*;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;
/**
 * Enforces in-order delivery for messages sent with reliable mode
 * SEQUENCE_NUMBER: duplicates are discarded, out-of-order messages are held
 * back until the gap is filled, and ids detected as missing are periodically
 * re-requested from the sender.
 *
 * Must be thread-safe.
 *
 * @author Klaus Pfeiffer - klaus@allpiper.com
 */
@Slf4j
public class ReliableModeSequenceProcessor extends AbstractMessageProcessor<ReliableModeSequenceProcessor.ProcessorConfig> implements ISimpleProcessable, IMessageReceiverPreProcessor, IMessageSenderPostProcessor, IServerHooks {

    // Shared fallback for clients without an entry yet; read-only by convention.
    // NOTE(review): nothing prevents a caller from mutating it — confirm.
    private static final AtomicLong ZERO_ATOMIC_LONG = new AtomicLong();

    /** Key: client id; Value: last received message id. */
    // NOTE(review): plain HashMap although the class is documented as
    // "must be thread-safe" — confirm all access is serialized per client
    // via clientLockMap, or consider ConcurrentHashMap.
    @Getter private final Map<Integer, AtomicLong> lastMessageIdMap = new HashMap<>();

    /** Key: client id; Value: message ids detected as missing for that client. */
    private final Map<Integer, Set<Long>> absentMessageIds = new NullsafeHashMap<Integer, Set<Long>>() {
        @Override
        protected Set<Long> newInstance() {
            // Copy-on-write: the set is iterated while other threads add/remove ids.
            return new CopyOnWriteArraySet<>();
        }
    };

    /** Key: client id; Value: out-of-order messages parked until the gap closes. */
    private final Map<Integer, List<Message>> heldBackMessages = new NullsafeHashMap<Integer, List<Message>>() {
        @Override
        protected List<Message> newInstance() {
            return new ArrayList<>();
        }
    };

    /** Key: client id; Value: lock serializing sequence handling for that client. */
    private final Map<Integer, ReentrantLock> clientLockMap = new NullsafeHashMap<Integer, ReentrantLock>() {
        @Override
        protected ReentrantLock newInstance() {
            return new ReentrantLock();
        }
    };

    // Timestamp (per config.timeProvider) of the last missing-id request sweep.
    private long lastCheck;

    /** Set to true when we receive an out-of-order message. */
    private volatile boolean outOfSync;

    public ReliableModeSequenceProcessor(Config config, State state) {
        super(config, state);
    }

    /** Drops all per-client sequence state when a client unregisters. */
    @Override
    public void onUnregister(int clientId) {
        lastMessageIdMap.remove(clientId);
        absentMessageIds.remove(clientId);
        heldBackMessages.remove(clientId);
    }

    /**
     * Periodic tick: replays held-back messages that have become deliverable
     * and, at most once per requestMissingIdsIntervalMs, re-requests ids that
     * are still missing.
     */
    @Override
    public void process() {
        if (heldBackMessages.size() > 0) {
            // NOTE(review): iterating a HashMap that onUnregister() may modify
            // concurrently risks ConcurrentModificationException — confirm.
            for (Map.Entry<Integer, List<Message>> entry : heldBackMessages.entrySet()) {
                Integer clientId = entry.getKey();
                ReentrantLock lock = clientLockMap.get(clientId);
                // tryLock: skip a busy client rather than stall the whole tick.
                if (lock.tryLock()) {
                    try {
                        List<Message> messages = entry.getValue();
                        Long lastMsgId = lastMessageIdMap.getOrDefault(clientId, ZERO_ATOMIC_LONG).get();
                        if (messages != null && !messages.isEmpty()) {
                            long expectedMessageId = lastMsgId + 1;
                            Collections.sort(messages);
                            // catch up with held back messages
                            Set<Message> removes = new HashSet<>();
                            for (int i = 0; i < messages.size(); i++) {
                                Message message = messages.get(i);
                                if (message.getMsgId() == expectedMessageId) {
                                    log.trace("Catch up with {}", message);
                                    // lastMessageId gets set in receive
                                    config.internalReceiver.receive(message);
                                    expectedMessageId++;
                                    removes.add(message);
                                }
                            }
                            messages.removeAll(removes);
                        }
                    } finally {
                        lock.unlock();
                    }
                }
            }
        }
        long currentTime = config.timeProvider.get();
        if (currentTime > lastCheck + processorConfig.requestMissingIdsIntervalMs) {
            lastCheck = currentTime;
            for (Map.Entry<Integer, Set<Long>> entry : absentMessageIds.entrySet()) {
                if (entry.getValue().size() > 0) {
                    Integer clientId = entry.getKey();
                    // 0 == no upper bound on the requested ids
                    requestAbsentIds(clientId, absentMessageIds.get(clientId), 0);
                }
            }
        }
    }

    /** Clears the given message id from the sender's missing-id set. Always returns true. */
    private boolean addReceivedMessage(MessageKey key) {
        absentMessageIds.get(key.clientId).remove(key.messageId);
        return true;
    }

    /**
     * Pre-receive hook. For SEQUENCE_NUMBER messages: discards duplicates
     * (returns null), holds back out-of-order messages (returns null), or
     * passes the message through when it is the next expected id. Other
     * reliable modes pass through untouched.
     *
     * @param message the received message
     * @return the message to continue processing, or null to swallow it
     */
    @Override
    public Message beforeReceive(Message message) {
        if (Message.ReliableMode.SEQUENCE_NUMBER.equals(message.getReliableMode())) {
            int senderId = message.getSenderId();
            ReentrantLock lock = clientLockMap.get(senderId);
            lock.lock();
            try {
                MessageKey key = MessageKey.newKey(Message.ReliableMode.SEQUENCE_NUMBER, senderId, message.getMsgId());
                addReceivedMessage(key);
                Long lastMsgId = lastMessageIdMap.getOrDefault(senderId, ZERO_ATOMIC_LONG).get();
                if (message.getMsgId() <= lastMsgId) {
                    // Discard old messages - don't handle already received messages.
                    return null;
                }
                List<Message> clientHeldBackMessages = heldBackMessages.get(senderId);
                if (!handleReceivedMessage(key)) {
                    // Don't handle out of order messages yet
                    log.trace("Last received message: {}", message);
                    clientHeldBackMessages.add(message);
                    return null;
                }
                // Drop any parked messages this one has made obsolete.
                clientHeldBackMessages.removeIf(heldBackMsg -> heldBackMsg.getMsgId() <= message.getMsgId());
                return message;
            } finally {
                lock.unlock();
            }
        }
        return message;
    }

    /**
     * Advances the per-client sequence if the message is the expected next id.
     * If the id is ahead of the expected one, records the intermediate ids as
     * missing, replays any parked messages that are now deliverable, and (once
     * per out-of-sync episode) requests the missing ids.
     *
     * @param key client id / message id pair
     * @return true if the message is in order and may be processed now
     */
    private boolean handleReceivedMessage(MessageKey key) {
        // msgId has to be sequential in this case
        int clientId = key.clientId;
        long messageId = key.messageId;
        Set<Long> clientAbsentMessageIds = absentMessageIds.get(clientId);
        if (!clientAbsentMessageIds.contains(messageId)) {
            AtomicLong lastMsgIdAtomicLong = lastMessageIdMap.get(clientId);
            if (lastMsgIdAtomicLong == null) {
                lastMsgIdAtomicLong = new AtomicLong();
                lastMessageIdMap.put(clientId, lastMsgIdAtomicLong);
            }
            Long lastMsgId = lastMsgIdAtomicLong.get();
            long expectedMessageId = lastMsgId + 1;
            if (messageId == expectedMessageId) {
                // In order: advance the sequence.
                lastMsgIdAtomicLong.incrementAndGet();
                outOfSync = false;
                return true;
            } else if (messageId > expectedMessageId) {
                // Gap detected: every id between expected and received that is
                // not already parked is considered missing.
                List<Message> clientHeldBackMessages = new ArrayList<>(heldBackMessages.get(clientId));
                for (long i = expectedMessageId; i < messageId; i++) {
                    boolean hasIt = false;
                    for (Message clientHeldBackMessage : clientHeldBackMessages) {
                        if (i == clientHeldBackMessage.getMsgId()) {
                            hasIt = true;
                            break;
                        }
                    }
                    if (!hasIt) {
                        clientAbsentMessageIds.add(i);
                    }
                }
                Collections.sort(clientHeldBackMessages);
                // catch up with held back messages
                Set<Message> removes = new HashSet<>();
                for (int i = 0; i < clientHeldBackMessages.size(); i++) {
                    Message heldBackMsg = clientHeldBackMessages.get(i);
                    if (heldBackMsg.getMsgId() == (lastMsgIdAtomicLong.get() + 1)) {
                        log.trace("Catch up with {}", heldBackMsg);
                        config.internalReceiver.receive(heldBackMsg);
                        removes.add(heldBackMsg);
                    } else if (heldBackMsg.getMsgId() < (lastMsgIdAtomicLong.get() + 1)) {
                        // Already superseded: just discard.
                        removes.add(heldBackMsg);
                    } else {
                        // Sorted list: nothing later can match either.
                        break;
                    }
                }
                heldBackMessages.get(clientId).removeAll(removes);
                if (!outOfSync) {
                    // skipped message
                    log.warn("Skipped received message id: {}, last messaged id was: {}", new Object[]{messageId, lastMsgId});
                    if (clientAbsentMessageIds.size() > 0) {
                        requestAbsentIds(clientId, clientAbsentMessageIds, messageId);
                    }
                    outOfSync = true;
                }
            }
        }
        return false;
    }

    /** Sends a request to peer id to request missing messages.
     * @param peerId peer id that gets requested
     * @param clientAbsentMessageIds missing message ids
     * @param maxId maximum id of message id that gets requested. 0 for no maximum id.
     */
    private void requestAbsentIds(int peerId, Set<Long> clientAbsentMessageIds, long maxId) {
        List<Long> requestIdsTmp = new ArrayList<>(clientAbsentMessageIds);
        Collections.sort(requestIdsTmp);
        List<Long> requestIds = new ArrayList<>();
        // request at most X ids
        for (int i = 0; i < Math.min(processorConfig.maximumMissingIdsRequestCount, requestIdsTmp.size()); i++) {
            Long id = requestIdsTmp.get(i);
            if (maxId != 0L && id > maxId) {
                break;
            }
            requestIds.add(id);
        }
        if (requestIds.size() <= 0) {
            return;
        }
        RequestSeqIdsMessage requestSeqIdsMessage = new RequestSeqIdsMessage(requestIds, peerId);
        config.internalSender.send(requestSeqIdsMessage);
        config.netStats.requestedMissingMessages.addAndGet(clientAbsentMessageIds.size());
    }

    /** Post-send hook: counts sent SEQUENCE_NUMBER messages in the net stats. */
    @Override
    public Message afterSend(Message message) {
        if (Message.ReliableMode.SEQUENCE_NUMBER.equals(message.getReliableMode())) {
            log.trace("afterSend: id: {}, msg: {}", message.getMsgId(), message);
            config.netStats.sentMessages.incrementAndGet();
        }
        return message;
    }

    @Override
    public Class<ProcessorConfig> getConfigClass() {
        return ProcessorConfig.class;
    }

    /** Tunables for this processor. */
    @Setter @Getter
    @Accessors(chain = true)
    public static class ProcessorConfig {
        /** Maximum number of ids to request when not in sync anymore. */
        public int maximumMissingIdsRequestCount = 5;
        /** Interval in milliseconds after which missing ids get requested. */
        public int requestMissingIdsIntervalMs = 500;
    }
}
| |
/*
* ARX: Powerful Data Anonymization
* Copyright 2012 - 2016 Fabian Prasser, Florian Kohlmayer and contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.deidentifier.arx.gui.view.impl.common;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import org.eclipse.swt.SWT;
import org.eclipse.swt.SWTException;
import org.eclipse.swt.accessibility.ACC;
import org.eclipse.swt.accessibility.Accessible;
import org.eclipse.swt.accessibility.AccessibleAdapter;
import org.eclipse.swt.accessibility.AccessibleControlAdapter;
import org.eclipse.swt.accessibility.AccessibleControlEvent;
import org.eclipse.swt.accessibility.AccessibleEvent;
import org.eclipse.swt.events.PaintEvent;
import org.eclipse.swt.events.PaintListener;
import org.eclipse.swt.events.TraverseEvent;
import org.eclipse.swt.events.TraverseListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.ImageLoader;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.graphics.TextLayout;
import org.eclipse.swt.widgets.Canvas;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
/**
* A label that can display animated GIFs.
* Adapted from https://stackoverflow.com/questions/6896632/is-there-a-custom-label-widget-which-supports-animated-gif
*/
public class ComponentStatusLabel extends Canvas {
/** Gap in pixels between icon and text. */
private static final int GAP = 5;
/** Default margin in pixels applied on all four sides. */
private static final int DEFAULT_MARGIN = 3;
/** A string inserted in the middle of text that has been shortened. */
private static final String ELLIPSIS = "..."; //$NON-NLS-1$
/** The alignment. Either CENTER, RIGHT, LEFT. Default is LEFT */
private int align = SWT.LEFT;
/** Left margin in pixels. */
private int leftMargin = DEFAULT_MARGIN;
/** Top margin in pixels. */
private int topMargin = DEFAULT_MARGIN;
/** Right margin in pixels. */
private int rightMargin = DEFAULT_MARGIN;
/** Bottom margin in pixels. */
private int bottomMargin = DEFAULT_MARGIN;
/** Text to display; may be null. */
private String text;
/** Icon (or current GIF frame) to display; may be null. */
private Image image;
/** Tool tip text explicitly set by the application, if any. */
private String appToolTipText;
/** Optional progress provider set via setProgressProvider(). */
private ComponentStatusLabelProgressProvider progressProvider;
/** Guard ensuring onDispose() runs its cleanup only once. */
private boolean ignoreDispose;
/** Image tiled behind the content, if a background image is set. */
private Image backgroundImage;
/** Gradient colors for the background, if a gradient is set. */
private Color[] gradientColors;
/** Gradient stop percentages (0-100); one fewer entry than gradientColors. */
private int[] gradientPercents;
/** Whether the background gradient runs vertically. */
private boolean gradientVertical;
/** Plain background color, if set. */
private Color background;
/** Animation handler for animated GIFs; null when no animation is active. */
private ComponentStatusLabelGIFHandler thread = null;
/** Flags passed to GC text measurement/drawing operations. */
private static int DRAW_FLAGS = SWT.DRAW_MNEMONIC | SWT.DRAW_TAB | SWT.DRAW_TRANSPARENT |
                                SWT.DRAW_DELIMITER;
/**
 * Normalizes constructor style bits: BORDER implies SHADOW_IN, only the
 * shadow and orientation bits are retained, and the widget is always
 * non-focusable and double-buffered.
 *
 * @param style raw style bits passed by the caller
 * @return the normalized style bits
 */
private static int checkStyle(int style) {
    if ((style & SWT.BORDER) != 0) {
        style |= SWT.SHADOW_IN;
    }
    final int mask = SWT.SHADOW_IN | SWT.SHADOW_OUT | SWT.SHADOW_NONE | SWT.LEFT_TO_RIGHT | SWT.RIGHT_TO_LEFT;
    return (style & mask) | SWT.NO_FOCUS | SWT.DOUBLE_BUFFERED;
}
/**
 * Creates a status label.
 *
 * @param parent the parent composite
 * @param style  SWT style bits; LEFT/CENTER/RIGHT choose the alignment,
 *               BORDER/SHADOW_IN/SHADOW_OUT choose the border rendering
 */
public ComponentStatusLabel(Composite parent, int style) {
    super(parent, checkStyle(style));
    // Default to LEFT when no alignment bit was supplied.
    if ((style & (SWT.CENTER | SWT.RIGHT)) == 0) style |= SWT.LEFT;
    if ((style & SWT.CENTER) != 0) align = SWT.CENTER;
    if ((style & SWT.RIGHT) != 0) align = SWT.RIGHT;
    if ((style & SWT.LEFT) != 0) align = SWT.LEFT;
    addPaintListener(new PaintListener() {
        public void paintControl(PaintEvent event) {
            onPaint(event);
        }
    });
    // Handle mnemonic key traversal (Alt+<mnemonic char>).
    addTraverseListener(new TraverseListener() {
        public void keyTraversed(TraverseEvent event) {
            if (event.detail == SWT.TRAVERSE_MNEMONIC) {
                onMnemonic(event);
            }
        }
    });
    addListener(SWT.Dispose, new Listener() {
        public void handleEvent(Event event) {
            onDispose(event);
        }
    });
    initAccessible();
}
/**
 * Computes the preferred size: the combined image+text extent plus margins,
 * with an explicit hint overriding the corresponding dimension.
 */
public Point computeSize(int wHint, int hHint, boolean changed) {
    checkWidget();
    Point size = getTotalSize(image, text);
    size.x = (wHint == SWT.DEFAULT) ? size.x + leftMargin + rightMargin : wHint;
    size.y = (hHint == SWT.DEFAULT) ? size.y + topMargin + bottomMargin : hHint;
    return size;
}
@Override
public void dispose() {
    // Delegates entirely to Canvas; label state is reset in onDispose(),
    // which runs via the SWT.Dispose listener.
    super.dispose();
}
/**
 * Returns the horizontal alignment of the content.
 *
 * @return one of SWT.LEFT, SWT.CENTER or SWT.RIGHT
 */
public int getAlignment() {
    // checkWidget() deliberately omitted, as in the other simple getters.
    return align;
}
/**
 * Returns the bottom margin in pixels.
 *
 * @return the bottom margin
 */
public int getBottomMargin() {
    // checkWidget() deliberately omitted, as in the other simple getters.
    return bottomMargin;
}
/**
 * Returns the displayed image (or current GIF frame).
 *
 * @return the image, or null if none is set
 */
public Image getImage() {
    // checkWidget() deliberately omitted, as in the other simple getters.
    return image;
}
/**
 * Returns the left margin in pixels.
 *
 * @return the left margin
 */
public int getLeftMargin() {
    // checkWidget() deliberately omitted, as in the other simple getters.
    return leftMargin;
}
/**
 * Returns the right margin in pixels.
 *
 * @return the right margin
 */
public int getRightMargin() {
    // checkWidget() deliberately omitted, as in the other simple getters.
    return rightMargin;
}
/**
 * Returns the widget style with the current alignment bit merged in.
 */
public int getStyle() {
    int style = super.getStyle();
    if (align == SWT.RIGHT) {
        style |= SWT.RIGHT;
    } else if (align == SWT.CENTER) {
        style |= SWT.CENTER;
    } else if (align == SWT.LEFT) {
        style |= SWT.LEFT;
    }
    return style;
}
/**
 * Returns the displayed text.
 *
 * @return the text, or null if none is set
 */
public String getText() {
    // checkWidget() deliberately omitted, as in the other simple getters.
    return text;
}
/**
 * Returns the application-set tool tip text. This may differ from the
 * control's current tool tip, which onPaint() temporarily replaces with the
 * full text when the label is shortened.
 */
public String getToolTipText() {
    checkWidget();
    return appToolTipText;
}
/**
 * Returns the top margin in pixels.
 *
 * @return the top margin
 */
public int getTopMargin() {
    // checkWidget() deliberately omitted, as in the other simple getters.
    return topMargin;
}
/**
 * Sets the horizontal alignment of the content.
 *
 * @param align one of SWT.LEFT, SWT.RIGHT or SWT.CENTER
 */
public void setAlignment(int align) {
    checkWidget();
    boolean valid = (align == SWT.LEFT) || (align == SWT.RIGHT) || (align == SWT.CENTER);
    if (!valid) {
        SWT.error(SWT.ERROR_INVALID_ARGUMENT);
    }
    if (align != this.align) {
        this.align = align;
        redraw();
    }
}
/**
 * Sets a plain background color, clearing any background image or gradient.
 * Redraws only when the effective background actually changes.
 */
public void setBackground(Color color) {
    super.setBackground(color);
    // When no image/gradient is active and the color is unchanged, skip the redraw.
    boolean plainBackground = backgroundImage == null && gradientColors == null && gradientPercents == null;
    if (plainBackground) {
        boolean unchanged = (color == null) ? (background == null) : color.equals(background);
        if (unchanged) return;
    }
    background = color;
    backgroundImage = null;
    gradientColors = null;
    gradientPercents = null;
    redraw();
}
/**
 * Sets a horizontal background gradient. Convenience overload for
 * {@link #setBackground(Color[], int[], boolean)} with vertical = false.
 *
 * @param colors   gradient colors
 * @param percents gradient stop percentages; one fewer entry than colors
 */
public void setBackground(Color[] colors, int[] percents) {
    setBackground(colors, percents, false);
}
/**
 * Sets a background gradient. Validates the arguments, degrades to a solid
 * color on low-depth displays, skips the redraw when the settings are
 * unchanged, and otherwise stores defensive copies of the arrays.
 *
 * @param colors   gradient colors; null clears the gradient
 * @param percents gradient stop percentages (0-100, ascending); must have one
 *                 fewer entry than colors
 * @param vertical true for a vertical gradient, false for horizontal
 */
public void setBackground(Color[] colors, int[] percents, boolean vertical) {
    checkWidget();
    if (colors != null) {
        if (percents == null || percents.length != colors.length - 1) {
            SWT.error(SWT.ERROR_INVALID_ARGUMENT);
        }
        if (getDisplay().getDepth() < 15) {
            // Don't use gradients on low color displays
            colors = new Color[] { colors[colors.length - 1] };
            percents = new int[] {};
        }
        for (int i = 0; i < percents.length; i++) {
            // Each stop must be a valid percentage and non-decreasing.
            if (percents[i] < 0 || percents[i] > 100) {
                SWT.error(SWT.ERROR_INVALID_ARGUMENT);
            }
            if (i > 0 && percents[i] < percents[i - 1]) {
                SWT.error(SWT.ERROR_INVALID_ARGUMENT);
            }
        }
    }
    // Are these settings the same as before?
    final Color background = getBackground();
    if (backgroundImage == null) {
        if ((gradientColors != null) && (colors != null) && (gradientColors.length == colors.length)) {
            boolean same = false;
            for (int i = 0; i < gradientColors.length; i++) {
                // A null entry stands for the widget background; treat the two
                // representations as equal.
                same = (gradientColors[i] == colors[i]) ||
                       ((gradientColors[i] == null) && (colors[i] == background)) ||
                       ((gradientColors[i] == background) && (colors[i] == null));
                if (!same) break;
            }
            if (same) {
                for (int i = 0; i < gradientPercents.length; i++) {
                    same = gradientPercents[i] == percents[i];
                    if (!same) break;
                }
            }
            if (same && this.gradientVertical == vertical) return;
        }
    } else {
        backgroundImage = null;
    }
    // Store the new settings
    if (colors == null) {
        gradientColors = null;
        gradientPercents = null;
        gradientVertical = false;
    } else {
        gradientColors = new Color[colors.length];
        for (int i = 0; i < colors.length; ++i)
            gradientColors[i] = (colors[i] != null) ? colors[i] : background;
        gradientPercents = new int[percents.length];
        for (int i = 0; i < percents.length; ++i)
            gradientPercents[i] = percents[i];
        gradientVertical = vertical;
    }
    // Refresh with the new settings
    redraw();
}
/**
 * Sets a background image, clearing any background gradient.
 *
 * @param image the image to tile behind the content, or null
 */
public void setBackground(Image image) {
    checkWidget();
    if (backgroundImage == image) return;
    if (image != null) {
        gradientColors = null;
        gradientPercents = null;
    }
    backgroundImage = image;
    redraw();
}
/**
 * Sets the bottom margin in pixels; negative values and no-op changes are ignored.
 */
public void setBottomMargin(int bottomMargin) {
    checkWidget();
    if (bottomMargin < 0 || bottomMargin == this.bottomMargin) return;
    this.bottomMargin = bottomMargin;
    redraw();
}
/** Sets the font and repaints, since the text extent depends on it. */
public void setFont(Font font) {
    super.setFont(font);
    redraw();
}
/**
 * Loads an image (potentially an animated GIF) from the given stream and
 * displays it. A previously running animation is stopped first; on load
 * failure the current image is simply cleared (best effort).
 *
 * NOTE(review): the stream is not closed here — callers are responsible for
 * closing it. NOTE(review): loader.data[0] is accessed without an emptiness
 * check — confirm ImageLoader.load always yields at least one frame or throws.
 *
 * @param inputStream stream containing the image data
 */
public void setGIF(InputStream inputStream) {
    checkWidget();
    if (thread != null) {
        // Stop the running animation and cancel its pending timerExec callback.
        thread.stop();
        this.getDisplay().timerExec(-1, thread);
    }
    ImageLoader loader = new ImageLoader();
    try {
        loader.load(inputStream);
    } catch (Exception e) {
        // Unparseable data: clear the image instead of propagating.
        this.image = null;
        return;
    }
    if (loader.data[0] != null) this.image = new Image(this.getDisplay(), loader.data[0]);
    if (loader.data.length > 1) {
        // More than one frame: hand the frames to the animation handler.
        thread = new ComponentStatusLabelGIFHandler(this, loader);
        thread.run();
    }
    redraw();
}
/**
 * Loads and displays a (potentially animated) GIF from a file path. On
 * failure the current image is cleared.
 *
 * Fix: the FileInputStream was previously never closed (setGIF(InputStream)
 * does not close it either), leaking a file handle per call.
 *
 * @param path path of the GIF file
 */
public void setGIF(String path) {
    InputStream stream = null;
    try {
        stream = new FileInputStream(new File(path));
        this.setGIF(stream);
    } catch (FileNotFoundException e) {
        this.image = null;
    } finally {
        if (stream != null) {
            try {
                stream.close();
            } catch (IOException ignored) {
                // best-effort close; the image data is already loaded
            }
        }
    }
}
/**
 * Replaces the displayed image (used by the GIF animation handler to show
 * the next frame) and repaints.
 *
 * @param image the new image to display
 */
public void updateImage(Image image) {
    this.image = image;
    redraw();
}
/**
 * Sets a static image to display, stopping any running GIF animation first.
 *
 * @param image the image to display, or null
 */
public void setImage(Image image) {
    checkWidget();
    if (thread != null) {
        // Stop the animation and cancel its pending timerExec callback.
        thread.stop();
        getDisplay().timerExec(-1, thread);
    }
    if (this.image != image) {
        this.image = image;
        redraw();
    }
}
/**
 * Sets the left margin in pixels; negative values and no-op changes are ignored.
 */
public void setLeftMargin(int leftMargin) {
    checkWidget();
    if (leftMargin < 0 || leftMargin == this.leftMargin) return;
    this.leftMargin = leftMargin;
    redraw();
}
/**
 * Sets all four margins at once; negative values are clamped to zero.
 *
 * @param leftMargin   left margin in pixels
 * @param topMargin    top margin in pixels
 * @param rightMargin  right margin in pixels
 * @param bottomMargin bottom margin in pixels
 */
public void setMargins(int leftMargin, int topMargin, int rightMargin, int bottomMargin) {
    checkWidget();
    this.leftMargin = Math.max(leftMargin, 0);
    this.topMargin = Math.max(topMargin, 0);
    this.rightMargin = Math.max(rightMargin, 0);
    this.bottomMargin = Math.max(bottomMargin, 0);
    redraw();
}
/**
 * Sets a progress provider, if any.
 *
 * @param provider the provider to use, or null to disable progress display
 */
public void setProgressProvider(ComponentStatusLabelProgressProvider provider) {
    this.progressProvider = provider;
}
/**
 * Sets the right margin in pixels; negative values and no-op changes are ignored.
 */
public void setRightMargin(int rightMargin) {
    checkWidget();
    if (rightMargin < 0 || rightMargin == this.rightMargin) return;
    this.rightMargin = rightMargin;
    redraw();
}
/**
 * Sets the text to display; null is treated as the empty string. Repaints
 * only when the text actually changes.
 *
 * @param text the text to display, or null
 */
public void setText(String text) {
    checkWidget();
    String newText = (text == null) ? "" : text; //$NON-NLS-1$
    if (!newText.equals(this.text)) {
        this.text = newText;
        redraw();
    }
}
/**
 * Sets the tool tip text and remembers it as the application-set value, so
 * painting can distinguish it from the automatic full-text tool tip shown
 * when the label is shortened.
 */
public void setToolTipText(String string) {
    super.setToolTipText(string);
    appToolTipText = super.getToolTipText();
}
/**
 * Sets the top margin in pixels; negative values and no-op changes are ignored.
 */
public void setTopMargin(int topMargin) {
    checkWidget();
    if (topMargin < 0 || topMargin == this.topMargin) return;
    this.topMargin = topMargin;
    redraw();
}
/**
 * Draws a beveled rectangle outline: top/left edges in one color, the
 * bottom/right edges in another, giving a raised or sunken appearance
 * depending on which color is lighter.
 *
 * @param gc          graphics context to draw on
 * @param x           left coordinate
 * @param y           top coordinate
 * @param w           width
 * @param h           height
 * @param topleft     color of the top and left edges
 * @param bottomright color of the bottom and right edges
 */
private void drawBevelRect(GC gc, int x, int y, int w, int h, Color topleft, Color bottomright) {
    // Bottom and right edges first.
    gc.setForeground(bottomright);
    gc.drawLine(x + w, y, x + w, y + h);
    gc.drawLine(x, y + h, x + w, y + h);
    // Then top and left edges.
    gc.setForeground(topleft);
    gc.drawLine(x, y, x + w - 1, y);
    gc.drawLine(x, y, x, y + h - 1);
}
/**
 * Computes the combined extent of the given image and text, including the
 * GAP between them when both are present. With no text, the height is at
 * least one font line.
 *
 * @param image image to measure, or null
 * @param text  text to measure, or null/empty
 * @return the combined size in pixels
 */
private Point getTotalSize(Image image, String text) {
    Point size = new Point(0, 0);
    if (image != null) {
        Rectangle bounds = image.getBounds();
        size.x += bounds.width;
        size.y += bounds.height;
    }
    GC gc = new GC(this);
    boolean hasText = (text != null) && (text.length() > 0);
    if (hasText) {
        Point textExtent = gc.textExtent(text, DRAW_FLAGS);
        size.x += textExtent.x;
        size.y = Math.max(size.y, textExtent.y);
        if (image != null) {
            size.x += GAP;
        }
    } else {
        size.y = Math.max(size.y, gc.getFontMetrics().getHeight());
    }
    gc.dispose();
    return size;
}
/**
 * Registers accessibility callbacks so screen readers report this canvas as
 * a read-only label with the current text, tool tip and mnemonic.
 */
private void initAccessible() {
    Accessible accessible = getAccessible();
    accessible.addAccessibleListener(new AccessibleAdapter() {
        public void getHelp(AccessibleEvent e) {
            // Expose the tool tip as the help text.
            e.result = getToolTipText();
        }
        public void getKeyboardShortcut(AccessibleEvent e) {
            char mnemonic = _findMnemonic(ComponentStatusLabel.this.text);
            if (mnemonic != '\0') {
                e.result = "Alt+" + mnemonic; //$NON-NLS-1$
            }
        }
        public void getName(AccessibleEvent e) {
            e.result = getText();
        }
    });
    accessible.addAccessibleControlListener(new AccessibleControlAdapter() {
        public void getChildAtPoint(AccessibleControlEvent e) {
            // The label has no accessible children.
            e.childID = ACC.CHILDID_SELF;
        }
        public void getChildCount(AccessibleControlEvent e) {
            e.detail = 0;
        }
        public void getLocation(AccessibleControlEvent e) {
            // Report bounds in display coordinates.
            Rectangle rect = getDisplay().map(getParent(), null, getBounds());
            e.x = rect.x;
            e.y = rect.y;
            e.width = rect.width;
            e.height = rect.height;
        }
        public void getRole(AccessibleControlEvent e) {
            e.detail = ACC.ROLE_LABEL;
        }
        public void getState(AccessibleControlEvent e) {
            e.detail = ACC.STATE_READONLY;
        }
    });
}
/**
 * Paints the SHADOW_IN / SHADOW_OUT border, if one of those styles is set.
 *
 * @param gc graphics context to draw on
 * @param r  client area rectangle
 */
private void paintBorder(GC gc, Rectangle r) {
    Display display = getDisplay();
    int style = getStyle();
    Color topleft = null;
    Color bottomright = null;
    if ((style & SWT.SHADOW_IN) != 0) {
        topleft = display.getSystemColor(SWT.COLOR_WIDGET_NORMAL_SHADOW);
        bottomright = display.getSystemColor(SWT.COLOR_WIDGET_HIGHLIGHT_SHADOW);
    }
    if ((style & SWT.SHADOW_OUT) != 0) {
        topleft = display.getSystemColor(SWT.COLOR_WIDGET_LIGHT_SHADOW);
        bottomright = display.getSystemColor(SWT.COLOR_WIDGET_NORMAL_SHADOW);
    }
    if (topleft != null && bottomright != null) {
        gc.setLineWidth(1);
        drawBevelRect(gc, r.x, r.y, r.width - 1, r.height - 1, topleft, bottomright);
    }
}
/**
 * Splits text into lines on '\n', swallowing a '\r' that immediately
 * precedes a '\n' (CRLF). A trailing newline yields a trailing empty line,
 * and a lone trailing '\r' (no following '\n') is kept — both matching the
 * previous manual implementation.
 *
 * Fix: the old loop grew the result array by copying it once per line
 * (O(n^2) in the number of lines); String.split does this in one pass.
 *
 * @param text text to split; must not be null
 * @return the individual lines, at least one element
 */
private String[] splitString(String text) {
    // limit -1 keeps trailing empty strings, as the old splitter did.
    String[] lines = text.split("\n", -1);
    // Strip a '\r' only where it preceded a '\n' — i.e. on every segment
    // except the last, which had no '\n' after it.
    for (int i = 0; i < lines.length - 1; i++) {
        String line = lines[i];
        if (line.endsWith("\r")) {
            lines[i] = line.substring(0, line.length() - 1);
        }
    }
    return lines;
}
/**
 * Shortens the given text so it fits within the given width by removing
 * characters from the middle and inserting ELLIPSIS, using a binary search
 * over the kept-prefix/suffix length. Offsets are validated against the
 * TextLayout so multi-char clusters are not split.
 *
 * @param gc    graphics context used to measure text
 * @param t     text to shorten; may be null
 * @param width available width in pixels
 * @return the shortened text, or the original if it already fits (or t is null)
 */
protected String shortenText(GC gc, String t, int width) {
    if (t == null) return null;
    int w = gc.textExtent(ELLIPSIS, DRAW_FLAGS).x;
    // Not even the ellipsis fits: give up and return the text unchanged.
    if (width <= w) return t;
    int l = t.length();
    int max = l / 2;
    int min = 0;
    int mid = (max + min) / 2 - 1;
    if (mid <= 0) return t;
    TextLayout layout = new TextLayout(getDisplay());
    layout.setText(t);
    mid = validateOffset(layout, mid);
    // Binary search for the largest prefix+suffix that fits with the ellipsis.
    while (min < mid && mid < max) {
        String s1 = t.substring(0, mid);
        String s2 = t.substring(validateOffset(layout, l - mid), l);
        int l1 = gc.textExtent(s1, DRAW_FLAGS).x;
        int l2 = gc.textExtent(s2, DRAW_FLAGS).x;
        if (l1 + w + l2 > width) {
            max = mid;
            mid = validateOffset(layout, (max + min) / 2);
        } else if (l1 + w + l2 < width) {
            min = mid;
            mid = validateOffset(layout, (max + min) / 2);
        } else {
            // Exact fit: stop the search.
            min = max;
        }
    }
    String result = mid == 0 ? t : t.substring(0, mid) + ELLIPSIS + t.substring(validateOffset(layout, l - mid), l);
    layout.dispose();
    return result;
}
/**
 * Finds the mnemonic character in the given string: the character following
 * a single '&'. A doubled "&&" is an escaped literal ampersand and is skipped.
 *
 * @param string string to scan; may be null
 * @return the lower-cased mnemonic character, or '\0' if there is none
 */
char _findMnemonic(String string) {
    if (string == null) return '\0';
    int length = string.length();
    int index = 0;
    while (index < length) {
        // Advance to the next '&'.
        while (index < length && string.charAt(index) != '&') {
            index++;
        }
        if (++index >= length) return '\0';
        char candidate = string.charAt(index);
        if (candidate != '&') {
            return Character.toLowerCase(candidate);
        }
        // "&&" — escaped ampersand; keep scanning.
        index++;
    }
    return '\0';
}
/**
 * Dispose handler: re-dispatches the dispose event so it runs after other
 * dispose listeners, then clears the label's references.
 *
 * NOTE(review): a running GIF animation handler ('thread') is not stopped
 * here — confirm it terminates itself when the widget is disposed.
 *
 * @param event the dispose event
 */
void onDispose(Event event) {
    /* make this handler run after other dispose listeners */
    if (ignoreDispose) {
        // Second pass (from our own notifyListeners call): nothing left to do.
        ignoreDispose = false;
        return;
    }
    ignoreDispose = true;
    notifyListeners(event.type, event);
    event.type = SWT.NONE;
    // Drop references so they can be collected.
    gradientColors = null;
    gradientPercents = null;
    backgroundImage = null;
    text = null;
    image = null;
    appToolTipText = null;
}
/**
 * Mnemonic traversal handler: when the pressed key matches this label's
 * mnemonic, moves focus to the sibling control that follows this label,
 * walking up the parent chain if necessary.
 *
 * @param event the traverse event; doit/detail are updated on success
 */
void onMnemonic(TraverseEvent event) {
    char mnemonic = _findMnemonic(text);
    if (mnemonic == '\0') return;
    if (Character.toLowerCase(event.character) != mnemonic) return;
    Composite control = this.getParent();
    while (control != null) {
        Control[] children = control.getChildren();
        // Locate this label among its siblings...
        int index = 0;
        while (index < children.length) {
            if (children[index] == this) break;
            index++;
        }
        // ...and try to focus the control right after it.
        index++;
        if (index < children.length) {
            if (children[index].setFocus()) {
                event.doit = true;
                event.detail = SWT.TRAVERSE_NONE;
            }
        }
        control = control.getParent();
    }
}
/**
*
*
* @param event
*/
void onPaint(PaintEvent event) {
Rectangle rect = getClientArea();
// Nothing to paint in a zero-sized client area.
if (rect.width == 0 || rect.height == 0) return;
boolean shortenText = false;
String t = text;
Image img = image;
// Width left for content after the horizontal margins.
int availableWidth = Math.max(0, rect.width - (leftMargin + rightMargin));
Point extent = getTotalSize(img, t);
if (extent.x > availableWidth) {
// Content too wide: first drop the image, then (if still too wide) shorten the text.
img = null;
extent = getTotalSize(img, t);
if (extent.x > availableWidth) {
shortenText = true;
}
}
GC gc = event.gc;
// Text is drawn line by line; split on line delimiters up front.
String[] lines = text == null ? null : splitString(text);
// shorten the text
if (shortenText) {
// Shorten each line that overflows, and track the widest resulting line in extent.x.
extent.x = 0;
for (int i = 0; i < lines.length; i++) {
Point e = gc.textExtent(lines[i], DRAW_FLAGS);
if (e.x > availableWidth) {
lines[i] = shortenText(gc, lines[i], availableWidth);
extent.x = Math.max(extent.x, getTotalSize(null, lines[i]).x);
} else {
extent.x = Math.max(extent.x, e.x);
}
}
// When the text was truncated and the app set no tooltip, show the
// full text as the tooltip so it is still readable.
if (appToolTipText == null) {
super.setToolTipText(text);
}
} else {
// Text fits: restore whatever tooltip the application configured.
super.setToolTipText(appToolTipText);
}
// determine horizontal position
int x = rect.x + leftMargin;
if (align == SWT.CENTER) {
// NOTE(review): centering uses rect.width only, ignoring rect.x and the
// margins — confirm rect.x is always 0 for this widget's client area.
x = (rect.width - extent.x) / 2;
}
if (align == SWT.RIGHT) {
x = rect.width - rightMargin - extent.x;
}
// draw a background image behind the text
try {
if (backgroundImage != null) {
// draw a background image behind the text
Rectangle imageRect = backgroundImage.getBounds();
// tile image to fill space
gc.setBackground(getBackground());
gc.fillRectangle(rect);
int xPos = 0;
while (xPos < rect.width) {
int yPos = 0;
while (yPos < rect.height) {
gc.drawImage(backgroundImage, xPos, yPos);
yPos += imageRect.height;
}
xPos += imageRect.width;
}
} else if (gradientColors != null) {
// draw a gradient behind the text
final Color oldBackground = gc.getBackground();
if (gradientColors.length == 1) {
// Single color: plain fill (null entry means "use widget background", i.e. keep oldBackground).
if (gradientColors[0] != null) gc.setBackground(gradientColors[0]);
gc.fillRectangle(0, 0, rect.width, rect.height);
} else {
// Multi-stop gradient: paint one fillGradientRectangle per segment.
// gradientPercents[i] is the cumulative percentage where segment i ends.
final Color oldForeground = gc.getForeground();
Color lastColor = gradientColors[0];
if (lastColor == null) lastColor = oldBackground;
int pos = 0;
for (int i = 0; i < gradientPercents.length; ++i) {
// Each segment blends from the previous stop color (foreground)
// to the next stop color (background).
gc.setForeground(lastColor);
lastColor = gradientColors[i + 1];
if (lastColor == null) lastColor = oldBackground;
gc.setBackground(lastColor);
if (gradientVertical) {
final int gradientHeight = (gradientPercents[i] * rect.height / 100) - pos;
gc.fillGradientRectangle(0, pos, rect.width, gradientHeight, true);
pos += gradientHeight;
} else {
final int gradientWidth = (gradientPercents[i] * rect.width / 100) - pos;
gc.fillGradientRectangle(pos, 0, gradientWidth, rect.height, false);
pos += gradientWidth;
}
}
// If the stops do not reach 100%, fill the remainder with the widget background.
if (gradientVertical && pos < rect.height) {
gc.setBackground(getBackground());
gc.fillRectangle(0, pos, rect.width, rect.height - pos);
}
if (!gradientVertical && pos < rect.width) {
gc.setBackground(getBackground());
gc.fillRectangle(pos, 0, rect.width - pos, rect.height);
}
gc.setForeground(oldForeground);
}
gc.setBackground(oldBackground);
} else {
// No image, no gradient: solid fill unless double-buffering already
// guarantees a cleared surface and no explicit background is set.
if (background != null || (getStyle() & SWT.DOUBLE_BUFFERED) == 0) {
gc.setBackground(getBackground());
gc.fillRectangle(rect);
}
}
} catch (SWTException e) {
// Background painting failed (e.g. disposed resource); fall back to a
// plain fill so the text is still drawn on a sane surface.
if ((getStyle() & SWT.DOUBLE_BUFFERED) == 0) {
gc.setBackground(getBackground());
gc.fillRectangle(rect);
}
}
// draw border
int style = getStyle();
if ((style & SWT.SHADOW_IN) != 0 || (style & SWT.SHADOW_OUT) != 0) {
paintBorder(gc, rect);
}
// Measure image and text heights to vertically center them relative to each other.
Rectangle imageRect = null;
int lineHeight = 0, textHeight = 0, imageHeight = 0;
if (img != null) {
imageRect = img.getBounds();
imageHeight = imageRect.height;
}
if (lines != null) {
lineHeight = gc.getFontMetrics().getHeight();
textHeight = lines.length * lineHeight;
}
int imageY = 0, midPoint = 0, lineY = 0;
if (imageHeight > textHeight) {
// Image is taller: center the image in the client area (unless custom
// margins are set), then center the text block on the image's midpoint.
if (topMargin == DEFAULT_MARGIN && bottomMargin == DEFAULT_MARGIN) imageY = rect.y +
(rect.height - imageHeight) / 2;
else imageY = topMargin;
midPoint = imageY + imageHeight / 2;
lineY = midPoint - textHeight / 2;
} else {
// Text is taller (or equal): center the text block, then center the image on it.
if (topMargin == DEFAULT_MARGIN && bottomMargin == DEFAULT_MARGIN) lineY = rect.y +
(rect.height - textHeight) / 2;
else lineY = topMargin;
midPoint = lineY + textHeight / 2;
imageY = midPoint - imageHeight / 2;
}
// draw the image
if (img != null) {
gc.drawImage(img, 0, 0, imageRect.width, imageHeight, x, imageY, imageRect.width, imageHeight);
// Advance x past the image and shrink the text extent accordingly.
x += imageRect.width + GAP;
extent.x -= imageRect.width + GAP;
}
// draw the text
if (lines != null) {
gc.setForeground(getForeground());
for (int i = 0; i < lines.length; i++) {
int lineX = x;
// For multi-line text, re-align each individual line within the text extent.
if (lines.length > 1) {
if (align == SWT.CENTER) {
int lineWidth = gc.textExtent(lines[i], DRAW_FLAGS).x;
lineX = x + Math.max(0, (extent.x - lineWidth) / 2);
}
if (align == SWT.RIGHT) {
int lineWidth = gc.textExtent(lines[i], DRAW_FLAGS).x;
lineX = Math.max(x, rect.x + rect.width - rightMargin - lineWidth);
}
}
// Append a progress percentage to the last line when a provider reports nonzero progress.
if (i == lines.length-1 && progressProvider != null && progressProvider.getProgress() != 0) {
gc.drawText(lines[i] + " (" + progressProvider.getProgress()+"%)", lineX, lineY, DRAW_FLAGS); //$NON-NLS-1$ //$NON-NLS-2$
} else {
gc.drawText(lines[i], lineX, lineY, DRAW_FLAGS);
}
lineY += lineHeight;
}
}
}
/**
 * Snaps the given character offset to a valid cluster boundary in the layout.
 * <p>
 * Some glyph clusters span multiple {@code char}s; an offset pointing into
 * the middle of such a cluster is not a valid caret position. This probes
 * forward one cluster and, if that moved, steps back to the boundary that
 * precedes the probed position.
 *
 * @param layout the text layout used to resolve cluster boundaries
 * @param offset the candidate character offset
 * @return {@code offset} itself when it already lies on a cluster boundary,
 *         otherwise the nearest preceding cluster boundary
 */
int validateOffset(TextLayout layout, int offset) {
	final int probed = layout.getNextOffset(offset, SWT.MOVEMENT_CLUSTER);
	return probed == offset
			? offset
			: layout.getPreviousOffset(probed, SWT.MOVEMENT_CLUSTER);
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.