answer
stringlengths 17
10.2M
|
|---|
package org.jgroups.protocols.pbcast;
import org.jgroups.*;
import org.jgroups.annotations.*;
import org.jgroups.stack.Protocol;
import org.jgroups.util.*;
import java.io.*;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
/**
* Flush, as it name implies, forces group members to flush their pending messages while blocking
* them to send any additional messages. The process of flushing acquiesces the group so that state
* transfer or a join can be done. It is also called stop-the-world model as nobody will be able to
* send messages while a flush is in process.
*
* <p>
* Flush is needed for:
* <p>
* (1) State transfer. When a member requests state transfer, the coordinator tells everyone to stop
* sending messages and waits for everyone's ack. Then it asks the application for its state and
* ships it back to the requester. After the requester has received and set the state successfully,
* the coordinator tells everyone to resume sending messages.
* <p>
* (2) View changes (e.g.a join). Before installing a new view V2, flushing would ensure that all
* messages *sent* in the current view V1 are indeed *delivered* in V1, rather than in V2 (in all
* non-faulty members). This is essentially Virtual Synchrony.
*
*
*
* @author Vladimir Blagojevic
* @since 2.4
*/
@MBean(description = "Flushes the cluster")
public class FLUSH extends Protocol {
// Shared immutable success token for the flush-start promise.
private static final FlushStartResult SUCCESS_START_FLUSH = new FlushStartResult(Boolean.TRUE,null);
// flags for marshalling
protected static final short DIGEST_PRESENT = 1 << 0;
protected static final short PARTICIPANTS_PRESENT = 1 << 1;
@Property(description = "Max time to keep channel blocked in flush. Default is 8000 msec")
private long timeout = 8000;
@Property(description = "Timeout (per atttempt) to quiet the cluster during the first flush phase. Default is 2000 msec")
private long start_flush_timeout = 2000;
@Property(description = "Timeout to wait for UNBLOCK after STOP_FLUSH is issued. Default is 2000 msec")
private long end_flush_timeout = 2000;
// NOTE(review): description says 3000 msec but the actual default is 2000 — confirm which is intended
@Property(description = "Retry timeout after an unsuccessful attempt to quiet the cluster (first flush phase). Default is 3000 msec")
private long retry_timeout = 2000;
@Property(description = "Reconciliation phase toggle. Default is true")
private boolean enable_reconciliation = true;
@Property(description="When set, FLUSH is bypassed, same effect as if FLUSH wasn't in the config at all")
protected boolean bypass=false;
// Statistics (updated only when stats is enabled).
private long startFlushTime;
private long totalTimeInFlush;
private int numberOfFlushes;
private double averageFlushDuration;
@GuardedBy("sharedLock")
private View currentView=new View(new ViewId(), new ArrayList<Address>());
private Address localAddress;
/**
 * Group member that requested FLUSH. For view installations flush coordinator is the group
 * coordinator For state transfer flush coordinator is the state requesting member
 */
@GuardedBy("sharedLock")
private Address flushCoordinator;
@GuardedBy("sharedLock")
private final List<Address> flushMembers=new ArrayList<Address>();
// Counts installed views; used to detect the channel's very first view.
private final AtomicInteger viewCounter = new AtomicInteger(0);
@GuardedBy("sharedLock")
private final Map<Address, Digest> flushCompletedMap=new HashMap<Address, Digest>();
@GuardedBy("sharedLock")
private final List<Address> flushNotCompletedMap=new ArrayList<Address>();
@GuardedBy("sharedLock")
private final Set<Address> suspected=new TreeSet<Address>();
@GuardedBy("sharedLock")
private final List<Address> reconcileOks=new ArrayList<Address>();
// Guards all flush bookkeeping collections above.
private final Object sharedLock = new Object();
// blockMutex + notBlockedDown implement the stop-the-world gate in down().
private final ReentrantLock blockMutex = new ReentrantLock();
private final Condition notBlockedDown = blockMutex.newCondition();
/**
 * Indicates if FLUSH.down() is currently blocking threads Condition predicate associated with
 * blockMutex
 */
@ManagedAttribute(description="Is message sending currently blocked")
@GuardedBy("blockMutex")
private volatile boolean isBlockingFlushDown = true;
@GuardedBy("sharedLock")
private boolean flushCompleted = false;
// Promises used to hand results between the flush protocol threads and callers of startFlush()/stopFlush().
private final Promise<FlushStartResult> flush_promise = new Promise<FlushStartResult>();
private final Promise<Boolean> flush_unblock_promise = new Promise<Boolean>();
// Guards against concurrent flush rounds; set on START_FLUSH, cleared in resetForNextFlush().
private final AtomicBoolean flushInProgress = new AtomicBoolean(false);
// Ensure BLOCK/UNBLOCK events are delivered to the application at most once per flush round.
private final AtomicBoolean sentBlock = new AtomicBoolean(false);
private final AtomicBoolean sentUnblock = new AtomicBoolean(false);
/** Returns the per-attempt timeout (ms) for the first flush phase. */
public long getStartFlushTimeout() {
return start_flush_timeout;
}
/** Sets the per-attempt timeout (ms) for the first flush phase. */
public void setStartFlushTimeout(long start_flush_timeout) {
this.start_flush_timeout = start_flush_timeout;
}
/** Returns the retry timeout (ms) after an unsuccessful flush attempt. */
public long getRetryTimeout() {
return retry_timeout;
}
/** Sets the retry timeout (ms) after an unsuccessful flush attempt. */
public void setRetryTimeout(long retry_timeout) {
this.retry_timeout = retry_timeout;
}
/**
 * Protocol lifecycle start: advertises flush support to the protocols above and
 * below via CONFIG events, resets the view counter, and arms the down-blocking
 * gate so multicasts are held until the first flush/view cycle unblocks them.
 * @throws Exception per the Protocol.start() contract
 */
public void start() throws Exception {
Map<String, Object> map = new HashMap<String, Object>();
map.put("flush_supported", Boolean.TRUE);
up_prot.up(new Event(Event.CONFIG, map));
down_prot.down(new Event(Event.CONFIG, map));
viewCounter.set(0);
blockMutex.lock();
try {
// block multicast down() traffic until unblocked by the first STOP_FLUSH/view
isBlockingFlushDown = true;
} finally {
blockMutex.unlock();
}
}
/**
 * Protocol lifecycle stop: discards all flush bookkeeping and resets the
 * current view to an empty placeholder. All state is cleared under sharedLock.
 */
public void stop() {
    synchronized (sharedLock) {
        currentView = new View(new ViewId(), new ArrayList<Address>());
        flushCoordinator = null;
        // drop any in-flight flush round bookkeeping
        flushCompletedMap.clear();
        flushNotCompletedMap.clear();
        flushMembers.clear();
        suspected.clear();
    }
}
/** Average duration (ms) of completed flush rounds; stats only. */
@ManagedAttribute
public double getAverageFlushDuration() {
return averageFlushDuration;
}
/** Cumulative time (ms) spent in flush; stats only. */
@ManagedAttribute
public long getTotalTimeInFlush() {
return totalTimeInFlush;
}
/** Number of flush rounds started on this member; stats only. */
@ManagedAttribute
public int getNumberOfFlushes() {
return numberOfFlushes;
}
/**
 * Toggles the bypass flag. When bypass is on, FLUSH behaves as if it were
 * absent from the stack.
 * @param flag the new bypass value
 * @return the previous bypass value
 */
@ManagedOperation(description="Sets the bypass flag")
public boolean setBypass(boolean flag) {
    final boolean previous = bypass;
    bypass = flag;
    return previous;
}
/** JMX entry point: runs a cluster-wide flush over all current members. */
@ManagedOperation(description = "Request cluster flush")
public void startFlush() {
startFlush(new Event(Event.SUSPEND));
}
/** Extracts the participant list from a SUSPEND event and delegates. A null arg means "all members". */
@SuppressWarnings("unchecked")
private void startFlush(Event evt) {
List<Address> flushParticipants = (List<Address>) evt.getArg();
startFlush(flushParticipants);
}
/**
 * Runs the first flush phase: sends START_FLUSH to the participants and waits
 * up to start_flush_timeout for all FLUSH_COMPLETED responses (delivered via
 * flush_promise). Throws RuntimeException on failure, timeout, or when a flush
 * is already in progress.
 * NOTE(review): the flushInProgress check is a plain get(), not compareAndSet —
 * presumably the flag is only set when our own START_FLUSH loops back in
 * handleStartFlush(); confirm two concurrent callers cannot both pass here.
 */
private void startFlush(List<Address> flushParticipants) {
if (!flushInProgress.get()) {
flush_promise.reset();
synchronized(sharedLock) {
// null participants means "flush the whole current view"
if(flushParticipants == null)
flushParticipants=new ArrayList<Address>(currentView.getMembers());
}
onSuspend(flushParticipants);
try {
FlushStartResult r = flush_promise.getResultWithTimeout(start_flush_timeout);
if(r.failed())
throw new RuntimeException(r.getFailureCause());
} catch (TimeoutException e) {
// figure out who did not respond, abort the round, and surface the failure
Set<Address> missingMembers = new HashSet<Address>();
synchronized(sharedLock) {
missingMembers.addAll(flushMembers);
missingMembers.removeAll(flushCompletedMap.keySet());
}
rejectFlush(flushParticipants, currentViewId());
throw new RuntimeException(localAddress
+ " timed out waiting for flush responses from "
+ missingMembers
+ " after "
+ start_flush_timeout
+ " ms. Rejected flush to participants "
+ flushParticipants,e);
}
}
else {
throw new RuntimeException("Flush attempt is in progress");
}
}
/** JMX entry point: ends the current flush by injecting a RESUME event. */
@ManagedOperation(description = "Request end of flush in a cluster")
public void stopFlush() {
down(new Event(Event.RESUME));
}
/**
 * Down-stack event handler. Unless bypass is set: multicast messages are
 * blocked while a flush is running (FLUSH_BYPASS-tagged messages and unicasts
 * pass through), CONNECT variants send BLOCK up first, SUSPEND starts a flush,
 * RESUME ends one.
 */
public Object down(Event evt) {
if(!bypass){
switch (evt.getType()) {
case Event.MSG:
Message msg = (Message) evt.getArg();
Address dest = msg.getDest();
if (dest == null) { // mcasts
FlushHeader fh = (FlushHeader) msg.getHeader(this.id);
if (fh != null && fh.type == FlushHeader.FLUSH_BYPASS) {
return down_prot.down(evt);
} else {
// may park the caller until the flush completes (or timeout elapses)
blockMessageDuringFlush();
}
} else {
// unicasts are irrelevant in virtual synchrony, let them through
return down_prot.down(evt);
}
break;
case Event.CONNECT:
case Event.CONNECT_USE_FLUSH:
return handleConnect(evt,true);
case Event.CONNECT_WITH_STATE_TRANSFER:
case Event.CONNECT_WITH_STATE_TRANSFER_USE_FLUSH:
return handleConnect(evt, false);
case Event.SUSPEND:
startFlush(evt);
return null;
// only for testing, see FLUSH#testFlushWithCrashedFlushCoordinator
case Event.SUSPEND_BUT_FAIL:
// sends START_FLUSH but deliberately never waits for the responses
if (!flushInProgress.get()) {
flush_promise.reset();
ArrayList<Address> flushParticipants = null;
synchronized (sharedLock) {
flushParticipants = new ArrayList<Address>(currentView.getMembers());
}
onSuspend(flushParticipants);
}
break;
case Event.RESUME:
onResume(evt);
return null;
case Event.SET_LOCAL_ADDRESS:
localAddress = (Address) evt.getArg();
break;
}
}
return down_prot.down(evt);
}
/**
 * Handles the CONNECT family of events: delivers BLOCK up (once), forwards the
 * connect down the stack, and optionally waits for the flush UNBLOCK before
 * returning.
 * @param waitForUnblock true for plain CONNECT; false for state-transfer connects
 * @return the result of the connect, possibly a Throwable on failure
 */
private Object handleConnect(Event evt, boolean waitForUnblock) {
if (sentBlock.compareAndSet(false, true)) {
sendBlockUpToChannel();
}
Object result = down_prot.down(evt);
if (result instanceof Throwable) {
// set the var back to its original state if we cannot
// connect successfully
sentBlock.set(false);
}
if(waitForUnblock)
waitForUnblock();
return result;
}
/**
 * Parks the calling sender thread while a flush is in progress. With
 * timeout <= 0 the wait is unbounded; otherwise, after `timeout` ms the member
 * unblocks itself, completes the flush promise and releases all other waiters.
 * Re-interrupts the thread if the await is interrupted.
 */
private void blockMessageDuringFlush() {
boolean shouldSuspendByItself = false;
blockMutex.lock();
try {
while (isBlockingFlushDown) {
if (log.isDebugEnabled())
log.debug(localAddress + ": blocking for " + (timeout <= 0 ? "ever" : timeout + "ms"));
if(timeout <= 0) {
notBlockedDown.await();
}
else {
// await returns false on timeout -> we must self-unblock
shouldSuspendByItself = !notBlockedDown.await(timeout, TimeUnit.MILLISECONDS);
}
if (shouldSuspendByItself) {
isBlockingFlushDown = false;
log.warn(localAddress + ": unblocking after " + timeout + "ms");
flush_promise.setResult(new FlushStartResult(Boolean.TRUE,null));
notBlockedDown.signalAll();
}
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} finally {
blockMutex.unlock();
}
}
/**
 * Up-stack event handler. Unless bypass is set: dispatches FLUSH control
 * messages (START_FLUSH, FLUSH_COMPLETED, STOP_FLUSH, ABORT_FLUSH, reconcile
 * messages, collision signals) and consumes them; lets unicasts through;
 * handles view installation, suspicion and SUSPEND/RESUME/UNBLOCK events.
 * FLUSH control messages are never passed further up.
 */
public Object up(Event evt) {
if(!bypass){
switch (evt.getType()) {
case Event.MSG:
Message msg = (Message) evt.getArg();
final FlushHeader fh = (FlushHeader) msg.getHeader(this.id);
if (fh != null) {
// payload carries the participant set and (optionally) a digest
final Tuple<Collection<? extends Address>,Digest> tuple=readParticipantsAndDigest(msg.getRawBuffer(),
msg.getOffset(),
msg.getLength());
switch (fh.type) {
case FlushHeader.FLUSH_BYPASS:
return up_prot.up(evt);
case FlushHeader.START_FLUSH:
Collection<? extends Address> fp = tuple.getVal1();
boolean amIParticipant = (fp != null && fp.contains(localAddress))
|| msg.getSrc().equals(localAddress);
if (amIParticipant) {
handleStartFlush(msg, fh);
} else {
if (log.isDebugEnabled())
log.debug(localAddress + ": received START_FLUSH but I'm not flush participant, not responding");
}
break;
case FlushHeader.FLUSH_RECONCILE:
handleFlushReconcile(msg);
break;
case FlushHeader.FLUSH_RECONCILE_OK:
onFlushReconcileOK(msg);
break;
case FlushHeader.STOP_FLUSH:
onStopFlush();
break;
case FlushHeader.ABORT_FLUSH:
// coordinator aborted the round; participants reset their flush state
Collection<? extends Address> flushParticipants = tuple.getVal1();
boolean participant = flushParticipants != null && flushParticipants.contains(localAddress);
if (log.isDebugEnabled()) {
log.debug(localAddress + ": received ABORT_FLUSH from flush coordinator " + msg.getSrc()
+ ", am I flush participant=" + participant);
}
if (participant)
resetForNextFlush();
break;
case FlushHeader.FLUSH_NOT_COMPLETED:
if (log.isDebugEnabled()) {
log.debug(localAddress + ": received FLUSH_NOT_COMPLETED from " + msg.getSrc());
}
boolean flushCollision = false;
synchronized (sharedLock) {
flushNotCompletedMap.add(msg.getSrc());
// collision = at least one member completed while another did not
flushCollision = !flushCompletedMap.isEmpty();
if (flushCollision) {
flushNotCompletedMap.clear();
flushCompletedMap.clear();
}
}
if (log.isDebugEnabled())
log.debug(localAddress + ": received FLUSH_NOT_COMPLETED from " + msg.getSrc() +
" collision=" + flushCollision);
// reject flush if we have at least one OK and at least one FAIL
if (flushCollision) {
// rejection sends messages; do it off the up-thread
Runnable r = new Runnable() {
public void run() {
rejectFlush(tuple.getVal1(), fh.viewID);
}
};
new Thread(r).start();
}
// however, flush should fail/retry as soon as one FAIL is received
flush_promise.setResult(new FlushStartResult(Boolean.FALSE, new Exception("Flush failed for " + msg.getSrc())));
break;
case FlushHeader.FLUSH_COMPLETED:
if (isCurrentFlushMessage(fh))
onFlushCompleted(msg.getSrc(), msg, fh);
break;
}
return null; // do not pass FLUSH msg up
} else {
// lets wait for STOP_FLUSH to complete before we start allowing message up
if (msg.getDest() != null)
return up_prot.up(evt); // allow unicasts to pass, virtual synchrony only applies to multicasts
}
break;
case Event.VIEW_CHANGE:
// JGRP-618: FLUSH coordinator transfer reorders block/unblock/view events in applications (TCP stack only)
up_prot.up(evt);
View newView = (View) evt.getArg();
boolean coordinatorLeft = onViewChange(newView);
boolean singletonMember = newView.size() == 1 && newView.containsMember(localAddress);
boolean isThisOurFirstView = viewCounter.addAndGet(1) == 1;
// if this is channel's first view and its the only member of the group - no flush
// was run but the channel application should still receive BLOCK,VIEW,UNBLOCK
// also if coordinator of flush left each member should run stopFlush individually.
if ((isThisOurFirstView && singletonMember) || coordinatorLeft)
onStopFlush();
return null;
case Event.TMP_VIEW:
// only install a temp view if we are not a member of it
View tmpView = (View) evt.getArg();
if (!tmpView.containsMember(localAddress))
onViewChange(tmpView);
break;
case Event.SUSPECT:
onSuspect((Address) evt.getArg());
break;
case Event.SUSPEND:
startFlush(evt);
return null;
case Event.RESUME:
onResume(evt);
return null;
case Event.UNBLOCK:
flush_unblock_promise.setResult(Boolean.TRUE);
break;
}
}
return up_prot.up(evt);
}
/**
 * Batch variant of up(): FLUSH control messages and unicasts are pulled out of
 * the batch and delivered individually through the single-message path; the
 * remaining multicasts are forwarded as one (possibly emptied) batch.
 */
public void up(MessageBatch batch) {
    if (bypass) {
        up_prot.up(batch);
        return;
    }
    for (Message m : batch) {
        if (m.getHeader(id) != null) {
            // FLUSH control message: route through the existing single-message code
            batch.remove(m);
            up(new Event(Event.MSG, m));
            continue;
        }
        if (m.getDest() != null) {
            // unicast: process immediately, it is not subject to flush blocking
            batch.remove(m);
            up_prot.up(new Event(Event.MSG, m));
        }
    }
    if (!batch.isEmpty())
        up_prot.up(batch);
}
/**
 * Waits up to end_flush_timeout ms for the UNBLOCK event signalled via
 * flush_unblock_promise; logs a warning on timeout. Always resets the promise.
 */
private void waitForUnblock() {
try {
flush_unblock_promise.getResultWithTimeout(end_flush_timeout);
} catch (TimeoutException t) {
if (log.isWarnEnabled())
log.warn(localAddress + ": waiting for UNBLOCK timed out after " + end_flush_timeout + " ms");
} finally {
flush_unblock_promise.reset();
}
}
/**
 * Records a FLUSH_RECONCILE_OK response; once every flush member has answered,
 * completes the flush promise successfully.
 */
private void onFlushReconcileOK(Message msg) {
if (log.isDebugEnabled())
log.debug(localAddress + ": received reconcile ok from " + msg.getSrc());
synchronized (sharedLock) {
reconcileOks.add(msg.getSrc());
if (reconcileOks.size() >= flushMembers.size()) {
flush_promise.setResult(SUCCESS_START_FLUSH);
if (log.isDebugEnabled())
log.debug(localAddress + ": all FLUSH_RECONCILE_OK received");
}
}
}
/**
 * Handles a FLUSH_RECONCILE request: hands the coordinator's merged digest to
 * NAKACK (via REBROADCAST) so missing messages are retransmitted, then replies
 * with FLUSH_RECONCILE_OK to the requester.
 */
private void handleFlushReconcile(Message msg) {
Address requester = msg.getSrc();
Tuple<Collection<? extends Address>,Digest> tuple=readParticipantsAndDigest(msg.getRawBuffer(),
msg.getOffset(),msg.getLength());
Digest reconcileDigest = tuple.getVal2();
if (log.isDebugEnabled())
log.debug(localAddress + ": received FLUSH_RECONCILE, passing digest to NAKACK "
+ reconcileDigest);
// Let NAKACK reconcile missing messages
down_prot.down(new Event(Event.REBROADCAST, reconcileDigest));
if (log.isDebugEnabled())
log.debug(localAddress + ": returned from FLUSH_RECONCILE, "
+ " sending RECONCILE_OK to " + requester);
Message reconcileOk = new Message(requester).setFlag(Message.Flag.OOB, Message.Flag.INTERNAL)
.putHeader(this.id,new FlushHeader(FlushHeader.FLUSH_RECONCILE_OK));
down_prot.down(new Event(Event.MSG, reconcileOk));
}
/**
 * Handles START_FLUSH: if no flush round is running, records the requester as
 * coordinator and proceeds via onStartFlush(); otherwise replies with
 * FLUSH_NOT_COMPLETED to signal the collision.
 */
private void handleStartFlush(Message msg, FlushHeader fh) {
Address flushRequester = msg.getSrc();
// CAS makes only the first concurrent START_FLUSH win
boolean proceed = flushInProgress.compareAndSet(false, true);
if (proceed) {
synchronized (sharedLock) {
flushCoordinator = flushRequester;
}
onStartFlush(flushRequester, msg, fh);
} else {
Tuple<Collection<? extends Address>,Digest> tuple=readParticipantsAndDigest(msg.getRawBuffer(),
msg.getOffset(),msg.getLength());
Collection<? extends Address> flushParticipants=tuple.getVal1();
Message response = new Message(flushRequester)
.putHeader(this.id,new FlushHeader(FlushHeader.FLUSH_NOT_COMPLETED,fh.viewID))
.setBuffer(marshal(flushParticipants,null));
down_prot.down(new Event(Event.MSG, response));
if (log.isDebugEnabled())
log.debug(localAddress + ": received START_FLUSH, responded with FLUSH_NOT_COMPLETED to " + flushRequester);
}
}
/**
 * Aborts an in-progress flush round by sending an ABORT_FLUSH message (tagged
 * with the round's view id and participant set) to every non-null participant.
 * A null participant collection is a no-op.
 */
private void rejectFlush(Collection<? extends Address> participants, long viewId) {
    if (participants == null)
        return;
    for (Address member : participants) {
        if (member != null) {
            Message reject = new Message(member, localAddress, null)
                    .setFlag(Message.Flag.OOB, Message.Flag.INTERNAL)
                    .putHeader(this.id, new FlushHeader(FlushHeader.ABORT_FLUSH, viewId))
                    .setBuffer(marshal(participants, null));
            down_prot.down(new Event(Event.MSG, reject));
        }
    }
}
/**
 * Advertises the down-events this protocol services itself: SUSPEND (start
 * flush) and RESUME (stop flush). Returns a mutable list, as before.
 */
public List<Integer> providedDownServices() {
    return new ArrayList<Integer>(Arrays.asList(Event.SUSPEND, Event.RESUME));
}
/** Delivers BLOCK to the application and re-arms the unblock-once latch. */
private void sendBlockUpToChannel() {
this.up(new Event(Event.BLOCK));
sentUnblock.set(false);
}
/** Delivers UNBLOCK to the application and re-arms the block-once latch. */
private void sendUnBlockUpToChannel() {
sentBlock.set(false);
this.up(new Event(Event.UNBLOCK));
}
/** True if the header belongs to a flush round started in the current view. */
private boolean isCurrentFlushMessage(FlushHeader fh) {
return fh.viewID == currentViewId();
}
/**
 * Returns the id of the currently installed view, or -1 when no view id is
 * available. Reads currentView under sharedLock.
 */
private long currentViewId() {
    synchronized (sharedLock) {
        ViewId vid = currentView.getViewId();
        return vid != null ? vid.getId() : -1;
    }
}
/**
 * Installs a new view: prunes the suspected set to the view's membership and
 * swaps currentView under sharedLock.
 * @return true if the previous view's creator (coordinator) is absent from the
 *         new view while both views are non-empty
 */
private boolean onViewChange(View view) {
    boolean coordinatorLeft;
    synchronized (sharedLock) {
        suspected.retainAll(view.getMembers());
        View previous = currentView;
        currentView = view;
        coordinatorLeft = !previous.getMembers().isEmpty()
                && !view.getMembers().isEmpty()
                && !view.containsMember(previous.getCreator());
    }
    if (log.isDebugEnabled())
        log.debug(localAddress + ": installing view " + view);
    return coordinatorLeft;
}
/**
 * Handles STOP_FLUSH: updates flush-duration statistics (when enabled), clears
 * all per-round state, unblocks down() senders, and delivers UNBLOCK to the
 * application at most once per round.
 */
private void onStopFlush() {
if (stats && startFlushTime > 0) {
long stopFlushTime = System.currentTimeMillis();
totalTimeInFlush += (stopFlushTime - startFlushTime);
if (numberOfFlushes > 0) {
averageFlushDuration = totalTimeInFlush / (double) numberOfFlushes;
}
startFlushTime = 0;
}
if (log.isDebugEnabled())
log.debug(localAddress
+ ": received STOP_FLUSH, unblocking FLUSH.down() and sending UNBLOCK up");
resetForNextFlush();
if (sentUnblock.compareAndSet(false, true)) {
// ensures that we do not repeat unblock event
sendUnBlockUpToChannel();
}
}
/**
 * Resets all flush round state: clears the bookkeeping collections (under
 * sharedLock), releases threads parked in blockMessageDuringFlush() (under
 * blockMutex), and finally clears the in-progress flag so a new round may
 * start. The ordering — collections, then unblock, then flag — matters.
 */
private void resetForNextFlush() {
synchronized (sharedLock) {
flushCompletedMap.clear();
flushNotCompletedMap.clear();
flushMembers.clear();
suspected.clear();
flushCoordinator = null;
flushCompleted = false;
}
blockMutex.lock();
try {
isBlockingFlushDown = false;
notBlockedDown.signalAll();
} finally {
blockMutex.unlock();
}
flushInProgress.set(false);
}
/**
* Starts the flush protocol
* @param members List of participants in the flush protocol. Guaranteed to be non-null
*/
private void onSuspend(final List<Address> members) {
Message msg = null;
Collection<Address> participantsInFlush = null;
synchronized (sharedLock) {
flushCoordinator = localAddress;
// start FLUSH only on group members that we need to flush
participantsInFlush = members;
participantsInFlush.retainAll(currentView.getMembers());
flushMembers.clear();
flushMembers.addAll(participantsInFlush);
flushMembers.removeAll(suspected);
msg = new Message(null, localAddress, null)
.putHeader(this.id, new FlushHeader(FlushHeader.START_FLUSH, currentViewId()))
.setBuffer(marshal(participantsInFlush, null));
}
if (participantsInFlush.isEmpty()) {
flush_promise.setResult(SUCCESS_START_FLUSH);
} else {
down_prot.down(new Event(Event.MSG, msg));
if (log.isDebugEnabled())
log.debug(localAddress + ": flush coordinator "
+ " is starting FLUSH with participants " + participantsInFlush);
}
}
/**
 * Handles RESUME: sends STOP_FLUSH either to all members (multicast, when the
 * event carries no member list) or as unicasts to the listed members. If this
 * member participated in the flush, waits for the resulting UNBLOCK.
 */
@SuppressWarnings("unchecked")
private void onResume(Event evt) {
List<Address> members = (List<Address>) evt.getArg();
long viewID = currentViewId();
boolean isParticipant = false;
synchronized(sharedLock) {
isParticipant = flushMembers.contains(localAddress) || (members != null && members.contains(localAddress));
}
if (members == null || members.isEmpty()) {
Message msg = new Message(null, localAddress, null);
// Cannot be OOB since START_FLUSH is not OOB
// we have to FIFO order two subsequent flushes
if (log.isDebugEnabled())
log.debug(localAddress + ": received RESUME, sending STOP_FLUSH to all");
msg.putHeader(this.id, new FlushHeader(FlushHeader.STOP_FLUSH, viewID));
down_prot.down(new Event(Event.MSG, msg));
} else {
for (Address address : members) {
Message msg = new Message(address, localAddress, null);
// Cannot be OOB since START_FLUSH is not OOB
// we have to FIFO order two subsequent flushes
if (log.isDebugEnabled())
log.debug(localAddress + ": received RESUME, sending STOP_FLUSH to " + address);
msg.putHeader(this.id, new FlushHeader(FlushHeader.STOP_FLUSH, viewID));
down_prot.down(new Event(Event.MSG, msg));
}
}
if(isParticipant)
waitForUnblock();
}
/**
 * Participant-side handling of START_FLUSH: records the coordinator and the
 * participant set (unless we started the flush ourselves), delivers BLOCK to
 * the application once, blocks down() multicasts, and replies to the starter
 * with FLUSH_COMPLETED carrying our NAKACK digest.
 */
private void onStartFlush(Address flushStarter, Message msg, FlushHeader fh) {
if (stats) {
startFlushTime = System.currentTimeMillis();
numberOfFlushes += 1;
}
boolean proceed = false;
boolean amIFlushInitiator = false;
Tuple<Collection<? extends Address>,Digest> tuple=readParticipantsAndDigest(msg.getRawBuffer(),
msg.getOffset(),msg.getLength());
synchronized (sharedLock) {
amIFlushInitiator = flushStarter.equals(localAddress);
if(!amIFlushInitiator){
// adopt the starter's view of the flush round (initiator set this up in onSuspend)
flushCoordinator = flushStarter;
flushMembers.clear();
if (tuple.getVal1() != null) {
flushMembers.addAll(tuple.getVal1());
}
flushMembers.removeAll(suspected);
}
proceed = flushMembers.contains(localAddress);
}
if (proceed) {
if (sentBlock.compareAndSet(false, true)) {
// ensures that we do not repeat block event
// and that we do not send block event to non participants
sendBlockUpToChannel();
blockMutex.lock();
try {
isBlockingFlushDown = true;
} finally {
blockMutex.unlock();
}
} else {
if (log.isDebugEnabled())
log.debug(localAddress + ": received START_FLUSH, but not sending BLOCK up");
}
// include our message digest so the coordinator can detect virtual-synchrony gaps
Digest digest = (Digest) down_prot.down(new Event(Event.GET_DIGEST));
Message start_msg = new Message(flushStarter)
.putHeader(this.id, new FlushHeader(FlushHeader.FLUSH_COMPLETED, fh.viewID))
.setBuffer(marshal(tuple.getVal1(),digest));
down_prot.down(new Event(Event.MSG, start_msg));
if (log.isDebugEnabled())
log.debug(localAddress + ": received START_FLUSH, responded with FLUSH_COMPLETED to " + flushStarter);
}
}
/**
 * Coordinator-side handling of FLUSH_COMPLETED: records the sender's digest;
 * when all flush members have answered, either completes the flush promise or
 * — when digests disagree and reconciliation is enabled — starts a
 * FLUSH_RECONCILE round with the merged highest-seqno digest. A concurrent
 * FLUSH_NOT_COMPLETED (collision) causes the round to be rejected.
 * NOTE(review): the flushCompleted field is re-read outside sharedLock below —
 * presumably safe because only this up-thread mutates it here; confirm.
 */
private void onFlushCompleted(Address address, final Message m, final FlushHeader header) {
Message msg = null;
boolean needsReconciliationPhase = false;
boolean collision = false;
final Tuple<Collection<? extends Address>,Digest> tuple=readParticipantsAndDigest(m.getRawBuffer(),
m.getOffset(),m.getLength());
Digest digest = tuple.getVal2();
synchronized (sharedLock) {
flushCompletedMap.put(address, digest);
flushCompleted = flushCompletedMap.size() >= flushMembers.size()
&& !flushMembers.isEmpty()
&& flushCompletedMap.keySet().containsAll(flushMembers);
collision = !flushNotCompletedMap.isEmpty();
if (log.isDebugEnabled())
log.debug(localAddress + ": FLUSH_COMPLETED from " + address + ", completed "
+ flushCompleted + ", flushMembers " + flushMembers
+ ", flushCompleted " + flushCompletedMap.keySet());
needsReconciliationPhase = enable_reconciliation && flushCompleted && hasVirtualSynchronyGaps();
if (needsReconciliationPhase) {
Digest d = findHighestSequences(currentView);
msg = new Message().setFlag(Message.Flag.OOB);
reconcileOks.clear();
msg.putHeader(this.id, new FlushHeader(FlushHeader.FLUSH_RECONCILE, currentViewId()))
.setBuffer(marshal(flushMembers, d));
if (log.isDebugEnabled())
log.debug(localAddress
+ ": reconciling flush mebers due to virtual synchrony gap, digest is "
+ d + " flush members are " + flushMembers);
flushCompletedMap.clear();
} else if (flushCompleted) {
flushCompletedMap.clear();
} else if (collision) {
flushNotCompletedMap.clear();
flushCompletedMap.clear();
}
}
// act outside the lock: send reconcile request, complete the promise, or reject
if (needsReconciliationPhase) {
down_prot.down(new Event(Event.MSG, msg));
} else if (flushCompleted) {
flush_promise.setResult(SUCCESS_START_FLUSH);
if (log.isDebugEnabled())
log.debug(localAddress + ": all FLUSH_COMPLETED received");
} else if (collision) {
// reject flush if we have at least one OK and at least one FAIL
Runnable r = new Runnable() {
public void run() {
rejectFlush(tuple.getVal1(), header.viewID);
}
};
new Thread(r).start();
}
}
/**
 * True when the digests collected from flush members disagree, i.e. some
 * members have seen messages others have not (a virtual synchrony gap).
 * Caller must hold sharedLock.
 */
private boolean hasVirtualSynchronyGaps() {
    return !same(new ArrayList<Digest>(flushCompletedMap.values()));
}
/**
 * Returns true when all digests in the list are equal to each other.
 * Returns false for a null or empty list (the original threw
 * IndexOutOfBoundsException on an empty list via get(0); empty is now handled
 * consistently with the existing null guard).
 * @param digests digests collected from the flush members; may be null/empty
 */
protected static boolean same(final List<Digest> digests) {
    if (digests == null || digests.isEmpty())
        return false;
    final Digest first = digests.get(0);
    for (int i = 1; i < digests.size(); i++) {
        if (!first.equals(digests.get(i)))
            return false;
    }
    return true;
}
/** Merges the digests collected in flushCompletedMap into a single digest with the highest seqnos. Caller must hold sharedLock. */
private Digest findHighestSequences(View view) {
List<Digest> digests = new ArrayList<Digest>(flushCompletedMap.values());
return maxSeqnos(view,digests);
}
/** Returns a digest which contains, for all members of view, the highest delivered and received
 * seqno of all digests */
protected static Digest maxSeqnos(final View view, List<Digest> digests) {
if(view == null || digests == null)
return null;
MutableDigest digest=new MutableDigest(view.getMembersRaw());
for(Digest dig: digests)
digest.merge(dig);
return digest;
}
/**
 * Handles a SUSPECT event. Two concerns: (1) if the suspected member is the
 * flush coordinator, the coordinator's "neighbor" in the flush member list
 * completes the flush on its behalf by resuming; (2) the suspect is removed
 * from the flush set, and if all remaining members have already reported
 * FLUSH_COMPLETED, we report our own completion to the coordinator.
 */
private void onSuspect(Address address) {
// handles FlushTest#testFlushWithCrashedFlushCoordinator
boolean amINeighbourOfCrashedFlushCoordinator = false;
ArrayList<Address> flushMembersCopy = null;
synchronized (sharedLock) {
boolean flushCoordinatorSuspected = address != null && address.equals(flushCoordinator);
if (flushCoordinatorSuspected) {
int indexOfCoordinator = flushMembers.indexOf(flushCoordinator);
int myIndex = flushMembers.indexOf(localAddress);
int diff = myIndex - indexOfCoordinator;
// NOTE(review): wrap-around case compares against size(), not size()-1 — verify intended
amINeighbourOfCrashedFlushCoordinator = (diff == 1 || (myIndex == 0 && indexOfCoordinator == flushMembers.size()));
if (amINeighbourOfCrashedFlushCoordinator) {
flushMembersCopy = new ArrayList<Address>(flushMembers);
}
}
}
if (amINeighbourOfCrashedFlushCoordinator) {
if (log.isDebugEnabled())
log.debug(localAddress + ": flush coordinator " + flushCoordinator + " suspected,"
+ " I am the neighbor, completing the flush ");
onResume(new Event(Event.RESUME, flushMembersCopy));
}
// handles FlushTest#testFlushWithCrashedNonCoordinators
boolean flushOkCompleted = false;
Message m = null;
long viewID = 0;
synchronized (sharedLock) {
suspected.add(address);
flushMembers.removeAll(suspected);
viewID = currentViewId();
flushOkCompleted = !flushCompletedMap.isEmpty()
&& flushCompletedMap.keySet().containsAll(flushMembers);
if (flushOkCompleted) {
m = new Message(flushCoordinator, localAddress, null);
}
if (log.isDebugEnabled())
log.debug(localAddress + ": suspect is " + address + ", completed " + flushOkCompleted
+ ", flushOkSet " + flushCompletedMap + ", flushMembers " + flushMembers);
}
if (flushOkCompleted) {
Digest digest = (Digest) down_prot.down(new Event(Event.GET_DIGEST));
m.putHeader(this.id, new FlushHeader(FlushHeader.FLUSH_COMPLETED, viewID)).setBuffer(marshal(null, digest));
down_prot.down(new Event(Event.MSG, m));
if (log.isDebugEnabled())
log.debug(localAddress + ": sent FLUSH_COMPLETED message to " + flushCoordinator);
}
}
/**
 * Serializes the participant set and an optional digest into a buffer for
 * transport in a FLUSH control message. Returns null on any marshalling
 * failure (best effort: callers treat a null buffer as "no payload").
 */
protected static Buffer marshal(final Collection<? extends Address> participants, final Digest digest) {
    final ByteArrayDataOutputStream stream = new ByteArrayDataOutputStream(512);
    try {
        Util.writeAddresses(participants, stream);
        Util.writeStreamable(digest, stream);
    } catch (Exception ex) {
        return null;
    }
    return stream.getBuffer();
}
/**
 * Deserializes the participant set and optional digest carried in a FLUSH
 * control message's payload.
 * @param buffer the raw message buffer; may be null (returns null)
 * @return the (participants, digest) tuple, or null when the buffer is null or
 *         unreadable (the error is logged, not thrown)
 */
protected Tuple<Collection<? extends Address>,Digest> readParticipantsAndDigest(byte[] buffer, int offset, int length) {
    if(buffer == null) return null;
    try {
        DataInput in=new ByteArrayDataInputStream(buffer, offset, length);
        Collection<? extends Address> participants=Util.readAddresses(in, ArrayList.class);
        Digest digest=(Digest)Util.readStreamable(Digest.class,in);
        return new Tuple<Collection<? extends Address>,Digest>(participants, digest);
    }
    catch(Exception ex) {
        // fixed typo in the error message ("particpants" -> "participants")
        log.error("%s: failed reading participants and digest from message: %s", localAddress, ex);
        return null;
    }
}
/**
 * Immutable outcome of the first flush phase: a success flag plus, on failure,
 * the causing exception. Completed into {@code flush_promise} and consumed by
 * {@code startFlush(List)}.
 */
private static class FlushStartResult {
    private final Boolean result;
    private final Exception failureCause;

    private FlushStartResult(Boolean result, Exception failureCause) {
        this.result = result;
        this.failureCause = failureCause;
    }

    /** The raw success flag. */
    public Boolean getResult() {
        return result;
    }

    /** True only when the flush phase explicitly failed. */
    public boolean failed() {
        return result == Boolean.FALSE;
    }

    /** The exception explaining a failure, or null on success. */
    public Exception getFailureCause() {
        return failureCause;
    }
}
/**
 * Header carried by all FLUSH control messages: a message type and the id of
 * the view in which the flush round was started.
 * Fixes: toString() was missing the closing "]" for START_FLUSH and
 * FLUSH_RECONCILE, and had no case for FLUSH_NOT_COMPLETED (it printed as
 * "unknown type (9)").
 */
public static class FlushHeader extends Header {
    public static final byte START_FLUSH = 0;
    public static final byte STOP_FLUSH = 2;
    public static final byte FLUSH_COMPLETED = 3;
    public static final byte ABORT_FLUSH = 5;
    public static final byte FLUSH_BYPASS = 6;
    public static final byte FLUSH_RECONCILE = 7;
    public static final byte FLUSH_RECONCILE_OK = 8;
    public static final byte FLUSH_NOT_COMPLETED = 9;

    protected byte type;
    // id of the view in which this flush round runs
    protected long viewID;

    /** Required no-arg constructor for wire deserialization; defaults to START_FLUSH. */
    public FlushHeader() {
        this(START_FLUSH, 0);
    }

    public FlushHeader(byte type) {
        this.type=type;
    }

    public FlushHeader(byte type, long viewID) {
        this(type);
        this.viewID=viewID;
    }

    public byte getType() {return type;}

    public long getViewID() {return viewID;}

    @Override
    public int size() {
        return Global.BYTE_SIZE + Global.LONG_SIZE; // type and viewId
    }

    public String toString() {
        switch (type) {
            case START_FLUSH:
                return "FLUSH[type=START_FLUSH,viewId=" + viewID + "]";
            case STOP_FLUSH:
                return "FLUSH[type=STOP_FLUSH,viewId=" + viewID + "]";
            case ABORT_FLUSH:
                return "FLUSH[type=ABORT_FLUSH,viewId=" + viewID + "]";
            case FLUSH_COMPLETED:
                return "FLUSH[type=FLUSH_COMPLETED,viewId=" + viewID + "]";
            case FLUSH_BYPASS:
                return "FLUSH[type=FLUSH_BYPASS,viewId=" + viewID + "]";
            case FLUSH_RECONCILE:
                return "FLUSH[type=FLUSH_RECONCILE,viewId=" + viewID + "]";
            case FLUSH_RECONCILE_OK:
                return "FLUSH[type=FLUSH_RECONCILE_OK,viewId=" + viewID + "]";
            case FLUSH_NOT_COMPLETED:
                return "FLUSH[type=FLUSH_NOT_COMPLETED,viewId=" + viewID + "]";
            default:
                return "[FLUSH: unknown type (" + type + ")]";
        }
    }

    public void writeTo(DataOutput out) throws Exception {
        out.writeByte(type);
        out.writeLong(viewID);
    }

    @SuppressWarnings("unchecked")
    public void readFrom(DataInput in) throws Exception {
        type = in.readByte();
        viewID = in.readLong();
    }
}
}
|
package org.jgroups.protocols.pbcast;
import org.jgroups.*;
import org.jgroups.annotations.*;
import org.jgroups.stack.Protocol;
import org.jgroups.util.Digest;
import org.jgroups.util.MessageBatch;
import org.jgroups.util.Promise;
import org.jgroups.util.Util;
import java.io.*;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
/**
* Flush, as it name implies, forces group members to flush their pending messages while blocking
* them to send any additional messages. The process of flushing acquiesces the group so that state
* transfer or a join can be done. It is also called stop-the-world model as nobody will be able to
* send messages while a flush is in process.
*
* <p>
* Flush is needed for:
* <p>
* (1) State transfer. When a member requests state transfer, the coordinator tells everyone to stop
* sending messages and waits for everyone's ack. Then it asks the application for its state and
* ships it back to the requester. After the requester has received and set the state successfully,
* the coordinator tells everyone to resume sending messages.
* <p>
* (2) View changes (e.g.a join). Before installing a new view V2, flushing would ensure that all
* messages *sent* in the current view V1 are indeed *delivered* in V1, rather than in V2 (in all
* non-faulty members). This is essentially Virtual Synchrony.
*
*
*
* @author Vladimir Blagojevic
* @since 2.4
*/
@MBean(description = "Flushes the cluster")
public class FLUSH extends Protocol {

    /** Pre-built result used whenever the first flush phase succeeds. */
    private static final FlushStartResult SUCCESS_START_FLUSH = new FlushStartResult(Boolean.TRUE,null);

    @Property(description = "Max time to keep channel blocked in flush. Default is 8000 msec")
    private long timeout = 8000;

    // Fixed typo in the description ("atttempt" -> "attempt").
    @Property(description = "Timeout (per attempt) to quiet the cluster during the first flush phase. Default is 2000 msec")
    private long start_flush_timeout = 2000;

    @Property(description = "Timeout to wait for UNBLOCK after STOP_FLUSH is issued. Default is 2000 msec")
    private long end_flush_timeout = 2000;

    // Description previously claimed a 3000 ms default while the field default is 2000 ms.
    @Property(description = "Retry timeout after an unsuccessful attempt to quiet the cluster (first flush phase). Default is 2000 msec")
    private long retry_timeout = 2000;

    @Property(description = "Reconciliation phase toggle. Default is true")
    private boolean enable_reconciliation = true;

    @Property(description="When set, FLUSH is bypassed, same effect as if FLUSH wasn't in the config at all")
    protected boolean bypass=false;

    // --- Flush statistics, maintained only when stats collection is enabled ---
    private long startFlushTime;
    private long totalTimeInFlush;
    private int numberOfFlushes;
    private double averageFlushDuration;

    /** View this member currently operates in; guarded by sharedLock. */
    @GuardedBy("sharedLock")
    private View currentView=new View(new ViewId(), new ArrayList<Address>());

    private Address localAddress;

    /**
     * Group member that requested FLUSH. For view installations flush coordinator is the group
     * coordinator For state transfer flush coordinator is the state requesting member
     */
    @GuardedBy("sharedLock")
    private Address flushCoordinator;

    /** Members participating in the current flush round. */
    @GuardedBy("sharedLock")
    private final List<Address> flushMembers=new ArrayList<Address>();

    // Counts installed views; used to detect this member's very first view.
    private final AtomicInteger viewCounter = new AtomicInteger(0);

    // Digests received in FLUSH_COMPLETED responses, keyed by responder.
    @GuardedBy("sharedLock")
    private final Map<Address, Digest> flushCompletedMap=new HashMap<Address, Digest>();

    // Members that answered with FLUSH_NOT_COMPLETED (concurrent flush collision).
    @GuardedBy("sharedLock")
    private final List<Address> flushNotCompletedMap=new ArrayList<Address>();

    @GuardedBy("sharedLock")
    private final Set<Address> suspected=new TreeSet<Address>();

    // Members that acknowledged the reconciliation phase.
    @GuardedBy("sharedLock")
    private final List<Address> reconcileOks=new ArrayList<Address>();

    /** Guards all of the flush bookkeeping state above. */
    private final Object sharedLock = new Object();

    // blockMutex/notBlockedDown implement the stop-the-world blocking of down() messages.
    private final ReentrantLock blockMutex = new ReentrantLock();
    private final Condition notBlockedDown = blockMutex.newCondition();

    /**
     * Indicates if FLUSH.down() is currently blocking threads Condition predicate associated with
     * blockMutex
     */
    @ManagedAttribute(description="Is message sending currently blocked")
    @GuardedBy("blockMutex")
    private volatile boolean isBlockingFlushDown = true;

    @GuardedBy("sharedLock")
    private boolean flushCompleted = false;

    // Promises hand results between the flush-requesting thread and protocol callbacks.
    private final Promise<FlushStartResult> flush_promise = new Promise<FlushStartResult>();
    private final Promise<Boolean> flush_unblock_promise = new Promise<Boolean>();

    // One-shot guards: single flush at a time, single BLOCK/UNBLOCK event per round.
    private final AtomicBoolean flushInProgress = new AtomicBoolean(false);
    private final AtomicBoolean sentBlock = new AtomicBoolean(false);
    private final AtomicBoolean sentUnblock = new AtomicBoolean(false);
/** @return max time (ms) to wait, per attempt, for the first flush phase */
public long getStartFlushTimeout() {
    return start_flush_timeout;
}

/** Sets the per-attempt timeout (ms) for the first flush phase. */
public void setStartFlushTimeout(long start_flush_timeout) {
    this.start_flush_timeout = start_flush_timeout;
}

/** @return retry timeout (ms) used after an unsuccessful first flush phase */
public long getRetryTimeout() {
    return retry_timeout;
}

/** Sets the retry timeout (ms) used after an unsuccessful first flush phase. */
public void setRetryTimeout(long retry_timeout) {
    this.retry_timeout = retry_timeout;
}
/**
 * Protocol lifecycle start: advertises flush support to the protocols above and
 * below via a CONFIG event, resets the view counter, and begins with down()
 * blocked (messages are held until the first flush round unblocks them).
 */
public void start() throws Exception {
    Map<String, Object> map = new HashMap<String, Object>();
    map.put("flush_supported", Boolean.TRUE);
    up_prot.up(new Event(Event.CONFIG, map));
    down_prot.down(new Event(Event.CONFIG, map));
    viewCounter.set(0);
    blockMutex.lock();
    try {
        isBlockingFlushDown = true;
    } finally {
        blockMutex.unlock();
    }
}
/**
 * Protocol lifecycle stop: discards all flush state. The current view is
 * replaced by an empty one and every bookkeeping collection is cleared.
 */
public void stop() {
    synchronized (sharedLock) {
        flushCoordinator = null;
        flushCompletedMap.clear();
        flushNotCompletedMap.clear();
        suspected.clear();
        flushMembers.clear();
        currentView = new View(new ViewId(), new ArrayList<Address>());
    }
}
/** @return average duration (ms) of a flush round, recomputed in onStopFlush() */
@ManagedAttribute
public double getAverageFlushDuration() {
    return averageFlushDuration;
}

/** @return cumulative time (ms) spent in flush at this member */
@ManagedAttribute
public long getTotalTimeInFlush() {
    return totalTimeInFlush;
}

/** @return number of flush rounds this member took part in */
@ManagedAttribute
public int getNumberOfFlushes() {
    return numberOfFlushes;
}

/**
 * Sets the bypass flag.
 * @return the previous value of the flag
 */
@ManagedOperation(description="Sets the bypass flag")
public boolean setBypass(boolean flag) {
    boolean ret=bypass;
    bypass=flag;
    return ret;
}
/** Requests a cluster-wide flush covering all members of the current view. */
@ManagedOperation(description = "Request cluster flush")
public void startFlush() {
    startFlush(new Event(Event.SUSPEND));
}

/** Extracts the participant list from a SUSPEND event and starts the flush. */
@SuppressWarnings("unchecked")
private void startFlush(Event evt) {
    List<Address> flushParticipants = (List<Address>) evt.getArg();
    startFlush(flushParticipants);
}
/**
 * Runs the first flush phase: sends START_FLUSH to the participants (via
 * onSuspend) and waits up to start_flush_timeout ms for the flush promise.
 *
 * @param flushParticipants members to flush; null means all members of the current view
 * @throws RuntimeException if a flush is already running, if a participant reported
 *         failure, or if responses did not arrive in time (in which case the flush
 *         is aborted via ABORT_FLUSH messages before throwing)
 */
private void startFlush(List<Address> flushParticipants) {
    if (!flushInProgress.get()) {
        flush_promise.reset();
        synchronized(sharedLock) {
            if(flushParticipants == null)
                flushParticipants=new ArrayList<Address>(currentView.getMembers());
        }
        onSuspend(flushParticipants);
        try {
            FlushStartResult r = flush_promise.getResultWithTimeout(start_flush_timeout);
            if(r.failed())
                throw new RuntimeException(r.getFailureCause());
        } catch (TimeoutException e) {
            // Work out which members never responded, purely for the error message.
            Set<Address> missingMembers = new HashSet<Address>();
            synchronized(sharedLock) {
                missingMembers.addAll(flushMembers);
                missingMembers.removeAll(flushCompletedMap.keySet());
            }
            // Tell every participant to abort this flush attempt.
            rejectFlush(flushParticipants, currentViewId());
            throw new RuntimeException(localAddress
                    + " timed out waiting for flush responses from "
                    + missingMembers
                    + " after "
                    + start_flush_timeout
                    + " ms. Rejected flush to participants "
                    + flushParticipants,e);
        }
    }
    else {
        throw new RuntimeException("Flush attempt is in progress");
    }
}
/** Ends a running flush: a RESUME event travels down and triggers STOP_FLUSH. */
@ManagedOperation(description = "Request end of flush in a cluster")
public void stopFlush() {
    down(new Event(Event.RESUME));
}
/**
 * Intercepts events travelling down the stack. Multicast messages are held
 * while a flush is in progress (unless tagged FLUSH_BYPASS); unicasts always
 * pass. SUSPEND/RESUME start and stop a flush round; CONNECT variants deliver
 * BLOCK to the application before connecting.
 */
public Object down(Event evt) {
    switch (evt.getType()) {
        case Event.MSG:
            if(bypass)
                break;
            Message msg = (Message) evt.getArg();
            Address dest = msg.getDest();
            if (dest == null) { // mcasts
                FlushHeader fh = (FlushHeader) msg.getHeader(this.id);
                if (fh != null && fh.type == FlushHeader.FLUSH_BYPASS) {
                    return down_prot.down(evt);
                } else {
                    // Park the sender until the flush round ends (or self-unblocks).
                    blockMessageDuringFlush();
                }
            } else {
                // unicasts are irrelevant in virtual synchrony, let them through
                return down_prot.down(evt);
            }
            break;
        case Event.CONNECT:
        case Event.CONNECT_USE_FLUSH:
            return handleConnect(evt,true);
        case Event.CONNECT_WITH_STATE_TRANSFER:
        case Event.CONNECT_WITH_STATE_TRANSFER_USE_FLUSH:
            return handleConnect(evt, false);
        case Event.SUSPEND:
            startFlush(evt);
            return null;
        // only for testing, see FLUSH#testFlushWithCrashedFlushCoordinator
        case Event.SUSPEND_BUT_FAIL:
            // Starts the first phase but deliberately never waits for the result.
            if (!flushInProgress.get()) {
                flush_promise.reset();
                ArrayList<Address> flushParticipants = null;
                synchronized (sharedLock) {
                    flushParticipants = new ArrayList<Address>(currentView.getMembers());
                }
                onSuspend(flushParticipants);
            }
            break;
        case Event.RESUME:
            onResume(evt);
            return null;
        case Event.SET_LOCAL_ADDRESS:
            localAddress = (Address) evt.getArg();
            break;
    }
    return down_prot.down(evt);
}
/**
 * Handles the CONNECT event family: delivers BLOCK to the application (at most
 * once per round), forwards the connect down the stack, and optionally waits
 * for the UNBLOCK that ends the connect-time flush.
 */
private Object handleConnect(Event evt, boolean waitForUnblock) {
    if (sentBlock.compareAndSet(false, true))
        sendBlockUpToChannel();

    Object result = down_prot.down(evt);
    // A Throwable result means the connect failed; re-arm the BLOCK guard so a
    // later connect attempt sends BLOCK again.
    if (result instanceof Throwable)
        sentBlock.set(false);

    if (waitForUnblock)
        waitForUnblock();
    return result;
}
/**
 * Blocks the calling (sender) thread while isBlockingFlushDown is set. With a
 * positive timeout the thread stops waiting after that many ms, unblocks itself,
 * completes the flush promise and wakes the other waiters, so a stuck flush
 * cannot block senders forever.
 */
private void blockMessageDuringFlush() {
    boolean shouldSuspendByItself = false;
    blockMutex.lock();
    try {
        while (isBlockingFlushDown) {
            if (log.isDebugEnabled())
                log.debug(localAddress + ": blocking for " + (timeout <= 0 ? "ever" : timeout + "ms"));
            if(timeout <= 0) {
                notBlockedDown.await();
            }
            else {
                // await() returns false on timeout -> self-unblock below
                shouldSuspendByItself = !notBlockedDown.await(timeout, TimeUnit.MILLISECONDS);
            }
            if (shouldSuspendByItself) {
                isBlockingFlushDown = false;
                log.warn(localAddress + ": unblocking after " + timeout + "ms");
                flush_promise.setResult(new FlushStartResult(Boolean.TRUE,null));
                notBlockedDown.signalAll();
            }
        }
    } catch (InterruptedException e) {
        // Preserve the interrupt status for callers further up the stack.
        Thread.currentThread().interrupt();
    } finally {
        blockMutex.unlock();
    }
}
/**
 * Intercepts events travelling up the stack. FLUSH protocol messages are
 * consumed here and dispatched by header type; application multicasts are
 * swallowed while a flush is pending (unicasts always pass). For view changes
 * the view is delivered to the application first, then flush bookkeeping runs.
 */
public Object up(Event evt) {
    switch (evt.getType()) {
        case Event.MSG:
            if(bypass)
                break;
            Message msg = (Message) evt.getArg();
            final FlushHeader fh = (FlushHeader) msg.getHeader(this.id);
            if (fh != null) {
                switch (fh.type) {
                    case FlushHeader.FLUSH_BYPASS:
                        return up_prot.up(evt);
                    case FlushHeader.START_FLUSH:
                        // Only participants (or the starter itself) respond.
                        Collection<Address> fp = fh.flushParticipants;
                        boolean amIParticipant = (fp != null && fp.contains(localAddress))
                                || msg.getSrc().equals(localAddress);
                        if (amIParticipant) {
                            handleStartFlush(msg, fh);
                        } else {
                            if (log.isDebugEnabled())
                                log.debug(localAddress + ": received START_FLUSH but I'm not flush participant, not responding");
                        }
                        break;
                    case FlushHeader.FLUSH_RECONCILE:
                        handleFlushReconcile(msg, fh);
                        break;
                    case FlushHeader.FLUSH_RECONCILE_OK:
                        onFlushReconcileOK(msg);
                        break;
                    case FlushHeader.STOP_FLUSH:
                        // An empty/missing participant list means everybody stops.
                        Collection<Address> stopFlushParticipants = fh.flushParticipants;
                        boolean amIStopFlushParticipant = stopFlushParticipants == null
                                || stopFlushParticipants.size() == 0
                                || stopFlushParticipants.contains(localAddress)
                                || msg.getSrc().equals(localAddress);
                        if (amIStopFlushParticipant) {
                            onStopFlush();
                        } else {
                            if (log.isDebugEnabled())
                                log.debug("Received STOP_FLUSH at " + localAddress + " but I am not flush participant, not responding");
                        }
                        break;
                    case FlushHeader.ABORT_FLUSH:
                        Collection<Address> flushParticipants = fh.flushParticipants;
                        boolean participant = flushParticipants != null && flushParticipants.contains(localAddress);
                        if (log.isDebugEnabled()) {
                            log.debug(localAddress + ": received ABORT_FLUSH from flush coordinator " + msg.getSrc()
                                    + ", am I flush participant=" + participant);
                        }
                        if (participant)
                            resetForNextFlush();
                        break;
                    case FlushHeader.FLUSH_NOT_COMPLETED:
                        if (log.isDebugEnabled()) {
                            log.debug(localAddress + ": received FLUSH_NOT_COMPLETED from " + msg.getSrc());
                        }
                        boolean flushCollision = false;
                        synchronized (sharedLock) {
                            flushNotCompletedMap.add(msg.getSrc());
                            // collision: some members already completed while this one could not
                            flushCollision = !flushCompletedMap.isEmpty();
                            if (flushCollision) {
                                flushNotCompletedMap.clear();
                                flushCompletedMap.clear();
                            }
                        }
                        if (log.isDebugEnabled())
                            log.debug(localAddress + ": received FLUSH_NOT_COMPLETED from " + msg.getSrc() +
                                    " collision=" + flushCollision);
                        // reject flush if we have at least one OK and at least one FAIL
                        if (flushCollision) {
                            Runnable r = new Runnable() {
                                public void run() {
                                    rejectFlush(fh.flushParticipants, fh.viewID);
                                }
                            };
                            new Thread(r).start();
                        }
                        // however, flush should fail/retry as soon as one FAIL is received
                        flush_promise.setResult(new FlushStartResult(Boolean.FALSE, new Exception("Flush failed for " + msg.getSrc())));
                        break;
                    case FlushHeader.FLUSH_COMPLETED:
                        // Stale responses from a previous view are ignored.
                        if (isCurrentFlushMessage(fh))
                            onFlushCompleted(msg.getSrc(), fh);
                        break;
                }
                return null; // do not pass FLUSH msg up
            } else {
                // lets wait for STOP_FLUSH to complete before we start allowing message up
                if (msg.getDest() != null)
                    return up_prot.up(evt); // allow unicasts to pass, virtual synchrony only applies to multicasts
            }
            break;
        case Event.VIEW_CHANGE:
            // JGRP-618: FLUSH coordinator transfer reorders block/unblock/view events in applications (TCP stack only)
            up_prot.up(evt);
            View newView = (View) evt.getArg();
            boolean coordinatorLeft = onViewChange(newView);
            boolean singletonMember = newView.size() == 1 && newView.containsMember(localAddress);
            boolean isThisOurFirstView = viewCounter.addAndGet(1) == 1;
            // if this is channel's first view and its the only member of the group - no flush
            // was run but the channel application should still receive BLOCK,VIEW,UNBLOCK
            // also if coordinator of flush left each member should run stopFlush individually.
            if ((isThisOurFirstView && singletonMember) || coordinatorLeft)
                onStopFlush();
            return null;
        case Event.TMP_VIEW:
            // A temporary view that excludes us still updates flush bookkeeping.
            View tmpView = (View) evt.getArg();
            if (!tmpView.containsMember(localAddress))
                onViewChange(tmpView);
            break;
        case Event.SUSPECT:
            onSuspect((Address) evt.getArg());
            break;
        case Event.SUSPEND:
            startFlush(evt);
            return null;
        case Event.RESUME:
            onResume(evt);
            return null;
        case Event.UNBLOCK:
            flush_unblock_promise.setResult(Boolean.TRUE);
            break;
    }
    return up_prot.up(evt);
}
/**
 * Batched delivery: FLUSH-tagged messages and unicasts are pulled out of the
 * batch and handled individually via the single-message path; whatever is left
 * of the batch (pending multicasts) is passed up together.
 */
public void up(MessageBatch batch) {
    if(bypass) {
        up_prot.up(batch);
        return;
    }
    for(Message msg: batch) {
        if(msg.getHeader(id) != null) {
            batch.remove(msg);
            up(new Event(Event.MSG, msg)); // let the existing code handle this
        }
        else {
            if(msg.getDest() != null) { // skip unicast messages, process them right away
                batch.remove(msg);
                up_prot.up(new Event(Event.MSG, msg));
            }
        }
    }
    if(!batch.isEmpty())
        up_prot.up(batch);
}
/**
 * Waits up to end_flush_timeout ms for the UNBLOCK event that ends a flush;
 * logs a warning on timeout. The promise is reset either way so the next
 * flush round starts clean.
 */
private void waitForUnblock() {
    try {
        flush_unblock_promise.getResultWithTimeout(end_flush_timeout);
    } catch (TimeoutException t) {
        if (log.isWarnEnabled())
            log.warn(localAddress + ": waiting for UNBLOCK timed out after " + end_flush_timeout + " ms");
    } finally {
        flush_unblock_promise.reset();
    }
}
/**
 * Records a FLUSH_RECONCILE_OK ack; once every flush member has acknowledged,
 * the reconciliation phase is over and the flush promise is completed.
 */
private void onFlushReconcileOK(Message msg) {
    if (log.isDebugEnabled())
        log.debug(localAddress + ": received reconcile ok from " + msg.getSrc());

    synchronized (sharedLock) {
        reconcileOks.add(msg.getSrc());
        boolean allAcked = reconcileOks.size() >= flushMembers.size();
        if (allAcked) {
            flush_promise.setResult(SUCCESS_START_FLUSH);
            if (log.isDebugEnabled())
                log.debug(localAddress + ": all FLUSH_RECONCILE_OK received");
        }
    }
}
/**
 * Participant side of the reconciliation phase: hands the coordinator's digest
 * to NAKACK (via REBROADCAST) so missing multicasts are fetched, then replies
 * with FLUSH_RECONCILE_OK.
 */
private void handleFlushReconcile(Message msg, FlushHeader fh) {
    Address requester = msg.getSrc();
    Digest reconcileDigest = fh.digest;

    if (log.isDebugEnabled())
        log.debug(localAddress + ": received FLUSH_RECONCILE, passing digest to NAKACK "
                + reconcileDigest);

    // Let NAKACK reconcile missing messages; this call returns once done.
    down_prot.down(new Event(Event.REBROADCAST, reconcileDigest));

    if (log.isDebugEnabled())
        log.debug(localAddress + ": returned from FLUSH_RECONCILE, "
                + " sending RECONCILE_OK to " + requester);

    FlushHeader okHeader = new FlushHeader(FlushHeader.FLUSH_RECONCILE_OK);
    Message reconcileOk = new Message(requester)
            .setFlag(Message.Flag.OOB, Message.Flag.INTERNAL)
            .putHeader(this.id, okHeader);
    down_prot.down(new Event(Event.MSG, reconcileOk));
}
/**
 * Entry point for a received START_FLUSH. If no flush is running this member
 * joins the requester's round; otherwise the requester is told to back off
 * with FLUSH_NOT_COMPLETED.
 */
private void handleStartFlush(Message msg, FlushHeader fh) {
    Address flushRequester = msg.getSrc();

    if (flushInProgress.compareAndSet(false, true)) {
        // We won the race: participate in the requester's flush round.
        synchronized (sharedLock) {
            flushCoordinator = flushRequester;
        }
        onStartFlush(flushRequester, fh);
        return;
    }

    // Another flush is already in progress: reject this one.
    FlushHeader fhr = new FlushHeader(FlushHeader.FLUSH_NOT_COMPLETED, fh.viewID, fh.flushParticipants);
    Message response = new Message(flushRequester).putHeader(this.id, fhr);
    down_prot.down(new Event(Event.MSG, response));
    if (log.isDebugEnabled())
        log.debug(localAddress + ": received START_FLUSH, responded with FLUSH_NOT_COMPLETED to " + flushRequester);
}
/**
 * Aborts a flush round by sending ABORT_FLUSH (OOB) to each participant;
 * null participant lists and null entries are silently skipped.
 */
private void rejectFlush(Collection<? extends Address> participants, long viewId) {
    if (participants == null)
        return;
    for (Address member : participants) {
        if (member == null)
            continue;
        Message reject = new Message(member, localAddress, null)
                .setFlag(Message.Flag.OOB, Message.Flag.INTERNAL)
                .putHeader(this.id, new FlushHeader(FlushHeader.ABORT_FLUSH, viewId, participants));
        down_prot.down(new Event(Event.MSG, reject));
    }
}
/** Advertises the events (SUSPEND/RESUME) this protocol handles for callers below. */
public List<Integer> providedDownServices() {
    return new ArrayList<Integer>(Arrays.asList(Event.SUSPEND, Event.RESUME));
}
/** Delivers BLOCK to the application and re-arms the UNBLOCK one-shot guard. */
private void sendBlockUpToChannel() {
    this.up(new Event(Event.BLOCK));
    sentUnblock.set(false);
}

/** Delivers UNBLOCK to the application and re-arms the BLOCK one-shot guard. */
private void sendUnBlockUpToChannel() {
    sentBlock.set(false);
    this.up(new Event(Event.UNBLOCK));
}
/** @return true if the header's view id matches this member's current view id */
private boolean isCurrentFlushMessage(FlushHeader fh) {
    return fh.viewID == currentViewId();
}
/** @return the id of the current view, or -1 if no view id is set yet */
private long currentViewId() {
    synchronized (sharedLock) {
        ViewId vid = currentView.getVid();
        return vid != null ? vid.getId() : -1;
    }
}
/**
 * Installs a new view into the flush bookkeeping: suspects no longer in the
 * view are dropped and the view is swapped under sharedLock.
 *
 * @return true if the previous view's creator (coordinator) is absent from the
 *         new non-empty view, i.e. the coordinator left or crashed
 */
private boolean onViewChange(View view) {
    boolean coordinatorLeft;
    synchronized (sharedLock) {
        suspected.retainAll(view.getMembers());
        View previousView = currentView;
        currentView = view;
        coordinatorLeft = !previousView.getMembers().isEmpty()
                && !view.getMembers().isEmpty()
                && !view.containsMember(previousView.getCreator());
    }
    if (log.isDebugEnabled())
        log.debug(localAddress + ": installing view " + view);
    return coordinatorLeft;
}
/**
 * Handles STOP_FLUSH: updates flush statistics (when stats are enabled), clears
 * all per-round state, unblocks down() and delivers a single UNBLOCK event.
 */
private void onStopFlush() {
    if (stats && startFlushTime > 0) {
        long stopFlushTime = System.currentTimeMillis();
        totalTimeInFlush += (stopFlushTime - startFlushTime);
        if (numberOfFlushes > 0) {
            averageFlushDuration = totalTimeInFlush / (double) numberOfFlushes;
        }
        startFlushTime = 0;
    }
    if (log.isDebugEnabled())
        log.debug(localAddress
                + ": received STOP_FLUSH, unblocking FLUSH.down() and sending UNBLOCK up");
    resetForNextFlush();
    if (sentUnblock.compareAndSet(false, true)) {
        // ensures that we do not repeat unblock event
        sendUnBlockUpToChannel();
    }
}
/**
 * Clears all per-round flush state. Order matters: bookkeeping is cleared under
 * sharedLock first, then blocked sender threads are released, and
 * flushInProgress is reset last so a new round can only start once this member
 * is fully unblocked.
 */
private void resetForNextFlush() {
    synchronized (sharedLock) {
        flushCompletedMap.clear();
        flushNotCompletedMap.clear();
        flushMembers.clear();
        suspected.clear();
        flushCoordinator = null;
        flushCompleted = false;
    }
    blockMutex.lock();
    try {
        isBlockingFlushDown = false;
        notBlockedDown.signalAll();
    } finally {
        blockMutex.unlock();
    }
    flushInProgress.set(false);
}
/**
 * Starts the flush protocol
 * @param members List of participants in the flush protocol. Guaranteed to be non-null
 */
private void onSuspend(final List<Address> members) {
    Message msg = null;
    Collection<Address> participantsInFlush = null;
    synchronized (sharedLock) {
        flushCoordinator = localAddress;
        // start FLUSH only on group members that we need to flush
        // NOTE(review): retainAll mutates the caller-supplied list in place; the
        // caller's timeout path (startFlush) then rejects against this trimmed
        // list -- confirm before changing this to a defensive copy.
        participantsInFlush = members;
        participantsInFlush.retainAll(currentView.getMembers());
        flushMembers.clear();
        flushMembers.addAll(participantsInFlush);
        flushMembers.removeAll(suspected);
        msg = new Message(null, localAddress, null)
                .putHeader(this.id, new FlushHeader(FlushHeader.START_FLUSH, currentViewId(), participantsInFlush));
    }
    if (participantsInFlush.isEmpty()) {
        // Nothing to flush: report immediate success.
        flush_promise.setResult(SUCCESS_START_FLUSH);
    } else {
        down_prot.down(new Event(Event.MSG, msg));
        if (log.isDebugEnabled())
            log.debug(localAddress + ": flush coordinator "
                    + " is starting FLUSH with participants " + participantsInFlush);
    }
}
/**
 * Handles RESUME: sends STOP_FLUSH either as one multicast (no explicit member
 * list) or point-to-point to each listed member, then -- if this member took
 * part in the flush -- waits for the resulting UNBLOCK.
 */
@SuppressWarnings("unchecked")
private void onResume(Event evt) {
    List<Address> members = (List<Address>) evt.getArg();
    long viewID = currentViewId();
    boolean isParticipant = false;
    synchronized(sharedLock) {
        isParticipant = flushMembers.contains(localAddress) || (members != null && members.contains(localAddress));
    }
    if (members == null || members.isEmpty()) {
        Message msg = new Message(null, localAddress, null);
        // Cannot be OOB since START_FLUSH is not OOB
        // we have to FIFO order two subsequent flushes
        if (log.isDebugEnabled())
            log.debug(localAddress + ": received RESUME, sending STOP_FLUSH to all");
        msg.putHeader(this.id, new FlushHeader(FlushHeader.STOP_FLUSH, viewID, flushMembers));
        down_prot.down(new Event(Event.MSG, msg));
    } else {
        for (Address address : members) {
            Message msg = new Message(address, localAddress, null);
            // Cannot be OOB since START_FLUSH is not OOB
            // we have to FIFO order two subsequent flushes
            if (log.isDebugEnabled())
                log.debug(localAddress + ": received RESUME, sending STOP_FLUSH to " + address);
            msg.putHeader(this.id, new FlushHeader(FlushHeader.STOP_FLUSH, viewID));
            down_prot.down(new Event(Event.MSG, msg));
        }
    }
    if(isParticipant)
        waitForUnblock();
}
/**
 * Participant-side handling of START_FLUSH: records coordinator and member
 * list, delivers BLOCK to the application (at most once), blocks down(), and
 * answers the flush starter with FLUSH_COMPLETED carrying this member's digest.
 */
private void onStartFlush(Address flushStarter, FlushHeader fh) {
    if (stats) {
        startFlushTime = System.currentTimeMillis();
        numberOfFlushes += 1;
    }
    boolean proceed = false;
    boolean amIFlushInitiator = false;
    synchronized (sharedLock) {
        amIFlushInitiator = flushStarter.equals(localAddress);
        if(!amIFlushInitiator){
            // The initiator already populated these fields in onSuspend().
            flushCoordinator = flushStarter;
            flushMembers.clear();
            if (fh.flushParticipants != null) {
                flushMembers.addAll(fh.flushParticipants);
            }
            flushMembers.removeAll(suspected);
        }
        proceed = flushMembers.contains(localAddress);
    }
    if (proceed) {
        if (sentBlock.compareAndSet(false, true)) {
            // ensures that we do not repeat block event
            // and that we do not send block event to non participants
            sendBlockUpToChannel();
            blockMutex.lock();
            try {
                isBlockingFlushDown = true;
            } finally {
                blockMutex.unlock();
            }
        } else {
            if (log.isDebugEnabled())
                log.debug(localAddress + ": received START_FLUSH, but not sending BLOCK up");
        }
        // Report our delivery digest back to the flush starter.
        Digest digest = (Digest) down_prot.down(new Event(Event.GET_DIGEST));
        FlushHeader fhr = new FlushHeader(FlushHeader.FLUSH_COMPLETED, fh.viewID,fh.flushParticipants);
        fhr.addDigest(digest);
        Message msg = new Message(flushStarter).putHeader(this.id, fhr);
        down_prot.down(new Event(Event.MSG, msg));
        if (log.isDebugEnabled())
            log.debug(localAddress + ": received START_FLUSH, responded with FLUSH_COMPLETED to " + flushStarter);
    }
}
/**
 * Coordinator-side handling of a FLUSH_COMPLETED response. When responses from
 * all flush members are in, the flush promise is completed -- unless the
 * collected digests disagree and reconciliation is enabled, in which case a
 * FLUSH_RECONCILE phase is started first. A concurrent FLUSH_NOT_COMPLETED
 * (collision) aborts the round instead.
 */
private void onFlushCompleted(Address address, final FlushHeader header) {
    Message msg = null;
    boolean needsReconciliationPhase = false;
    boolean collision = false;
    Digest digest = header.digest;
    synchronized (sharedLock) {
        flushCompletedMap.put(address, digest);
        flushCompleted = flushCompletedMap.size() >= flushMembers.size()
                && !flushMembers.isEmpty()
                && flushCompletedMap.keySet().containsAll(flushMembers);
        collision = !flushNotCompletedMap.isEmpty();
        if (log.isDebugEnabled())
            log.debug(localAddress + ": FLUSH_COMPLETED from " + address + ", completed "
                    + flushCompleted + ", flushMembers " + flushMembers
                    + ", flushCompleted " + flushCompletedMap.keySet());
        needsReconciliationPhase = enable_reconciliation && flushCompleted && hasVirtualSynchronyGaps();
        if (needsReconciliationPhase) {
            // Members saw different multicasts: distribute the merged max digest
            // so everybody catches up before the flush succeeds.
            Digest d = findHighestSequences();
            msg = new Message().setFlag(Message.Flag.OOB);
            FlushHeader fh = new FlushHeader(FlushHeader.FLUSH_RECONCILE, currentViewId(),flushMembers);
            reconcileOks.clear();
            fh.addDigest(d);
            msg.putHeader(this.id, fh);
            if (log.isDebugEnabled())
                log.debug(localAddress
                        + ": reconciling flush mebers due to virtual synchrony gap, digest is "
                        + d + " flush members are " + flushMembers);
            flushCompletedMap.clear();
        } else if (flushCompleted) {
            flushCompletedMap.clear();
        } else if (collision) {
            flushNotCompletedMap.clear();
            flushCompletedMap.clear();
        }
    }
    // NOTE(review): the flushCompleted field is re-read here outside the
    // synchronized block; a racing update could change which branch runs --
    // confirm this is intended before restructuring.
    if (needsReconciliationPhase) {
        down_prot.down(new Event(Event.MSG, msg));
    } else if (flushCompleted) {
        flush_promise.setResult(SUCCESS_START_FLUSH);
        if (log.isDebugEnabled())
            log.debug(localAddress + ": all FLUSH_COMPLETED received");
    } else if (collision) {
        // reject flush if we have at least one OK and at least one FAIL
        Runnable r = new Runnable() {
            public void run() {
                rejectFlush(header.flushParticipants, header.viewID);
            }
        };
        new Thread(r).start();
    }
}
/**
 * Compares the digests collected from all flush members against the first one;
 * a non-null difference means members disagree about delivered multicasts
 * (a virtual synchrony gap). Caller holds sharedLock and guarantees the map is
 * non-empty.
 */
private boolean hasVirtualSynchronyGaps() {
    List<Digest> digests = new ArrayList<Digest>(flushCompletedMap.values());
    Digest reference = digests.get(0);
    for (int i = 1; i < digests.size(); i++) {
        if (reference.difference(digests.get(i)) != null)
            return true;
    }
    return false;
}
/**
 * Folds all collected digests into a single digest holding the highest sequence
 * numbers seen anywhere. Caller holds sharedLock and guarantees the map is
 * non-empty.
 */
private Digest findHighestSequences() {
    Iterator<Digest> it = flushCompletedMap.values().iterator();
    Digest result = it.next();
    while (it.hasNext())
        result = result.highestSequence(it.next());
    return result;
}
/**
 * Handles a SUSPECT event for {@code address}. Two concerns:
 * (1) the flush coordinator crashed: the member that directly follows the
 *     coordinator in flushMembers (treated as a ring) completes the flush;
 * (2) a non-coordinator participant crashed: if it was the only outstanding
 *     responder, report FLUSH_COMPLETED to the coordinator on its behalf.
 */
private void onSuspect(Address address) {
    // handles FlushTest#testFlushWithCrashedFlushCoordinator
    boolean amINeighbourOfCrashedFlushCoordinator = false;
    ArrayList<Address> flushMembersCopy = null;
    synchronized (sharedLock) {
        boolean flushCoordinatorSuspected = address != null && address.equals(flushCoordinator);
        if (flushCoordinatorSuspected) {
            int indexOfCoordinator = flushMembers.indexOf(flushCoordinator);
            int myIndex = flushMembers.indexOf(localAddress);
            int diff = myIndex - indexOfCoordinator;
            // I am the neighbour if I directly follow the coordinator, or --
            // wrapping around -- the coordinator is last and I am first.
            // BUGFIX: indexOf() can never return flushMembers.size(), so the
            // wrap-around comparison against size() never matched; compare
            // against size()-1 instead (size()>1 guards the degenerate
            // single-member list, where index 0 is the coordinator itself).
            amINeighbourOfCrashedFlushCoordinator =
                    diff == 1 || (myIndex == 0 && flushMembers.size() > 1
                                  && indexOfCoordinator == flushMembers.size() - 1);
            if (amINeighbourOfCrashedFlushCoordinator) {
                flushMembersCopy = new ArrayList<Address>(flushMembers);
            }
        }
    }
    if (amINeighbourOfCrashedFlushCoordinator) {
        if (log.isDebugEnabled())
            log.debug(localAddress + ": flush coordinator " + flushCoordinator + " suspected,"
                    + " I am the neighbor, completing the flush ");
        onResume(new Event(Event.RESUME, flushMembersCopy));
    }
    // handles FlushTest#testFlushWithCrashedNonCoordinators
    boolean flushOkCompleted = false;
    Message m = null;
    long viewID = 0;
    synchronized (sharedLock) {
        suspected.add(address);
        flushMembers.removeAll(suspected);
        viewID = currentViewId();
        // Completed if every remaining member has already responded.
        flushOkCompleted = !flushCompletedMap.isEmpty()
                && flushCompletedMap.keySet().containsAll(flushMembers);
        if (flushOkCompleted) {
            m = new Message(flushCoordinator, localAddress, null);
        }
        if (log.isDebugEnabled())
            log.debug(localAddress + ": suspect is " + address + ", completed " + flushOkCompleted
                    + ", flushOkSet " + flushCompletedMap + ", flushMembers " + flushMembers);
    }
    if (flushOkCompleted) {
        Digest digest = (Digest) down_prot.down(new Event(Event.GET_DIGEST));
        FlushHeader fh = new FlushHeader(FlushHeader.FLUSH_COMPLETED, viewID);
        fh.addDigest(digest);
        m.putHeader(this.id, fh);
        down_prot.down(new Event(Event.MSG, m));
        if (log.isDebugEnabled())
            log.debug(localAddress + ": sent FLUSH_COMPLETED message to " + flushCoordinator);
    }
}
/**
 * Immutable outcome of the first flush phase: a Boolean success flag plus an
 * optional failure cause (set when a member reported FLUSH_NOT_COMPLETED).
 */
private static class FlushStartResult {
    private final Boolean result;
    private final Exception failureCause;

    private FlushStartResult(Boolean result, Exception failureCause) {
        this.result = result;
        this.failureCause = failureCause;
    }

    public Boolean getResult() {
        return result;
    }

    // Reference comparison is sufficient: callers only pass Boolean.TRUE/FALSE.
    public boolean failed(){
        return result == Boolean.FALSE;
    }

    public Exception getFailureCause() {
        return failureCause;
    }
}
/**
 * Wire header carried by all FLUSH protocol messages: a message type, the view
 * id the flush runs in, the participant list (START/STOP/ABORT variants) and,
 * for the reconciliation phase, a digest.
 */
public static class FlushHeader extends Header {
    // Header types. Values 1 and 4 are unused (legacy) and must not be
    // reassigned, to stay wire-compatible.
    public static final byte START_FLUSH = 0;
    public static final byte STOP_FLUSH = 2;
    public static final byte FLUSH_COMPLETED = 3;
    public static final byte ABORT_FLUSH = 5;
    public static final byte FLUSH_BYPASS = 6;
    public static final byte FLUSH_RECONCILE = 7;
    public static final byte FLUSH_RECONCILE_OK = 8;
    public static final byte FLUSH_NOT_COMPLETED = 9;

    byte type;
    long viewID;                           // view id the flush round belongs to
    Collection<Address> flushParticipants; // members taking part; may be null
    Digest digest = null;                  // only set for the reconcile phase

    public FlushHeader() {
        this(START_FLUSH, 0);
    } // used for externalization

    public FlushHeader(byte type) {
        this(type, 0);
    }

    public FlushHeader(byte type, long viewID) {
        this(type, viewID, null);
    }

    public FlushHeader(byte type, long viewID, Collection<? extends Address> flushView) {
        this.type = type;
        this.viewID = viewID;
        if (flushView != null) {
            // Defensive copy: the header owns its participant list.
            this.flushParticipants = new ArrayList<Address>(flushView);
        }
    }

    @Override
    public int size() {
        int retval = Global.BYTE_SIZE; // type
        retval += Global.LONG_SIZE; // viewID
        retval += Util.size(flushParticipants);
        retval += Global.BYTE_SIZE; // presence for digest
        if (digest != null) {
            retval += digest.serializedSize();
        }
        return retval;
    }

    public void addDigest(Digest digest) {
        this.digest = digest;
    }

    public String toString() {
        switch (type) {
            case START_FLUSH:
                return "FLUSH[type=START_FLUSH,viewId=" + viewID + ",members="
                        + flushParticipants + "]";
            case STOP_FLUSH:
                return "FLUSH[type=STOP_FLUSH,viewId=" + viewID + "]";
            case ABORT_FLUSH:
                return "FLUSH[type=ABORT_FLUSH,viewId=" + viewID + "]";
            case FLUSH_COMPLETED:
                return "FLUSH[type=FLUSH_COMPLETED,viewId=" + viewID + "]";
            case FLUSH_BYPASS:
                return "FLUSH[type=FLUSH_BYPASS,viewId=" + viewID + "]";
            case FLUSH_RECONCILE:
                return "FLUSH[type=FLUSH_RECONCILE,viewId=" + viewID + ",digest=" + digest
                        + "]";
            case FLUSH_RECONCILE_OK:
                return "FLUSH[type=FLUSH_RECONCILE_OK,viewId=" + viewID + "]";
            // BUGFIX: this case was missing and FLUSH_NOT_COMPLETED headers
            // printed as "unknown type (9)".
            case FLUSH_NOT_COMPLETED:
                return "FLUSH[type=FLUSH_NOT_COMPLETED,viewId=" + viewID + "]";
            default:
                return "[FLUSH: unknown type (" + type + ")]";
        }
    }

    public void writeTo(DataOutput out) throws Exception {
        out.writeByte(type);
        out.writeLong(viewID);
        Util.writeAddresses(flushParticipants, out);
        Util.writeStreamable(digest, out); // writes a presence byte + body
    }

    @SuppressWarnings("unchecked")
    public void readFrom(DataInput in) throws Exception {
        type = in.readByte();
        viewID = in.readLong();
        flushParticipants = (Collection<Address>) Util.readAddresses(in, ArrayList.class);
        digest = (Digest) Util.readStreamable(Digest.class, in);
    }
}
}
|
package org.pbrt.core;
/**
 * Torrance-Sparrow microfacet BRDF for glossy reflection: reflectance R scaled
 * by the microfacet distribution D, shadowing-masking G and Fresnel term F.
 */
public class MicrofacetReflection extends BxDF {

    public MicrofacetReflection(Spectrum R, MicrofacetDistribution distribution, Fresnel fresnel) {
        super(BSDF_REFLECTION | BSDF_GLOSSY);
        this.R = new Spectrum(R);
        this.distribution = distribution;
        this.fresnel = fresnel;
    }

    /**
     * Evaluates the BRDF: R * D(wh) * G(wo,wi) * F / (4 * cosThetaI * cosThetaO).
     */
    @Override
    public Spectrum f(Vector3f wo, Vector3f wi) {
        float absCosO = Reflection.AbsCosTheta(wo);
        float absCosI = Reflection.AbsCosTheta(wi);
        // Degenerate case: grazing directions contribute nothing.
        if (absCosI == 0 || absCosO == 0)
            return new Spectrum(0);
        Vector3f half = wi.add(wo);
        // Degenerate case: wo and wi exactly opposite, no valid half-vector.
        if (half.x == 0 && half.y == 0 && half.z == 0)
            return new Spectrum(0);
        half = Vector3f.Normalize(half);
        Spectrum fresnelTerm = fresnel.Evaluate(Vector3f.Dot(wi, half));
        float distTimesGeom = distribution.D(half) * distribution.G(wo, wi);
        return R.scale(distTimesGeom).multiply(fresnelTerm.scale(1 / (4 * absCosI * absCosO)));
    }

    @Override
    public BxDFSample Sample_f(Vector3f wo, Point2f u) {
        BxDFSample sample = new BxDFSample();
        sample.f = new Spectrum(0);
        sample.pdf = 0.0f;
        sample.wiWorld = new Vector3f();
        sample.sampledType = BSDF_NONE;
        // Degenerate outgoing direction: nothing can be sampled.
        if (wo.z == 0)
            return sample;
        // Draw a microfacet normal and mirror wo about it.
        Vector3f half = distribution.Sample_wh(wo, u);
        sample.wiWorld = Reflection.Reflect(wo, half);
        if (!Reflection.SameHemisphere(wo, sample.wiWorld))
            return sample;
        // Change of variables wh -> wi: pdf(wi) = pdf(wh) / (4 * (wo . wh)).
        sample.pdf = distribution.Pdf(wo, half) / (4 * Vector3f.Dot(wo, half));
        sample.f = f(wo, sample.wiWorld);
        return sample;
    }

    @Override
    public float Pdf(Vector3f wo, Vector3f wi) {
        if (!Reflection.SameHemisphere(wo, wi))
            return 0;
        Vector3f half = Vector3f.Normalize(wo.add(wi));
        return distribution.Pdf(wo, half) / (4 * Vector3f.Dot(wo, half));
    }

    @Override
    public String toString() {
        return "[ MicrofacetReflection R: " + R.toString() + " distribution: " + distribution.toString() +
                " fresnel: " + fresnel.toString() + " ]";
    }

    private final Spectrum R;
    private final MicrofacetDistribution distribution;
    private final Fresnel fresnel;
}
|
package org.seqcode.motifs;
import java.io.File;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
import org.seqcode.data.io.BackgroundModelIO;
import org.seqcode.data.motifdb.CountsBackgroundModel;
import org.seqcode.data.motifdb.MarkovBackgroundModel;
import org.seqcode.data.motifdb.WeightMatrix;
import org.seqcode.genome.GenomeConfig;
import org.seqcode.gseutils.ArgParser;
import org.seqcode.motifs.FreqMatrixImport;
/**
* For each input motif, print motif logos.
*/
/**
 * For each input motif, print motif logos (forward and reverse-complement PNGs).
 */
public class PrintMotifLogos {

    protected List<WeightMatrix> motifs = new ArrayList<WeightMatrix>();

    /**
     * Loads frequency matrices (TRANSFAC format) from a file and converts them
     * to weight matrices against the supplied Markov background model.
     */
    public void loadMotifsFromFile(String filename, MarkovBackgroundModel b) {
        FreqMatrixImport motifImport = new FreqMatrixImport();
        motifImport.setBackground(b);
        motifs.addAll(motifImport.readTransfacMatrices(filename));
    }

    /**
     * Draws a forward ("name.png") and a reverse-complement ("name_rc.png")
     * logo for each loaded motif.
     *
     * @param drawaxis whether to draw axes on the logos
     */
    public void execute(boolean drawaxis) {
        // Finally, draw the motif logos
        for (WeightMatrix fm : motifs) {
            File motifFileName = new File(fm.getName() + ".png");
            org.seqcode.motifs.DrawMotifs.printMotifLogo(fm, motifFileName, 150, fm.getName(), drawaxis);
            motifFileName = new File(fm.getName() + "_rc.png");
            org.seqcode.motifs.DrawMotifs.printMotifLogo(WeightMatrix.reverseComplement(fm), motifFileName, 150, fm.getName(), drawaxis);
        }
    }

    public static void main(String[] args) throws IOException, ParseException {
        ArgParser ap = new ArgParser(args);
        if (!ap.hasKey("motfile")) {
            System.err.println("please input motfile.");
            // BUGFIX: the usage text previously listed options copy-pasted from a
            // peak-calling tool (--expt/--ctrl/--format/--motifs) that this class
            // never reads; it now documents the options actually parsed below.
            System.err.println("Usage:\n " +
                    "PrintMotifLogos\n " +
                    "--geninfo <genome info file> \n " +
                    "--motfile <file containing TRANSFAC frequency matrices> \n " +
                    "--back <Markov background model file (optional)> \n " +
                    "--showaxis (draw axes on the logos)\n " +
                    "");
            System.exit(0);
        }
        GenomeConfig gcon = new GenomeConfig(args);
        String motfile = ap.getKeyValue("motfile");
        String backFile = ap.hasKey("back") ? ap.getKeyValue("back") : null;
        // BUGFIX: accept the documented spelling "showaxis" while remaining
        // backward compatible with the historical "showaxix" typo.
        boolean showaxis = ap.hasKey("showaxis") || ap.hasKey("showaxix");
        MarkovBackgroundModel back;
        if (backFile == null) {
            // No explicit background given: estimate one from whole-genome counts.
            back = new MarkovBackgroundModel(CountsBackgroundModel.modelFromWholeGenome(gcon.getGenome()));
        } else {
            back = BackgroundModelIO.parseMarkovBackgroundModel(backFile, gcon.getGenome());
        }
        PrintMotifLogos plotter = new PrintMotifLogos();
        plotter.loadMotifsFromFile(motfile, back);
        plotter.execute(showaxis);
    }
}
|
package polyglot.ext.jl5.ast;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import polyglot.ast.Block;
import polyglot.ast.ClassBody;
import polyglot.ast.ClassDecl;
import polyglot.ast.ClassMember;
import polyglot.ast.ConstructorCall;
import polyglot.ast.ConstructorDecl;
import polyglot.ast.Formal;
import polyglot.ast.Id;
import polyglot.ast.IntLit;
import polyglot.ast.Lit;
import polyglot.ast.Node;
import polyglot.ast.NodeFactory;
import polyglot.ast.TypeNode;
import polyglot.ext.jl5.types.JL5MethodInstance;
import polyglot.ext.jl5.types.JL5ParsedClassType;
import polyglot.ext.jl5.types.JL5TypeSystem;
import polyglot.types.ConstructorInstance;
import polyglot.types.Flags;
import polyglot.types.MethodInstance;
import polyglot.types.SemanticException;
import polyglot.types.Type;
import polyglot.types.TypeSystem;
import polyglot.util.InternalCompilerError;
import polyglot.util.Position;
import polyglot.util.SerialVersionUID;
import polyglot.visit.NodeVisitor;
import polyglot.visit.TypeBuilder;
import polyglot.visit.TypeChecker;
/**
 * AST node for a Java 5 {@code enum} declaration. Extends the class
 * declaration node with enum-specific well-formedness checks (no
 * abstract/final/misplaced-private modifiers, private-only constructors)
 * and with the implicitly declared {@code valueOf(String)} and
 * {@code values()} methods.
 */
public class JL5EnumDecl_c extends JL5ClassDecl_c implements JL5EnumDecl {
    private static final long serialVersionUID = SerialVersionUID.generate();

    public JL5EnumDecl_c(Position pos, Flags flags,
            List<AnnotationElem> annotations, Id name, TypeNode superClass,
            List<TypeNode> interfaces, ClassBody body) {
        super(pos, flags, annotations, name, superClass, interfaces, body);
    }

    /**
     * Builds the type for this declaration, then records the implicit enum
     * methods ({@code valueOf}, {@code values}) on the class type if needed.
     */
    @Override
    public Node buildTypes(TypeBuilder tb) throws SemanticException {
        JL5EnumDecl_c n = (JL5EnumDecl_c) super.buildTypes(tb);
        // BUGFIX: removed a catch block that merely called printStackTrace()
        // on a RuntimeException before rethrowing it -- debugging residue that
        // double-reported every error on stderr.
        JL5TypeSystem ts = (JL5TypeSystem) tb.typeSystem();
        return n.addEnumMethodTypesIfNeeded(ts);
    }

    @Override
    public NodeVisitor typeCheckEnter(TypeChecker tc) throws SemanticException {
        // Figure out if this should be an abstract type. This must happen
        // before any anonymous subclasses (enum constant bodies) are
        // typechecked.
        for (MethodInstance mi : type().methods()) {
            if (!mi.flags().isAbstract()) continue;
            // mi is abstract! Mark the class as abstract.
            type().setFlags(type().flags().Abstract());
            // One abstract method suffices; no need to scan the rest.
            break;
        }
        return super.typeCheckEnter(tc);
    }

    /**
     * Enforces enum-specific modifier rules: no {@code abstract} or
     * {@code final} on the declaration, no {@code private} on a top-level
     * enum, and constructors may carry no modifier other than
     * {@code private}. Also propagates implicit {@code static} (for nested
     * enums) and implicit {@code abstract} (when a member is abstract).
     */
    @Override
    public Node typeCheck(TypeChecker tc) throws SemanticException {
        if (flags().isAbstract()) {
            throw new SemanticException("Enum types cannot have abstract modifier",
                                        this.position());
        }
        if (flags().isPrivate() && !type().isNested()) {
            throw new SemanticException("Top level enum types cannot have private modifier",
                                        this.position());
        }
        if (flags().isFinal()) {
            throw new SemanticException("Enum types cannot have final modifier",
                                        this.position());
        }
        for (ConstructorInstance ci : type().constructors()) {
            if (!ci.flags().clear(Flags.PRIVATE).equals(Flags.NONE)) {
                throw new SemanticException("Modifier "
                                                    + ci.flags()
                                                        .clear(Flags.PRIVATE)
                                                    + " not allowed here",
                                            ci.position());
            }
        }
        ClassDecl n = (ClassDecl) super.typeCheck(tc);
        if (n.type().isMember()) {
            // it's a nested class; nested enums are implicitly static
            n = this.flags(this.flags().Static());
            n.type().flags(n.type().flags().Static());
        }
        for (ClassMember m : this.body().members()) {
            if (m.memberInstance().flags().isAbstract()) {
                // an abstract member makes the whole enum class abstract
                n = this.flags(this.flags().Abstract());
                n.type().flags(n.type().flags().Abstract());
                break;
            }
        }
        return n;
    }

    /**
     * Adds the implicit private default constructor, which must chain to the
     * {@code java.lang.Enum} super constructor.
     */
    @Override
    protected Node addDefaultConstructor(TypeSystem ts, NodeFactory nf)
            throws SemanticException {
        ConstructorInstance ci = this.defaultCI;
        if (ci == null) {
            throw new InternalCompilerError("addDefaultConstructor called without defaultCI set");
        }
        // insert call to appropriate super constructor
        List<Lit> args = new ArrayList<Lit>(2);
        args.add(nf.NullLit(Position.compilerGenerated()));// XXX the right thing to do is change the type of java.lang.Enum instead of adding these dummy params
        args.add(nf.IntLit(Position.compilerGenerated(), IntLit.INT, 0));
        Block block =
                nf.Block(position().startOf(),
                         ((JL5NodeFactory) nf).ConstructorCall(position.startOf(),
                                                               ConstructorCall.SUPER,
                                                               null,
                                                               args,
                                                               true));
        //Default constructor of an enum is private
        ConstructorDecl cd =
                nf.ConstructorDecl(body().position().startOf(),
                                   Flags.PRIVATE,
                                   name,
                                   Collections.<Formal> emptyList(),
                                   Collections.<TypeNode> emptyList(),
                                   block);
        cd = cd.constructorInstance(ci);
        return body(body.addMember(cd));
    }

    /**
     * Records the implicit {@code valueOf} and {@code values} method types on
     * the class type, but only when the source did not already declare them.
     */
    private Node addEnumMethodTypesIfNeeded(TypeSystem ts) {
        JL5ParsedClassType ct = (JL5ParsedClassType) this.type();
        JL5EnumDecl_c n = this;
        if (ct.enumValueOfMethodNeeded()) {
            n = n.addValueOfMethodType(ts);
        }
        if (ct.enumValuesMethodNeeded()) {
            n = n.addValuesMethodType(ts);
        }
        return n;
    }

    /** Adds the type of the implicit {@code public static final valueOf(String)} method. */
    protected JL5EnumDecl_c addValueOfMethodType(TypeSystem ts) {
        Flags flags = Flags.PUBLIC.set(Flags.STATIC.set(Flags.FINAL));
        // add valueOf method
        JL5MethodInstance valueOfMI =
                (JL5MethodInstance) ts.methodInstance(position(),
                                                      this.type(),
                                                      flags,
                                                      this.type(),
                                                      "valueOf",
                                                      Collections.singletonList((Type) ts.String()),
                                                      Collections.<Type> emptyList());
        this.type.addMethod(valueOfMI);
        return this;
    }

    /** Adds the type of the implicit {@code public static final native values()} method. */
    protected JL5EnumDecl_c addValuesMethodType(TypeSystem ts) {
        Flags flags = Flags.PUBLIC.set(Flags.STATIC.set(Flags.FINAL));
        // add values method
        JL5MethodInstance valuesMI =
                (JL5MethodInstance) ts.methodInstance(position(),
                                                      this.type(),
                                                      flags.set(Flags.NATIVE),
                                                      ts.arrayOf(this.type()),
                                                      "values",
                                                      Collections.<Type> emptyList(),
                                                      Collections.<Type> emptyList());
        this.type.addMethod(valuesMI);
        return this;
    }
}
|
package bio.terra.service.filedata.google.firestore;
import static bio.terra.service.configuration.ConfigEnum.FIRESTORE_SNAPSHOT_BATCH_SIZE;
import bio.terra.app.logging.PerformanceLogger;
import bio.terra.model.CloudPlatform;
import bio.terra.service.configuration.ConfigEnum;
import bio.terra.service.configuration.ConfigurationService;
import bio.terra.service.dataset.Dataset;
import bio.terra.service.filedata.FSContainerInterface;
import bio.terra.service.filedata.FSDir;
import bio.terra.service.filedata.FSFile;
import bio.terra.service.filedata.FSItem;
import bio.terra.service.filedata.FileMetadataUtils;
import bio.terra.service.filedata.SnapshotCompute;
import bio.terra.service.filedata.SnapshotComputeHelper;
import bio.terra.service.filedata.exception.FileNotFoundException;
import bio.terra.service.filedata.exception.FileSystemExecutionException;
import bio.terra.service.snapshot.Snapshot;
import bio.terra.service.snapshot.SnapshotProject;
import com.google.cloud.firestore.CollectionReference;
import com.google.cloud.firestore.Firestore;
import com.google.cloud.firestore.QuerySnapshot;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
// Operations on a file often need to touch file and directory collections that is,
// the FireStoreFileDao and the FireStoreDirectoryDao.
// The data to make an FSDir or FSFile is now spread between the file collection and the
// directory collection, so a lookup needs to visit two places to generate a complete FSItem.
// This class coordinates operations between the daos.
// The dependency collection is independent, so it is not included under this dao.
// Perhaps it should be.
// There are several functions performed in this layer.
// 1. Encapsulating the underlying daos
// 2. Converting from dao objects into DR metadata objects
// 3. Dealing with project, dataset, and snapshot objects, so the daos don't have to
@Component
public class FireStoreDao {
  // BUGFIX: made static. The logger was an instance field but is referenced
  // from the static nested class FirestoreComputeHelper (updateEntry), which
  // cannot access an outer instance field; static is also the conventional
  // SLF4J idiom.
  private static final Logger logger = LoggerFactory.getLogger(FireStoreDao.class);

  private final FireStoreDirectoryDao directoryDao;
  private final FireStoreFileDao fileDao;
  private final FireStoreUtils fireStoreUtils;
  private final ConfigurationService configurationService;
  private final PerformanceLogger performanceLogger;

  @Autowired
  public FireStoreDao(
      FireStoreDirectoryDao directoryDao,
      FireStoreFileDao fileDao,
      FireStoreUtils fireStoreUtils,
      ConfigurationService configurationService,
      PerformanceLogger performanceLogger) {
    this.directoryDao = directoryDao;
    this.fileDao = fileDao;
    this.fireStoreUtils = fireStoreUtils;
    this.configurationService = configurationService;
    this.performanceLogger = performanceLogger;
  }

  /**
   * Creates a directory entry in the dataset's Firestore directory collection. Note: the target
   * collection id is taken from the entry itself, not from the dataset argument.
   */
  public void createDirectoryEntry(Dataset dataset, FireStoreDirectoryEntry newEntry)
      throws InterruptedException {
    Firestore firestore =
        FireStoreProject.get(dataset.getProjectResource().getGoogleProjectId()).getFirestore();
    String datasetId = newEntry.getDatasetId();
    directoryDao.createDirectoryEntry(firestore, datasetId, newEntry);
  }

  /** Deletes the directory entry for {@code fileId}; returns whether an entry was deleted. */
  public boolean deleteDirectoryEntry(Dataset dataset, String fileId) throws InterruptedException {
    Firestore firestore =
        FireStoreProject.get(dataset.getProjectResource().getGoogleProjectId()).getFirestore();
    String datasetId = dataset.getId().toString();
    return directoryDao.deleteDirectoryEntry(firestore, datasetId, fileId);
  }

  /** Creates the file metadata document for a newly ingested file. */
  public void createFileMetadata(Dataset dataset, FireStoreFile newFile)
      throws InterruptedException {
    Firestore firestore =
        FireStoreProject.get(dataset.getProjectResource().getGoogleProjectId()).getFirestore();
    String datasetId = dataset.getId().toString();
    fileDao.createFileMetadata(firestore, datasetId, newFile);
  }

  /** Deletes the file metadata document for {@code fileId}; returns whether one was deleted. */
  public boolean deleteFileMetadata(Dataset dataset, String fileId) throws InterruptedException {
    Firestore firestore =
        FireStoreProject.get(dataset.getProjectResource().getGoogleProjectId()).getFirestore();
    String datasetId = dataset.getId().toString();
    return fileDao.deleteFileMetadata(firestore, datasetId, fileId);
  }

  /**
   * Deletes all file metadata and directory entries for a dataset, invoking {@code func} for each
   * deleted file (e.g. to clean up underlying cloud objects).
   */
  public void deleteFilesFromDataset(Dataset dataset, InterruptibleConsumer<FireStoreFile> func)
      throws InterruptedException {
    Firestore firestore =
        FireStoreProject.get(dataset.getProjectResource().getGoogleProjectId()).getFirestore();
    String datasetId = dataset.getId().toString();
    if (configurationService.testInsertFault(ConfigEnum.LOAD_SKIP_FILE_LOAD)) {
      // If we didn't load files, don't try to delete them
      fileDao.deleteFilesFromDataset(firestore, datasetId, f -> {});
    } else {
      fileDao.deleteFilesFromDataset(firestore, datasetId, func);
    }
    directoryDao.deleteDirectoryEntriesFromCollection(firestore, datasetId);
  }

  /** Looks up a directory entry by file id; may return null if not found. */
  public FireStoreDirectoryEntry lookupDirectoryEntry(Dataset dataset, String fileId)
      throws InterruptedException {
    Firestore firestore =
        FireStoreProject.get(dataset.getProjectResource().getGoogleProjectId()).getFirestore();
    String datasetId = dataset.getId().toString();
    return directoryDao.retrieveById(firestore, datasetId, fileId);
  }

  /** Looks up a directory entry by full path; may return null if not found. */
  public FireStoreDirectoryEntry lookupDirectoryEntryByPath(Dataset dataset, String path)
      throws InterruptedException {
    Firestore firestore =
        FireStoreProject.get(dataset.getProjectResource().getGoogleProjectId()).getFirestore();
    String datasetId = dataset.getId().toString();
    return directoryDao.retrieveByPath(firestore, datasetId, path);
  }

  /** Retrieves the file metadata document for {@code fileId}; may return null if not found. */
  public FireStoreFile lookupFile(Dataset dataset, String fileId) throws InterruptedException {
    Firestore firestore =
        FireStoreProject.get(dataset.getProjectResource().getGoogleProjectId()).getFirestore();
    String datasetId = dataset.getId().toString();
    return fileDao.retrieveFileMetadata(firestore, datasetId, fileId);
  }

  /**
   * Adds the referenced dataset files into the snapshot's directory collection, copying directory
   * structure from the dataset's Firestore into the snapshot's Firestore.
   */
  public void addFilesToSnapshot(Dataset dataset, Snapshot snapshot, List<String> refIds)
      throws InterruptedException {
    Firestore datasetFirestore =
        FireStoreProject.get(dataset.getProjectResource().getGoogleProjectId()).getFirestore();
    Firestore snapshotFirestore =
        FireStoreProject.get(snapshot.getProjectResource().getGoogleProjectId()).getFirestore();
    String datasetId = dataset.getId().toString();
    // TODO: Do we need to make sure the dataset name does not contain characters that are invalid
    // for paths?
    // Added the work to figure that out to DR-325
    String datasetName = dataset.getName();
    String snapshotId = snapshot.getId().toString();
    directoryDao.addEntriesToSnapshot(
        datasetFirestore, datasetId, datasetName, snapshotFirestore, snapshotId, refIds);
  }

  /** Deletes every directory entry in the snapshot's collection. */
  public void deleteFilesFromSnapshot(Snapshot snapshot) throws InterruptedException {
    Firestore firestore =
        FireStoreProject.get(snapshot.getProjectResource().getGoogleProjectId()).getFirestore();
    String snapshotId = snapshot.getId().toString();
    directoryDao.deleteDirectoryEntriesFromCollection(firestore, snapshotId);
  }

  /**
   * Computes and stores aggregate directory metadata (sizes/checksums) for a snapshot's file
   * system. No-op when the snapshot contains no files.
   */
  public void snapshotCompute(Snapshot snapshot) throws InterruptedException {
    Firestore snapshotFirestore =
        FireStoreProject.get(snapshot.getProjectResource().getGoogleProjectId()).getFirestore();
    Firestore datasetFirestore =
        FireStoreProject.get(
                snapshot
                    .getFirstSnapshotSource()
                    .getDataset()
                    .getProjectResource()
                    .getGoogleProjectId())
            .getFirestore();
    String snapshotId = snapshot.getId().toString();
    FireStoreDirectoryEntry topDir =
        directoryDao.retrieveByPath(snapshotFirestore, snapshotId, "/");
    // If topDir is null, it means no files were added to the snapshot file system in the previous
    // step. So there is nothing to compute
    if (topDir != null) {
      // We batch the updates to firestore by collecting updated entries into this list,
      // and when we get enough, writing them out.
      List<FireStoreDirectoryEntry> updateBatch = new ArrayList<>();
      String retrieveTimer = performanceLogger.timerStart();
      FirestoreComputeHelper helper = getHelper(datasetFirestore, snapshotFirestore, snapshotId);
      SnapshotCompute.computeDirectory(helper, topDir, updateBatch);
      performanceLogger.timerEndAndLog(
          retrieveTimer,
          snapshotId, // not a flight, so no job id
          this.getClass().getName(),
          "fireStoreDao.computeDirectoryGetMetadata");
      // Write the last batch out
      directoryDao.batchStoreDirectoryEntry(snapshotFirestore, snapshotId, updateBatch);
    }
  }

  /**
   * Retrieve an FSItem by path
   *
   * @param container - dataset or snapshot containing file's directory entry
   * @param fullPath - path of the file in the directory
   * @param enumerateDepth - how far to enumerate the directory structure; 0 means not at all; 1
   *     means contents of this directory; 2 means this and its directories, etc. -1 means the
   *     entire tree.
   * @return FSFile or FSDir of retrieved file; can return null on not found
   */
  public FSItem retrieveByPath(FSContainerInterface container, String fullPath, int enumerateDepth)
      throws InterruptedException {
    Firestore fsItemFirestore =
        FireStoreProject.get(container.getProjectResource().getGoogleProjectId()).getFirestore();
    Firestore metadataFirestore = container.firestoreConnection().getFirestore();
    String containerId = container.getId().toString();
    FireStoreDirectoryEntry fireStoreDirectoryEntry =
        directoryDao.retrieveByPath(fsItemFirestore, containerId, fullPath);
    return retrieveWorker(
        fsItemFirestore,
        metadataFirestore,
        containerId,
        enumerateDepth,
        fireStoreDirectoryEntry,
        fullPath);
  }

  /**
   * Like {@link #retrieveByPath}, but returns an empty Optional instead of throwing when the path
   * does not exist. An InterruptedException inside the lookup is rethrown wrapped as
   * FileSystemExecutionException because it cannot cross the lambda boundary.
   */
  public Optional<FSItem> lookupOptionalPath(
      FSContainerInterface container, String fullPath, int enumerateDepth)
      throws InterruptedException {
    Firestore fsItemFirestore =
        FireStoreProject.get(container.getProjectResource().getGoogleProjectId()).getFirestore();
    Firestore metadataFirestore = container.firestoreConnection().getFirestore();
    String containerId = container.getId().toString();
    FireStoreDirectoryEntry fireStoreDirectoryEntry =
        directoryDao.retrieveByPath(fsItemFirestore, containerId, fullPath);
    return Optional.ofNullable(fireStoreDirectoryEntry)
        .map(
            entry -> {
              try {
                return retrieveWorker(
                    fsItemFirestore,
                    metadataFirestore,
                    containerId,
                    enumerateDepth,
                    entry,
                    fullPath);
              } catch (InterruptedException ex) {
                throw new FileSystemExecutionException(
                    "Unexpected interruption during file system processing", ex);
              }
            });
  }

  /**
   * Retrieve an FSItem by id
   *
   * @param container - dataset or snapshot containing file's directory entry
   * @param fileId - id of the file or directory
   * @param enumerateDepth - how far to enumerate the directory structure; 0 means not at all; 1
   *     means contents of this directory; 2 means this and its directories, etc. -1 means the
   *     entire tree.
   * @return FSFile or FSDir of retrieved file; can return null on not found
   */
  public FSItem retrieveById(FSContainerInterface container, String fileId, int enumerateDepth)
      throws InterruptedException {
    Firestore fsItemFirestore =
        FireStoreProject.get(container.getProjectResource().getGoogleProjectId()).getFirestore();
    Firestore metadataFirestore = container.firestoreConnection().getFirestore();
    String datasetId = container.getId().toString();
    FireStoreDirectoryEntry fireStoreDirectoryEntry =
        directoryDao.retrieveById(fsItemFirestore, datasetId, fileId);
    return retrieveWorker(
        fsItemFirestore,
        metadataFirestore,
        datasetId,
        enumerateDepth,
        fireStoreDirectoryEntry,
        fileId);
  }

  /**
   * Given a snapshot, retrieve the -files metadata collection from its source Dataset Firestore
   *
   * @param snapshot target snapshot
   * @return QuerySnapshot representation of the -files collection in source Dataset.
   */
  public QuerySnapshot retrieveFilesCollection(Snapshot snapshot)
      throws ExecutionException, InterruptedException {
    Dataset sourceDataset = snapshot.getSourceDataset();
    String collectionName = String.format("%s-files", sourceDataset.getId());
    return retrieveCollectionByName(
        sourceDataset.getProjectResource().getGoogleProjectId(), collectionName);
  }

  /** Fetches every document in the named collection of the given project's Firestore. */
  public QuerySnapshot retrieveCollectionByName(String projectId, String collectionName)
      throws ExecutionException, InterruptedException {
    // BUGFIX: removed a stray empty statement (";") left after this declaration.
    final Firestore db = FireStoreProject.get(projectId).getFirestore();
    final CollectionReference collection = db.collection(collectionName);
    return collection.get().get();
  }

  /**
   * Retrieves an FSItem by id from a snapshot, resolving the directory entry in the snapshot's
   * Firestore and the file metadata in the source dataset's Firestore.
   */
  public FSItem retrieveBySnapshotAndId(SnapshotProject snapshot, String fileId, int enumerateDepth)
      throws InterruptedException {
    String projectName = snapshot.getDataProject();
    String datasetId = snapshot.getId().toString();
    Firestore fsItemFirestore = FireStoreProject.get(projectName).getFirestore();
    Firestore metadataFirestore =
        FireStoreProject.get(snapshot.getFirstSourceDatasetProject().getDataProject())
            .getFirestore();
    FireStoreDirectoryEntry fireStoreDirectoryEntry =
        directoryDao.retrieveById(fsItemFirestore, datasetId, fileId);
    return retrieveWorker(
        fsItemFirestore,
        metadataFirestore,
        datasetId,
        enumerateDepth,
        fireStoreDirectoryEntry,
        fileId);
  }

  /**
   * Retrieve a batch of FSFile by id
   *
   * @param container - dataset or snapshot containing file's directory entry
   * @param fileIds - list of ids of file identifiers - directory identifiers will throw
   * @return list of FSItem of retrieved files; throws on not found
   */
  public List<FSFile> batchRetrieveById(
      FSContainerInterface container, List<String> fileIds, int enumerateDepth)
      throws InterruptedException {
    Firestore firestore =
        FireStoreProject.get(container.getProjectResource().getGoogleProjectId()).getFirestore();
    String containerId = container.getId().toString();
    List<FireStoreDirectoryEntry> directoryEntries =
        directoryDao.batchRetrieveById(firestore, containerId, fileIds);
    // TODO: When we have more than one dataset in a snapshot then we will have to
    // split entries by underlying dataset. For now we know that they all come from one dataset.
    List<FireStoreFile> files =
        fileDao.batchRetrieveFileMetadata(firestore, containerId, directoryEntries);
    List<FSFile> resultList = new ArrayList<>();
    if (directoryEntries.size() != files.size()) {
      throw new FileSystemExecutionException("List sizes should be identical");
    }
    // The two lists are index-aligned: entry i describes the directory entry of file i.
    for (int i = 0; i < files.size(); i++) {
      FireStoreFile file = files.get(i);
      FireStoreDirectoryEntry entry = directoryEntries.get(i);
      FSFile fsFile =
          new FSFile()
              .fileId(UUID.fromString(entry.getFileId()))
              .collectionId(UUID.fromString(entry.getDatasetId()))
              .datasetId(UUID.fromString(entry.getDatasetId()))
              .createdDate(Instant.parse(file.getFileCreatedDate()))
              .path(FileMetadataUtils.getFullPath(entry.getPath(), entry.getName()))
              .checksumCrc32c(file.getChecksumCrc32c())
              .checksumMd5(file.getChecksumMd5())
              .size(file.getSize())
              .description(file.getDescription())
              .cloudPath(file.getGspath())
              .cloudPlatform(CloudPlatform.GCP)
              .mimeType(file.getMimeType())
              .bucketResourceId(file.getBucketResourceId())
              .loadTag(file.getLoadTag());
      resultList.add(fsFile);
    }
    return resultList;
  }

  /** Returns the subset of {@code refIdArray} that correspond to valid file ids in the dataset. */
  public List<String> validateRefIds(Dataset dataset, List<String> refIdArray)
      throws InterruptedException {
    Firestore firestore =
        FireStoreProject.get(dataset.getProjectResource().getGoogleProjectId()).getFirestore();
    String datasetId = dataset.getId().toString();
    return directoryDao.validateRefIds(firestore, datasetId, refIdArray);
  }

  // -- private methods --

  /**
   * Retrieves an FSItem object
   *
   * @param fsItemFirestore The firestore collection that contains the collection with the virtual
   *     file system. This can be a dataset or snapshot project based collection
   * @param metadataFirestore The firestore collection that contains the collection with the file
   *     object metadata. This is always a dataset based project collection
   * @param collectionId The ID of the collection in the fsItemFirestore connection that contains
   *     the virtual file system objects
   * @param enumerateDepth how far to enumerate the directory structure
   * @param fireStoreDirectoryEntry The object to enumerate entries within
   * @param context provides either the file id or the file path, for use in error messages.
   * @return An {@link FSItem} representation of the passed in fireStoreDirectoryEntry with nested
   *     FSItems
   * @throws InterruptedException
   */
  private FSItem retrieveWorker(
      Firestore fsItemFirestore,
      Firestore metadataFirestore,
      String collectionId,
      int enumerateDepth,
      FireStoreDirectoryEntry fireStoreDirectoryEntry,
      String context)
      throws InterruptedException {
    if (fireStoreDirectoryEntry == null) {
      throw new FileNotFoundException("File not found: " + context);
    }
    if (fireStoreDirectoryEntry.getIsFileRef()) {
      FSItem fsFile = makeFSFile(metadataFirestore, collectionId, fireStoreDirectoryEntry);
      if (fsFile == null) {
        // We found a file in the directory that is not done being created. We treat this
        // as not found.
        throw new FileNotFoundException(
            String.format(
                "Found a file (fileId: %s), but the directory is not done being created: %s",
                fireStoreDirectoryEntry.getFileId(), context));
      }
      return fsFile;
    }
    return makeFSDir(
        fsItemFirestore, metadataFirestore, collectionId, enumerateDepth, fireStoreDirectoryEntry);
  }

  /**
   * Create an FSItem object
   *
   * @param fsItemFirestore The firestore collection that contains the collection with the virtual
   *     file system. This can be a dataset or snapshot project based collection
   * @param metadataFirestore The firestore collection that contains the collection with the file
   *     object metadata. This is always a dataset based project collection
   * @param collectionId The ID of the collection in the fsItemFirestore connection that contains
   *     the virtual file system objects
   * @param level how far to enumerate the directory structure
   * @param fireStoreDirectoryEntry The object to enumerate entries within
   * @return An {@link FSItem} representation of the passed in fireStoreDirectoryEntry with nested
   *     FSItems
   * @throws InterruptedException
   */
  private FSItem makeFSDir(
      Firestore fsItemFirestore,
      Firestore metadataFirestore,
      String collectionId,
      int level,
      FireStoreDirectoryEntry fireStoreDirectoryEntry)
      throws InterruptedException {
    if (fireStoreDirectoryEntry.getIsFileRef()) {
      throw new IllegalStateException("Expected directory; got file!");
    }
    String fullPath =
        FileMetadataUtils.getFullPath(
            fireStoreDirectoryEntry.getPath(), fireStoreDirectoryEntry.getName());
    FSDir fsDir = new FSDir();
    fsDir
        .fileId(UUID.fromString(fireStoreDirectoryEntry.getFileId()))
        .collectionId(UUID.fromString(collectionId))
        .createdDate(Instant.parse(fireStoreDirectoryEntry.getFileCreatedDate()))
        .path(fullPath)
        .checksumCrc32c(fireStoreDirectoryEntry.getChecksumCrc32c())
        .checksumMd5(fireStoreDirectoryEntry.getChecksumMd5())
        .size(fireStoreDirectoryEntry.getSize())
        .description(StringUtils.EMPTY);
    if (level != 0) {
      List<FSItem> fsContents = new ArrayList<>();
      List<FireStoreDirectoryEntry> dirContents =
          directoryDao.enumerateDirectory(fsItemFirestore, collectionId, fullPath);
      for (FireStoreDirectoryEntry fso : dirContents) {
        if (fso.getIsFileRef()) {
          // Files that are in the middle of being ingested can have a directory entry, but not yet
          // have
          // a file entry. We do not return files that do not yet have a file entry.
          FSItem fsFile = makeFSFile(metadataFirestore, collectionId, fso);
          if (fsFile != null) {
            fsContents.add(fsFile);
          }
        } else {
          fsContents.add(
              makeFSDir(fsItemFirestore, metadataFirestore, collectionId, level - 1, fso));
        }
      }
      fsDir.contents(fsContents);
    }
    return fsDir;
  }

  // Handle files - the fireStoreDirectoryEntry is a reference to a file in a dataset.
  // Returns null when the file's metadata document does not (yet) exist.
  private FSItem makeFSFile(
      Firestore datasetFirestore,
      String collectionId,
      FireStoreDirectoryEntry fireStoreDirectoryEntry)
      throws InterruptedException {
    if (!fireStoreDirectoryEntry.getIsFileRef()) {
      throw new IllegalStateException("Expected file; got directory!");
    }
    String fullPath =
        FileMetadataUtils.getFullPath(
            fireStoreDirectoryEntry.getPath(), fireStoreDirectoryEntry.getName());
    String fileId = fireStoreDirectoryEntry.getFileId();
    // Lookup the file in its owning dataset, not in the collection. The collection may be a
    // snapshot directory
    // pointing to the files in one or more datasets.
    FireStoreFile fireStoreFile =
        fileDao.retrieveFileMetadata(
            datasetFirestore, fireStoreDirectoryEntry.getDatasetId(), fileId);
    if (fireStoreFile == null) {
      return null;
    }
    FSFile fsFile = new FSFile();
    fsFile
        .fileId(UUID.fromString(fileId))
        .collectionId(UUID.fromString(collectionId))
        .datasetId(UUID.fromString(fireStoreDirectoryEntry.getDatasetId()))
        .createdDate(Instant.parse(fireStoreFile.getFileCreatedDate()))
        .path(fullPath)
        .checksumCrc32c(fireStoreFile.getChecksumCrc32c())
        .checksumMd5(fireStoreFile.getChecksumMd5())
        .size(fireStoreFile.getSize())
        .description(fireStoreFile.getDescription())
        .cloudPath(fireStoreFile.getGspath())
        .cloudPlatform(CloudPlatform.GCP)
        .mimeType(fireStoreFile.getMimeType())
        .bucketResourceId(fireStoreFile.getBucketResourceId())
        .loadTag(fireStoreFile.getLoadTag());
    return fsFile;
  }

  /** Builds a compute helper bound to the given Firestore connections and snapshot. */
  public FirestoreComputeHelper getHelper(
      Firestore datasetFirestore, Firestore snapshotFirestore, String snapshotId) {
    return new FirestoreComputeHelper(
        fileDao,
        directoryDao,
        datasetFirestore,
        snapshotFirestore,
        snapshotId,
        configurationService.getParameterValue(FIRESTORE_SNAPSHOT_BATCH_SIZE));
  }

  /**
   * SnapshotComputeHelper backed by Firestore: reads file metadata from the dataset's Firestore,
   * enumerates and batch-updates directory entries in the snapshot's Firestore.
   */
  static class FirestoreComputeHelper implements SnapshotComputeHelper {
    private final FireStoreFileDao fileDao;
    private final FireStoreDirectoryDao directoryDao;
    private final Firestore datasetFirestore;
    private final Firestore snapshotFirestore;
    private final String snapshotId;
    private final Integer snapshotBatchSize;

    FirestoreComputeHelper(
        FireStoreFileDao fileDao,
        FireStoreDirectoryDao directoryDao,
        Firestore datasetFirestore,
        Firestore snapshotFirestore,
        String snapshotId,
        Integer snapshotBatchSize) {
      this.fileDao = fileDao;
      this.directoryDao = directoryDao;
      this.datasetFirestore = datasetFirestore;
      this.snapshotFirestore = snapshotFirestore;
      this.snapshotId = snapshotId;
      this.snapshotBatchSize = snapshotBatchSize;
    }

    @Override
    public List<FireStoreFile> batchRetrieveFileMetadata(
        Map.Entry<String, List<FireStoreDirectoryEntry>> entry) throws InterruptedException {
      return fileDao.batchRetrieveFileMetadata(datasetFirestore, entry.getKey(), entry.getValue());
    }

    @Override
    public List<FireStoreDirectoryEntry> enumerateDirectory(String dirPath)
        throws InterruptedException {
      return directoryDao.enumerateDirectory(snapshotFirestore, snapshotId, dirPath);
    }

    @Override
    public void updateEntry(
        FireStoreDirectoryEntry entry, List<FireStoreDirectoryEntry> updateBatch)
        throws InterruptedException {
      updateBatch.add(entry);
      // Flush the batch once it reaches the configured size; the caller flushes the remainder.
      if (updateBatch.size() >= snapshotBatchSize) {
        logger.info("Snapshot compute updating batch of {} directory entries", snapshotBatchSize);
        directoryDao.batchStoreDirectoryEntry(snapshotFirestore, snapshotId, updateBatch);
        updateBatch.clear();
      }
    }
  }
}
|
package br.com.clinicamed.security.configuration;
import br.com.clinicamed.security.handlers.AcessoNaoAutorizadoHandler;
import br.com.clinicamed.security.handlers.AcessoNegadoHandler;
import br.com.clinicamed.security.handlers.FalhaAutenticaoHandler;
import br.com.clinicamed.security.handlers.LoginUsuarioHandler;
import br.com.clinicamed.security.handlers.LogoutHandler;
import br.com.clinicamed.security.handlers.SucessoAutenticacaoHandler;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.builders.WebSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
@Configuration
@ComponentScan("br.com.clinicamed.security")
@EnableWebSecurity
@EnableGlobalMethodSecurity(prePostEnabled = true)
public class SegurancaConfig extends WebSecurityConfigurerAdapter {
@Autowired
private LoginUsuarioHandler loginUsuarioHandler;
@Autowired
private SucessoAutenticacaoHandler sucessoAutenticacaoHandler;
@Autowired
// Registers the application's UserDetailsService (LoginUsuarioHandler) so Spring
// Security resolves users through it during authentication.
// NOTE(review): no PasswordEncoder is configured on this builder — confirm whether
// password encoding is applied elsewhere or passwords are compared in plain text.
public void configureGlobal(AuthenticationManagerBuilder auth) throws Exception {
auth.userDetailsService(loginUsuarioHandler);
}
@Override
public void configure(WebSecurity web) throws Exception {
|
package com.akiban.cserver.service.memcache;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.Set;
import com.akiban.cserver.service.session.Session;
import com.akiban.cserver.service.session.SessionImpl;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelHandler;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
import org.jboss.netty.channel.group.DefaultChannelGroup;
import com.akiban.cserver.api.HapiProcessor;
import com.akiban.cserver.store.Store;
import com.thimbleware.jmemcached.Cache;
import com.thimbleware.jmemcached.CacheElement;
import com.thimbleware.jmemcached.LocalCacheElement;
import com.thimbleware.jmemcached.MemCacheDaemon;
import com.thimbleware.jmemcached.protocol.Command;
import com.thimbleware.jmemcached.protocol.CommandMessage;
import com.thimbleware.jmemcached.protocol.ResponseMessage;
import com.thimbleware.jmemcached.protocol.exceptions.UnknownCommandException;
/**
* Processes CommandMessage and generate ResponseMessage, shared among all channels.
*
* Inspried by: com.thimbleware.jmemcached.protocol.MemcachedCommandHandler
*/
@ChannelHandler.Sharable
final class AkibanCommandHandler extends SimpleChannelUpstreamHandler
{
private static final String PAYLOAD = "PAYLOAD";
private static final String MODULE = AkibanCommandHandler.class.toString();
private static final String THREAD_ASSERT = "THREAD";
/**
 * Supplies the {@link HapiProcessor.Outputter} used to format query results
 * (as raw bytes) for memcache responses.
 */
interface FormatGetter {
HapiProcessor.Outputter<byte[]> getFormat();
}
/**
* State variables that are universal for entire service.
* The handler *must* be declared with a ChannelPipelineCoverage of "all".
*/
private final Store store;
private final DefaultChannelGroup channelGroup;
private static final Log LOG = LogFactory.getLog(MemcacheService.class);
private final FormatGetter formatGetter;
/**
 * Creates the shared command handler.
 *
 * @param store the store used to execute commands
 * @param channelGroup group tracking all open client channels
 * @param formatGetter supplies the outputter used to format responses
 */
public AkibanCommandHandler(Store store, DefaultChannelGroup channelGroup, FormatGetter formatGetter)
{
    this.formatGetter = formatGetter;
    this.channelGroup = channelGroup;
    this.store = store;
}
/**
 * Invoked when a client connection opens: attach a fresh per-connection
 * session (holding a scratch payload buffer and the owning thread, for
 * thread-confinement assertions) and register the channel with the group.
 */
@Override
public void channelOpen(ChannelHandlerContext context, ChannelStateEvent event) throws Exception {
    Session session = new SessionImpl();
    ByteBuffer scratchPayload = ByteBuffer.allocate(65536);
    session.put(MODULE, PAYLOAD, scratchPayload);
    session.put(MODULE, THREAD_ASSERT, Thread.currentThread());
    context.setAttachment(session);
    channelGroup.add(context.getChannel());
}
/**
 * Invoked when a client connection closes: detach the per-connection session
 * and deregister the channel from the group.
 */
@Override
public void channelClosed(ChannelHandlerContext context, ChannelStateEvent event) throws Exception {
    channelGroup.remove(context.getChannel());
    context.setAttachment(null);
}
/**
* Eat the exception, probably an improperly closed client.
*/
@Override
public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception
{
LOG.error("Command handler caught exception: " + e, e.getCause());
}
/**
* Turn CommandMessages into executions against the CS and then pass on downstream message
*/
@Override
@SuppressWarnings("unchecked")
public void messageReceived(ChannelHandlerContext context, MessageEvent event) throws Exception {
if (!(event.getMessage() instanceof CommandMessage)) {
// Ignore what this encoder can't encode.
context.sendUpstream(event);
return;
}
CommandMessage<CacheElement> command = (CommandMessage<CacheElement>) event.getMessage();
Command cmdOp = command.cmd;
if(LOG.isDebugEnabled()) {
StringBuilder msg = new StringBuilder();
msg.append(command.cmd);
if(command.element != null) {
msg.append(" ").append(command.element.getKeystring());
}
for(int i = 0; i < command.keys.size(); ++i) {
msg.append(" ").append(command.keys.get(i));
}
LOG.debug(msg.toString());
}
Channel channel = event.getChannel();
switch(cmdOp) {
case GET:
case GETS: handleGets(context, command, channel); break;
case SET: handleSet(context, command, channel); break;
case CAS: handleCas(context, command, channel); break;
case ADD: handleAdd(context, command, channel); break;
case REPLACE: handleReplace(context, command, channel); break;
case APPEND: handleAppend(context, command, channel); break;
case PREPEND: handlePrepend(context, command, channel); break;
case INCR: handleIncr(context, command, channel); break;
case DECR: handleDecr(context, command, channel); break;
case DELETE: handleDelete(context, command, channel); break;
case STATS: handleStats(context, command, channel); break;
case VERSION: handleVersion(context, command, channel); break;
case QUIT: handleQuit(channel); break;
case FLUSH_ALL: handleFlush(context, command, channel); break;
default:
if(cmdOp == null) {
handleNoOp(context, command);
}
else {
throw new UnknownCommandException("unknown command:" + cmdOp);
}
}
}
protected void handleNoOp(ChannelHandlerContext context, CommandMessage<CacheElement> command) {
Channels.fireMessageReceived(context, new ResponseMessage(command));
}
protected void handleFlush(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) {
boolean flushSuccess = false;
// flushSuccess = cache.flush_all(command.time)
Channels.fireMessageReceived(context, new ResponseMessage(command).withFlushResponse(flushSuccess), channel.getRemoteAddress());
}
protected void handleQuit(Channel channel) {
channel.disconnect();
}
protected void handleVersion(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) {
ResponseMessage responseMessage = new ResponseMessage(command);
responseMessage.version = MemCacheDaemon.memcachedVersion;
Channels.fireMessageReceived(context, responseMessage, channel.getRemoteAddress());
}
protected void handleStats(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) {
// String option = "";
// if(command.keys.size() > 0) {
// option = new String(command.keys.get(0));
Map<String, Set<String>> statResponse = null;
// statResponse = cache.stat(option)
Channels.fireMessageReceived(context, new ResponseMessage(command).withStatResponse(statResponse), channel.getRemoteAddress());
}
protected void handleDelete(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) {
Cache.DeleteResponse dr = null;
//dr = cache.delete(command.keys.get(0), command.time);
Channels.fireMessageReceived(context, new ResponseMessage(command).withDeleteResponse(dr), channel.getRemoteAddress());
}
protected void handleDecr(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) {
Integer incrDecrResp = null;
//incDecrResp = cache.get_add(command.keys.get(0), -1 * command.incrAmount);
Channels.fireMessageReceived(context, new ResponseMessage(command).withIncrDecrResponse(incrDecrResp), channel.getRemoteAddress());
}
protected void handleIncr(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) {
Integer incrDecrResp = null;
//incRecrResp = cache.get_add(command.keys.get(0), command.incrAmount); // TODO support default value and expiry!!
Channels.fireMessageReceived(context, new ResponseMessage(command).withIncrDecrResponse(incrDecrResp), channel.getRemoteAddress());
}
protected void handlePrepend(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) {
Cache.StoreResponse ret = null;
//ret = cache.prepend(command.element);
Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress());
}
protected void handleAppend(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) {
Cache.StoreResponse ret = null;
//ret = cache.append(command.element);
Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress());
}
protected void handleReplace(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) {
Cache.StoreResponse ret = null;
//ret = cache.replace(command.element);
Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress());
}
protected void handleAdd(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) {
Cache.StoreResponse ret = null;
//ret = cache.add(command.element);
Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress());
}
protected void handleCas(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) {
Cache.StoreResponse ret = null;
//ret = cache.cas(command.cas_key, command.element);
Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress());
}
protected void handleSet(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) {
Cache.StoreResponse ret = null;
//ret = cache.set(command.element);
Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress());
}
protected void handleGets(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) {
String[] keys = new String[command.keys.size()];
keys = command.keys.toArray(keys);
byte[] key = keys[0].getBytes();
String request = new String(key);
Session session = (Session) context.getAttachment();
assert session.get(MODULE, PAYLOAD).equals(Thread.currentThread())
: String.format("expected thread %s but on %s", session.get(MODULE, PAYLOAD), Thread.currentThread());
byte[] result_bytes = HapiProcessorImpl.processRequest(
store,
session,
request,
session.<ByteBuffer>get(MODULE, PAYLOAD),
formatGetter.getFormat()
);
CacheElement[] results = null;
if(result_bytes != null) {
LocalCacheElement element = new LocalCacheElement(keys[0]);
element.setData(result_bytes);
results = new CacheElement[] { element };
}
ResponseMessage<CacheElement> resp = new ResponseMessage<CacheElement>(command).withElements(results);
Channels.fireMessageReceived(context, resp, channel.getRemoteAddress());
}
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.asascience.ncsos.outputformatter.gc;
import com.asascience.ncsos.gc.GetCapabilitiesRequestHandler;
import com.asascience.ncsos.outputformatter.BaseOutputFormatter;
import com.asascience.ncsos.service.BaseRequestHandler;
import org.jdom.Element;
import org.jdom.Namespace;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import ucar.nc2.constants.FeatureType;
import ucar.nc2.time.CalendarDateRange;
import ucar.unidata.geoloc.LatLonRect;
import java.io.IOException;
import java.io.Writer;
import java.util.HashMap;
import java.util.List;
/**
* @author kwilcox
*/
public class GetCapsFormatter extends BaseOutputFormatter {
public static final String CONTENTS = "Contents";
public static final String EPSG4326 = "EPSG:4326";
public static final String HTTP = "HTTP";
public static final String OPERATIONS_METADATA = "OperationsMetadata";
public static final String SERVICE_IDENTIFICATION = "ServiceIdentification";
public static final String SERVICE_PROVIDER = "ServiceProvider";
private boolean exceptionFlag = false;
private final static String TEMPLATE = "templates/GC.xml";
private GetCapabilitiesRequestHandler handler = null;
public GetCapsFormatter(GetCapabilitiesRequestHandler handler) {
super();
this.handler = handler;
}
@Override
protected String getTemplateLocation() {
return TEMPLATE;
}
public void parseServiceIdentification(HashMap<String, Object> attrs) {
Namespace owsns = this.getNamespace("ows");
Element si = this.getRoot().getChild(SERVICE_IDENTIFICATION, owsns);
// Name
si.getChild("Title", owsns).setText((String)attrs.get("title"));
// Title
si.getChild("Abstract", owsns).setText((String)attrs.get("summary"));
// Access
si.getChild("AccessConstraints", owsns).setText((String)attrs.get("access_constraints"));
// Keywords
Element keywords = si.getChild("Keywords", owsns);
try {
for (String keyword : ((String)attrs.get("keywords")).split(",")) {
keywords.addContent(new Element("Keyword", owsns).setText(keyword));
}
} catch(Exception e) {
// No keywords
}
// Fees
si.getChild("Fees", owsns).setText((String)attrs.get("fees"));
}
public void removeServiceIdentification() {
this.getRoot().removeChild(SERVICE_IDENTIFICATION, this.getNamespace("ows"));
}
public void parseServiceDescription() {
Namespace owsns = this.getNamespace("ows");
Element si = this.getRoot().getChild(SERVICE_PROVIDER, owsns);
// ProviderName
si.getChild("ProviderName", owsns).setText((String)this.handler.getGlobalAttribute("publisher_name", "No global attribute 'publisher_name' found."));
// ProviderSite
si.getChild("ProviderSite", owsns).getAttribute("href", this.getNamespace("xlink")).setValue((String)this.handler.getGlobalAttribute("publisher_url", "No global attribute 'publisher_url' found."));
// ServiceContact
Element sc = si.getChild("ServiceContact", owsns);
sc.getChild("IndividualName", owsns).setText((String)this.handler.getGlobalAttribute("publisher_name", "No global attribute 'publisher_name' found."));
sc.getChild("ContactInfo", owsns).getChild("Phone", owsns).getChild("Voice", owsns).setText((String)this.handler.getGlobalAttribute("publisher_phone", "No global attribute 'publisher_phone' found."));
sc.getChild("ContactInfo", owsns).getChild("Address", owsns).getChild("ElectronicMailAddress", owsns).setText((String)this.handler.getGlobalAttribute("publisher_email", "No global attribute 'publisher_email' found."));
}
public void removeServiceProvider() {
this.getRoot().removeChild(SERVICE_PROVIDER, this.getNamespace("ows"));
}
public void setURL(String threddsURI) {
Namespace owsns = this.getNamespace("ows");
Element si = this.getRoot().getChild(OPERATIONS_METADATA, owsns);
for (Object e : si.getChildren("Operation", owsns)) {
this.setHTTPMethods((Element) e, threddsURI);
}
}
public void setOperationsMetadataGetObs(String threddsURI, List<String> dataVarShortNames, String[] stationNames) {
Namespace owsns = this.getNamespace("ows");
Element si = this.getRoot().getChild(OPERATIONS_METADATA, owsns);
for (Object e : si.getChildren("Operation", owsns)) {
Element op = (Element) e;
if (op.getAttributeValue("name").equalsIgnoreCase("GetObservation")) {
for (Object par : op.getChildren("Parameter", owsns)) {
Element p = (Element) par;
String name = p.getAttributeValue("name");
Element allowed = new Element("AllowedValues", owsns);
if (name.equalsIgnoreCase("offering")) {
for (String s : stationNames) {
allowed.addContent(new Element("Value", owsns).setText(s));
}
// Always add a 'network-all' offering
allowed.addContent(new Element("Value", owsns).setText("network-all"));
p.addContent(allowed);
} else if (name.equalsIgnoreCase("observedProperty")) {
for (String s : dataVarShortNames) {
allowed.addContent(new Element("Value", owsns).setText(s));
}
p.addContent(allowed);
} else if (name.equalsIgnoreCase("procedure")) {
for (String s : stationNames) {
allowed.addContent(new Element("Value", owsns).setText(this.handler.getUrnName(s)));
}
p.addContent(allowed);
}
}
}
}
}
public void removeOperationsMetadata() {
this.getRoot().removeChild(OPERATIONS_METADATA, this.getNamespace("ows"));
}
public void setVersionMetadata() {
Namespace owsns = this.getNamespace("ows");
Namespace gmlns = this.getNamespace("gml");
List<Element> md = this.getRoot().getChild(OPERATIONS_METADATA, owsns)
.getChild("ExtendedCapabilities", owsns)
.getChildren("metaDataProperty", gmlns);
for (Element e : md) {
if (e.getAttributeValue("title", this.getNamespace("xlink")).equalsIgnoreCase("softwareVersion")) {
e.getChild("version", gmlns).setText(NCSOS_VERSION);
}
}
}
/**
* @param threddsURI
* @param stationNames
* @param sensorNames
*/
public void setOperationsMetadataDescSen(String threddsURI, List<String> sensorNames, String[] stationNames) {
Namespace owsns = this.getNamespace("ows");
Element si = this.getRoot().getChild(OPERATIONS_METADATA, owsns);
for (Object e : si.getChildren("Operation", owsns)) {
Element op = (Element) e;
if (op.getAttributeValue("name").equalsIgnoreCase("DescribeSensor")) {
for (Object par : op.getChildren("Parameter", owsns)) {
Element p = (Element) par;
String name = p.getAttributeValue("name");
Element allowed = new Element("AllowedValues", owsns);
if (name.equalsIgnoreCase("procedure")) {
for (String s : stationNames) {
allowed.addContent(new Element("Value", owsns).setText(this.handler.getUrnName(s)));
}
p.addContent(allowed);
}
}
}
}
}
private Element buildOffering() {
return new Element("ObservationOffering", this.getNamespace("sos"));
}
public void setObservationOfferingNetwork(LatLonRect datasetRect, String[] stations, List<String> sensors, CalendarDateRange dates, FeatureType ftype) {
Namespace owsns = this.getNamespace("ows");
Namespace gmlns = this.getNamespace("gml");
Namespace sosns = this.getNamespace("sos");
Namespace xlinkns = this.getNamespace("xlink");
Element cnt = this.getRoot().getChild(CONTENTS, sosns);
Element ol = cnt.getChild(OBSERVATION_OFFERING_LIST, sosns);
Element offering = this.buildOffering();
offering.setAttribute("id", "network-all", gmlns);
// Name
offering.addContent(new Element("name", gmlns).setText(this.handler.getUrnNetworkAll()));
// Description
offering.addContent(new Element("description", gmlns).setText("Network offering containing all features in the dataset"));
// SRS
offering.addContent(new Element("srsName", gmlns).setText(EPSG4326));
// Bounded By
offering.addContent(this.getBoundedBy(datasetRect));
// Time
offering.addContent(this.getTimePeriod(dates));
// Procedure
offering.addContent(new Element("procedure", sosns).setAttribute("href", this.handler.getUrnNetworkAll(), xlinkns));
for (String s : stations) {
offering.addContent(new Element("procedure", sosns).setAttribute("href", this.handler.getUrnName(s), xlinkns));
}
// ObservedProperty
for (String s : sensors) {
offering.addContent(new Element("observedProperty", sosns).setAttribute("href", s, xlinkns));
}
// FeatureOfInterest
for (String s : stations) {
offering.addContent(new Element("featureOfInterest", sosns).setAttribute("href", this.handler.getUrnName(s), xlinkns));
}
// ResponseFormat
offering.addContent(new Element("responseFormat", sosns).setText("text/xml; subtype=\"om/1.0.0\""));
switch (ftype) {
case STATION:
offering.addContent(new Element("responseFormat", sosns).setText("text/xml;subtype=\"om/1.0.0/profiles/ioos_sos/1.0\""));
break;
default:
break;
}
// ResultModel
offering.addContent(new Element("resultModel", sosns).setText("om:ObservationCollection"));
// ResponseMode
offering.addContent(new Element("responseMode", sosns).setText("inline"));
// Add the offering to the list
ol.addContent(offering);
}
public void setObservationOffering(String stationName, LatLonRect datasetRect, List<String> sensors, CalendarDateRange dates, FeatureType ftype) {
Namespace owsns = this.getNamespace("ows");
Namespace gmlns = this.getNamespace("gml");
Namespace sosns = this.getNamespace("sos");
Namespace xlinkns = this.getNamespace("xlink");
Element ol = this.getRoot().getChild(CONTENTS, sosns).getChild(OBSERVATION_OFFERING_LIST, sosns);
Element offering = this.buildOffering();
offering.setAttribute("id", stationName, gmlns);
// Name
offering.addContent(new Element("name", gmlns).setText(this.handler.getUrnName(stationName)));
// Description
// offering.getChild("description", gmlns).setText("Network offering containing all features in the dataset");
// SRS
offering.addContent(new Element("srsName", gmlns).setText(EPSG4326));
// Bounded By
offering.addContent(this.getBoundedBy(datasetRect));
// Time
offering.addContent(this.getTimePeriod(dates));
// Procedure
offering.addContent(new Element("procedure", sosns).setAttribute("href", this.handler.getUrnName(stationName), xlinkns));
// ObservedProperty
for (String s : sensors) {
offering.addContent(new Element("observedProperty", sosns).setAttribute("href", this.handler.getSensorUrnName(stationName, s), xlinkns));
}
// FeatureOfInterest
offering.addContent(new Element("featureOfInterest", sosns).setAttribute("href", this.handler.getUrnName(stationName), xlinkns));
// ResponseFormat
offering.addContent(new Element("responseFormat", sosns).setText("text/xml; subtype=\"om/1.0.0\""));
switch (ftype) {
case STATION:
offering.addContent(new Element("responseFormat", sosns).setText("text/xml;subtype=\"om/1.0.0/profiles/ioos_sos/1.0\""));
break;
default:
break;
}
// ResultModel
offering.addContent(new Element("resultModel", sosns).setText("om:ObservationCollection"));
// ResponseMode
offering.addContent(new Element("responseMode", sosns).setText("inline"));
// Add the offering to the list
ol.addContent(offering);
}
public void removeContents() {
this.getRoot().removeChild(CONTENTS, this.getNamespace("sos"));
}
/** Interface Methods **/
public void addDataFormattedStringToInfoList(String dataFormattedString) {
throw new UnsupportedOperationException("Not supported yet.");
}
public void writeOutput(Writer writer) throws IOException {
XMLOutputter xmlOutput = new XMLOutputter();
xmlOutput.setFormat(Format.getPrettyFormat());
xmlOutput.output(this.document, writer);
}
private void setHTTPMethods(Element parent, String threddsURI) {
Namespace owsns = this.getNamespace("ows");
// GET
parent.getChild("DCP", owsns).getChild("HTTP", owsns).getChild("Get", owsns).setAttribute("href", threddsURI, this.getNamespace("xlink"));
// TODO: When (if) we ever support POST methods, add an additional Post tag here with the URL
}
private Element getBoundedBy(LatLonRect rect) {
Namespace gmlns = this.getNamespace("gml");
Element bb = new Element("boundedBy", gmlns);
Element env = new Element("Envelope", gmlns);
env.setAttribute("srsName", "http:
String lc = null;
String uc = null;
try {
lc = rect.getLowerLeftPoint().getLatitude() + " " + rect.getLowerLeftPoint().getLongitude();
uc = rect.getUpperRightPoint().getLatitude() + " " + rect.getUpperRightPoint().getLongitude();
} catch(Exception e) {
lc = "UNKNOWN";
uc = "UNKNOWN";
} finally {
env.addContent(new Element("lowerCorner", gmlns).setText(lc));
env.addContent(new Element("upperCorner", gmlns).setText(uc));
}
bb.addContent(env);
return bb;
}
private Element getTimePeriod(CalendarDateRange dateRange) {
Namespace gmlns = this.getNamespace("gml");
Namespace sosns = this.getNamespace("sos");
Element tm = new Element("time", sosns);
Element tp = new Element("TimePeriod", gmlns);
tm.addContent(tp);
String st = null;
String et = null;
try {
st = dateRange.getStart().toString();
et = dateRange.getEnd().toString();
} catch (Exception e) {
st = "UNKNOWN";
et = "UNKNOWN";
} finally {
tp.addContent(new Element("beginPosition", gmlns).setText(st));
tp.addContent(new Element("endPosition", gmlns).setText(et));
}
return tm;
}
}
|
package com.chainstaysoftware.controls.drawerpanefx;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.collections.ListChangeListener;
import javafx.event.EventHandler;
import javafx.geometry.Bounds;
import javafx.geometry.Orientation;
import javafx.scene.Cursor;
import javafx.scene.Group;
import javafx.scene.Node;
import javafx.scene.SnapshotParameters;
import javafx.scene.control.CheckMenuItem;
import javafx.scene.control.ContextMenu;
import javafx.scene.control.SplitPane;
import javafx.scene.control.ToggleButton;
import javafx.scene.control.ToolBar;
import javafx.scene.image.ImageView;
import javafx.scene.input.ClipboardContent;
import javafx.scene.input.DragEvent;
import javafx.scene.input.Dragboard;
import javafx.scene.input.MouseEvent;
import javafx.scene.input.TransferMode;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Pane;
import javafx.scene.layout.Region;
import javafx.scene.layout.StackPane;
import javafx.scene.shape.Rectangle;
import javafx.scene.transform.Rotate;
import javafx.stage.Stage;
import javafx.stage.Window;
import javafx.stage.WindowEvent;
import java.util.List;
import java.util.Optional;
import java.util.ResourceBundle;
import java.util.stream.Collectors;
/**
* Abstract Child Pane to hold onto {@link DrawerNode} instances that are
* rendered on a side of a {@link DrawerPane}
*/
abstract class AbstractSide extends Pane {
   // TODO: Allow setting max/min percentage of scene??
   //private static final int SPLITPANE_MIN_WIDTH = 100;
   private static final double MAX_PERCENTAGE_OF_SCENE = .30;
   private static final int DIVIDER_WIDTH = 5;
   // i18n strings for context-menu items etc.
   private static final ResourceBundle resourceBundle = ResourceBundle.getBundle("drawerpanefx");
   // Toolbar to contain the show/hide buttons for contained drawers.
   private final ToolBar toolBar = new ToolBar();
   // Button strip inside the toolbar; rotated for vertical sides.
   private final HBox toolbarHbox = new HBox();
   // SplitPane to contain visible non-floating drawers,
   private final SplitPane splitPane = new SplitPane();
   // Divider between the split pane and the DrawerPane center content.
   private final Divider divider;
   // Which side of the DrawerPane this instance renders.
   private final Position position;
   // Shared drag/drop state; coordinates moves between sides.
   private final DragState dragState;
   // Visual placeholder shown at the proposed drop location during a drag.
   private final Region insertionSpacer = new Region();
   // When false, opening a drawer closes the other docked drawers on this side.
   private boolean allowMultipleOpenDrawers = true;
   /**
    * @param position  side of the {@link DrawerPane} this instance renders
    * @param dragState shared drag/drop state for coordinating moves between sides
    */
   AbstractSide(final Position position,
                final DragState dragState) {
      this.position = position;
      this.dragState = dragState;
      this.divider = new Divider();
   }
   /**
    * Initialization - must be called from within the constructor
    * of implementing classes. Only should be called AFTER the
    * implementing class can return a valid JFX instance from getPane().
    */
   protected void init() {
      toolbarHbox.setId("toolbarHbox");
      // Rotate the button strip to match this side's orientation.
      toolbarHbox.setRotate(getRotation());
      initToolbar();
      initDivider();
      // Only the toolbar is shown initially; the split pane and divider are
      // attached later by the SplitPane item listener when drawers open.
      getPane().getChildren().addAll(toolBar);
      getChildren().add(getPane());
      initSplitPane();
      setClippingRegion();
      insertionSpacer.getStyleClass().add("drawerpanefx-spacer");
   }
   /**
    * Initialize the {@link ToolBar} instance: embed the (possibly rotated)
    * button strip, size the bar to this side, and wire up drag/drop handlers
    * so drawer buttons can be dropped onto it.
    */
   private void initToolbar() {
      // Group wrapper so the rotated HBox lays out with its rotated bounds.
      final Group toolbarGroup = new Group();
      toolbarGroup.getChildren().add(toolbarHbox);
      toolBar.getItems().add(toolbarGroup);
      toolBar.setOrientation(Orientation.VERTICAL);
      toolBar.getStyleClass().setAll(isHorizontal()
         ? "drawerpanefx-horizontal-toolbar"
         : "drawerpanefx-vertical-toolbar");
      // Stretch along this side's axis; keep a minimum thickness so the bar
      // stays grabbable when empty.
      if (isHorizontal()) {
         toolBar.setMinHeight(20);
         toolBar.prefWidthProperty().bind(widthProperty());
      } else {
         toolBar.setMinWidth(20);
         toolBar.prefHeightProperty().bind(heightProperty());
      }
      toolBar.setOnDragOver(new ToolbarDragOverHandler());
      toolBar.setOnDragDropped(new ToolbarDragDroppedHandler());
      // Clean up the drop indicator when the drag leaves this toolbar.
      toolBar.setOnDragExited(event -> toolbarHbox.getChildren().remove(insertionSpacer));
   }
/**
* {@link EventHandler} for handling drag over events for the {@link ToolBar}.
* Used to indicate if a drag event is valid for this {@link ToolBar}.
* And, to visibly indicate where a {@link DrawerNode} will be dropped.
*/
private class ToolbarDragOverHandler implements EventHandler<DragEvent> {
@Override
public void handle(final DragEvent event) {
if (canAccept(event)) {
// Remove the spacer if present.
toolbarHbox.getChildren().remove(insertionSpacer);
// Insert the spacer at the new proposed location.
toolbarHbox.getChildren().stream()
.filter(node -> node instanceof ToggleButton)
.findFirst()
.ifPresent(node -> insertionSpacer.setPrefWidth(((ToggleButton)node).getWidth()));
final int i = findInsertPosition(event);
toolbarHbox.getChildren().add(i, insertionSpacer);
event.acceptTransferModes(TransferMode.MOVE);
}
event.consume();
}
/**
* True of the event can be handled by the owning {@link ToolBar}.
*/
private boolean canAccept(final DragEvent dragEvent) {
final DrawerNode drawerNode = dragState.getDraggedNode();
return dragEvent.getDragboard().hasContent(DrawerDataFormat.CLIPBOARD_CONTENT_FORMAT)
&& drawerNode.isValidPosition(position);
}
}
   /**
    * {@link EventHandler} for handling drag dropped events for the {@link ToolBar}.
    * Used to update the owning {@link ToolBar} with a dropped {@link DrawerNode}.
    * Rebuilds both the button strip and the {@link SplitPane} contents so the
    * dropped drawer lands at the requested index.
    */
   private class ToolbarDragDroppedHandler implements EventHandler<DragEvent> {
      @Override
      public void handle(final DragEvent event) {
         boolean success = false;
         final DrawerNode drawerNode = dragState.getDraggedNode();
         if (drawerNode != null) {
            // Remove the spacer.
            toolbarHbox.getChildren().remove(insertionSpacer);
            // If the dragged node is present, then remove from the current location
            toolbarHbox.getChildren().stream()
               .filter(node -> node.getUserData() == drawerNode)
               .findFirst()
               .ifPresent(node -> toolbarHbox.getChildren().remove(node));
            // Insert the dragged node into the new location.
            final int i = findInsertPosition(event);
            final ToggleButton newButton = createButton(drawerNode);
            toolbarHbox.getChildren().add(i, newButton);
            // Drag is finished: clear the shared drag state.
            dragState.setDraggedNode(null);
            // Close open drawers if only allow single open.
            if (!allowMultipleOpenDrawers) {
               splitPane.getItems().clear();
               // Toggle buttons off (floating drawers are left alone).
               toolbarHbox.getChildren()
                  .stream()
                  .filter(btn -> !newButton.equals(btn)
                     && !((DrawerNode)btn.getUserData()).isFloating())
                  .forEach(btn -> ((ToggleButton)btn).setSelected(false));
            }
            // Update the split pane to include the dragged node
            // in the correct location (if visible and not floating.
            splitPane.getItems().clear();
            splitPane.getItems().setAll(toolbarHbox.getChildren().stream()
               .map(button -> (DrawerNode)button.getUserData())
               .filter(Node::isVisible)
               .filter(node -> !node.isFloating())
               .collect(Collectors.toList()));
            success = true;
         }
         event.setDropCompleted(success);
         event.consume();
      }
   }
   /**
    * Initialize the {@link SplitPane}: match this side's orientation and
    * react to drawers being added/removed (see SplitPaneItemChangeListener).
    */
   private void initSplitPane() {
      splitPane.setOrientation(getOrientation());
      splitPane.getItems().addListener(new SplitPaneItemChangeListener());
   }
/**
* {@link ListChangeListener} for the {@link SplitPane} Children.
* Used to show/hide the {@link SplitPane} and to set the divider positions
* when new drawers are shown/hidden.
*/
private class SplitPaneItemChangeListener implements ListChangeListener<Node> {
@Override
public void onChanged(Change<? extends Node> c) {
final int numItems = splitPane.getItems().size();
if (numItems == 0) {
getPane().getChildren().remove(splitPane);
getPane().getChildren().remove(divider);
return;
}
if (Position.Top.equals(position) || Position.Left.equals(position)) {
getPane().getChildren().setAll(toolBar, splitPane, divider);
} else if (Position.Bottom.equals(position) || Position.Right.equals(position)) {
getPane().getChildren().setAll(divider, splitPane, toolBar);
}
final double percent = 1.0 / numItems;
for (int i = 1; i < numItems; i++) {
splitPane.setDividerPosition(i - 1, i * percent);
}
}
}
/**
* Sets clipping region so that children of this Side do not spill out.
*/
private void setClippingRegion() {
final Rectangle clipRectangle = new Rectangle();
setClip(clipRectangle);
layoutBoundsProperty().addListener((observable, oldValue, newValue) -> {
clipRectangle.setWidth(newValue.getWidth());
clipRectangle.setHeight(newValue.getHeight());
});
}
   /** JFX pane hosting this Side's toolbar, divider and split pane. */
   protected abstract Pane getPane();
   /**
    * Rotation of this Pane.
    */
   protected abstract double getRotation();
   /**
    * Orientation of this Pane.
    */
   protected abstract Orientation getOrientation();
   /**
    * Adds a {@link DrawerNode} to the end of this Side.
    * Null nodes are ignored (see the two-arg overload).
    */
   void addNode(final DrawerNode node) {
      addNode(node, toolbarHbox.getChildren().size());
   }
/**
* Adds a {@link DrawerNode} to this side a position index (0 based).
*/
void addNode(final DrawerNode node,
final int index) {
if (node == null) {
return;
}
if (!node.isFloating() && node.isVisible()) {
splitPane.getItems().add(index, node);
}
toolbarHbox.getChildren().add(index, createButton(node));
}
/**
* Creates a {@link ToggleButton} for the passed in {@link DrawerNode}.
*/
private ToggleButton createButton(final DrawerNode node) {
final ToggleButton button = new ToggleButton(node.getTitle());
button.setUserData(node);
button.setSelected(node.isVisible());
button.selectedProperty().addListener(new ButtonChangeListener(node));
button.setOnDragDetected(new ButtonDragDetectedHandler(button, node));
button.setOnDragDone(new ButtonDragDoneHandler(button, node));
// TODO: Setting focus traversable to false because pressing button
// in non-focused toolbar is not resulting in the select action. Turning
// off focus traversable works around this issue.
button.setFocusTraversable(false);
button.getStyleClass().add(isHorizontal()
? "drawerpanefx-horizontal-button"
: "drawerpanefx-vertical-button");
if (node.canFloat()) {
button.setContextMenu(createButtonContextMenu(node));
}
if (node.getIcon() != null) {
button.setGraphic(new ImageView(node.getIcon()));
}
return button;
}
/**
* {@link ChangeListener} for a drawer button. Used to show/hide the
* related {@link DrawerNode} within this side.
*/
private class ButtonChangeListener implements ChangeListener<Boolean> {
private final DrawerNode node;
ButtonChangeListener(final DrawerNode node) {
this.node = node;
}
@Override
public void changed(final ObservableValue<? extends Boolean> observable,
final Boolean oldValue,
final Boolean newValue) {
if (newValue) {
if (node.isFloating()) {
floatWindow(node);
} else {
dockWindow(node);
}
} else {
hideNodeInternal(node);
}
}
}
   /**
    * {@link EventHandler} for detecting a drag operation on the related
    * {@link DrawerNode}. Used to start the JFX drag/drop: fills the
    * dragboard, records the dragged node and its origin index in the shared
    * {@link DragState}, and removes the button from the toolbar for the
    * duration of the drag.
    */
   private class ButtonDragDetectedHandler implements EventHandler<MouseEvent> {
      private final ToggleButton button;
      private final DrawerNode node;
      ButtonDragDetectedHandler(final ToggleButton button,
                                final DrawerNode node) {
         this.button = button;
         this.node = node;
      }
      @Override
      public void handle(final MouseEvent event) {
         final Dragboard db = button.startDragAndDrop(TransferMode.MOVE);
         final ClipboardContent clipboardContent = new ClipboardContent();
         // Marker content only -- the real payload travels via dragState.
         clipboardContent.put(DrawerDataFormat.CLIPBOARD_CONTENT_FORMAT, "foo");
         // Snapshot the button, rotated to match this side, as the drag view.
         final SnapshotParameters snapshotParameters = new SnapshotParameters();
         snapshotParameters.setTransform(new Rotate(getRotation()));
         db.setContent(clipboardContent);
         db.setDragView(button.snapshot(snapshotParameters, null));
         dragState.setDraggedNode(node);
         // Remember origin so the button can be restored if the drop fails.
         dragState.setInitialPosition(toolbarHbox.getChildren().indexOf(button));
         toolbarHbox.getChildren().remove(button);
         event.consume();
      }
   }
/**
* {@link EventHandler} to handle drag operation completion for a
* {@link DrawerNode}. Used to remove the dragged {@link DrawerNode}
* from this Side. This is a NOOP if the {@link DrawerNode} was just
* moved within this side.
*/
private class ButtonDragDoneHandler implements EventHandler<DragEvent> {
    // Button that was dragged.
    private final ToggleButton button;
    // Drawer content associated with that button.
    private final DrawerNode node;

    ButtonDragDoneHandler(final ToggleButton button,
            final DrawerNode node) {
        this.button = button;
        this.node = node;
    }

    /**
     * Invoked when the drag gesture ends. A completed MOVE transfer means
     * the button was accepted by some toolbar; anything else means the
     * drop was rejected.
     */
    @Override
    public void handle(final DragEvent event) {
        if (!TransferMode.MOVE.equals(event.getTransferMode())) {
            handleNotDropped();
        } else {
            handleDropped();
        }
    }

    // Drop accepted: if the button now belongs to a different side, detach
    // the node's content from this side's split pane.
    private void handleDropped() {
        // Delete the node if the node was moved into another side.
        if (!findButton(node).isPresent()){
            splitPane.getItems().remove(node);
        }
    }

    // Drop rejected: restore the button to its original toolbar slot and,
    // for floatable nodes, switch the node into floating mode by toggling
    // its "floating" context-menu item (which triggers the mode listener).
    private void handleNotDropped() {
        // Not dropped on a toolbar...
        // add the button back to the toolbar.
        toolbarHbox.getChildren().add(dragState.getInitialPosition(), button);
        // If the node canFloat, and not dropped on a toolbar then
        // float the node.
        if (node.canFloat()) {
            button.getContextMenu().getItems().stream()
                    .filter(menuItem -> menuItem.getText().equals(resourceBundle.getString("floatingmode.menuitem.txt")))
                    .findFirst()
                    .ifPresent(menuItem -> ((CheckMenuItem)menuItem).setSelected(true));
        }
    }
}
/**
* Creates a {@link ContextMenu} for a drawer button.
*/
private ContextMenu createButtonContextMenu(final DrawerNode node) {
    // Single check item toggling the node between docked and floating mode.
    final CheckMenuItem floatingMenuItem
            = new CheckMenuItem(resourceBundle.getString("floatingmode.menuitem.txt"));
    floatingMenuItem.setSelected(node.isFloating());
    floatingMenuItem.selectedProperty()
            .addListener((observable, oldValue, newValue) -> {
                // Guard against spurious notifications with no state change.
                if (oldValue == newValue) {
                    return;
                }
                // Leaving floating mode: close the floating window first.
                if (node.isFloating()) {
                    closeFloatingWindow(node);
                }
                node.setFloating(newValue);
                // If the drawer is currently open, re-show it in its new mode.
                findButton(node).ifPresent(toggleButton -> {
                    if (toggleButton.isSelected()) {
                        showNodeInternal(node);
                    }
                });
            });
    final ContextMenu contextMenu = new ContextMenu();
    contextMenu.getItems().addAll(floatingMenuItem);
    return contextMenu;
}
/**
 * Show a {@link DrawerNode} by selecting its toolbar button, which in turn
 * triggers the actual display logic. Disabled buttons are ignored, as is a
 * {@code null} node.
 */
void showNode(final DrawerNode node) {
    if (node == null) {
        return;
    }
    findButton(node)
            .filter(toggle -> !toggle.isDisabled())
            .ifPresent(toggle -> toggle.setSelected(true));
}
/**
* Show a {@link DrawerNode}. Assumes that the associated button
* is selected.
*/
private void showNodeInternal(final DrawerNode node) {
    // Dispatch on the node's current display mode.
    if (!node.isFloating()) {
        dockWindow(node);
    } else {
        floatWindow(node);
    }
}
/**
* Float a {@link DrawerNode} within its own window.
*/
private void floatWindow(final DrawerNode node) {
    node.setVisible(true);
    // A floating node no longer occupies a split-pane slot.
    splitPane.getItems().remove(node);
    final Stage floatingWindow = new FloatingWindowBuilder().create(getScene().getWindow(), node);
    // Restore the last remembered floating position, if any.
    node.getFloatingX().ifPresent(floatingWindow::setX);
    node.getFloatingY().ifPresent(floatingWindow::setY);
    floatingWindow.setOnCloseRequest(event -> {
        // Closing the window unselects the button (hiding the drawer) and
        // detaches the node from the window's content pane.
        findButton(node).ifPresent(toggleButton -> toggleButton.setSelected(false));
        ((Pane)node.getParent()).getChildren().clear();
    });
    // Track window moves so the position can be restored next time.
    floatingWindow.xProperty().addListener((observable, oldValue, newValue)
            -> node.setFloatingX(newValue.doubleValue()));
    floatingWindow.yProperty().addListener((observable, oldValue, newValue)
            -> node.setFloatingY(newValue.doubleValue()));
    floatingWindow.show();
}
/**
* Dock a {@link DrawerNode} within this side's {@link SplitPane} at
* the appropriate location.
*/
private void dockWindow(final DrawerNode node) {
    // Ensure any floating window for this node is closed before docking.
    closeFloatingWindow(node);
    if (!allowMultipleOpenDrawers) {
        // Single-drawer mode: close every other docked drawer.
        splitPane.getItems().clear();
        // Toggle buttons off.
        final Optional<ToggleButton> nodeButton = findButton(node);
        // Deselect all buttons except this node's own and those whose
        // nodes are floating (floating drawers are unaffected).
        toolbarHbox.getChildren()
                .stream()
                .filter(btn -> !nodeButton.map(nodeBtn -> nodeBtn.equals(btn)).orElse(false)
                        && !((DrawerNode)btn.getUserData()).isFloating())
                .forEach(btn -> ((ToggleButton)btn).setSelected(false));
    }
    node.setVisible(true);
    // Insert at the slot matching the button order in the toolbar.
    splitPane.getItems().add(findInsertPosition(node), node);
}
/**
 * Hide a {@link DrawerNode} by deselecting its toolbar button; the
 * selection listener performs the actual teardown.
 */
void hideNode(final DrawerNode node) {
    final Optional<ToggleButton> toggle = findButton(node);
    toggle.ifPresent(b -> b.setSelected(false));
}
/**
* Hide a {@link DrawerNode}. Assumes the associated button is
* already unselected.
*/
private void hideNodeInternal(final DrawerNode node) {
    // Order matters: close the floating window (firing its close handler)
    // before hiding and detaching the node from the split pane.
    closeFloatingWindow(node);
    node.setVisible(false);
    splitPane.getItems().remove(node);
}
/**
* Close a {@link DrawerNode}'s floating window - if necessary.
*/
private void closeFloatingWindow(DrawerNode node) {
    // Nothing to do unless the node is floating and attached to a scene.
    if (!node.isFloating() || node.getScene() == null) {
        return;
    }
    // Fire a close request so the window's close handler runs as if the
    // user had closed it.
    final Window window = node.getScene().getWindow();
    window.fireEvent(new WindowEvent(window, WindowEvent.WINDOW_CLOSE_REQUEST));
}
/**
* Disable/enable a {@link DrawerNode}'s show/hide button. And, associated node if visible.
*/
public void disable(final DrawerNode node,
        final boolean disable) {
    // Disable both the drawer content and its toolbar button (if present).
    node.setDisable(disable);
    findButton(node).ifPresent(toggle -> toggle.setDisable(disable));
}
/**
* Find a button within this side's buttons.
*/
private Optional<ToggleButton> findButton(final DrawerNode node) {
    // Buttons carry their DrawerNode as user data; match on that.
    return toolbarHbox.getChildren().stream()
            .filter(child -> child.getUserData().equals(node))
            .map(child -> (ToggleButton) child)
            .findFirst();
}
/**
* Remove a {@link DrawerNode} from this side.
*/
void removeNode(final DrawerNode node) {
    if (node == null) {
        return;
    }
    // If the node has a button here, remove the button, close any floating
    // window, and detach the node from the split pane.
    findButton(node).ifPresent(toggle -> {
        toolbarHbox.getChildren().remove(toggle);
        closeFloatingWindow(node);
        splitPane.getItems().remove(node);
    });
}
/**
* Determine the insert position for the {@link DrawerNode}
* based on the location of the associated {@link DrawerNode}'s button
* within the {@link ToolBar}.
*/
private int findInsertPosition(final DrawerNode node) {
    int position = 0;
    for (final Node child : toolbarHbox.getChildren()) {
        final ToggleButton toggle = (ToggleButton) child;
        // Found this node's own button: dock before the drawers counted so far.
        if (toggle.getUserData() == node) {
            return position;
        }
        // Only currently docked drawers occupy split-pane slots.
        if (splitPane.getItems().contains(toggle.getUserData())) {
            position++;
        }
    }
    return position;
}
/**
* Determine the insert position for the {@link DrawerNode}
* based on the coordinates of the passed in {@link DragEvent}.
*/
private int findInsertPosition(final DragEvent event) {
    // Delegate to the orientation-specific computation.
    if (isHorizontal()) {
        return findInsertPositionHorizontal(event);
    }
    return findInsertPositionVertical(event);
}
/**
* Determine the insert position for the {@link DrawerNode}
* based on the coordinates of the passed in {@link DragEvent} if this is
* a horizontal side.
*/
private int findInsertPositionHorizontal(final DragEvent event) {
    final double dropX = event.getSceneX();
    final List<Node> children = toolbarHbox.getChildren();
    // Insert before the first button whose center lies right of the drop point.
    for (int index = 0; index < children.size(); index++) {
        final Node child = children.get(index);
        if (child instanceof ToggleButton
                && centerOfButton((ToggleButton) child) > dropX) {
            return index;
        }
    }
    // Past every button: append at the end.
    return children.size();
}
/**
* Determine the insert position for the {@link DrawerNode}
* based on the coordinates of the passed in {@link DragEvent} if this is
* a vertical side.
*/
private int findInsertPositionVertical(final DragEvent event) {
    final double dropY = event.getSceneY();
    final List<Node> children = toolbarHbox.getChildren();
    // Insert before the first button whose center lies below the drop point.
    for (int index = 0; index < children.size(); index++) {
        final Node child = children.get(index);
        if (child instanceof ToggleButton
                && centerOfButton((ToggleButton) child) > dropY) {
            return index;
        }
    }
    // Past every button: append at the end.
    return children.size();
}
/**
* Determine the center of the passed in button.
*/
private double centerOfButton(final ToggleButton button) {
    // Delegate to the orientation-specific computation.
    if (isHorizontal()) {
        return centerOfButtonHorizontal(button);
    }
    return centerOfButtonVertical(button);
}
/**
* Determine the center of the passed in horizontal button.
*/
private double centerOfButtonHorizontal(final ToggleButton button) {
    // Scene-space X of the button's center.
    final Bounds sceneBounds = button.localToScene(button.getLayoutBounds());
    final double halfWidth = button.getWidth() / 2;
    return sceneBounds.getMinX() + halfWidth;
}
/**
* Determine the center of the passed in vertical button.
*/
private double centerOfButtonVertical(final ToggleButton button) {
    final Bounds boundsInScene = button.localToScene(button.getLayoutBounds());
    // NOTE(review): this adds half of getWidth(), not getHeight(). That is
    // likely intentional because buttons on vertical sides are rotated (see
    // getRotation() in the drag handler), so the local width spans the
    // scene's Y axis — but confirm; if the buttons are unrotated here this
    // should be button.getHeight() / 2.
    return boundsInScene.getMinY() + (button.getWidth() / 2);
}
/**
* True if this side is horizontal.
*/
private boolean isHorizontal() {
    // Top and bottom sides lay their drawers out horizontally.
    final boolean top = Position.Top.equals(position);
    final boolean bottom = Position.Bottom.equals(position);
    return top || bottom;
}
/**
* Call to indicate if this side supports multiple opened drawers or
* a single opened drawer.
* TODO: repaint side if value changes...
*/
void setAllowMultipleOpenDrawers(boolean allowMultipleOpenDrawers) {
    // Takes effect on the next dock operation; currently open drawers are
    // not re-laid-out (see TODO above).
    this.allowMultipleOpenDrawers = allowMultipleOpenDrawers;
}
/**
 * Returns the {@link DrawerNode}s managed by this side, in toolbar-button
 * order.
 */
List<DrawerNode> getNodes() {
    return toolbarHbox.getChildren()
            .stream()
            .map(child -> (DrawerNode) child.getUserData())
            .collect(Collectors.toList());
}
/**
 * Size the divider: a thin fixed strip along the side's long axis, bound to
 * this side's width or height depending on orientation.
 */
private void initDivider() {
    if (!isHorizontal()) {
        divider.setPrefWidth(DIVIDER_WIDTH);
        divider.prefHeightProperty().bind(heightProperty());
    } else {
        divider.prefWidthProperty().bind(widthProperty());
        divider.setPrefHeight(DIVIDER_WIDTH);
    }
}
/**
* Divider for dividing Side from Center of {@link DrawerPane}.
* Includes mouse handling for resizing the Side.
*/
private class Divider extends StackPane {
    Divider() {
        // Style class and resize cursor depend on the side's orientation.
        getStyleClass().setAll(isHorizontal()
                ? "drawerpanefx-horizontal-divider"
                : "drawerpanefx-vertical-divider");
        setCursor(isHorizontal() ? Cursor.V_RESIZE : Cursor.H_RESIZE);
        // Horizontal sides (top/bottom) resize vertically and vice versa,
        // hence the cross-wired handler choice.
        final EventHandler<MouseEvent> mouseHandler
                = isHorizontal()
                ? new MouseHandlerForHorizontal()
                : new MouseHandlerForVertical();
        setOnMouseMoved(mouseHandler);
        setOnMouseDragged(mouseHandler);
        setOnMousePressed(mouseHandler);
        setOnMouseReleased(mouseHandler);
    }

    /**
     * {@link EventHandler} for {@link SplitPane} mouse events if this is a vertical side.
     * Used to allow resizing of side (drag adjusts the side's width).
     */
    private class MouseHandlerForVertical implements EventHandler<MouseEvent> {
        // True while the primary drag is in progress.
        private boolean mousePressed = false;

        @Override
        public void handle(final MouseEvent mouseEvent) {
            if (mouseEvent.getEventType().equals(MouseEvent.MOUSE_PRESSED)) {
                mousePressed = true;
            } else if (mouseEvent.getEventType().equals(MouseEvent.MOUSE_DRAGGED)
                    && mousePressed) {
                // Distance dragged past the divider's scene-space left edge.
                final Bounds bounds = Divider.this.localToScene(Divider.this.getLayoutBounds());
                final double mouseX = mouseEvent.getSceneX();
                final double xDelta = mouseX - bounds.getMinX();
                // A right-hand side grows when dragged left, hence the sign flip.
                final double updatedWidth = splitPane.getWidth() + xDelta
                        * (Position.Right.equals(position) ? -1 : 1);
                // Cap the side at a fraction of the scene so it can't swallow
                // the whole window.
                if (updatedWidth / splitPane.getScene().getWidth() <= MAX_PERCENTAGE_OF_SCENE) {
                    splitPane.setPrefWidth(updatedWidth);
                }
                //splitPane.setMinWidth(SPLITPANE_MIN_WIDTH);
            } else if (mouseEvent.getEventType().equals(MouseEvent.MOUSE_RELEASED)) {
                mousePressed = false;
            }
        }
    }

    /**
     * {@link EventHandler} for {@link SplitPane} mouse events if this is a horizontal side.
     * Used to allow resizing of side (drag adjusts the side's height).
     */
    private class MouseHandlerForHorizontal implements EventHandler<MouseEvent> {
        // True while the primary drag is in progress.
        private boolean mousePressed = false;

        @Override
        public void handle(final MouseEvent mouseEvent) {
            if (mouseEvent.getEventType().equals(MouseEvent.MOUSE_PRESSED)) {
                mousePressed = true;
            } else if (mouseEvent.getEventType().equals(MouseEvent.MOUSE_DRAGGED)
                    && mousePressed) {
                // Distance dragged past the divider's scene-space top edge.
                final Bounds bounds = Divider.this.localToScene(Divider.this.getLayoutBounds());
                final double mouseY = mouseEvent.getSceneY();
                final double yDelta = mouseY - bounds.getMinY();
                // A bottom side grows when dragged up, hence the sign flip.
                final double updatedHeight = splitPane.getHeight() + yDelta
                        * (Position.Bottom.equals(position) ? -1 : 1);
                // Cap the side at a fraction of the scene so it can't swallow
                // the whole window.
                if (updatedHeight / splitPane.getScene().getHeight() <= MAX_PERCENTAGE_OF_SCENE) {
                    splitPane.setPrefHeight(updatedHeight);
                }
                //splitPane.setMinWidth(SPLITPANE_MIN_WIDTH);
            } else if (mouseEvent.getEventType().equals(MouseEvent.MOUSE_RELEASED)) {
                mousePressed = false;
            }
        }
    }
}
}
|
package com.ctrip.zeus.service.model.handler.impl;
import com.ctrip.zeus.model.entity.Group;
import com.ctrip.zeus.model.entity.Slb;
import com.ctrip.zeus.model.entity.VirtualServer;
import com.ctrip.zeus.model.transform.DefaultSaxParser;
import com.ctrip.zeus.support.ObjectJsonParser;
import org.xml.sax.SAXException;
import java.io.IOException;
/**
 * Parses serialized model entities that may arrive either as XML or JSON.
 * The format is sniffed from the first character: '<' means XML (SAX
 * parser), anything else is handed to the JSON parser.
 */
public class ContentReaders {

    // NOTE(review): every reader calls content.charAt(0), so null content
    // throws NullPointerException and empty content throws
    // StringIndexOutOfBoundsException rather than a parse error — confirm
    // callers guarantee non-empty input.
    public static Group readGroupContent(String content) throws IOException, SAXException {
        if (content.charAt(0) == '<') {
            return DefaultSaxParser.parseEntity(Group.class, content);
        } else {
            return ObjectJsonParser.parse(content, Group.class);
        }
    }

    public static Slb readSlbContent(String content) throws IOException, SAXException {
        if (content.charAt(0) == '<') {
            return DefaultSaxParser.parseEntity(Slb.class, content);
        } else {
            return ObjectJsonParser.parse(content, Slb.class);
        }
    }

    public static VirtualServer readVirtualServerContent(String content) throws IOException, SAXException {
        VirtualServer vs;
        if (content.charAt(0) == '<') {
            vs = DefaultSaxParser.parseEntity(VirtualServer.class, content);
        } else {
            vs = ObjectJsonParser.parse(content, VirtualServer.class);
        }
        //TODO render for deprecated field
        // Back-fill the slbIds collection from the deprecated single slbId
        // field so downstream code only needs to consult slbIds.
        if (vs != null) {
            if ((vs.getSlbIds() == null || vs.getSlbIds().isEmpty())
                    && vs.getSlbId() != null) {
                // NOTE(review): if getSlbIds() can really return null (as the
                // check above implies) this add() would NPE — presumably the
                // generated getter lazily initializes the list; confirm.
                vs.getSlbIds().add(vs.getSlbId());
            }
        }
        return vs;
    }
}
|
package com.elmakers.mine.bukkit.magic.listener;
import com.elmakers.mine.bukkit.api.block.BlockData;
import com.elmakers.mine.bukkit.api.block.UndoList;
import com.elmakers.mine.bukkit.api.magic.Mage;
import com.elmakers.mine.bukkit.magic.MagicController;
import com.elmakers.mine.bukkit.utility.CompatibilityUtils;
import com.elmakers.mine.bukkit.utility.NMSUtils;
import com.elmakers.mine.bukkit.utility.Targeting;
import com.elmakers.mine.bukkit.wand.Wand;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.block.Block;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Item;
import org.bukkit.entity.Player;
import org.bukkit.entity.Projectile;
import org.bukkit.entity.TNTPrimed;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.EntityCombustEvent;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.entity.EntityDeathEvent;
import org.bukkit.event.entity.ItemDespawnEvent;
import org.bukkit.event.entity.ItemSpawnEvent;
import org.bukkit.event.entity.ProjectileHitEvent;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.PlayerInventory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
/**
 * Bukkit {@link Listener} wiring entity events (combustion, damage, death,
 * item spawn/despawn, projectile hits) into the Magic plugin: undo-list
 * tracking, wand protection/casting, and mage lifecycle callbacks.
 */
public class EntityController implements Listener {
    private final MagicController controller;
    // Cancel melee damage from players not holding a melee weapon.
    private boolean preventMeleeDamage = false;
    // Keep wands in the respawn inventory instead of dropping them on death.
    private boolean keepWandsOnDeath = true;
    // Cancel every ItemSpawnEvent outright.
    private boolean disableItemSpawn = false;
    // Turn melee swings made with an active wand into spell casts.
    private boolean preventWandMeleeDamage = true;
    // Age applied to naturally dropped items (converted to ticks in
    // onItemSpawn); 0 disables aging.
    private int ageDroppedItems = 0;

    public EntityController(MagicController controller) {
        this.controller = controller;
    }

    public void setPreventMeleeDamage(boolean prevent) {
        preventMeleeDamage = prevent;
    }

    public void setKeepWandsOnDeath(boolean keep) {
        keepWandsOnDeath = keep;
    }

    public void setPreventWandMeleeDamage(boolean prevent) {
        preventWandMeleeDamage = prevent;
    }

    public void setDisableItemSpawn(boolean disable) {
        disableItemSpawn = disable;
    }

    public void setAgeDroppedItems(int age) {
        ageDroppedItems = age;
    }

    @EventHandler
    public void onProjectileHit(ProjectileHitEvent event) {
        final Projectile projectile = event.getEntity();
        // This happens before EntityDamageEvent, so the hit target will
        // be assigned before the tracked projectile is checked.
        // This is here to register misses, mainly.
        Targeting.checkTracking(controller.getPlugin(), projectile, null);
    }

    @EventHandler
    public void onEntityCombust(EntityCombustEvent event)
    {
        Entity entity = event.getEntity();
        // Give the mage (if this entity is one) a chance to react/cancel.
        Mage apiMage = controller.getRegisteredMage(entity);
        if (apiMage != null) {
            if (apiMage instanceof com.elmakers.mine.bukkit.magic.Mage) {
                com.elmakers.mine.bukkit.magic.Mage mage = (com.elmakers.mine.bukkit.magic.Mage) apiMage;
                mage.onPlayerCombust(event);
            }
        }
        if (!event.isCancelled())
        {
            // Record the combusting entity in any pending undo list covering
            // its location so the change can be rolled back.
            UndoList undoList = controller.getPendingUndo(entity.getLocation());
            if (undoList != null)
            {
                undoList.modify(entity);
            }
        }
    }

    @EventHandler(priority = EventPriority.HIGH, ignoreCancelled = true)
    public void onEntityDamageByEntity(EntityDamageByEntityEvent event) {
        Entity entity = event.getEntity();
        if (entity instanceof Projectile || entity instanceof TNTPrimed) return;
        Entity damager = event.getDamager();
        // If the damager is tied to a spell's undo list, track the damage
        // there so it can be undone.
        UndoList undoList = controller.getEntityUndo(damager);
        if (undoList != null) {
            // Prevent dropping items from frames,
            if (event.getCause() != EntityDamageEvent.DamageCause.ENTITY_ATTACK || undoList.isScheduled()) {
                undoList.damage(entity, event.getDamage());
                // If the undo system already killed/removed the entity,
                // suppress the vanilla damage.
                if (!entity.isValid()) {
                    event.setCancelled(true);
                }
            } else {
                undoList.modify(entity);
            }
        }
    }

    @EventHandler(priority = EventPriority.LOWEST, ignoreCancelled = true)
    public void onEntityPreDamageByEntity(EntityDamageByEntityEvent event) {
        Entity entity = event.getEntity();
        if (entity instanceof Projectile || entity instanceof TNTPrimed) return;
        Mage entityMage = controller.getRegisteredMage(entity);
        if (entityMage != null) {
            // Play the damaged player's wand "hit_blocked" effect when they
            // are blocking.
            if (entity instanceof Player) {
                Player damaged = (Player)entity;
                if (damaged.isBlocking()) {
                    com.elmakers.mine.bukkit.api.wand.Wand damagedWand = entityMage.getActiveWand();
                    if (damagedWand != null) {
                        damagedWand.playEffects("hit_blocked");
                    }
                }
            }
            // Fully protected mages take no damage at all.
            if (entityMage.isSuperProtected()) {
                event.setCancelled(true);
                return;
            }
        }
        Entity damager = event.getDamager();
        if (damager instanceof Player ) {
            Mage damagerMage = controller.getRegisteredMage(damager);
            com.elmakers.mine.bukkit.api.wand.Wand activeWand = null;
            // CompatibilityUtils.isDamaging guards against re-entrancy from
            // the wand-cast damage triggered below.
            boolean isMelee = event.getCause() == EntityDamageEvent.DamageCause.ENTITY_ATTACK && !CompatibilityUtils.isDamaging;
            if (isMelee && damagerMage != null) {
                activeWand = damagerMage.getActiveWand();
                if (activeWand != null) {
                    activeWand.playEffects("hit_entity");
                    activeWand.damageDealt(event.getDamage(), entity);
                }
            }
            if (preventWandMeleeDamage)
            {
                boolean hasWand = activeWand != null;
                Player player = (Player) damager;
                ItemStack itemInHand = player.getItemInHand();
                boolean isMeleeWeapon = controller.isMeleeWeapon(itemInHand);
                // A melee swing with an active wand (that isn't a melee
                // weapon) casts the wand instead of dealing damage.
                if (isMelee && hasWand && !isMeleeWeapon) {
                    event.setCancelled(true);
                    CompatibilityUtils.isDamaging = true;
                    activeWand.cast();
                    CompatibilityUtils.isDamaging = false;
                }
                else if (!hasWand && preventMeleeDamage && isMelee && !isMeleeWeapon) {
                    event.setCancelled(true);
                }
            }
        } else {
            // Non-player damager: let the projectile tracker register a hit.
            Targeting.checkTracking(controller.getPlugin(), damager, entity);
        }
    }

    @EventHandler(priority = EventPriority.LOWEST)
    public void onEntityDeath(EntityDeathEvent event)
    {
        Entity entity = event.getEntity();
        // Entities tagged "nodrops" drop nothing and give no XP.
        if (entity.hasMetadata("nodrops")) {
            event.setDroppedExp(0);
            event.getDrops().clear();
        }
        Mage apiMage = controller.getRegisteredMage(entity);
        if (apiMage == null) return;
        if (!(apiMage instanceof com.elmakers.mine.bukkit.magic.Mage)) return;
        com.elmakers.mine.bukkit.magic.Mage mage = (com.elmakers.mine.bukkit.magic.Mage)apiMage;
        mage.onPlayerDeath(event);
        mage.deactivateAllSpells();
        if (!(entity instanceof Player)) {
            return;
        }
        final Player player = (Player)entity;
        // With keepInventory there is nothing to filter from the drops.
        String rule = entity.getWorld().getGameRuleValue("keepInventory");
        if (rule.equals("true")) return;
        List<ItemStack> drops = event.getDrops();
        Wand wand = mage.getActiveWand();
        if (wand != null) {
            // Retrieve stored inventory before deactivating the wand
            if (mage.hasStoredInventory()) {
                // Remove the wand inventory from drops
                drops.removeAll(Arrays.asList(player.getInventory().getContents()));
                // Deactivate the wand.
                wand.deactivate();
                // Add restored inventory back to drops
                ItemStack[] stored = player.getInventory().getContents();
                for (ItemStack stack : stored) {
                    if (stack != null) {
                        drops.add(stack);
                    }
                }
            } else {
                wand.deactivate();
            }
        }
        // Scan the main inventory: temporary/skill items never drop; wands
        // may be kept (globally or per-wand) and are stashed for respawn.
        List<ItemStack> removeDrops = new ArrayList<ItemStack>();
        PlayerInventory inventory = player.getInventory();
        ItemStack[] contents = inventory.getContents();
        for (int index = 0; index < contents.length; index++)
        {
            ItemStack itemStack = contents[index];
            if (itemStack == null || itemStack.getType() == Material.AIR) continue;
            if (NMSUtils.isTemporary(itemStack) || Wand.isSkill(itemStack)) {
                removeDrops.add(itemStack);
                continue;
            }
            boolean keepItem = false;
            if (Wand.isWand(itemStack)) {
                keepItem = keepWandsOnDeath;
                if (!keepItem) {
                    Wand testWand = new Wand(controller, itemStack);
                    keepItem = testWand.keepOnDeath();
                }
            }
            if (keepItem)
            {
                // Remember the slot so the item returns to the same place.
                mage.addToRespawnInventory(index, itemStack);
                removeDrops.add(itemStack);
            }
        }
        // Same scan for armor slots.
        ItemStack[] armor = player.getInventory().getArmorContents();
        for (int index = 0; index < armor.length; index++)
        {
            ItemStack itemStack = armor[index];
            if (itemStack == null || itemStack.getType() == Material.AIR) continue;
            if (NMSUtils.isTemporary(itemStack) || Wand.isSkill(itemStack)) {
                removeDrops.add(itemStack);
                continue;
            }
            boolean keepItem = false;
            if (Wand.isWand(itemStack)) {
                keepItem = keepWandsOnDeath;
                if (!keepItem) {
                    Wand testWand = new Wand(controller, itemStack);
                    keepItem = testWand.keepOnDeath();
                }
            }
            if (keepItem)
            {
                mage.addToRespawnArmor(index, itemStack);
                removeDrops.add(itemStack);
            }
        }
        drops.removeAll(removeDrops);
    }

    @EventHandler
    public void onItemDespawn(ItemDespawnEvent event)
    {
        Item entity = event.getEntity();
        if (Wand.isWand(event.getEntity().getItemStack()))
        {
            Wand wand = new Wand(controller, entity.getItemStack());
            if (wand.isIndestructible()) {
                // Reset the item's age so an indestructible wand never despawns.
                event.getEntity().setTicksLived(1);
                event.setCancelled(true);
            } else {
                controller.removeLostWand(wand.getId());
            }
        }
    }

    @EventHandler(priority=EventPriority.LOWEST)
    public void onItemSpawn(ItemSpawnEvent event)
    {
        if (disableItemSpawn)
        {
            event.setCancelled(true);
            return;
        }
        Item itemEntity = event.getEntity();
        ItemStack spawnedItem = itemEntity.getItemStack();
        Block block = itemEntity.getLocation().getBlock();
        BlockData undoData = com.elmakers.mine.bukkit.block.UndoList.getBlockData(block.getLocation());
        if (undoData != null && block.getType() != Material.AIR)
        {
            // if a block just broke via physics, it will not yet have its id changed to air
            // So we can catch this as a one-time event, for blocks we have recorded.
            if (undoData.getMaterial() != Material.AIR)
            {
                com.elmakers.mine.bukkit.block.UndoList.commit(undoData);
                event.setCancelled(true);
                return;
            }
            // If this was a block we built magically, don't drop items if the item being dropped
            // matches the block type. This is messy, but avoid players losing all their items
            // when suffocating inside a Blob
            Collection<ItemStack> drops = block.getDrops();
            if (drops != null) {
                for (ItemStack drop : drops) {
                    if (drop.getType() == spawnedItem.getType()) {
                        com.elmakers.mine.bukkit.block.UndoList.commit(undoData);
                        event.setCancelled(true);
                        return;
                    }
                }
            }
        }
        if (Wand.isSkill(spawnedItem))
        {
            // Skill items are UI artifacts and must never exist as drops.
            event.setCancelled(true);
            return;
        }
        if (Wand.isWand(spawnedItem))
        {
            Wand wand = new Wand(controller, event.getEntity().getItemStack());
            if (wand.isIndestructible()) {
                CompatibilityUtils.setInvulnerable(event.getEntity());
                // Don't show non-indestructible wands on dynmap
                controller.addLostWand(wand, event.getEntity().getLocation());
                Location dropLocation = event.getLocation();
                controller.info("Wand " + wand.getName() + ", id " + wand.getId() + " spawned at " + dropLocation.getBlockX() + " " + dropLocation.getBlockY() + " " + dropLocation.getBlockZ());
            }
        } else {
            // Don't do this, no way to differentiate between a dropped item from a broken block
            // versus a dead player
            // registerEntityForUndo(event.getEntity());
            if (ageDroppedItems > 0) {
                // NOTE(review): ageDroppedItems appears to be milliseconds
                // (ms * 20 / 1000 = ticks) — confirm the config unit.
                int ticks = ageDroppedItems * 20 / 1000;
                Item item = event.getEntity();
                CompatibilityUtils.ageItem(item, ticks);
            }
        }
    }

    @EventHandler
    public void onEntityDamage(EntityDamageEvent event)
    {
        try {
            Entity entity = event.getEntity();
            Mage apiMage = controller.getRegisteredMage(event.getEntity());
            if (apiMage != null)
            {
                if (!(apiMage instanceof com.elmakers.mine.bukkit.magic.Mage)) return;
                com.elmakers.mine.bukkit.magic.Mage mage = (com.elmakers.mine.bukkit.magic.Mage) apiMage;
                mage.onPlayerDamage(event);
            }
            else
            {
                // Not a mage itself — but its rider might be one (e.g. a
                // mage riding a mount); forward the damage to the rider.
                Entity passenger = entity.getPassenger();
                Mage apiMountMage = controller.getRegisteredMage(passenger);
                if (apiMountMage != null) {
                    if (!(apiMountMage instanceof com.elmakers.mine.bukkit.magic.Mage)) return;
                    com.elmakers.mine.bukkit.magic.Mage mage = (com.elmakers.mine.bukkit.magic.Mage)apiMountMage;
                    mage.onPlayerDamage(event);
                }
            }
            if (entity instanceof Item)
            {
                Item item = (Item)entity;
                ItemStack itemStack = item.getItemStack();
                if (Wand.isWand(itemStack))
                {
                    Wand wand = new Wand(controller, item.getItemStack());
                    if (wand.isIndestructible()) {
                        event.setCancelled(true);
                    // NOTE(review): comparing damage against getDurability()
                    // (the item's damage value) looks like a heuristic for
                    // "destroyed" — confirm the intended semantics.
                    } else if (event.getDamage() >= itemStack.getDurability()) {
                        if (controller.removeLostWand(wand.getId())) {
                            controller.info("Wand " + wand.getName() + ", id " + wand.getId() + " destroyed");
                        }
                    }
                }
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
}
|
package com.github.brandonbai.smartmonitor.interceptor;
import java.util.Date;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.AfterReturning;
import org.aspectj.lang.annotation.Aspect;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.github.brandonbai.smartmonitor.pojo.Log;
import com.github.brandonbai.smartmonitor.pojo.Threshold;
import com.github.brandonbai.smartmonitor.service.LogService;
import com.github.brandonbai.smartmonitor.service.TokenService;
/**
*
* LogAspect
*
* @author Feihu Ji
* @since 20161015
*
*/
@Aspect
@Component
public class LogAspect {

    @Autowired
    private TokenService tokenService;

    @Autowired
    private LogService logService;

    /** Records an audit log entry after a threshold update succeeds. */
    @AfterReturning("execution(* com.github.brandonbai.smartmonitor.service..*.updateThresholds(..))")
    public void saveLogUpdateThreshold(JoinPoint joinPoint) {
        final Threshold threshold = (Threshold) joinPoint.getArgs()[0];
        final Log entry = new Log();
        entry.setContent("id" + threshold.getId() + "" + threshold.getMin() + "~" + threshold.getMax());
        entry.setTime(new Date());
        entry.setType(Log.CHANGE_THRESHOLD);
        entry.setUsername(tokenService.getUser().getId() + "");
        logService.addLog(entry);
    }

    /** Records an audit log entry after a device-control command succeeds. */
    @AfterReturning("execution(* com.github.brandonbai.smartmonitor.service..*.controlDevice(..))")
    public void saveLogControl(JoinPoint joinPoint) {
        final String command = (String) joinPoint.getArgs()[0];
        final Log entry = new Log();
        entry.setContent("" + command);
        entry.setTime(new Date());
        entry.setType(Log.CONTROL_DEVICE);
        entry.setUsername(tokenService.getUser().getId() + "");
        logService.addLog(entry);
    }
}
|
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
/**
 * Command-line tool that applies a repeating-key XOR (key "ICE") to a file
 * and writes the result alongside it with a ".xor" extension. XOR is its
 * own inverse, so running the tool twice restores the original file.
 */
public class RepeatingKeyXOR {

    /** Key whose bytes are applied cyclically over the input. */
    final static String XOR_KEY = "ICE";
    /**
     * Key bytes. The key is pure ASCII, so the charset is fixed explicitly
     * instead of relying on the platform default.
     */
    final static byte[] XOR_KEY_BYTES = XOR_KEY.getBytes(StandardCharsets.US_ASCII);

    public static void main(String[] args) {
        System.out.println("repeating key xor (using key \"ICE\")");
        System.out.println("public domain - gilbert.fernandes@orange.fr");
        System.out.println("sources @ https://github.com/gilbert-fernandes/");
        if(args.length == 0) {
            System.out.println("usage : rkXOR [filename]");
            return;
        }
        byte[] fileContents = grabFileContents(args[0]);
        if(fileContents == null) {
            System.out.println(">> EXITING <<");
            return;
        }
        final byte[] appliedXOR = rkXOR(fileContents);
        final String outputFileName = args[0] + ".xor";
        try (FileOutputStream fos = new FileOutputStream(outputFileName)) {
            fos.write(appliedXOR);
        } catch (IOException e) {
            System.out.println("failed to save file : " + e.getMessage());
        }
        System.out.println("done, written " + appliedXOR.length + " bytes to '" + outputFileName + "'");
    }

    /**
     * Reads the whole file into memory.
     *
     * @param path file to read
     * @return the file's bytes, or {@code null} when the read fails (the
     *         failure is reported to stdout)
     */
    private static byte[] grabFileContents(final String path) {
        Path p = Paths.get(path);
        try {
            return Files.readAllBytes(p);
        } catch (IOException e) {
            System.out.println("could not load file contents : " + e.getMessage());
            return null;
        }
    }

    /**
     * XORs {@code content} in place with the repeating key and returns the
     * same array. Byte {@code i} is XOR-ed with key byte {@code i % keyLen},
     * replacing the previous stateful key-index counter with a direct
     * modulo computation.
     *
     * @param content bytes to encrypt/decrypt; mutated in place
     * @return the input array, after the XOR has been applied
     */
    static byte[] rkXOR(byte[] content) {
        for (int i = 0; i < content.length; i++) {
            content[i] = (byte) (content[i] ^ XOR_KEY_BYTES[i % XOR_KEY_BYTES.length]);
        }
        return content;
    }
}
|
import java.util.ArrayList;
import java.util.List;
import javafx.geometry.Insets;
import javafx.scene.control.Label;
import javafx.scene.image.ImageView;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Priority;
import javafx.scene.layout.VBox;
import javafx.scene.paint.Color;
import javafx.scene.text.Font;
import javafx.scene.text.Text;
// This isn't really a canvas but we'll keep the class name to stay as close to the assignment as possible
public class FacePamphletCanvas extends BorderPane implements FacePamphletConstants {
private static final int MESSAGE_FONT_SIZE = 18;
private static final int PROFILE_NAME_FONT_SIZE = 24;
private static final int PROFILE_FRIENDS_LABEL_FONT_SIZE = 16;
private static final int PROFILE_FRIENDS_LIST_FONT_SIZE = 16;
private static final int PROFILE_STATUS_FONT_SIZE = 16;
private Text profileFriendsLabel;
private Text profileFriendsList;
private ImageView profileImage;
private Text profileNameLabel;
private Text profileStatusLabel;
private Label messageLabel;
/**
* Constructor This method takes care of any initialization needed for the
* display
*/
public FacePamphletCanvas() {
profileNameLabel = createProfileNameLabel();
this.setTop(profileNameLabel);
HBox centerProfilePane = createCenterProfilePane();
this.setCenter(centerProfilePane);
messageLabel = createMessageLabel();
this.setBottom(messageLabel);
}
private Text createProfileNameLabel() {
Text profileNameLabel = new Text();
profileNameLabel.setFont(new Font(PROFILE_NAME_FONT_SIZE));
profileNameLabel.setFill(Color.BLUE);
BorderPane.setMargin(profileNameLabel, new Insets(TOP_MARGIN, 0, 0, LEFT_MARGIN));
return profileNameLabel;
}
private HBox createCenterProfilePane() {
HBox centerProfilePane = new HBox();
BorderPane.setMargin(centerProfilePane, new Insets(0, 0, 0, LEFT_MARGIN));
VBox leftProfilePane = createLeftProfilePane();
VBox rightProfilePane = createRightProfilePane();
centerProfilePane.getChildren().addAll(leftProfilePane, rightProfilePane);
return centerProfilePane;
}
private VBox createLeftProfilePane() {
VBox leftProfilePane = new VBox();
HBox.setHgrow(leftProfilePane, Priority.ALWAYS);
leftProfilePane.setMaxWidth(Double.MAX_VALUE);
profileImage = new ImageView();
profileImage.setFitHeight(IMAGE_HEIGHT);
profileImage.setFitWidth(IMAGE_WIDTH);
VBox.setMargin(profileImage, new Insets(IMAGE_MARGIN, 0, 0, 0));
profileStatusLabel = new Text();
profileStatusLabel.setFont(new Font(PROFILE_STATUS_FONT_SIZE));
VBox.setMargin(profileStatusLabel, new Insets(STATUS_MARGIN, 0, 0, 0));
leftProfilePane.getChildren().addAll(profileImage, profileStatusLabel);
// TODO fix width
leftProfilePane.setStyle("-fx-background-color: yellow");
return leftProfilePane;
}
private VBox createRightProfilePane() {
VBox rightProfilePane = new VBox();
HBox.setHgrow(rightProfilePane, Priority.ALWAYS);
rightProfilePane.setMaxWidth(Double.MAX_VALUE);
profileFriendsLabel = new Text();
profileFriendsLabel.setFont(new Font(PROFILE_FRIENDS_LABEL_FONT_SIZE));
profileFriendsLabel.setStyle("-fx-font-weight: bold");
profileFriendsList = new Text();
profileFriendsList.setFont(new Font(PROFILE_FRIENDS_LIST_FONT_SIZE));
rightProfilePane.getChildren().addAll(profileFriendsLabel, profileFriendsList);
// TODO fix width
rightProfilePane.setStyle("-fx-background-color: orange");
return rightProfilePane;
}
private Label createMessageLabel() {
Label messageLabel = new Label();
messageLabel.setFont(new Font(MESSAGE_FONT_SIZE));
BorderPane.setMargin(messageLabel, new Insets(0, 0, BOTTOM_MESSAGE_MARGIN, LEFT_MARGIN));
return messageLabel;
}
/**
* This method displays a message string near the bottom of the canvas.
* Every time this method is called, the previously displayed message (if
* any) is replaced by the new message text passed in.
*/
public void showMessage(String msg) {
messageLabel.setText(msg);
}
/**
* This method displays the given profile on the canvas. The canvas is first
* cleared of all existing items (including messages displayed near the
* bottom of the screen) and then the given profile is displayed. The
* profile display includes the name of the user from the profile, the
* corresponding image (or an indication that an image does not exist), the
* status of the user, and a list of the user's friends in the social
* network.
*/
public void displayProfile(FacePamphletProfile profile) {
showMessage("");
profileNameLabel.setText(profile.getName());
profileStatusLabel.setText(profile.getStatus());
profileImage.setImage(profile.getImage());
displayFriends(profile);
}
/** Shows the "Friends:" heading and one friend name per line. */
private void displayFriends(FacePamphletProfile profile) {
    profileFriendsLabel.setText("Friends:");
    List<String> names = new ArrayList<String>();
    profile.getFriends().forEachRemaining(name -> names.add(name));
    profileFriendsList.setText(String.join("\n", names));
}
/** Resets every profile-related node (texts and image) to its empty state. */
public void clearProfile() {
    profileNameLabel.setText("");
    profileStatusLabel.setText("");
    profileImage.setImage(null);
    profileFriendsLabel.setText("");
    profileFriendsList.setText("");
}
}
|
package com.github.dozedoff.commonj.image;
import java.awt.Dimension;
import java.awt.Image;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Iterator;
import javax.imageio.ImageIO;
import javax.imageio.ImageReadParam;
import javax.imageio.ImageReader;
import javax.imageio.stream.ImageInputStream;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import sun.awt.image.ImageFormatException;
@SuppressWarnings("restriction")
public class SubsamplingImageLoader {

    /**
     * Loads an image from disk, subsampling during decoding so the decoded
     * image is no larger than needed for the given display area.
     *
     * @param imagepath path of the image file to load
     * @param targetDimension the area the image should roughly fit into
     * @return the (possibly subsampled) decoded image
     * @throws ImageFormatException if no registered reader can decode the file
     * @throws IOException on read errors
     */
    public static Image loadAsImage(Path imagepath, Dimension targetDimension) throws ImageFormatException, IOException {
        ImageInputStream iis = getImageInputStream(imagepath);
        try {
            ImageReader reader = getImageReader(iis);
            if (reader == null) {
                throw new ImageFormatException("Could not decode file");
            }
            try {
                return subsampleRead(iis, reader, targetDimension);
            } finally {
                // Release decoder resources; the original leaked both the
                // reader and the underlying input stream.
                reader.dispose();
            }
        } finally {
            iis.close();
        }
    }

    /**
     * Convenience wrapper: loads the image and wraps it in a centered JLabel.
     *
     * @throws ImageFormatException if no registered reader can decode the file
     * @throws IOException on read errors
     */
    public static JLabel loadAsLabel(Path imagepath, Dimension targetDimension) throws ImageFormatException, IOException {
        Image image = loadAsImage(imagepath, targetDimension);
        return new JLabel(new ImageIcon(image), JLabel.CENTER);
    }

    /**
     * Use {@link #loadAsLabel(Path imagepath, Dimension targetDimension) } instead.
     */
    @Deprecated
    public static JLabel loadImage(Path imagepath, Dimension targetDimension) throws ImageFormatException, IOException {
        // Delegate instead of duplicating the load logic (the original copy
        // also leaked its streams).
        return loadAsLabel(imagepath, targetDimension);
    }

    /** Decodes frame 0, reading only every n-th pixel in both directions. */
    private static Image subsampleRead(ImageInputStream iis, ImageReader reader, Dimension targetDimension) throws IOException {
        ImageReadParam readerParameters = reader.getDefaultReadParam();
        reader.setInput(iis, true, true);
        Dimension imageSize = new Dimension(reader.getWidth(0), reader.getHeight(0));
        int sampleRate = getSampleRate(imageSize, targetDimension);
        readerParameters.setSourceSubsampling(sampleRate, sampleRate, 0, 0);
        return reader.read(0, readerParameters);
    }

    private static ImageInputStream getImageInputStream(Path image) throws IOException {
        return ImageIO.createImageInputStream(Files.newInputStream(image));
    }

    /**
     * Computes the subsampling period so the decoded image still covers the
     * display area; never smaller than 1 (i.e. never upsamples).
     */
    private static int getSampleRate(Dimension image, Dimension displayArea) {
        // The original swapped the x/y variable names; the result was
        // unaffected because only the maximum of the two ratios is used.
        double widthRatio = image.getWidth() / displayArea.getWidth();
        double heightRatio = image.getHeight() / displayArea.getHeight();
        int sampleRate = (int) Math.ceil(Math.max(widthRatio, heightRatio));
        return Math.max(sampleRate, 1);
    }

    /** Returns the first matching reader, or null if none can decode the stream. */
    private static ImageReader getImageReader(ImageInputStream iis) {
        Iterator<ImageReader> iter = ImageIO.getImageReaders(iis);
        if (!iter.hasNext()) {
            return null;
        }
        return iter.next();
    }
}
|
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.TextField;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Pane;
import javafx.scene.layout.Region;
import javafx.scene.layout.VBox;
import javafx.scene.text.Text;
import javafx.stage.Stage;
// Not an actual canvas; the class name mirrors the original assignment's API.
public class FacePamphletCanvas implements FacePamphletConstants {

    /**
     * Constructor. Performs all initialization needed for the display and
     * shows the assembled scene on the given stage.
     */
    public FacePamphletCanvas(Stage primaryStage) {
        BorderPane root = new BorderPane();
        root.setPrefSize(APPLICATION_WIDTH, APPLICATION_HEIGHT);
        root.setTop(createTopPane());
        root.setLeft(createLeftPane());
        root.setCenter(createCenterPane());
        primaryStage.setScene(new Scene(root));
        primaryStage.show();
    }

    /** Builds the toolbar across the top: name entry plus the three profile actions. */
    private HBox createTopPane() {
        HBox topPane = new HBox();
        topPane.setPadding(new Insets(15, 12, 15, 12));
        topPane.setSpacing(10);
        topPane.setAlignment(Pos.CENTER);
        Text nameLabel = new Text("Name");
        TextField nameInput = new TextField();
        topPane.getChildren().addAll(nameLabel, nameInput,
                createActionButton("Add"),
                createActionButton("Delete"),
                createActionButton("Lookup"));
        return topPane;
    }

    /** Creates one fixed-size toolbar button; its action is not implemented yet. */
    private Button createActionButton(String caption) {
        Button button = new Button(caption);
        button.setPrefSize(100, 20);
        button.setOnAction(event -> {
            // TODO
        });
        return button;
    }

    /** Builds the left-hand column of text fields and their companion buttons. */
    private VBox createLeftPane() {
        VBox leftPane = new VBox();
        leftPane.setPadding(new Insets(15, 12, 15, 12));
        leftPane.setSpacing(10);
        leftPane.setPrefHeight(APPLICATION_HEIGHT);
        leftPane.setAlignment(Pos.CENTER);
        leftPane.getChildren().addAll(
                createEnterField(), createSideButton("Change Status"),
                createEnterField(), createSideButton("Change Picture"),
                createEnterField(), createSideButton("Add Friend"));
        return leftPane;
    }

    /** Creates a text field whose ENTER key is reserved for a future action. */
    private TextField createEnterField() {
        TextField field = new TextField();
        field.setOnKeyPressed(keyEvent -> {
            if (keyEvent.getCode().equals(KeyCode.ENTER)) {
                // TODO
            }
        });
        return field;
    }

    /** Creates a side-bar button allowed to grow to fill the available space. */
    private Button createSideButton(String caption) {
        Button button = new Button(caption);
        button.setMaxSize(Double.MAX_VALUE, Double.MAX_VALUE);
        button.setOnAction(event -> {
            // TODO
        });
        return button;
    }

    /** The white center area where profiles will eventually be drawn. */
    private Pane createCenterPane() {
        Pane centerPane = new Pane();
        centerPane.setStyle("-fx-background-color: white");
        return centerPane;
    }

    /**
     * This method displays a message string near the bottom of the canvas.
     * Every time this method is called, the previously displayed message (if
     * any) is replaced by the new message text passed in.
     */
    public void showMessage(String msg) {
        // You fill this in
    }

    /**
     * This method displays the given profile on the canvas. The canvas is first
     * cleared of all existing items (including messages displayed near the
     * bottom of the screen) and then the given profile is displayed. The
     * profile display includes the name of the user from the profile, the
     * corresponding image (or an indication that an image does not exist), the
     * status of the user, and a list of the user's friends in the social
     * network.
     */
    public void displayProfile(FacePamphletProfile profile) {
        // You fill this in
    }
}
|
package com.github.vangj.jbayes.inf.exact.graph.pptc;
import com.github.vangj.jbayes.inf.exact.graph.Edge;
import com.github.vangj.jbayes.inf.exact.graph.Node;
import com.github.vangj.jbayes.inf.exact.graph.Ug;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
public class Triangulate {

  /**
   * Candidate elimination node: a node together with its current neighbors,
   * the product of the node/neighbor weights, and the fill-in edges that
   * would be needed to turn the neighborhood into a clique.
   */
  private static class NodeClique {
    private final Node node;
    private final Set<Node> neighbors;
    private final int weight;
    private final List<Edge> edges;

    public NodeClique(Node node, List<Node> neighbors, int weight, List<Edge> edges) {
      this.node = node;
      this.neighbors = new LinkedHashSet<>(neighbors);
      this.weight = weight;
      this.edges = edges;
    }

    @Override
    public String toString() {
      List<Node> nodes = new ArrayList<>();
      nodes.add(node);
      nodes.addAll(neighbors);
      String ids = nodes.stream().map(Node::getId).collect(Collectors.joining(",", "[", "]"));
      return ids + " " + edges.size() + " " + weight;
    }
  }

  private Triangulate() {
    // utility class; no instances
  }

  /**
   * Triangulates the given undirected graph by repeated node elimination,
   * returning the cliques induced along the way.
   * <p>
   * Note: the fill-in edges are added to the input graph {@code m} as well
   * as to the internal working copy, so {@code m} is mutated by this call.
   */
  public static List<Clique> triangulate(Ug m) {
    List<Clique> cliques = new ArrayList<>();
    Ug mm = (Ug) m.duplicate();
    while (!mm.nodes().isEmpty()) {
      NodeClique nodeClique = selectNode(mm);
      cliques.add(new Clique(nodeClique.node, mm.neighbors(nodeClique.node)));
      mm.remove(nodeClique.node);
      connectNeighbors(m, nodeClique.edges);
      connectNeighbors(mm, nodeClique.edges);
    }
    return cliques;
  }

  /** Adds every fill-in edge to the given graph. */
  private static void connectNeighbors(Ug m, List<Edge> edges) {
    edges.forEach(edge -> m.addEdge(edge.getLeft(), edge.getRight()));
  }

  /**
   * Picks the next node to eliminate: fewest fill-in edges first, then
   * smallest weight, then reverse-lexicographic node id as a deterministic
   * tie-break. Uses a single min-scan instead of sorting the whole candidate
   * list (the original sorted O(n log n) only to take element 0).
   */
  private static NodeClique selectNode(Ug m) {
    return m.nodes()
        .stream()
        .map(node -> new NodeClique(node, m.neighbors(node), weight(node, m), edgesToAdd(node, m)))
        .min((c1, c2) -> {
          int result = Integer.compare(c1.edges.size(), c2.edges.size());
          if (0 == result) {
            result = Integer.compare(c1.weight, c2.weight);
            if (0 == result) {
              result = -1 * c1.node.getId().compareTo(c2.node.getId());
            }
          }
          return result;
        })
        .orElseThrow(() -> new IllegalStateException("selectNode called on an empty graph"));
  }

  /** Weight of a node times the weights of all its current neighbors. */
  private static int weight(Node n, Ug m) {
    int weight = n.weight();
    for (Node neighbor : m.neighbors(n)) {
      weight *= neighbor.weight();
    }
    return weight;
  }

  /** Edges missing between pairs of neighbors of n (the fill-in set). */
  private static List<Edge> edgesToAdd(Node n, Ug m) {
    List<Edge> edges = new ArrayList<>();
    List<Node> neighbors = m.neighbors(n);
    final int size = neighbors.size();
    for (int i = 0; i < size; i++) {
      Node ne1 = neighbors.get(i);
      for (int j = i + 1; j < size; j++) {
        Node ne2 = neighbors.get(j);
        if (!m.edgeExists(ne1.getId(), ne2.getId())) {
          edges.add(Edge.newBuilder().left(ne1).right(ne2).build());
        }
      }
    }
    return edges;
  }
}
|
package se.raddo.raddose3D;
import java.util.Map;
public class CrystalCylinder extends CrystalPolyhedron {

  /**
   * Loads the vertices and triangular face indices of a cylinder whose
   * diameter is CRYSTAL_DIM_X and whose axial length is CRYSTAL_DIM_Y
   * (both read from the supplied property map).
   */
  @Override
  public void loadVertices(final Map<Object, Object> mergedProperties) {
    // Triangular face list (1-based vertex indices) for the 64-vertex mesh
    // built by createCylinderVertices(). Exported from Blender (see comment
    // there); do not edit by hand.
    int[][] tempIndices = {
        { 2, 4, 3},
        { 3, 4, 6},
        { 6, 8, 7},
        { 8, 10, 9},
        { 10, 12, 11},
        { 12, 14, 13},
        { 14, 16, 15},
        { 15, 16, 18},
        { 18, 20, 19},
        { 20, 22, 21},
        { 21, 22, 24},
        { 24, 26, 25},
        { 26, 28, 27},
        { 27, 28, 30},
        { 30, 32, 31},
        { 32, 34, 33},
        { 34, 36, 35},
        { 35, 36, 38},
        { 38, 40, 39},
        { 39, 40, 42},
        { 42, 44, 43},
        { 44, 46, 45},
        { 45, 46, 48},
        { 48, 50, 49},
        { 49, 50, 52},
        { 52, 54, 53},
        { 53, 54, 56},
        { 56, 58, 57},
        { 58, 60, 59},
        { 59, 60, 62},
        { 62, 38, 22},
        { 64, 2, 1},
        { 62, 64, 63},
        { 53, 55, 63},
        { 1, 2, 3},
        { 5, 3, 6},
        { 5, 6, 7},
        { 7, 8, 9},
        { 9, 10, 11},
        { 11, 12, 13},
        { 13, 14, 15},
        { 17, 15, 18},
        { 17, 18, 19},
        { 19, 20, 21},
        { 23, 21, 24},
        { 23, 24, 25},
        { 25, 26, 27},
        { 29, 27, 30},
        { 29, 30, 31},
        { 31, 32, 33},
        { 33, 34, 35},
        { 37, 35, 38},
        { 37, 38, 39},
        { 41, 39, 42},
        { 41, 42, 43},
        { 43, 44, 45},
        { 47, 45, 48},
        { 47, 48, 49},
        { 51, 49, 52},
        { 51, 52, 53},
        { 55, 53, 56},
        { 55, 56, 57},
        { 57, 58, 59},
        { 61, 59, 62},
        { 2, 64, 4},
        { 64, 62, 14},
        { 58, 54, 60},
        { 58, 56, 54},
        { 54, 52, 50},
        { 48, 46, 44},
        { 42, 48, 44},
        { 42, 40, 38},
        { 34, 32, 36},
        { 30, 22, 32},
        { 26, 22, 28},
        { 26, 24, 22},
        { 22, 20, 18},
        { 14, 22, 16},
        { 10, 8, 12},
        { 54, 62, 60},
        { 64, 6, 4},
        { 8, 14, 12},
        { 22, 30, 28},
        { 38, 48, 42},
        { 38, 36, 22},
        { 6, 64, 8},
        { 61, 62, 63},
        { 36, 32, 22},
        { 54, 38, 62},
        { 38, 50, 48},
        { 38, 54, 50},
        { 8, 64, 14},
        { 63, 64, 1},
        { 22, 18, 16},
        { 62, 22, 14},
        { 63, 1, 3},
        { 3, 5, 7},
        { 15, 9, 13},
        { 15, 17, 19},
        { 21, 23, 31},
        { 23, 25, 27},
        { 27, 29, 31},
        { 31, 33, 35},
        { 35, 37, 39},
        { 39, 41, 43},
        { 47, 51, 45},
        { 47, 49, 51},
        { 53, 39, 43},
        { 59, 63, 57},
        { 59, 61, 63},
        { 7, 31, 3},
        { 9, 11, 13},
        { 63, 55, 57},
        { 23, 27, 31},
        { 39, 63, 35},
        { 45, 51, 53},
        { 15, 7, 9},
        { 15, 19, 21},
        { 7, 21, 31},
        { 15, 21, 7},
        { 39, 53, 63},
        { 31, 63, 3},
        { 63, 31, 35},
        { 43, 45, 53}
    };
    // CRYSTAL_DIM_X holds the diameter, hence the division by 2.
    // Primitive locals avoid needless Double boxing (the cast still unboxes).
    double radius = (Double) mergedProperties.get(Crystal.CRYSTAL_DIM_X) / 2;
    double height = (Double) mergedProperties.get(Crystal.CRYSTAL_DIM_Y);
    double[][] tempVertices = createCylinderVertices(radius, height);
    setIndices(tempIndices);
    vertices = new double[tempVertices.length][3];
    for (int i = 0; i < tempVertices.length; i++) {
      System.arraycopy(tempVertices[i], 0, vertices[i], 0, 3);
    }
  }

  /**
   * Creates the vertices required for the cylinder object.
   *
   * @param radius of the circular cross-section of the cylinder
   * @param height the axial length of the cylinder
   * @return an nx3 array (where n is the number of vertices) containing
   *         the x,y,z coordinates of each vertex
   */
  private double[][] createCylinderVertices(double radius, double height) {
    // Number of vertices on each of the two end-cap circles
    // (i.e. half the total vertex count).
    int numOfVertices = 32;
    // The cylinder axis runs along x, centred on the origin.
    double midPoint = height / 2;
    double xCoordBase = -midPoint;
    double xCoordTop = midPoint;
    // Angular step around the circle. The negative sign is used to go
    // anti-clockwise, consistent with the output from the BLENDER software.
    double angleToVertex = -2 * Math.PI / numOfVertices;
    // Two circles (base and top) => 2 * numOfVertices vertices in total;
    // base and top vertices are interleaved (even index = base, odd = top).
    double[][] vertices = new double[2 * numOfVertices][3];
    for (int vertex = 0; vertex < numOfVertices; vertex++) {
      double yCoord = radius * Math.cos(vertex * angleToVertex);
      double zCoord = radius * Math.sin(vertex * angleToVertex);
      // Base-circle vertex.
      vertices[2 * vertex][0] = xCoordBase;
      vertices[2 * vertex][1] = yCoord;
      vertices[2 * vertex][2] = zCoord;
      // Top-circle vertex.
      vertices[2 * vertex + 1][0] = xCoordTop;
      vertices[2 * vertex + 1][1] = yCoord;
      vertices[2 * vertex + 1][2] = zCoord;
    }
    return vertices;
  }

  /**
   * Generic property-map constructor; all handling is delegated to the
   * CrystalPolyhedron superclass.
   */
  public CrystalCylinder(final Map<Object, Object> properties) {
    super(properties);
  }

  /*
   * (non-Javadoc)
   *
   * @see se.raddo.raddose3D.Crystal#crystalInfo()
   */
  @Override
  public String crystalInfo() {
    String s = String
        .format(
            "Cylinder (Polyhedron) crystal of "
                + "diameter %.2f mm and length %.2f mm at a "
                + "resolution of %.2f microns per voxel edge.",
            crystSizeUM[0],
            crystSizeUM[1],
            1 / crystalPixPerUM);
    // Only mention rotation/bending when either angle is non-zero.
    if (l == 0 && p == 0) {
      return s;
    } else {
      return s + String.format(
          "%nRotated by %.1f deg in the plane of the loop and the loop is "
              + "bent by %.1f relative to the rotation axis at phi = 0.",
          Math.toDegrees(p), Math.toDegrees(l));
    }
  }
}
|
package com.kolinkrewinkel.BitLimitTweaks;
import org.bukkit.plugin.Plugin;
import java.util.*;
import org.bukkit.event.*;
import org.bukkit.*;
import org.bukkit.entity.*;
import org.bukkit.event.entity.CreatureSpawnEvent.*;
import org.bukkit.event.entity.*;
import org.bukkit.configuration.file.FileConfiguration;
public class BitLimitTweaksListener implements Listener {

    // Shared RNG: the original allocated a new Random() on every spawn
    // event, which is needless garbage on a hot event path.
    private static final Random RANDOM = new Random();

    private final BitLimitTweaks plugin; // Reference main plugin

    public BitLimitTweaksListener(BitLimitTweaks plugin) {
        // Notify plugin manager that this plugin handles implemented events (block place, etc.)
        plugin.getServer().getPluginManager().registerEvents(this, plugin);
        this.plugin = plugin;
    }

    /**
     * Cancels roughly half of all natural / slime-split slime spawns when
     * the "enabled-slimes" config flag is set.
     */
    @EventHandler
    public void onCreatureSpawnEvent(CreatureSpawnEvent event) {
        FileConfiguration config = this.plugin.getConfig();
        if (!config.getBoolean("enabled-slimes"))
            return;
        // Gather information to determine if these are the slimes we are looking for.
        EntityType entityType = event.getEntityType();
        SpawnReason reason = event.getSpawnReason();
        if (entityType == EntityType.SLIME && (reason == SpawnReason.NATURAL || reason == SpawnReason.SLIME_SPLIT)) {
            // Pseudo-randomly cancel slime spawns to reduce their numbers.
            event.setCancelled(getRandomBoolean());
        }
    }

    /** @return true or false with equal probability */
    public boolean getRandomBoolean() {
        return RANDOM.nextBoolean();
    }
}
|
package com.novoda.notils.logger.toast;
import android.content.Context;
import android.widget.Toast;
public class NonStackingToastDisplayer implements ToastDisplayer {

    private final Context context;
    // The single currently-visible toast; null until the first display call.
    private Toast toast;

    /**
     * @param context Application context should be passed
     */
    private NonStackingToastDisplayer(Context context) {
        this.context = context;
    }

    public static NonStackingToastDisplayer newInstance(Context context) {
        return new NonStackingToastDisplayer(context.getApplicationContext());
    }

    /**
     * {@inheritDoc}
     * Cancels all previous Toasts before displaying this one.
     *
     * @param message
     */
    @Override
    public void display(String message) {
        display(message, Toast.LENGTH_SHORT);
    }

    /**
     * {@inheritDoc}
     * Cancels all previous Toasts before displaying this one.
     *
     * @param stringResourceId
     */
    @Override
    public void display(int stringResourceId) {
        display(stringResourceId, Toast.LENGTH_SHORT);
    }

    /**
     * {@inheritDoc}
     * Cancels all pending Toasts before displaying this one.
     *
     * @param message
     */
    @Override
    public void displayLong(String message) {
        display(message, Toast.LENGTH_LONG);
    }

    /**
     * {@inheritDoc}
     * Cancels all pending Toasts before displaying this one.
     *
     * @param stringResourceId
     */
    @Override
    public void displayLong(int stringResourceId) {
        // Bug fix: this previously passed Toast.LENGTH_SHORT, so a "long"
        // toast requested via resource id was shown for the short duration.
        display(stringResourceId, Toast.LENGTH_LONG);
    }

    private void display(String message, int duration) {
        cancelAll();
        toast = Toast.makeText(context, message, duration);
        toast.show();
    }

    private void display(int stringResourceId, int duration) {
        cancelAll();
        toast = Toast.makeText(context, stringResourceId, duration);
        toast.show();
    }

    @Override
    public void cancelAll() {
        // Guard: cancelAll() may be called before any toast has been shown;
        // the original threw a NullPointerException in that case.
        if (toast != null) {
            toast.cancel();
        }
    }
}
|
/**
* Illustrates using counters and broadcast variables for chapter 6
*/
package com.oreilly.learningsparkexamples.java;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.regex.*;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.Accumulator;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
public class ChapterSixExample {

  /**
   * Valid call signs: one digit, 1-2 letters, 1-4 digits, 1-3 letters.
   * Compiled once and reused; the original compiled a Pattern for every
   * record and contained the typo "\\{Alpha}" (which matches the literal
   * text "{Alpha}") instead of "\\p{Alpha}", so no call sign could ever
   * validate. Being static, the pattern is not captured by the closure.
   */
  private static final Pattern CALL_SIGN_PATTERN =
      Pattern.compile("\\A\\d\\p{Alpha}{1,2}\\d{1,4}\\p{Alpha}{1,3}\\Z");

  public static void main(String[] args) throws Exception {
    if (args.length != 3) {
      throw new Exception("Usage AccumulatorExample sparkMaster inputFile outDirectory");
    }
    String sparkMaster = args[0];
    String inputFile = args[1];
    String outputDir = args[2];
    JavaSparkContext sc = new JavaSparkContext(
        sparkMaster, "ChapterSixExample", System.getenv("SPARK_HOME"), System.getenv("JARS"));
    // Create Accumulators initialized at 0
    final Accumulator<Integer> blankLines = sc.accumulator(0);
    JavaRDD<String> rdd = sc.textFile(inputFile);
    JavaRDD<String> callSigns = rdd.flatMap(
        new FlatMapFunction<String, String>() { public Iterable<String> call(String line) {
          if (line.equals("")) {
            blankLines.add(1);
          }
          return Arrays.asList(line.split(" "));
        }});
    callSigns.saveAsTextFile(outputDir + "/callsigns");
    // saveAsTextFile is an action, so the accumulator is populated here.
    System.out.println("Blank lines: "+ blankLines.value());
    // Start validating the call signs
    final Accumulator<Integer> validSignCount = sc.accumulator(0);
    final Accumulator<Integer> invalidSignCount = sc.accumulator(0);
    JavaRDD<String> validCallSigns = callSigns.filter(
        new Function<String, Boolean>(){ public Boolean call(String callSign) {
          boolean valid = CALL_SIGN_PATTERN.matcher(callSign).matches();
          if (valid) {
            validSignCount.add(1);
          } else {
            invalidSignCount.add(1);
          }
          return valid;
        }
        });
    // Force evaluation so the counters are populated
    validCallSigns.count();
    if (invalidSignCount.value() < 0.1 * validSignCount.value()) {
      validCallSigns.saveAsTextFile(outputDir + "/validatedSigns");
    } else {
      System.out.println("Too many errors " + invalidSignCount.value() + " for " + validSignCount.value());
      System.exit(1);
    }
    // Release cluster resources on the successful path.
    sc.stop();
  }
}
|
package eu.albertomorales.commander.model.impl;
import eu.albertomorales.commander.model.Server;
/** The two server lifecycle actions a {@code StartStopRunnable} can perform. */
enum Operation {
    START, STOP
}
/**
 * Runs a single start or stop operation against a {@link Server}.
 * <p>
 * Thread-safety: {@code started}, {@code finished} and {@code result} are
 * written by the worker thread executing {@link #run()} and read by callers
 * polling the getters from other threads, so they are declared volatile to
 * guarantee visibility (the original had no memory barrier at all).
 */
public class StartStopRunnable implements Runnable {

    private final Server server;
    private final Operation operation;
    private volatile String result = "Nothing done!!!";
    private volatile boolean started = false;
    private volatile boolean finished = false;

    public StartStopRunnable(Server server, Operation operation) {
        super();
        this.server = server;
        this.operation = operation;
    }

    /** Executes the configured operation and records its result string. */
    @Override
    public void run() {
        started = true;
        // Enum constants are singletons, so identity comparison is the idiom.
        if (operation == Operation.START) {
            result = server.start();
        } else if (operation == Operation.STOP) {
            result = server.stop();
        }
        finished = true;
    }

    public String getDescription() {
        return server.getDescription();
    }

    /** @return the server's result string, or the initial placeholder if run() has not completed an operation */
    public String getResult() {
        return result;
    }

    public boolean isStarted() {
        return started;
    }

    public boolean isFinished() {
        return finished;
    }
}
|
package experimentalPhysics.guis;
import java.awt.Color;
import org.lwjgl.opengl.GL11;
import experimentalPhysics.ExperimentalPhysics;
import experimentalPhysics.containers.ContainerAdvancedRefinerInsertionLock;
import experimentalPhysics.tileEntitys.TileEntityStoring;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.StatCollector;
public class GuiAdvancedRefinerInsertionLock extends GuiContainer
{
    /**
     * GUI background texture; created once instead of once per rendered
     * frame (the original allocated a new ResourceLocation every call to
     * drawGuiContainerBackgroundLayer).
     */
    private static final ResourceLocation BACKGROUND_TEXTURE = new ResourceLocation(
            ExperimentalPhysics.MODID + ":textures/guis/GuiAdvancedRefinerInsertionLock.png");

    public GuiAdvancedRefinerInsertionLock(EntityPlayer player, TileEntityStoring tileInsertionLock)
    {
        super(new ContainerAdvancedRefinerInsertionLock(player, tileInsertionLock));
    }

    /** Draws the localized container and player-inventory titles. */
    @Override
    protected void drawGuiContainerForegroundLayer(int par1, int par2)
    {
        this.drawString(Minecraft.getMinecraft().fontRenderer, StatCollector.translateToLocal("container.inventory"), 7, 73, Color.WHITE.getRGB());
        this.drawString(Minecraft.getMinecraft().fontRenderer, StatCollector.translateToLocal("container.advancedRefinerInsertionLock"), 7, 7, Color.WHITE.getRGB());
    }

    /** Binds the background texture and draws it centred on screen. */
    @Override
    protected void drawGuiContainerBackgroundLayer(float p_146976_1_, int p_146976_2_, int p_146976_3_)
    {
        GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
        mc.renderEngine.bindTexture(BACKGROUND_TEXTURE);
        int j = (width - xSize) / 2;
        int k = (height - ySize) / 2;
        drawTexturedModalRect(j, k, 0, 0, xSize, ySize);
    }
}
|
package fr.lille1.maven_data_extraction.visualization;
import java.io.File;
import javax.swing.JFrame;
import org.apache.log4j.Logger;
import fr.lille1.maven_data_extraction.core.extraction.MavenDataExtraction;
import fr.lille1.maven_data_extraction.core.extraction.MavenDataExtractionImpl;
import fr.lille1.maven_data_extraction.core.graph.MavenLabeledEdge;
import fr.lille1.maven_data_extraction.core.graph.MavenMultigraph;
import fr.lille1.maven_data_extraction.core.graph.MavenMultigraphFactory;
import fr.lille1.maven_data_extraction.core.graph.MavenMultigraphLabeled;
/**
 * Demo entry point: builds a Maven dependency multigraph and displays it
 * inside a Swing frame hosting the JGraphT applet.
 *
 * @author Alexandre Bonhomme
 */
public class MainApplet {
    private final static Logger log = Logger.getLogger(MainApplet.class);
    private final static File root = new File("src/test/resources/asia");
    private final static File root_big = new File("../download/org/apache");

    /**
     * @param args
     */
    public static void main(String[] args) {
        // Extract the Maven data and assemble the labeled multigraph.
        MavenMultigraphFactory factory =
                new MavenMultigraphFactory(MavenMultigraphLabeled.class);
        MavenDataExtraction extractor = new MavenDataExtractionImpl(root_big);
        log.info("Starting graph creation...");
        MavenMultigraph<MavenLabeledEdge> graph =
                (MavenMultigraph<MavenLabeledEdge>) factory.build(extractor);

        // Host the initialized applet inside a plain JFrame.
        log.info("Starting graph visualization...");
        MavenGraphApplet applet = new MavenGraphApplet(graph);
        applet.init();
        JFrame frame = new JFrame();
        frame.getContentPane().add(applet);
        frame.setTitle("JGraphT Adapter to JGraph Demo");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.pack();
        frame.setVisible(true);
    }
}
|
package io.github.mzmine.gui.chartbasics.gui.javafx;
import io.github.mzmine.gui.chartbasics.gestures.ChartGestureHandler;
import io.github.mzmine.gui.chartbasics.gestures.interf.GestureHandlerFactory;
import io.github.mzmine.gui.chartbasics.graphicsexport.GraphicsExportModule;
import io.github.mzmine.gui.chartbasics.graphicsexport.GraphicsExportParameters;
import io.github.mzmine.gui.chartbasics.gui.javafx.menu.MenuExportToClipboard;
import io.github.mzmine.gui.chartbasics.gui.javafx.menu.MenuExportToExcel;
import io.github.mzmine.gui.chartbasics.gui.swing.ChartGestureMouseAdapter;
import io.github.mzmine.gui.chartbasics.gui.wrapper.ChartViewWrapper;
import io.github.mzmine.gui.chartbasics.listener.AxesRangeChangedListener;
import io.github.mzmine.gui.chartbasics.listener.AxisRangeChangedListener;
import io.github.mzmine.gui.chartbasics.listener.ZoomHistory;
import io.github.mzmine.main.MZmineCore;
import io.github.mzmine.util.SaveImage;
import io.github.mzmine.util.SaveImage.FileType;
import io.github.mzmine.util.io.XSSFExcelWriterReader;
import java.awt.image.BufferedImage;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javafx.embed.swing.SwingFXUtils;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.print.PrinterJob;
import javafx.scene.control.ContextMenu;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuItem;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.input.Clipboard;
import javafx.scene.input.ClipboardContent;
import javafx.stage.FileChooser;
import javafx.stage.FileChooser.ExtensionFilter;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.axis.NumberAxis;
import org.jfree.chart.axis.ValueAxis;
import org.jfree.chart.fx.ChartViewer;
import org.jfree.chart.fx.interaction.MouseHandlerFX;
import org.jfree.chart.plot.CombinedDomainXYPlot;
import org.jfree.chart.plot.CombinedRangeXYPlot;
import org.jfree.chart.plot.Plot;
import org.jfree.chart.plot.XYPlot;
import org.jfree.data.Range;
import org.jfree.data.RangeType;
import org.jfree.data.general.DatasetChangeEvent;
import org.jfree.data.general.DatasetChangeListener;
import org.jfree.data.xy.XYDataset;
import org.jfree.data.xy.XYZDataset;
/**
* This is an extended version of the ChartViewer (JFreeChartFX). it Adds: ChartGestures (with a set
* of standard chart gestures), ZoomHistory, AxesRangeChangeListener, data export, graphics export,
*
* @author Robin Schmid (robinschmid@uni-muenster.de)
*/
public class EChartViewer extends ChartViewer implements DatasetChangeListener {
private Logger logger = Logger.getLogger(this.getClass().getName());
// one history for each plot/subplot
protected ZoomHistory zoomHistory;
protected List<AxesRangeChangedListener> axesRangeListener;
protected boolean isMouseZoomable = true;
protected boolean stickyZeroForRangeAxis = false;
protected boolean standardGestures = true;
// only for XYData (not for categoryPlots)
protected boolean addZoomHistory = true;
private ChartGestureMouseAdapterFX mouseAdapter;
private Menu exportMenu;
/**
 * Enhanced ChartPanel with extra scrolling methods, zoom history, graphics and data export.<br>
 * Equivalent to the full constructor with no chart, both export menus, standard
 * gestures, zoom history enabled and stickyZeroForRangeAxis = false.
 */
public EChartViewer() {
    this(null, true, true, true, true, false);
}
/**
 * Enhanced ChartPanel with extra scrolling methods, zoom history, graphics and data export.<br>
 * Equivalent to the full constructor with both export menus, standard gestures,
 * zoom history enabled and stickyZeroForRangeAxis = false.
 *
 * @param chart the chart to display (may be null)
 */
public EChartViewer(JFreeChart chart) {
    this(chart, true, true, true, true, false);
}
/**
 * Enhanced ChartPanel with extra scrolling methods, zoom history, graphics and data export<br>
 * stickyZeroForRangeAxis = false; zoom history enabled
 *
 * @param chart the chart to display (may be null)
 * @param graphicsExportMenu adds graphics export menu
 * @param dataExportMenu adds data export menu
 * @param standardGestures adds the standard ChartGestureHandlers
 */
public EChartViewer(JFreeChart chart, boolean graphicsExportMenu, boolean dataExportMenu,
    boolean standardGestures) {
    this(chart, graphicsExportMenu, dataExportMenu, standardGestures, false);
}
/**
 * Enhanced ChartPanel with extra scrolling methods, zoom history, graphics and data export.
 * Zoom history is enabled.
 *
 * @param chart the chart to display (may be null)
 * @param graphicsExportMenu adds graphics export menu
 * @param dataExportMenu adds data export menu
 * @param standardGestures adds the standard ChartGestureHandlers
 * @param stickyZeroForRangeAxis keeps zero included on the range axis
 */
public EChartViewer(JFreeChart chart, boolean graphicsExportMenu, boolean dataExportMenu,
    boolean standardGestures, boolean stickyZeroForRangeAxis) {
    this(chart, graphicsExportMenu, dataExportMenu, standardGestures, true, stickyZeroForRangeAxis);
}
/**
 * Enhanced ChartPanel with extra scrolling methods, zoom history, graphics and data export.
 *
 * @param chart the chart to display (may be null)
 * @param graphicsExportMenu adds graphics export menu
 * @param dataExportMenu adds data export menu
 * @param standardGestures adds the standard ChartGestureHandlers
 * @param addZoomHistory keeps a zoom history (XY plots only)
 * @param stickyZeroForRangeAxis keeps zero included on the range axis
 */
public EChartViewer(JFreeChart chart, boolean graphicsExportMenu, boolean dataExportMenu,
    boolean standardGestures, boolean addZoomHistory, boolean stickyZeroForRangeAxis) {
  super(null);
  this.stickyZeroForRangeAxis = stickyZeroForRangeAxis;
  this.standardGestures = standardGestures;
  this.addZoomHistory = addZoomHistory;

  // Add chart and configure
  if (chart != null)
    setChart(chart);

  // The default context menu already carries an export submenu at index 0.
  exportMenu = (Menu) getContextMenu().getItems().get(0);

  // Add Export to Excel and graphics export menu
  if (graphicsExportMenu || dataExportMenu) {
    addExportMenu(graphicsExportMenu, dataExportMenu);
  }

  addMenuItem(getContextMenu(), "Reset Zoom", event -> {
    ValueAxis xAxis = getChart().getXYPlot().getDomainAxis();
    // Bug fix: the y axis was previously also taken from getDomainAxis(),
    // so the range axis never auto-ranged on reset.
    ValueAxis yAxis = getChart().getXYPlot().getRangeAxis();
    xAxis.setAutoRange(true);
    yAxis.setAutoRange(true);
  });

  // TODO:
  /*
   * addMenuItem(getContextMenu(), "Set Range on Axis", event -> {
   * AxesSetupDialog dialog =
   * new AxesSetupDialog((Stage) this.getScene().getWindow(), chart.getXYPlot());
   * dialog.show();
   * });
   */

  // Bug fix: the labels were swapped ("EPS.." triggered the EMF save and
  // vice versa); each label now matches its handler.
  addMenuItem(exportMenu, "EMF..", event -> handleSave("EMF Image", "EMF", ".emf", FileType.EMF));
  addMenuItem(exportMenu, "EPS..", event -> handleSave("EPS Image", "EPS", ".eps", FileType.EPS));

  addMenuItem(getContextMenu(), "Copy chart to clipboard", event -> {
    // Render the chart at the viewer's current size and push it to the clipboard.
    BufferedImage bufferedImage =
        getChart().createBufferedImage((int) this.getWidth(), (int) this.getHeight());
    Image image = SwingFXUtils.toFXImage(bufferedImage, null);
    ClipboardContent content = new ClipboardContent();
    content.putImage(image);
    Clipboard.getSystemClipboard().setContent(content);
  });

  addMenuItem(getContextMenu(), "Print", event -> {
    // Render the chart to an image and hand it to the JavaFX print service.
    BufferedImage bufferedImage =
        getChart().createBufferedImage((int) this.getWidth(), (int) this.getHeight());
    Image image = SwingFXUtils.toFXImage(bufferedImage, null);
    ImageView imageView = new ImageView(image);
    PrinterJob job = PrinterJob.createPrinterJob();
    if (job != null) {
      boolean doPrint = job.showPrintDialog(this.getScene().getWindow());
      if (doPrint) {
        job.printPage(imageView);
        job.endJob();
      }
    } else {
      MZmineCore.getDesktop().displayErrorMessage("No Printing Service Found");
    }
  });
}
  /**
   * Shows a save dialog and exports the chart asynchronously at the current viewer size.
   *
   * @param description file-type description shown in the file chooser
   * @param extensions filter string passed to ExtensionFilter — NOTE(review): ExtensionFilter
   *        expects patterns like "*.emf"; the bare "EMF"/"EPS" passed by the callers may not match
   *        any files — verify
   * @param extension suffix appended to the chosen path when missing (e.g. ".emf")
   * @param filetype output format handed to SaveImage
   */
  private void handleSave(String description, String extensions, String extension,
      FileType filetype) {
    FileChooser chooser = new FileChooser();
    chooser.getExtensionFilters().add(new ExtensionFilter(description, extensions));
    File file = chooser.showSaveDialog(null);

    if (file != null) {
      String filepath = file.getPath();
      // ensure the chosen path carries the expected extension
      if (!filepath.toLowerCase().endsWith(extension)) {
        filepath += extension;
      }

      int width = (int) this.getWidth();
      int height = (int) this.getHeight();

      // Save image on a background thread (SaveImage is passed to new Thread(...))
      SaveImage SI = new SaveImage(getChart(), filepath, width, height, filetype);
      new Thread(SI).start();
    }
  }
protected void addMenuItem(Menu parent, String title, EventHandler<ActionEvent> al) {
MenuItem pngItem = new MenuItem(title);
pngItem.setOnAction(al);
parent.getItems().add(pngItem);
}
protected void addMenuItem(ContextMenu parent, String title, EventHandler<ActionEvent> al) {
MenuItem pngItem = new MenuItem(title);
pngItem.setOnAction(al);
parent.getItems().add(pngItem);
}
  /**
   * Installs the chart, applies the default MZmine chart theme and — for XY plots — wires sticky
   * zero behavior, zoom history, axis-range listeners and mouse gesture handling. Replaces any
   * previously installed gesture mouse adapter.
   */
  @Override
  public void setChart(JFreeChart chart) {
    super.setChart(chart);

    // If no chart, end here
    if (chart == null)
      return;

    final EChartViewer chartPanel = this;

    // apply the theme here, let's see how that works
    MZmineCore.getConfiguration().getDefaultChartTheme().apply(chart);

    // remove old init (detach the previous gesture adapter before re-wiring)
    if (mouseAdapter != null) {
      this.getCanvas().removeMouseHandler(mouseAdapter);
    }

    if (chartPanel.getChart().getPlot() instanceof XYPlot) {
      // set sticky zero: keep zero visible/anchored on a positive-only range axis
      if (stickyZeroForRangeAxis) {
        ValueAxis rangeAxis = chartPanel.getChart().getXYPlot().getRangeAxis();
        if (rangeAxis instanceof NumberAxis) {
          NumberAxis axis = (NumberAxis) rangeAxis;
          axis.setAutoRangeIncludesZero(true);
          axis.setAutoRange(true);
          axis.setAutoRangeStickyZero(true);
          axis.setRangeType(RangeType.POSITIVE);
        }
      }

      Plot p = getChart().getPlot();
      // zoom history is skipped for combined plots (sub-plot axes are managed separately)
      if (addZoomHistory && p instanceof XYPlot
          && !(p instanceof CombinedDomainXYPlot || p instanceof CombinedRangeXYPlot)) {
        // zoom history (keeps the last 20 axis ranges)
        zoomHistory = new ZoomHistory(this, 20);

        // axis range changed listener for zooming and more
        ValueAxis rangeAxis = this.getChart().getXYPlot().getRangeAxis();
        ValueAxis domainAxis = this.getChart().getXYPlot().getDomainAxis();
        if (rangeAxis != null) {
          rangeAxis.addChangeListener(new AxisRangeChangedListener(new ChartViewWrapper(this)) {
            @Override
            public void axisRangeChanged(ChartViewWrapper chart, ValueAxis axis, Range lastR,
                Range newR) {
              // notify listeners of changed range
              if (axesRangeListener != null) {
                for (AxesRangeChangedListener l : axesRangeListener) {
                  l.axesRangeChanged(chart, axis, lastR, newR);
                }
              }
            }
          });
        }
        if (domainAxis != null) {
          domainAxis.addChangeListener(new AxisRangeChangedListener(new ChartViewWrapper(this)) {
            @Override
            public void axisRangeChanged(ChartViewWrapper chart, ValueAxis axis, Range lastR,
                Range newR) {
              // notify listeners of changed range
              if (axesRangeListener != null) {
                for (AxesRangeChangedListener l : axesRangeListener) {
                  l.axesRangeChanged(chart, axis, lastR, newR);
                }
              }
            }
          });
        }
      }

      // mouse adapter for scrolling and zooming
      mouseAdapter = new ChartGestureMouseAdapterFX("gestures", this);
      addMouseHandler(mouseAdapter);

      // add gestures
      if (standardGestures) {
        addStandardGestures();
      }
      // mouseAdapter.addDebugHandler();
    }
  }
public void addMouseHandler(MouseHandlerFX handler) {
this.getCanvas().addAuxiliaryMouseHandler(handler);
}
/**
* Adds all standard gestures defined in {@link ChartGestureHandler#getStandardGestures()}
*/
public void addStandardGestures() {
// add ChartGestureHandlers
ChartGestureMouseAdapterFX m = getGestureAdapter();
if (m != null) {
m.clearHandlers();
for (GestureHandlerFactory f : ChartGestureHandler.getStandardGestures()) {
m.addGestureHandler(f.createHandler());
}
}
}
/**
* Adds the GraphicsExportDialog menu and the data export menu
*/
protected void addExportMenu(boolean graphics, boolean data) {
if (graphics) {
// Graphics Export
addMenuItem(getContextMenu(), "Export graphics...", e -> {
GraphicsExportParameters parameters = (GraphicsExportParameters) MZmineCore
.getConfiguration().getModuleParameters(GraphicsExportModule.class);
MZmineCore.getModuleInstance(GraphicsExportModule.class).openDialog(getChart(), parameters);
});
}
if (data) {
// General data export
Menu export = new Menu("Export data ...");
// Excel XY
MenuExportToExcel exportXY =
new MenuExportToExcel(new XSSFExcelWriterReader(), "to Excel", this);
export.getItems().add(exportXY);
// clip board
MenuExportToClipboard exportXYClipboard = new MenuExportToClipboard("to Clipboard", this);
export.getItems().add(exportXYClipboard);
// add to panel
getContextMenu().getItems().add(export);
}
}
/**
* Default tries to extract all series from an XYDataset or XYZDataset<br>
* series 1 | Series 2 <br>
* x y x y x y z x y z
*
* @return Data array[columns][rows]
*/
public Object[][] getDataArrayForExport() {
if (getChart().getPlot() instanceof XYPlot && getChart().getXYPlot() != null
/*&& getChart().getXYPlot().getDataset() != null*/) { // getDataset() may be null if the
// first dataset was removed, but the plot may still hold other datasets
try {
List<Object[]> modelList = new ArrayList<>();
for (int d = 0; d < getChart().getXYPlot().getDatasetCount(); d++) {
XYDataset data = getChart().getXYPlot().getDataset(d);
if (data instanceof XYZDataset) {
XYZDataset xyz = (XYZDataset) data;
int series = data.getSeriesCount();
Object[][] model = new Object[series * 3][];
for (int s = 0; s < series; s++) {
int size = 2 + xyz.getItemCount(s);
Object[] x = new Object[size];
Object[] y = new Object[size];
Object[] z = new Object[size];
// create new Array model[row][col]
// Write header
Comparable title = data.getSeriesKey(series);
x[0] = title;
y[0] = "";
z[0] = "";
x[1] = getChart().getXYPlot().getDomainAxis().getLabel();
y[1] = getChart().getXYPlot().getRangeAxis().getLabel();
z[1] = "z-axis";
// write data
for (int i = 0; i < xyz.getItemCount(s); i++) {
x[i + 2] = xyz.getX(s, i);
y[i + 2] = xyz.getY(s, i);
z[i + 2] = xyz.getZ(s, i);
}
model[s * 3] = x;
model[s * 3 + 1] = y;
model[s * 3 + 2] = z;
}
for (Object[] o : model) {
modelList.add(o);
}
} else if (data != null) {
int series = data.getSeriesCount();
Object[][] model = new Object[series * 2][];
for (int s = 0; s < series; s++) {
int size = 2 + data.getItemCount(s);
Object[] x = new Object[size];
Object[] y = new Object[size];
// create new Array model[row][col]
// Write header
Comparable title = data.getSeriesKey(s);
x[0] = title;
y[0] = "";
x[1] = getChart().getXYPlot().getDomainAxis().getLabel();
y[1] = getChart().getXYPlot().getRangeAxis().getLabel();
// write data
for (int i = 0; i < data.getItemCount(s); i++) {
x[i + 2] = data.getX(s, i);
y[i + 2] = data.getY(s, i);
}
model[s * 2] = x;
model[s * 2 + 1] = y;
}
for (Object[] o : model) {
modelList.add(o);
}
}
}
return modelList.toArray(new Object[modelList.size()][]);
} catch (Exception ex) {
logger.log(Level.WARNING, "Cannot retrieve data for export", ex);
return null;
}
}
return null;
}
public void addAxesRangeChangedListener(AxesRangeChangedListener l) {
if (axesRangeListener == null) {
axesRangeListener = new ArrayList<AxesRangeChangedListener>(1);
}
axesRangeListener.add(l);
}
public void removeAxesRangeChangedListener(AxesRangeChangedListener l) {
if (axesRangeListener != null) {
axesRangeListener.remove(l);
}
}
public void clearAxesRangeChangedListeners() {
if (axesRangeListener != null) {
axesRangeListener.clear();
}
}
  /**
   * Enables/disables mouse zooming on both axes and clears the canvas' live handler so that a
   * zoom rectangle in progress is cancelled.
   */
  public void setMouseZoomable(boolean flag) {
    setDomainZoomable(flag);
    setRangeZoomable(flag);
    isMouseZoomable = flag;

    // TODO find better solution
    // clear handler to stop zoom rectangle (hacky solution)
    getCanvas().clearLiveHandler();
  }
  /** Delegates range-axis zoomability to the canvas. */
  public void setRangeZoomable(boolean flag) {
    getCanvas().setRangeZoomable(flag);
  }

  /** Delegates domain-axis zoomability to the canvas. */
  public void setDomainZoomable(boolean flag) {
    getCanvas().setDomainZoomable(flag);
  }

  /** @return the flag last set via {@link #setMouseZoomable(boolean)} */
  public boolean isMouseZoomable() {
    return isMouseZoomable;
  }

  public boolean isDomainZoomable() {
    return getCanvas().isDomainZoomable();
  }

  public boolean isRangeZoomable() {
    return getCanvas().isRangeZoomable();
  }

  /** @return the zoom history, or null if it was not enabled for this chart */
  public ZoomHistory getZoomHistory() {
    return zoomHistory;
  }

  public void setZoomHistory(ZoomHistory h) {
    zoomHistory = h;
  }
  /**
   * Returns the {@link ChartGestureMouseAdapter} alternatively for other ChartPanel classes use:
   *
   * <pre>
   * this.getCanvas().addAuxiliaryMouseHandler(handler);
   * </pre>
   *
   * @return the gesture adapter installed by {@link #setChart(JFreeChart)}; may be null
   */
  public ChartGestureMouseAdapterFX getGestureAdapter() {
    return mouseAdapter;
  }

  public void setGestureAdapter(ChartGestureMouseAdapterFX mouseAdapter) {
    this.mouseAdapter = mouseAdapter;
  }

  @Override
  public void datasetChanged(DatasetChangeEvent event) {
    // may be overridden by extending classes
  }
}
|
package io.github.mzmine.modules.io.import_mzxml;
import com.google.common.base.Strings;
import io.github.mzmine.datamodel.MZmineProject;
import io.github.mzmine.datamodel.MassSpectrumType;
import io.github.mzmine.datamodel.PolarityType;
import io.github.mzmine.datamodel.RawDataFile;
import io.github.mzmine.datamodel.impl.SimpleScan;
import io.github.mzmine.taskcontrol.AbstractTask;
import io.github.mzmine.taskcontrol.TaskStatus;
import io.github.mzmine.util.CompressionUtils;
import io.github.mzmine.util.ExceptionUtils;
import io.github.mzmine.util.scans.ScanUtils;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import java.util.Base64;
import java.util.Date;
import java.util.LinkedList;
import java.util.logging.Logger;
import java.util.zip.DataFormatException;
import javax.xml.datatype.DatatypeFactory;
import javax.xml.datatype.Duration;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
public class MzXMLImportTask extends AbstractTask {
  private Logger logger = Logger.getLogger(this.getClass().getName());

  // file being parsed and the import destination
  private File file;
  private MZmineProject project;
  private RawDataFile newMZmineFile;
  // progress counters; totalScans comes from the <msRun scanCount=...> attribute
  private int totalScans = 0, parsedScans;
  // peak count of the scan currently being parsed (<scan peaksCount=...>)
  private int peaksCount = 0;
  // accumulates character data between SAX events (base64 peaks, precursor m/z text)
  private StringBuilder charBuffer;
  private boolean compressFlag = false;
  private DefaultHandler handler = new MzXMLHandler();
  // "64" selects double precision when decoding peak data, anything else reads floats
  private String precision;

  // Retention time parser
  private DatatypeFactory dataTypeFactory;

  /*
   * These variables are used to set the number of fragments that one single scan can have. The
   * initial size of the array is set to 10, but it depends on the fragmentation level.
   */
  private int parentTreeValue[] = new int[10];
  private int msLevelTree = 0;

  /*
   * This stack stores the current scan and all its fragments until all the information is
   * recovered. The logic is FIFO at the moment of writing into the RawDataFile.
   */
  private LinkedList<SimpleScan> parentStack;

  /*
   * This variable holds the present scan or fragment; it is sent to the stack when another
   * scan/fragment appears via parser.startElement.
   */
  private SimpleScan buildingScan;
public MzXMLImportTask(MZmineProject project, File fileToOpen, RawDataFile newMZmineFile) {
// 256 kilo-chars buffer
charBuffer = new StringBuilder(1 << 18);
parentStack = new LinkedList<SimpleScan>();
this.project = project;
this.file = fileToOpen;
this.newMZmineFile = newMZmineFile;
}
/**
* @see io.github.mzmine.taskcontrol.Task#getFinishedPercentage()
*/
@Override
public double getFinishedPercentage() {
return totalScans == 0 ? 0 : (double) parsedScans / totalScans;
}
  /**
   * Parses the mzXML file with a SAX parser and, on success, adds the raw data file to the
   * project. Sets ERROR status (with a message) on failure and aborts silently when cancelled.
   *
   * @see java.lang.Runnable#run()
   */
  @Override
  public void run() {

    setStatus(TaskStatus.PROCESSING);
    logger.info("Started parsing file " + file);

    // Use the default (non-validating) parser
    // NOTE(review): DTDs/external entities are not disabled here, so a crafted mzXML file could
    // trigger XXE — consider hardening the factory; confirm against project policy
    SAXParserFactory factory = SAXParserFactory.newInstance();

    try {
      dataTypeFactory = DatatypeFactory.newInstance();
      SAXParser saxParser = factory.newSAXParser();
      saxParser.parse(file, handler);
      project.addFile(newMZmineFile);
    } catch (Throwable e) {
      e.printStackTrace();
      /* we may already have set the status to CANCELED */
      if (getStatus() == TaskStatus.PROCESSING) {
        setStatus(TaskStatus.ERROR);
        setErrorMessage(ExceptionUtils.exceptionToString(e));
      }
      return;
    }

    if (isCanceled()) {
      return;
    }

    if (parsedScans == 0) {
      setStatus(TaskStatus.ERROR);
      setErrorMessage("No scans found");
      return;
    }

    logger.info("Finished parsing " + file + ", parsed " + parsedScans + " scans");
    setStatus(TaskStatus.FINISHED);
  }
@Override
public String getTaskDescription() {
return "Opening file " + file;
}
private class MzXMLHandler extends DefaultHandler {
@Override
public void startElement(String namespaceURI, String lName, // local
// name
String qName, // qualified name
Attributes attrs) throws SAXException {
if (isCanceled()) {
throw new SAXException("Parsing Cancelled");
}
// <msRun>
if (qName.equals("msRun")) {
String s = attrs.getValue("scanCount");
if (s != null) {
totalScans = Integer.parseInt(s);
}
}
// <scan>
if (qName.equalsIgnoreCase("scan")) {
if (buildingScan != null) {
parentStack.addFirst(buildingScan);
buildingScan = null;
}
/*
* Only num, msLevel & peaksCount values are required according with mzxml standard, the
* others are optional
*/
int scanNumber = Integer.parseInt(attrs.getValue("num"));
// mzXML files with empty msLevel attribute do exist, so we use
// 1 as default
int msLevel = 1;
if (!Strings.isNullOrEmpty(attrs.getValue("msLevel"))) {
msLevel = Integer.parseInt(attrs.getValue("msLevel"));
}
String scanType = attrs.getValue("scanType");
String filterLine = attrs.getValue("filterLine");
String scanId = filterLine;
if (Strings.isNullOrEmpty(scanId)) {
scanId = scanType;
}
PolarityType polarity;
String polarityAttr = attrs.getValue("polarity");
if ((polarityAttr != null) && (polarityAttr.length() == 1)) {
polarity = PolarityType.fromSingleChar(polarityAttr);
} else {
polarity = PolarityType.UNKNOWN;
}
peaksCount = Integer.parseInt(attrs.getValue("peaksCount"));
// Parse retention time
float retentionTime = 0;
String retentionTimeStr = attrs.getValue("retentionTime");
if (retentionTimeStr != null) {
Date currentDate = new Date();
Duration dur = dataTypeFactory.newDuration(retentionTimeStr);
retentionTime = (float) (dur.getTimeInMillis(currentDate) / 1000d / 60d);
} else {
setStatus(TaskStatus.ERROR);
setErrorMessage("This file does not contain retentionTime for scans");
throw new SAXException("Could not read retention time");
}
int parentScan = -1;
if (msLevel > 9) {
setStatus(TaskStatus.ERROR);
setErrorMessage("msLevel value bigger than 10");
throw new SAXException("The value of msLevel is bigger than 10");
}
/*
* if (msLevel > 1) { parentScan = parentTreeValue[msLevel - 1]; for (SimpleScan p :
* parentStack) { if (p.getScanNumber() == parentScan) { p.addFragmentScan(scanNumber); } }
* }
*/
// Setting the level of fragment of scan and parent scan number
msLevelTree++;
parentTreeValue[msLevel] = scanNumber;
buildingScan = new SimpleScan(newMZmineFile, scanNumber, msLevel, retentionTime, 0, 0,
new double[0], new double[0], null, polarity, scanId, null);
}
// <peaks>
if (qName.equalsIgnoreCase("peaks")) {
// clean the current char buffer for the new element
charBuffer.setLength(0);
compressFlag = false;
String compressionType = attrs.getValue("compressionType");
if ((compressionType == null) || (compressionType.equals("none"))) {
compressFlag = false;
} else {
compressFlag = true;
}
precision = attrs.getValue("precision");
}
// <precursorMz>
if (qName.equalsIgnoreCase("precursorMz")) {
// clean the current char buffer for the new element
charBuffer.setLength(0);
String precursorCharge = attrs.getValue("precursorCharge");
if (precursorCharge != null) {
buildingScan.setPrecursorCharge(Integer.parseInt(precursorCharge));
}
}
}
/**
* endElement()
*/
@Override
public void endElement(String namespaceURI, String sName, // simple name
String qName // qualified name
) throws SAXException {
// </scan>
if (qName.equalsIgnoreCase("scan")) {
msLevelTree
/*
* At this point we verify if the scan and his fragments are closed, so we include the
* present scan/fragment into the stack and start to take elements from them (FIFO) for the
* RawDataFile.
*/
if (msLevelTree == 0) {
parentStack.addFirst(buildingScan);
buildingScan = null;
while (!parentStack.isEmpty()) {
SimpleScan currentScan = parentStack.removeLast();
try {
newMZmineFile.addScan(currentScan);
} catch (IOException e) {
e.printStackTrace();
setStatus(TaskStatus.ERROR);
setErrorMessage("IO error: " + e);
throw new SAXException("Parsing error: " + e);
}
parsedScans++;
}
/*
* The scan with all his fragments is in the RawDataFile, now we clean the stack for the
* next scan and fragments.
*/
parentStack.clear();
}
return;
}
// <precursorMz>
if (qName.equalsIgnoreCase("precursorMz")) {
final String textContent = charBuffer.toString();
double precursorMz = 0d;
if (!textContent.isEmpty()) {
precursorMz = Double.parseDouble(textContent);
}
buildingScan.setPrecursorMZ(precursorMz);
return;
}
// <peaks>
if (qName.equalsIgnoreCase("peaks")) {
byte[] peakBytes = Base64.getDecoder().decode(charBuffer.toString());
if (compressFlag) {
try {
peakBytes = CompressionUtils.decompress(peakBytes);
} catch (DataFormatException e) {
setStatus(TaskStatus.ERROR);
setErrorMessage("Corrupt compressed peak: " + e.toString());
throw new SAXException("Parsing Cancelled");
}
}
// make a data input stream
DataInputStream peakStream = new DataInputStream(new ByteArrayInputStream(peakBytes));
double mzValues[] = new double[peaksCount];
double intensityValues[] = new double[peaksCount];
try {
for (int i = 0; i < peaksCount; i++) {
// Always respect this order pairOrder="m/z-int"
double mz;
double intensity;
if ("64".equals(precision)) {
mz = peakStream.readDouble();
intensity = peakStream.readDouble();
} else {
mz = peakStream.readFloat();
intensity = peakStream.readFloat();
}
// Copy m/z and intensity data
mzValues[i] = mz;
intensityValues[i] = intensity;
}
} catch (IOException eof) {
setStatus(TaskStatus.ERROR);
setErrorMessage("Corrupt mzXML file");
throw new SAXException("Parsing Cancelled");
}
// Auto-detect whether this scan is centroided
MassSpectrumType spectrumType = ScanUtils.detectSpectrumType(mzValues, intensityValues);
// Set the centroided tag
buildingScan.setSpectrumType(spectrumType);
// Set the final data points to the scan
buildingScan.setDataPoints(mzValues, intensityValues);
return;
}
}
    /**
     * characters() — buffers character data between start/end element events (base64 peak data
     * and the precursorMz text content).
     *
     * @see org.xml.sax.ContentHandler#characters(char[], int, int)
     */
    @Override
    public void characters(char buf[], int offset, int len) throws SAXException {
      charBuffer.append(buf, offset, len);
    }
}
}
|
package io.github.mzmine.parameters.dialogs;
import com.google.common.base.Strings;
import io.github.mzmine.gui.helpwindow.HelpWindow;
import io.github.mzmine.main.MZmineCore;
import io.github.mzmine.parameters.Parameter;
import io.github.mzmine.parameters.ParameterSet;
import io.github.mzmine.parameters.UserParameter;
import io.github.mzmine.parameters.parametertypes.HiddenParameter;
import io.github.mzmine.util.ExitCode;
import io.github.mzmine.util.javafx.FxIconUtil;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;
import javafx.geometry.Insets;
import javafx.scene.Node;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.ButtonBar;
import javafx.scene.control.ButtonBar.ButtonData;
import javafx.scene.control.CheckBox;
import javafx.scene.control.ChoiceBox;
import javafx.scene.control.ComboBox;
import javafx.scene.control.Label;
import javafx.scene.control.ScrollPane;
import javafx.scene.control.TextField;
import javafx.scene.control.Tooltip;
import javafx.scene.image.Image;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.ColumnConstraints;
import javafx.scene.layout.GridPane;
import javafx.scene.layout.Region;
import javafx.scene.web.WebView;
import javafx.stage.Stage;
/**
* This class represents the parameter setup dialog to set the values of SimpleParameterSet. Each
* Parameter is represented by a component. The component can be obtained by calling
* getComponentForParameter(). Type of component depends on parameter type:
* <p>
* TODO: parameter setup dialog should show the name of the module in the title
*/
public class ParameterSetupDialog extends Stage {
  public static final Logger logger = Logger.getLogger(ParameterSetupDialog.class.getName());

  // URL of "help/help.html" next to the parameter set class, or null if absent
  protected final URL helpURL;

  // Parameters and their representation in the dialog
  protected final ParameterSet parameterSet;
  protected final Map<String, Node> parametersAndComponents = new HashMap<>();

  // Buttons
  protected final Button btnOK, btnCancel, btnHelp;

  // Button panel - added here so it is possible to move buttons as a whole,
  // if needed.
  protected final ButtonBar pnlButtons;

  // Footer message (html), shown in a WebView above the grid when non-empty
  protected final String footerMessage;

  /**
   * This single panel contains a grid of all the components of this dialog. Row number 100 contains
   * all the buttons of the dialog. Derived classes may add their own components such as previews to
   * the unused cells of the grid.
   */
  protected final GridPane paramsPane;
  protected final BorderPane mainPane;
  protected final ScrollPane mainScrollPane;
  /*
   * Structure: <p></p> //
   * - mainPane <p></p> //
   * -bottom <p></p> //
   * - pnlButtons <p></p> //
   * -center <p></p> //
   * - mainScrollPane <p></p> //
   * - paramsPane <p></p> //
   */

  // If true, the dialog won't allow the OK button to proceed, unless all
  // parameters pass the value check. This is undesirable in the BatchMode
  // setup dialog, where some parameters need to be set in advance according
  // to values that are not yet imported etc.
  private final boolean valueCheckRequired;

  /**
   * Help window for this setup dialog. Initially null, until the user clicks the Help button.
   */
  protected HelpWindow helpWindow = null;

  // result of the dialog; set by closeDialog() and read via getExitCode()
  private ExitCode exitCode = ExitCode.UNKNOWN;
  /**
   * Constructor — creates the dialog without a footer message.
   */
  public ParameterSetupDialog(boolean valueCheckRequired, ParameterSet parameters) {
    this(valueCheckRequired, parameters, null);
  }
  /**
   * Method to display setup dialog with a html-formatted footer message at the bottom.
   *
   * @param valueCheckRequired if true, OK only closes the dialog when all parameter values pass
   *        checkParameterValues()
   * @param parameters the parameter set edited by this dialog
   * @param message: html-formatted text shown as a footer (may be null)
   */
  @SuppressWarnings({"rawtypes", "unchecked"})
  public ParameterSetupDialog(boolean valueCheckRequired, ParameterSet parameters, String message) {
    Image mzmineIcon = FxIconUtil.loadImageFromResources("MZmineIcon.png");
    this.getIcons().add(mzmineIcon);

    this.valueCheckRequired = valueCheckRequired;
    this.parameterSet = parameters;
    this.helpURL = parameters.getClass().getResource("help/help.html");
    this.footerMessage = message;

    // Main panel which holds all the components in a grid
    mainPane = new BorderPane();
    Scene scene = new Scene(mainPane);

    // Use main CSS
    scene.getStylesheets()
        .addAll(MZmineCore.getDesktop().getMainWindow().getScene().getStylesheets());
    setScene(scene);

    paramsPane = new GridPane();
    // paramsPane.setStyle("-fx-border-color: blue;");
    ColumnConstraints column1 = new ColumnConstraints();
    /*
     * Adding an empty ColumnConstraints object for column2 has the effect of not setting any
     * constraints, leaving the GridPane to compute the column's layout based solely on its
     * content's size preferences and constraints.
     */
    ColumnConstraints column2 = new ColumnConstraints();
    paramsPane.getColumnConstraints().addAll(column1, column2);

    mainScrollPane = new ScrollPane(paramsPane);
    // mainScrollPane.setStyle("-fx-border-color: red;");
    mainScrollPane.setFitToWidth(true);
    mainScrollPane.setFitToHeight(true);
    mainScrollPane.setPadding(new Insets(10.0));
    mainPane.setCenter(mainScrollPane);

    int rowCounter = 0;
    int vertWeightSum = 0;

    // Create labels and components for each parameter
    for (Parameter<?> p : parameterSet.getParameters()) {

      // only UserParameters have an editing component
      if (!(p instanceof UserParameter)) {
        continue;
      }
      UserParameter up = (UserParameter) p;

      Node comp = up.createEditingComponent();
      addToolTipToControls(comp, up.getDescription());

      if (comp instanceof Region) {
        double minWidth = ((Region) comp).getMinWidth();
        // if (minWidth > column2.getMinWidth()) column2.setMinWidth(minWidth);
        // paramsPane.setMinWidth(minWidth + 200);
      }

      GridPane.setMargin(comp, new Insets(5.0, 0.0, 5.0, 0.0));

      // Set the initial value
      Object value = up.getValue();
      if (value != null) {
        up.setValueToComponent(comp, value);
      }

      // Add listeners so we are notified about any change in the values
      addListenersToNode(comp);

      // By calling this we make sure the components will never be resized
      // smaller than their optimal size
      // comp.setMinimumSize(comp.getPreferredSize());
      // comp.setToolTipText(up.getDescription());

      Label label = new Label(p.getName());
      label.minWidthProperty().bind(label.widthProperty());
      label.setPadding(new Insets(0.0, 10.0, 0.0, 0.0));
      label.setStyle("-fx-font-weight: bold");
      paramsPane.add(label, 0, rowCounter);
      label.setLabelFor(comp);

      parametersAndComponents.put(p.getName(), comp);

      // TODO: Multiple selection will be expandable, other components not
      /*
       * JComboBox t = new JComboBox(); int comboh = t.getPreferredSize().height; int comph =
       * comp.getPreferredSize().height; int verticalWeight = comph > 2 * comboh ? 1 : 0;
       * vertWeightSum += verticalWeight;
       */

      paramsPane.add(comp, 1, rowCounter, 1, 1);
      rowCounter++;
    }

    btnOK = new Button("OK");
    btnOK.setOnAction(e -> {
      closeDialog(ExitCode.OK);
    });
    ButtonBar.setButtonData(btnOK, ButtonData.OK_DONE);

    btnCancel = new Button("Cancel");
    btnCancel.setOnAction(e -> {
      closeDialog(ExitCode.CANCEL);
    });
    ButtonBar.setButtonData(btnCancel, ButtonData.CANCEL_CLOSE);

    // Add buttons to the ButtonBar
    pnlButtons = new ButtonBar();
    pnlButtons.getButtons().addAll(btnOK, btnCancel);
    pnlButtons.setPadding(new Insets(10.0));

    if (helpURL != null) {
      btnHelp = new Button("Help");
      btnHelp.setOnAction(e -> {
        // reuse the already-open help window instead of creating a new one
        if (helpWindow != null) {
          helpWindow.show();
          helpWindow.toFront();
        } else {
          helpWindow = new HelpWindow(helpURL.toString());
          helpWindow.show();
        }
      });
      ButtonBar.setButtonData(btnHelp, ButtonData.HELP);
      pnlButtons.getButtons().add(btnHelp);
    } else {
      btnHelp = null;
    }

    mainPane.setBottom(pnlButtons);

    if (!Strings.isNullOrEmpty(footerMessage)) {

      WebView label = new WebView();
      label.getEngine().loadContent(footerMessage);
      label.setMaxHeight(100.0);
      // label.setWrapText(true);
      // notificationPane.setShowFromTop(false);
      // notificationPane.getActions().add(new Action("Close", e -> notificationPane.hide()));
      mainPane.setTop(label);

      /*
       * JEditorPane editorPane = GUIUtils.addEditorPane(footerMessage);
       * editorPane.addHyperlinkListener(new HyperlinkListener() {
       *
       * @Override public void hyperlinkUpdate(HyperlinkEvent e) { if
       * (HyperlinkEvent.EventType.ACTIVATED.equals(e.getEventType())) { try {
       * Desktop.getDesktop().browse(e.getURL().toURI()); } catch (Exception ex) {
       * ex.printStackTrace(); } } } });
       */

      // This line is important on Windows, where resizing the dialog has
      // unexpected consequences on
      // some components
      // editorPane.setMinimumSize(editorPane.getPreferredSize());
      // mainPanel.add(editorPane, 0, 98, 3, 1);
      // mainPanel.addCenter(pnlButtons, 0, 100, 3, 1);
    }

    // Add some space around the widgets
    // GUIUtils.addMargin(mainPanel, 10);

    setTitle("Please set the parameters");

    // minWidthProperty().bind(scene.widthProperty());
    // minHeightProperty().bind(scene.widthProperty().divide(1.5));
    setMinWidth(500.0);
    setMinHeight(400.0);

    centerOnScreen();
  }
  /**
   * Method for reading exit code
   *
   * @return OK/CANCEL/UNKNOWN depending on how the dialog was closed
   */
  public ExitCode getExitCode() {
    return exitCode;
  }

  /**
   * Returns the editing component created for the given parameter, or null if none exists.
   */
  @SuppressWarnings("unchecked")
  public <ComponentType extends Node> ComponentType getComponentForParameter(
      UserParameter<?, ComponentType> p) {
    return (ComponentType) parametersAndComponents.get(p.getName());
  }
@SuppressWarnings({"unchecked", "rawtypes"})
protected void updateParameterSetFromComponents() {
for (Parameter<?> p : parameterSet.getParameters()) {
if (!(p instanceof UserParameter) && !(p instanceof HiddenParameter)) {
continue;
}
UserParameter up;
if (p instanceof UserParameter) {
up = (UserParameter) p;
} else {
up = (UserParameter) ((HiddenParameter) p).getEmbeddedParameter();
}
Node component = parametersAndComponents.get(p.getName());
// if a parameter is a HiddenParameter it does not necessarily have
// component
if (component != null) {
up.setValueFromComponent(component);
}
}
}
  /** @return the number of parameters in the underlying parameter set */
  protected int getNumberOfParameters() {
    return parameterSet.getParameters().length;
  }
/**
* This method may be called by some of the dialog components, for example as a result of
* double-click by user
*/
public void closeDialog(ExitCode exitCode) {
if (exitCode == ExitCode.OK) {
// commit the changes to the parameter set
updateParameterSetFromComponents();
if (valueCheckRequired) {
ArrayList<String> messages = new ArrayList<String>();
boolean allParametersOK = parameterSet.checkParameterValues(messages);
if (!allParametersOK) {
StringBuilder message = new StringBuilder("Please check the parameter settings:\n\n");
for (String m : messages) {
message.append(m);
message.append("\n");
}
MZmineCore.getDesktop().displayMessage(null, message.toString());
return;
}
}
}
this.exitCode = exitCode;
hide();
}
  /**
   * This method does nothing, but it is called whenever user changes the parameters. It can be
   * overridden in extending classes to update the preview components, for example.
   */
  protected void parametersChanged() {
    // intentionally empty — hook for subclasses
  }
protected void addListenersToNode(Node node) {
if (node instanceof TextField) {
TextField textField = (TextField) node;
textField.textProperty().addListener(((observable, oldValue, newValue) -> {
parametersChanged();
}));
}
if (node instanceof ComboBox) {
ComboBox<?> comboComp = (ComboBox<?>) node;
comboComp.valueProperty()
.addListener(((observable, oldValue, newValue) -> parametersChanged()));
}
if (node instanceof ChoiceBox) {
ChoiceBox<?> choiceBox = (ChoiceBox) node;
choiceBox.valueProperty()
.addListener(((observable, oldValue, newValue) -> parametersChanged()));
}
if (node instanceof CheckBox) {
CheckBox checkBox = (CheckBox) node;
checkBox.selectedProperty()
.addListener(((observable, oldValue, newValue) -> parametersChanged()));
}
if (node instanceof Region) {
Region panelComp = (Region)
node;
for (int i = 0; i < panelComp.getChildrenUnmodifiable().size(); i++) {
Node child =
panelComp.getChildrenUnmodifiable().get(i);
/*if (!(child instanceof Control)) {
continue;
}*/
addListenersToNode(child);
}
}
}
  /** @return true if parameter values must pass validation before OK closes the dialog */
  public boolean isValueCheckRequired() {
    return valueCheckRequired;
  }
protected void addToolTipToControls(Node node, String toolTipText) {
if (node instanceof TextField) {
TextField textField = (TextField) node;
textField.setTooltip(new Tooltip(toolTipText));
}
if (node instanceof ComboBox) {
ComboBox<?> comboComp = (ComboBox<?>) node;
comboComp.setTooltip(new Tooltip(toolTipText));
}
if (node instanceof ChoiceBox) {
ChoiceBox<?> choiceBox = (ChoiceBox) node;
choiceBox.setTooltip(new Tooltip(toolTipText));
}
if (node instanceof CheckBox) {
CheckBox checkBox = (CheckBox) node;
checkBox.setTooltip(new Tooltip(toolTipText));
}
if (node instanceof Region) {
Region panelComp = (Region)
node;
for (int i = 0; i < panelComp.getChildrenUnmodifiable().size(); i++) {
Node child =
panelComp.getChildrenUnmodifiable().get(i);
addListenersToNode(child);
}
}
}
}
|
package io.github.mzmine.util.spectraldb.parser;
import io.github.mzmine.datamodel.DataPoint;
import io.github.mzmine.taskcontrol.AbstractTask;
import io.github.mzmine.util.spectraldb.entry.SpectralDBEntry;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.logging.Logger;
public abstract class SpectralDBParser {

  private static final Logger logger = Logger.getLogger(SpectralDBParser.class.getName());

  // number of entries buffered before they are handed to the processor
  protected final int bufferEntries;
  // process entries
  protected final LibraryEntryProcessor processor;
  // buffer of parsed entries awaiting processing; guarded by synchronized(list)
  private final List<SpectralDBEntry> list;
  // running total of entries already handed to the processor
  private int processedEntries = 0;

  public SpectralDBParser(int bufferEntries, LibraryEntryProcessor processor) {
    list = new ArrayList<>();
    this.bufferEntries = bufferEntries;
    this.processor = processor;
  }

  /**
   * Parses the file and creates spectral db entries.
   *
   * @param mainTask the task driving the parse
   * @param dataBaseFile file to parse
   * @return {@code true} on success — NOTE(review): the previous doc said
   *         "the list or an empty list", which does not match the boolean
   *         signature; confirm the exact contract against implementations
   * @throws UnsupportedFormatException if the file format is not supported
   * @throws IOException exception while reading file
   */
  public abstract boolean parse(AbstractTask mainTask, File dataBaseFile)
      throws UnsupportedFormatException, IOException;

  /**
   * Add DB entry and push every {@code bufferEntries} entries. Does not allow 0 intensity values.
   *
   * @param entry handle parsed library entry
   * @return {@code false} if the entry contains a zero-intensity data point,
   *         {@code true} once it has been buffered
   */
  protected boolean addLibraryEntry(SpectralDBEntry entry) {
    // no 0 values allowed in entry
    if (Arrays.stream(entry.getDataPoints()).mapToDouble(DataPoint::getIntensity)
        .anyMatch(v -> Double.compare(v, 0) == 0)) {
      return false;
    }
    synchronized (list) {
      list.add(entry);
      if (list.size() % bufferEntries == 0) {
        // buffer full: hand the batch to the processor
        // push entries
        processor.processNextEntries(list, processedEntries);
        processedEntries += list.size();
        // NOTE(review): the processor receives the live list which is cleared
        // immediately below — confirm implementations copy it before returning
        list.clear();
      }
      return true;
    }
  }

  /**
   * Finish and push last entries.
   */
  protected void finish() {
    // push entries
    synchronized (list) {
      if (!list.isEmpty()) {
        logger.info("Imported last " + list.size() + " library entries");
        processor.processNextEntries(list, processedEntries);
        processedEntries += list.size();
        list.clear();
      }
    }
    logger.info(processedEntries + " library entries imported");
  }
}
|
package som.primitives.actors;
import com.oracle.truffle.api.CompilerDirectives.CompilationFinal;
import com.oracle.truffle.api.dsl.GenerateNodeFactory;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.source.SourceSection;
import som.compiler.MixinBuilder.MixinDefinitionId;
import som.interpreter.actors.SFarReference;
import som.interpreter.actors.SPromise;
import som.interpreter.actors.SPromise.SResolver;
import som.interpreter.nodes.nary.UnaryExpressionNode;
import som.primitives.Primitive;
import som.vmobjects.SClass;
import som.vmobjects.SObject.SImmutableObject;
/**
 * Holder for the actor-related SOM classes and module object. Each nested
 * primitive stores the class (or module object) it receives and returns the
 * value unchanged. The abstract classes are completed by Truffle-DSL code
 * generation ({@code @GenerateNodeFactory}/{@code @Specialization}).
 */
public final class ActorClasses {

  // Module object captured by actorsModule:
  @CompilationFinal public static SImmutableObject ActorModule;
  // Mixin id of the far-reference class, captured by actorsFarReferenceClass:
  @CompilationFinal public static MixinDefinitionId FarRefId;

  /** Registers the SOM class used for far references and records its mixin id. */
  @GenerateNodeFactory
  @Primitive(primitive = "actorsFarReferenceClass:")
  public abstract static class SetFarReferenceClassPrim extends UnaryExpressionNode {
    public SetFarReferenceClassPrim(final boolean eagWrap, final SourceSection source) { super(eagWrap, source); }

    @Specialization
    public final SClass setClass(final SClass value) {
      SFarReference.setSOMClass(value);
      FarRefId = value.getMixinDefinition().getMixinId();
      return value;
    }
  }

  /** Registers the SOM class used for promises. */
  @GenerateNodeFactory
  @Primitive(primitive = "actorsPromiseClass:")
  public abstract static class SetPromiseClassPrim extends UnaryExpressionNode {
    public SetPromiseClassPrim(final boolean eagWrap, final SourceSection source) { super(eagWrap, source); }

    @Specialization
    public final SClass setClass(final SClass value) {
      SPromise.setSOMClass(value);
      return value;
    }
  }

  /** Registers the SOM class used for promise pairs. */
  @GenerateNodeFactory
  @Primitive(primitive = "actorsPairClass:")
  public abstract static class SetPairClassPrim extends UnaryExpressionNode {
    public SetPairClassPrim(final boolean eagWrap, final SourceSection source) { super(eagWrap, source); }

    @Specialization
    public final SClass setClass(final SClass value) {
      SPromise.setPairClass(value);
      return value;
    }
  }

  /** Registers the SOM class used for promise resolvers. */
  @GenerateNodeFactory
  @Primitive(primitive = "actorsResolverClass:")
  public abstract static class SetResolverClassPrim extends UnaryExpressionNode {
    public SetResolverClassPrim(final boolean eagWrap, final SourceSection source) { super(eagWrap, source); }

    @Specialization
    public final SClass setClass(final SClass value) {
      SResolver.setSOMClass(value);
      return value;
    }
  }

  /** Stores the actor module object. */
  @GenerateNodeFactory
  @Primitive(primitive = "actorsModule:")
  public abstract static class SetModulePrim extends UnaryExpressionNode {
    public SetModulePrim(final boolean eagWrap, final SourceSection source) { super(eagWrap, source); }

    @Specialization
    public final SImmutableObject setClass(final SImmutableObject value) {
      ActorModule = value;
      return value;
    }
  }
}
|
package io.jenkins.plugins.kubernetes;
import hudson.Extension;
import hudson.model.Label;
import hudson.model.LoadStatistics;
import hudson.slaves.Cloud;
import hudson.slaves.NodeProvisioner;
import jenkins.model.Jenkins;
import org.csanchez.jenkins.plugins.kubernetes.KubernetesCloud;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Implementation of {@link NodeProvisioner.Strategy} which will provision a new node immediately as
* a task enter the queue.
* In kubernetes, we don't really need to wait before provisioning a new node,
* because kubernetes agents can be started and destroyed quickly
*
* @author <a href="mailto:root@junwuhui.cn">runzexia</a>
*/
@Extension(ordinal = 100)
public class NoDelayProvisionerStrategy extends NodeProvisioner.Strategy {

    private static final Logger LOGGER = Logger.getLogger(NoDelayProvisionerStrategy.class.getName());

    /**
     * Escape hatch: set the system property to {@code true} to disable this
     * strategy and fall back to the default (delayed) provisioning.
     * Uses the idiomatic Boolean.getBoolean instead of
     * Boolean.valueOf(System.getProperty(...)) — same semantics (absent or
     * non-"true" property yields false).
     */
    private static final boolean DISABLE_NODELAY_PROVISING = Boolean.getBoolean(
            "io.jenkins.plugins.kubernetes.disableNoDelayProvisioning");

    /**
     * Provisions Kubernetes agents immediately whenever demand exceeds the
     * currently available (plus already planned) capacity.
     *
     * @param strategyState snapshot of queue demand and executor capacity
     * @return PROVISIONING_COMPLETED when capacity covers demand, otherwise
     *         CONSULT_REMAINING_STRATEGIES
     */
    @Override
    public NodeProvisioner.StrategyDecision apply(NodeProvisioner.StrategyState strategyState) {
        if (DISABLE_NODELAY_PROVISING) {
            LOGGER.log(Level.FINE, "Provisioning not complete, NoDelayProvisionerStrategy is disabled");
            return NodeProvisioner.StrategyDecision.CONSULT_REMAINING_STRATEGIES;
        }
        final Label label = strategyState.getLabel();

        LoadStatistics.LoadStatisticsSnapshot snapshot = strategyState.getSnapshot();
        int availableCapacity =
                snapshot.getAvailableExecutors()   // live executors
                        + snapshot.getConnectingExecutors()  // executors present but not yet connected
                        + strategyState.getPlannedCapacitySnapshot()     // capacity added by previous strategies from previous rounds
                        + strategyState.getAdditionalPlannedCapacity();  // capacity added by previous strategies _this round_
        int currentDemand = snapshot.getQueueLength();
        LOGGER.log(Level.FINE, "Available capacity={0}, currentDemand={1}",
                new Object[]{availableCapacity, currentDemand});
        if (availableCapacity < currentDemand) {
            // Shuffle so that load is spread across equally-capable clouds.
            List<Cloud> jenkinsClouds = new ArrayList<>(Jenkins.get().clouds);
            Collections.shuffle(jenkinsClouds);
            for (Cloud cloud : jenkinsClouds) {
                if (!(cloud instanceof KubernetesCloud)) continue;
                if (!cloud.canProvision(label)) continue;
                Collection<NodeProvisioner.PlannedNode> plannedNodes = cloud.provision(label, currentDemand - availableCapacity);
                // IMPROVEMENT: previously we broke out of the loop even when the
                // cloud planned zero nodes; try the remaining clouds instead.
                if (plannedNodes.isEmpty()) continue;
                LOGGER.log(Level.FINE, "Planned {0} new nodes", plannedNodes.size());
                strategyState.recordPendingLaunches(plannedNodes);
                availableCapacity += plannedNodes.size();
                LOGGER.log(Level.FINE, "After provisioning, available capacity={0}, currentDemand={1}", new Object[]{availableCapacity, currentDemand});
                break;
            }
        }
        if (availableCapacity >= currentDemand) {
            LOGGER.log(Level.FINE, "Provisioning completed");
            return NodeProvisioner.StrategyDecision.PROVISIONING_COMPLETED;
        } else {
            LOGGER.log(Level.FINE, "Provisioning not complete, consulting remaining strategies");
            return NodeProvisioner.StrategyDecision.CONSULT_REMAINING_STRATEGIES;
        }
    }
}
|
package ml.duncte123.skybot.commands.essentials.eval;
import groovy.lang.GroovyShell;
import ml.duncte123.skybot.commands.essentials.eval.filter.EvalFilter;
import ml.duncte123.skybot.objects.command.Command;
import ml.duncte123.skybot.utils.AirUtils;
import ml.duncte123.skybot.utils.Settings;
import net.dv8tion.jda.core.events.message.guild.GuildMessageReceivedEvent;
import org.codehaus.groovy.control.CompilerConfiguration;
import org.kohsuke.groovy.sandbox.SandboxTransformer;
import javax.script.Bindings;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.*;
public class EvalCommand extends Command {

    /** Engine used for the unrestricted owner eval. */
    private final ScriptEngine engine;
    /** Sandboxed shell used for the public eval. */
    private final GroovyShell sh;
    /** Packages star-imported into every evaluated script. */
    private final List<String> packageImports;
    /** Executor that runs evaluations so they can be cancelled on timeout. */
    private final ScheduledExecutorService service =
            Executors.newScheduledThreadPool(1, r -> new Thread(r, "Eval-Thread"));
    /** Filter that rejects dangerous constructs in public evals. */
    private final EvalFilter filter = new EvalFilter();

    /**
     * This initialises the engine
     */
    public EvalCommand() {
        //the GroovyShell is for the public eval
        sh = new GroovyShell(
                new CompilerConfiguration().addCompilationCustomizers(new SandboxTransformer())
        );
        //ScriptEngine for owner eval
        engine = new ScriptEngineManager(sh.getClassLoader()).getEngineByName("groovy");
        packageImports = Arrays.asList("java.io",
                "java.lang",
                "java.util",
                "net.dv8tion.jda.core",
                "net.dv8tion.jda.core.entities",
                "net.dv8tion.jda.core.entities.impl",
                "net.dv8tion.jda.core.managers",
                "net.dv8tion.jda.core.managers.impl",
                "net.dv8tion.jda.core.utils",
                "ml.duncte123.skybot.utils");
    }

    /**
     * Evaluates the message body as a Groovy script. The bot owner runs with
     * full bindings and a 10s timeout; everyone else runs sandboxed with a 5s
     * timeout and the {@link EvalFilter} restrictions.
     *
     * @param invoke the command name used to invoke this command
     * @param args   the command arguments
     * @param event  the message event that triggered the command
     */
    @Override
    public void executeCommand(String invoke, String[] args, GuildMessageReceivedEvent event) {
        boolean isRanByBotOwner = event.getAuthor().getId().equals(Settings.ownerId);
        ScheduledFuture<?> future = null;
        try {
            // Prefix the user code with star imports for the allowed packages.
            StringBuilder importStringBuilder = new StringBuilder();
            for (final String s : packageImports) {
                importStringBuilder.append("import ").append(s).append(".*;");
            }
            // Strip the invocation word from the raw message to get the code.
            String raw = event.getMessage().getRawContent();
            String script = importStringBuilder.toString() +
                    raw.substring(raw.split(" ")[0].length())
                            .replaceAll("getToken", "getSelfUser");
            int timeout = 5;
            if (isRanByBotOwner) {
                timeout = 10;
                engine.put("commands", AirUtils.commandManager.getCommands());
                engine.put("message", event.getMessage());
                engine.put("channel", event.getMessage().getTextChannel());
                engine.put("guild", event.getGuild());
                engine.put("member", event.getMember());
                engine.put("jda", event.getJDA());
                engine.put("shardmanager", event.getJDA().asBot().getShardManager());
                engine.put("event", event);
                engine.put("args", args);
                future = service.schedule(() -> engine.eval(script), 0, TimeUnit.MILLISECONDS);
            } else {
                // Public eval: reject constructs the sandbox cannot safely run.
                if (filter.filterArrays(script))
                    throw new IllegalArgumentException("Arrays are not allowed");
                if (filter.filterLoops(script))
                    throw new IllegalArgumentException("Loops are not allowed");
                future = service.schedule(() -> {
                    filter.register();
                    return sh.evaluate(script);
                }, 0, TimeUnit.MILLISECONDS);
            }
            Object out = future.get(timeout, TimeUnit.SECONDS);
            if (out != null && !String.valueOf(out).isEmpty()) {
                sendMsg(event, (!isRanByBotOwner ? "**" + event.getAuthor().getName() + ":** " : "") + out.toString());
            } else {
                sendSuccess(event.getMessage());
            }
        }
        catch (ExecutionException e1) {
            event.getChannel().sendMessage("ERROR: " + e1.getCause().toString()).queue();
            sendError(event.getMessage());
        }
        catch (TimeoutException | InterruptedException e2) {
            // BUG FIX: cancel(true) was previously invoked twice in a row;
            // cancel once, guarded against a (theoretical) null future.
            if (future != null && !future.isCancelled()) {
                future.cancel(true);
            }
            if (e2 instanceof InterruptedException) {
                // Restore the interrupt status so callers can observe it.
                Thread.currentThread().interrupt();
            }
            event.getChannel().sendMessage("ERROR: " + e2.toString()).queue();
            sendError(event.getMessage());
        }
        catch (IllegalArgumentException e3) {
            sendMsg(event, "ERROR: " + e3.toString());
        }
        // NOTE(review): an explicit System.gc() is generally discouraged;
        // kept for parity with the original behavior.
        System.gc();
    }

    /** @return a short description of this command. */
    @Override
    public String help() {
        return "A simple eval command";
    }

    /** @return the command keyword. */
    @Override
    public String getName() {
        return "eval";
    }
}
|
package net.imagej.ops.image.normalize;
import net.imagej.ops.AbstractComputerOp;
import net.imagej.ops.ComputerOp;
import net.imagej.ops.OpEnvironment;
import net.imglib2.IterableInterval;
import net.imglib2.converter.Converter;
import net.imglib2.type.numeric.RealType;
import net.imglib2.util.Pair;
/**
 * Simple {@link ComputerOp} and {@link Converter} performing a linear
 * normalization from a source interval onto a target interval. Missing bounds
 * are derived from the input (min/max scan) or the type's value range.
 *
 * @author Christian Dietz (University of Konstanz)
 */
class NormalizeRealTypeComputer<T extends RealType<T>> extends
    AbstractComputerOp<T, T> implements Converter<T, T>
{

    private double targetMin, targetMax, sourceMin, factor;

    public NormalizeRealTypeComputer(final OpEnvironment ops, final T sourceMin,
        final T sourceMax, final T targetMin, final T targetMax,
        final IterableInterval<T> input)
    {
        final double srcMax;
        if (sourceMin == null || sourceMax == null) {
            // Only scan the input when at least one source bound is missing.
            final Pair<T, T> minMax = ops.stats().minMax(input);
            this.sourceMin = sourceMin == null ? minMax.getA().getRealDouble()
                : sourceMin.getRealDouble();
            srcMax = sourceMax == null ? minMax.getB().getRealDouble()
                : sourceMax.getRealDouble();
        }
        else {
            this.sourceMin = sourceMin.getRealDouble();
            srcMax = sourceMax.getRealDouble();
        }
        // Missing target bounds default to the element type's value range.
        this.targetMax = targetMax == null ? input.firstElement().getMaxValue()
            : targetMax.getRealDouble();
        this.targetMin = targetMin == null ? input.firstElement().getMinValue()
            : targetMin.getRealDouble();
        // NOTE(review): srcMax == sourceMin yields an infinite factor —
        // confirm callers never normalize a constant image.
        this.factor = 1.0d / (srcMax - this.sourceMin) *
            (this.targetMax - this.targetMin);
    }

    @Override
    public void compute(final T input, final T output) {
        final double scaled =
            (input.getRealDouble() - sourceMin) * factor + targetMin;
        // Clamp the rescaled value into the target interval.
        if (scaled > targetMax) {
            output.setReal(targetMax);
        }
        else if (scaled < targetMin) {
            output.setReal(targetMin);
        }
        else {
            output.setReal(scaled);
        }
    }

    @Override
    public void convert(final T input, final T output) {
        compute(input, output);
    }
}
|
package net.malisis.core.renderer.model.loader;
import java.io.InputStreamReader;
import java.io.Reader;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import net.malisis.core.MalisisCore;
import net.malisis.core.renderer.animation.Animation;
import net.malisis.core.renderer.animation.transformation.AlphaTransform;
import net.malisis.core.renderer.animation.transformation.BrightnessTransform;
import net.malisis.core.renderer.animation.transformation.ChainedTransformation;
import net.malisis.core.renderer.animation.transformation.ColorTransform;
import net.malisis.core.renderer.animation.transformation.ParallelTransformation;
import net.malisis.core.renderer.animation.transformation.Rotation;
import net.malisis.core.renderer.animation.transformation.Scale;
import net.malisis.core.renderer.animation.transformation.Transformation;
import net.malisis.core.renderer.animation.transformation.Translation;
import net.malisis.core.renderer.element.Shape;
import net.malisis.core.renderer.model.IAnimationLoader;
import net.malisis.core.util.Silenced;
import net.minecraft.client.Minecraft;
import net.minecraft.client.resources.IResource;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.math.MathHelper;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.InstanceCreator;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import com.google.gson.annotations.SerializedName;
import com.google.gson.reflect.TypeToken;
import com.google.gson.stream.JsonReader;
public class AnimationImporter implements IAnimationLoader
{
/** List of {@link Transform} defined in the JSON, keyed by name. Populated by Gson. */
private Map<String, Transform> transforms = Maps.newHashMap();
/** List of {@link Anim} defined in the JSON, keyed by animation name. Populated via deserializeAnim. */
private Multimap<String, Anim> anims = ArrayListMultimap.create();
/** List of {@link Animation} built from the JSON (filled lazily by getAnimations). */
private Multimap<String, Animation<Shape>> animations = ArrayListMultimap.create();

/**
 * Instantiates a new {@link AnimationImporter} and immediately loads the
 * given JSON resource into this instance.
 *
 * @param resourceLocation the resource location
 */
public AnimationImporter(ResourceLocation resourceLocation)
{
    load(resourceLocation);
}
/**
 * Builds and returns the {@link Animation Animations} for the given shapes
 * from the parsed "anims" entries.
 *
 * @param shapes the named shapes available to animate
 * @return the animations, keyed by animation name
 */
@Override
public Multimap<String, Animation<Shape>> getAnimations(Map<String, Shape> shapes)
{
    for (Entry<String, Anim> entry : anims.entries())
    {
        Anim anim = entry.getValue();
        Shape shape = shapes.get(anim.group);
        Transformation<?, Shape> transfo = getTransform(anim.transform);
        // Skip entries whose shape or transform could not be resolved.
        if (shape == null || transfo == null)
            continue;

        transfo.reversed(anim.reversed);
        Animation<Shape> animation = new Animation<>(shape, transfo);
        animation.setRender(false, anim.persist);
        animations.put(entry.getKey(), animation);
    }
    return animations;
}
/**
 * Loads and reads the JSON animation resource, populating this importer's
 * {@code transforms} and {@code anims} via Gson.
 *
 * @param resourceLocation the resource location
 */
public void load(ResourceLocation resourceLocation)
{
    IResource res = Silenced.get(() -> Minecraft.getMinecraft().getResourceManager().getResource(resourceLocation));
    if (res == null)
        return;

    GsonBuilder gsonBuilder = new GsonBuilder();
    //we don't want GSON to create a new AnimationImporter but use this current one
    gsonBuilder.registerTypeAdapter(AnimationImporter.class, (InstanceCreator<AnimationImporter>) type -> this);
    //no builtin way to deserialize multimaps
    gsonBuilder.registerTypeAdapter(Multimap.class, (JsonDeserializer<Multimap<String, Anim>>) this::deserializeAnim);
    Gson gson = gsonBuilder.create();

    try (Reader reader = new InputStreamReader(res.getInputStream(), "UTF-8"))
    {
        // Lenient mode tolerates minor JSON format deviations in resource packs.
        JsonReader jsonReader = new JsonReader(reader);
        jsonReader.setLenient(true);
        gson.fromJson(jsonReader, AnimationImporter.class);
    }
    catch (Exception e)
    {
        MalisisCore.log.error("Failed to read {}", resourceLocation, e);
    }
}
/**
 * Resolves a named {@link Transform} into a {@link Transformation}.
 *
 * @param name the transform name as declared in the JSON
 * @return the transformation, or {@code null} if no transform has that name
 */
public Transformation<?, Shape> getTransform(String name)
{
    Transform transform = transforms.get(name);
    return transform == null ? null : transform.getTransformation(this);
}
/**
 * Deserialize the "anims" multimap: each JSON key maps to a list of
 * {@link Anim} objects, all collected into a single {@link Multimap}.
 *
 * @param json the json
 * @param typeOfT the type of t
 * @param context the context
 * @return the multimap
 * @throws JsonParseException the json parse exception
 */
public Multimap<String, Anim> deserializeAnim(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException
{
    Multimap<String, Anim> anims = ArrayListMultimap.create();
    JsonObject obj = json.getAsJsonObject();
    // Anonymous subclass captures the generic list type for Gson.
    TypeToken<ArrayList<Anim>> token = new TypeToken<ArrayList<Anim>>()
    {
    };
    for (Entry<String, JsonElement> entry : obj.entrySet())
        anims.putAll(entry.getKey(), context.deserialize(entry.getValue(), token.getType()));

    return anims;
}
/** Kinds of transformations supported in the JSON; matched by their serialized names. */
private static enum TransformType
{
    @SerializedName("translation")
    TRANSLATION,
    @SerializedName("rotation")
    ROTATION,
    @SerializedName("scale")
    SCALE,
    @SerializedName("chained")
    CHAINED,
    @SerializedName("parallel")
    PARALLEL,
    @SerializedName("color")
    COLOR,
    @SerializedName("alpha")
    ALPHA,
    @SerializedName("brightness")
    BRIGHTNESS,
}
/** One "transform" entry from the JSON. Fields are populated by Gson directly
 *  from the JSON keys — do not rename them without changing the file format. */
private static class Transform
{
    /** Kind of transformation this entry describes. */
    private TransformType type;
    // "from"/"to" may be a number (rotation/alpha/brightness) or a 3-element
    // list (translation/scale/color); resolved in setFromTo().
    private Object from = null;
    private Object to = null;
    //rotation/alpha/brightness scalar bounds
    private float fromA = 0;
    private float toA = 0;
    //rotation axis: "X", "Y" or "Z" (case-insensitive)
    private String axis = "Y";
    //rotation/scale pivot offset
    private double[] offset = { 0, 0, 0 };
    //translation/scale vector bounds
    private double[] fromXYZ = { 0, 0, 0 };
    private double[] toXYZ = { 0, 0, 0 };
    //chained/parallel: names of the composed transforms
    private String[] transforms = {};
    //common timing parameters
    private int ticks = 0;
    private int delay = 0;
    private int loops = 1;
    /**
     * Builds a new {@link Transformation} from this {@link Transform}.
     *
     * @param importer the importer, used to resolve nested transform names
     * @return the transformation, or {@code null} if no type was given
     */
    private Transformation<?, ?> build(AnimationImporter importer)
    {
        if (type == null)
            return null;

        // Normalize the polymorphic "from"/"to" values before dispatching.
        setFromTo();

        switch (type)
        {
            case TRANSLATION:
                return buildTranslation();
            case ROTATION:
                return buildRotation();
            case SCALE:
                return buildScale();
            case CHAINED:
                return buildChained(importer);
            case PARALLEL:
                return buildParallel(importer);
            case COLOR:
                return buildColor();
            case ALPHA:
                return buildAlpha();
            case BRIGHTNESS:
                return buildBrightness();
        }
        return null;
    }
/**
* Sets from and to based on type.<br>
* Both float and array are possible for "from" and "to" depending on the type
*/
@SuppressWarnings("unchecked")
private void setFromTo()
{
if (from instanceof List)
{
List<Double> f = (List<Double>) from;
fromXYZ = new double[] { f.get(0), f.get(1), f.get(2) };
}
if (to instanceof List)
{
List<Double> t = (List<Double>) to;
toXYZ = new double[] { t.get(0), t.get(1), t.get(2) };
}
if (from instanceof Number)
fromA = (float) from;
if (to instanceof Number)
toA = (float) to;
}
    /**
     * Gets a new {@link Transformation} from this {@link Transform}.
     *
     * @param importer the importer, used to resolve nested transforms
     * @return the transformation, cast to apply to {@link Shape}
     */
    @SuppressWarnings("unchecked")
    public Transformation<?, Shape> getTransformation(AnimationImporter importer)
    {
        return (Transformation<?, Shape>) build(importer);
    }
/**
* Builds the {@link Translation}.
*
* @return the translation
*/
public Translation buildTranslation()
{
Translation t = new Translation((float) fromXYZ[0], (float) fromXYZ[1], (float) fromXYZ[2], (float) toXYZ[0], (float) toXYZ[1],
(float) toXYZ[2]);
t.forTicks(ticks, delay);
t.loop(loops);
return t;
}
/**
* Builds the {@link Rotation}.
*
* @return the rotation
*/
public Rotation buildRotation()
{
Rotation r = new Rotation(fromA, toA);
axis = axis.toLowerCase();
r.aroundAxis(axis.equals("x") ? 1 : 0, axis.equals("y") ? 1 : 0, axis.equals("z") ? 1 : 0);
r.offset((float) offset[0], (float) offset[1], (float) offset[2]);
r.forTicks(ticks, delay);
r.loop(loops);
return r;
}
/**
* Builds the {@link Scale}.
*
* @return the scale
*/
public Scale buildScale()
{
Scale s = new Scale((float) fromXYZ[0], (float) fromXYZ[1], (float) fromXYZ[2], (float) toXYZ[0], (float) toXYZ[1],
(float) toXYZ[2]);
s.offset((float) offset[0], (float) offset[1], (float) offset[2]);
s.forTicks(ticks, delay);
s.loop(loops);
return s;
}
/**
* Builds the {@link ChainedTransformation}.
*
* @param importer the importer
* @return the chained transformation
*/
public ChainedTransformation buildChained(AnimationImporter importer)
{
Transformation<?, ?>[] transfos = Lists.newArrayList(transforms)
.stream()
.map(importer::getTransform)
.toArray(Transformation<?, ?>[]::new);
return new ChainedTransformation(transfos);
}
/**
* Builds the {@link ParallelTransformation}.
*
* @param importer the importer
* @return the parallel transformation
*/
public ParallelTransformation buildParallel(AnimationImporter importer)
{
Transformation<?, ?>[] transfos = Lists.newArrayList(transforms)
.stream()
.map(importer::getTransform)
.toArray(Transformation<?, ?>[]::new);
return new ParallelTransformation(transfos);
}
    /**
     * Builds the {@link ColorTransform} described by this {@link Transform}.
     *
     * @return the color transform
     */
    public ColorTransform buildColor()
    {
        //make sure default is white when a bound was not specified in the JSON
        if (from == null)
            fromXYZ = new double[] { 1, 1, 1 };
        if (to == null)
            toXYZ = new double[] { 1, 1, 1 };

        ColorTransform ct = new ColorTransform(getColor(fromXYZ), getColor(toXYZ));
        ct.forTicks(ticks, delay);
        ct.loop(loops);
        return ct;
    }
/**
* Builds the {@link AlphaTransform}.
*
* @return the alpha transform
*/
public AlphaTransform buildAlpha()
{
AlphaTransform at = new AlphaTransform((int) (fromA * 255), (int) (toA * 255));
at.forTicks(ticks, delay);
at.loop(loops);
return at;
}
/**
* Builds the {@link BrightnessTransform}.
*
* @return the brightness transform
*/
public BrightnessTransform buildBrightness()
{
BrightnessTransform bt = new BrightnessTransform((int) (fromA * 14), (int) (toA * 14));
bt.forTicks(ticks, delay);
bt.loop(loops);
return bt;
}
/**
* Gets the color form the array.
*
* @param xyz the xyz
* @return the color
*/
private int getColor(double[] xyz)
{
int r = (int) (MathHelper.clamp_double(xyz[0], 0, 1) * 255);
int g = (int) (MathHelper.clamp_double(xyz[0], 0, 1) * 255);
int b = (int) (MathHelper.clamp_double(xyz[0], 0, 1) * 255);
return (r << 16) | (g << 8) | b;
}
}
/** One "anims" entry from the JSON; fields are populated by Gson by name. */
private static class Anim
{
    /** Group/shape name the {@link Transformation} is applied to. */
    private String group;
    /** The {@link Transform} name. */
    private String transform;
    /** Whether the transformation should still be applied after the animation is finished. */
    private boolean persist = true;
    /** Whether the transform should be animated backwards. */
    private boolean reversed = false;
}
}
|
package net.onrc.onos.ofcontroller.core.internal;
import java.util.ArrayList;
import java.util.List;
import net.floodlightcontroller.routing.Link;
import net.onrc.onos.graph.DBOperation;
import net.onrc.onos.ofcontroller.core.ILinkStorage;
import net.onrc.onos.ofcontroller.core.INetMapTopologyObjects.IDeviceObject;
import net.onrc.onos.ofcontroller.core.INetMapTopologyObjects.IPortObject;
import net.onrc.onos.ofcontroller.core.INetMapTopologyObjects.ISwitchObject;
import net.onrc.onos.ofcontroller.linkdiscovery.LinkInfo;
import org.openflow.util.HexString;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.tinkerpop.blueprints.impls.ramcloud.PerfMon;
import net.onrc.onos.graph.GraphDBManager;
/**
* This is the class for storing the information of links into GraphDB
*/
public class LinkStorageImpl implements ILinkStorage {
protected final static Logger log = LoggerFactory.getLogger(LinkStorageImpl.class);
protected DBOperation dbop;
private static PerfMon pm = PerfMon.getInstance();
/**
 * Initialize the object. Open LinkStorage using given configuration file.
 *
 * @param dbStore backend identifier — NOTE(review): currently unused; the
 *        DBOperation is obtained without arguments. Confirm intended.
 * @param conf Path (absolute path for now) to configuration file — also
 *        currently unused here.
 */
@Override
public void init(final String dbStore, final String conf) {
    this.dbop = GraphDBManager.getDBOperation();
}
// Method design policy:
// op.commit() and op.rollback() MUST be called only in public (first-class) methods.
// A first-class method MUST NOT call another first-class method.
// Routine processing should be implemented in private methods.
// A private method MUST NOT call commit or rollback.
/**
 * Update a record in the LinkStorage in a way provided by dmop.
 * Commits on success; rolls back if the implementation throws.
 *
 * @param link Record of a link to be updated.
 * @param linkinfo Meta-information of a link to be updated (used by UPDATE only).
 * @param dmop Operation to be done (CREATE/INSERT, UPDATE, DELETE).
 * @return true if the operation was applied and committed.
 */
@Override
public boolean update(Link link, LinkInfo linkinfo, DM_OPERATION dmop) {
    boolean success = false;

    switch (dmop) {
    case CREATE:
    case INSERT:
        if (link != null) {
            try {
                if (addLinkImpl(link)) {
                    dbop.commit();
                    success = true;
                }
            } catch (Exception e) {
                dbop.rollback();
                e.printStackTrace();
                log.error("LinkStorageImpl:update {} link:{} failed", dmop, link);
            }
        }
        break;
    case UPDATE:
        // UPDATE requires both the link and its meta-information.
        if (link != null && linkinfo != null) {
            try {
                if (setLinkInfoImpl(link, linkinfo)) {
                    dbop.commit();
                    success = true;
                }
            } catch (Exception e) {
                dbop.rollback();
                e.printStackTrace();
                log.error("LinkStorageImpl:update {} link:{} failed", dmop, link);
            }
        }
        break;
    case DELETE:
        if (link != null) {
            try {
                if (deleteLinkImpl(link)) {
                    dbop.commit();
                    success = true;
                    log.debug("LinkStorageImpl:update {} link:{} succeeded", dmop, link);
                } else {
                    dbop.rollback();
                    log.debug("LinkStorageImpl:update {} link:{} failed", dmop, link);
                }
            } catch (Exception e) {
                dbop.rollback();
                e.printStackTrace();
                log.error("LinkStorageImpl:update {} link:{} failed", dmop, link);
            }
        }
        break;
    }
    return success;
}
/** Adds a link without meta-information; delegates to {@link #addLink(Link, LinkInfo)}. */
@Override
public boolean addLink(Link link) {
    return addLink(link, null);
}
/**
 * Removes all device attachment points from the given switch port; used
 * when a new link is added on that port (see the log message below).
 *
 * @param dpid DPID of the switch.
 * @param number port number on the switch.
 */
private void deleteDeviceOnPort(Long dpid, Short number)
{
    IPortObject srcPortObject = dbop.searchPort(HexString.toHexString(dpid), number);
    if (srcPortObject == null)
        return;
    Iterable<IDeviceObject> devices = srcPortObject.getDevices();
    if (devices == null)
        return;
    // NOTE(review): the hasNext() guard is redundant with the for loop, and
    // the loop re-queries getDevices() instead of reusing 'devices'; also the
    // log call mixes string concatenation with {} placeholders.
    if (devices.iterator().hasNext()) {
        for (IDeviceObject deviceObject: srcPortObject.getDevices()) {
            srcPortObject.removeDevice(deviceObject);
            log.debug("delete Device "+ deviceObject.getMACAddress() +
                    " from sw: {} port: {} due to a new link added",
                    dpid, number);
        }
    }
}
/**
 * Adds a link (and optionally its meta-information) to the LinkStorage.
 * Device attachment points on both endpoints are removed first.
 *
 * @param link the link to add.
 * @param linfo meta-information to attach, or null for none.
 * @return true if the link was added and committed.
 */
@Override
public boolean addLink(Link link, LinkInfo linfo) {
    boolean success = false;

    try {
        //delete the Device attachment points for the related switch and port
        deleteDeviceOnPort(link.getSrc(),link.getSrcPort());
        deleteDeviceOnPort(link.getDst(),link.getDstPort());

        pm.addlink_start();
        if (addLinkImpl(link)) {
            // Set LinkInfo only if linfo is non-null.
            if (linfo != null && (! setLinkInfoImpl(link, linfo))) {
                log.debug("Adding linkinfo failed: {}", link);
                dbop.rollback();
            }
            // NOTE(review): when the linkinfo write fails we roll back and
            // then still commit and report success=true — confirm whether a
            // failed setLinkInfoImpl should instead abort the whole add.
            dbop.commit();
            pm.addlink_end();
            success = true;
        } else {
            pm.addlink_end();
            // If we fail here that's because the ports aren't added
            // before we try to add the link
            log.debug("Adding link failed: {}", link);
            dbop.rollback();
        }
    } catch (Exception e) {
        dbop.rollback();
        e.printStackTrace();
        log.error("LinkStorageImpl:addLink link:{} linfo:{} failed", link, linfo);
    }
    return success;
}
/**
 * Add multiple link records to the LinkStorage in a single commit.
 * (Doc fix: the previous comment described an "op" parameter that this
 * method does not have.)
 *
 * @param links List of records to be added.
 * @return true if every link was added and the transaction committed.
 */
@Override
public boolean addLinks(List<Link> links) {
    boolean success = false;

    for (Link lt: links) {
        // NOTE(review): an early false return leaves prior addLinkImpl
        // mutations neither committed nor rolled back — confirm intended.
        if (! addLinkImpl(lt)) {
            return false;
        }
    }

    try {
        dbop.commit();
        success = true;
    } catch (Exception e) {
        dbop.rollback();
        e.printStackTrace();
        log.error("LinkStorageImpl:addLinks link:s{} failed", links);
    }
    return success;
}
/**
 * Delete a record in the LinkStorage. Commits on success, rolls back on
 * failure or exception.
 *
 * @param lt Record to be deleted.
 * @return true if the link was deleted and committed.
 */
@Override
public boolean deleteLink(Link lt) {
    boolean success = false;

    log.debug("LinkStorageImpl:deleteLink(): {}", lt);

    try {
        if (deleteLinkImpl(lt)) {
            dbop.commit();
            success = true;
            log.debug("LinkStorageImpl:deleteLink(): deleted edges {}", lt);
        } else {
            dbop.rollback();
            log.error("LinkStorageImpl:deleteLink(): failed invalid vertices {}", lt);
        }
    } catch (Exception e) {
        dbop.rollback();
        log.error("LinkStorageImpl:deleteLink(): failed {} {}",
                new Object[]{lt, e.toString()});
        e.printStackTrace();
    }

    return success;
}
/**
 * Delete multiple records in LinkStorage in a single transaction; rolls
 * back as soon as one delete fails.
 *
 * @param links List of records to be deleted.
 * @return true if every link was deleted and the transaction committed.
 */
@Override
public boolean deleteLinks(List<Link> links) {
    boolean success = false;

    try {
        for (Link lt : links) {
            if (! deleteLinkImpl(lt)) {
                dbop.rollback();
                return false;
            }
        }
        dbop.commit();
        success = true;
    } catch (Exception e) {
        dbop.rollback();
        e.printStackTrace();
        log.error("LinkStorageImpl:deleteLinks failed invalid vertices {}", links);
    }

    return success;
}
/**
 * Get list of all links that originate at the port specified by given DPID
 * and port number.
 *
 * @param dpid DPID of desired port.
 * @param port Port number of desired port.
 * @return List of links. Empty list if no port was found.
 */
@Override
public List<Link> getLinks(Long dpid, short port) {
    List<Link> result = new ArrayList<Link>();

    IPortObject srcPort = dbop.searchPort(HexString.toHexString(dpid), port);
    if (srcPort == null) {
        return result;
    }
    ISwitchObject srcSw = srcPort.getSwitch();
    if (srcSw == null) {
        return result;
    }

    for (IPortObject dstPort : srcPort.getLinkedPorts()) {
        ISwitchObject dstSw = dstPort.getSwitch();
        // Skip dangling ports without a switch.
        if (dstSw == null) {
            continue;
        }
        result.add(new Link(dpid, port,
                HexString.toLong(dstSw.getDPID()),
                dstPort.getNumber()));
    }

    return result;
}
/**
 * Get list of all reverse links connected to the port specified by given
 * DPID and port number.
 *
 * @param dpid DPID of desired port.
 * @param port Port number of desired port.
 * @return List of reverse links. Empty list if no port was found.
 */
@Override
public List<Link> getReverseLinks(Long dpid, short port) {
    List<Link> result = new ArrayList<Link>();

    IPortObject srcPort = dbop.searchPort(HexString.toHexString(dpid), port);
    if (srcPort == null) {
        return result;
    }
    ISwitchObject srcSw = srcPort.getSwitch();
    if (srcSw == null) {
        return result;
    }

    for (IPortObject dstPort : srcPort.getReverseLinkedPorts()) {
        ISwitchObject dstSw = dstPort.getSwitch();
        // Skip dangling ports without a switch.
        if (dstSw == null) {
            continue;
        }
        result.add(new Link(HexString.toLong(dstSw.getDPID()),
                dstPort.getNumber(),
                dpid, port));
    }

    return result;
}
/**
 * Delete records of the links connected to the port specified by given DPID
 * and port number, in a single transaction.
 *
 * @param dpid DPID of desired port.
 * @param port Port number of desired port.
 * @return true if every link on the port was deleted and committed.
 */
@Override
public boolean deleteLinksOnPort(Long dpid, short port) {
    boolean success = false;

    List<Link> linksToDelete = getLinks(dpid, port);

    try {
        for(Link l : linksToDelete) {
            if (! deleteLinkImpl(l)) {
                dbop.rollback();
                log.error("LinkStorageImpl:deleteLinksOnPort dpid:{} port:{} failed", dpid, port);
                return false;
            }
        }
        dbop.commit();
        success = true;
    } catch (Exception e) {
        dbop.rollback();
        e.printStackTrace();
        log.error("LinkStorageImpl:deleteLinksOnPort dpid:{} port:{} failed", dpid, port);
    }

    return success;
}
/**
* Get list of all links connected to the switch specified by given DPID.
* @param dpid DPID of desired switch.
* @return List of links. Empty list if no port was found.
*/
@Override
public List<Link> getLinks(String dpid) {
List<Link> links = new ArrayList<Link>();
ISwitchObject srcSw = dbop.searchSwitch(dpid);
if(srcSw != null) {
for(IPortObject srcPort : srcSw.getPorts()) {
for(IPortObject dstPort : srcPort.getLinkedPorts()) {
ISwitchObject dstSw = dstPort.getSwitch();
if(dstSw != null) {
Link link = new Link(HexString.toLong(dpid),
srcPort.getNumber(),
HexString.toLong(dstSw.getDPID()),
dstPort.getNumber());
links.add(link);
}
}
}
}
return links;
}
/**
* Get list of all reverse links connected to the switch specified by
* given DPID.
* @param dpid DPID of desired switch.
* @return List of reverse links. Empty list if no port was found.
*/
@Override
public List<Link> getReverseLinks(String dpid) {
List<Link> links = new ArrayList<Link>();
ISwitchObject srcSw = dbop.searchSwitch(dpid);
if(srcSw != null) {
for(IPortObject srcPort : srcSw.getPorts()) {
for(IPortObject dstPort : srcPort.getReverseLinkedPorts()) {
ISwitchObject dstSw = dstPort.getSwitch();
if(dstSw != null) {
Link link = new Link(
HexString.toLong(dstSw.getDPID()),
dstPort.getNumber(),
HexString.toLong(dpid),
srcPort.getNumber());
links.add(link);
}
}
}
}
return links;
}
/**
* Get list of all links whose state is ACTIVE.
* @return List of active links. Empty list if no port was found.
*/
@Override
public List<Link> getActiveLinks() {
Iterable<ISwitchObject> switches = dbop.getActiveSwitches();
List<Link> links = new ArrayList<Link>();
for (ISwitchObject srcSw : switches) {
for(IPortObject srcPort : srcSw.getPorts()) {
for(IPortObject dstPort : srcPort.getLinkedPorts()) {
ISwitchObject dstSw = dstPort.getSwitch();
if(dstSw != null && dstSw.getState().equals("ACTIVE")) {
links.add(new Link(HexString.toLong(srcSw.getDPID()),
srcPort.getNumber(),
HexString.toLong(dstSw.getDPID()),
dstPort.getNumber()));
}
}
}
}
return links;
}
    /**
     * Get meta-information about the given link.
     * @param link the link to look up.
     * @return currently always null — not yet implemented.
     */
    @Override
    public LinkInfo getLinkInfo(Link link) {
        // TODO implement this
        return null;
    }
    /**
     * Finalize the object.
     */
    // NOTE(review): finalizers are unreliable (not guaranteed to run) and this
    // override does not call super.finalize() — confirm callers invoke close()
    // explicitly rather than relying on GC.
    @Override
    protected void finalize() {
        close();
    }
    /**
     * Close LinkStorage.
     */
    // Currently a no-op: the underlying graph shutdown is intentionally left
    // commented out (presumably the graph connection is shared — TODO confirm).
    @Override
    public void close() {
        // TODO Auto-generated method stub
        // graph.shutdown();
    }
    /**
     * Update a record of link with meta-information in the LinkStorage.
     * @param link Record of a link to update.
     * @param linkinfo Meta-information of a link to be updated.
     * @return currently always false — not yet implemented.
     */
    private boolean setLinkInfoImpl(Link link, LinkInfo linkinfo) {
        // TODO implement this
        return false;
    }
private boolean addLinkImpl(Link lt) {
boolean success = false;
IPortObject vportSrc = null, vportDst = null;
// get source port vertex
String dpid = HexString.toHexString(lt.getSrc());
short port = lt.getSrcPort();
log.debug("addLinkImpl Src dpid : {} port : {}", dpid, port);
vportSrc = dbop.searchPort(dpid, port);
// get dest port vertex
dpid = HexString.toHexString(lt.getDst());
port = lt.getDstPort();
log.debug("addLinkImpl Dst dpid : {} port : {}", dpid, port);
vportDst = dbop.searchPort(dpid, port);
log.debug("addLinkImpl vportSrc : {} vportDst : {}", vportSrc, vportDst);
if (vportSrc != null && vportDst != null) {
IPortObject portExist = null;
// check if the link exists
for (IPortObject V : vportSrc.getLinkedPorts()) {
log.debug("vportSrc.getLinkedPorts() :{}", V);
if (V.equals(vportDst)) {
portExist = V;
break;
}
}
if (portExist == null) {
vportSrc.setLinkPort(vportDst);
success = true;
} else {
log.error("LinkStorageImpl:addLinkImpl failed link exists {}",
new Object[]{lt});
}
} else {
log.error("Ports not found : {}", lt);
}
return success;
}
private boolean deleteLinkImpl(Link lt) {
boolean success = false;
IPortObject vportSrc = null, vportDst = null;
// get source port vertex
String dpid = HexString.toHexString(lt.getSrc());
short port = lt.getSrcPort();
vportSrc = dbop.searchPort(dpid, port);
// get dst port vertex
dpid = HexString.toHexString(lt.getDst());
port = lt.getDstPort();
vportDst = dbop.searchPort(dpid, port);
// FIXME: This needs to remove all edges
if (vportSrc != null && vportDst != null) {
vportSrc.removeLink(vportDst);
log.debug("deleteLinkImpl(): deleted edge {} src {} dst {}", new Object[]{
lt, vportSrc, vportDst});
success = true;
}
return success;
}
}
|
package nl.armatiek.xslweb.saxon.functions.expath.file;
import java.io.File;
import java.io.OutputStream;
import java.util.Properties;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import net.sf.saxon.dom.NodeOverNodeInfo;
import net.sf.saxon.expr.XPathContext;
import net.sf.saxon.om.Item;
import net.sf.saxon.om.NodeInfo;
import net.sf.saxon.om.SequenceIterator;
import net.sf.saxon.trans.XPathException;
import net.sf.saxon.tree.iter.SingletonIterator;
import net.sf.saxon.value.BooleanValue;
import net.sf.saxon.value.StringValue;
import nl.armatiek.xslweb.saxon.functions.expath.file.error.ExpectedFileException;
import nl.armatiek.xslweb.saxon.functions.expath.file.error.FILE0003Exception;
import nl.armatiek.xslweb.saxon.functions.expath.file.error.FILE0004Exception;
import nl.armatiek.xslweb.saxon.functions.expath.file.error.FILE9999Exception;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
public class WriteCall extends FileExtensionFunctionCall {

  private static final long serialVersionUID = 1L;

  /** When true, content is appended to an existing file instead of replacing it. */
  private boolean append;

  public WriteCall(boolean append) {
    this.append = append;
  }

  /**
   * Serializes a node to the given output stream using an identity transform,
   * applying the supplied serialization output properties when present.
   *
   * @param nodeInfo node to serialize (Saxon NodeInfo implements javax.xml.transform.Source)
   * @param os destination stream (not closed by this method)
   * @param outputProperties serialization parameters, may be null
   */
  protected void serialize(NodeInfo nodeInfo, OutputStream os, Properties outputProperties) throws Exception {
    TransformerFactory factory = TransformerFactory.newInstance();
    Transformer transformer = factory.newTransformer();
    if (outputProperties != null) {
      transformer.setOutputProperties(outputProperties);
    }
    transformer.transform(nodeInfo, new StreamResult(os));
  }

  /**
   * Implements file:write()/file:append(): writes the items in arguments[1] to the
   * file named by arguments[0], optionally honoring serialization parameters
   * passed as an element tree in arguments[2].
   *
   * @return singleton iterator over BooleanValue.TRUE on success
   * @throws XPathException wrapping FILE0003 (missing parent dir), FILE0004
   *         (path is a directory) or FILE9999 (any other failure)
   */
  @SuppressWarnings("rawtypes")
  public SequenceIterator<BooleanValue> call(SequenceIterator[] arguments, XPathContext context) throws XPathException {
    try {
      File file = getFile(((StringValue) arguments[0].next()).getStringValue());
      File parentFile = file.getParentFile();
      // Robustness: getParentFile() may return null for a pathless name; only
      // report FILE0003 when a parent is present but does not exist.
      if (parentFile != null && !parentFile.exists()) {
        throw new FILE0003Exception(parentFile);
      }
      if (file.isDirectory()) {
        throw new FILE0004Exception(file);
      }
      Properties outputProperties = new Properties();
      if (arguments.length > 2) {
        NodeInfo nodeInfo = (NodeInfo) arguments[2].next();
        NodeOverNodeInfo nodeOverNodeInfo = NodeOverNodeInfo.wrap(nodeInfo);
        Element serParamElem = nodeOverNodeInfo.getOwnerDocument().getDocumentElement();
        // BUG FIX: the previous loop advanced to getNextSibling() before inspecting
        // the current node, which silently dropped the first serialization parameter.
        for (Node child = serParamElem.getFirstChild(); child != null; child = child.getNextSibling()) {
          if (child.getNodeType() != Node.ELEMENT_NODE) {
            continue;
          }
          outputProperties.put(child.getLocalName(), ((Element) child).getAttribute("value"));
        }
      }
      OutputStream os = FileUtils.openOutputStream(file, append);
      try {
        SequenceIterator itemsArg = arguments[1];
        Item item;
        while ((item = itemsArg.next()) != null) {
          if (item instanceof NodeInfo) {
            // Nodes are serialized as XML; everything else is written as text.
            serialize((NodeInfo) item, os, outputProperties);
          } else {
            IOUtils.write(item.toString(), os);
          }
        }
      } finally {
        os.close();
      }
      return SingletonIterator.makeIterator(BooleanValue.TRUE);
    } catch (ExpectedFileException e) {
      // Expected file errors carry their own XPath error codes; rethrow as-is.
      throw e;
    } catch (Exception e) {
      throw new FILE9999Exception(e);
    }
  }
}
|
package org.buddycloud.channelserver.queue;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.log4j.Logger;
import org.buddycloud.channelserver.ChannelsEngine;
import org.buddycloud.channelserver.packetprocessor.iq.namespace.pubsub.JabberPubsub;
import org.dom4j.Attribute;
import org.dom4j.Element;
import org.dom4j.Namespace;
import org.dom4j.dom.DOMElement;
import org.hsqldb.Server;
import org.xmpp.component.ComponentException;
import org.xmpp.packet.IQ;
import org.xmpp.packet.JID;
import org.xmpp.packet.Packet;
import org.xmpp.packet.PacketError;
/**
 * Routes stanzas addressed to remote (federated) buddycloud domains. For each
 * new remote domain it runs XMPP service discovery (disco#items, then
 * disco#info on each reported item) to locate the domain's channel-server
 * component, parking outgoing packets until discovery completes, and routes
 * responses back to the original local requester.
 */
public class FederatedQueueManager {
    private static final Logger logger = Logger
            .getLogger(FederatedQueueManager.class);

    // Per-domain discovery states stored in remoteChannelDiscoveryStatus.
    public static final String NO_CHANNEL_SERVER = "NO_CHANNEL_SERVER";
    public static final String DISCO_ITEMS = "DISCO_ITEMS";
    public static final String DISCO_INFO = "DISCO_INFO";
    public static final String DISCOVERED = "DISCOVERED";
    // disco#info identity type marking a channel server.
    public static final String IDENTITY_TYPE_CHANNELS = "channels";
    // disco#items item name that identifies a buddycloud server directly.
    public static final String BUDDYCLOUD_SERVER = "buddycloud-server";

    // Counter used to build unique ids for outgoing disco#info requests.
    // NOTE(review): getId() is not synchronized; concurrent callers could get
    // duplicate ids — confirm this class is driven from a single thread.
    private int id = 1;
    private final ChannelsEngine component;
    // remote domain -> JID (string) of its discovered channel-server component
    private ConcurrentHashMap<String, String> discoveredServers = new ConcurrentHashMap<String, String>();
    // remote domain -> packets parked until discovery of that domain completes
    private ConcurrentHashMap<String, List<Packet>> waitingStanzas = new ConcurrentHashMap<String, List<Packet>>();
    // remote domain -> one of the discovery-state constants above
    private ConcurrentHashMap<String, String> remoteChannelDiscoveryStatus = new ConcurrentHashMap<String, String>();
    // remote domain -> number of disco#info responses still outstanding
    private ConcurrentHashMap<String, Integer> remoteServerItemsToProcess = new ConcurrentHashMap<String, Integer>();
    // disco#info request id -> remote domain the request was issued for
    private ConcurrentHashMap<String, String> remoteServerInfoRequestIds = new ConcurrentHashMap<String, String>();
    // outgoing packet id -> local JID awaiting the federated response
    private ConcurrentHashMap<String, JID> sentRemotePackets = new ConcurrentHashMap<String, JID>();
    // outgoing packet id -> pubsub node name extracted from that packet
    private ConcurrentHashMap<String, String> nodeMap = new ConcurrentHashMap<String, String>();
    private String localServer;
    // NOTE(review): never assigned or read in this class — appears to be dead state.
    private BlockingQueue<Packet> federatedResponseQueue;

    /**
     * @param component engine used to send all outgoing packets
     * @param localServer JID (string) used as the 'from' of federated packets
     */
    public FederatedQueueManager(ChannelsEngine component, String localServer) {
        this.component = component;
        this.localServer = localServer;
    }

    // Returns the current counter value and post-increments it.
    private int getId() {
        int id = this.id;
        this.id++;
        return id;
    }

    /**
     * Entry point for an outbound federated packet: remembers the original
     * sender (by packet id), then either forwards immediately (domain already
     * discovered), starts discovery, or fails fast if the domain is known to
     * have no channel server. Otherwise the packet is parked per-domain.
     */
    public void process(Packet packet) throws ComponentException {
        String to = packet.getTo().toString();
        // Remember who asked, keyed by packet id, so the response can be routed back.
        sentRemotePackets.put(packet.getID(), packet.getFrom());
        packet.setFrom(localServer);
        try {
            extractNodeDetails(packet);
            // Do we have a map already?
            if (discoveredServers.containsKey(to)) {
                packet.setTo(new JID(discoveredServers.get(to)));
                sendPacket(packet.createCopy());
                return;
            }
            // Are we already discovering a remote server?
            if (false == remoteChannelDiscoveryStatus.containsKey(to)) {
                discoverRemoteChannelServer(to, packet.getID());
            } else if (remoteChannelDiscoveryStatus.get(to).equals(
                    NO_CHANNEL_SERVER)) {
                // Discovery previously concluded there is no channel server there.
                logger.error("No remote channel server for " + to);
                IQ reply = IQ.createResultIQ((IQ) packet);
                reply.setError(new PacketError(
                        PacketError.Condition.remote_server_not_found,
                        PacketError.Type.cancel));
                component.sendPacket(reply);
                return;
            }
            // Add packet to list
            if (false == waitingStanzas.containsKey(to)) {
                waitingStanzas.put(to, new ArrayList<Packet>());
            }
            logger.debug("Adding packet to waiting stanza list for " + to
                    + " (size " + waitingStanzas.get(to).size() + ")");
            waitingStanzas.get(to).add(packet);
        } catch (Exception e) {
            logger.error(e);
        }
    }

    /**
     * Best-effort extraction of the pubsub node name from the raw stanza XML
     * (first node="..." attribute), recorded against the packet id so the node
     * can be recovered when the response arrives.
     */
    private void extractNodeDetails(Packet packet) {
        try {
            String packetXml = packet.toXML();
            if (false == packetXml.contains("node=")) return;
            // Crude string-split parse of the first node="..." attribute.
            nodeMap.put(
                packet.getID(),
                packetXml.split("node=\"")[1].split("\"")[0]
            );
        } catch (NullPointerException e) {
            logger.info("No node details found in federated packet");
            logger.error(e);
        } catch (ArrayIndexOutOfBoundsException e) {
            logger.info("Error extracting node information from federated packet");
            logger.error(e);
        }
    }

    // Logs and sends a copy of the packet via the component.
    private void sendPacket(Packet packet) throws ComponentException {
        logger.info("OUT:FQM-> " + packet.toXML());
        component.sendPacket(packet.createCopy());
    }

    /**
     * Starts discovery for a domain by sending a disco#items query, reusing
     * the original packet id, and marks the domain as in the DISCO_ITEMS phase.
     */
    private void discoverRemoteChannelServer(String remoteDomain, String id)
            throws ComponentException {
        logger.info("Attemping to discover remote server " + remoteDomain);
        IQ discover = new IQ(IQ.Type.get);
        discover.setFrom(localServer);
        discover.setTo(remoteDomain);
        discover.setID(id);
        discover.getElement().addElement("query", JabberPubsub.NS_DISCO_ITEMS);
        component.sendPacket(discover);
        remoteChannelDiscoveryStatus.put(remoteDomain, DISCO_ITEMS);
    }

    /**
     * Handles a disco#items response. If any item is named "buddycloud-server"
     * the domain is resolved immediately; otherwise a disco#info request is
     * sent to every item and the outstanding-response count is recorded.
     */
    public void sendInfoRequests(JID from, List<Element> items)
            throws ComponentException {
        for (Element item : items) {
            Attribute name = item.attribute("name");
            if ((null != name)
                    && (true == name.getStringValue().equals(BUDDYCLOUD_SERVER))) {
                // Shortcut: the item itself advertises a buddycloud server.
                remoteChannelDiscoveryStatus.put(from.toString(), DISCOVERED);
                setDiscoveredServer(from.toString(), item.attributeValue("jid"));
                sendFederatedRequests(from.toString());
                return;
            }
        }
        // One IQ object is reused as a template; a fresh copy is sent per item.
        IQ infoRequest = new IQ(IQ.Type.get);
        infoRequest.setFrom(localServer);
        infoRequest.getElement()
                .addElement("query", JabberPubsub.NS_DISCO_INFO);
        remoteServerItemsToProcess.put(from.toString(), items.size());
        String infoRequestId;
        for (Element item : items) {
            infoRequestId = getId() + ":info";
            infoRequest.setTo(item.attributeValue("jid"));
            infoRequest.setID(infoRequestId);
            remoteServerInfoRequestIds.put(infoRequestId, from.toString());
            component.sendPacket(infoRequest.createCopy());
        }
        remoteChannelDiscoveryStatus.put(from.toString(), DISCO_INFO);
    }

    // Records the channel-server JID (handler) for a remote domain (server).
    private void setDiscoveredServer(String server, String handler) {
        discoveredServers.put(server, handler);
    }

    /**
     * Handles one disco#info response: decrements the outstanding count for
     * the originating domain, marks the domain discovered if a "channels"
     * identity is present, and, once all responses are in, either flushes the
     * queue or reports remote-server-not-found to every parked packet.
     */
    public void processInfoResponses(JID from, String id,
            List<Element> identities) throws ComponentException {
        String originatingServer = remoteServerInfoRequestIds.get(id);
        // Ignore responses whose id we did not issue (or already handled).
        if (null == originatingServer) return;
        remoteServerInfoRequestIds.remove(id);
        remoteServerItemsToProcess.put(originatingServer,
                remoteServerItemsToProcess.get(originatingServer) - 1);
        String identityType;
        for (Element identity : identities) {
            identityType = identity.attributeValue("type");
            if ((identityType != null)
                    && (true == identityType.equals(IDENTITY_TYPE_CHANNELS))) {
                setDiscoveredServer(originatingServer, from.toString());
                sendFederatedRequests(originatingServer);
            }
        }
        if (remoteServerItemsToProcess.get(originatingServer) < 1) {
            // All responses received; decide the final state of this domain.
            if (false == discoveredServers.containsKey(originatingServer)) {
                sendRemoteChannelServerNotFoundErrorResponses(originatingServer);
                remoteChannelDiscoveryStatus.put(originatingServer,
                        NO_CHANNEL_SERVER);
                waitingStanzas.remove(originatingServer);
            } else {
                remoteChannelDiscoveryStatus.put(originatingServer, DISCOVERED);
            }
        }
    }

    /**
     * Flushes every parked packet for a domain to its discovered channel
     * server, then clears the queue.
     */
    private void sendFederatedRequests(String originatingServer)
            throws ComponentException {
        String remoteServer = discoveredServers.get(originatingServer);
        List<Packet> packetsToSend = waitingStanzas.get(originatingServer);
        if (null == packetsToSend) {
            return;
        }
        for (Packet packet : packetsToSend) {
            packet.setTo(remoteServer);
            sendPacket(packet);
        }
        waitingStanzas.remove(originatingServer);
    }

    /**
     * Replies to every parked packet for a domain with an item-not-found
     * error explaining that no pubsub channel service was discovered.
     */
    private void sendRemoteChannelServerNotFoundErrorResponses(String server)
            throws ComponentException {
        List<Packet> queued = waitingStanzas.get(server);
        if (null == queued) {
            return;
        }
        // Build the shared error payload once; it is attached to each reply.
        Element noRemoteServer = new DOMElement("text", new Namespace("",
                JabberPubsub.NS_PUBSUB_ERROR));
        noRemoteServer.setText("No pubsub channel service discovered for "
                + server);
        Element itemNotFound = new DOMElement(
                PacketError.Condition.item_not_found.toXMPP(), new Namespace(
                        "", JabberPubsub.NS_XMPP_STANZAS));
        Element error = new DOMElement("error");
        error.addAttribute("type", PacketError.Type.cancel.toXMPP());
        error.add(itemNotFound);
        error.add(noRemoteServer);
        IQ response;
        for (Packet packet : queued) {
            response = IQ.createResultIQ((IQ) packet);
            response.setFrom(localServer);
            response.setType(IQ.Type.error);
            response.setChildElement(error);
            component.sendPacket(response);
        }
    }

    /**
     * Routes a federated response back to the local JID that issued the
     * original request (matched by packet id), then forgets the mapping.
     *
     * @throws UnknownFederatedPacketException if the id was never sent by us
     */
    public void passResponseToRequester(IQ packet) throws Exception {
        if (false == sentRemotePackets.containsKey(packet.getID())) {
            throw new UnknownFederatedPacketException(
                    "Can not find original requesting packet! (ID:"
                            + packet.getID() + ")");
        }
        logger.debug("Forwarding remote packet to "
                + sentRemotePackets.get(packet.getID()) + " from "
                + packet.getFrom());
        packet.setTo(sentRemotePackets.get(packet.getID()));
        packet.setFrom(localServer);
        sentRemotePackets.remove(packet.getID());
        component.sendPacket(packet);
    }

    /**
     * Returns (and forgets) the pubsub node recorded for the given packet id,
     * or null if none was recorded.
     */
    public String getRelatedNodeForRemotePacket(IQ packet) {
        String id = null;
        if (nodeMap.containsKey(packet.getID()))
            id = nodeMap.get(packet.getID());
        nodeMap.remove(packet.getID());
        return id;
    }

    /**
     * Registers a domain as its own channel server (bypassing discovery) and
     * flushes any packets already parked for it.
     */
    public void addChannelMap(JID server) {
        setDiscoveredServer(server.getDomain(), server.getDomain());
        remoteChannelDiscoveryStatus.put(server.getDomain(), DISCOVERED);
        try {
            sendFederatedRequests(server.getDomain());
        } catch (ComponentException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            logger.error(e);
        }
    }
}
|
package org.camunda.bpm.extension.mockito.function;
import org.camunda.bpm.engine.repository.Deployment;
import org.camunda.bpm.engine.test.ProcessEngineRule;
import org.camunda.bpm.model.bpmn.BpmnModelInstance;
/**
 * @deprecated to avoid dependency to 4.12 this will be removed/altered with 4.10
 */
@Deprecated
public enum DeployProcess {
    INSTANCE;

    /**
     * Deploys the given BPMN model under the resource name "&lt;processId&gt;.bpmn"
     * and registers the deployment with the rule so it is cleaned up afterwards.
     *
     * @param rule engine rule providing the repository service
     * @param instance BPMN model to deploy
     * @param processId base name for the deployed resource
     * @return the created deployment
     */
    public Deployment apply(ProcessEngineRule rule, BpmnModelInstance instance, String processId) {
        final String resourceName = processId + ".bpmn";
        final Deployment deployment = rule.getRepositoryService()
                .createDeployment()
                .addModelInstance(resourceName, instance)
                .deploy();
        rule.manageDeployment(deployment);
        return deployment;
    }
}
|
package guitests;
import static org.junit.Assert.assertTrue;
import static seedu.taskboss.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT;
import org.junit.Test;
import guitests.guihandles.TaskCardHandle;
import seedu.taskboss.commons.core.Messages;
import seedu.taskboss.logic.commands.EditCommand;
import seedu.taskboss.model.category.Tag;
import seedu.taskboss.model.task.Information;
import seedu.taskboss.model.task.Name;
import seedu.taskboss.model.task.PriorityLevel;
import seedu.taskboss.testutil.TaskBuilder;
import seedu.taskboss.testutil.TestTask;
// TODO: reduce GUI tests by transferring some tests to be covered by lower level tests.
public class EditCommandTest extends TaskBossGuiTest {
// The list of tasks in the task list panel is expected to match this list.
// This list is updated with every successful call to assertEditSuccess().
TestTask[] expectedTasksList = td.getTypicalTasks();
@Test
public void edit_allFieldsSpecified_success() throws Exception {
String detailsToEdit = "Bobby p/1 i/Block 123, Bobby Street 3 t/husband";
int taskBossIndex = 1;
TestTask editedTask = new TaskBuilder().withName("Bobby").withPriorityLevel("1")
.withInformation("Block 123, Bobby Street 3").withTags("husband").build();
assertEditSuccess(taskBossIndex, taskBossIndex, detailsToEdit, editedTask);
}
@Test
public void edit_notAllFieldsSpecified_success() throws Exception {
String detailsToEdit = "t/sweetie t/bestie";
int taskBossIndex = 2;
TestTask taskToEdit = expectedTasksList[taskBossIndex - 1];
TestTask editedTask = new TaskBuilder(taskToEdit).withTags("sweetie", "bestie").build();
assertEditSuccess(taskBossIndex, taskBossIndex, detailsToEdit, editedTask);
}
@Test
public void edit_clearTags_success() throws Exception {
String detailsToEdit = "t/";
int taskBossIndex = 2;
TestTask taskToEdit = expectedTasksList[taskBossIndex - 1];
TestTask editedTask = new TaskBuilder(taskToEdit).withTags().build();
assertEditSuccess(taskBossIndex, taskBossIndex, detailsToEdit, editedTask);
}
@Test
public void edit_findThenEdit_success() throws Exception {
commandBox.runCommand("find n/Elle");
String detailsToEdit = "Belle";
int filteredTaskListIndex = 1;
int taskBossIndex = 5;
TestTask taskToEdit = expectedTasksList[taskBossIndex - 1];
TestTask editedTask = new TaskBuilder(taskToEdit).withName("Belle").build();
assertEditSuccess(filteredTaskListIndex, taskBossIndex, detailsToEdit, editedTask);
}
@Test
public void edit_missingTaskIndex_failure() {
commandBox.runCommand("edit Bobby");
assertResultMessage(String.format(MESSAGE_INVALID_COMMAND_FORMAT, EditCommand.MESSAGE_USAGE));
}
@Test
public void edit_invalidTaskIndex_failure() {
commandBox.runCommand("edit 8 Bobby");
assertResultMessage(Messages.MESSAGE_INVALID_TASK_DISPLAYED_INDEX);
}
@Test
public void edit_noFieldsSpecified_failure() {
commandBox.runCommand("edit 1");
assertResultMessage(EditCommand.MESSAGE_NOT_EDITED);
}
@Test
public void edit_invalidValues_failure() {
commandBox.runCommand("edit 1 *&");
assertResultMessage(Name.MESSAGE_NAME_CONSTRAINTS);
commandBox.runCommand("edit 1 p/abcd");
assertResultMessage(PriorityLevel.MESSAGE_PRIORITY_CONSTRAINTS);
commandBox.runCommand("edit 1 i/");
assertResultMessage(Information.MESSAGE_INFORMATION_CONSTRAINTS);
/**
* Checks whether the edited task has the correct updated details.
*
* @param filteredTaskListIndex index of task to edit in filtered list
* @param taskBossIndex index of task to edit in TaskBoss.
* Must refer to the same task as {@code filteredTaskListIndex}
* @param detailsToEdit details to edit the task with as input to the edit command
* @param editedTask the expected task after editing the task's details
*/
private void assertEditSuccess(int filteredTaskListIndex, int taskBossIndex,
String detailsToEdit, TestTask editedTask) {
commandBox.runCommand("edit " + filteredTaskListIndex + " " + detailsToEdit);
// confirm the new card contains the right data
TaskCardHandle editedCard = taskListPanel.navigateToTask(editedTask.getName().fullName);
assertMatching(editedTask, editedCard);
// confirm the list now contains all previous tasks plus the task with updated details
expectedTasksList[taskBossIndex - 1] = editedTask;
assertTrue(taskListPanel.isListMatching(expectedTasksList));
assertResultMessage(String.format(EditCommand.MESSAGE_EDIT_TASK_SUCCESS, editedTask));
}
}
|
package org.luxons.sevenwonders.game.data.definitions;
import org.luxons.sevenwonders.game.Settings;
/**
 * Represents a deserialized JSON definition of some data about the game. It is settings-agnostic. An instance of
 * in-game data can be generated from this, given specific game settings.
 *
 * @param <T>
 *            the type of in-game object that can be generated from this definition
 */
public interface Definition<T> {

    /**
     * Creates a T object from the given settings. This method mustn't mutate this Definition as it may be called
     * multiple times with different settings.
     * <p>
     * Implementations should therefore be stateless (or effectively immutable) with respect to this call.
     *
     * @param settings
     *            the game settings to use to generate a game-specific object from this definition
     *
     * @return the new game-specific object created from this definition
     */
    T create(Settings settings);
}
|
package guitests;
import static org.junit.Assert.*;
import static seedu.jimi.logic.commands.DeleteCommand.MESSAGE_DELETE_TASK_SUCCESS;
import org.junit.Test;
import seedu.jimi.commons.exceptions.IllegalValueException;
import seedu.jimi.model.task.Name;
import seedu.jimi.testutil.TestFloatingTask;
import seedu.jimi.testutil.TestUtil;
public class EditCommandTest extends AddressBookGuiTest {

    @Test
    public void edit() throws IllegalValueException {
        TestFloatingTask[] currentList = td.getTypicalTasks();

        //edit the first task in list with all details
        int targetIndex = 1;
        assertEditTaskSuccess(targetIndex, currentList);

        //edit the last task in list with only name changes
        currentList = TestUtil.removeTaskFromList(currentList, targetIndex);
        targetIndex = currentList.length;
        assertEditTaskSuccess(targetIndex, currentList);

        //edit the middle task of the list with only name changes
        currentList = TestUtil.removeTaskFromList(currentList, targetIndex);
        targetIndex = currentList.length / 2;
        assertEditTaskSuccess(targetIndex, currentList);

        //invalid index
        // BUG FIX: "edit " + currentList.length + 1 concatenated the digit '1'
        // (e.g. length 5 produced "edit 51") instead of computing length + 1.
        commandBox.runCommand("edit " + (currentList.length + 1));
        assertResultMessage("The task index provided is invalid");

        //TODO: edit the first event in the list with all details
    }

    /**
     * Edits the name of the task at {@code targetIndex} and verifies the panel
     * shows the renamed task and the result message is correct.
     *
     * @param targetIndex 1-based index of the task to edit
     * @param currentList current expected contents of the task list
     */
    private void assertEditTaskSuccess(int targetIndex, TestFloatingTask... currentList) throws IllegalValueException {
        final String newName = "Get rich or die coding";
        TestFloatingTask expectedTask = new TestFloatingTask();
        expectedTask.setName(new Name(newName)); //set up newTask with changed name

        //edit the name of the target task with the newName
        commandBox.runCommand("edit " + targetIndex + " " + newName);

        //confirm the list now contains all previous persons except the deleted person
        //TODO: change from checking last index to previous index of changed task
        assertTrue(taskListPanel.isListMatching(expectedTask));

        //confirm the result message is correct
        // NOTE(review): reuses MESSAGE_DELETE_TASK_SUCCESS for an edit — confirm
        // the edit command really responds with the delete-success message.
        assertResultMessage(String.format(MESSAGE_DELETE_TASK_SUCCESS, expectedTask));
    }
}
|
package org.takes.tk;
import java.io.IOException;
import java.net.URI;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Test;
import org.takes.Request;
import org.takes.Response;
import org.takes.Take;
import org.takes.http.FtRemote;
import org.takes.rq.RqFake;
import org.takes.rq.RqHref;
import org.takes.rq.RqMethod;
import org.takes.rs.RsPrint;
import org.takes.rs.RsText;
/**
 * Test case for {@link TkProxy}.
 * @author Dragan Bozanovic (bozanovicdr@gmail.com)
 * @version $Id$
 * @since 0.25
 * @todo #458:30min/DEV We need more tests for TkProxy.
 *  The tests should verify different combinations of request/response headers.
 * @checkstyle ClassDataAbstractionCouplingCheck (500 lines)
 */
public final class TkProxyTest {

    /**
     * An array of http methods for testing.
     */
    private static final String[] METHODS = {
        RqMethod.POST,
        RqMethod.GET,
        RqMethod.PUT,
        RqMethod.DELETE,
        RqMethod.TRACE,
    };

    /**
     * TkProxy can just work.
     * @throws Exception If some problem inside
     */
    @SuppressWarnings("PMD.AvoidInstantiatingObjectsInLoops")
    @Test
    public void justWorks() throws Exception {
        for (final String httpmethod : TkProxyTest.METHODS) {
            new FtRemote(new TkFixed("hello, world!")).exec(
                new FtRemote.Script() {
                    @Override
                    public void exec(final URI home) throws IOException {
                        final Response rsp =
                            new TkProxy(home).act(new RqFake(httpmethod));
                        MatcherAssert.assertThat(
                            new RsPrint(rsp).print(),
                            Matchers.containsString("hello")
                        );
                    }
                }
            );
        }
    }

    /**
     * TkProxy can correctly maps path string.
     * @throws Exception If some problem inside
     */
    @SuppressWarnings("PMD.AvoidInstantiatingObjectsInLoops")
    @Test
    public void correctlyMapsPathString() throws Exception {
        // Echo take: replies with the full href of the incoming request.
        final Take echo = new Take() {
            @Override
            public Response act(final Request req) throws IOException {
                return new RsText(new RqHref.Base(req).href().toString());
            }
        };
        for (final String httpmethod : TkProxyTest.METHODS) {
            new FtRemote(echo).exec(
                new FtRemote.Script() {
                    @Override
                    public void exec(final URI home) throws IOException {
                        final String expected = String.format(
                            "http://%s:%d/a/b/c",
                            home.getHost(), home.getPort()
                        );
                        final Response rsp = new TkProxy(home).act(
                            new RqFake(httpmethod, "/a/b/c")
                        );
                        MatcherAssert.assertThat(
                            new RsPrint(rsp).printBody(),
                            Matchers.equalTo(expected)
                        );
                    }
                }
            );
        }
    }
}
|
package test;
import info.u_team.u_team_core.tileentity.UTileEntityAsyncUpdate;
import info.u_team.u_team_core.util.MathUtil;
import net.minecraft.init.Blocks;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.BlockPos;
import net.minecraftforge.items.*;
public class TileEntityAsyncTest extends UTileEntityAsyncUpdate {

    // Position of the glass block being wandered around the world;
    // lazily initialized three blocks above this tile.
    private BlockPos loc;

    @Override
    public void updateAsync() {
        // Server side only.
        if (world.isRemote) {
            return;
        }
        System.out.println("hello me getting updated async");
        if (loc == null) {
            loc = pos.add(0, 3, 0);
        }
        world.setBlockState(loc, Blocks.STAINED_GLASS.getStateFromMeta(5));
        // Random walk: same call order as before so RNG consumption is unchanged.
        int dx = MathUtil.getRandomNumberInRange(-1, 1);
        int dy = MathUtil.getRandomNumberInRange(0, 1);
        int dz = MathUtil.getRandomNumberInRange(-1, 1);
        loc = loc.add(dx, dy, dz);
        // Push a stone block into any inventory directly above this tile.
        TileEntity above = world.getTileEntity(pos.add(0, 1, 0));
        if (above == null) {
            return;
        }
        IItemHandler handler = above.getCapability(CapabilityItemHandler.ITEM_HANDLER_CAPABILITY, EnumFacing.DOWN);
        if (handler != null) {
            handler.insertItem(0, new ItemStack(Blocks.STONE), false);
        }
    }

    @Override
    public void update() {
        // System.out.println("sync");
    }

    @Override
    public int getUpdateRate() {
        return 250;
    }

    @Override
    public void readNBT(NBTTagCompound compound) {
        // No persistent state to restore.
    }

    @Override
    public void writeNBT(NBTTagCompound compound) {
        // No persistent state to save.
    }
}
|
package org.suren.autotest.web.framework.core.ui;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.suren.autotest.web.framework.core.Locator;
/**
 * Base implementation of an HTML element descriptor. Holds the standard
 * locator attributes (id, name, tagName, CSS selector, XPath, linkText,
 * partialLinkText), an ordered list of {@link Locator} strategies, and a
 * parameter map used for simple ${param} substitution via
 * {@link #paramTranslate(String)}.
 *
 * @author suren
 * @since jdk1.6 2016-06-30
 */
public abstract class AbstractElement implements Element
{
	private String id;
	private String name;
	private String tagName;
	private String CSS;
	private String XPath;
	private String linkText;
	private String partialLinkText;

	// Ordered locator strategies for finding this element.
	private List<Locator> locatorList = new ArrayList<Locator>();
	private String strategy;

	// Arbitrary key/value data; keys starting with paramPrefix take part
	// in ${...} substitution (see paramTranslate).
	private Map<String, Object> data = new HashMap<String, Object>();
	private String paramPrefix;

	@Override
	public String getId()
	{
		return id;
	}

	public void setId(String id)
	{
		this.id = id;
	}

	@Override
	public String getName()
	{
		return name;
	}

	public void setName(String name)
	{
		this.name = name;
	}

	@Override
	public String getTagName()
	{
		return tagName;
	}

	public void setTagName(String tagName)
	{
		this.tagName = tagName;
	}

	@Override
	public String getCSS()
	{
		return CSS;
	}

	public void setCSS(String cSS)
	{
		CSS = cSS;
	}

	@Override
	public String getXPath()
	{
		return XPath;
	}

	public void setXPath(String xPath)
	{
		XPath = xPath;
	}

	@Override
	public String getLinkText()
	{
		return linkText;
	}

	public void setLinkText(String linkText)
	{
		this.linkText = linkText;
	}

	@Override
	public String getPartialLinkText()
	{
		return partialLinkText;
	}

	public void setPartialLinkText(String partialLinkText)
	{
		this.partialLinkText = partialLinkText;
	}

	/**
	 * @return the locatorList
	 */
	@Override
	public List<Locator> getLocatorList()
	{
		return locatorList;
	}

	/**
	 * @param locatorList the locatorList to set
	 */
	public void setLocatorList(List<Locator> locatorList)
	{
		this.locatorList = locatorList;
	}

	@Override
	public String getStrategy()
	{
		return strategy;
	}

	public void setStrategy(String strategy)
	{
		this.strategy = strategy;
	}

	/**
	 * Stores a key/value pair in this element's data map.
	 *
	 * @param key data key
	 * @param value data value
	 */
	public void putData(String key, Object value)
	{
		data.put(key, value);
	}

	/**
	 * Removes the entry for the given key from the data map.
	 *
	 * @param key data key to remove
	 */
	public void removeData(String key)
	{
		data.remove(key);
	}

	/**
	 * @param key data key to test
	 * @return true if the data map contains the given key
	 */
	public boolean containsKey(String key)
	{
		return data.containsKey(key);
	}

	/** Removes all entries from the data map. */
	public void clearData()
	{
		data.clear();
	}

	/**
	 * @return true if the element is enabled, false otherwise
	 */
	public abstract boolean isEnabled();

	/**
	 * @return true if the element is hidden, false otherwise
	 */
	public abstract boolean isHidden();

	/**
	 * Replaces ${param} placeholders in {@code value} with the corresponding
	 * (non-null) values from the data map, considering only keys that start
	 * with {@link #getParamPrefix()}.
	 * NOTE(review): throws NullPointerException if paramPrefix was never set —
	 * confirm callers always configure it first.
	 *
	 * @param value text possibly containing ${param} placeholders
	 * @return the text with all matching placeholders substituted
	 */
	public String paramTranslate(String value)
	{
		String result = value;
		// Iterate entries instead of keySet()+get() to avoid a second lookup per key.
		for (Map.Entry<String, Object> entry : data.entrySet())
		{
			String param = entry.getKey();
			if (!param.startsWith(paramPrefix))
			{
				continue;
			}

			Object paramVal = entry.getValue();
			if (paramVal != null)
			{
				result = result.replace("${" + param + "}", paramVal.toString());
			}
		}

		return result;
	}

	public String getParamPrefix()
	{
		return paramPrefix;
	}

	public void setParamPrefix(String paramPrefix)
	{
		this.paramPrefix = paramPrefix;
	}
}
|
// Triple Play - utilities for use in PlayN-based games
package tripleplay.ui;
import playn.core.PlayN;
import playn.core.Image;
import playn.core.Game;
import playn.java.JavaPlatform;
import react.Signals;
import pythagoras.f.IRectangle;
import pythagoras.f.Rectangle;
/**
* A test app for demoing the UI widgets.
*/
public class WidgetDemo implements Game
{
    /**
     * Bootstraps the Java PlayN backend, points the asset manager at the test
     * resources directory, and starts the demo game loop.
     */
    public static void main (String[] args) {
        JavaPlatform platform = JavaPlatform.register();
        platform.assetManager().setPathPrefix("src/test/resources");
        PlayN.run(new WidgetDemo());
    }
    @Override // from interface Game
    public void init () {
        // Create the UI interface and route PlayN pointer events into it.
        _iface = new Interface(null);
        PlayN.pointer().setListener(_iface.plistener);

        // define our root stylesheet
        Styles buttonStyles = Styles.none().
            add(Style.BACKGROUND.is(Background.solid(0xFFFFFFFF, 5))).
            addSelected(Style.BACKGROUND.is(Background.solid(0xFFCCCCCC, 6, 4, 4, 6)));
        Stylesheet rootSheet = Stylesheet.builder().
            add(Button.class, buttonStyles).
            create();

        // create our demo interface
        Root root = _iface.createRoot(AxisLayout.vertical().gap(15), rootSheet);
        root.setSize(PlayN.graphics().width(), PlayN.graphics().height());
        root.addStyles(Styles.make(Style.BACKGROUND.is(Background.solid(0xFF99CCFF, 5))));
        PlayN.graphics().rootLayer().add(root.layer);

        // Demo assets and shared styles used by the widget rows below.
        Image smiley = PlayN.assetManager().getImage("images/smiley.png");
        Image squares = PlayN.assetManager().getImage("images/squares.png");
        Styles wrapped = Styles.make(Style.TEXT_WRAP.is(true));
        Button toggle;
        Label label2;
        Styles greenBg = Styles.make(Style.BACKGROUND.is(Background.solid(0xFFCCFF99, 5)));

        // Row 1: three wrapped text labels; row 2: buttons (incl. a visibility
        // toggle) next to labels; row 3: icon-position demos using sub-regions
        // of the squares image.
        root.add(new Group(AxisLayout.horizontal().alignTop()).add(
                     new Label(wrapped).setConstraint(AxisLayout.stretched()).setText(TEXT1),
                     new Label(wrapped).setConstraint(AxisLayout.stretched()).setText(TEXT2),
                     new Label(wrapped).setConstraint(AxisLayout.stretched()).setText(TEXT3)),
                 new Group(AxisLayout.horizontal().alignTop(), greenBg).add(
                     new Group(AxisLayout.vertical()).add(
                         new Label().setText("Toggle viz:"),
                         toggle = new Button().setText("Toggle"),
                         new Button().setText("Disabled").setEnabled(false)),
                     new Group(AxisLayout.vertical()).add(
                         new Label().setText("Label 1"),
                         label2 = new Label().setText("Label 2"),
                         new Label().setIcon(smiley).setText("Label 3"))),
                 new Group(AxisLayout.horizontal().gap(15), greenBg).add(
                     new Label(Styles.make(Style.ICON_POS.is(Style.Pos.LEFT))).
                     setText("Left").setIcon(squares, getIBounds(0)),
                     new Label(Styles.make(Style.ICON_POS.is(Style.Pos.RIGHT))).
                     setText("Right").setIcon(squares, getIBounds(1)),
                     new Label(Styles.make(Style.ICON_POS.is(Style.Pos.ABOVE),
                                           Style.HALIGN.is(Style.HAlign.CENTER))).
                     setText("Above").setIcon(squares, getIBounds(2)),
                     new Label(Styles.make(Style.ICON_POS.is(Style.Pos.BELOW),
                                           Style.HALIGN.is(Style.HAlign.CENTER))).
                     setText("Below").setIcon(squares, getIBounds(3))));

        // Clicking "Toggle" flips the visibility of label2.
        Signals.toggler(toggle.clicked, true).connect(label2.visibleSlot());
    }
@Override // from interface Game
public void update (float delta) {
_iface.update(delta);
}
@Override // from interface Game
public void paint (float alpha) {
_iface.paint(alpha);
}
@Override // from interface Game
public int updateRate () {
return 30;
}
protected IRectangle getIBounds (int index) {
final float iwidth = 16, iheight = 16;
return new Rectangle(index*iwidth, 0, iwidth, iheight);
}
protected Interface _iface;
protected static final String TEXT1 = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.";
protected static final String TEXT2 = "Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo.";
protected static final String TEXT3 = "But I must explain to you how all this mistaken idea of denouncing pleasure and praising pain was born and I will give you a complete account of the system, and expound the actual teachings of the great explorer of the truth, the master-builder of human happiness.";
}
|
package techreborn.tiles.fusionReactor;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.BlockPos;
import reborncore.api.IToolDrop;
import reborncore.api.tile.IInventoryProvider;
import reborncore.common.RebornCoreConfig;
import reborncore.common.powerSystem.TilePowerAcceptor;
import reborncore.common.registration.RebornRegistry;
import reborncore.common.registration.impl.ConfigRegistry;
import reborncore.common.util.Inventory;
import reborncore.common.util.ItemUtils;
import reborncore.common.util.Torus;
import techreborn.api.reactor.FusionReactorRecipe;
import techreborn.api.reactor.FusionReactorRecipeHelper;
import techreborn.client.container.IContainerProvider;
import techreborn.client.container.builder.BuiltContainer;
import techreborn.client.container.builder.ContainerBuilder;
import techreborn.init.ModBlocks;
import techreborn.lib.ModInfo;
import java.util.List;
@RebornRegistry(modID = ModInfo.MOD_ID)
public class TileFusionControlComputer extends TilePowerAcceptor
    implements IToolDrop, IInventoryProvider, IContainerProvider {

    @ConfigRegistry(config = "machines", category = "fusion_reactor", key = "FusionReactorMaxInput", comment = "Fusion Reactor Max Input (Value in EU)")
    public static int maxInput = 8192;
    @ConfigRegistry(config = "machines", category = "fusion_reactor", key = "FusionReactorMaxOutput", comment = "Fusion Reactor Max Output (Value in EU)")
    public static int maxOutput = 1_000_000;
    @ConfigRegistry(config = "machines", category = "fusion_reactor", key = "FusionReactorMaxEnergy", comment = "Fusion Reactor Max Energy (Value in EU)")
    public static int maxEnergy = 100_000_000;
    @ConfigRegistry(config = "machines", category = "fusion_reactor", key = "FusionReactorMaxCoilSize", comment = "Fusion Reactor Max Coil size (Radius)")
    public static int maxCoilSize = 50;

    public Inventory inventory;
    /** Number of coils found around the reactor; 0 means the multiblock is incomplete. */
    public int coilCount = 0;
    // NOTE: "crafing" is a long-standing typo; the name is kept because the
    // field, its accessors and the NBT key are all part of the public surface.
    public int crafingTickTime = 0;
    public int finalTickTime = 0;
    public int neededPower = 0;
    /** Torus radius of the reactor; clamped to [6, maxCoilSize]. */
    public int size = 6;
    /** Client-side state copy; see getState() for the encoding. */
    public int state = -1;
    int topStackSlot = 0;
    int bottomStackSlot = 1;
    int outputStackSlot = 2;
    FusionReactorRecipe currentRecipe = null;
    boolean hasStartedCrafting = false;

    public TileFusionControlComputer() {
        super();
        checkOverfill = false;
        this.inventory = new Inventory(3, "TileFusionControlComputer", 64, this);
    }

    /**
     * Check that reactor has all necessary coils in place
     *
     * @return boolean Return true if coils are present
     */
    public boolean checkCoils() {
        List<BlockPos> coils = Torus.generate(getPos(), size);
        for (BlockPos coilPos : coils) {
            if (!isCoil(coilPos)) {
                coilCount = 0;
                return false;
            }
        }
        coilCount = coils.size();
        return true;
    }

    /**
     * Checks if block is fusion coil
     *
     * @param pos coordinate for block
     * @return boolean Returns true if block is fusion coil
     */
    public boolean isCoil(final BlockPos pos) {
        return this.world.getBlockState(pos).getBlock() == ModBlocks.FUSION_COIL;
    }

    /**
     * Resets crafter progress and recipe
     */
    private void resetCrafter() {
        this.currentRecipe = null;
        this.crafingTickTime = 0;
        this.finalTickTime = 0;
        this.neededPower = 0;
        this.hasStartedCrafting = false;
    }

    /**
     * Checks that ItemStack could be inserted into slot provided, including check
     * for existing item in slot and maximum stack size
     *
     * @param stack ItemStack ItemStack to insert
     * @param slot int Slot ID to check
     * @param oreDic boolean Should we use ore dictionary
     * @return boolean Returns true if ItemStack will fit into slot
     */
    public boolean canFitStack(final ItemStack stack, final int slot, final boolean oreDic) {
        if (stack.isEmpty()) {
            return true;
        }
        if (this.inventory.getStackInSlot(slot).isEmpty()) {
            return true;
        }
        if (ItemUtils.isItemEqual(this.inventory.getStackInSlot(slot), stack, true, true, oreDic)) {
            if (stack.getCount() + this.inventory.getStackInSlot(slot).getCount() <= stack.getMaxStackSize()) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns progress scaled to input value
     *
     * @param scale int Maximum value for progress
     * @return int Scale of progress
     */
    public int getProgressScaled(int scale) {
        if (this.crafingTickTime != 0) {
            return this.crafingTickTime * scale / this.finalTickTime;
        }
        return 0;
    }

    /**
     * Tries to set current recipe based in inputs in reactor
     */
    private void updateCurrentRecipe() {
        for (final FusionReactorRecipe reactorRecipe : FusionReactorRecipeHelper.reactorRecipes) {
            if (validateReactorRecipe(reactorRecipe)) {
                this.currentRecipe = reactorRecipe;
                this.crafingTickTime = 0;
                this.finalTickTime = this.currentRecipe.getTickTime();
                this.neededPower = (int) this.currentRecipe.getStartEU();
                this.hasStartedCrafting = false;
                break;
            }
        }
    }

    /**
     * Validates that reactor can execute recipe provided, e.g. has all inputs and can fit output
     *
     * @param recipe FusionReactorRecipe Recipe to validate
     * @return boolean True if reactor can execute recipe provided
     */
    private boolean validateReactorRecipe(FusionReactorRecipe recipe) {
        if (ItemUtils.isItemEqual(this.getStackInSlot(topStackSlot), recipe.getTopInput(), true, true, true)) {
            // A null bottom input means the recipe needs only the top input.
            if (recipe.getBottomInput() != null) {
                if (!ItemUtils.isItemEqual(this.getStackInSlot(bottomStackSlot), recipe.getBottomInput(), true, true, true)) {
                    return false;
                }
            }
            if (this.canFitStack(recipe.getOutput(), outputStackSlot, true)) {
                return true;
            }
        }
        return false;
    }

    // TilePowerAcceptor
    @Override
    public void update() {
        super.update();
        if (this.world.isRemote) {
            return;
        }
        // Force a structure re-check every second (20 ticks).
        if (this.world.getTotalWorldTime() % 20 == 0) {
            this.checkCoils();
            this.inventory.hasChanged = true;
        }
        if (this.coilCount == 0) {
            this.resetCrafter();
            return;
        }
        if (this.currentRecipe == null && this.inventory.hasChanged) {
            updateCurrentRecipe();
        }
        if (this.currentRecipe != null) {
            if (this.inventory.hasChanged && !validateReactorRecipe(this.currentRecipe)) {
                resetCrafter();
                return;
            }
            if (!this.hasStartedCrafting) {
                // Ignition! One-off start cost before the recipe begins ticking.
                if (this.canUseEnergy(this.currentRecipe.getStartEU())) {
                    this.useEnergy(this.currentRecipe.getStartEU());
                    this.hasStartedCrafting = true;
                }
            }
            if (hasStartedCrafting && this.crafingTickTime < this.finalTickTime) {
                this.crafingTickTime++;
                if (this.currentRecipe.getEuTick() > 0) {
                    // Power-generating recipe: waste power if it has nowhere to go.
                    this.addEnergy(this.currentRecipe.getEuTick() * getPowerMultiplier());
                    this.powerChange = this.currentRecipe.getEuTick() * getPowerMultiplier();
                } else {
                    // Power-consuming recipe (negative EU/t).
                    if (this.canUseEnergy(this.currentRecipe.getEuTick() * -1)) {
                        this.setEnergy(this.getEnergy() - this.currentRecipe.getEuTick() * -1);
                    }
                }
            } else if (this.crafingTickTime >= this.finalTickTime) {
                if (this.canFitStack(this.currentRecipe.getOutput(), this.outputStackSlot, true)) {
                    if (this.getStackInSlot(this.outputStackSlot).isEmpty()) {
                        this.setInventorySlotContents(this.outputStackSlot, this.currentRecipe.getOutput().copy());
                    } else {
                        // Negative decrement grows the existing output stack.
                        this.decrStackSize(this.outputStackSlot, -this.currentRecipe.getOutput().getCount());
                    }
                    this.decrStackSize(this.topStackSlot, this.currentRecipe.getTopInput().getCount());
                    // validateReactorRecipe treats a null bottom input as legal,
                    // so it must be null-checked here too (the original called
                    // isEmpty() directly and could NPE on such recipes).
                    ItemStack bottomInput = this.currentRecipe.getBottomInput();
                    if (bottomInput != null && !bottomInput.isEmpty()) {
                        this.decrStackSize(this.bottomStackSlot, bottomInput.getCount());
                    }
                    // Keep going immediately if inputs for another run are present.
                    if (this.validateReactorRecipe(this.currentRecipe)) {
                        this.crafingTickTime = 0;
                    } else {
                        this.resetCrafter();
                    }
                }
            }
            this.markDirty();
        }
        if (this.inventory.hasChanged) {
            this.inventory.hasChanged = false;
        }
    }

    /**
     * Output multiplier grows super-linearly with reactor size; never below 1.
     */
    @Override
    public double getPowerMultiplier() {
        double calc = (1F / 2F) * Math.pow(size - 5, 1.8);
        return Math.max(Math.round(calc * 100D) / 100D, 1D);
    }

    @Override
    public double getBaseMaxPower() {
        // Capped so the FU conversion cannot overflow an int.
        return Math.min(maxEnergy * getPowerMultiplier(), Integer.MAX_VALUE / RebornCoreConfig.euPerFU);
    }

    @Override
    public boolean canAcceptEnergy(final EnumFacing direction) {
        // Sides accept energy; top and bottom are output-only.
        return !(direction == EnumFacing.DOWN || direction == EnumFacing.UP);
    }

    @Override
    public boolean canProvideEnergy(final EnumFacing direction) {
        return direction == EnumFacing.DOWN || direction == EnumFacing.UP;
    }

    @Override
    public double getBaseMaxOutput() {
        if (!this.hasStartedCrafting) {
            return 0;
        }
        return Integer.MAX_VALUE / RebornCoreConfig.euPerFU;
    }

    @Override
    public double getBaseMaxInput() {
        // No external input once ignition has happened.
        if (this.hasStartedCrafting) {
            return 0;
        }
        return maxInput;
    }

    @Override
    public void readFromNBT(final NBTTagCompound tagCompound) {
        super.readFromNBT(tagCompound);
        this.crafingTickTime = tagCompound.getInteger("crafingTickTime");
        this.finalTickTime = tagCompound.getInteger("finalTickTime");
        this.neededPower = tagCompound.getInteger("neededPower");
        this.hasStartedCrafting = tagCompound.getBoolean("hasStartedCrafting");
        // Recipes are not serialised; re-resolve from the saved inventory.
        if (tagCompound.hasKey("hasActiveRecipe") && tagCompound.getBoolean("hasActiveRecipe") && this.currentRecipe == null) {
            for (final FusionReactorRecipe reactorRecipe : FusionReactorRecipeHelper.reactorRecipes) {
                if (validateReactorRecipe(reactorRecipe)) {
                    this.currentRecipe = reactorRecipe;
                }
            }
        }
        if (tagCompound.hasKey("size")) {
            this.size = tagCompound.getInteger("size");
        }
        // Done here to force the smaller size; useful if people lag out on a large one.
        this.size = Math.min(size, maxCoilSize);
    }

    @Override
    public NBTTagCompound writeToNBT(final NBTTagCompound tagCompound) {
        super.writeToNBT(tagCompound);
        tagCompound.setInteger("crafingTickTime", this.crafingTickTime);
        tagCompound.setInteger("finalTickTime", this.finalTickTime);
        tagCompound.setInteger("neededPower", this.neededPower);
        tagCompound.setBoolean("hasStartedCrafting", this.hasStartedCrafting);
        tagCompound.setBoolean("hasActiveRecipe", this.currentRecipe != null);
        tagCompound.setInteger("size", size);
        return tagCompound;
    }

    // TileLegacyMachineBase
    @Override
    public void onLoad() {
        super.onLoad();
        this.checkCoils();
    }

    @Override
    public boolean canBeUpgraded() {
        return false;
    }

    // IToolDrop
    @Override
    public ItemStack getToolDrop(EntityPlayer p0) {
        return new ItemStack(ModBlocks.FUSION_CONTROL_COMPUTER, 1);
    }

    // IInventoryProvider
    @Override
    public Inventory getInventory() {
        return this.inventory;
    }

    // IContainerProvider
    @Override
    public BuiltContainer createContainer(final EntityPlayer player) {
        return new ContainerBuilder("fusionreactor").player(player.inventory).inventory().hotbar()
            .addInventory().tile(this).slot(0, 34, 47).slot(1, 126, 47).outputSlot(2, 80, 47).syncEnergyValue()
            .syncIntegerValue(this::getCoilStatus, this::setCoilStatus)
            .syncIntegerValue(this::getCrafingTickTime, this::setCrafingTickTime)
            .syncIntegerValue(this::getFinalTickTime, this::setFinalTickTime)
            .syncIntegerValue(this::getSize, this::setSize)
            .syncIntegerValue(this::getState, this::setState)
            .syncIntegerValue(this::getNeededPower, this::setNeededPower).addInventory().create(this);
    }

    public int getCoilStatus() {
        return this.coilCount;
    }

    public void setCoilStatus(final int coilStatus) {
        this.coilCount = coilStatus;
    }

    public int getCrafingTickTime() {
        return this.crafingTickTime;
    }

    public void setCrafingTickTime(final int crafingTickTime) {
        this.crafingTickTime = crafingTickTime;
    }

    public int getFinalTickTime() {
        return this.finalTickTime;
    }

    public void setFinalTickTime(final int finalTickTime) {
        this.finalTickTime = finalTickTime;
    }

    public int getNeededPower() {
        return this.neededPower;
    }

    public void setNeededPower(final int neededPower) {
        this.neededPower = neededPower;
    }

    public int getSize() {
        return size;
    }

    public void setSize(int size) {
        this.size = size;
    }

    /**
     * Adjusts the reactor radius by the given delta, clamped to [6, maxCoilSize].
     */
    public void changeSize(int sizeDelta) {
        int newSize = size + sizeDelta;
        this.size = Math.max(6, Math.min(maxCoilSize, newSize));
    }

    /**
     * @return int Machine state: 0 = no recipe, 1 = waiting for ignition power,
     *         2 = crafting. (The original also had an unreachable -1 branch.)
     */
    public int getState() {
        if (currentRecipe == null) {
            return 0; // No Recipe
        }
        if (!hasStartedCrafting) {
            return 1; // Waiting on power
        }
        return 2; // Crafting
    }

    public void setState(int state) {
        this.state = state;
    }

    /**
     * @return String Human-readable form of the synced state value; empty for -1/unknown.
     */
    public String getStateString() {
        switch (state) {
            case 0:
                return "No recipe";
            case 1:
                return "Charging";
            case 2:
                return "Crafting";
            default:
                return "";
        }
    }
}
|
package net.finmath.experiments.concurrency;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.Arrays;
import java.util.stream.IntStream;
/**
 * Benchmark comparing three ways of nesting a parallel stream forEach inside
 * another parallel forEach: inner loop parallel-but-wrapped-in-a-thread,
 * inner loop sequential, and inner loop parallel. Results (mean +/- stddev,
 * min, max in seconds) are printed to stdout.
 */
public class NestedParallelForEachBenchmark {

    final NumberFormat formatter2 = new DecimalFormat("0.00");

    final int numberOfWarmUpLoops = 20;
    final int numberOfBenchmarkLoops = 20;
    final int numberOfTasksInOuterLoop = 24;
    final int numberOfTasksInInnerLoop = 10;
    final int concurrentExecutionsLimitForStreams = 2; // java.util.concurrent.ForkJoinPool.common.parallelism
    final int outerLoopOverheadFactor = 100000;
    final long calculationTaskFactor = 100; // You might need to calibrate this for your machine

    // Array where we store calculation results - this is just to prevent the JVM to optimize the task away
    final double[] results = new double[numberOfTasksInOuterLoop * numberOfTasksInInnerLoop];

    public static void main(String[] args) {
        (new NestedParallelForEachBenchmark()).testNestedLoops();
    }

    public NestedParallelForEachBenchmark() {
        super();
        // NOTE(review): this property is only honoured if the common
        // ForkJoinPool has not been initialised yet — setting it here works
        // when the constructor runs before any parallel stream; confirm if
        // this class is ever used from code that touched the pool earlier.
        System.setProperty("java.util.concurrent.ForkJoinPool.common.parallelism", Integer.toString(concurrentExecutionsLimitForStreams));
        System.out.println("java.util.concurrent.ForkJoinPool.common.parallelism = " + System.getProperty("java.util.concurrent.ForkJoinPool.common.parallelism"));
    }

    /** Runs all three benchmark variants and prints a summary. */
    public void testNestedLoops() {
        System.out.println("We perfrom three different tests of Java streams (parallel) forEach loops.");
        System.out.println("Each test takes around 1 minute and is repeated " + (numberOfWarmUpLoops + numberOfBenchmarkLoops) + " times.");
        System.out.println("Please be patient (we print a '.' after each run).");
        System.out.println("Note: You may like to check cpu usage for each test.");
        System.out.println("");
        System.out.print("Test 1 (inner loop parallel but wrapped in thread)_:");
        String timeWithInnerParallelButWrappedInThread = timeAction(() -> timeNestedLoopWithInnerParallelButWrappedInThread());
        System.out.print("Test 2 (inner loop sequential)_____________________:");
        String timeForInnerSequential = timeAction(() -> timeNestedLoopWithInnerSequential());
        System.out.print("Test 3 (inner loop parallel)_______________________:");
        String timeForInnerParallel = timeAction(() -> timeNestedLoopWithInnerParallel());
        System.out.println("");
        System.out.println("Results:");
        System.out.println("time for inner loop parallel but wrapped in thread__= " + timeWithInnerParallelButWrappedInThread);
        System.out.println("time for inner loop sequential______________________= " + timeForInnerSequential);
        System.out.println("time for inner loop parallel________________________= " + timeForInnerParallel);
    }

    /** Runs the action numberOfWarmUpLoops times, untimed, to let the JIT settle. */
    public void warmUp(Runnable action) {
        // Some warm up
        for (int i = 0; i < numberOfWarmUpLoops; i++) {
            Arrays.fill(results, 0);
            System.out.print(".");
            action.run();
        }
    }

    /**
     * Warms up, then times the action numberOfBenchmarkLoops times.
     *
     * @param action the benchmark body to run
     * @return formatted "mean +/- stddev (min, max)" string in seconds
     */
    public String timeAction(Runnable action) {
        warmUp(action);
        // Test case
        double sum = 0.0;
        double sumOfSquared = 0.0;
        double max = 0.0;
        double min = Double.MAX_VALUE;
        for (int i = 0; i < numberOfBenchmarkLoops; i++) {
            System.out.print(".");
            Arrays.fill(results, 0);
            long start = System.currentTimeMillis();
            action.run();
            long end = System.currentTimeMillis();
            double seconds = (double) (end - start) / 1000.0;
            max = Math.max(max, seconds);
            min = Math.min(min, seconds);
            sum += seconds;
            sumOfSquared += seconds * seconds;
        }
        System.out.println();
        // stddev via E[X^2] - E[X]^2
        return "" + formatter2.format(sum / numberOfBenchmarkLoops) + " +/- " + formatter2.format(Math.sqrt(sumOfSquared/numberOfBenchmarkLoops - sum * sum / numberOfBenchmarkLoops / numberOfBenchmarkLoops)) + " (min: " + formatter2.format(min) + " , max: " + formatter2.format(max) + ")" ;
    }

    /** Variant 3: parallel outer loop with a directly nested parallel inner loop. */
    public void timeNestedLoopWithInnerParallel() {
        // Outer loop
        IntStream.range(0, numberOfTasksInOuterLoop).parallel().forEach(i -> {
            if (i < numberOfTasksInOuterLoop/2) results[i * numberOfTasksInInnerLoop] += burnTime(outerLoopOverheadFactor);
            // Inner loop as parallel: worst case (sequential) it takes 10 * numberOfTasksInInnerLoop millis
            IntStream.range(0, numberOfTasksInInnerLoop).parallel().forEach(j -> {
                results[i * numberOfTasksInInnerLoop + j] += burnTime(1);
            });
            if (i >= numberOfTasksInOuterLoop/2) results[i * numberOfTasksInInnerLoop] += burnTime(outerLoopOverheadFactor);
        });
    }

    /** Variant 2: parallel outer loop with a sequential inner loop. */
    public void timeNestedLoopWithInnerSequential() {
        // Outer loop
        IntStream.range(0, numberOfTasksInOuterLoop).parallel().forEach(i -> {
            if (i < numberOfTasksInOuterLoop/2) results[i * numberOfTasksInInnerLoop] += burnTime(outerLoopOverheadFactor);
            // Inner loop as parallel: worst case (sequential) it takes 10 * numberOfTasksInInnerLoop millis
            IntStream.range(0, numberOfTasksInInnerLoop).sequential().forEach(j -> {
                results[i * numberOfTasksInInnerLoop + j] += burnTime(1);
            });
            if (i >= numberOfTasksInOuterLoop/2) results[i * numberOfTasksInInnerLoop] += burnTime(outerLoopOverheadFactor);
        });
    }

    /** Variant 1: parallel inner loop, but run on a dedicated (non-ForkJoin) thread. */
    public void timeNestedLoopWithInnerParallelButWrappedInThread() {
        // Outer loop
        IntStream.range(0, numberOfTasksInOuterLoop).parallel().forEach(i -> {
            if (i < numberOfTasksInOuterLoop/2) results[i * numberOfTasksInInnerLoop] += burnTime(outerLoopOverheadFactor);
            // Inner loop as parallel: worst case (sequential) it takes 10 * numberOfTasksInInnerLoop millis
            wrapInThread(() ->
                IntStream.range(0, numberOfTasksInInnerLoop).parallel().forEach(j -> {
                    results[i * numberOfTasksInInnerLoop + j] += burnTime(1);
                })
            );
            if (i >= numberOfTasksInOuterLoop/2) results[i * numberOfTasksInInnerLoop] += burnTime(outerLoopOverheadFactor);
        });
    }

    /**
     * Burns CPU time proportional to millis * calculationTaskFactor.
     * Uses Math.random() so the JIT cannot constant-fold the loop away.
     */
    private double burnTime(long millis) {
        double x = 0;
        for (long i = 0; i < millis * calculationTaskFactor; i++) {
            // We use a random number generator here, to prevent some optimization by the JVM
            x += Math.cos(i * 0.0023 * Math.random());
        }
        return x / calculationTaskFactor;
    }

    /** Runs the runnable on a fresh thread and waits for it to finish. */
    private void wrapInThread(Runnable runnable) {
        Thread t = new Thread(runnable, "Wrapper Thread");
        try {
            t.start();
            t.join();
        } catch (InterruptedException e) {
            // Fixed: the original swallowed the interrupt; restore the
            // interrupt status so callers can observe it.
            Thread.currentThread().interrupt();
        }
    }
}
|
package us.kbase.cmonkey;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.net.URL;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import us.kbase.auth.AuthException;
import us.kbase.auth.AuthToken;
import us.kbase.auth.TokenFormatException;
import us.kbase.common.service.JsonClientException;
import us.kbase.common.service.Tuple11;
import us.kbase.common.service.UObject;
import us.kbase.common.service.UnauthorizedException;
import us.kbase.idserverapi.IDServerAPIClient;
import us.kbase.userandjobstate.InitProgress;
import us.kbase.userandjobstate.Results;
import us.kbase.userandjobstate.UserAndJobStateClient;
import us.kbase.workspaceservice.GetObjectOutput;
import us.kbase.workspaceservice.GetObjectParams;
import us.kbase.workspaceservice.ObjectData;
import us.kbase.workspaceservice.SaveObjectParams;
import us.kbase.workspaceservice.WorkspaceServiceClient;
public class CmonkeyServerImpl {
// private static Integer tempFileId = 0;
private static final String JOB_PATH = "/var/tmp/cmonkey/";
// private static final String CMONKEY_COMMAND = "cmonkey-python";
private static final String CMONKEY_DIR = "/kb/runtime/cmonkey-python/";
private static final String CMONKEY_COMMAND = "/kb/runtime/cmonkey-python/cmonkey.py";
private static final String DATA_PATH = "/etc/cmonkey-python/KEGG_taxonomy";
private static final String ID_SERVICE_URL = "http://kbase.us/services/idserver";
private static final String WS_SERVICE_URL = "http://kbase.us/services/workspace";
private static final String JOB_SERVICE_URL = "http://140.221.84.180:7083";
private static WorkspaceServiceClient _wsClient = null;
private static UserAndJobStateClient _jobClient = null;
private static Date date = new Date();
private static SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ");
private static boolean awe = false;
/**
 * Lazily creates and caches the workspace service client, authenticated
 * with the given token.
 *
 * NOTE(review): the cached client keeps the token of the FIRST caller, and
 * the lazy init is not thread-safe — confirm single-threaded use.
 *
 * @param token auth token used only on first construction
 * @return the shared WorkspaceServiceClient instance
 */
protected static WorkspaceServiceClient wsClient(String token) throws TokenFormatException, UnauthorizedException, IOException{
    if(_wsClient == null)
    {
        URL workspaceClientUrl = new URL (WS_SERVICE_URL);
        AuthToken authToken = new AuthToken(token);
        _wsClient = new WorkspaceServiceClient(workspaceClientUrl, authToken);
        _wsClient.setAuthAllowedForHttp(true);
    }
    return _wsClient;
}
/**
 * Lazily creates and caches the user-and-job-state client, authenticated
 * with the given token.
 *
 * NOTE(review): like wsClient, the cached client keeps the first token and
 * the lazy init is not thread-safe — confirm single-threaded use.
 *
 * @param token auth token used only on first construction
 * @return the shared UserAndJobStateClient instance
 */
protected static UserAndJobStateClient jobClient(String token) throws UnauthorizedException, IOException, AuthException {
    if(_jobClient == null)
    {
        URL jobServiceUrl = new URL (JOB_SERVICE_URL);
        AuthToken authToken = new AuthToken(token);
        _jobClient = new UserAndJobStateClient (jobServiceUrl, authToken);
        _jobClient.setAuthAllowedForHttp(true);
    }
    return _jobClient;
}
/**
 * Removes leftover job files from previous runs under JOB_PATH.
 *
 * Fixed: the original executed {@code Runtime.exec("rm -r " + JOB_PATH + "*")},
 * but Runtime.exec does not go through a shell, so the {@code *} glob was
 * never expanded and nothing was deleted. Delete the directory contents
 * directly instead.
 */
protected static void cleanUpOnStart () {
    File jobDir = new File(JOB_PATH);
    File[] entries = jobDir.listFiles();
    if (entries == null) {
        // Directory missing or unreadable: nothing to clean up (best effort).
        return;
    }
    for (File entry : entries) {
        deleteRecursively(entry);
    }
}

/** Deletes a file, or a directory together with everything beneath it (best effort). */
private static void deleteRecursively(File file) {
    File[] children = file.listFiles();
    if (children != null) {
        for (File child : children) {
            deleteRecursively(child);
        }
    }
    file.delete();
}
/**
 * Runs the cMonkey pipeline for an expression data series: prepares the
 * input table, invokes the external cmonkey.py program, and parses its
 * SQLite output into a CmonkeyRunResult.
 *
 * @param expressionDataSeries input expression data
 * @param params               cMonkey run options
 * @param jobId                job id for progress reporting; may be null
 * @param token                auth token for service calls
 * @param currentDir           working dir for AWE mode, or null to use JOB_PATH
 * @return the parsed run result with a freshly allocated KBase id
 * @throws Exception on any failure in preparation, execution or parsing
 */
public static CmonkeyRunResult buildCmonkeyNetwork(ExpressionDataSeries expressionDataSeries, CmonkeyRunParameters params, String jobId, String token, String currentDir) throws Exception{
    CmonkeyRunResult cmonkeyRunResult = new CmonkeyRunResult();
    cmonkeyRunResult.setId(getKbaseId("CmonkeyRunResult"));
    String jobPath = null;
    if (currentDir == null) {
        jobPath = JOB_PATH + jobId + "/";
        new File(jobPath).mkdir();
    } else {
        jobPath = currentDir + "/"+ jobId + "/";
        awe = true;
    }
    // try-with-resources so the server log writer is closed even when a
    // later step throws (the original leaked the FileWriter on failure)
    try (FileWriter writer = new FileWriter(jobPath + "serveroutput.txt")) {
        //prepare input
        String inputTable = getInputTable(expressionDataSeries);
        writer.write(inputTable);
        writer.flush();
        //check list of genes
        String organismName = getOrganismName(expressionDataSeries);
        writer.write("Organism name = " + organismName + "\n");
        writer.flush();
        String organismCode = getKeggCode(organismName);
        writer.write("Organism code = " + organismCode + "\n");
        writer.flush();
        //save input file
        writeInputFile (jobPath+"input.txt", inputTable);
        //generate command line
        String commandLine = generateCmonkeyCommandLine (jobPath, params, organismCode);
        writer.write(commandLine + "\n");
        writer.flush();
        //run
        if (jobId != null) updateJobProgress (jobId, "Input prepared. Starting cMonkey program...", token);
        Integer exitVal = executeCommand (commandLine, jobPath, jobId, token);
        if (exitVal != null) {
            writer.write("ExitValue: " + exitVal.toString() + "\n");
            writer.flush();
        } else {
            writer.write("ExitValue: null\n");
            writer.flush();
        }
        //parse results
        if (jobId != null) updateJobProgress (jobId, "cMonkey finished. Processing output...", token);
        String sqlFile = jobPath + "out/cmonkey_run.db";
        writer.write(sqlFile + "\n");
        writer.flush();
        parseCmonkeySql(sqlFile, cmonkeyRunResult);
        // NOTE(review): a second id is allocated here, overwriting the one
        // set at the top — preserved as-is; confirm whether both are needed.
        String resultId = getKbaseId("CmonkeyRunResult");
        writer.write(resultId + "\n");
        cmonkeyRunResult.setId(resultId);
    }
    //clean up (skipped in AWE mode, where the caller owns the directory)
    if (awe == false) {
        Runtime.getRuntime().exec("rm -r " + jobPath);
        Runtime.getRuntime().exec("rm " + JOB_PATH + "cmonkey-checkpoint*");
    }
    return cmonkeyRunResult;
}
/**
 * Workspace-driven variant of buildCmonkeyNetwork: fetches the
 * ExpressionDataSeries from the workspace, runs the pipeline, saves the
 * result back to the same workspace, and tracks progress under jobId.
 *
 * @param wsId       workspace id holding the input series and receiving the result
 * @param seriesId   id of the ExpressionDataSeries object to fetch
 * @param params     cMonkey run options
 * @param jobId      job id for progress tracking; may be null to skip tracking
 * @param token      auth token for all service calls
 * @param currentDir working dir for AWE mode, or null
 */
public static void buildCmonkeyNetworkJobFromWs (String wsId, String seriesId, CmonkeyRunParameters params, String jobId, String token, String currentDir) throws Exception {
    String desc = "Cmonkey service job. Method: buildCmonkeyNetworkJobFromWs. Input: " + seriesId + ". Workspace: " + wsId + ".";
    if (jobId != null) startJob (jobId, desc, 23L, token.toString());
    GetObjectParams objectParams = new GetObjectParams().withType("ExpressionDataSeries").withId(seriesId).withWorkspace(wsId).withAuth(token.toString());
    GetObjectOutput output = wsClient(token.toString()).getObject(objectParams);
    ExpressionDataSeries input = UObject.transformObjectToObject(output.getData(), ExpressionDataSeries.class);
    CmonkeyRunResult runResult = buildCmonkeyNetwork(input, params, jobId, token, currentDir);
    saveObjectToWorkspace (UObject.transformObjectToObject(runResult, UObject.class), runResult.getClass().getSimpleName(), wsId, runResult.getId(), token.toString());
    if (jobId != null) finishJob (jobId, wsId, runResult.getId(), token.toString());
}
/**
 * Assembles the cmonkey.py command line for the given job directory,
 * run parameters and KEGG organism code.
 *
 * @param jobPath      job working directory (with trailing slash)
 * @param params       option flags for the run
 * @param organismCode KEGG organism code passed to --organism
 * @return the full command line string
 */
protected static String generateCmonkeyCommandLine (String jobPath, CmonkeyRunParameters params, String organismCode) {
    StringBuilder command = new StringBuilder(CMONKEY_COMMAND);
    command.append(" --organism ").append(organismCode)
           .append(" --ratios ").append(jobPath).append("input.txt")
           .append(" --out ").append(jobPath).append("out")
           .append(" --cachedir ").append(jobPath).append("cache");
    //Set options
    if (params.getNoMotifs() == 1L) {
        command.append(" --nomotifs");
    }
    if (params.getNoNetworks() == 1L) {
        command.append(" --nonetworks");
    }
    if (params.getNoOperons() == 1L) {
        command.append(" --nooperons");
    }
    if (params.getNoString() == 1L) {
        command.append(" --nostring");
    }
    return command.toString();
}
/**
 * Registers a new job with the job-state service, with an estimated
 * completion time 30 hours from now. Failures are logged and swallowed
 * (job tracking is best effort).
 *
 * @param jobId job identifier
 * @param desc  human-readable job description
 * @param tasks number of progress steps the job will report
 * @param token auth token
 */
protected static void startJob (String jobId, String desc, Long tasks, String token) {
    String status = "cmonkey service job started. Preparing input...";
    InitProgress initProgress = new InitProgress();
    initProgress.setPtype("task");
    initProgress.setMax(tasks);
    // Fixed: the original mutated the shared static 'date'
    // (date.setTime(date.getTime() + 108000000L)), so every call pushed the
    // estimated completion a further 30 hours into the future. Use a fresh
    // Date relative to "now" instead.
    Date estimatedCompletion = new Date(System.currentTimeMillis() + 108000000L);
    try {
        jobClient(token).startJob(jobId, token, status, desc, initProgress, dateFormat.format(estimatedCompletion));
    } catch (JsonClientException | IOException | AuthException e) {
        // best effort: log and continue without job tracking
        e.printStackTrace();
    }
}
/**
 * Advances the job by one progress step with the given status message and
 * a refreshed completion estimate. Failures are logged and swallowed
 * (job tracking is best effort).
 *
 * @param jobId  job identifier
 * @param status new status message
 * @param token  auth token
 */
protected static void updateJobProgress (String jobId, String status, String token){
    try {
        // Fixed: the original mutated the shared static 'date', accumulating
        // drift across calls; compute the estimate from "now" instead.
        Date estimatedCompletion = new Date(System.currentTimeMillis() + 1000000L);
        jobClient(token).updateJobProgress(jobId, token, status, 1L, dateFormat.format(estimatedCompletion));
    } catch (JsonClientException | IOException | AuthException e) {
        // best effort: log and continue
        e.printStackTrace();
    }
}
/**
 * Marks the job complete in the job-state service, recording the saved
 * result object as "wsId/objectId". Failures are logged and swallowed
 * (job tracking is best effort).
 *
 * @param jobId    job identifier
 * @param wsId     workspace holding the result object
 * @param objectId id of the saved result object
 * @param token    auth token
 */
protected static void finishJob (String jobId, String wsId, String objectId, String token){
    try {
        String status = "Finished";
        String error = null;
        Results res = new Results();
        List<String> workspaceIds = new ArrayList<String>();
        workspaceIds.add(wsId + "/" + objectId);
        res.setWorkspaceids(workspaceIds);
        jobClient(token).completeJob(jobId, token, status, error, res);
    } catch (JsonClientException | IOException | AuthException e) {
        // multi-catch replaces three identical catch blocks; best effort
        e.printStackTrace();
    }
}
/**
 * Allocates a fresh KBase id of the form "kb|&lt;prefix&gt;.&lt;n&gt;" for the given
 * entity type by requesting a one-id range from the ID server. Unknown
 * entity types are reported to stdout and yield null.
 *
 * @param entityType one of the known entity type names
 * @return the allocated id, or null for an unknown type
 * @throws Exception if the ID server call fails
 */
protected static String getKbaseId(String entityType) throws Exception {
    URL idServerUrl = new URL(ID_SERVICE_URL);
    IDServerAPIClient idClient = new IDServerAPIClient(idServerUrl);
    String prefix;
    switch (entityType) {
        case "CmonkeyRunResult":
            prefix = "cmonkeyrunresult";
            break;
        case "CmonkeyNetwork":
            prefix = "cmonkeynetwork";
            break;
        case "CmonkeyCluster":
            prefix = "cmonkeycluster";
            break;
        case "CmonkeyMotif":
            prefix = "cmonkeymotif";
            break;
        case "MastHit":
            prefix = "masthit";
            break;
        case "ExpressionDataSeries":
            prefix = "expressiondataseries";
            break;
        default:
            System.out.println("ID requested for unknown type " + entityType);
            return null;
    }
    return "kb|" + prefix + "." + idClient.allocateIdRange(prefix, 1L).toString();
}
/**
 * Determines the organism for an expression data series by resolving its
 * gene ids through Microbesonline, and verifies that all genes belong to
 * the same organism.
 *
 * @param series the expression data series to inspect
 * @return the organism name shared by all resolvable genes
 * @throws Exception if no gene resolves to an organism, or if genes
 *                   resolve to different organisms
 */
protected static String getOrganismName (ExpressionDataSeries series) throws Exception {
    String organismName = null;
    Microbesonline microbesonline = new Microbesonline();
    List<String> geneNames = new ArrayList<String>();
    for(ExpressionDataSample set:series.getSamples()){
        for(ExpressionDataPoint point:set.getPoints()){
            geneNames.add(point.getGeneId());
        }
    }
    // de-duplicate gene ids
    geneNames = new ArrayList<String>(new HashSet<String>(geneNames));
    // take the first gene that resolves; lookup failures are logged and skipped
    for (String geneName : geneNames) {
        try {
            organismName = microbesonline.getGenomeForGene(geneName);
        } catch (Exception e) {
            System.out.println(geneName);
            e.printStackTrace();
        }
        if (organismName != null) {
            break;
        }
    }
    if (organismName == null) {
        throw new Exception("Organism name cannot be identified");
    }
    // Fixed: the original called getGenomeForGene twice per gene here
    // (one remote lookup for the null check, another for the comparison);
    // resolve each gene once instead.
    for (String geneName : geneNames) {
        String genome = microbesonline.getGenomeForGene(geneName);
        if (genome != null && !genome.equals(organismName)) {
            throw new Exception("Genes in input data series belong to different organisms");
        }
    }
    return organismName;
}
protected static String getKeggCode(String organismName) {
String result = null;
try {
String line = null;
BufferedReader br = new BufferedReader(new FileReader(DATA_PATH));
while ((line = br.readLine()) != null) {
if ((line.equals("")) || (line.matches("
//do nothing
} else{
String[] fields = line.split("\t");
if (fields[3].equals(organismName)){
result = fields[1];
}
}
}
br.close();
} catch (IOException e) {
System.out.println(e.getLocalizedMessage());
}
return result;
}
protected static String getInputTable(ExpressionDataSeries series){
String result = "GENE";
List<HashMap<String, Double>> dataCollection = new ArrayList<HashMap<String, Double>>();
//make list of conditions
for(ExpressionDataSample sample:series.getSamples()){
result+="\t"+sample.getId();
HashMap<String, Double> dataSet= new HashMap<String, Double>();
for (ExpressionDataPoint point:sample.getPoints()){
dataSet.put(point.getGeneId(), point.getExpressionValue());
}
dataCollection.add(dataSet);
}
//make list of genes
List<String> geneNames=new ArrayList<String>();
for(ExpressionDataSample sample:series.getSamples()){
for(ExpressionDataPoint point:sample.getPoints()){
geneNames.add(point.getGeneId());
}
}
List<String> uniqueGeneNames = new ArrayList<String>(new HashSet<String>(geneNames));
for(String geneName:uniqueGeneNames){
result+="\n"+geneName;
DecimalFormat df = new DecimalFormat("0.000");
for (HashMap<String, Double> dataSetHashmap: dataCollection){
if (dataSetHashmap.containsKey(geneName)){
if (dataSetHashmap.get(geneName).toString().matches("-.*")){
result+="\t"+ df.format(dataSetHashmap.get(geneName));
} else {
result+="\t "+ df.format(dataSetHashmap.get(geneName));
}
} else {
result+="\tNA";
}
}
}
result+="\n";
return result;
}
protected static void writeInputFile (String inputFileName, String input){
BufferedWriter writer = null;
try {
writer = new BufferedWriter(new FileWriter(inputFileName));
writer.write(input);
} catch (IOException e) {
System.out.println(e.getLocalizedMessage());
} finally {
try {
if (writer != null)
writer.close();
} catch (IOException e) {
System.out.println(e.getLocalizedMessage());
}
}
}
protected static Integer executeCommand(String commandLine, String jobPath) throws InterruptedException {
Integer exitVal = executeCommand (commandLine, jobPath, null, null);
return exitVal;
}
protected static Integer executeCommand(String commandLine, String jobPath, String jobId, String token) throws InterruptedException {
Integer exitVal = null;
try {
Process p = Runtime.getRuntime().exec(commandLine, null, new File(CMONKEY_DIR));
StreamGobbler errorGobbler = new StreamGobbler(p.getErrorStream(), "ERROR", jobId, token, jobPath+"errorlog.txt");
// any output?
StreamGobbler outputGobbler = new StreamGobbler(p.getInputStream(), "OUTPUT", jobId, token, jobPath+"out.txt");
// kick them off
errorGobbler.start();
outputGobbler.start();
// any error???
exitVal = p.waitFor();
System.out.println("ExitValue: " + exitVal);
} catch (IOException e) {
System.out.println(e.getLocalizedMessage());
} finally {
}
return exitVal;
}
protected static void parseCmonkeySql(String sqlFile, CmonkeyRunResult cmonkeyRunResult) throws Exception{
CmonkeySqlite database = new CmonkeySqlite(sqlFile);
database.buildCmonkeyRunResult(cmonkeyRunResult);
database.disconnect();
}
protected static void saveObjectToWorkspace (UObject object, String type, String workspaceName, String id, String token) throws Exception {
ObjectData objectData = UObject.transformObjectToObject(object, ObjectData.class);
SaveObjectParams params = new SaveObjectParams();
params.setAuth(token);
params.setCompressed(0L);
params.setData(objectData);
params.setId(id);
params.setJson(0L);
params.setType(type);
Map<String, String> metadata = new HashMap<String, String>();
params.setMetadata(metadata);
params.setWorkspace(workspaceName);
Tuple11<String, String, String, Long, String, String, String, String, String, String, Map<String,String>> ret = wsClient(token).saveObject(params);
System.out.println("Saving object:");
System.out.println(ret.getE1());
/* System.out.println(ret.getE2());
System.out.println(ret.getE3());
System.out.println(ret.getE4());
System.out.println(ret.getE5());
System.out.println(ret.getE6());
System.out.println(ret.getE7());
System.out.println(ret.getE8());
System.out.println(ret.getE9());
System.out.println(ret.getE10());
System.out.println(ret.getE11());
*/
}
}
|
package net.java.sip.communicator.impl.gui.lookandfeel;
import java.util.*;
import java.util.List;
import java.awt.*;
import javax.swing.*;
import javax.swing.plaf.*;
import javax.swing.plaf.metal.*;
import net.java.sip.communicator.impl.gui.utils.*;
/**
* SipCommunicator default theme.
*
* @author Yana Stamcheva
*/
public class SIPCommDefaultTheme
    extends DefaultMetalTheme
{
    // Theme palette. All colors are UIResource instances so the look-and-feel
    // can distinguish them from application-set colors.
    private static final ColorUIResource DARK_BLUE
        = new ColorUIResource(0x3f6296);

    private static final ColorUIResource DARK_GRAY
        = new ColorUIResource(0x474747);

    private static final ColorUIResource LIGHT_BLUE
        = new ColorUIResource(0x96A9C6);

    private static final ColorUIResource LIGHT_BLUE_GRAY
        = new ColorUIResource(0xD3DDED);

    private static final ColorUIResource LIGHT_GRAY
        = new ColorUIResource(0xEEEEEE);

    private static final ColorUIResource GRAY
        = new ColorUIResource(0xC0C0C0);

    private static final ColorUIResource VERY_LIGHT_GRAY
        = new ColorUIResource(0xFAFAFA);

    private static final ColorUIResource CONTROL_TEXT_COLOR
        = new ColorUIResource(0x333333);

    private static final ColorUIResource INACTIVE_CONTROL_TEXT_COLOR
        = new ColorUIResource(0x999999);

    private static final ColorUIResource MENU_DISABLED_FOREGROUND
        = new ColorUIResource(0x999999);

    private static final ColorUIResource OCEAN_BLACK
        = new ColorUIResource(0x333333);

    // Single application font used for every text role below.
    private static final FontUIResource BASIC_FONT
        = new FontUIResource(Constants.FONT);

    public SIPCommDefaultTheme() {
    }

    /**
     * Installs theme-specific UI defaults (gradients, borders, icons,
     * selection colors, spacing) on top of the Metal defaults.
     */
    @Override
    public void addCustomEntriesToTable(UIDefaults table) {
        // Gradient descriptors in the format Metal expects:
        // {fraction1, fraction2, color1, color2, color3}.
        // (Typed List<Object> replaces the raw List; Float.valueOf replaces
        // the deprecated Float(float) constructor.)
        List<Object> buttonGradient = Arrays.<Object>asList(
            Float.valueOf(.3f), Float.valueOf(0f),
            DARK_BLUE, getWhite(), getSecondary2());

        List<Object> sliderGradient = Arrays.<Object>asList(
            Float.valueOf(.3f), Float.valueOf(.2f),
            DARK_BLUE, getWhite(), LIGHT_BLUE_GRAY);

        Object textFieldBorder = SIPCommBorders.getTextFieldBorder();

        Object[] defaults = new Object[] {
            "Button.rollover", Boolean.TRUE,

            "CheckBox.rollover", Boolean.TRUE,
            "CheckBox.gradient", buttonGradient,

            "CheckBoxMenuItem.gradient", buttonGradient,

            "Menu.opaque", Boolean.FALSE,
            "MenuBar.border", null,
            "Menu.borderPainted", Boolean.FALSE,
            "Menu.border", textFieldBorder,
            "Menu.selectionBackground", LIGHT_GRAY,
            "Menu.selectionForeground", DARK_GRAY,
            "Menu.margin", new InsetsUIResource(0, 0, 0, 0),

            "MenuItem.borderPainted", Boolean.FALSE,
            "MenuItem.border", textFieldBorder,
            "MenuItem.selectionBackground", LIGHT_GRAY,
            "MenuItem.selectionForeground", DARK_GRAY,

            "CheckBoxMenuItem.borderPainted", Boolean.FALSE,
            "CheckBoxMenuItem.border", textFieldBorder,
            "CheckBoxMenuItem.selectionBackground", LIGHT_GRAY,
            "CheckBoxMenuItem.selectionForeground", DARK_GRAY,

            "InternalFrame.activeTitleGradient", buttonGradient,

            "OptionPane.warningIcon",
            new ImageIcon(ImageLoader.getImage(ImageLoader.WARNING_ICON)),

            "OptionPane.errorIcon",
            new ImageIcon(ImageLoader.getImage(ImageLoader.ERROR_ICON)),

            "OptionPane.infoIcon",
            new ImageIcon(ImageLoader.getImage(ImageLoader.INFO_ICON)),

            "RadioButton.gradient", buttonGradient,
            "RadioButton.rollover", Boolean.TRUE,

            "RadioButtonMenuItem.gradient", buttonGradient,

            "Slider.altTrackColor", new ColorUIResource(0xD2E2EF),
            "Slider.gradient", sliderGradient,
            "Slider.focusGradient", sliderGradient,

            "SplitPane.oneTouchButtonsOpaque", Boolean.FALSE,
            "SplitPane.dividerFocusColor", LIGHT_BLUE_GRAY,
            "SplitPane.dividerSize", Integer.valueOf(5),

            "ScrollBar.width", Integer.valueOf(12),
            "ScrollBar.horizontalThumbIcon",
            ImageLoader.getImage(
                ImageLoader.SCROLLBAR_THUMB_HORIZONTAL),
            "ScrollBar.verticalThumbIcon",
            ImageLoader.getImage(
                ImageLoader.SCROLLBAR_THUMB_VERTICAL),
            "ScrollBar.horizontalThumbHandleIcon",
            ImageLoader.getImage(
                ImageLoader.SCROLLBAR_THUMB_HANDLE_HORIZONTAL),
            "ScrollBar.verticalThumbHandleIcon",
            ImageLoader.getImage(
                ImageLoader.SCROLLBAR_THUMB_HANDLE_VERTICAL),
            "ScrollBar.trackHighlight", GRAY,
            "ScrollBar.highlight", LIGHT_GRAY,
            "ScrollBar.darkShadow", GRAY,

            "TabbedPane.borderHightlightColor", LIGHT_BLUE,
            "TabbedPane.contentBorderInsets", new Insets(2, 2, 3, 3),
            "TabbedPane.selected", LIGHT_GRAY,
            "TabbedPane.tabAreaInsets", new Insets(2, 2, 0, 6),
            "TabbedPane.unselectedBackground", LIGHT_GRAY,

            "TextField.border", textFieldBorder,
            "TextField.margin", new InsetsUIResource(3, 3, 3, 3),

            "PasswordField.border", textFieldBorder,
            "PasswordField.margin", new InsetsUIResource(3, 3, 3, 3),

            "FormattedTextField.border", textFieldBorder,
            "FormattedTextField.margin", new InsetsUIResource(3, 3, 3, 3),

            "Table.gridColor", LIGHT_BLUE_GRAY,
            "Table.background", getDesktopColor(),

            "ToggleButton.gradient", buttonGradient,

            "ToolBar.isRollover", Boolean.TRUE,
            "ToolBar.separatorColor", Constants.LIGHT_GRAY_COLOR,
            "ToolBar.separatorSize", new DimensionUIResource(8, 22),

            "ToolTip.background", LIGHT_GRAY,
            "ToolTip.backgroundInactive", LIGHT_GRAY,
            "ToolTip.hideAccelerator", Boolean.FALSE,

            "TitledBorder.border", SIPCommBorders.getBoldRoundBorder()
        };

        table.putDefaults(defaults);
    }

    /**
     * Overridden to enable picking up the system fonts, if applicable.
     * NOTE(review): MetalTheme.isSystemTheme() is package-private, so from
     * this package this method does NOT actually override it and Swing will
     * not call it — confirm whether it is still referenced anywhere.
     */
    boolean isSystemTheme() {
        return true;
    }

    @Override
    public String getName() {
        return "SipCommunicator";
    }

    // Metal primary/secondary color roles, mapped onto our palette.
    @Override
    protected ColorUIResource getPrimary1() {
        return DARK_BLUE;
    }

    @Override
    protected ColorUIResource getPrimary2() {
        return LIGHT_BLUE;
    }

    @Override
    protected ColorUIResource getPrimary3() {
        return GRAY;
    }

    @Override
    protected ColorUIResource getSecondary1() {
        return DARK_BLUE;
    }

    @Override
    protected ColorUIResource getSecondary2() {
        return GRAY;
    }

    @Override
    protected ColorUIResource getSecondary3() {
        return LIGHT_GRAY;
    }

    @Override
    protected ColorUIResource getBlack() {
        return OCEAN_BLACK;
    }

    @Override
    public ColorUIResource getDesktopColor() {
        return VERY_LIGHT_GRAY;
    }

    @Override
    public ColorUIResource getWindowBackground() {
        return getWhite();
    }

    @Override
    public ColorUIResource getControl(){
        return VERY_LIGHT_GRAY;
    }

    @Override
    public ColorUIResource getMenuBackground(){
        return VERY_LIGHT_GRAY;
    }

    @Override
    public ColorUIResource getInactiveControlTextColor() {
        return INACTIVE_CONTROL_TEXT_COLOR;
    }

    @Override
    public ColorUIResource getControlTextColor() {
        return CONTROL_TEXT_COLOR;
    }

    @Override
    public ColorUIResource getMenuDisabledForeground() {
        return MENU_DISABLED_FOREGROUND;
    }

    // All text roles share the single application font.
    @Override
    public FontUIResource getControlTextFont() {
        return BASIC_FONT;
    }

    @Override
    public FontUIResource getSystemTextFont() {
        return BASIC_FONT;
    }

    @Override
    public FontUIResource getUserTextFont() {
        return BASIC_FONT;
    }

    @Override
    public FontUIResource getMenuTextFont() {
        return BASIC_FONT;
    }

    @Override
    public FontUIResource getWindowTitleFont() {
        return BASIC_FONT;
    }

    @Override
    public FontUIResource getSubTextFont() {
        return BASIC_FONT;
    }
}
|
package us.kbase.scripts;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.text.MessageFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.SerializationConfig.Feature;
import org.kohsuke.args4j.Argument;
import org.kohsuke.args4j.CmdLineException;
import org.kohsuke.args4j.CmdLineParser;
import org.kohsuke.args4j.Option;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import us.kbase.scripts.util.ProcessHelper;
import com.googlecode.jsonschema2pojo.DefaultGenerationConfig;
import com.googlecode.jsonschema2pojo.Jackson1Annotator;
import com.googlecode.jsonschema2pojo.SchemaGenerator;
import com.googlecode.jsonschema2pojo.SchemaMapper;
import com.googlecode.jsonschema2pojo.SchemaStore;
import com.googlecode.jsonschema2pojo.rules.Rule;
import com.googlecode.jsonschema2pojo.rules.RuleFactory;
import com.sun.codemodel.JCodeModel;
import com.sun.codemodel.JPackage;
import com.sun.codemodel.JType;
public class JavaTypeGenerator {
    // Characters treated as word delimiters when converting spec property
    // names into Java camel-case identifiers.
    private static final char[] propWordDelim = {'_', '-'};
    // Package that receives the generated utility classes (TupleN etc.).
    private static final String utilPackage = "us.kbase";

    // Keys used in the map of user-edited code regions recovered from a
    // previously generated server class (see parsePrevCode/checkMatch).
    private static final String HEADER = "HEADER";
    private static final String CLSHEADER = "CLASS_HEADER";
    private static final String CONSTRUCTOR = "CONSTRUCTOR";
    // Per-method keys are METHOD_<funcName>.
    private static final String METHOD = "METHOD_";

    // Patterns capturing the text between BEGIN/END markers in previously
    // generated sources, so hand-written code survives regeneration.
    private static final Pattern PAT_HEADER = Pattern.compile(
        ".*//BEGIN_HEADER\n(.*)//END_HEADER\n.*", Pattern.DOTALL);
    private static final Pattern PAT_CLASS_HEADER = Pattern.compile(
        ".*//BEGIN_CLASS_HEADER\n(.*) //END_CLASS_HEADER\n.*", Pattern.DOTALL);
    private static final Pattern PAT_CONSTRUCTOR = Pattern.compile(
        ".*//BEGIN_CONSTRUCTOR\n(.*) //END_CONSTRUCTOR\n.*", Pattern.DOTALL);
public static void main(String[] args) throws Exception {
Args parsedArgs = new Args();
CmdLineParser parser = new CmdLineParser(parsedArgs);
parser.setUsageWidth(85);
try {
parser.parseArgument(args);
} catch( CmdLineException e ) {
String message = e.getMessage();
showUsage(parser, message);
return;
}
File inputFile = parsedArgs.specFile;
File tempDir = parsedArgs.tempDir == null ? inputFile.getAbsoluteFile().getParentFile() : new File(parsedArgs.tempDir);
boolean deleteTempDir = false;
if (!tempDir.exists()) {
tempDir.mkdir();
deleteTempDir = true;
}
File srcOutDir = null;
String packageParent = parsedArgs.packageParent;
File libDir = null;
if (parsedArgs.outputDir == null) {
if (parsedArgs.srcDir == null) {
showUsage(parser, "Either -o or -s parameter should be defined");
return;
}
srcOutDir = new File(parsedArgs.srcDir);
libDir = parsedArgs.libDir == null ? null : new File(parsedArgs.libDir);
} else {
srcOutDir = new File(parsedArgs.outputDir, "src");
libDir = new File(parsedArgs.outputDir, "lib");
}
boolean createServer = parsedArgs.createServerSide;
processSpec(inputFile, tempDir, srcOutDir, packageParent, createServer, libDir, parsedArgs.gwtPackage);
if (deleteTempDir)
tempDir.delete();
}
    // Prints an error message followed by the generated usage help, both to
    // stderr, and returns (the caller decides whether to exit).
    private static void showUsage(CmdLineParser parser, String message) {
        System.err.println(message);
        System.err.println("Usage: <program> [options...] <spec-file>");
        parser.printUsage(System.err);
    }
public static JavaData processSpec(File specFile, File tempDir, File srcOutDir, String packageParent,
boolean createServer, File libOutDir, String gwtPackage) throws Exception {
return processJson(transformSpecToJson(specFile, tempDir), new File(tempDir, "json-schemas"),
srcOutDir, packageParent, createServer, libOutDir, gwtPackage);
}
    // Runs the KBase Perl type compiler (compile_typespec.pl) on the spec file
    // via a generated bash script and returns the resulting jsync JSON parse
    // file. All work happens inside tempDir; intermediates are removed before
    // returning. Throws IllegalStateException when no parse file was produced.
    public static File transformSpecToJson(File specFile, File tempDir) throws Exception {
        File bashFile = new File(tempDir, "comp_server.sh");
        File serverOutDir = new File(tempDir, "server_out");
        serverOutDir.mkdir();
        File specDir = specFile.getAbsoluteFile().getParentFile();
        File retFile = new File(tempDir, "jsync_parsing_file.json");
        File outFile = new File(tempDir, "comp.out");
        File errFile = new File(tempDir, "comp.err");
        List<String> lines = new ArrayList<String>(Arrays.asList("#!/bin/bash"));
        // Make sure the standard KBase runtime/deployment locations are on the
        // relevant environment variables before invoking the compiler.
        checkEnvVar("KB_TOP", lines, "/kb/deployment");
        checkEnvVar("KB_RUNTIME", lines, "/kb/runtime");
        checkEnvVar("PATH", lines, "/kb/runtime/bin", "/kb/deployment/bin");
        checkEnvVar("PERL5LIB", lines, "/kb/deployment/lib");
        // Compiler stdout/stderr are redirected to comp.out / comp.err.
        lines.add("perl /kb/deployment/plbin/compile_typespec.pl --path \"" + specDir.getAbsolutePath() + "\"" +
                " --jsync " + retFile.getName() + " \"" + specFile.getAbsolutePath() + "\" " +
                serverOutDir.getName() + " >" + outFile.getName() + " 2>" + errFile.getName()
                );
        Utils.writeFileLines(lines, bashFile);
        ProcessHelper.cmd("bash", bashFile.getCanonicalPath()).exec(tempDir);
        // The compiler writes the jsync file into the server output dir; copy
        // it up to the expected location if it was produced.
        File jsyncFile = new File(serverOutDir, retFile.getName());
        if (jsyncFile.exists()) {
            Utils.writeFileLines(Utils.readFileLines(jsyncFile), retFile);
        } else {
            // No parse output: surface whatever the compiler wrote to stderr.
            List<String> errLines = Utils.readFileLines(errFile);
            if (errLines.size() > 1 || (errLines.size() == 1 && errLines.get(0).trim().length() > 0)) {
                for (String errLine : errLines)
                    System.err.println(errLine);
            }
        }
        // Clean up all intermediates regardless of success.
        bashFile.delete();
        Utils.deleteRecursively(serverOutDir);
        outFile.delete();
        errFile.delete();
        if (!retFile.exists()) {
            throw new IllegalStateException("Parsing file wasn't created, see error lines above for detailes");
        }
        return retFile;
    }
private static void checkEnvVar(String varName, List<String> shellLines, String... partPath) {
String value = System.getenv(varName);
Set<String> paths = new HashSet<String>();
if (value != null) {
String[] parts = value.split(":");
for (String part : parts)
if (part.trim().length() > 0)
paths.add(part.trim());
}
StringBuilder newValue = new StringBuilder();
for (String path : partPath)
if (!paths.contains(path))
newValue.append(path).append(":");
if (newValue.length() > 0)
shellLines.add("export " + varName + "=" + newValue.append("$").append(varName));
}
public static JavaData processJson(File jsonParsingFile, File jsonSchemaOutDir, File srcOutDir, String packageParent,
boolean createServer, File libOutDir, String gwtPackage) throws Exception {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(Feature.INDENT_OUTPUT, true);
Map<?,?> map = mapper.readValue(jsonParsingFile, Map.class);
JSyncProcessor subst = new JSyncProcessor(map);
List<KbService> srvList = KbService.loadFromMap(map, subst);
JavaData data = prepareDataStructures(srvList);
jsonParsingFile.delete();
outputData(data, jsonSchemaOutDir, srcOutDir, packageParent, createServer, libOutDir, gwtPackage);
return data;
}
    // Converts the parsed KIDL service model into the JavaData structure used
    // by the code generators, collecting every non-primitive type and the
    // arities of all tuple types encountered along the way.
    private static JavaData prepareDataStructures(List<KbService> services) {
        Set<JavaType> nonPrimitiveTypes = new TreeSet<JavaType>();
        JavaData data = new JavaData();
        for (KbService service: services) {
            for (KbModule module : service.getModules()) {
                List<JavaFunc> funcs = new ArrayList<JavaFunc>();
                Set<Integer> tupleTypes = data.getTupleTypes();
                for (KbModuleComp comp : module.getModuleComponents()) {
                    if (comp instanceof KbFuncdef) {
                        String moduleName = module.getModuleName();
                        KbFuncdef func = (KbFuncdef)comp;
                        String funcJavaName = Utils.inCamelCase(func.getName());
                        // Translate each declared parameter into a typed Java parameter.
                        List<JavaFuncParam> params = new ArrayList<JavaFuncParam>();
                        for (KbParameter param : func.getParameters()) {
                            JavaType type = findBasic(param.getType(), module.getModuleName(), nonPrimitiveTypes, tupleTypes);
                            params.add(new JavaFuncParam(param, Utils.inCamelCase(param.getName()), type));
                        }
                        // Return values may be unnamed; the Java name is null then.
                        List<JavaFuncParam> returns = new ArrayList<JavaFuncParam>();
                        for (KbParameter param : func.getReturnType()) {
                            JavaType type = findBasic(param.getType(), module.getModuleName(), nonPrimitiveTypes, tupleTypes);
                            returns.add(new JavaFuncParam(param, param.getName() == null ? null : Utils.inCamelCase(param.getName()), type));
                        }
                        // Multiple return values get wrapped into one synthetic
                        // tuple type of matching arity.
                        JavaType retMultiType = null;
                        if (returns.size() > 1) {
                            List<KbType> subTypes = new ArrayList<KbType>();
                            for (JavaFuncParam retPar : returns)
                                subTypes.add(retPar.getOriginal().getType());
                            KbTuple tuple = new KbTuple(subTypes);
                            retMultiType = new JavaType(null, tuple, moduleName, new ArrayList<KbTypedef>());
                            for (JavaFuncParam retPar : returns)
                                retMultiType.addInternalType(retPar.getType());
                            // Remember the arity so the TupleN helper class gets generated.
                            tupleTypes.add(returns.size());
                        }
                        funcs.add(new JavaFunc(moduleName, func, funcJavaName, params, returns, retMultiType));
                    } else {
                        // Plain typedef: only register the types it references.
                        findBasic((KbTypedef)comp, module.getModuleName(), nonPrimitiveTypes, tupleTypes);
                    }
                }
                data.addModule(module, funcs);
            }
        }
        data.setTypes(nonPrimitiveTypes);
        return data;
    }
    // Drives every code-generation stage for a parsed service definition:
    // POJOs, tuple helpers, the client class, optionally the server class,
    // shared utility classes/libraries and (optionally) GWT transformers.
    private static void outputData(JavaData data, File jsonOutDir, File srcOutDir, String packageParent,
            boolean createServers, File libOutDir, String gwtPackage) throws Exception {
        if (!srcOutDir.exists())
            srcOutDir.mkdirs();
        generatePojos(data, jsonOutDir, srcOutDir, packageParent);
        generateTupleClasses(data,srcOutDir, packageParent);
        generateClientClass(data, srcOutDir, packageParent);
        if (createServers)
            generateServerClass(data, srcOutDir, packageParent);
        checkUtilityClasses(srcOutDir, createServers);
        checkLibs(libOutDir, createServers);
        if (gwtPackage != null) {
            GwtGenerator.generate(data, srcOutDir, gwtPackage);
            // GWT output relies on the GwtTransformer helper being present.
            checkUtilityClass(srcOutDir, "GwtTransformer");
        }
    }
    // Emits one JSON schema per non-primitive type, then runs jsonschema2pojo
    // over those schemas to produce the POJO sources; the schema directory is
    // an intermediate and gets removed afterwards.
    private static void generatePojos(JavaData data, File jsonOutDir,
            File srcOutDir, String packageParent) throws Exception {
        for (JavaType type : data.getTypes()) {
            Set<Integer> tupleTypes = data.getTupleTypes();
            File dir = new File(jsonOutDir, type.getModuleName());
            if (!dir.exists())
                dir.mkdirs();
            File jsonFile = new File(dir, type.getJavaClassName() + ".json");
            writeJsonSchema(jsonFile, packageParent, type, tupleTypes);
        }
        JCodeModel codeModel = new JCodeModel();
        // Generation settings: builder-style setters, '_'/'-' as property word
        // delimiters, and no hashCode/equals/toString/JSR-303 output.
        DefaultGenerationConfig cfg = new DefaultGenerationConfig() {
            @Override
            public char[] getPropertyWordDelimiters() {
                return propWordDelim;
            }
            @Override
            public boolean isIncludeHashcodeAndEquals() {
                return false;
            }
            @Override
            public boolean isIncludeToString() {
                return false;
            }
            @Override
            public boolean isIncludeJsr303Annotations() {
                return false;
            }
            @Override
            public boolean isGenerateBuilders() {
                return true;
            }
        };
        SchemaStore ss = new SchemaStore();
        // Hook our custom object rule into the standard rule set so generated
        // POJOs follow this project's conventions.
        RuleFactory rf = new RuleFactory(cfg, new Jackson1Annotator(), ss) {
            @Override
            public Rule<JPackage, JType> getObjectRule() {
                return new JsonSchemaToPojoCustomObjectRule(this);
            }
        };
        SchemaGenerator sg = new SchemaGenerator();
        SchemaMapper sm = new SchemaMapper(rf, sg);
        for (JavaType type : data.getTypes()) {
            File jsonFile = new File(new File(jsonOutDir, type.getModuleName()), type.getJavaClassName() + ".json");
            URL source = jsonFile.toURI().toURL();
            sm.generate(codeModel, type.getJavaClassName(), "", source);
        }
        codeModel.build(srcOutDir);
        // Schemas were only an intermediate product; clean them up.
        Utils.deleteRecursively(jsonOutDir);
    }
    // Generates a generic TupleN container class (with Jackson any-getter/
    // any-setter support) in the util package for every tuple arity used by
    // the processed specs.
    private static void generateTupleClasses(JavaData data, File srcOutDir, String packageParent) throws Exception {
        Set<Integer> tupleTypes = data.getTupleTypes();
        if (tupleTypes.size() > 0) {
            File utilDir = new File(srcOutDir.getAbsolutePath() + "/" + utilPackage.replace('.', '/'));
            if (!utilDir.exists())
                utilDir.mkdirs();
            for (int tupleType : tupleTypes) {
                if (tupleType < 1)
                    throw new IllegalStateException("Wrong tuple type: " + tupleType);
                File tupleFile = new File(utilDir, "Tuple" + tupleType + ".java");
                // Build the "T1, T2, ..." type-parameter list for this arity.
                StringBuilder sb = new StringBuilder();
                for (int i = 0; i < tupleType; i++) {
                    if (sb.length() > 0)
                        sb.append(", ");
                    sb.append('T').append(i+1);
                }
                // Class preamble and field declarations.
                List<String> classLines = new ArrayList<String>(Arrays.asList(
                    "package " + utilPackage + ";",
                    "",
                    "import java.util.HashMap;",
                    "import java.util.Map;",
                    "import org.codehaus.jackson.annotate.JsonAnyGetter;",
                    "import org.codehaus.jackson.annotate.JsonAnySetter;",
                    "",
                    "public class Tuple" + tupleType + " <" + sb + "> {"
                    ));
                for (int i = 0; i < tupleType; i++) {
                    classLines.add("    private T" + (i + 1) + " e" + (i + 1) + ";");
                }
                classLines.add("    private Map<String, Object> additionalProperties = new HashMap<String, Object>();");
                // Getter, setter and fluent withEN builder for each element.
                for (int i = 0; i < tupleType; i++) {
                    classLines.addAll(Arrays.asList(
                        "",
                        "    public T" + (i + 1) + " getE" + (i + 1) + "() {",
                        "        return e" + (i + 1) + ";",
                        "    }",
                        "",
                        "    public void setE" + (i + 1) + "(T" + (i + 1) + " e" + (i + 1) + ") {",
                        "        this.e" + (i + 1) + " = e" + (i + 1) + ";",
                        "    }",
                        "",
                        "    public Tuple" + tupleType + "<" + sb + "> withE" + (i + 1) + "(T" + (i + 1) + " e" + (i + 1) + ") {",
                        "        this.e" + (i + 1) + " = e" + (i + 1) + ";",
                        "        return this;",
                        "    }"
                        ));
                }
                // Jackson catch-all for unexpected JSON properties.
                classLines.addAll(Arrays.asList(
                    "",
                    "    @JsonAnyGetter",
                    "    public Map<String, Object> getAdditionalProperties() {",
                    "        return this.additionalProperties;",
                    "    }",
                    "",
                    "    @JsonAnySetter",
                    "    public void setAdditionalProperties(String name, Object value) {",
                    "        this.additionalProperties.put(name, value);",
                    "    }",
                    "}"
                    ));
                Utils.writeFileLines(classLines, tupleFile);
            }
        }
    }
private static File getParentSourceDir(File srcOutDir, String packageParent) {
File parentDir = new File(srcOutDir.getAbsolutePath() + "/" + packageParent.replace('.', '/'));
if (!parentDir.exists())
parentDir.mkdirs();
return parentDir;
}
    // Generates one <Module>Client class per module: constructors (with auth
    // variants when any function may use authentication) plus one wrapper
    // method per spec function that marshals arguments through
    // JsonClientCaller.jsonrpcCall.
    private static void generateClientClass(JavaData data, File srcOutDir, String packageParent) throws Exception {
        File parentDir = getParentSourceDir(srcOutDir, packageParent);
        for (JavaModule module : data.getModules()) {
            File moduleDir = new File(parentDir, module.getModuleName());
            if (!moduleDir.exists())
                moduleDir.mkdir();
            // Import holder collects fully qualified names referenced via ref()
            // and later renders the import section.
            JavaImportHolder model = new JavaImportHolder(packageParent + "." + module.getModuleName());
            String clientClassName = Utils.capitalize(module.getModuleName()) + "Client";
            File classFile = new File(moduleDir, clientClassName + ".java");
            String callerClass = model.ref(utilPackage + ".JsonClientCaller");
            // Auth-aware constructors are only emitted when at least one
            // function could use authentication.
            boolean anyAuth = false;
            for (JavaFunc func : module.getFuncs()) {
                if (func.isAuthCouldBeUsed()) {
                    anyAuth = true;
                    break;
                }
            }
            List<String> classLines = new ArrayList<String>(Arrays.asList(
                "public class " + clientClassName + " {",
                "    private " + callerClass + " caller;",
                "",
                "    public " + clientClassName + "(String url) throws " + model.ref("java.net.MalformedURLException") + " {",
                "        caller = new " + callerClass + "(url);",
                "    }"
                ));
            if (anyAuth) {
                classLines.addAll(Arrays.asList(
                    "",
                    "    public " + clientClassName + "(String url, String token) throws " + model.ref("java.net.MalformedURLException") + ", " + model.ref("java.io.IOException") + " {",
                    "        caller = new " + callerClass + "(url, token);",
                    "    }",
                    "",
                    "    public " + clientClassName + "(String url, String user, String password) throws " + model.ref("java.net.MalformedURLException") + " {",
                    "        caller = new " + callerClass + "(url, user, password);",
                    "    }"
                    ));
            }
            if (anyAuth) {
                classLines.addAll(Arrays.asList(
                    "",
                    "    public boolean isAuthAllowedForHttp() {",
                    "        return caller.isAuthAllowedForHttp();",
                    "    }",
                    "",
                    "    public void setAuthAllowedForHttp(boolean isAuthAllowedForHttp) {",
                    "        caller.setAuthAllowedForHttp(isAuthAllowedForHttp);",
                    "    }"
                    ));
            }
            for (JavaFunc func : module.getFuncs()) {
                // Determine the wrapper's Java return type: single return value,
                // synthetic tuple for multiple values, or void for none.
                JavaType retType = null;
                if (func.getRetMultyType() == null) {
                    if (func.getReturns().size() > 0) {
                        retType = func.getReturns().get(0).getType();
                    }
                } else {
                    retType = func.getRetMultyType();
                }
                // Render the formal parameter list.
                StringBuilder funcParams = new StringBuilder();
                for (JavaFuncParam param : func.getParams()) {
                    if (funcParams.length() > 0)
                        funcParams.append(", ");
                    funcParams.append(getJType(param.getType(), packageParent, model)).append(" ").append(param.getJavaName());
                }
                String retTypeName = retType == null ? "void" : getJType(retType, packageParent, model);
                String listClass = model.ref("java.util.List");
                String arrayListClass = model.ref("java.util.ArrayList");
                classLines.add("");
                classLines.add("    public " + retTypeName + " " + func.getJavaName() + "(" + funcParams + ") throws Exception {");
                classLines.add("        " + listClass + "<Object> args = new " + arrayListClass + "<Object>();");
                for (JavaFuncParam param : func.getParams()) {
                    classLines.add("        args.add(" + param.getJavaName() + ");");
                }
                String typeReferenceClass = model.ref("org.codehaus.jackson.type.TypeReference");
                boolean authRequired = func.isAuthRequired();
                boolean needRet = retType != null;
                // Single/no return values travel wrapped in a JSON list, so the
                // TypeReference is List<T> and element 0 is unwrapped; a multi-
                // value (tuple) return is used as-is.
                if (func.getRetMultyType() == null) {
                    if (retType == null) {
                        String trFull = typeReferenceClass + "<Object>";
                        classLines.addAll(Arrays.asList(
                            "        " + trFull + " retType = new " + trFull + "() {};",
                            "        caller.jsonrpcCall(\"" + module.getOriginal().getModuleName() + "." + func.getOriginal().getName() + "\", args, retType, " + needRet + ", " + authRequired + ");",
                            "    }"
                            ));
                    } else {
                        String trFull = typeReferenceClass + "<" + listClass + "<" + retTypeName + ">>";
                        classLines.addAll(Arrays.asList(
                            "        " + trFull + " retType = new " + trFull + "() {};",
                            "        " + listClass + "<" + retTypeName + "> res = caller.jsonrpcCall(\"" + module.getOriginal().getModuleName() + "." + func.getOriginal().getName() + "\", args, retType, " + needRet + ", " + authRequired + ");",
                            "        return res.get(0);",
                            "    }"
                            ));
                    }
                } else {
                    String trFull = typeReferenceClass + "<" + retTypeName + ">";
                    classLines.addAll(Arrays.asList(
                        "        " + trFull + " retType = new " + trFull + "() {};",
                        "        " + retTypeName + " res = caller.jsonrpcCall(\"" + module.getOriginal().getModuleName() + "." + func.getOriginal().getName() + "\", args, retType, " + needRet + ", " + authRequired + ");",
                        "        return res;",
                        "    }"
                        ));
                }
            }
            classLines.add("}");
            // Package declaration and collected imports are prepended last,
            // once every referenced type has been registered in the holder.
            List<String> headerLines = new ArrayList<String>(Arrays.asList(
                "package " + packageParent + "." + module.getModuleName() + ";",
                ""
                ));
            headerLines.addAll(model.generateImports());
            headerLines.add("");
            classLines.addAll(0, headerLines);
            Utils.writeFileLines(classLines, classFile);
        }
    }
private static String backupExtension() {
String ret = ".bak-";
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
return ret + sdf.format(new Date());
}
private static void checkMatch(HashMap<String, String> code, Pattern matcher,
String oldserver, String codekey, String errortype, boolean exceptOnFail)
throws ParseException {
Matcher m = matcher.matcher(oldserver);
if (!m.matches()) {
if (exceptOnFail) {
throw new ParseException("Missing " + errortype +
" in original file", 0);
} else {
return;
}
}
code.put(codekey, m.group(1));
}
private static HashMap<String, String> parsePrevCode(File classFile, List<JavaFunc> funcs)
throws IOException, ParseException {
HashMap<String, String> code = new HashMap<String, String>();
if (!classFile.exists()) {
code.put(HEADER, "");
code.put(CLSHEADER, "");
code.put(CONSTRUCTOR, "");
return code;
}
File backup = new File(classFile.getAbsoluteFile() + backupExtension());
FileUtils.copyFile(classFile, backup);
String oldserver = IOUtils.toString(new FileReader(classFile));
checkMatch(code, PAT_HEADER, oldserver, HEADER, "header", true);
checkMatch(code, PAT_CLASS_HEADER, oldserver, CLSHEADER, "class header", true);
checkMatch(code, PAT_CONSTRUCTOR, oldserver, CONSTRUCTOR, "constructor", true);
for (JavaFunc func: funcs) {
String name = func.getOriginal().getName();
Pattern p = Pattern.compile(MessageFormat.format(
".*//BEGIN {0}\n(.*) //END {0}\n.*", name), Pattern.DOTALL);
checkMatch(code, p, oldserver, METHOD + name, "method " + name, false);
}
return code;
}
private static List<String> splitCodeLines(String code) {
LinkedList<String> l = new LinkedList<String>();
if (code.length() == 0) { //returns empty string otherwise
return l;
}
return Arrays.asList(code.split("\n"));
}
/**
 * Generates one <Module>Server servlet class per module in the parsed spec.
 * Hand-written regions of a previously generated file (header, class header,
 * constructor body and each method body) are recovered via parsePrevCode and
 * re-inserted between the //BEGIN/ //END markers, so user code survives
 * regeneration. The file header (package + imports) is built last, once the
 * import list is complete, and prepended to the class body.
 */
private static void generateServerClass(JavaData data, File srcOutDir, String packageParent) throws Exception {
    File parentDir = getParentSourceDir(srcOutDir, packageParent);
    for (JavaModule module : data.getModules()) {
        File moduleDir = new File(parentDir, module.getModuleName());
        if (!moduleDir.exists())
            moduleDir.mkdir();
        JavaImportHolder model = new JavaImportHolder(packageParent + "." + module.getModuleName());
        String serverClassName = Utils.capitalize(module.getModuleName()) + "Server";
        File classFile = new File(moduleDir, serverClassName + ".java");
        // Salvage user-editable regions of the previous file (also makes a
        // timestamped backup copy).
        HashMap<String, String> originalCode = parsePrevCode(classFile, module.getFuncs());
        List<String> classLines = new ArrayList<String>(Arrays.asList(
                "public class " + serverClassName + " extends " + model.ref(utilPackage + ".JsonServerServlet") + " {",
                " private static final long serialVersionUID = 1L;",
                ""
                ));
        classLines.add(" //BEGIN_CLASS_HEADER");
        classLines.addAll(splitCodeLines(originalCode.get(CLSHEADER)));
        classLines.add(" //END_CLASS_HEADER");
        classLines.add("");
        classLines.add(" public " + serverClassName + "() throws Exception {");
        classLines.add(" //BEGIN_CONSTRUCTOR");
        classLines.addAll(splitCodeLines(originalCode.get(CONSTRUCTOR)));
        classLines.addAll(Arrays.asList(
                " //END_CONSTRUCTOR",
                " }"
                ));
        for (JavaFunc func : module.getFuncs()) {
            // Return type: the single declared return, or the generated tuple
            // type for multi-return functions (RetMultyType).
            JavaType retType = null;
            if (func.getRetMultyType() == null) {
                if (func.getReturns().size() > 0) {
                    retType = func.getReturns().get(0).getType();
                }
            } else {
                retType = func.getRetMultyType();
            }
            // Build the comma-separated parameter list; an auth token is
            // appended as a trailing "authPart" parameter when supported.
            StringBuilder funcParams = new StringBuilder();
            for (JavaFuncParam param : func.getParams()) {
                if (funcParams.length() > 0)
                    funcParams.append(", ");
                funcParams.append(getJType(param.getType(), packageParent, model)).append(" ").append(param.getJavaName());
            }
            if (func.isAuthCouldBeUsed()) {
                if (funcParams.length() > 0)
                    funcParams.append(", ");
                funcParams.append(model.ref(utilPackage + ".auth.AuthUser")).append(" authPart");;
            }
            String retTypeName = retType == null ? "void" : getJType(retType, packageParent, model);
            classLines.add("");
            classLines.add(" @" + model.ref(utilPackage + ".JsonServerMethod") + "(rpc = \"" + module.getOriginal().getModuleName() + "." + func.getOriginal().getName() + "\"" +
                (func.getRetMultyType() == null ? "" : ", tuple = true") + (func.isAuthOptional() ? ", authOptional=true" : "") + ")");
            classLines.add(" public " + retTypeName + " " + func.getJavaName() + "(" + funcParams + ") throws Exception {");
            List<String> funcLines = new LinkedList<String>();
            String name = func.getOriginal().getName();
            if (originalCode.containsKey(METHOD + name)) {
                funcLines.addAll(splitCodeLines(originalCode.get(METHOD + name)));
            }
            funcLines.add(0, " //BEGIN " + name);
            funcLines.add(" //END " + name);
            if (func.getRetMultyType() == null) {
                if (retType == null) {
                    classLines.addAll(funcLines);
                    classLines.add(" }");
                } else {
                    // Single return: declare "ret" before the user region and
                    // return it after.
                    classLines.add(" " + retTypeName + " ret = null;");
                    classLines.addAll(funcLines);
                    classLines.addAll(Arrays.asList(
                            " return ret;",
                            " }"
                            ));
                }
            } else {
                // Multi-return: one local per element (ret1, ret2, ...) that
                // the user region fills, packed into the tuple at the end.
                for (int retPos = 0; retPos < func.getReturns().size(); retPos++) {
                    String retInnerType = getJType(func.getReturns().get(retPos).getType(), packageParent, model);
                    classLines.add(" " + retInnerType + " ret" + (retPos + 1) + " = null;");
                }
                classLines.addAll(funcLines);
                classLines.add(" " + retTypeName + " ret = new " + retTypeName + "();");
                for (int retPos = 0; retPos < func.getReturns().size(); retPos++) {
                    classLines.add(" ret.setE" + (retPos + 1) + "(ret" + (retPos + 1) + ");");
                }
                classLines.add(" return ret;");
                classLines.add(" }");
            }
        }
        // Standalone entry point so the generated servlet can be run directly.
        classLines.addAll(Arrays.asList(
                "",
                " public static void main(String[] args) throws Exception {",
                " if (args.length != 1) {",
                " System.out.println(\"Usage: <program> <server_port>\");",
                " return;",
                " }",
                " new " + serverClassName + "().startupServer(Integer.parseInt(args[0]));",
                " }",
                "}"));
        // Header built last: the import list is only complete after all
        // method signatures have registered their referenced types.
        List<String> headerLines = new ArrayList<String>(Arrays.asList(
                "package " + packageParent + "." + module.getModuleName() + ";",
                ""
                ));
        headerLines.addAll(model.generateImports());
        headerLines.add("");
        headerLines.add("//BEGIN_HEADER");
        headerLines.addAll(splitCodeLines(originalCode.get(HEADER)));
        headerLines.add("//END_HEADER");
        headerLines.add("");
        classLines.addAll(0, headerLines);
        Utils.writeFileLines(classLines, classFile);
    }
}
/**
 * Copies the bundled runtime support classes into the generated source tree.
 * Server-side helpers are only emitted when server generation is requested.
 */
private static void checkUtilityClasses(File srcOutDir, boolean createServers) throws Exception {
    for (String name : new String[] {"JsonClientCaller", "JacksonTupleModule", "UObject"}) {
        checkUtilityClass(srcOutDir, name);
    }
    if (createServers) {
        for (String name : new String[] {"JsonServerMethod", "JsonServerServlet"}) {
            checkUtilityClass(srcOutDir, name);
        }
    }
}
/**
 * Writes the template for one utility class into the util package directory,
 * creating the directory when needed. The file is always (re)written so the
 * utilities stay in sync with the generator.
 */
private static void checkUtilityClass(File srcOutDir, String className) throws Exception {
    File utilDir = new File(srcOutDir.getAbsolutePath() + "/" + utilPackage.replace('.', '/'));
    if (!utilDir.exists()) {
        utilDir.mkdirs();
    }
    File target = new File(utilDir, className + ".java");
    Utils.writeFileLines(Utils.readStreamLines(JavaTypeGenerator.class.getResourceAsStream(
            className + ".java.properties")), target);
}
/**
 * Installs the bundled client (and optionally server) jar dependencies into
 * the library output folder. A null folder disables library installation.
 */
private static void checkLibs(File libOutDir, boolean createServers) throws Exception {
    if (libOutDir == null) {
        return;
    }
    if (!libOutDir.exists()) {
        libOutDir.mkdirs();
    }
    for (String lib : new String[] {"jackson-all-1.9.11", "kbase-auth",
            "bcpkix-jdk15on-147", "bcprov-ext-jdk15on-147"}) {
        checkLib(libOutDir, lib);
    }
    if (createServers) {
        for (String lib : new String[] {"servlet-api-2.5", "jetty-all-7.0.0", "ini4j-0.5.2"}) {
            checkLib(libOutDir, lib);
        }
    }
}
/**
 * Copies one bundled jar (stored as a classpath resource with a ".properties"
 * suffix) into the library folder.
 *
 * @throws IllegalStateException if the jar resource is missing from the classpath
 */
private static void checkLib(File libDir, String libName) throws Exception {
    String libFileName = libName + ".jar";
    InputStream is = JavaTypeGenerator.class.getResourceAsStream(libFileName + ".properties");
    if (is == null) {
        // Fail fast with a clear message instead of an NPE during the copy.
        throw new IllegalStateException("Resource not found: " + libFileName + ".properties");
    }
    try {
        OutputStream os = new FileOutputStream(new File(libDir, libFileName));
        try {
            Utils.copyStreams(is, os);
        } finally {
            os.close();  // both streams were previously leaked
        }
    } finally {
        is.close();
    }
}
private static void writeJsonSchema(File jsonFile, String packageParent, JavaType type,
Set<Integer> tupleTypes) throws Exception {
LinkedHashMap<String, Object> tree = new LinkedHashMap<String, Object>();
tree.put("$schema", "http://json-schema.org/draft-04/schema
tree.put("id", type.getModuleName() + "." + type.getJavaClassName());
tree.put("description", type.getComment());
tree.put("type", "object");
tree.put("javaType", packageParent + "." + type.getModuleName() + "." + type.getJavaClassName());
if (type.getMainType() instanceof KbMapping) {
JavaType firstInternal = type.getInternalTypes().get(0);
if (!firstInternal.getJavaClassName().equals("String"))
throw new IllegalStateException("Type [" + firstInternal.getOriginalTypeName() + "] " +
"can not be used as map key type");
JavaType subType = type.getInternalTypes().get(1);
LinkedHashMap<String, Object> typeTree = createJsonRefTypeTree(type.getModuleName(), subType,
null, false, packageParent, tupleTypes);
tree.put("additionalProperties", typeTree);
throw new IllegalStateException();
} else {
LinkedHashMap<String, Object> props = new LinkedHashMap<String, Object>();
for (int itemPos = 0; itemPos < type.getInternalTypes().size(); itemPos++) {
JavaType iType = type.getInternalTypes().get(itemPos);
String field = type.getInternalFields().get(itemPos);
props.put(field, createJsonRefTypeTree(type.getModuleName(), iType,
type.getInternalComment(itemPos), false, packageParent, tupleTypes));
}
tree.put("properties", props);
tree.put("additionalProperties", true);
}
ObjectMapper mapper = new ObjectMapper();
mapper.configure(Feature.INDENT_OUTPUT, true);
mapper.writeValue(jsonFile, tree);
}
/**
 * Builds the json-schema fragment that references or inlines one type.
 * Inside a generic type parameter (insideTypeParam == true) the fragment
 * names the java type directly via "javaType"/"javaTypeParams"; at the top
 * level it uses "$ref" for generated classes and plain json types for
 * scalars/lists. Every tuple arity encountered is added to tupleTypes so the
 * corresponding TupleN helper class gets generated.
 */
private static LinkedHashMap<String, Object> createJsonRefTypeTree(String module, JavaType type, String comment,
        boolean insideTypeParam, String packageParent, Set<Integer> tupleTypes) {
    LinkedHashMap<String, Object> typeTree = new LinkedHashMap<String, Object>();
    if (comment != null && comment.trim().length() > 0)
        typeTree.put("description", comment);
    if (type.needClassGeneration()) {
        if (insideTypeParam) {
            // Generic argument position: name the generated class directly.
            typeTree.put("type", "object");
            typeTree.put("javaType", packageParent + "." + type.getModuleName() + "." + type.getJavaClassName());
        } else {
            // Otherwise reference the class's own schema file, stepping up one
            // directory when it lives in a different module.
            String modulePrefix = type.getModuleName().equals(module) ? "" : ("../" + type.getModuleName() + "/");
            typeTree.put("$ref", modulePrefix + type.getJavaClassName() + ".json");
        }
    } else if (type.getMainType() instanceof KbScalar) {
        if (insideTypeParam) {
            typeTree.put("type", "object");
            typeTree.put("javaType", ((KbScalar)type.getMainType()).getJavaStyleName());
        } else {
            typeTree.put("type", ((KbScalar)type.getMainType()).getJsonStyleName());
        }
    } else if (type.getMainType() instanceof KbList) {
        LinkedHashMap<String, Object> subType = createJsonRefTypeTree(module, type.getInternalTypes().get(0), null,
                insideTypeParam, packageParent, tupleTypes);
        if (insideTypeParam) {
            typeTree.put("type", "object");
            typeTree.put("javaType", "java.util.List");
            typeTree.put("javaTypeParams", subType);
        } else {
            typeTree.put("type", "array");
            typeTree.put("items", subType);
        }
    } else if (type.getMainType() instanceof KbMapping) {
        // Maps always render as java.util.Map with key/value type parameters.
        typeTree.put("type", "object");
        typeTree.put("javaType", "java.util.Map");
        List<LinkedHashMap<String, Object>> subList = new ArrayList<LinkedHashMap<String, Object>>();
        for (JavaType iType : type.getInternalTypes())
            subList.add(createJsonRefTypeTree(module, iType, null, true, packageParent, tupleTypes));
        typeTree.put("javaTypeParams", subList);
    } else if (type.getMainType() instanceof KbTuple) {
        typeTree.put("type", "object");
        int tupleType = type.getInternalTypes().size();
        if (tupleType < 1)
            throw new IllegalStateException("Wrong count of tuple parameters: " + tupleType);
        typeTree.put("javaType", utilPackage + ".Tuple" + tupleType);
        // Remember the arity so the TupleN support class is generated.
        tupleTypes.add(tupleType);
        List<LinkedHashMap<String, Object>> subList = new ArrayList<LinkedHashMap<String, Object>>();
        for (JavaType iType : type.getInternalTypes())
            subList.add(createJsonRefTypeTree(module, iType, null, true, packageParent, tupleTypes));
        typeTree.put("javaTypeParams", subList);
    } else if (type.getMainType() instanceof KbUnspecifiedObject) {
        typeTree.put("type", "object");
        typeTree.put("javaType", "java.lang.Object");
    } else {
        throw new IllegalStateException("Unknown type: " + type.getMainType().getClass().getName());
    }
    return typeTree;
}
/**
 * Convenience overload: resolves {@code type} (following typedef aliases)
 * relative to {@code moduleName} with a fresh alias chain.
 */
private static JavaType findBasic(KbType type, String moduleName, Set<JavaType> nonPrimitiveTypes, Set<Integer> tupleTypes) {
    return findBasic(null, type, moduleName, null, new ArrayList<KbTypedef>(), nonPrimitiveTypes, tupleTypes);
}
/**
 * Resolves a kidl type to its basic (non-typedef) java representation.
 * Typedefs are followed recursively, accumulating the alias chain in
 * {@code aliases}; container types recurse into their element types with a
 * fresh alias chain. Every resolved type needing its own generated class is
 * collected into {@code nonPrimitiveTypes}, and every tuple arity into
 * {@code tupleTypes}.
 */
private static JavaType findBasic(String typeName, KbType type, String defaultModuleName, String typeModuleName,
        List<KbTypedef> aliases, Set<JavaType> nonPrimitiveTypes, Set<Integer> tupleTypes) {
    if (type instanceof KbBasicType) {
        JavaType ret = new JavaType(typeName, (KbBasicType)type,
                typeModuleName == null ? defaultModuleName : typeModuleName, aliases);
        // Scalars and unspecified objects have no internal types to resolve.
        if (!(type instanceof KbScalar || type instanceof KbUnspecifiedObject))
            if (type instanceof KbStruct) {
                for (KbStructItem item : ((KbStruct)type).getItems()) {
                    ret.addInternalType(findBasic(null, item.getItemType(), defaultModuleName, null,
                            new ArrayList<KbTypedef>(), nonPrimitiveTypes, tupleTypes));
                    ret.addInternalField(item.getName(), "");
                }
            } else if (type instanceof KbList) {
                ret.addInternalType(findBasic(null, ((KbList)type).getElementType(), defaultModuleName, null,
                        new ArrayList<KbTypedef>(), nonPrimitiveTypes, tupleTypes));
            } else if (type instanceof KbMapping) {
                ret.addInternalType(findBasic(null, ((KbMapping)type).getKeyType(), defaultModuleName, null,
                        new ArrayList<KbTypedef>(), nonPrimitiveTypes, tupleTypes));
                ret.addInternalType(findBasic(null, ((KbMapping)type).getValueType(), defaultModuleName, null,
                        new ArrayList<KbTypedef>(), nonPrimitiveTypes, tupleTypes));
            } else if (type instanceof KbTuple) {
                // Record the arity so the TupleN helper class is generated.
                tupleTypes.add(((KbTuple)type).getElementTypes().size());
                for (KbType iType : ((KbTuple)type).getElementTypes())
                    ret.addInternalType(findBasic(null, iType, defaultModuleName, null,
                            new ArrayList<KbTypedef>(), nonPrimitiveTypes, tupleTypes));
            } else {
                throw new IllegalStateException("Unknown basic type: " + type.getClass().getSimpleName());
            }
        if (ret.needClassGeneration())
            nonPrimitiveTypes.add(ret);
        return ret;
    } else {
        // Typedef: remember the alias and resolve what it points at, switching
        // the module context to the typedef's own module.
        KbTypedef typeRef = (KbTypedef)type;
        aliases.add(typeRef);
        return findBasic(typeRef.getName(), typeRef.getAliasType(), defaultModuleName, typeRef.getModule(),
                aliases, nonPrimitiveTypes, tupleTypes);
    }
}
/**
 * Maps a resolved kidl type onto the java type expression used in generated
 * source, registering every referenced class with the import holder.
 *
 * @throws IllegalStateException for kidl types the generator does not know
 */
private static String getJType(JavaType type, String packageParent, JavaImportHolder codeModel) throws Exception {
    KbBasicType basic = type.getMainType();
    if (type.needClassGeneration()) {
        // Generated POJO: referenced inside its module sub-package.
        return codeModel.ref(getPackagePrefix(packageParent, type) + type.getJavaClassName());
    }
    if (basic instanceof KbScalar) {
        return codeModel.ref(((KbScalar) basic).getFullJavaStyleName());
    }
    if (basic instanceof KbList) {
        String element = getJType(type.getInternalTypes().get(0), packageParent, codeModel);
        return codeModel.ref("java.util.List") + "<" + element + ">";
    }
    if (basic instanceof KbMapping) {
        String keyType = getJType(type.getInternalTypes().get(0), packageParent, codeModel);
        String valueType = getJType(type.getInternalTypes().get(1), packageParent, codeModel);
        return codeModel.ref("java.util.Map") + "<" + keyType + "," + valueType + ">";
    }
    if (basic instanceof KbTuple) {
        int paramCount = type.getInternalTypes().size();
        StringBuilder typeArgs = new StringBuilder();
        for (JavaType iType : type.getInternalTypes()) {
            if (typeArgs.length() > 0)
                typeArgs.append(", ");
            typeArgs.append(getJType(iType, packageParent, codeModel));
        }
        return codeModel.ref(utilPackage + "." + "Tuple" + paramCount) + "<" + typeArgs + ">";
    }
    if (basic instanceof KbUnspecifiedObject) {
        return codeModel.ref("java.lang.Object");
    }
    throw new IllegalStateException("Unknown data type: " + basic.getClass().getName());
}
/** Package prefix (with trailing dot) hosting the generated classes of the type's module. */
private static String getPackagePrefix(String packageParent, JavaType type) {
    return new StringBuilder(packageParent).append('.')
            .append(type.getModuleName()).append('.').toString();
}
/**
 * Command-line arguments of the generator, parsed via args4j annotations.
 * Either -o (combined output folder) or -s/-l (separate source and library
 * folders) selects where generated code is written.
 */
public static class Args {
    @Option(name="-o",usage="Output folder (src and lib subfolders will be created), use -s and possibly -l instead of -o for more detailed settings", metaVar="<out-dir>")
    String outputDir;
    @Option(name="-s",usage="Source output folder (exclusive with -o)", metaVar="<src-dir>")
    String srcDir;
    @Option(name="-l",usage="Library output folder (exclusive with -o, not required when using -s)", metaVar="<lib-dir>")
    String libDir;
    @Option(name="-p",usage="Java package parent (module subpackages are created in this package), default value is " + utilPackage, metaVar="<package>")
    String packageParent = utilPackage;
    @Option(name="-t", usage="Temporary folder, default value is parent folder of <spec-file>", metaVar="<tmp-dir>")
    String tempDir;
    @Option(name="-S", usage="Defines whether or not java code for server side should be created, default value is false, use -S for true")
    boolean createServerSide = false;
    @Option(name="-g",usage="Gwt client java package (define it in case you need copies of generated classes for GWT client)", metaVar="<gwtpckg>")
    String gwtPackage = null;
    // The single positional argument: the kidl spec file to compile.
    @Argument(metaVar="<spec-file>",required=true,usage="File *.spec for compilation into java classes")
    File specFile;
}
}
|
package org.pikater.core.ontology.subtrees.batchDescription;
import java.util.ArrayList;
import java.util.List;
import org.pikater.core.ontology.subtrees.batchDescription.durarion.IExpectedDuration;
import org.pikater.core.ontology.subtrees.batchDescription.durarion.LongTermDuration;
import org.pikater.core.ontology.subtrees.batchDescription.durarion.ShortTimeDuration;
import org.pikater.core.ontology.subtrees.newOption.NewOptions;
import org.pikater.core.ontology.subtrees.newOption.base.NewOption;
import org.pikater.core.ontology.subtrees.newOption.base.Value;
import org.pikater.core.ontology.subtrees.newOption.values.IntegerValue;
import org.pikater.core.ontology.subtrees.newOption.values.NullValue;
import org.pikater.core.ontology.subtrees.newOption.values.StringValue;
/**
 * Batch-description ontology element for a single computing agent: the agent
 * type to execute, its user options, an optional id of a pre-trained model
 * (null means a new model is trained), the expected run duration, the
 * evaluation method and the training/testing/validation data slots.
 */
public class ComputingAgent extends DataProcessing implements IDataProvider, IComputingAgent, IErrorProvider {

    private static final long serialVersionUID = 2127755171666013125L;

    private String agentType;
    private List<NewOption> options;
    private Integer model; // null = new model
    private IExpectedDuration duration;
    private EvaluationMethod evaluationMethod;

    private DataSourceDescription trainingData;
    private DataSourceDescription testingData;
    private DataSourceDescription validationData;

    public ComputingAgent() {
        this.options = new ArrayList<NewOption>();
        this.model = null;
        this.duration = new LongTermDuration();
        this.evaluationMethod = new EvaluationMethod();
    }

    /**
     * Copies {@code source} under a new slot input type. Centralizes the copy
     * logic that was previously duplicated in the three data setters and in
     * {@link #exportAllDataSourceDescriptions()}.
     */
    private static DataSourceDescription copyWithInputType(String inputType,
            DataSourceDescription source) {
        DataSourceDescription copy = new DataSourceDescription();
        copy.setDataInputType(inputType);
        copy.setDataOutputType(source.getDataOutputType());
        copy.setDataProvider(source.getDataProvider());
        return copy;
    }

    public String getAgentType() {
        return agentType;
    }

    public void setAgentType(String agentType) {
        this.agentType = agentType;
    }

    public Integer getModel() {
        return model;
    }

    public void setModel(Integer model) {
        this.model = model;
    }

    public IExpectedDuration getDuration() {
        return duration;
    }

    public void setDuration(IExpectedDuration duration) {
        this.duration = duration;
    }

    public DataSourceDescription getTrainingData() {
        return trainingData;
    }

    /** Stores a defensive copy labelled with the "trainingData" input type. */
    public void setTrainingData(DataSourceDescription trainingData) {
        if (trainingData == null) {
            throw new IllegalArgumentException("Argument trainingData can't be null");
        }
        this.trainingData = copyWithInputType("trainingData", trainingData);
    }

    public DataSourceDescription getTestingData() {
        return testingData;
    }

    /** Stores a defensive copy labelled with the "testingData" input type. */
    public void setTestingData(DataSourceDescription testingData) {
        if (testingData == null) {
            throw new IllegalArgumentException("Argument testingData can't be null");
        }
        this.testingData = copyWithInputType("testingData", testingData);
    }

    public DataSourceDescription getValidationData() {
        return validationData;
    }

    /** Stores a defensive copy labelled with the "validationData" input type. */
    public void setValidationData(DataSourceDescription validationData) {
        if (validationData == null) {
            throw new IllegalArgumentException("Argument validationData can't be null");
        }
        this.validationData = copyWithInputType("validationData", validationData);
    }

    public EvaluationMethod getEvaluationMethod() {
        return evaluationMethod;
    }

    public void setEvaluationMethod(EvaluationMethod evaluationMethod) {
        this.evaluationMethod = evaluationMethod;
    }

    public List<NewOption> getOptions() {
        return options;
    }

    public void setOptions(List<NewOption> options) {
        if (options == null) {
            throw new NullPointerException("Argument options can't be null");
        }
        this.options = options;
    }

    public void addOption(NewOption option) {
        if (option == null) {
            throw new NullPointerException("Argument option can't be null");
        }
        this.options.add(option);
    }

    /**
     * Exports the user options plus synthetic "model" and "duration" options
     * that encode this agent's state.
     */
    @Override
    public List<NewOption> exportAllOptions() {
        List<NewOption> result = new ArrayList<NewOption>();
        // model == null is exported as an explicit NullValue ("new model").
        NewOption modelOption;
        if (model == null) {
            modelOption = new NewOption("model", new NullValue());
        } else {
            modelOption = new NewOption("model", model);
        }
        result.add(modelOption);
        // Duration is exported by its concrete class name (e.g. "LongTermDuration").
        NewOption expectedDurationOption = new NewOption(
                "duration", duration.getClass().getSimpleName());
        result.add(expectedDurationOption);
        result.addAll(this.options);
        return result;
    }

    /**
     * Restores state from an exported option list: consumes the synthetic
     * "model" and "duration" options and keeps the remainder as user options.
     * Note: the passed list is modified (synthetic options removed) and then
     * retained as this agent's option list.
     */
    @Override
    public void importAllOptions(List<NewOption> options) {
        NewOptions wrapper = new NewOptions(options);
        // import model (optional; NullValue means "train a new model")
        NewOption optModel = wrapper.getOptionByName("model");
        if (optModel != null) {
            Value value = optModel.toSingleValue();
            if (value.getCurrentValue() instanceof IntegerValue) {
                IntegerValue integerValue = (IntegerValue) value.getCurrentValue();
                this.model = integerValue.getValue();
            } else if (value.getCurrentValue() instanceof NullValue) {
                this.model = null;
            } else {
                throw new IllegalStateException("Option doesn't contain correct type");
            }
        }
        // import duration (mandatory; a missing option fails fast with NPE here)
        NewOption optDuration = wrapper.getOptionByName("duration");
        StringValue valueMethod = (StringValue)
                optDuration.toSingleValue().getCurrentValue();
        if (valueMethod.getValue().equals(LongTermDuration.class.getSimpleName())) {
            this.duration = new LongTermDuration();
        } else if (valueMethod.getValue().equals(ShortTimeDuration.class.getSimpleName())) {
            this.duration = new ShortTimeDuration();
        } else {
            throw new IllegalStateException("Option doesn't contain correct type");
        }
        options.remove(optModel);
        options.remove(optDuration);
        this.options = options;
    }

    /** A computing agent itself produces no error descriptions. */
    @Override
    public List<ErrorDescription> exportAllErrors() {
        return new ArrayList<ErrorDescription>();
    }

    @Override
    public void importAllErrors(List<ErrorDescription> errors) {
        // Only null/empty is acceptable; this element never carries errors.
        if (errors != null && !errors.isEmpty()) {
            throw new IllegalArgumentException("Argument errors can be only null");
        }
    }

    /**
     * Exports the connected data slots (as copies, not the stored instances)
     * plus the evaluation method wrapped as a data source.
     */
    @Override
    public List<DataSourceDescription> exportAllDataSourceDescriptions() {
        List<DataSourceDescription> slots = new ArrayList<DataSourceDescription>();
        if (trainingData != null) {
            slots.add(copyWithInputType("trainingData", trainingData));
        }
        if (testingData != null) {
            slots.add(copyWithInputType("testingData", testingData));
        }
        if (validationData != null) {
            slots.add(copyWithInputType("validationData", validationData));
        }
        if (evaluationMethod != null) {
            // The evaluation method slot has a fixed output type rather than
            // one copied from a source description.
            DataSourceDescription evaluationSlot = new DataSourceDescription();
            evaluationSlot.setDataInputType("evaluationMethod");
            evaluationSlot.setDataOutputType("evaluationMethod");
            evaluationSlot.setDataProvider(evaluationMethod);
            slots.add(evaluationSlot);
        }
        return slots;
    }

    @Override
    public void importAllDataSourceDescriptions(List<DataSourceDescription> dataSourceDescriptions) {
        DataSourceDescriptions descriptions =
                new DataSourceDescriptions(dataSourceDescriptions);
        // NOTE(review): unlike the setters, imported slots are stored directly
        // (no defensive copy, and absent slots become null) — confirm this
        // asymmetry is intended.
        trainingData = descriptions.getDataSourceDescriptionIBynputType("trainingData");
        testingData = descriptions.getDataSourceDescriptionIBynputType("testingData");
        validationData = descriptions.getDataSourceDescriptionIBynputType("validationData");
        evaluationMethod = (EvaluationMethod) descriptions
                .getDataSourceDescriptionIBynputType("evaluationMethod").getDataProvider();
    }

    /** Deep copy of this agent description (options, duration, data slots). */
    public ComputingAgent clone() {
        ComputingAgent copy = new ComputingAgent();
        copy.setId(this.getId());
        copy.setAgentType(this.getAgentType());
        copy.setOptions(new NewOptions(this.options).clone().getOptions());
        copy.setModel(this.model);
        copy.setDuration(this.duration.clone());
        copy.setEvaluationMethod(this.evaluationMethod.clone());
        if (this.trainingData != null) {
            copy.setTrainingData(this.trainingData.clone());
        }
        if (this.testingData != null) {
            copy.setTestingData(this.testingData.clone());
        }
        if (this.validationData != null) {
            copy.setValidationData(this.validationData.clone());
        }
        return copy;
    }
}
|
package at.ac.tuwien.inso.integration_tests;
import static junit.framework.TestCase.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.user;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.model;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.junit.Test;
import org.springframework.test.web.servlet.MvcResult;
/**
 * Integration tests for the lecturer course pages: the course listing for the
 * current semester and the tag JSON endpoint (all tags vs. tags of a single
 * course). Fixtures (user1, tag1..tag5, aseWS2016, expected courses) come
 * from AbstractCoursesTests.
 */
public class LecturerCoursesTests extends AbstractCoursesTests {

    @Test
    public void itListsAllCoursesForCurrentSemesterAndLecturer() throws Exception {
        mockMvc.perform(
                get("/lecturer/courses").with(user(user1))
        ).andExpect(
                model().attribute("allCourses", expectedCoursesForLecturer1)
        );
    }

    @Test
    public void allTagsJsonTest() throws Exception {
        // given 5 tags
        MvcResult result = mockMvc.perform(
                get("/lecturer/courses/json/tags")
                        .with(user(user1))
        ).andExpect(
                status().isOk()
        ).andReturn();

        // the response should contain all these tags
        // (read the body once instead of re-extracting it per assertion)
        String body = result.getResponse().getContentAsString();
        assertTrue(body.contains(tag1.getName()));
        assertTrue(body.contains(tag2.getName()));
        assertTrue(body.contains(tag3.getName()));
        assertTrue(body.contains(tag4.getName()));
        assertTrue(body.contains(tag5.getName()));
    }

    @Test
    public void tagsForCourseJsonTest() throws Exception {
        // given 5 tags and course ase tagged with 2 of them
        aseWS2016.addTags(tag3, tag5);

        MvcResult result = mockMvc.perform(
                get("/lecturer/courses/json/tags")
                        .with(user(user1))
                        .param("courseId", aseWS2016.getId().toString())
        ).andExpect(
                status().isOk()
        ).andReturn();

        // the response should contain only these 2 tags
        String body = result.getResponse().getContentAsString();
        assertFalse(body.contains(tag1.getName()));
        assertFalse(body.contains(tag2.getName()));
        assertTrue(body.contains(tag3.getName()));
        assertFalse(body.contains(tag4.getName()));
        assertTrue(body.contains(tag5.getName()));
    }
}
|
package com.akiban.cserver.store;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.akiban.cserver.service.session.UnitTestServiceManagerFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.akiban.ais.model.AkibaInformationSchema;
import com.akiban.ais.model.Index;
import com.akiban.ais.model.TableName;
import com.akiban.ais.model.UserTable;
import com.akiban.cserver.InvalidOperationException;
import com.akiban.message.ErrorCode;
import com.akiban.util.Strings;
public final class PersistitStoreSchemaManagerTest {
private final static String SCHEMA = "my_schema";
private final static Pattern REGEX = Pattern.compile("CREATE TABLE `(\\w+)`\\.(\\w+)");
private PersistitStore store;
PersistitStoreSchemaManager manager;
@Before
public void setUp() throws Exception {
    // Each test starts against a fresh unit-test store whose schema manager
    // must contain no user tables and no recorded DDL.
    store = UnitTestServiceManagerFactory.getStoreForUnitTests();
    manager = store.getSchemaManager();
    assertEquals("user tables in AIS", 0, manager.getAisCopy().getUserTables().size());
    assertTables("user tables");
    assertDDLS();
}
@After
public void tearDown() throws Exception {
    // Verify the test cleaned up after itself (every test is expected to
    // drop the tables it created); stop the store even if the checks fail.
    try {
        assertEquals("user tables in AIS", 0, manager.getAisCopy().getUserTables().size());
        assertTables("user tables");
        assertDDLS();
    } finally {
        store.stop();
    }
}
/**
 * Attempts to create a table and asserts that the operation produces exactly
 * the expected error code (null means success was expected).
 */
private void createTable(ErrorCode expectedCode, String schema, String ddl) throws Exception {
    ErrorCode observed = null;
    try {
        manager.createTable(schema, ddl);
    } catch (InvalidOperationException e) {
        observed = e.getCode();
    }
    assertEquals("createTable return value", expectedCode, observed);
}
/**
 * Creates a table that is expected to succeed; a failure surfaces as a
 * thrown InvalidOperationException.
 */
private void createTable(String schema, String ddl) throws Exception {
    manager.createTable(schema, ddl);
}
@Test
public void testUtf8Table() throws Exception {
    // UTF-8 columns are rejected regardless of how the charset name is cased.
    createTable(ErrorCode.UNSUPPORTED_CHARSET, SCHEMA,
            "CREATE TABLE myvarchartest(id int key, name varchar(32) character set UTF8) engine=akibandb");
    createTable(ErrorCode.UNSUPPORTED_CHARSET, SCHEMA,
            "CREATE TABLE myvarchartest(id int key, name varchar(32) character set utf8) engine=akibandb");
}
@Test
public void testAddDropOneTable() throws Exception {
    // Creating a table must show up in the user-table list, the recorded DDL
    // statements and the AIS copy, with the PK as the table's single index.
    createTable(SCHEMA, "CREATE TABLE one (id int, PRIMARY KEY (id)) engine=akibandb;");
    assertTables("user tables",
            "CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;");
    assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int , INDEX _akiba_one$PK_1(`one$id`)) engine=akibandb",
            "create database if not exists `my_schema`",
            "use `my_schema`",
            "CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb");
    AkibaInformationSchema ais = manager.getAisCopy();
    assertEquals("ais size", 1, ais.getUserTables().size());
    UserTable table = ais.getUserTable(SCHEMA, "one");
    assertEquals("number of index", 1, table.getIndexes().size());
    Index index = table.getIndexes().iterator().next();
    assertTrue("index isn't primary: " + index, index.isPrimaryKey());
    // Drop again so tearDown's "no user tables" check passes.
    manager.dropTable(SCHEMA, "one");
}
@Test
public void tableWithoutPK() throws Exception {
    // A table without a primary key must be rejected.
    createTable(ErrorCode.NO_PRIMARY_KEY, SCHEMA, "CREATE TABLE one (id int) engine=akibandb;");
}
@Test
public void testSelfReferencingTable() throws Exception {
    // A grouping FK referencing the table being created resolves against an
    // unknown table. NOTE(review): the DDL declares column `self_id` but the
    // FK names `one_id` — presumably irrelevant since join resolution fails
    // first; confirm the mismatch is intentional.
    createTable(ErrorCode.JOIN_TO_UNKNOWN_TABLE, SCHEMA, "CREATE TABLE one (id int, self_id int, PRIMARY KEY (id), " +
            "CONSTRAINT `__akiban_fk_0` FOREIGN KEY `__akiban_fk_a` (`one_id`) REFERENCES one (id) ) engine=akibandb;");
}
@Test
public void noEngineName() throws Exception {
    // A missing "engine=..." clause is accepted; the original DDL is stored
    // verbatim while the internal akiba_objects DDL still names the engine.
    createTable(SCHEMA, "CREATE TABLE zebra( id int key)");
    assertDDLS("create table `akiba_objects`.`_akiba_zebra`(`zebra$id` int , INDEX _akiba_zebra$PK_1(`zebra$id`)) engine=akibandb",
            "create database if not exists `my_schema`",
            "use `my_schema`",
            "CREATE TABLE `my_schema`.zebra( id int key)");
    manager.dropTable(SCHEMA, "zebra");
}
@Test
public void testAddDropTwoTablesTwoGroups() throws Exception {
    // Two unrelated tables form two separate groups; dropping one must leave
    // the other's table and DDL intact.
    createTable(SCHEMA, "CREATE TABLE one (id int, PRIMARY KEY (id)) engine=akibandb;");
    assertTables("user tables",
            "CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;");
    assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int , INDEX _akiba_one$PK_1(`one$id`)) engine=akibandb",
            "create database if not exists `my_schema`",
            "use `my_schema`",
            "CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb");
    createTable(SCHEMA, "CREATE TABLE two (id int, PRIMARY KEY (id)) engine=akibandb;");
    assertTables("user tables",
            "CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;",
            "CREATE TABLE %s.two (id int, PRIMARY KEY (id)) engine=akibandb;");
    assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int , INDEX _akiba_one$PK_1(`one$id`)) engine=akibandb",
            "create table `akiba_objects`.`_akiba_two`(`two$id` int , INDEX _akiba_two$PK_1(`two$id`)) engine=akibandb",
            "create database if not exists `my_schema`",
            "use `my_schema`",
            "CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb",
            "CREATE TABLE `my_schema`.two (id int, PRIMARY KEY (id)) engine=akibandb");
    manager.dropTable(SCHEMA, "one");
    // Only "two" remains after dropping "one".
    assertTables("user tables",
            "CREATE TABLE %s.two (id int, PRIMARY KEY (id)) engine=akibandb;");
    assertDDLS("create table `akiba_objects`.`_akiba_two`(`two$id` int , INDEX _akiba_two$PK_1(`two$id`)) engine=akibandb",
            "create database if not exists `my_schema`",
            "use `my_schema`",
            "CREATE TABLE `my_schema`.two (id int, PRIMARY KEY (id)) engine=akibandb");
    manager.dropTable(SCHEMA, "two");
}
@Test
public void testDropAllTables() throws Exception{
    // After creating two tables, dropAllTables must clear everything (the
    // empty state itself is verified by tearDown).
    createTable(SCHEMA, "CREATE TABLE one (id int, PRIMARY KEY (id)) engine=akibandb;");
    assertTables("user tables",
            "CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;");
    assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int , INDEX _akiba_one$PK_1(`one$id`)) engine=akibandb",
            "create database if not exists `my_schema`",
            "use `my_schema`",
            "CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb");
    createTable(SCHEMA, "CREATE TABLE two (id int, PRIMARY KEY (id)) engine=akibandb;");
    assertTables("user tables",
            "CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;",
            "CREATE TABLE %s.two (id int, PRIMARY KEY (id)) engine=akibandb;");
    assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int , INDEX _akiba_one$PK_1(`one$id`)) engine=akibandb",
            "create table `akiba_objects`.`_akiba_two`(`two$id` int , INDEX _akiba_two$PK_1(`two$id`)) engine=akibandb",
            "create database if not exists `my_schema`",
            "use `my_schema`",
            "CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb",
            "CREATE TABLE `my_schema`.two (id int, PRIMARY KEY (id)) engine=akibandb");
    manager.dropAllTables();
}
/**
 * Parent/child pair joined by an __akiban_fk constraint so both tables land
 * in ONE group (_akiba_one holds columns from both). Also inspects the AIS
 * copy: the child must expose its PRIMARY index and its FK index under the
 * FK's index name (__akiban_fk_a). Finally drops the group's root table.
 */
@Test
public void testAddDropTwoTablesOneGroupDropRoot() throws Exception {
createTable(SCHEMA, "CREATE TABLE one (id int, PRIMARY KEY (id)) engine=akibandb;");
assertTables("user tables",
"CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;");
assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int , INDEX _akiba_one$PK_1(`one$id`)) engine=akibandb",
"create database if not exists `my_schema`",
"use `my_schema`",
"CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb");
// Child joins parent "one" via the akiban-grouping FK; note the FK *index*
// name here is __akiban_fk_a (constraint name is __akiban_fk_0).
createTable(SCHEMA, "CREATE TABLE two (id int, one_id int, PRIMARY KEY (id), " +
"CONSTRAINT `__akiban_fk_0` FOREIGN KEY `__akiban_fk_a` (`one_id`) REFERENCES one (id) ) engine=akibandb;");
assertTables("user tables",
"CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;",
"CREATE TABLE %s.two (id int, one_id int, PRIMARY KEY (id), " +
"CONSTRAINT `__akiban_fk_0` FOREIGN KEY `__akiban_fk_a` (`one_id`) REFERENCES one (id) ) engine=akibandb;");
// Single group table now carries columns of both user tables.
assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int, `two$id` int, `two$one_id` int , "
+"INDEX _akiba_one$PK_1(`one$id`), INDEX _akiba_one$PK_2(`two$id`), INDEX two$__akiban_fk_a(`two$one_id`)) engine=akibandb",
"create database if not exists `my_schema`",
"use `my_schema`",
"CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb",
"CREATE TABLE `my_schema`.two (id int, one_id int, PRIMARY KEY (id), " +
"CONSTRAINT `__akiban_fk_0` FOREIGN KEY `__akiban_fk_a` (`one_id`) REFERENCES one (id) ) engine=akibandb");
// Verify the in-memory AIS view of the child's indexes.
AkibaInformationSchema ais = manager.getAisCopy();
assertEquals("ais size", 2, ais.getUserTables().size());
UserTable table = ais.getUserTable(SCHEMA, "two");
assertEquals("number of index", 2, table.getIndexes().size());
Index primaryIndex = table.getIndex("PRIMARY");
assertTrue("index isn't primary: " + primaryIndex + " in " + table.getIndexes(), primaryIndex.isPrimaryKey());
Index fkIndex = table.getIndex("__akiban_fk_a");
assertEquals("fk index name" + " in " + table.getIndexes(), "__akiban_fk_a", fkIndex.getIndexName().getName());
// Dropping the root table; groups are dropped whole (see bug-188 note below).
manager.dropTable(SCHEMA, "one");
}
/**
 * A child whose FK references a non-existent parent table ("zebra") must be
 * rejected with JOIN_TO_UNKNOWN_TABLE, leaving the existing schema state
 * completely unchanged.
 */
@Test
public void addChildToNonExistentParent() throws Exception{
createTable(SCHEMA, "CREATE TABLE one (id int, PRIMARY KEY (id)) engine=akibandb;");
assertTables("user tables", "CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;");
assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int , INDEX _akiba_one$PK_1(`one$id`)) engine=akibandb",
"create database if not exists `my_schema`",
"use `my_schema`",
"CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb");
// FK targets "zebra", which was never created -> expected failure.
createTable(ErrorCode.JOIN_TO_UNKNOWN_TABLE, SCHEMA, "CREATE TABLE two (id int, one_id int, PRIMARY KEY (id), " +
"CONSTRAINT `__akiban_fk_0` FOREIGN KEY `__akiban_fk_0` (`one_id`) REFERENCES zebra (id) ) engine=akibandb;");
// State after the failed create is identical to the state before it.
assertTables("user tables",
"CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;");
assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int , INDEX _akiba_one$PK_1(`one$id`)) engine=akibandb",
"create database if not exists `my_schema`",
"use `my_schema`",
"CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb");
manager.dropTable(SCHEMA, "one");
}
/**
 * A child whose FK references a column that does not exist on the parent
 * ("one.invalid_id") must be rejected with JOIN_TO_WRONG_COLUMNS, leaving
 * the existing schema state unchanged.
 */
@Test
public void addChildToNonExistentColumns() throws Exception{
createTable(SCHEMA, "CREATE TABLE one (id int, PRIMARY KEY (id)) engine=akibandb;");
assertTables("user tables", "CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;");
assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int , INDEX _akiba_one$PK_1(`one$id`)) engine=akibandb",
"create database if not exists `my_schema`",
"use `my_schema`",
"CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb");
// Parent exists, but the referenced column does not -> expected failure.
createTable(ErrorCode.JOIN_TO_WRONG_COLUMNS, SCHEMA, "CREATE TABLE two (id int, one_id int, PRIMARY KEY (id), " +
"CONSTRAINT `__akiban_fk_0` FOREIGN KEY `__akiban_fk_0` (`one_id`) REFERENCES one (invalid_id) ) engine=akibandb;");
// No change to tables or DDL after the rejected create.
assertTables("user tables", "CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;");
assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int , INDEX _akiba_one$PK_1(`one$id`)) engine=akibandb",
"create database if not exists `my_schema`",
"use `my_schema`",
"CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb");
manager.dropTable(SCHEMA, "one");
}
/**
 * FKs may not reference protected/internal tables. Both an
 * akiba_information_schema table and an akiba_objects group table are tried
 * as join targets; each attempt must fail with JOIN_TO_PROTECTED_TABLE and
 * leave the schema state unchanged.
 */
@Test
public void addChildToProtectedTable() throws Exception {
// Joining to the AIS "tables" table is rejected.
createTable(ErrorCode.JOIN_TO_PROTECTED_TABLE, SCHEMA, "CREATE TABLE one (id int, one_id int, PRIMARY KEY (id), " +
"CONSTRAINT `__akiban_fk_0` FOREIGN KEY `__akiban_fk_0` (`one_id`) REFERENCES akiba_information_schema.tables (table_id) ) engine=akibandb;");
createTable(SCHEMA, "CREATE TABLE one (id int, PRIMARY KEY (id)) engine=akibandb;");
assertTables("user tables",
"CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;");
assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int , INDEX _akiba_one$PK_1(`one$id`)) engine=akibandb",
"create database if not exists `my_schema`",
"use `my_schema`",
"CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb");
// Joining to a generated group table in akiba_objects is also rejected.
createTable(ErrorCode.JOIN_TO_PROTECTED_TABLE, SCHEMA, "CREATE TABLE two (id int, one_id int, PRIMARY KEY (id), " +
"CONSTRAINT `__akiban_fk_0` FOREIGN KEY `__akiban_fk_0` (`one_id`) REFERENCES akiba_objects._akiba_one (`one$id`) ) engine=akibandb;");
assertTables("user tables",
"CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;");
assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int , INDEX _akiba_one$PK_1(`one$id`)) engine=akibandb",
"create database if not exists `my_schema`",
"use `my_schema`",
"CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb");
manager.dropTable(SCHEMA, "one");
}
/**
 * Parent/child pair in one group, then drops the CHILD table. The post-drop
 * assertions were removed when bug 188 changed drop semantics to whole-group
 * drops (see the retained commented-out block below for history).
 */
@Test
public void testAddDropTwoTablesOneGroupDropChild() throws Exception {
createTable(SCHEMA, "CREATE TABLE one (id int, PRIMARY KEY (id)) engine=akibandb;");
assertTables("user tables",
"CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;");
assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int , INDEX _akiba_one$PK_1(`one$id`)) engine=akibandb",
"create database if not exists `my_schema`",
"use `my_schema`",
"CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb");
// Child joins the parent, placing both tables into the _akiba_one group.
createTable(SCHEMA, "CREATE TABLE two (id int, one_id int, PRIMARY KEY (id), " +
"CONSTRAINT `__akiban_fk_0` FOREIGN KEY `__akiban_fk_0` (`one_id`) REFERENCES one (id) ) engine=akibandb;");
assertTables("user tables",
"CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;",
"CREATE TABLE %s.two (id int, one_id int, PRIMARY KEY (id), " +
"CONSTRAINT `__akiban_fk_0` FOREIGN KEY `__akiban_fk_0` (`one_id`) REFERENCES one (id) ) engine=akibandb;");
assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int, `two$id` int, `two$one_id` int , "
+"INDEX _akiba_one$PK_1(`one$id`), INDEX _akiba_one$PK_2(`two$id`), INDEX two$__akiban_fk_0(`two$one_id`)) engine=akibandb",
"create database if not exists `my_schema`",
"use `my_schema`",
"CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb",
"CREATE TABLE `my_schema`.two (id int, one_id int, PRIMARY KEY (id), " +
"CONSTRAINT `__akiban_fk_0` FOREIGN KEY `__akiban_fk_0` (`one_id`) REFERENCES one (id) ) engine=akibandb");
// Dropping the child; since bug 188 this drops the whole group.
manager.dropTable(SCHEMA, "two");
// Commenting out the following as a fix to bug 188. We're now dropping whole groups at a time, instead of just
// branches.
// assertTables("user tables",
// "CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;");
// assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int , INDEX _akiba_one$one_PK(`one$id`)) engine=akibandb",
// "create database if not exists `my_schema`",
// "use `my_schema`",
// "CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb");
// Commenting out the following as a fix to bug 188. We're now dropping whole groups at a time, instead of just
// branches.
// manager.dropTable(SCHEMA, "one");
// assertTables("user tables");
// assertDDLS();
}
/**
 * dropTable must be a silent no-op for unknown schemas/tables and must stay
 * idempotent: dropping an already-dropped table (or a never-existing one,
 * repeatedly) must not throw and must not disturb existing state.
 */
@Test
public void dropNonExistentTable() throws Exception {
// Unknown schema before anything exists: no-op.
manager.dropTable("this_schema_does_not", "exist");
createTable(SCHEMA, "CREATE TABLE one (id int, PRIMARY KEY (id)) engine=akibandb;");
assertTables("user tables",
"CREATE TABLE %s.one (id int, PRIMARY KEY (id)) engine=akibandb;");
assertDDLS("create table `akiba_objects`.`_akiba_one`(`one$id` int , INDEX _akiba_one$PK_1(`one$id`)) engine=akibandb",
"create database if not exists `my_schema`",
"use `my_schema`",
"CREATE TABLE `my_schema`.one (id int, PRIMARY KEY (id)) engine=akibandb");
manager.dropTable(SCHEMA, "one");
// Second drop of the same table: must be idempotent.
manager.dropTable(SCHEMA, "one");
// Repeated drops against a schema that never existed: also no-ops.
manager.dropTable("this_schema_never_existed", "it_really_didnt");
manager.dropTable("this_schema_never_existed", "it_really_didnt");
}
/**
 * Same-name tables across schemas are allowed only when their column sets are
 * disjoint (otherwise the generated group-table column names would collide,
 * reported as DUPLICATE_COLUMN_NAMES). A second same-name table gets a "$0"
 * suffix on its group table; dropping it frees that name for reuse.
 */
@Test
public void overloadTableAndColumn() throws Exception {
    // we don't allow two tables s1.foo and s2.foo to have any identical columns
    // But we do want to allow same-name tables in different schemas if they don't share any columns
    List<String> expectedDDLs = Collections.unmodifiableList(Arrays.asList(
            "create table `akiba_objects`.`_akiba_one`(`one$idFoo` int , INDEX _akiba_one$PK_1(`one$idFoo`)) engine=akibandb",
            "create database if not exists `s1`",
            "use `s1`",
            "CREATE TABLE `s1`.one (idFoo int, PRIMARY KEY (idFoo)) engine=akibandb"));
    createTable("s1", "CREATE TABLE one (idFoo int, PRIMARY KEY (idFoo)) engine=akibandb;");
    assertTables("user tables",
            "CREATE TABLE `s1`.one (idFoo int, PRIMARY KEY (idFoo)) engine=akibandb;");
    // BUG FIX: the toArray calls below previously sized the target array from
    // expectedDDLs even for the longer lists (expectedDDLs2/3). That only
    // worked because toArray reallocates undersized arrays; use the standard
    // zero-length-array idiom instead.
    assertDDLS(expectedDDLs.toArray(new String[0]));
    // s2.one shares the table name but no column names -> accepted, with a
    // "$0"-suffixed group table to disambiguate it from s1's group.
    List<String> expectedDDLs2 = new ArrayList<String>(expectedDDLs);
    expectedDDLs2.add(0, "create table `akiba_objects`.`_akiba_one$0`(`one$id` int , INDEX _akiba_one$0$PK_1(`one$id`)) engine=akibandb");
    expectedDDLs2.add("create database if not exists `s2`");
    expectedDDLs2.add("use `s2`");
    expectedDDLs2.add("CREATE TABLE `s2`.one (id int, PRIMARY KEY (id)) engine=akibandb");
    createTable("s2", "CREATE TABLE one (id int, PRIMARY KEY (id)) engine=akibandb;");
    assertTables("user tables",
            "CREATE TABLE `s1`.one (idFoo int, PRIMARY KEY (idFoo)) engine=akibandb;",
            "CREATE TABLE `s2`.one (id int, PRIMARY KEY (id)) engine=akibandb;");
    assertDDLS(expectedDDLs2.toArray(new String[0]));
    // No changes when trying to add s3.one, which duplicates s2.one's columns.
    createTable(ErrorCode.DUPLICATE_COLUMN_NAMES, "s3", "CREATE TABLE one (id int, PRIMARY KEY (id)) engine=akibandb;");
    manager.getAisCopy();
    assertTables("user tables",
            "CREATE TABLE `s1`.one (idFoo int, PRIMARY KEY (idFoo)) engine=akibandb;",
            "CREATE TABLE `s2`.one (id int, PRIMARY KEY (id)) engine=akibandb;");
    assertDDLS(expectedDDLs2.toArray(new String[0]));
    // Dropping s2.one frees its column names; s3.one is now accepted and
    // reuses the "$0" group-table suffix.
    manager.dropTable("s2", "one");
    List<String> expectedDDLs3 = new ArrayList<String>(expectedDDLs);
    expectedDDLs3.add(0, "create table `akiba_objects`.`_akiba_one$0`(`one$id` int , INDEX _akiba_one$0$PK_1(`one$id`)) engine=akibandb");
    expectedDDLs3.add("create database if not exists `s3`");
    expectedDDLs3.add("use `s3`");
    expectedDDLs3.add("CREATE TABLE `s3`.one (id int, PRIMARY KEY (id)) engine=akibandb");
    createTable("s3", "CREATE TABLE one (id int, PRIMARY KEY (id)) engine=akibandb;");
    assertTables("user tables",
            "CREATE TABLE `s1`.one (idFoo int, PRIMARY KEY (idFoo)) engine=akibandb;",
            "CREATE TABLE `s3`.one (id int, PRIMARY KEY (id)) engine=akibandb;");
    assertDDLS(expectedDDLs3.toArray(new String[0]));
    manager.dropTable("s3", "one");
    manager.dropTable("s1", "one");
}
/**
 * Asserts that the schema manager's current user tables are exactly the
 * given DDL strings. Each expected string may contain "%s", substituted with
 * the back-quoted default schema name; the (schema, table) key is then
 * extracted from the DDL via REGEX.
 */
private void assertTables(String message, String... expecteds) throws Exception {
    Map<TableName, String> actualTables = manager.getUserTables();
    Map<TableName, String> expectedByName = new HashMap<TableName, String>(actualTables.size());
    for (String ddlTemplate : expecteds) {
        String ddl = String.format(ddlTemplate, '`' + SCHEMA + '`');
        Matcher matcher = REGEX.matcher(ddl);
        assertTrue("regex not found in " + ddl, matcher.find());
        expectedByName.put(TableName.create(matcher.group(1), matcher.group(2)), ddl);
    }
    assertEquals(message, expectedByName, new HashMap<TableName, String>(actualTables));
}
/**
 * Asserts that the DDLs reported by the schema manager are exactly the fixed
 * akiba_information_schema bootstrap statements followed by the given
 * per-test statements, compared as single joined strings.
 */
private void assertDDLS(String... expected) throws Exception{
    List<String> allExpected = new ArrayList<String>(Arrays.asList(
            "set default_storage_engine = akibandb",
            "create database if not exists `akiba_information_schema`",
            "use `akiba_information_schema`",
            "create table groups( group_name varchar(64), primary key(group_name) ) engine=akibandb",
            "create table tables( schema_name varchar(64), table_name varchar(64), table_type varchar(8), table_id int, group_name varchar(64), source_types int, primary key(schema_name, table_name) ) engine=akibandb",
            "create table columns ( schema_name varchar(64), table_name varchar(64), column_name varchar(64), position int, type varchar(64), type_param_1 bigint, type_param_2 bigint, nullable tinyint, initial_autoinc bigint, group_schema_name varchar(64), group_table_name varchar(64), group_column_name varchar(64), maximum_size bigint, prefix_size int, character_set varchar(32), collation varchar(32), primary key(schema_name, table_name, column_name) ) engine=akibandb",
            "create table joins( join_name varchar(767), parent_schema_name varchar(64), parent_table_name varchar(64), child_schema_name varchar(64), child_table_name varchar(64), group_name varchar(64), join_weight int, grouping_usage int, source_types int, primary key(join_name) ) engine=akibandb",
            "create table join_columns( join_name varchar(767), parent_schema_name varchar(64), parent_table_name varchar(64), parent_column_name varchar(64), child_schema_name varchar(64), child_table_name varchar(64), child_column_name varchar(64), primary key(join_name, parent_column_name, child_column_name) ) engine=akibandb",
            "create table indexes ( schema_name varchar(64), table_name varchar(64), index_name varchar(64), index_id int, table_constraint varchar(64), is_unique tinyint, primary key(schema_name, table_name, index_name) ) engine=akibandb",
            "create table index_columns ( schema_name varchar(64), table_name varchar(64), index_name varchar(64), column_name varchar(64), ordinal_position int, is_ascending tinyint, indexed_length int, primary key(schema_name, table_name, index_name, column_name) ) engine=akibandb",
            "create table types( type_name varchar(64), parameters int, fixed_size tinyint, max_size_bytes bigint, primary key(type_name) ) engine=akibandb",
            "create table index_analysis( table_id int, index_id int, analysis_timestamp timestamp, item_number int, key_string varchar(2048), index_row_data varbinary(4096), count bigint, primary key(table_id, index_id, item_number) ) engine=akibandb",
            "create schema if not exists `akiba_objects`"));
    allExpected.addAll(Arrays.asList(expected));
    String actualJoined = Strings.join(manager.getDDLs());
    String expectedJoined = Strings.join(allExpected);
    assertEquals("DDLs", expectedJoined, actualJoined);
}
}
|
package com.celements.pagetype.xobject;
import static com.celements.common.test.CelementsTestUtils.*;
import static org.easymock.EasyMock.*;
import static org.junit.Assert.*;
import java.util.Arrays;
import java.util.Collections;
import org.junit.Before;
import org.junit.Test;
import org.xwiki.model.reference.DocumentReference;
import org.xwiki.model.reference.EntityReference;
import com.celements.common.test.AbstractComponentTest;
import com.celements.pagetype.IPageTypeClassConfig;
import com.celements.pagetype.PageType;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.objects.BaseObject;
public class XObjectPageTypeConfigTest extends AbstractComponentTest {
private XObjectPageTypeConfig xObjPTconfig;
private PageType pageTypeMock;
private XWikiContext context;
@Before
public void setUp_XObjectPageTypeConfigTest() throws Exception {
context = getContext();
DocumentReference testPageTypeDocRef = new DocumentReference(context.getDatabase(), "PageTypes",
"TestPageType");
xObjPTconfig = new XObjectPageTypeConfig(testPageTypeDocRef);
pageTypeMock = createMockAndAddToDefault(PageType.class);
xObjPTconfig.pageType = pageTypeMock;
expect(pageTypeMock.getConfigName(same(context))).andReturn("TestPageType").anyTimes();
}
@Test
public void testGetCategories_noEmptyCategories() {
expect(pageTypeMock.getCategories(same(context))).andReturn(Collections.<String>emptyList());
replayDefault();
assertEquals(Arrays.asList(""), xObjPTconfig.getCategories());
verifyDefault();
}
@Test
public void testGetCategories() {
expect(pageTypeMock.getCategories(same(context))).andReturn(Arrays.asList("cellType"));
replayDefault();
assertEquals(Arrays.asList("cellType"), xObjPTconfig.getCategories());
verifyDefault();
}
@Test
public void testDisplayInFrameLayout_yes() {
expect(pageTypeMock.showFrame(same(context))).andReturn(true);
replayDefault();
assertTrue(xObjPTconfig.displayInFrameLayout());
verifyDefault();
}
@Test
public void testDisplayInFrameLayout_no() {
expect(pageTypeMock.showFrame(same(context))).andReturn(false);
replayDefault();
assertFalse(xObjPTconfig.displayInFrameLayout());
verifyDefault();
}
@Test
public void testGetName() {
replayDefault();
assertEquals("TestPageType", xObjPTconfig.getName());
verifyDefault();
}
@Test
public void testGetPrettyName() {
String expectedPrettyName = "Test Page Type Pretty Name";
expect(pageTypeMock.getPrettyName(same(context))).andReturn(expectedPrettyName);
replayDefault();
assertEquals(expectedPrettyName, xObjPTconfig.getPrettyName());
verifyDefault();
}
@Test
public void testHasPageTitle_yes() {
expect(pageTypeMock.hasPageTitle(same(context))).andReturn(true);
replayDefault();
assertTrue(xObjPTconfig.hasPageTitle());
verifyDefault();
}
@Test
public void testHasPageTitle_no() {
expect(pageTypeMock.hasPageTitle(same(context))).andReturn(false);
replayDefault();
assertFalse(xObjPTconfig.hasPageTitle());
verifyDefault();
}
@Test
public void testGetRenderTemplateForRenderMode_view() throws Exception {
String expectedRenderTemplate = "Templates.TestPageTypeView";
expect(pageTypeMock.getRenderTemplate(eq("view"), same(context))).andReturn(
expectedRenderTemplate);
replayDefault();
assertEquals(expectedRenderTemplate, xObjPTconfig.getRenderTemplateForRenderMode("view"));
verifyDefault();
}
@Test
public void testGetRenderTemplateForRenderMode_edit() throws Exception {
String expectedRenderTemplate = "Templates.TestPageTypeEdit";
expect(pageTypeMock.getRenderTemplate(eq("edit"), same(context))).andReturn(
expectedRenderTemplate);
replayDefault();
assertEquals(expectedRenderTemplate, xObjPTconfig.getRenderTemplateForRenderMode("edit"));
verifyDefault();
}
@Test
public void testIsVisible_yes() throws Exception {
BaseObject testPageTypePropObj = new BaseObject();
EntityReference pageTypePropClassRef = new DocumentReference(context.getDatabase(),
IPageTypeClassConfig.PAGE_TYPE_PROPERTIES_CLASS_SPACE,
IPageTypeClassConfig.PAGE_TYPE_PROPERTIES_CLASS_DOC);
testPageTypePropObj.setXClassReference(pageTypePropClassRef);
testPageTypePropObj.setIntValue("visible", 1);
expect(pageTypeMock.getPageTypeProperties(same(context))).andReturn(testPageTypePropObj);
replayDefault();
assertTrue(xObjPTconfig.isVisible());
verifyDefault();
}
@Test
public void testIsVisible_no() throws Exception {
BaseObject testPageTypePropObj = new BaseObject();
EntityReference pageTypePropClassRef = new DocumentReference(context.getDatabase(),
IPageTypeClassConfig.PAGE_TYPE_PROPERTIES_CLASS_SPACE,
IPageTypeClassConfig.PAGE_TYPE_PROPERTIES_CLASS_DOC);
testPageTypePropObj.setXClassReference(pageTypePropClassRef);
testPageTypePropObj.setIntValue("visible", 0);
expect(pageTypeMock.getPageTypeProperties(same(context))).andReturn(testPageTypePropObj);
replayDefault();
assertFalse(xObjPTconfig.isVisible());
verifyDefault();
}
@Test
public void test_getPrettyName_absent() {
BaseObject testPageTypePropObj = new BaseObject();
EntityReference pageTypePropClassRef = new DocumentReference(context.getDatabase(),
IPageTypeClassConfig.PAGE_TYPE_PROPERTIES_CLASS_SPACE,
IPageTypeClassConfig.PAGE_TYPE_PROPERTIES_CLASS_DOC);
testPageTypePropObj.setXClassReference(pageTypePropClassRef);
expect(pageTypeMock.getPageTypeProperties(same(context))).andReturn(testPageTypePropObj);
replayDefault();
assertFalse(xObjPTconfig.defaultTagName().isPresent());
verifyDefault();
}
@Test
public void test_getPrettyName_present() {
String tagName = "abstract";
BaseObject testPageTypePropObj = new BaseObject();
EntityReference pageTypePropClassRef = new DocumentReference(context.getDatabase(),
IPageTypeClassConfig.PAGE_TYPE_PROPERTIES_CLASS_SPACE,
IPageTypeClassConfig.PAGE_TYPE_PROPERTIES_CLASS_DOC);
testPageTypePropObj.setXClassReference(pageTypePropClassRef);
testPageTypePropObj.setStringValue(IPageTypeClassConfig.PAGETYPE_PROP_TAG_NAME, tagName);
expect(pageTypeMock.getPageTypeProperties(same(context))).andReturn(
testPageTypePropObj).atLeastOnce();
replayDefault();
assertTrue(xObjPTconfig.defaultTagName().isPresent());
assertEquals(tagName, xObjPTconfig.defaultTagName().get());
verifyDefault();
}
@Test
public void testIsVisible_NPE_no_object() {
expect(pageTypeMock.getPageTypeProperties(same(context))).andReturn(null);
replayDefault();
assertFalse(xObjPTconfig.isVisible());
verifyDefault();
}
}
|
package com.ning.http.client.async;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeoutException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.handler.AbstractHandler;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.ning.http.client.AsyncHttpClient;
import com.ning.http.client.AsyncHttpClientConfig;
import com.ning.http.client.BodyDeferringAsyncHandler;
import com.ning.http.client.BodyDeferringAsyncHandler.BodyDeferringInputStream;
import com.ning.http.client.Response;
/**
 * Tests for BodyDeferringAsyncHandler: response headers must be observable
 * via getResponse() while the (large, possibly slow or aborted) body is still
 * streaming, and the body must be fully delivered once the Future completes.
 */
public abstract class BodyDeferringAsyncHandlerTest extends AbstractBasicTest {

    // not a half gig ;) for test shorter run's sake
    protected static final int HALF_GIG = 100000;

    /**
     * Serves exactly HALF_GIG bytes. Request headers modify behavior:
     * X-SLOW sleeps 300ms per byte; X-FAIL-TRANSFER aborts with a 500 halfway
     * through (the response is already committed, so Jetty drops the
     * connection).
     */
    public static class SlowAndBigHandler extends AbstractHandler {
        public void handle(String pathInContext, Request request,
                HttpServletRequest httpRequest, HttpServletResponse httpResponse)
                throws IOException, ServletException {
            httpResponse.setStatus(200);
            httpResponse.setContentLength(HALF_GIG);
            httpResponse.setContentType("application/octet-stream");
            // Commit headers right away so clients can see them before the body.
            httpResponse.flushBuffer();
            final boolean wantFailure = httpRequest
                    .getHeader("X-FAIL-TRANSFER") != null;
            final boolean wantSlow = httpRequest.getHeader("X-SLOW") != null;
            OutputStream os = httpResponse.getOutputStream();
            for (int i = 0; i < HALF_GIG; i++) {
                os.write(i % 255);
                if (wantSlow) {
                    try {
                        Thread.sleep(300);
                    } catch (InterruptedException ex) {
                        // ignored: keep serving
                    }
                }
                if (wantFailure) {
                    if (i > HALF_GIG / 2) {
                        // kaboom: response is committed, but Jetty aborts and
                        // drops the connection
                        httpResponse.sendError(500);
                        break;
                    }
                }
            }
            httpResponse.getOutputStream().flush();
            httpResponse.getOutputStream().close();
        }
    }

    /** A /dev/null sink that counts how many bytes it ditched. */
    public static class CountingOutputStream extends OutputStream {
        private int byteCount = 0;

        @Override
        public void write(int b) throws IOException {
            // /dev/null
            byteCount++;
        }

        public int getByteCount() {
            return byteCount;
        }
    }

    /**
     * Simple stream copy just to "consume". Closes both streams.
     * BUG FIX: closes now happen in finally blocks, so a failed read/write no
     * longer leaks either stream; the original IOException still propagates,
     * which the failure-path tests rely on.
     */
    public static void copy(InputStream in, OutputStream out)
            throws IOException {
        try {
            try {
                byte[] buf = new byte[1024];
                int len;
                while ((len = in.read(buf)) != -1) {
                    out.write(buf, 0, len);
                }
                out.flush();
            } finally {
                out.close();
            }
        } finally {
            in.close();
        }
    }

    public AbstractHandler configureHandler() throws Exception {
        return new SlowAndBigHandler();
    }

    public AsyncHttpClientConfig getAsyncHttpClientConfig() {
        // for this test brevity's sake, retries are disabled (0 retries)
        return new AsyncHttpClientConfig.Builder().setMaxRequestRetry(0)
                .setRequestTimeoutInMs(10000).build();
    }

    /**
     * Headers must be available from getResponse() while the body may still be
     * in flight; after Future.get() the sink must hold the entire body.
     */
    @Test(groups = { "standalone", "default_provider" })
    public void deferredSimple() throws IOException, ExecutionException,
            TimeoutException, InterruptedException {
        AsyncHttpClient client = getAsyncHttpClient(getAsyncHttpClientConfig());
        // BUG FIX: close the client in finally so assertion failures don't leak it.
        try {
            AsyncHttpClient.BoundRequestBuilder r = client
                    .prepareGet("http://127.0.0.1:" + port1 + "/deferredSimple");
            CountingOutputStream cos = new CountingOutputStream();
            BodyDeferringAsyncHandler bdah = new BodyDeferringAsyncHandler(cos);
            Future<Response> f = r.execute(bdah);
            Response resp = bdah.getResponse();
            assertNotNull(resp);
            assertEquals(resp.getStatusCode(), HttpServletResponse.SC_OK);
            assertEquals(resp.getHeader("content-length"),
                    String.valueOf(HALF_GIG));
            // we got headers only, it's probably not all yet here (we have BIG
            // file downloading)
            Assert.assertTrue(cos.getByteCount() <= HALF_GIG);
            // now be polite and wait for body arrival too (otherwise we would
            // be dropping the "line" on server)
            f.get();
            // it all should be here now
            assertEquals(cos.getByteCount(), HALF_GIG);
        } finally {
            client.close();
        }
    }

    /**
     * With X-FAIL-TRANSFER the server aborts mid-body: headers still arrive,
     * but Future.get() must fail and the sink must hold a partial body.
     */
    @Test(groups = { "standalone", "default_provider" }, enabled = false)
    public void deferredSimpleWithFailure() throws IOException,
            ExecutionException, TimeoutException, InterruptedException {
        AsyncHttpClient client = getAsyncHttpClient(getAsyncHttpClientConfig());
        try {
            AsyncHttpClient.BoundRequestBuilder r = client.prepareGet(
                    "http://127.0.0.1:" + port1 + "/deferredSimpleWithFailure")
                    .addHeader("X-FAIL-TRANSFER", Boolean.TRUE.toString());
            CountingOutputStream cos = new CountingOutputStream();
            BodyDeferringAsyncHandler bdah = new BodyDeferringAsyncHandler(cos);
            Future<Response> f = r.execute(bdah);
            Response resp = bdah.getResponse();
            assertNotNull(resp);
            assertEquals(resp.getStatusCode(), HttpServletResponse.SC_OK);
            assertEquals(resp.getHeader("content-length"),
                    String.valueOf(HALF_GIG));
            // we got headers only, body is still (partially) in flight
            Assert.assertTrue(cos.getByteCount() <= HALF_GIG);
            try {
                f.get();
                Assert.fail("get() should fail with IOException!");
            } catch (Exception e) {
                // good
            }
            // it's incomplete, there was an error
            Assert.assertTrue(cos.getByteCount() != HALF_GIG);
        } finally {
            client.close();
        }
    }

    /**
     * BodyDeferringInputStream variant: the response is read through a piped
     * stream; consuming and closing the stream also waits out the Future.
     */
    @Test(groups = { "standalone", "default_provider" })
    public void deferredInputStreamTrick() throws IOException,
            ExecutionException, TimeoutException, InterruptedException {
        AsyncHttpClient client = getAsyncHttpClient(getAsyncHttpClientConfig());
        try {
            AsyncHttpClient.BoundRequestBuilder r = client
                    .prepareGet("http://127.0.0.1:" + port1
                            + "/deferredInputStreamTrick");
            PipedOutputStream pos = new PipedOutputStream();
            PipedInputStream pis = new PipedInputStream(pos);
            BodyDeferringAsyncHandler bdah = new BodyDeferringAsyncHandler(pos);
            Future<Response> f = r.execute(bdah);
            BodyDeferringInputStream is = new BodyDeferringInputStream(f, bdah, pis);
            Response resp = is.getAsapResponse();
            assertNotNull(resp);
            assertEquals(resp.getStatusCode(), HttpServletResponse.SC_OK);
            assertEquals(resp.getHeader("content-length"),
                    String.valueOf(HALF_GIG));
            // "consume" the body, but our code needs input stream
            CountingOutputStream cos = new CountingOutputStream();
            copy(is, cos);
            // now we don't need to be polite, since consuming and closing
            // BodyDeferringInputStream does all.
            // it all should be here now
            assertEquals(cos.getByteCount(), HALF_GIG);
        } finally {
            client.close();
        }
    }

    /**
     * BodyDeferringInputStream variant of the aborted transfer: consuming the
     * stream must surface the transfer failure as an IOException.
     */
    @Test(groups = { "standalone", "default_provider" })
    public void deferredInputStreamTrickWithFailure() throws IOException,
            ExecutionException, TimeoutException, InterruptedException {
        AsyncHttpClient client = getAsyncHttpClient(getAsyncHttpClientConfig());
        try {
            AsyncHttpClient.BoundRequestBuilder r = client.prepareGet(
                    "http://127.0.0.1:" + port1
                            + "/deferredInputStreamTrickWithFailure").addHeader(
                    "X-FAIL-TRANSFER", Boolean.TRUE.toString());
            PipedOutputStream pos = new PipedOutputStream();
            PipedInputStream pis = new PipedInputStream(pos);
            BodyDeferringAsyncHandler bdah = new BodyDeferringAsyncHandler(pos);
            Future<Response> f = r.execute(bdah);
            BodyDeferringInputStream is = new BodyDeferringInputStream(f, bdah, pis);
            Response resp = is.getAsapResponse();
            assertNotNull(resp);
            assertEquals(resp.getStatusCode(), HttpServletResponse.SC_OK);
            assertEquals(resp.getHeader("content-length"),
                    String.valueOf(HALF_GIG));
            // "consume" the body, but our code needs input stream
            CountingOutputStream cos = new CountingOutputStream();
            try {
                copy(is, cos);
                Assert.fail("InputStream consumption should fail with IOException!");
            } catch (IOException e) {
                // good!
            }
        } finally {
            client.close();
        }
    }

    /**
     * Connection refused: getResponse() must surface the failure as an
     * IOException instead of hanging or returning a response.
     */
    @Test(groups = { "standalone", "default_provider" })
    public void testConnectionRefused() throws IOException, ExecutionException,
            TimeoutException, InterruptedException {
        int newPortWithoutAnyoneListening = findFreePort();
        AsyncHttpClient client = getAsyncHttpClient(getAsyncHttpClientConfig());
        try {
            AsyncHttpClient.BoundRequestBuilder r = client
                    .prepareGet("http://127.0.0.1:" + newPortWithoutAnyoneListening
                            + "/testConnectionRefused");
            CountingOutputStream cos = new CountingOutputStream();
            BodyDeferringAsyncHandler bdah = new BodyDeferringAsyncHandler(cos);
            r.execute(bdah);
            try {
                bdah.getResponse();
                Assert.fail("IOException should be thrown here!");
            } catch (IOException e) {
                // good
            }
        } finally {
            client.close();
        }
    }
}
|
package com.scaleunlimited.cascading.cuke;
import static org.junit.Assert.assertEquals;
import gherkin.parser.Parser;
import java.util.regex.Matcher;
import org.junit.Test;
/**
 * Exercises CascadingFormatter step dispatch: parameter steps stash values in
 * the shared scenario state, the workflow step consumes them, and a final
 * result step verifies the workflow ran.
 */
public class CascadingFormatterTest {

    /**
     * Matches "the &lt;name&gt; parameter is &lt;value&gt;". As a side effect of a
     * successful match, the captured name/value pair is recorded in the
     * shared scenario state for later steps.
     */
    public static class WorkflowParameterSD extends BaseStepDefinition {

        public WorkflowParameterSD() {
            super();
            setRegex("the (.+) parameter is (.+)");
        }

        @Override
        public boolean isMatchesStep(String keyword, String description) {
            Matcher stepMatcher = _pattern.matcher(description);
            if (!stepMatcher.matches()) {
                return false;
            }
            // Remember the parameter for the workflow step.
            _scenarioState.put(stepMatcher.group(1), new StringSSE(stepMatcher.group(2)));
            return true;
        }
    }

    /**
     * "the WordCountTool workflow is run": checks that all three parameters
     * recorded by earlier steps carry the expected values, then records a
     * success marker for the result step.
     */
    public static class WorkflowSD extends BaseStepDefinition {

        public WorkflowSD() {
            super();
            setRegex("the WordCountTool workflow is run");
        }

        @Override
        public void run() {
            super.run();
            String targetDate = _scenarioState.get("targetDate").toString();
            assertEquals("2014-08-02", targetDate);
            int backtrace = Integer.parseInt(_scenarioState.get("backtrace").toString());
            assertEquals(30, backtrace);
            String workingDir = _scenarioState.get("workingDir").toString();
            assertEquals("working", workingDir);
            _scenarioState.put("WordCountToolResult", new StringSSE("success"));
        }
    }

    /**
     * "the WordCountTool workflow got the expected parameters.": verifies the
     * success marker left by WorkflowSD.
     */
    public static class WorkflowResultSD extends BaseStepDefinition {

        public WorkflowResultSD() {
            super();
            setRegex("the WordCountTool workflow got the expected parameters.");
        }

        @Override
        public void run() {
            super.run();
            String workflowResult = _scenarioState.get("WordCountToolResult").toString();
            assertEquals("success", workflowResult);
        }
    }

    @Test
    public void testPassingParametersViaState() throws Throwable {
        CascadingFormatter formatter = new CascadingFormatter(System.out, false, true);
        formatter.addStepDefinition(new WorkflowParameterSD());
        formatter.addStepDefinition(new WorkflowSD());
        formatter.addStepDefinition(new WorkflowResultSD());
        // Inline feature text; the "And the ... parameter is ..." steps run
        // before the "Then" so state is populated in time.
        String featureSource = "Feature: WordCountTool\n"
                + "Scenario: Accesses parameters\n"
                + "When the WordCountTool workflow is run\n"
                + "And the targetDate parameter is 2014-08-02\n"
                + "And the backtrace parameter is 30\n"
                + "And the workingDir parameter is working\n"
                + "Then the WordCountTool workflow got the expected parameters.\n";
        new Parser(formatter).parse(featureSource, "", 0);
        formatter.close();
    }
}
|
package edu.depaul.armada.controller;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.transaction.TransactionConfiguration;
import org.springframework.transaction.annotation.Transactional;
import edu.depaul.armada.dao.PreferenceDao;
import edu.depaul.armada.domain.Preference;
import edu.depaul.armada.model.Metric;
import edu.depaul.armada.model.ThresholdMetric;
/**
 * Exercises the metric restful controller against a transactional
 * Spring test context.
 *
 * @author rcraddol
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {"/beans/armada-config-test.xml"})
@TransactionConfiguration(transactionManager="armadaTransactionManager")
@Transactional
public class MetricRestfulControllerTest {

    @Autowired private MetricRestfulController metricRestfulController;
    @Autowired private PreferenceDao preferenceDao;

    /** Stores one named threshold preference so the controller has data to report. */
    private void storeThreshold(String name, int value) {
        Preference preference = new Preference();
        preference.setName(name);
        preference.setValue(value);
        preferenceDao.storePreference(preference);
    }

    @Test
    public void testThresholds() {
        storeThreshold("memory_threshold", 85);
        storeThreshold("disk_threshold", 85);
        storeThreshold("cpu_threshold", 85);
        // A count of zero must yield an empty (but non-null) result list.
        List<ThresholdMetric> results = metricRestfulController.getThresholdStats(0);
        assertNotNull(results);
        assertTrue(results.isEmpty());
        results = metricRestfulController.getThresholdStats(4);
        assertNotNull(results);
        // Guard before get(0) so a failure reads as an assertion failure
        // rather than an IndexOutOfBoundsException.
        assertTrue(!results.isEmpty());
        ThresholdMetric threshold = results.get(0);
        assertNotNull(threshold);
    }

    @Test
    public void testGetContainerCounts() {
        // A count of zero must yield an empty (but non-null) result list.
        List<Metric> results = metricRestfulController.getContainerCounts(0);
        assertNotNull(results);
        assertTrue(results.isEmpty());
        // Requesting N counts must return exactly N metrics, each populated.
        int counts = 4;
        results = metricRestfulController.getContainerCounts(counts);
        assertNotNull(results);
        assertTrue(results.size() == counts);
        Metric metric = results.get(0);
        assertNotNull(metric.getHour());
        assertNotNull(metric.getValue());
    }
}
|
package info.gehrels.voting.web.applicationState;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import info.gehrels.voting.Ballot;
import info.gehrels.voting.Vote;
import info.gehrels.voting.genderedElections.GenderedCandidate;
import info.gehrels.voting.genderedElections.GenderedElection;
import org.junit.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.is;
/**
 * Unit tests for {@code BallotState}: layout presence, office renaming, and
 * the migration/reset behaviour of cast ballots.
 */
public class BallotStateTest {

    private static final String OLD_OFFICE_NAME = "office";
    private static final GenderedElection ORIGINAL_ELECTION = new GenderedElection(OLD_OFFICE_NAME, 1, 0, ImmutableSet.of(new GenderedCandidate("Peter", true)));
    private static final String NEW_OFFICE_NAME = "new Office";

    private final BallotLayout ballotLayout = new BallotLayout(ImmutableList.of(ORIGINAL_ELECTION));

    @Test
    public void initializedWithoutLayout() {
        assertThat(new BallotState().isBallotLayoutPresent(), is(false));
    }

    @Test
    public void hasLayoutAfterSettingOne() {
        BallotState ballotLayoutState = new BallotState();
        ballotLayoutState.setBallotLayout(ballotLayout);
        assertThat(ballotLayoutState.isBallotLayoutPresent(), is(true));
        assertThat(ballotLayoutState.getBallotLayout(), is(ballotLayout));
    }

    @Test
    public void changeOfficeNameStoresNewElection() {
        BallotState ballotLayoutState = new BallotState();
        ballotLayoutState.setBallotLayout(ballotLayout);
        ballotLayoutState.changeOfficeName(OLD_OFFICE_NAME, NEW_OFFICE_NAME);
        assertThat(ballotLayoutState.getBallotLayout().getElections().get(0).getOfficeName(), is(NEW_OFFICE_NAME));
    }

    @Test
    public void changeOfficeNameMigratesCastBallots() {
        BallotState ballotLayoutState = new BallotState();
        ballotLayoutState.setBallotLayout(ballotLayout);
        ballotLayoutState.addCastBallot(BallotInputTry.FIRST, new Ballot<>(1L, ImmutableSet.of(Vote.createNoVote(ORIGINAL_ELECTION))));
        GenderedElection newChangedElection = ballotLayoutState.changeOfficeName(OLD_OFFICE_NAME, NEW_OFFICE_NAME);
        // After the rename, the ballot must no longer resolve against the old
        // election but against the renamed one.
        assertThat(ballotLayoutState.getFirstTryCastBallots().iterator().next().getVote(ORIGINAL_ELECTION).isPresent(), is(false));
        assertThat(ballotLayoutState.getFirstTryCastBallots().iterator().next().getVote(newChangedElection).get().getElection().getOfficeName(), is(NEW_OFFICE_NAME));
    }

    @Test(expected = IllegalArgumentException.class)
    public void throwsIfOldOfficeNameDoesNotExist() {
        BallotState ballotLayoutState = new BallotState();
        ballotLayoutState.setBallotLayout(ballotLayout);
        // Expected to throw here; the test passes via the 'expected' attribute.
        // (An assertion after this line would be unreachable and was removed.)
        ballotLayoutState.changeOfficeName("non existing Office Name", NEW_OFFICE_NAME);
    }

    @Test
    public void setBallotLayoutResetsCastBallots() {
        BallotState ballotLayoutState = new BallotState();
        ballotLayoutState.setBallotLayout(ballotLayout);
        ballotLayoutState.addCastBallot(BallotInputTry.FIRST, new Ballot<>(1L, ImmutableSet.of(Vote.createNoVote(ORIGINAL_ELECTION))));
        // Re-setting the layout must discard previously cast ballots.
        ballotLayoutState.setBallotLayout(ballotLayout);
        assertThat(ballotLayoutState.getFirstTryCastBallots(), is(empty()));
    }
}
|
package it.cosenonjaviste.security.jwt.utils;
import static org.junit.Assert.*;
import java.util.Arrays;
import java.util.Date;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.junit.Test;
import com.auth0.jwt.JWTVerifier;
/**
 * Tests for {@code JwtTokenBuilder}: claim contents, expiry / not-before
 * handling, and round-tripping a token through a verifier.
 */
public class JwtTokenBuilderTest {

    private static final String SECRET = "my secret";

    @Test
    public void shouldContains5Claims() throws Exception {
        String token = createToken();
        assertNotNull(token);
        JWTVerifier verifier = new JWTVerifier(SECRET);
        Map<String, Object> tokenObject = verifier.verify(token);
        assertNotNull(tokenObject);
        assertEquals(5, tokenObject.size());
        // Expected value first, per JUnit convention.
        assertEquals("test", tokenObject.get(JwtConstants.USER_ID));
        // NOTE(review): this is a single role string containing a comma; it
        // matches createToken() below, but was presumably intended as two
        // separate roles — confirm against the production code.
        assertEquals(Arrays.asList("role1, role2"), tokenObject.get(JwtConstants.ROLES));
        long now = System.currentTimeMillis() / 1000L;
        long timeToExpire = ((int) tokenObject.get("exp")) - now;
        assertTrue(timeToExpire > 0);
        assertTrue(timeToExpire <= 10000);
        int issueTime = (int) tokenObject.get("iat");
        assertTrue(issueTime <= now);
    }

    @Test(expected = IllegalStateException.class)
    public void shouldBeEmptyAndInvalid() throws Exception {
        // Building without any claims must be rejected.
        JwtTokenBuilder.create(SECRET).build();
    }

    @Test
    public void shouldParseJwtFromString() throws Exception {
        String token = createToken();
        assertNotNull(token);
        JwtTokenBuilder from = JwtTokenBuilder.from(token, SECRET);
        String token2 = from.expirySecs(20000).notValidBeforeLeeway(10000).build();
        int now = (int) (System.currentTimeMillis() / 1000L);
        JWTVerifier verifier = new JWTVerifier(SECRET);
        Map<String, Object> tokenObject = verifier.verify(token2);
        // exp must fall inside (now, now + 20000].
        int exp = (int) tokenObject.get("exp");
        assertTrue(exp <= now + 20000);
        assertTrue(exp > now);
        // nbf must fall inside [now - 10000, now).
        int nbf = (int) tokenObject.get("nbf");
        assertTrue(nbf >= now - 10000);
        assertTrue(nbf < now);
    }

    @Test(expected = IllegalStateException.class)
    public void shouldThrowIllegalStateException() throws Exception {
        // Building from a verifier that has not verified a token yet must fail.
        JwtTokenVerifier verifier = JwtTokenVerifier.create(SECRET);
        JwtTokenBuilder.from(verifier, SECRET);
    }

    @Test
    public void shouldIncreaseExpireTime() throws Exception {
        String token = createToken();
        JwtTokenVerifier verifier = JwtTokenVerifier.create(SECRET);
        int firstExpire = getExp(verifier, token);
        // Rebuilding after a delay recomputes exp relative to "now".
        TimeUnit.SECONDS.sleep(2);
        token = JwtTokenBuilder.from(verifier, SECRET).build();
        verifier = JwtTokenVerifier.create(SECRET);
        int secondExpire = getExp(verifier, token);
        assertTrue(secondExpire >= firstExpire + 2);
    }

    @Test
    public void shouldRecalculateNotBeforeClaimCorrectly() {
        String token = createToken();
        JwtTokenVerifier verifier = JwtTokenVerifier.create(SECRET);
        verifier.verify(token);
        Integer nbf = (Integer) verifier.getClaims().get("nbf");
        Integer exp = getExp(verifier, token);
        assertNotNull(nbf);
        long nowInSecs = new Date().getTime() / 1000;
        assertTrue(nowInSecs > nbf);
        assertNotNull(exp);
        assertTrue(nowInSecs < exp);
        JwtTokenBuilder tokenBuilder = JwtTokenBuilder.from(verifier, SECRET);
        String recreatedToken = tokenBuilder.build();
        verifier = JwtTokenVerifier.create(SECRET);
        verifier.verify(recreatedToken);
        Integer recreatedNbf = (Integer) verifier.getClaims().get("nbf");
        // BUG FIX: read "exp" from the recreated token, not the original one.
        // The old code compared the original token's exp with itself, making
        // the assertion below vacuous.
        Integer recreatedExp = getExp(verifier, recreatedToken);
        // Rebuilding recomputes the timestamps relative to "now" (see
        // shouldIncreaseExpireTime), so allow one second of drift — the same
        // tolerance already used for nbf below.
        assertEquals((float) exp, (float) recreatedExp, 1);
        assertEquals((float) nbf, (float) recreatedNbf, 1);
    }

    /** Verifies {@code token} with {@code verifier} and returns its "exp" claim. */
    private int getExp(JwtTokenVerifier verifier, String token) {
        verifier.verify(token);
        Map<String, Object> claims = verifier.getClaims();
        return (int) claims.get("exp");
    }

    /** Builds the reference token used by most tests: 10000s expiry, 5000s nbf leeway. */
    private String createToken() {
        JwtTokenBuilder builder = JwtTokenBuilder.create(SECRET);
        return builder.userId("test").roles(Arrays.asList("role1, role2")).expirySecs(10000).notValidBeforeLeeway(5000).build();
    }
}
|
package mil.dds.anet.test.integration.emails;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assumptions.assumeTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import io.dropwizard.testing.junit5.DropwizardAppExtension;
import io.dropwizard.testing.junit5.DropwizardExtensionsSupport;
import java.time.Instant;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.ScheduledExecutorService;
import mil.dds.anet.AnetApplication;
import mil.dds.anet.beans.AnetEmail;
import mil.dds.anet.config.AnetConfiguration;
import mil.dds.anet.database.EmailDao;
import mil.dds.anet.test.integration.config.AnetTestConfiguration;
import mil.dds.anet.test.integration.utils.EmailResponse;
import mil.dds.anet.test.integration.utils.FakeSmtpServer;
import mil.dds.anet.threads.AnetEmailWorker;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mockito;
@ExtendWith(DropwizardExtensionsSupport.class)
public class AnetEmailWorkerTest {
    private static AnetEmailWorker emailWorker;
    private static EmailDao emailDao;
    private static FakeSmtpServer emailServer;

    private static final DropwizardAppExtension<AnetConfiguration> app =
            new DropwizardAppExtension<AnetConfiguration>(AnetApplication.class, "anet.yml");

    /**
     * Sets up the test: skips the whole class unless the email-server tests are
     * enabled, then wires a mocked DAO/scheduler into the worker and points it
     * at the fake SMTP server.
     *
     * @throws Exception If the setup fails
     */
    @BeforeAll
    public static void setUp() throws Exception {
        assumeTrue(Boolean.parseBoolean(
                AnetTestConfiguration.getConfiguration().get("emailServerTestsExecute").toString()));
        emailDao = mock(EmailDao.class, Mockito.RETURNS_DEEP_STUBS);
        final ScheduledExecutorService scheduler =
                mock(ScheduledExecutorService.class, Mockito.RETURNS_DEEP_STUBS);
        // Configuration
        app.getConfiguration().getDictionary().put("SUPPORT_EMAIL_ADDR", "support@example.com");
        app.getConfiguration().getDictionary().put("dateFormats.email.date", "d MMMM yyyy");
        app.getConfiguration().getDictionary().put("dateFormats.email.withTime", "d MMMM yyyy @ HH:mm");
        app.getConfiguration().getDictionary().put("engagementsIncludeTimeAndDuration", true);
        app.getConfiguration().getDictionary().put("activeDomainNames", Arrays.asList("anet.com"));
        app.getConfiguration().getDictionary().put("fields", new HashMap<String, Object>());
        app.getConfiguration().setEmailFromAddr("test_from_address@anet.com");
        emailServer = new FakeSmtpServer(app.getConfiguration().getSmtp());
        emailWorker = new AnetEmailWorker(emailDao, app.getConfiguration(), scheduler);
        // Clear the email server before starting test
        emailServer.clearEmailServer();
    }

    /**
     * Tears down the test.
     *
     * @throws Exception On error from the email server
     */
    @AfterAll
    public static void tearDown() throws Exception {
        // BUG FIX: when assumeTrue() aborts setUp before the server is
        // created, emailServer is still null — guard to avoid an NPE that
        // would mask the (intentional) skip.
        if (emailServer != null) {
            // Clear the email server after test
            emailServer.clearEmailServer();
        }
    }

    /**
     * Test the worker: queue one email in the (mocked) DAO, run the worker
     * once, and verify exactly one message reaches the fake SMTP server.
     *
     * @throws Exception On error from the email server
     */
    @Test
    public void testWorker() throws Exception {
        final List<String> toAddresses = Arrays.asList("test_to_address@anet.com");
        final AnetEmail testEmail = createTestEmail(1, toAddresses, "test_comment");
        // Run
        final List<AnetEmail> emailsToReadyToSend = Arrays.asList(testEmail);
        when(emailDao.getAll()).thenReturn(emailsToReadyToSend);
        emailWorker.run();
        // Verify
        final List<EmailResponse> emails = emailServer.requestAllEmailsFromServer();
        assertThat(emails.size()).isEqualTo(1);
    }

    /** Builds a deep-stubbed AnetEmail with the given id, recipients and comment. */
    private AnetEmail createTestEmail(int id, List<String> toAddresses, String comment) {
        final AnetEmail email = mock(AnetEmail.class, Mockito.RETURNS_DEEP_STUBS);
        when(email.getId()).thenReturn(id);
        when(email.getToAddresses()).thenReturn(toAddresses);
        when(email.getCreatedAt()).thenReturn(Instant.now());
        when(email.getComment()).thenReturn(comment);
        return email;
    }
}
|
package org.sagebionetworks.web.unitserver;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyList;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.sagebionetworks.web.shared.EntityBundleTransport.ACCESS_REQUIREMENTS;
import static org.sagebionetworks.web.shared.EntityBundleTransport.ANNOTATIONS;
import static org.sagebionetworks.web.shared.EntityBundleTransport.ENTITY;
import static org.sagebionetworks.web.shared.EntityBundleTransport.ENTITY_PATH;
import static org.sagebionetworks.web.shared.EntityBundleTransport.HAS_CHILDREN;
import static org.sagebionetworks.web.shared.EntityBundleTransport.PERMISSIONS;
import static org.sagebionetworks.web.shared.EntityBundleTransport.UNMET_ACCESS_REQUIREMENTS;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Matchers;
import org.mockito.Mockito;
import org.sagebionetworks.client.SynapseClient;
import org.sagebionetworks.client.exceptions.SynapseException;
import org.sagebionetworks.client.exceptions.SynapseNotFoundException;
import org.sagebionetworks.evaluation.model.Evaluation;
import org.sagebionetworks.evaluation.model.EvaluationStatus;
import org.sagebionetworks.evaluation.model.Participant;
import org.sagebionetworks.evaluation.model.Submission;
import org.sagebionetworks.evaluation.model.UserEvaluationPermissions;
import org.sagebionetworks.repo.model.ACCESS_TYPE;
import org.sagebionetworks.repo.model.AccessControlList;
import org.sagebionetworks.repo.model.AccessRequirement;
import org.sagebionetworks.repo.model.Annotations;
import org.sagebionetworks.repo.model.BatchResults;
import org.sagebionetworks.repo.model.Data;
import org.sagebionetworks.repo.model.EntityBundle;
import org.sagebionetworks.repo.model.EntityHeader;
import org.sagebionetworks.repo.model.EntityIdList;
import org.sagebionetworks.repo.model.EntityPath;
import org.sagebionetworks.repo.model.ExampleEntity;
import org.sagebionetworks.repo.model.FileEntity;
import org.sagebionetworks.repo.model.Folder;
import org.sagebionetworks.repo.model.LayerTypeNames;
import org.sagebionetworks.repo.model.LocationData;
import org.sagebionetworks.repo.model.LocationTypeNames;
import org.sagebionetworks.repo.model.LogEntry;
import org.sagebionetworks.repo.model.MembershipInvitation;
import org.sagebionetworks.repo.model.MembershipInvtnSubmission;
import org.sagebionetworks.repo.model.MembershipRequest;
import org.sagebionetworks.repo.model.MembershipRqstSubmission;
import org.sagebionetworks.repo.model.ObjectType;
import org.sagebionetworks.repo.model.PaginatedResults;
import org.sagebionetworks.repo.model.Project;
import org.sagebionetworks.repo.model.ProjectHeader;
import org.sagebionetworks.repo.model.ResourceAccess;
import org.sagebionetworks.repo.model.RestResourceList;
import org.sagebionetworks.repo.model.RestrictableObjectDescriptor;
import org.sagebionetworks.repo.model.RestrictableObjectType;
import org.sagebionetworks.repo.model.Team;
import org.sagebionetworks.repo.model.TeamMember;
import org.sagebionetworks.repo.model.TeamMembershipStatus;
import org.sagebionetworks.repo.model.TermsOfUseAccessRequirement;
import org.sagebionetworks.repo.model.UserGroup;
import org.sagebionetworks.repo.model.UserProfile;
import org.sagebionetworks.repo.model.UserSessionData;
import org.sagebionetworks.repo.model.VariableContentPaginatedResults;
import org.sagebionetworks.repo.model.attachment.AttachmentData;
import org.sagebionetworks.repo.model.attachment.PresignedUrl;
import org.sagebionetworks.repo.model.auth.UserEntityPermissions;
import org.sagebionetworks.repo.model.doi.Doi;
import org.sagebionetworks.repo.model.doi.DoiStatus;
import org.sagebionetworks.repo.model.file.ChunkRequest;
import org.sagebionetworks.repo.model.file.ChunkedFileToken;
import org.sagebionetworks.repo.model.file.CompleteAllChunksRequest;
import org.sagebionetworks.repo.model.file.CompleteChunkedFileRequest;
import org.sagebionetworks.repo.model.file.CreateChunkedFileTokenRequest;
import org.sagebionetworks.repo.model.file.ExternalFileHandle;
import org.sagebionetworks.repo.model.file.FileHandleResults;
import org.sagebionetworks.repo.model.file.S3FileHandle;
import org.sagebionetworks.repo.model.file.State;
import org.sagebionetworks.repo.model.file.UploadDaemonStatus;
import org.sagebionetworks.repo.model.message.MessageToUser;
import org.sagebionetworks.repo.model.principal.AddEmailInfo;
import org.sagebionetworks.repo.model.quiz.PassingRecord;
import org.sagebionetworks.repo.model.quiz.Quiz;
import org.sagebionetworks.repo.model.quiz.QuizResponse;
import org.sagebionetworks.repo.model.v2.wiki.V2WikiHeader;
import org.sagebionetworks.repo.model.v2.wiki.V2WikiHistorySnapshot;
import org.sagebionetworks.repo.model.v2.wiki.V2WikiPage;
import org.sagebionetworks.repo.model.wiki.WikiHeader;
import org.sagebionetworks.repo.model.wiki.WikiPage;
import org.sagebionetworks.schema.adapter.AdapterFactory;
import org.sagebionetworks.schema.adapter.JSONEntity;
import org.sagebionetworks.schema.adapter.JSONObjectAdapter;
import org.sagebionetworks.schema.adapter.JSONObjectAdapterException;
import org.sagebionetworks.schema.adapter.org.json.AdapterFactoryImpl;
import org.sagebionetworks.schema.adapter.org.json.EntityFactory;
import org.sagebionetworks.schema.adapter.org.json.JSONObjectAdapterImpl;
import org.sagebionetworks.web.client.transform.JSONEntityFactory;
import org.sagebionetworks.web.client.transform.JSONEntityFactoryImpl;
import org.sagebionetworks.web.client.transform.NodeModelCreator;
import org.sagebionetworks.web.client.transform.NodeModelCreatorImpl;
import org.sagebionetworks.web.client.widget.entity.file.FileTitleBar;
import org.sagebionetworks.web.client.widget.table.v2.TableModelUtils;
import org.sagebionetworks.web.server.servlet.MarkdownCacheRequest;
import org.sagebionetworks.web.server.servlet.ServiceUrlProvider;
import org.sagebionetworks.web.server.servlet.SynapseClientImpl;
import org.sagebionetworks.web.server.servlet.SynapseProvider;
import org.sagebionetworks.web.server.servlet.TokenProvider;
import org.sagebionetworks.web.shared.AccessRequirementUtils;
import org.sagebionetworks.web.shared.EntityBundleTransport;
import org.sagebionetworks.web.shared.EntityWrapper;
import org.sagebionetworks.web.shared.MembershipInvitationBundle;
import org.sagebionetworks.web.shared.PagedResults;
import org.sagebionetworks.web.shared.TeamBundle;
import org.sagebionetworks.web.shared.WikiPageKey;
import org.sagebionetworks.web.shared.exceptions.BadRequestException;
import org.sagebionetworks.web.shared.exceptions.ConflictException;
import org.sagebionetworks.web.shared.exceptions.NotFoundException;
import org.sagebionetworks.web.shared.exceptions.RestServiceException;
import org.sagebionetworks.web.shared.users.AclUtils;
import org.sagebionetworks.web.shared.users.PermissionLevel;
import com.google.common.cache.Cache;
/**
* Test for the SynapseClientImpl
* @author John
*
*/
public class SynapseClientImplTest {
SynapseProvider mockSynapseProvider;
TokenProvider mockTokenProvider;
ServiceUrlProvider mockUrlProvider;
SynapseClient mockSynapse;
SynapseClientImpl synapseClient;
String entityId = "123";
String inviteeUserId = "900";
UserProfile inviteeUserProfile;
ExampleEntity entity;
AttachmentData attachment1, attachment2;
Annotations annos;
UserEntityPermissions eup;
UserEvaluationPermissions userEvaluationPermissions;
List<EntityHeader> batchHeaderResults;
String testFileName = "testFileEntity.R";
EntityPath path;
org.sagebionetworks.repo.model.PaginatedResults<UserGroup> pgugs;
org.sagebionetworks.repo.model.PaginatedResults<UserProfile> pgups;
AccessControlList acl;
WikiPage page;
V2WikiPage v2Page;
S3FileHandle handle;
Evaluation mockEvaluation;
Participant mockParticipant;
UserSessionData mockUserSessionData;
UserProfile mockUserProfile;
MembershipInvtnSubmission testInvitation;
MessageToUser sentMessage;
TableModelUtils tableModelUtils;
private static final String EVAL_ID_1 = "eval ID 1";
private static final String EVAL_ID_2 = "eval ID 2";
private static JSONObjectAdapter jsonObjectAdapter = new JSONObjectAdapterImpl();
private static AdapterFactory adapterFactory = new AdapterFactoryImpl();
private static JSONEntityFactory jsonEntityFactory = new JSONEntityFactoryImpl(adapterFactory);
private static NodeModelCreator nodeModelCreator = new NodeModelCreatorImpl(jsonEntityFactory, jsonObjectAdapter);
private TeamMembershipStatus membershipStatus;
/**
 * Wires a {@code SynapseClientImpl} against a fully mocked {@code SynapseClient}
 * and stubs every remote call the tests in this class exercise. Stubs are
 * grouped by feature area below; later sections reuse objects built earlier,
 * so statement order matters.
 *
 * @throws SynapseException never thrown here (collaborators are mocks)
 * @throws JSONObjectAdapterException never thrown here (collaborators are mocks)
 */
@Before
public void before() throws SynapseException, JSONObjectAdapterException {
    // --- client under test, with all collaborators mocked ---
    mockSynapse = Mockito.mock(SynapseClient.class);
    mockSynapseProvider = Mockito.mock(SynapseProvider.class);
    mockUrlProvider = Mockito.mock(ServiceUrlProvider.class);
    when(mockSynapseProvider.createNewClient()).thenReturn(mockSynapse);
    mockTokenProvider = Mockito.mock(TokenProvider.class);
    tableModelUtils = new TableModelUtils(adapterFactory);
    synapseClient = new SynapseClientImpl();
    synapseClient.setSynapseProvider(mockSynapseProvider);
    synapseClient.setTokenProvider(mockTokenProvider);
    synapseClient.setServiceUrlProvider(mockUrlProvider);
    synapseClient.setTableModelUtils(tableModelUtils);
    // --- entity with two attachments, returned by getEntityById ---
    entity = new ExampleEntity();
    entity.setId(entityId);
    entity.setEntityType(ExampleEntity.class.getName());
    List<AttachmentData> attachments = new ArrayList<AttachmentData>();
    attachment1 = new AttachmentData();
    attachment1.setName("attachment1");
    attachment2 = new AttachmentData();
    attachment2.setName("attachment2");
    attachments.add(attachment1);
    attachments.add(attachment2);
    entity.setAttachments(attachments);
    // the mock synapse should return this object
    when(mockSynapse.getEntityById(entityId)).thenReturn(entity);
    // --- annotations for the same entity ---
    annos = new Annotations();
    annos.setId(entityId);
    annos.addAnnotation("string", "a string value");
    // the mock synapse should return this object
    when(mockSynapse.getAnnotations(entityId)).thenReturn(annos);
    // --- entity permissions (can delete but not view; owner 999) ---
    eup = new UserEntityPermissions();
    eup.setCanDelete(true);
    eup.setCanView(false);
    eup.setOwnerPrincipalId(999L);
    // the mock synapse should return this object
    when(mockSynapse.getUsersEntityPermissions(entityId)).thenReturn(eup);
    // --- evaluation permissions: EVAL_ID_1 may NOT change perms, EVAL_ID_2 may ---
    userEvaluationPermissions = new UserEvaluationPermissions();
    userEvaluationPermissions.setCanChangePermissions(false);
    when(mockSynapse.getUserEvaluationPermissions(EVAL_ID_1)).thenReturn(userEvaluationPermissions);
    userEvaluationPermissions = new UserEvaluationPermissions();
    userEvaluationPermissions.setCanChangePermissions(true);
    when(mockSynapse.getUserEvaluationPermissions(EVAL_ID_2)).thenReturn(userEvaluationPermissions);
    // --- entity path with a single header ---
    path = new EntityPath();
    path.setPath(new ArrayList<EntityHeader>());
    EntityHeader header = new EntityHeader();
    header.setId(entityId);
    header.setName("RomperRuuuu");
    path.getPath().add(header);
    // the mock synapse should return this object
    when(mockSynapse.getEntityPath(entityId)).thenReturn(path);
    // --- paginated user groups / user profiles (one element each) ---
    pgugs = new org.sagebionetworks.repo.model.PaginatedResults<UserGroup>();
    List<UserGroup> ugs = new ArrayList<UserGroup>();
    ugs.add(new UserGroup());
    pgugs.setResults(ugs);
    when(mockSynapse.getGroups(anyInt(), anyInt())).thenReturn(pgugs);
    pgups = new org.sagebionetworks.repo.model.PaginatedResults<UserProfile>();
    List<UserProfile> ups = new ArrayList<UserProfile>();
    ups.add(new UserProfile());
    pgups.setResults(ups);
    when(mockSynapse.getUsers(anyInt(), anyInt())).thenReturn(pgups);
    // --- ACL returned for every get/create/update call ---
    acl = new AccessControlList();
    acl.setId("sys999");
    Set<ResourceAccess> ras = new HashSet<ResourceAccess>();
    ResourceAccess ra = new ResourceAccess();
    ra.setPrincipalId(101L);
    ra.setAccessType(AclUtils.getACCESS_TYPEs(PermissionLevel.CAN_ADMINISTER));
    // NOTE(review): 'ra' is fully configured but never added to 'ras', so the
    // ACL is stubbed with an EMPTY resource-access set. This looks like a
    // missing 'ras.add(ra);' — confirm whether any test depends on the empty
    // set before changing it.
    acl.setResourceAccess(ras);
    when(mockSynapse.getACL(anyString())).thenReturn(acl);
    when(mockSynapse.createACL((AccessControlList)any())).thenReturn(acl);
    when(mockSynapse.updateACL((AccessControlList)any())).thenReturn(acl);
    when(mockSynapse.updateACL((AccessControlList)any(), eq(true))).thenReturn(acl);
    when(mockSynapse.updateACL((AccessControlList)any(), eq(false))).thenReturn(acl);
    // --- benefactor header ---
    EntityHeader bene = new EntityHeader();
    bene.setId("syn999");
    when(mockSynapse.getEntityBenefactor(anyString())).thenReturn(bene);
    // --- batch of 10 entity headers: syn0..syn9 ---
    BatchResults<EntityHeader> batchHeaders = new BatchResults<EntityHeader>();
    batchHeaderResults = new ArrayList<EntityHeader>();
    for (int i = 0; i < 10; i++) {
        EntityHeader h = new EntityHeader();
        h.setId("syn"+i);
        batchHeaderResults.add(h);
    }
    batchHeaders.setResults(batchHeaderResults);
    when(mockSynapse.getEntityHeaderBatch(anyList())).thenReturn(batchHeaders);
    // --- entity bundle: full mask returns a populated bundle, mask 0 an empty one ---
    List<AccessRequirement> accessRequirements= new ArrayList<AccessRequirement>();
    accessRequirements.add(createAccessRequirement(ACCESS_TYPE.DOWNLOAD));
    int mask = ENTITY | ANNOTATIONS | PERMISSIONS | ENTITY_PATH |
            HAS_CHILDREN | ACCESS_REQUIREMENTS | UNMET_ACCESS_REQUIREMENTS;
    int emptyMask = 0;
    EntityBundle bundle = new EntityBundle();
    bundle.setEntity(entity);
    bundle.setAnnotations(annos);
    bundle.setPermissions(eup);
    bundle.setPath(path);
    bundle.setHasChildren(false);
    bundle.setAccessRequirements(accessRequirements);
    bundle.setUnmetAccessRequirements(accessRequirements);
    when(mockSynapse.getEntityBundle(anyString(),Matchers.eq(mask))).thenReturn(bundle);
    EntityBundle emptyBundle = new EntityBundle();
    when(mockSynapse.getEntityBundle(anyString(),Matchers.eq(emptyMask))).thenReturn(emptyBundle);
    when(mockSynapse.canAccess("syn101", ACCESS_TYPE.READ)).thenReturn(true);
    // --- wiki pages (v1 and v2) ---
    page = new WikiPage();
    page.setId("testId");
    page.setMarkdown("my markdown");
    page.setParentWikiId(null);
    page.setTitle("A Title");
    v2Page = new V2WikiPage();
    v2Page.setId("v2TestId");
    v2Page.setEtag("122333");
    // --- S3 file handle, returned for raw-handle lookups and chunked-upload completion ---
    handle = new S3FileHandle();
    handle.setId("4422");
    handle.setBucketName("bucket");
    handle.setFileName(testFileName);
    handle.setKey("key");
    when(mockSynapse.getRawFileHandle(anyString())).thenReturn(handle);
    when(mockSynapse.completeChunkFileUpload(any(CompleteChunkedFileRequest.class))).thenReturn(handle);
    // --- access requirements: both met and unmet lookups return an empty page ---
    VariableContentPaginatedResults<AccessRequirement> ars = new VariableContentPaginatedResults<AccessRequirement>();
    ars.setTotalNumberOfResults(0);
    ars.setResults(new ArrayList<AccessRequirement>());
    when(mockSynapse.getAccessRequirements(any(RestrictableObjectDescriptor.class))).thenReturn(ars);
    when(mockSynapse.getUnmetAccessRequirements(any(RestrictableObjectDescriptor.class))).thenReturn(ars);
    // --- evaluation and participant ---
    mockEvaluation = Mockito.mock(Evaluation.class);
    when(mockEvaluation.getStatus()).thenReturn(EvaluationStatus.OPEN);
    when(mockSynapse.getEvaluation(anyString())).thenReturn(mockEvaluation);
    // --- current user session/profile ---
    mockUserSessionData = Mockito.mock(UserSessionData.class);
    mockUserProfile = Mockito.mock(UserProfile.class);
    when(mockSynapse.getUserSessionData()).thenReturn(mockUserSessionData);
    when(mockUserSessionData.getProfile()).thenReturn(mockUserProfile);
    when(mockUserProfile.getOwnerId()).thenReturn("MyOwnerID");
    mockParticipant = Mockito.mock(Participant.class);
    when(mockSynapse.getParticipant(anyString(), anyString())).thenReturn(mockParticipant);
    when(mockSynapse.createParticipant(anyString())).thenReturn(mockParticipant);
    // --- chunked upload daemon: status query returns COMPLETED, start returns PROCESSING ---
    UploadDaemonStatus status = new UploadDaemonStatus();
    String fileHandleId = "myFileHandleId";
    status.setFileHandleId(fileHandleId);
    status.setState(State.COMPLETED);
    when(mockSynapse.getCompleteUploadDaemonStatus(anyString())).thenReturn(status);
    // 'status' is reassigned: the PROCESSING instance is a different object.
    status = new UploadDaemonStatus();
    status.setState(State.PROCESSING);
    status.setPercentComplete(.05d);
    when(mockSynapse.startUploadDeamon(any(CompleteAllChunksRequest.class))).thenReturn(status);
    // --- team membership: no open invitations or requests ---
    PaginatedResults<MembershipInvitation> openInvites = new PaginatedResults<MembershipInvitation>();
    openInvites.setTotalNumberOfResults(0);
    when(mockSynapse.getOpenMembershipInvitations(anyString(), anyString(), anyLong(), anyLong())).thenReturn(openInvites);
    PaginatedResults<MembershipRequest> openRequests = new PaginatedResults<MembershipRequest>();
    openRequests.setTotalNumberOfResults(0);
    when(mockSynapse.getOpenMembershipRequests(anyString(), anyString(), anyLong(), anyLong())).thenReturn(openRequests);
    // --- membership status: every flag false ---
    membershipStatus = new TeamMembershipStatus();
    membershipStatus.setCanJoin(false);
    membershipStatus.setHasOpenInvitation(false);
    membershipStatus.setHasOpenRequest(false);
    membershipStatus.setHasUnmetAccessRequirement(false);
    membershipStatus.setIsMember(false);
    membershipStatus.setMembershipApprovalRequired(false);
    when(mockSynapse.getTeamMembershipStatus(anyString(), anyString())).thenReturn(membershipStatus);
    // --- messaging ---
    sentMessage = new MessageToUser();
    sentMessage.setId("987");
    when(mockSynapse.sendStringMessage(any(MessageToUser.class), anyString())).thenReturn(sentMessage);
    //getMyProjects getUserProjects
    // One project header, but a reported total of 1100 (for paging tests).
    PaginatedResults<ProjectHeader> headers = new PaginatedResults<ProjectHeader>();
    headers.setTotalNumberOfResults(1100);
    // NOTE(review): raw-typed 'new ArrayList()' — should be
    // 'new ArrayList<ProjectHeader>()' (left as-is in this doc-only pass).
    List<ProjectHeader> projectHeaders = new ArrayList();
    projectHeaders.add(new ProjectHeader());
    headers.setResults(projectHeaders);
    when(mockSynapse.getMyProjects(anyInt(), anyInt())).thenReturn(headers);
    when(mockSynapse.getProjectsFromUser(anyLong(), anyInt(), anyInt())).thenReturn(headers);
    when(mockSynapse.getProjectsForTeam(anyLong(), anyInt(), anyInt())).thenReturn(headers);
}
/**
 * Builds a Terms-of-Use access requirement of the given access type,
 * scoped to a single entity subject (id "101").
 */
private AccessRequirement createAccessRequirement(ACCESS_TYPE type) {
    RestrictableObjectDescriptor subject = new RestrictableObjectDescriptor();
    subject.setId("101");
    subject.setType(RestrictableObjectType.ENTITY);
    TermsOfUseAccessRequirement requirement = new TermsOfUseAccessRequirement();
    requirement.setConcreteType(TermsOfUseAccessRequirement.class.getName());
    requirement.setSubjectIds(Arrays.asList(subject));
    requirement.setAccessType(type);
    return requirement;
}
/**
 * Stubs one open membership invitation (addressed to the invitee user) and
 * the profile lookup for that invitee.
 */
private void setupTeamInvitations() throws SynapseException{
    testInvitation = new MembershipInvtnSubmission();
    testInvitation.setId("628319");
    testInvitation.setInviteeId(inviteeUserId);
    List<MembershipInvtnSubmission> invitationList = new ArrayList<MembershipInvtnSubmission>();
    invitationList.add(testInvitation);
    PaginatedResults<MembershipInvtnSubmission> invitationPage = new PaginatedResults<MembershipInvtnSubmission>();
    invitationPage.setResults(invitationList);
    when(mockSynapse.getOpenMembershipInvitationSubmissions(anyString(), anyString(), anyLong(), anyLong())).thenReturn(invitationPage);
    // The invitee's profile resolves to a simple stub.
    inviteeUserProfile = new UserProfile();
    inviteeUserProfile.setOwnerId(inviteeUserId);
    inviteeUserProfile.setUserName("Invitee User");
    when(mockSynapse.getUserProfile(eq(inviteeUserId))).thenReturn(inviteeUserProfile);
}
@Test
public void testGetEntityBundleAll() throws RestServiceException{
    // Request every part of the bundle and confirm each part came back non-null.
    int mask = ENTITY | ANNOTATIONS | PERMISSIONS | ENTITY_PATH | HAS_CHILDREN
            | ACCESS_REQUIREMENTS | UNMET_ACCESS_REQUIREMENTS;
    EntityBundleTransport transport = synapseClient.getEntityBundle(entityId, mask);
    assertNotNull(transport);
    assertNotNull(transport.getEntityJson());
    assertNotNull(transport.getAnnotationsJson());
    assertNotNull(transport.getEntityPath());
    assertNotNull(transport.getPermissions());
    assertNotNull(transport.getHasChildren());
    assertNotNull(transport.getAccessRequirementsJson());
    assertNotNull(transport.getUnmetAccessRequirementsJson());
}
@Test
public void testGetEntityBundleNone() throws RestServiceException{
    // An empty mask should yield a bundle with every part left null.
    EntityBundleTransport transport = synapseClient.getEntityBundle(entityId, 0x0);
    assertNotNull(transport);
    assertNull(transport.getEntityJson());
    assertNull(transport.getAnnotationsJson());
    assertNull(transport.getEntityPath());
    assertNull(transport.getPermissions());
    assertNull(transport.getHasChildren());
    assertNull(transport.getAccessRequirementsJson());
    assertNull(transport.getUnmetAccessRequirementsJson());
}
@Test (expected=IllegalArgumentException.class)
public void testParseEntityFromJsonNoType() throws JSONObjectAdapterException{
    // An entity serialized without its type set cannot be parsed back.
    ExampleEntity missingType = new ExampleEntity();
    missingType.setName("some name");
    missingType.setDescription("some description");
    String json = EntityFactory.createJSONStringForEntity(missingType);
    // The type is required, so this call is expected to throw.
    synapseClient.parseEntityFromJson(json);
}
@Test
public void testParseEntityFromJson() throws JSONObjectAdapterException{
    // Round-trip: serialize an entity, parse it back, expect an equal clone.
    ExampleEntity original = new ExampleEntity();
    original.setName("some name");
    original.setDescription("some description");
    original.setEntityType(ExampleEntity.class.getName());
    String json = EntityFactory.createJSONStringForEntity(original);
    ExampleEntity roundTripped = (ExampleEntity) synapseClient.parseEntityFromJson(json);
    assertEquals(original, roundTripped);
}
@Test
public void testCreateOrUpdateEntityFalse() throws JSONObjectAdapterException, RestServiceException, SynapseException{
    // With isNew == false the client must update (putEntity), not create.
    ExampleEntity toSave = new ExampleEntity();
    toSave.setName("some name");
    toSave.setDescription("some description");
    toSave.setEntityType(ExampleEntity.class.getName());
    ExampleEntity saved = new ExampleEntity();
    saved.setName("some name");
    saved.setDescription("some description");
    saved.setEntityType(ExampleEntity.class.getName());
    saved.setId("syn123");
    saved.setEtag("45");
    when(mockSynapse.putEntity(toSave)).thenReturn(saved);
    String returnedId = synapseClient.createOrUpdateEntity(toSave, null, false);
    assertEquals(saved.getId(), returnedId);
    verify(mockSynapse).putEntity(toSave);
}
@Test
public void testCreateOrUpdateEntityTrue() throws JSONObjectAdapterException, RestServiceException, SynapseException{
    // With isNew == true the client must create the entity.
    ExampleEntity toSave = new ExampleEntity();
    toSave.setName("some name");
    toSave.setDescription("some description");
    toSave.setEntityType(ExampleEntity.class.getName());
    ExampleEntity created = new ExampleEntity();
    created.setName("some name");
    created.setDescription("some description");
    created.setEntityType(ExampleEntity.class.getName());
    created.setId("syn123");
    created.setEtag("45");
    when(mockSynapse.createEntity(toSave)).thenReturn(created);
    String returnedId = synapseClient.createOrUpdateEntity(toSave, null, true);
    assertEquals(created.getId(), returnedId);
    verify(mockSynapse).createEntity(toSave);
}
@Test
public void testCreateOrUpdateEntityTrueWithAnnos() throws JSONObjectAdapterException, RestServiceException, SynapseException{
    // Creating with annotations should also push the annotations, stamped
    // with the id/etag of the newly created entity.
    ExampleEntity toSave = new ExampleEntity();
    toSave.setName("some name");
    toSave.setDescription("some description");
    toSave.setEntityType(ExampleEntity.class.getName());
    Annotations annos = new Annotations();
    annos.addAnnotation("someString", "one");
    ExampleEntity created = new ExampleEntity();
    created.setName("some name");
    created.setDescription("some description");
    created.setEntityType(ExampleEntity.class.getName());
    created.setId("syn123");
    created.setEtag("45");
    when(mockSynapse.createEntity(toSave)).thenReturn(created);
    String returnedId = synapseClient.createOrUpdateEntity(toSave, annos, true);
    assertEquals(created.getId(), returnedId);
    verify(mockSynapse).createEntity(toSave);
    annos.setEtag(created.getEtag());
    annos.setId(created.getId());
    verify(mockSynapse).updateAnnotations(created.getId(), annos);
}
@Test
public void testGetNodeAcl() throws Exception {
    // The stubbed ACL should be returned unchanged.
    AccessControlList returned = synapseClient.getNodeAcl("syn101");
    assertEquals(acl, returned);
}
@Test
public void testCreateAcl() throws Exception {
    // Creating an ACL should hand back the stubbed ACL.
    AccessControlList returned = synapseClient.createAcl(acl);
    assertEquals(acl, returned);
}
@Test
public void testUpdateAcl() throws Exception {
    // Updating an ACL should hand back the stubbed ACL.
    AccessControlList returned = synapseClient.updateAcl(acl);
    assertEquals(acl, returned);
}
@Test
public void testUpdateAclRecursive() throws Exception {
    // The recursive flag must be forwarded to Synapse.updateACL.
    AccessControlList returned = synapseClient.updateAcl(acl, true);
    assertEquals(acl, returned);
    verify(mockSynapse).updateACL(any(AccessControlList.class), eq(true));
}
@Test
public void testDeleteAcl() throws Exception {
    // Deleting an ACL should hand back the stubbed ACL.
    AccessControlList returned = synapseClient.deleteAcl("syn101");
    assertEquals(acl, returned);
}
@Test
public void testHasAccess() throws Exception {
    // The stubbed access check should report READ access on syn101.
    assertTrue(synapseClient.hasAccess("syn101", "READ"));
}
@Test
public void testGetAllUsers() throws Exception {
    // The wrapped JSON should decode to the paginated profiles stubbed in setup.
    EntityWrapper wrapper = synapseClient.getAllUsers();
    org.sagebionetworks.web.shared.PaginatedResults<UserProfile> decoded =
            nodeModelCreator.createPaginatedResults(wrapper.getEntityJson(), UserProfile.class);
    assertEquals(this.pgups.getResults(), decoded.getResults());
}
@Test
public void testGetUserProfile() throws Exception {
    // Wiring test: the profile lookup goes straight through to the Synapse client.
    String testUserId = "myUserId";
    UserProfile expectedProfile = new UserProfile();
    expectedProfile.setUserName("Test User");
    when(mockUrlProvider.getRepositoryServiceUrl()).thenReturn("http://mytestrepourl");
    when(mockSynapse.getUserProfile(eq(testUserId))).thenReturn(expectedProfile);
    UserProfile actualProfile = synapseClient.getUserProfile(testUserId);
    assertEquals(actualProfile, expectedProfile);
}
@Test
public void testCreateUserProfileAttachment() throws Exception {
    // Wiring test: the presigned URL from the Synapse client comes back as JSON.
    String attachmentId = "myTestId";
    String token = "myTestToken";
    PresignedUrl stubbedUrl = new PresignedUrl();
    stubbedUrl.setPresignedUrl("http://mytestpresignedurl");
    when(mockSynapse.createUserProfileAttachmentPresignedUrl(attachmentId, token)).thenReturn(stubbedUrl);
    String actual = synapseClient.createUserProfileAttachmentPresignedUrl(attachmentId, token);
    assertEquals(actual, EntityFactory.createJSONStringForEntity(stubbedUrl));
}
/**
 * Clears all prior stubbing/interactions on the mock Synapse client and
 * re-stubs the two calls testUpdateLocationable depends on.
 */
private void resetUpdateLocationableMock(Data layer, String testUrl, String testId) throws SynapseException {
    reset(mockSynapse);
    when(mockSynapse.getEntityById(testId)).thenReturn(layer);
    when(mockSynapse.updateExternalLocationableToSynapse(layer, testUrl)).thenReturn(layer);
}
@Test
public void testUpdateLocationable() throws Exception {
    // Wiring test: updateExternalLocationable should forward the (possibly
    // renamed) layer to Synapse.updateExternalLocationableToSynapse.
    // Three cases: null new-name, empty-string new-name, and a real rename.
    String testUrl = "http://mytesturl/something.jpg";
    List<LocationData> locations = new ArrayList<LocationData>();
    LocationData externalLocation = new LocationData();
    externalLocation.setPath(testUrl);
    externalLocation.setType(LocationTypeNames.external);
    locations.add(externalLocation);
    Data layer = new Data();
    layer.setType(LayerTypeNames.M);
    layer.setLocations(locations);
    String testId = "myTestId";
    // Case 1: null new-name — no rename; the wrapped JSON matches the layer.
    resetUpdateLocationableMock(layer, testUrl, testId);
    EntityWrapper returnedLayer = synapseClient.updateExternalLocationable(testId, testUrl, null);
    //should have called with the layer
    verify(mockSynapse).updateExternalLocationableToSynapse(eq(layer), eq(testUrl));
    assertEquals(returnedLayer.getEntityJson(), EntityFactory.createJSONStringForEntity(layer));
    // Case 2: empty string is treated the same as null (no rename).
    //test with empty string new name
    resetUpdateLocationableMock(layer, testUrl, testId);
    synapseClient.updateExternalLocationable(testId, testUrl, "");
    verify(mockSynapse).updateExternalLocationableToSynapse(eq(layer), eq(testUrl));
    // Case 3: a real rename. NOTE: 'layer' is mutated AFTER the call so the
    // eq(layer) matcher below compares against the renamed state.
    //and test with a rename
    resetUpdateLocationableMock(layer, testUrl, testId);
    String newName = "a new name";
    synapseClient.updateExternalLocationable(testId, testUrl, newName);
    layer.setName(newName);
    verify(mockSynapse).updateExternalLocationableToSynapse(eq(layer), eq(testUrl));
}
@Test
public void testRemoveAttachmentFromEntity() throws Exception {
    // The entity is fetched, stripped of attachment2, and put back with only
    // attachment1 remaining.
    when(mockSynapse.putEntity(any(ExampleEntity.class))).thenReturn(entity);
    ArgumentCaptor<ExampleEntity> putCaptor = ArgumentCaptor.forClass(ExampleEntity.class);
    synapseClient.removeAttachmentFromEntity(entityId, attachment2.getName());
    verify(mockSynapse).getEntityById(entityId);
    verify(mockSynapse).putEntity(putCaptor.capture());
    List<AttachmentData> remaining = putCaptor.getValue().getAttachments();
    assertTrue(remaining.size() == 1 && remaining.get(0).equals(attachment1));
}
@Test
public void testGetJSONEntity() throws Exception {
    // getJSONEntity should pass the repo URI straight to Synapse.getEntity.
    JSONObject entityJson = EntityFactory.createJSONObjectForEntity(entity);
    when(mockSynapse.getEntity(anyString())).thenReturn(entityJson);
    String testRepoUri = "/testservice";
    synapseClient.getJSONEntity(testRepoUri);
    verify(mockSynapse).getEntity(testRepoUri);
}
@Test
public void testGetWikiHeaderTree() throws Exception {
    // Wiring test for getWikiHeaderTree.
    PaginatedResults<WikiHeader> stubbedTree = new PaginatedResults<WikiHeader>();
    when(mockSynapse.getWikiHeaderTree(anyString(), any(ObjectType.class))).thenReturn(stubbedTree);
    synapseClient.getWikiHeaderTree("testId", ObjectType.ENTITY.toString());
    verify(mockSynapse).getWikiHeaderTree(anyString(), any(ObjectType.class));
}
@Test
public void testGetWikiAttachmentHandles() throws Exception {
    // Wiring test. Note: the repo-side method name really is spelled
    // "getWikiAttachmenthHandles" — not a typo in this test.
    FileHandleResults stubbedResults = new FileHandleResults();
    when(mockSynapse.getWikiAttachmenthHandles(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class))).thenReturn(stubbedResults);
    synapseClient.getWikiAttachmentHandles(new WikiPageKey("syn123", ObjectType.ENTITY.toString(), "20"));
    verify(mockSynapse).getWikiAttachmenthHandles(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
}
@Test
public void testCreateV2WikiPage() throws Exception {
    // Wiring test for createV2WikiPage.
    String pageJson = EntityFactory.createJSONStringForEntity(v2Page);
    when(mockSynapse.createV2WikiPage(anyString(), any(ObjectType.class), any(V2WikiPage.class))).thenReturn(v2Page);
    synapseClient.createV2WikiPage("testId", ObjectType.ENTITY.toString(), pageJson);
    verify(mockSynapse).createV2WikiPage(anyString(), any(ObjectType.class), any(V2WikiPage.class));
}
@Test
public void testDeleteV2WikiPage() throws Exception {
    // Wiring test for deleteV2WikiPage.
    WikiPageKey key = new WikiPageKey("syn123", ObjectType.ENTITY.toString(), "20");
    synapseClient.deleteV2WikiPage(key);
    verify(mockSynapse).deleteV2WikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
}
@Test
public void testGetV2WikiPage() throws Exception {
    // Wiring tests for the current-version and specific-version page lookups.
    WikiPageKey key = new WikiPageKey("syn123", ObjectType.ENTITY.toString(), "20");
    when(mockSynapse.getV2WikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class))).thenReturn(v2Page);
    synapseClient.getV2WikiPage(key);
    verify(mockSynapse).getV2WikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
    when(mockSynapse.getVersionOfV2WikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class), any(Long.class))).thenReturn(v2Page);
    synapseClient.getVersionOfV2WikiPage(key, new Long(0));
    verify(mockSynapse).getVersionOfV2WikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class), any(Long.class));
}
@Test
public void testUpdateV2WikiPage() throws Exception {
    // Wiring test for updateV2WikiPage.
    String pageJson = EntityFactory.createJSONStringForEntity(v2Page);
    when(mockSynapse.updateV2WikiPage(anyString(), any(ObjectType.class), any(V2WikiPage.class))).thenReturn(v2Page);
    synapseClient.updateV2WikiPage("testId", ObjectType.ENTITY.toString(), pageJson);
    verify(mockSynapse).updateV2WikiPage(anyString(), any(ObjectType.class), any(V2WikiPage.class));
}
@Test
public void testRestoreV2WikiPage() throws Exception {
    // Wiring test for restoreV2WikiPage.
    when(mockSynapse.restoreV2WikiPage(anyString(), any(ObjectType.class), any(String.class), anyLong())).thenReturn(v2Page);
    synapseClient.restoreV2WikiPage("ownerId", ObjectType.ENTITY.toString(), "syn123", new Long(2));
    verify(mockSynapse).restoreV2WikiPage(anyString(), any(ObjectType.class), any(String.class), anyLong());
}
@Test
public void testGetV2WikiHeaderTree() throws Exception {
    // Wiring test for getV2WikiHeaderTree.
    PaginatedResults<V2WikiHeader> stubbedTree = new PaginatedResults<V2WikiHeader>();
    when(mockSynapse.getV2WikiHeaderTree(anyString(), any(ObjectType.class))).thenReturn(stubbedTree);
    synapseClient.getV2WikiHeaderTree("testId", ObjectType.ENTITY.toString());
    verify(mockSynapse).getV2WikiHeaderTree(anyString(), any(ObjectType.class));
}
@Test
public void testGetV2WikiHistory() throws Exception {
    // Wiring test for getV2WikiHistory (limit 10, offset 0).
    PaginatedResults<V2WikiHistorySnapshot> stubbedHistory = new PaginatedResults<V2WikiHistorySnapshot>();
    when(mockSynapse.getV2WikiHistory(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class), any(Long.class), any(Long.class))).thenReturn(stubbedHistory);
    synapseClient.getV2WikiHistory(new WikiPageKey("syn123", ObjectType.ENTITY.toString(), "20"), new Long(10), new Long(0));
    verify(mockSynapse).getV2WikiHistory(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class), any(Long.class), any(Long.class));
}
@Test
public void testGetV2WikiAttachmentHandles() throws Exception {
    // Wiring tests for current and versioned attachment-handle lookups.
    FileHandleResults stubbedResults = new FileHandleResults();
    WikiPageKey key = new WikiPageKey("syn123", ObjectType.ENTITY.toString(), "20");
    when(mockSynapse.getV2WikiAttachmentHandles(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class))).thenReturn(stubbedResults);
    synapseClient.getV2WikiAttachmentHandles(key);
    verify(mockSynapse).getV2WikiAttachmentHandles(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
    when(mockSynapse.getVersionOfV2WikiAttachmentHandles(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class), any(Long.class))).thenReturn(stubbedResults);
    synapseClient.getVersionOfV2WikiAttachmentHandles(key, new Long(0));
    verify(mockSynapse).getVersionOfV2WikiAttachmentHandles(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class), any(Long.class));
}
@Test
public void testZipAndUpload() throws IOException, RestServiceException, JSONObjectAdapterException, SynapseException {
    // Zipping markdown should end with a file handle being created.
    when(mockSynapse.createFileHandle(any(File.class), any(String.class))).thenReturn(handle);
    synapseClient.zipAndUploadFile("markdown", "fileName");
    verify(mockSynapse).createFileHandle(any(File.class), any(String.class));
}
@Test
public void testGetMarkdown() throws IOException, RestServiceException, SynapseException {
    // Wiring tests for current and versioned markdown downloads.
    String stubbedMarkdown = "someMarkDown";
    WikiPageKey key = new WikiPageKey("syn123", ObjectType.ENTITY.toString(), "20");
    when(mockSynapse.downloadV2WikiMarkdown(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class))).thenReturn(stubbedMarkdown);
    synapseClient.getMarkdown(key);
    verify(mockSynapse).downloadV2WikiMarkdown(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
    when(mockSynapse.downloadVersionOfV2WikiMarkdown(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class), any(Long.class))).thenReturn(stubbedMarkdown);
    synapseClient.getVersionOfMarkdown(key, new Long(0));
    verify(mockSynapse).downloadVersionOfV2WikiMarkdown(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class), any(Long.class));
}
@Test
public void testCreateV2WikiPageWithV1() throws Exception {
    // Wiring test for the V1-compatibility create bridge.
    String pageJson = EntityFactory.createJSONStringForEntity(page);
    when(mockSynapse.createV2WikiPageWithV1(anyString(), any(ObjectType.class), any(WikiPage.class))).thenReturn(page);
    synapseClient.createV2WikiPageWithV1("testId", ObjectType.ENTITY.toString(), pageJson);
    verify(mockSynapse).createV2WikiPageWithV1(anyString(), any(ObjectType.class), any(WikiPage.class));
}
@Test
public void testUpdateV2WikiPageWithV1() throws Exception {
    // Wiring test for the V1-compatibility update bridge.
    String pageJson = EntityFactory.createJSONStringForEntity(page);
    when(mockSynapse.updateV2WikiPageWithV1(anyString(), any(ObjectType.class), any(WikiPage.class))).thenReturn(page);
    synapseClient.updateV2WikiPageWithV1("testId", ObjectType.ENTITY.toString(), pageJson);
    verify(mockSynapse).updateV2WikiPageWithV1(anyString(), any(ObjectType.class), any(WikiPage.class));
}
@Test
public void getV2WikiPageAsV1() throws Exception {
    // Exercises the V1 view of a V2 wiki page AND the client-side page cache:
    // requesting the same key twice must not call through to the Java Synapse
    // client a second time (times(1) must still be satisfied after call two).
    Mockito.when(mockSynapse.getV2WikiPageAsV1(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class))).thenReturn(page);
    Mockito.when(mockSynapse.getV2WikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class))).thenReturn(v2Page);
    synapseClient.getV2WikiPageAsV1(new WikiPageKey("syn123", ObjectType.ENTITY.toString(), "20"));
    verify(mockSynapse).getV2WikiPageAsV1(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
    //asking for the same page twice should result in a cache hit, and it should not ask for it from the synapse client
    synapseClient.getV2WikiPageAsV1(new WikiPageKey("syn123", ObjectType.ENTITY.toString(), "20"));
    verify(mockSynapse, Mockito.times(1)).getV2WikiPageAsV1(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
    // Same pattern, but for a specific page version.
    Mockito.when(mockSynapse.getVersionOfV2WikiPageAsV1(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class), any(Long.class))).thenReturn(page);
    Mockito.when(mockSynapse.getVersionOfV2WikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class), anyLong())).thenReturn(v2Page);
    synapseClient.getVersionOfV2WikiPageAsV1(new WikiPageKey("syn123", ObjectType.ENTITY.toString(), "20"), new Long(0));
    verify(mockSynapse).getVersionOfV2WikiPageAsV1(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class), any(Long.class));
    //asking for the same page twice should result in a cache hit, and it should not ask for it from the synapse client
    synapseClient.getVersionOfV2WikiPageAsV1(new WikiPageKey("syn123", ObjectType.ENTITY.toString(), "20"), new Long(0));
    verify(mockSynapse, Mockito.times(1)).getVersionOfV2WikiPageAsV1(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class), any(Long.class));
}
/**
 * Wipes all prior stubbing/interactions on the mock Synapse client and
 * re-stubs the three calls testUpdateExternalFileHandle relies on.
 */
private void resetUpdateExternalFileHandleMocks(String testId, FileEntity file, ExternalFileHandle handle) throws SynapseException, JSONObjectAdapterException {
    reset(mockSynapse);
    when(mockSynapse.getEntityById(testId)).thenReturn(file);
    when(mockSynapse.putEntity(any(FileEntity.class))).thenReturn(file);
    when(mockSynapse.createExternalFileHandle(any(ExternalFileHandle.class))).thenReturn(handle);
}
@Test
public void testUpdateExternalFileHandle() throws Exception {
    // updateExternalFile should create a new ExternalFileHandle, point the
    // FileEntity at it, and rename the entity after the URL's file name.
    // Three scenarios are covered in sequence (mocks reset between each):
    //   1) null name      -> rename to the URL's file name
    //   2) rename fails   -> update still succeeds, original name retained
    //   3) explicit name  -> that name wins
    //verify call is directly calling the synapse client provider, and it tries to rename the entity to the filename
    String myFileName = "testFileName.csv";
    String testUrl = "http://mytesturl/"+myFileName;
    String testId = "myTestId";
    FileEntity file = new FileEntity();
    String originalFileEntityName = "syn1223";
    file.setName(originalFileEntityName);
    file.setId(testId);
    file.setDataFileHandleId("handle1");
    ExternalFileHandle handle = new ExternalFileHandle();
    handle.setExternalURL(testUrl);
    // Scenario 1: no explicit name.
    resetUpdateExternalFileHandleMocks(testId, file, handle);
    ArgumentCaptor<FileEntity> arg = ArgumentCaptor.forClass(FileEntity.class);
    synapseClient.updateExternalFile(testId, testUrl, null);
    verify(mockSynapse).getEntityById(testId);
    verify(mockSynapse).createExternalFileHandle(any(ExternalFileHandle.class));
    // Two puts expected: one for the handle update, one for the rename.
    verify(mockSynapse, Mockito.times(2)).putEntity(arg.capture());
    //verify rename
    FileEntity fileEntityArg = arg.getValue(); //last value captured
    assertEquals(myFileName, fileEntityArg.getName());
    // Scenario 2: the second put (the rename) throws; name stays unchanged.
    //and if rename fails, verify all is well (but the FileEntity name is not updated)
    resetUpdateExternalFileHandleMocks(testId, file, handle);
    file.setName(originalFileEntityName);
    //first call should return file, second call to putEntity should throw an exception
    when(mockSynapse.putEntity(any(FileEntity.class))).thenReturn(file).thenThrow(new IllegalArgumentException("invalid name for some reason"));
    synapseClient.updateExternalFile(testId, testUrl, "");
    //called createExternalFileHandle
    verify(mockSynapse).createExternalFileHandle(any(ExternalFileHandle.class));
    //and it should have called putEntity 2 additional times
    verify(mockSynapse, Mockito.times(2)).putEntity(arg.capture());
    fileEntityArg = arg.getValue(); //last value captured
    assertEquals(originalFileEntityName, fileEntityArg.getName());
    // Scenario 3: an explicit new name is applied as-is. NOTE: 'file' is
    // mutated before the eq(file) check so the matcher sees the renamed state.
    //and (finally) verify the correct name if it is explicitly set
    resetUpdateExternalFileHandleMocks(testId, file, handle);
    String newName = "a new name";
    synapseClient.updateExternalFile(testId, testUrl, newName);
    file.setName(newName);
    verify(mockSynapse).putEntity(eq(file)); //should equal the previous file but with the new name
}
@Test
public void testCreateExternalFile() throws Exception {
    // The explicitly supplied file name must be set on the external file handle.
    String parentEntityId = "syn123333";
    String externalUrl = "sftp://foobar.edu/b/test.txt";
    String fileName = "testing.txt";
    when(mockSynapse.createExternalFileHandle(any(ExternalFileHandle.class))).thenReturn(new ExternalFileHandle());
    when(mockSynapse.createEntity(any(FileEntity.class))).thenReturn(new FileEntity());
    synapseClient.createExternalFile(parentEntityId, externalUrl, fileName);
    ArgumentCaptor<ExternalFileHandle> handleCaptor = ArgumentCaptor.forClass(ExternalFileHandle.class);
    verify(mockSynapse).createExternalFileHandle(handleCaptor.capture());
    ExternalFileHandle createdHandle = handleCaptor.getValue();
    assertEquals(fileName, createdHandle.getFileName());
    assertEquals(externalUrl, createdHandle.getExternalURL());
}
@Test
public void testGetEntityDoi() throws Exception {
    // Wiring test for getEntityDoi.
    Doi stubbedDoi = new Doi();
    stubbedDoi.setDoiStatus(DoiStatus.CREATED);
    stubbedDoi.setId("test doi id");
    stubbedDoi.setCreatedBy("Test User");
    stubbedDoi.setCreatedOn(new Date());
    stubbedDoi.setObjectId("syn1234");
    when(mockSynapse.getEntityDoi(anyString(), anyLong())).thenReturn(stubbedDoi);
    synapseClient.getEntityDoi("test entity id", null);
    verify(mockSynapse).getEntityDoi(anyString(), anyLong());
}
// @Test
// public void testGetParticipant() throws Exception{
// //basic wiring test
// //String returnJson = synapseClient.createParticipant("myEvalId");
/** Builds a minimal FileEntity fixture named after the shared testFileName field. */
private FileEntity getTestFileEntity() {
    FileEntity fixture = new FileEntity();
    fixture.setId("5544");
    fixture.setName(testFileName);
    return fixture;
}
@Test (expected=NotFoundException.class)
public void testGetEntityDoiNotFound() throws Exception {
    // A repo-side SynapseNotFoundException should surface as NotFoundException.
    when(mockSynapse.getEntityDoi(anyString(), anyLong())).thenThrow(new SynapseNotFoundException());
    synapseClient.getEntityDoi("test entity id", null);
}
@Test
public void testCreateDoi() throws Exception {
    // Wiring test for createDoi.
    synapseClient.createDoi("test entity id", null);
    verify(mockSynapse).createEntityDoi(anyString(), anyLong());
}
/**
 * Builds a single-element list containing one ChunkRequest (chunk #1 with a
 * keyed ChunkedFileToken). Despite the historical name, this returns the
 * request objects themselves, not their JSON form.
 */
private List<ChunkRequest> getTestChunkRequestJson() throws JSONObjectAdapterException {
    ChunkedFileToken token = new ChunkedFileToken();
    token.setKey("test key");
    ChunkRequest chunkRequest = new ChunkRequest();
    chunkRequest.setChunkedFileToken(token);
    // Uppercase long suffix: the original '1l' is easily misread as '11'.
    chunkRequest.setChunkNumber(1L);
    List<ChunkRequest> chunkRequests = new ArrayList<ChunkRequest>();
    chunkRequests.add(chunkRequest);
    return chunkRequests;
}
@Test
public void testCombineChunkedFileUpload() throws JSONObjectAdapterException, SynapseException, RestServiceException {
    // Combining chunks should kick off the upload daemon.
    synapseClient.combineChunkedFileUpload(getTestChunkRequestJson());
    verify(mockSynapse).startUploadDeamon(any(CompleteAllChunksRequest.class));
}
@Test
public void testGetUploadDaemonStatus() throws JSONObjectAdapterException, SynapseException, RestServiceException {
    // Wiring test for getUploadDaemonStatus.
    synapseClient.getUploadDaemonStatus("daemonId");
    verify(mockSynapse).getCompleteUploadDaemonStatus(anyString());
}
/**
* Direct upload tests. Most of the methods are simple pass-throughs to the Java Synapse client, but completeUpload has
* additional logic
* @throws JSONObjectAdapterException
* @throws SynapseException
* @throws RestServiceException
*/
@Test
public void testCompleteUpload() throws JSONObjectAdapterException, SynapseException, RestServiceException {
    FileEntity testFileEntity = getTestFileEntity();
    when(mockSynapse.createEntity(any(FileEntity.class))).thenReturn(testFileEntity);
    when(mockSynapse.putEntity(any(FileEntity.class))).thenReturn(testFileEntity);
    // Parent entity has no immediate children. Use a parameterized list
    // instead of the original raw ArrayList (unchecked warning).
    EntityIdList childEntities = new EntityIdList();
    childEntities.setIdList(new ArrayList<String>());
    when(mockSynapse.getDescendants(anyString(), anyInt(), anyInt(), anyString())).thenReturn(childEntities);
    synapseClient.setFileEntityFileHandle(null, null, "parentEntityId");
    // A null entity id means a brand-new FileEntity must be created.
    verify(mockSynapse).createEntity(any(FileEntity.class));
}
@Test(expected = NotFoundException.class)
public void testGetFileEntityIdWithSameNameNotFound() throws JSONObjectAdapterException, SynapseException, RestServiceException, JSONException {
    // A name query with zero hits should surface as NotFoundException.
    JSONObject queryResult = new JSONObject();
    queryResult.put("totalNumberOfResults", (long) 0);
    when(mockSynapse.query(anyString())).thenReturn(queryResult);
    // Return value intentionally discarded: the call is expected to throw.
    synapseClient.getFileEntityIdWithSameName(testFileName, "parentEntityId");
}
@Test(expected = ConflictException.class)
public void testGetFileEntityIdWithSameNameConflict() throws JSONObjectAdapterException, SynapseException, RestServiceException, JSONException {
    // A name match whose concrete type is Folder (not FileEntity) is a conflict.
    Folder folder = new Folder();
    folder.setName(testFileName);
    // Set up one query hit representing the folder.
    JSONObject objectResult = EntityFactory.createJSONObjectForEntity(folder);
    JSONArray typeArray = new JSONArray();
    typeArray.put("Folder");
    objectResult.put("entity.concreteType", typeArray);
    JSONArray results = new JSONArray();
    results.put(objectResult);
    // Set up the query result wrapper.
    JSONObject queryResult = new JSONObject();
    queryResult.put("totalNumberOfResults", (long) 1);
    queryResult.put("results", results);
    when(mockSynapse.query(anyString())).thenReturn(queryResult);
    // Return value intentionally discarded: the call is expected to throw.
    synapseClient.getFileEntityIdWithSameName(testFileName, "parentEntityId");
}
@Test
public void testGetFileEntityIdWithSameNameFound() throws JSONException, JSONObjectAdapterException, SynapseException, RestServiceException {
    // A query hit whose concrete type is FileEntity should yield its entity id.
    FileEntity file = getTestFileEntity();
    // Set up one query hit representing the file.
    JSONObject objectResult = EntityFactory.createJSONObjectForEntity(file);
    JSONArray typeArray = new JSONArray();
    typeArray.put(FileEntity.class.getName());
    objectResult.put("entity.concreteType", typeArray);
    objectResult.put("entity.id", file.getId());
    JSONArray results = new JSONArray();
    results.put(objectResult);
    // Set up the query result wrapper.
    JSONObject queryResult = new JSONObject();
    queryResult.put("totalNumberOfResults", (long) 1);
    queryResult.put("results", results);
    when(mockSynapse.query(anyString())).thenReturn(queryResult);
    String fileEntityId = synapseClient.getFileEntityIdWithSameName(testFileName, "parentEntityId");
    // JUnit convention: expected value first, actual second.
    assertEquals(file.getId(), fileEntityId);
}
@Test
public void testCompleteChunkedFileUploadExistingEntity() throws JSONObjectAdapterException, SynapseException, RestServiceException {
    // Removed an unused local (the chunk-request list was built but never used).
    FileEntity testFileEntity = getTestFileEntity();
    when(mockSynapse.getEntityById(anyString())).thenReturn(testFileEntity);
    // Fail fast if the client wrongly tries to create a new entity.
    when(mockSynapse.createEntity(any(FileEntity.class))).thenThrow(new AssertionError("No need to create a new entity!"));
    when(mockSynapse.putEntity(any(FileEntity.class))).thenReturn(testFileEntity);
    synapseClient.setFileEntityFileHandle(null, entityId, "parentEntityId");
    // It should look up the existing entity...
    verify(mockSynapse).getEntityById(anyString());
    // ...and update its data file handle exactly once.
    verify(mockSynapse, Mockito.times(1)).putEntity(any(FileEntity.class));
}
@Test
public void testGetChunkedFileToken() throws SynapseException, RestServiceException, JSONObjectAdapterException {
    // The token minted by the Synapse client should be returned unchanged.
    String fileName = "test file.zip";
    String contentType = "application/test";
    String md5 = "0123456789abcdef";
    ChunkedFileToken stubbedToken = new ChunkedFileToken();
    stubbedToken.setFileName(fileName);
    stubbedToken.setKey("a key 42");
    stubbedToken.setUploadId("upload ID 123");
    stubbedToken.setContentMD5(md5);
    when(mockSynapse.createChunkedFileUploadToken(any(CreateChunkedFileTokenRequest.class))).thenReturn(stubbedToken);
    ChunkedFileToken returnedToken = synapseClient.getChunkedFileToken(fileName, contentType, md5);
    verify(mockSynapse).createChunkedFileUploadToken(any(CreateChunkedFileTokenRequest.class));
    assertEquals(stubbedToken, returnedToken);
}
@Test
public void testGetChunkedPresignedUrl() throws SynapseException, RestServiceException, MalformedURLException, JSONObjectAdapterException {
    // The presigned URL should come back in string form.
    URL stubbedUrl = new URL("http://test.presignedurl.com/foo");
    when(mockSynapse.createChunkedPresignedUrl(any(ChunkRequest.class))).thenReturn(stubbedUrl);
    String returnedUrl = synapseClient.getChunkedPresignedUrl(getTestChunkRequestJson().get(0));
    verify(mockSynapse).createChunkedPresignedUrl(any(ChunkRequest.class));
    assertEquals(stubbedUrl.toString(), returnedUrl);
}
@Test
public void testGetAvailableEvaluations() throws SynapseException, RestServiceException, MalformedURLException, JSONObjectAdapterException {
    // Wiring test: the JSON returned must be exactly the serialized paginated
    // page. (An unused Evaluation local was removed — it was constructed but
    // never added to the stubbed results, so the page is intentionally empty.)
    PaginatedResults<Evaluation> testResults = new PaginatedResults<Evaluation>();
    when(mockSynapse.getAvailableEvaluationsPaginated(anyInt(), anyInt())).thenReturn(testResults);
    String evaluationsJson = synapseClient.getAvailableEvaluations();
    verify(mockSynapse).getAvailableEvaluationsPaginated(anyInt(), anyInt());
    String expectedJson = EntityFactory.createJSONStringForEntity(testResults);
    assertEquals(expectedJson, evaluationsJson);
}
@Test
public void testGetEvaluations() throws SynapseException, RestServiceException, MalformedURLException, JSONObjectAdapterException {
    // Two ids in -> two evaluations fetched and packaged as paginated results.
    when(mockSynapse.getEvaluation(anyString())).thenReturn(new Evaluation());
    List<String> evaluationIds = new ArrayList<String>();
    evaluationIds.add("1");
    evaluationIds.add("2");
    String evaluationsJson = synapseClient.getEvaluations(evaluationIds);
    verify(mockSynapse, Mockito.times(2)).getEvaluation(anyString());
    org.sagebionetworks.web.shared.PaginatedResults<Evaluation> decoded =
            nodeModelCreator.createPaginatedResults(evaluationsJson, Evaluation.class);
    assertEquals(2, decoded.getTotalNumberOfResults());
    assertEquals(2, decoded.getResults().size());
}
// Verifies hasSubmitted() is false when the user has no submissions and true once
// at least one submission exists (same stubbed PaginatedResults is mutated in place).
@Test
public void testHasSubmitted() throws SynapseException, RestServiceException, MalformedURLException, JSONObjectAdapterException {
String sharedEntityId = "syn123455";
setupGetAvailableEvaluations(sharedEntityId);
PaginatedResults<Submission> submissions = new PaginatedResults<Submission>();
//verify when all empty, hasSubmitted returns false
when(mockSynapse.getMySubmissions(anyString(), anyLong(), anyLong())).thenReturn(submissions);
assertFalse(synapseClient.hasSubmitted());
//verify when there is a submission, it returns true
submissions.setTotalNumberOfResults(1);
List<Submission> submissionList = new ArrayList<Submission>();
submissionList.add(new Submission());
submissions.setResults(submissionList);
assertTrue(synapseClient.hasSubmitted());
}
// Test fixture: stubs Synapse.getEvaluationsPaginated to return two evaluations
// whose content source is the given entity id.
public void setupGetAllEvaluations(String sharedEntityId) throws SynapseException {
PaginatedResults<Evaluation> testResults = getTestEvaluations(sharedEntityId);
when(mockSynapse.getEvaluationsPaginated(anyInt(),anyInt())).thenReturn(testResults);
}
// Test fixture: stubs Synapse.getEvaluationByContentSource to return evaluations only
// for the given entity id. Stubbing order is significant: the general anyString() stub
// comes first so the later eq(sharedEntityId) stub overrides it for that one id.
public void setupGetEvaluationsForEntity(String sharedEntityId) throws SynapseException {
PaginatedResults<Evaluation> testResults = getTestEvaluations(sharedEntityId);
when(mockSynapse.getEvaluationByContentSource(anyString(),anyInt(),anyInt())).thenReturn(getEmptyPaginatedResults());
when(mockSynapse.getEvaluationByContentSource(eq(sharedEntityId),anyInt(),anyInt())).thenReturn(testResults);
}
// Builds an empty paginated evaluation result (0 total, empty list).
private PaginatedResults<Evaluation> getEmptyPaginatedResults() {
PaginatedResults<Evaluation> testResults = new PaginatedResults<Evaluation>();
List<Evaluation> evaluationList = new ArrayList<Evaluation>();
testResults.setTotalNumberOfResults(0);
testResults.setResults(evaluationList);
return testResults;
}
// Builds a paginated result with two evaluations (EVAL_ID_1, EVAL_ID_2), both
// pointing at the given entity id as their content source.
private PaginatedResults<Evaluation> getTestEvaluations(String sharedEntityId) {
PaginatedResults<Evaluation> testResults = new PaginatedResults<Evaluation>();
List<Evaluation> evaluationList = new ArrayList<Evaluation>();
Evaluation e = new Evaluation();
e.setId(EVAL_ID_1);
e.setContentSource(sharedEntityId);
evaluationList.add(e);
// reuse the local for the second evaluation
e = new Evaluation();
e.setId(EVAL_ID_2);
e.setContentSource(sharedEntityId);
evaluationList.add(e);
testResults.setTotalNumberOfResults(2);
testResults.setResults(evaluationList);
return testResults;
}
// Test fixture: stubs Synapse.getAvailableEvaluationsPaginated with the standard
// two-evaluation test payload for the given entity id.
public void setupGetAvailableEvaluations(String sharedEntityId) throws SynapseException {
PaginatedResults<Evaluation> testResults = getTestEvaluations(sharedEntityId);
when(mockSynapse.getAvailableEvaluationsPaginated(anyInt(),anyInt())).thenReturn(testResults);
}
// Verifies createSubmission() passes through to Synapse.createSubmission and returns
// the submission the service produced.
@Test
public void testCreateSubmission() throws SynapseException, RestServiceException, MalformedURLException, JSONObjectAdapterException {
Submission inputSubmission = new Submission();
inputSubmission.setId("my submission id");
when(mockSynapse.createSubmission(any(Submission.class), anyString())).thenReturn(inputSubmission);
Submission returnSubmission = synapseClient.createSubmission(inputSubmission, "fakeEtag");
verify(mockSynapse).createSubmission(any(Submission.class), anyString());
assertEquals(inputSubmission, returnSubmission);
}
// Test fixture for submitter-alias aggregation: two available evaluations whose
// submissions collectively contain duplicate, null, and empty aliases, with
// creation dates arranged so recency-based sorting can be verified.
private void setupTestSubmitterAliases() throws SynapseException{
//set up 2 available evaluations
PaginatedResults<Evaluation> availableEvaluations = new PaginatedResults<Evaluation>();
List<Evaluation> evalResults = new ArrayList<Evaluation>();
Evaluation e = new Evaluation();
String eval1Id ="evaluation1";
e.setId(eval1Id);
evalResults.add(e);
e = new Evaluation();
String eval2Id = "evaluation2";
e.setId(eval2Id);
evalResults.add(e);
availableEvaluations.setResults(evalResults);
when(mockSynapse.getAvailableEvaluationsPaginated(anyInt(),anyInt())).thenReturn(availableEvaluations);
//test sorting, uniqueness, and empty/null values
Submission[] submissions = new Submission[6];
Date date = new Date();
for (int i = 0; i < submissions.length; i++) {
submissions[i] = new Submission();
//submission 0 is the most recently used (largest date time), and submission 5 is the oldest
submissions[i].setCreatedOn(new Date(date.getTime() - i));
submissions[i].setSubmitterAlias("Alias " + i);
}
//set a duplicate
submissions[3].setSubmitterAlias("Alias 0");
//and add a null and empty string submitter alias, to verify that these are removed
submissions[4].setSubmitterAlias(null);
submissions[5].setSubmitterAlias("");
//assign 2 submissions to evaluation1, and the other 4 submissions to evaluation2
//mix them up to test sort
PaginatedResults<Submission> submissionSet1 = new PaginatedResults<Submission>();
List<Submission> submissionList = new ArrayList<Submission>();
submissionList.add(submissions[0]);
submissionList.add(submissions[2]);
submissionSet1.setTotalNumberOfResults(2);
submissionSet1.setResults(submissionList);
PaginatedResults<Submission> submissionSet2 = new PaginatedResults<Submission>();
submissionList = new ArrayList<Submission>();
submissionList.add(submissions[1]);
submissionList.add(submissions[3]);
submissionList.add(submissions[4]);
submissionList.add(submissions[5]);
submissionSet2.setTotalNumberOfResults(4);
submissionSet2.setResults(submissionList);
when(mockSynapse.getMySubmissions(eq(eval1Id), anyLong(), anyLong())).thenReturn(submissionSet1);
when(mockSynapse.getMySubmissions(eq(eval2Id), anyLong(), anyLong())).thenReturn(submissionSet2);
}
// Verifies getAvailableEvaluationsSubmitterAliases() de-duplicates aliases, drops
// null/empty entries, and orders by recency (fixture defined in setupTestSubmitterAliases).
@Test
public void testGetAvailableEvaluationSubmitterAliases() throws SynapseException, RestServiceException, JSONObjectAdapterException {
setupTestSubmitterAliases();
String resourceListJson = synapseClient.getAvailableEvaluationsSubmitterAliases();
RestResourceList resourceList = EntityFactory.createEntityFromJSONString(resourceListJson, RestResourceList.class);
List<String> submitterAliasList = resourceList.getList();
//3 unique submitter aliases across the evaluations
assertEquals(3, submitterAliasList.size());
//order should be Alias 0, Alias 1, Alias 2
for (int i = 0; i < submitterAliasList.size(); i++) {
assertEquals("Alias " + i, submitterAliasList.get(i));
}
}
// Verifies getSharableEvaluations() returns only evaluations tied to the given entity id
// (and an empty list for an unrelated id).
@Test
public void testGetSharableEvaluations() throws SynapseException, RestServiceException, JSONObjectAdapterException {
String myEntityId = "syn123";
//set up 2 available evaluations associated to this entity id
setupGetEvaluationsForEntity(myEntityId);
ArrayList<String> sharableEvaluations = synapseClient.getSharableEvaluations(myEntityId);
//verify this is eval 2
assertEquals(1, sharableEvaluations.size());
Evaluation e2 = nodeModelCreator.createJSONEntity(sharableEvaluations.get(0), Evaluation.class);
assertEquals(EVAL_ID_2, e2.getId());
//and verify that no evaluations are returned for a different entity id
sharableEvaluations = synapseClient.getSharableEvaluations("syn456");
assertEquals(0, sharableEvaluations.size());
}
// Verifies inviteMember() is a no-op (no add, no new invitation) when an invitation is already open.
@Test
public void testInviteMemberOpenInvitations() throws SynapseException, RestServiceException, JSONObjectAdapterException {
membershipStatus.setHasOpenInvitation(true);
//verify it does not create a new invitation since one is already open
synapseClient.inviteMember("123", "a team", "");
verify(mockSynapse, Mockito.times(0)).addTeamMember(anyString(), anyString());
verify(mockSynapse, Mockito.times(0)).createMembershipInvitation(any(MembershipInvtnSubmission.class));
}
// Verifies requestMembership() is a no-op (no add, no new request) when a request is already open.
@Test
public void testRequestMemberOpenRequests() throws SynapseException, RestServiceException, JSONObjectAdapterException {
membershipStatus.setHasOpenRequest(true);
//verify it does not create a new request since one is already open
synapseClient.requestMembership("123", "a team", "");
verify(mockSynapse, Mockito.times(0)).addTeamMember(anyString(), anyString());
verify(mockSynapse, Mockito.times(0)).createMembershipRequest(any(MembershipRqstSubmission.class));
}
// Verifies inviteMember() adds the member directly when membership status allows joining.
@Test
public void testInviteMemberCanJoin() throws SynapseException, RestServiceException, JSONObjectAdapterException {
membershipStatus.setCanJoin(true);
synapseClient.inviteMember("123", "a team", "");
verify(mockSynapse).addTeamMember(anyString(), anyString());
}
// Verifies requestMembership() adds the member directly when membership status allows joining.
@Test
public void testRequestMembershipCanJoin() throws SynapseException, RestServiceException, JSONObjectAdapterException {
membershipStatus.setCanJoin(true);
synapseClient.requestMembership("123", "a team", "");
verify(mockSynapse).addTeamMember(anyString(), anyString());
}
// Verifies the default inviteMember() path creates a membership invitation.
@Test
public void testInviteMember() throws SynapseException, RestServiceException, JSONObjectAdapterException {
synapseClient.inviteMember("123", "a team", "");
verify(mockSynapse).createMembershipInvitation(any(MembershipInvtnSubmission.class));
}
// Verifies the default requestMembership() path creates a membership request.
@Test
public void testRequestMembership() throws SynapseException, RestServiceException, JSONObjectAdapterException {
synapseClient.requestMembership("123", "a team", "");
verify(mockSynapse).createMembershipRequest(any(MembershipRqstSubmission.class));
}
// Verifies getOpenRequestCount() returns null and never queries open requests
// when the caller is not a team admin.
@Test
public void testGetOpenRequestCountUnauthorized() throws SynapseException, RestServiceException {
//is not an admin
TeamMember testTeamMember = new TeamMember();
testTeamMember.setIsAdmin(false);
when(mockSynapse.getTeamMember(anyString(), anyString())).thenReturn(testTeamMember);
Long count = synapseClient.getOpenRequestCount("myUserId", "myTeamId");
//should never ask for open request count
verify(mockSynapse, Mockito.never()).getOpenMembershipRequests(anyString(), anyString(), anyLong(), anyLong());
assertNull(count);
}
// Verifies getOpenRequestCount() queries open membership requests for a team admin
// and returns the total count reported by the service.
@Test
public void testGetOpenRequestCount() throws SynapseException, RestServiceException, MalformedURLException, JSONObjectAdapterException {
//is admin
TeamMember testTeamMember = new TeamMember();
testTeamMember.setIsAdmin(true);
when(mockSynapse.getTeamMember(anyString(), anyString())).thenReturn(testTeamMember);
Long testCount = 42L;
PaginatedResults<MembershipRequest> testOpenRequests = new PaginatedResults<MembershipRequest>();
testOpenRequests.setTotalNumberOfResults(testCount);
when(mockSynapse.getOpenMembershipRequests(anyString(), anyString(), anyLong(), anyLong())).thenReturn(testOpenRequests);
Long count = synapseClient.getOpenRequestCount("myUserId", "myTeamId");
verify(mockSynapse, Mockito.times(1)).getOpenMembershipRequests(anyString(), anyString(), anyLong(), anyLong());
assertEquals(testCount, count);
}
// Verifies getOpenTeamInvitations() passes team id/limit/offset through and bundles
// each open invitation with its invitee's user profile as JSON.
@Test
public void testGetOpenTeamInvitations() throws SynapseException, RestServiceException, JSONObjectAdapterException {
setupTeamInvitations();
int limit = 55;
int offset = 2;
String teamId = "132";
List<MembershipInvitationBundle> invitationBundles = synapseClient.getOpenTeamInvitations(teamId, limit, offset);
verify(mockSynapse).getOpenMembershipInvitationSubmissions(eq(teamId), anyString(), eq((long)limit), eq((long)offset));
//we set this up so that a single invite would be returned. Verify that it is the one we're looking for
assertEquals(1, invitationBundles.size());
MembershipInvitationBundle invitationBundle = invitationBundles.get(0);
String invitationJson = testInvitation.writeToJSONObject(adapterFactory.createNew()).toJSONString();
String userProfileJson = inviteeUserProfile.writeToJSONObject(adapterFactory.createNew()).toJSONString();
assertEquals(userProfileJson, invitationBundle.getUserProfileJson());
assertEquals(invitationJson, invitationBundle.getMembershipInvitationJson());
}
// Verifies getTeamBundle() aggregates team JSON, membership status JSON, admin flag,
// and total member count from the individual Synapse service calls.
@Test
public void testGetTeamBundle() throws SynapseException, RestServiceException, MalformedURLException, JSONObjectAdapterException {
//set team member count
Long testMemberCount = 111L;
PaginatedResults<TeamMember> allMembers = new PaginatedResults<TeamMember>();
allMembers.setTotalNumberOfResults(testMemberCount);
when(mockSynapse.getTeamMembers(anyString(), anyString(), anyLong(), anyLong())).thenReturn(allMembers);
//set team
Team team = new Team();
team.setId("test team id");
when(mockSynapse.getTeam(anyString())).thenReturn(team);
//is member
TeamMembershipStatus membershipStatus = new TeamMembershipStatus();
membershipStatus.setIsMember(true);
when(mockSynapse.getTeamMembershipStatus(anyString(), anyString())).thenReturn(membershipStatus);
//is admin
TeamMember testTeamMember = new TeamMember();
boolean isAdmin = true;
testTeamMember.setIsAdmin(isAdmin);
when(mockSynapse.getTeamMember(anyString(), anyString())).thenReturn(testTeamMember);
//make the call
TeamBundle bundle = synapseClient.getTeamBundle("myUserId", "myTeamId", true);
//now verify round all values were returned in the bundle (based on the mocked service calls)
String membershipStatusJson = membershipStatus.writeToJSONObject(adapterFactory.createNew()).toJSONString();
String teamJson = team.writeToJSONObject(adapterFactory.createNew()).toJSONString();
assertEquals(teamJson, bundle.getTeamJson());
assertEquals(membershipStatusJson, bundle.getTeamMembershipStatusJson());
assertEquals(isAdmin, bundle.isUserAdmin());
assertEquals(testMemberCount, bundle.getTotalMemberCount());
}
// Verifies getEntityHeaderBatch() returns the headers stubbed in the test setup, in order.
// NOTE(review): `new ArrayList()` is a raw type; the element type the method expects
// isn't visible here — confirm and parameterize (e.g. new ArrayList<String>()).
@Test
public void testGetEntityHeaderBatch() throws SynapseException, RestServiceException, MalformedURLException, JSONObjectAdapterException {
List<EntityHeader> headers = synapseClient.getEntityHeaderBatch(new ArrayList());
//in the setup, we told the mockSynapse.getEntityHeaderBatch to return batchHeaderResults
for (int i = 0; i < batchHeaderResults.size(); i++) {
assertEquals(batchHeaderResults.get(i), headers.get(i));
}
}
// Verifies sendMessage() builds a MessageToUser with the given subject/recipients and
// passes it (with the body) through to Synapse.sendStringMessage.
@Test
public void testSendMessage() throws SynapseException, RestServiceException, JSONObjectAdapterException {
//essentially a pass through to sendStringMessage
ArgumentCaptor<MessageToUser> arg = ArgumentCaptor.forClass(MessageToUser.class);
Set<String> recipients = new HashSet<String>();
recipients.add("333");
String subject = "The Mathematics of Quantum Neutrino Fields";
String messageBody = "Atoms are not to be trusted, they make up everything";
synapseClient.sendMessage(recipients, subject, messageBody);
verify(mockSynapse).sendStringMessage(arg.capture(), eq(messageBody));
MessageToUser toSendMessage = arg.getValue();
assertEquals(subject, toSendMessage.getSubject());
assertEquals(recipients, toSendMessage.getRecipients());
}
// Verifies getCertifiedUserPassingRecord() returns the passing record serialized as JSON.
@Test
public void testGetCertifiedUserPassingRecord() throws RestServiceException, SynapseException, JSONObjectAdapterException{
PassingRecord passingRecord = new PassingRecord();
passingRecord.setPassed(true);
passingRecord.setQuizId(1238L);
String passingRecordJson = passingRecord.writeToJSONObject(adapterFactory.createNew()).toJSONString();
when(mockSynapse.getCertifiedUserPassingRecord(anyString())).thenReturn(passingRecord);
String returnedPassingRecordJson = synapseClient.getCertifiedUserPassingRecord("123");
verify(mockSynapse).getCertifiedUserPassingRecord(anyString());
assertEquals(passingRecordJson, returnedPassingRecordJson);
}
// Verifies a Synapse not-found error is surfaced as NotFoundException when the user
// never attempted certification.
@Test (expected=NotFoundException.class)
public void testUserNeverAttemptedCertification() throws RestServiceException, SynapseException{
when(mockSynapse.getCertifiedUserPassingRecord(anyString())).thenThrow(new SynapseNotFoundException("PassingRecord not found"));
synapseClient.getCertifiedUserPassingRecord("123");
}
// Verifies a failed quiz record (passed=false) is reported as NotFoundException.
@Test (expected=NotFoundException.class)
public void testUserFailedCertification() throws RestServiceException, SynapseException{
PassingRecord passingRecord = new PassingRecord();
passingRecord.setPassed(false);
passingRecord.setQuizId(1238L);
when(mockSynapse.getCertifiedUserPassingRecord(anyString())).thenReturn(passingRecord);
synapseClient.getCertifiedUserPassingRecord("123");
}
// Verifies getCertificationQuiz() delegates to Synapse.getCertifiedUserTest.
@Test
public void testGetCertificationQuiz() throws RestServiceException, SynapseException{
when(mockSynapse.getCertifiedUserTest()).thenReturn(new Quiz());
synapseClient.getCertificationQuiz();
verify(mockSynapse).getCertifiedUserTest();
}
// Verifies submitCertificationQuizResponse() deserializes the JSON response and submits
// the equivalent QuizResponse object to the service.
@Test
public void testSubmitCertificationQuizResponse() throws RestServiceException, SynapseException, JSONObjectAdapterException{
PassingRecord mockPassingRecord = new PassingRecord();
when(mockSynapse.submitCertifiedUserTestResponse(any(QuizResponse.class))).thenReturn(mockPassingRecord);
QuizResponse myResponse = new QuizResponse();
myResponse.setId(837L);
String quizResponseJson = myResponse.writeToJSONObject(adapterFactory.createNew()).toJSONString();
synapseClient.submitCertificationQuizResponse(quizResponseJson);
verify(mockSynapse).submitCertifiedUserTestResponse(eq(myResponse));
}
// Verifies getV2WikiPageAsV1() serves the page from the markdown cache (cache hit path).
@Test
public void testMarkdownCache() throws Exception {
Cache<MarkdownCacheRequest, WikiPage> mockCache = Mockito.mock(Cache.class);
synapseClient.setMarkdownCache(mockCache);
WikiPage page = new WikiPage();
when(mockCache.get(any(MarkdownCacheRequest.class))).thenReturn(page);
Mockito.when(mockSynapse.getV2WikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class))).thenReturn(v2Page);
WikiPage actualResult = synapseClient.getV2WikiPageAsV1(new WikiPageKey(entity.getId(), ObjectType.ENTITY.toString(), "12"));
assertEquals(page, actualResult);
verify(mockCache).get(any(MarkdownCacheRequest.class));
}
// Verifies getVersionOfV2WikiPageAsV1() serves a version-specific page from the markdown cache.
@Test
public void testMarkdownCacheWithVersion() throws Exception {
Cache<MarkdownCacheRequest, WikiPage> mockCache = Mockito.mock(Cache.class);
synapseClient.setMarkdownCache(mockCache);
WikiPage page = new WikiPage();
when(mockCache.get(any(MarkdownCacheRequest.class))).thenReturn(page);
Mockito.when(mockSynapse.getVersionOfV2WikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class), anyLong())).thenReturn(v2Page);
WikiPage actualResult = synapseClient.getVersionOfV2WikiPageAsV1(new WikiPageKey(entity.getId(), ObjectType.ENTITY.toString(), "12"), 5L);
assertEquals(page, actualResult);
verify(mockCache).get(any(MarkdownCacheRequest.class));
}
// Verifies AccessRequirementUtils.filterAccessRequirements() filters by ACCESS_TYPE,
// and handles empty and null input lists by returning an empty list.
@Test
public void testFilterAccessRequirements() throws Exception {
List<AccessRequirement> unfilteredAccessRequirements = new ArrayList<AccessRequirement>();
List<AccessRequirement> filteredAccessRequirements;
//filter empty list should not result in failure
filteredAccessRequirements = AccessRequirementUtils.filterAccessRequirements(unfilteredAccessRequirements, ACCESS_TYPE.UPDATE);
assertTrue(filteredAccessRequirements.isEmpty());
unfilteredAccessRequirements.add(createAccessRequirement(ACCESS_TYPE.DOWNLOAD));
unfilteredAccessRequirements.add(createAccessRequirement(ACCESS_TYPE.SUBMIT));
unfilteredAccessRequirements.add(createAccessRequirement(ACCESS_TYPE.SUBMIT));
//no requirements of type UPDATE
filteredAccessRequirements = AccessRequirementUtils.filterAccessRequirements(unfilteredAccessRequirements, ACCESS_TYPE.UPDATE);
assertTrue(filteredAccessRequirements.isEmpty());
//1 download
filteredAccessRequirements = AccessRequirementUtils.filterAccessRequirements(unfilteredAccessRequirements, ACCESS_TYPE.DOWNLOAD);
assertEquals(1, filteredAccessRequirements.size());
//2 submit
filteredAccessRequirements = AccessRequirementUtils.filterAccessRequirements(unfilteredAccessRequirements, ACCESS_TYPE.SUBMIT);
assertEquals(2, filteredAccessRequirements.size());
//finally, filter null list - result will be an empty list
filteredAccessRequirements = AccessRequirementUtils.filterAccessRequirements(null, ACCESS_TYPE.SUBMIT);
assertNotNull(filteredAccessRequirements);
assertTrue(filteredAccessRequirements.isEmpty());
}
// Verifies getEntityAccessRequirements(unmet=true) routes to getUnmetAccessRequirements.
@Test
public void testGetEntityUnmetAccessRequirements() throws Exception {
//verify it calls getUnmetAccessRequirements when unmet is true
synapseClient.getEntityAccessRequirements(entityId, true, null);
verify(mockSynapse).getUnmetAccessRequirements(any(RestrictableObjectDescriptor.class));
}
// Verifies getEntityAccessRequirements(unmet=false) routes to getAccessRequirements.
@Test
public void testGetAllEntityAccessRequirements() throws Exception {
//verify it calls getAccessRequirements when unmet is false
synapseClient.getEntityAccessRequirements(entityId, false, null);
verify(mockSynapse).getAccessRequirements(any(RestrictableObjectDescriptor.class));
}
//pass through tests for email validation
@Test
public void testAdditionalEmailValidation() throws Exception {
Long userId = 992843l;
String emailAddress = "test@test.com";
String callbackUrl = "http:
synapseClient.additionalEmailValidation(userId.toString(), emailAddress, callbackUrl);
verify(mockSynapse).additionalEmailValidation(eq(userId), eq(emailAddress), eq(callbackUrl));
}
// Verifies addEmail() wraps the token into an AddEmailInfo and calls Synapse.addEmail.
@Test
public void testAddEmail() throws Exception {
String emailAddressToken = "long synapse email token";
synapseClient.addEmail(emailAddressToken);
verify(mockSynapse).addEmail(any(AddEmailInfo.class), anyBoolean());
}
// Verifies getNotificationEmail() delegates to Synapse.getNotificationEmail.
@Test
public void testGetNotificationEmail() throws Exception {
synapseClient.getNotificationEmail();
verify(mockSynapse).getNotificationEmail();
}
// Verifies setNotificationEmail() passes the address through to the service.
@Test
public void testSetNotificationEmail() throws Exception {
String emailAddress = "test@test.com";
synapseClient.setNotificationEmail(emailAddress);
verify(mockSynapse).setNotificationEmail(eq(emailAddress));
}
// Verifies logErrorToRepositoryServices() forwards the error as a LogEntry
// (null stack trace allowed).
@Test
public void testLogErrorToRepositoryServices() throws SynapseException, RestServiceException, JSONObjectAdapterException {
String errorMessage = "error has occurred";
synapseClient.logErrorToRepositoryServices(errorMessage, null);
verify(mockSynapse).logError(any(LogEntry.class));
}
// Verifies an oversized stack trace is truncated so the LogEntry label stays within
// MAX_LOG_ENTRY_LABEL_SIZE (plus a fixed prefix/version allowance), while the
// message is preserved untouched.
@Test
public void testLogErrorToRepositoryServicesTruncation() throws SynapseException, RestServiceException, JSONObjectAdapterException {
StringBuilder stackTrace = new StringBuilder();
for (int i = 0; i < SynapseClientImpl.MAX_LOG_ENTRY_LABEL_SIZE + 100; i++) {
stackTrace.append('a');
}
String errorMessage = "error has occurred";
synapseClient.logErrorToRepositoryServices(errorMessage, stackTrace.toString());
ArgumentCaptor<LogEntry> captor = ArgumentCaptor.forClass(LogEntry.class);
verify(mockSynapse).logError(captor.capture());
LogEntry logEntry = captor.getValue();
//25 extra characters allowed for the prefix and portal version
assertTrue(logEntry.getLabel().length() - 25 <= SynapseClientImpl.MAX_LOG_ENTRY_LABEL_SIZE);
assertEquals(errorMessage, logEntry.getMessage());
}
// Verifies getMyProjects() passes limit/offset straight through to the service.
@Test
public void testGetMyProjects() throws Exception {
int limit = 11;
int offset = 20;
synapseClient.getMyProjects(limit, offset);
verify(mockSynapse).getMyProjects(eq(limit), eq(offset));
}
/**
 * Verifies getUserProjects() parses the user id from its String form and delegates
 * to Synapse.getProjectsFromUser with the same limit/offset.
 * FIX: long literal suffix changed from lowercase {@code l} to {@code L} — the
 * lowercase form is easily misread as the digit 1 (value unchanged).
 */
@Test
public void testGetUserProjects() throws Exception {
int limit = 11;
int offset = 20;
Long userId = 133L;
String userIdString = userId.toString();
synapseClient.getUserProjects(userIdString, limit, offset);
verify(mockSynapse).getProjectsFromUser(eq(userId), eq(limit), eq(offset));
}
/**
 * Verifies getProjectsForTeam() parses the team id from its String form and delegates
 * to Synapse.getProjectsForTeam with the same limit/offset.
 * FIX: long literal suffix changed from lowercase {@code l} to {@code L} — the
 * lowercase form is easily misread as the digit 1 (value unchanged).
 */
@Test
public void testGetProjectsForTeam() throws Exception {
int limit = 13;
int offset = 40;
Long teamId = 144L;
String teamIdString = teamId.toString();
synapseClient.getProjectsForTeam(teamIdString, limit, offset);
verify(mockSynapse).getProjectsForTeam(eq(teamId), eq(limit), eq(offset));
}
// Verifies safeLongToInt() returns an in-range value unchanged.
@Test
public void testSafeLongToInt() {
int inRangeInt = 500;
int after = SynapseClientImpl.safeLongToInt(inRangeInt);
assertEquals(inRangeInt, after);
}
// Verifies safeLongToInt() rejects values above Integer.MAX_VALUE.
@Test (expected=IllegalArgumentException.class)
public void testSafeLongToIntPositive() {
long testValue = Integer.MAX_VALUE;
testValue++;
SynapseClientImpl.safeLongToInt(testValue);
}
@Test (expected=IllegalArgumentException.class)
public void testSafeLongToIntNegative() {
long testValue = Integer.MIN_VALUE;
testValue
SynapseClientImpl.safeLongToInt(testValue);
}
// Verifies getHost() extracts the host for http/sftp URLs, ignoring scheme case,
// path, and port.
@Test
public void testGetHost() throws RestServiceException {
assertEquals("mydomain.com", synapseClient.getHost("sfTp://mydomain.com/foo/bar"));
assertEquals("mydomain.com", synapseClient.getHost("http://mydomain.com/foo/bar"));
assertEquals("mydomain.com", synapseClient.getHost("http://mydomain.com"));
assertEquals("mydomain.com", synapseClient.getHost("sftp://mydomain.com:22/foo/bar"));
}
// Verifies getHost() rejects a null URL.
@Test (expected=IllegalArgumentException.class)
public void testGetHostNull() throws RestServiceException {
synapseClient.getHost(null);
}
// Verifies getHost() rejects an empty URL.
@Test (expected=IllegalArgumentException.class)
public void testGetHostEmpty() throws RestServiceException {
synapseClient.getHost("");
}
// Verifies getHost() reports a malformed URL as a BadRequestException.
@Test (expected=BadRequestException.class)
public void testGetHostBadUrl() throws RestServiceException {
synapseClient.getHost("foobar");
}
}
|
package org.sagebionetworks.web.unitserver;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.sagebionetworks.web.shared.EntityBundleTransport.ACCESS_REQUIREMENTS;
import static org.sagebionetworks.web.shared.EntityBundleTransport.ANNOTATIONS;
import static org.sagebionetworks.web.shared.EntityBundleTransport.ENTITY;
import static org.sagebionetworks.web.shared.EntityBundleTransport.ENTITY_PATH;
import static org.sagebionetworks.web.shared.EntityBundleTransport.HAS_CHILDREN;
import static org.sagebionetworks.web.shared.EntityBundleTransport.PERMISSIONS;
import static org.sagebionetworks.web.shared.EntityBundleTransport.UNMET_ACCESS_REQUIREMENTS;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Matchers;
import org.mockito.Mockito;
import org.sagebionetworks.client.Synapse;
import org.sagebionetworks.client.exceptions.SynapseException;
import org.sagebionetworks.repo.model.ACCESS_TYPE;
import org.sagebionetworks.repo.model.AccessControlList;
import org.sagebionetworks.repo.model.AccessRequirement;
import org.sagebionetworks.repo.model.Annotations;
import org.sagebionetworks.repo.model.Data;
import org.sagebionetworks.repo.model.EntityBundle;
import org.sagebionetworks.repo.model.EntityHeader;
import org.sagebionetworks.repo.model.EntityPath;
import org.sagebionetworks.repo.model.ExampleEntity;
import org.sagebionetworks.repo.model.FileEntity;
import org.sagebionetworks.repo.model.LayerTypeNames;
import org.sagebionetworks.repo.model.LocationData;
import org.sagebionetworks.repo.model.LocationTypeNames;
import org.sagebionetworks.repo.model.PaginatedResults;
import org.sagebionetworks.repo.model.ResourceAccess;
import org.sagebionetworks.repo.model.TermsOfUseAccessRequirement;
import org.sagebionetworks.repo.model.UserGroup;
import org.sagebionetworks.repo.model.UserProfile;
import org.sagebionetworks.repo.model.attachment.AttachmentData;
import org.sagebionetworks.repo.model.attachment.PresignedUrl;
import org.sagebionetworks.repo.model.auth.UserEntityPermissions;
import org.sagebionetworks.repo.model.file.ExternalFileHandle;
import org.sagebionetworks.repo.model.file.FileHandle;
import org.sagebionetworks.repo.model.file.FileHandleResults;
import org.sagebionetworks.repo.model.message.ObjectType;
import org.sagebionetworks.repo.model.storage.StorageUsage;
import org.sagebionetworks.repo.model.wiki.WikiHeader;
import org.sagebionetworks.repo.model.wiki.WikiPage;
import org.sagebionetworks.schema.adapter.AdapterFactory;
import org.sagebionetworks.schema.adapter.JSONObjectAdapter;
import org.sagebionetworks.schema.adapter.JSONObjectAdapterException;
import org.sagebionetworks.schema.adapter.org.json.AdapterFactoryImpl;
import org.sagebionetworks.schema.adapter.org.json.EntityFactory;
import org.sagebionetworks.schema.adapter.org.json.JSONObjectAdapterImpl;
import org.sagebionetworks.web.client.transform.JSONEntityFactory;
import org.sagebionetworks.web.client.transform.JSONEntityFactoryImpl;
import org.sagebionetworks.web.client.transform.NodeModelCreator;
import org.sagebionetworks.web.client.transform.NodeModelCreatorImpl;
import org.sagebionetworks.web.client.widget.entity.registration.WidgetConstants;
import org.sagebionetworks.web.server.servlet.ServiceUrlProvider;
import org.sagebionetworks.web.server.servlet.SynapseClientImpl;
import org.sagebionetworks.web.server.servlet.SynapseProvider;
import org.sagebionetworks.web.server.servlet.TokenProvider;
import org.sagebionetworks.web.shared.EntityBundleTransport;
import org.sagebionetworks.web.shared.EntityWrapper;
import org.sagebionetworks.web.shared.WikiPageKey;
import org.sagebionetworks.web.shared.exceptions.RestServiceException;
import org.sagebionetworks.web.shared.users.AclUtils;
import org.sagebionetworks.web.shared.users.PermissionLevel;
import com.google.gwt.dev.javac.testing.impl.MockJavaResource;
/**
* Test for the SynapseClientImpl
* @author John
*
*/
public class SynapseClientImplTest {
SynapseProvider mockSynapseProvider;
TokenProvider mockTokenProvider;
ServiceUrlProvider mockUrlProvider;
Synapse mockSynapse;
SynapseClientImpl synapseClient;
String entityId = "123";
ExampleEntity entity;
AttachmentData attachment1, attachment2;
Annotations annos;
UserEntityPermissions eup;
EntityPath path;
org.sagebionetworks.repo.model.PaginatedResults<UserGroup> pgugs;
org.sagebionetworks.repo.model.PaginatedResults<UserProfile> pgups;
AccessControlList acl;
WikiPage page;
private static JSONObjectAdapter jsonObjectAdapter = new JSONObjectAdapterImpl();
private static AdapterFactory adapterFactory = new AdapterFactoryImpl();
private static JSONEntityFactory jsonEntityFactory = new JSONEntityFactoryImpl(adapterFactory);
private static NodeModelCreator nodeModelCreator = new NodeModelCreatorImpl(jsonEntityFactory, jsonObjectAdapter);
@Before
public void before() throws SynapseException, JSONObjectAdapterException {
    // Wire the client under test to fully mocked collaborators so that no test
    // ever touches the network.
    mockSynapse = Mockito.mock(Synapse.class);
    mockSynapseProvider = Mockito.mock(SynapseProvider.class);
    mockUrlProvider = Mockito.mock(ServiceUrlProvider.class);
    when(mockSynapseProvider.createNewClient()).thenReturn(mockSynapse);
    mockTokenProvider = Mockito.mock(TokenProvider.class);
    synapseClient = new SynapseClientImpl();
    synapseClient.setSynapseProvider(mockSynapseProvider);
    synapseClient.setTokenProvider(mockTokenProvider);
    synapseClient.setServiceUrlProvider(mockUrlProvider);
    // Entity fixture with two named attachments (used by the attachment tests).
    entity = new ExampleEntity();
    entity.setId(entityId);
    entity.setEntityType(ExampleEntity.class.getName());
    List<AttachmentData> attachments = new ArrayList<AttachmentData>();
    attachment1 = new AttachmentData();
    attachment1.setName("attachment1");
    attachment2 = new AttachmentData();
    attachment2.setName("attachment2");
    attachments.add(attachment1);
    attachments.add(attachment2);
    entity.setAttachments(attachments);
    when(mockSynapse.getEntityById(entityId)).thenReturn(entity);
    // Annotations fixture.
    annos = new Annotations();
    annos.setId(entityId);
    annos.addAnnotation("string", "a string value");
    when(mockSynapse.getAnnotations(entityId)).thenReturn(annos);
    // Permissions fixture.
    eup = new UserEntityPermissions();
    eup.setCanDelete(true);
    eup.setCanView(false);
    eup.setOwnerPrincipalId(999L);
    when(mockSynapse.getUsersEntityPermissions(entityId)).thenReturn(eup);
    // Entity-path fixture containing a single header.
    path = new EntityPath();
    path.setPath(new ArrayList<EntityHeader>());
    EntityHeader header = new EntityHeader();
    header.setId(entityId);
    header.setName("RomperRuuuu");
    path.getPath().add(header);
    when(mockSynapse.getEntityPath(entityId)).thenReturn(path);
    // Paginated group and user fixtures.
    pgugs = new org.sagebionetworks.repo.model.PaginatedResults<UserGroup>();
    List<UserGroup> ugs = new ArrayList<UserGroup>();
    ugs.add(new UserGroup());
    pgugs.setResults(ugs);
    when(mockSynapse.getGroups(anyInt(), anyInt())).thenReturn(pgugs);
    pgups = new org.sagebionetworks.repo.model.PaginatedResults<UserProfile>();
    List<UserProfile> ups = new ArrayList<UserProfile>();
    ups.add(new UserProfile());
    pgups.setResults(ups);
    when(mockSynapse.getUsers(anyInt(), anyInt())).thenReturn(pgups);
    // ACL fixture. BUG FIX: previously the configured ResourceAccess was never
    // added to the set, so the ACL fixture was always empty and the CAN_ADMINISTER
    // entry below was dead code. Add it so the fixture matches its obvious intent.
    acl = new AccessControlList();
    acl.setId("sys999");
    Set<ResourceAccess> ras = new HashSet<ResourceAccess>();
    ResourceAccess ra = new ResourceAccess();
    ra.setPrincipalId(101L);
    ra.setAccessType(AclUtils.getACCESS_TYPEs(PermissionLevel.CAN_ADMINISTER));
    ras.add(ra);
    acl.setResourceAccess(ras);
    when(mockSynapse.getACL(anyString())).thenReturn(acl);
    when(mockSynapse.createACL((AccessControlList) any())).thenReturn(acl);
    when(mockSynapse.updateACL((AccessControlList) any())).thenReturn(acl);
    when(mockSynapse.updateACL((AccessControlList) any(), eq(true))).thenReturn(acl);
    when(mockSynapse.updateACL((AccessControlList) any(), eq(false))).thenReturn(acl);
    EntityHeader bene = new EntityHeader();
    bene.setId("syn999");
    when(mockSynapse.getEntityBenefactor(anyString())).thenReturn(bene);
    // Access-requirement fixtures referenced by the bundle stubs below.
    List<AccessRequirement> accessRequirements = new ArrayList<AccessRequirement>();
    TermsOfUseAccessRequirement accessRequirement = new TermsOfUseAccessRequirement();
    accessRequirements.add(accessRequirement);
    accessRequirement.setEntityType(TermsOfUseAccessRequirement.class.getName());
    accessRequirement.setEntityIds(Arrays.asList(new String[]{"101"}));
    // Entity-bundle stubs: a fully-populated bundle for the all-parts mask and an
    // empty bundle for the zero mask.
    int mask = ENTITY | ANNOTATIONS | PERMISSIONS | ENTITY_PATH |
            HAS_CHILDREN | ACCESS_REQUIREMENTS | UNMET_ACCESS_REQUIREMENTS;
    int emptyMask = 0;
    EntityBundle bundle = new EntityBundle();
    bundle.setEntity(entity);
    bundle.setAnnotations(annos);
    bundle.setPermissions(eup);
    bundle.setPath(path);
    bundle.setHasChildren(false);
    bundle.setAccessRequirements(accessRequirements);
    bundle.setUnmetAccessRequirements(accessRequirements);
    when(mockSynapse.getEntityBundle(anyString(), Matchers.eq(mask))).thenReturn(bundle);
    EntityBundle emptyBundle = new EntityBundle();
    when(mockSynapse.getEntityBundle(anyString(), Matchers.eq(emptyMask))).thenReturn(emptyBundle);
    when(mockSynapse.canAccess("syn101", ACCESS_TYPE.READ)).thenReturn(true);
    // Wiki-page fixture shared by the wiki tests.
    page = new WikiPage();
    page.setId("testId");
    page.setMarkdown("my markdown");
    page.setParentWikiId(null);
    page.setTitle("A Title");
}
@Test
public void testGetEntityBundleAll() throws RestServiceException {
    // Requesting every part of the bundle should populate every JSON field
    // of the returned transport.
    int allParts = ENTITY | ANNOTATIONS | PERMISSIONS | ENTITY_PATH | HAS_CHILDREN
            | ACCESS_REQUIREMENTS | UNMET_ACCESS_REQUIREMENTS;
    EntityBundleTransport transport = synapseClient.getEntityBundle(entityId, allParts);
    assertNotNull(transport);
    assertNotNull(transport.getEntityJson());
    assertNotNull(transport.getAnnotationsJson());
    assertNotNull(transport.getEntityPathJson());
    assertNotNull(transport.getPermissionsJson());
    assertNotNull(transport.getHasChildren());
    assertNotNull(transport.getAccessRequirementsJson());
    assertNotNull(transport.getUnmetAccessRequirementsJson());
}
@Test
public void testGetEntityBundleNone() throws RestServiceException {
    // An empty mask should leave every part of the transport unset.
    EntityBundleTransport transport = synapseClient.getEntityBundle(entityId, 0x0);
    assertNotNull(transport);
    assertNull(transport.getEntityJson());
    assertNull(transport.getAnnotationsJson());
    assertNull(transport.getEntityPathJson());
    assertNull(transport.getPermissionsJson());
    assertNull(transport.getHasChildren());
    assertNull(transport.getAccessRequirementsJson());
    assertNull(transport.getUnmetAccessRequirementsJson());
}
@Test(expected = IllegalArgumentException.class)
public void testParseEntityFromJsonNoType() throws JSONObjectAdapterException {
    // An entity serialized without its entityType cannot be parsed back.
    ExampleEntity missingType = new ExampleEntity();
    missingType.setName("some name");
    missingType.setDescription("some description");
    String json = EntityFactory.createJSONStringForEntity(missingType);
    // Must throw: the concrete type field is required for parsing.
    synapseClient.parseEntityFromJson(json);
}
@Test
public void testParseEntityFromJson() throws JSONObjectAdapterException {
    // A fully-typed entity must survive a JSON round trip unchanged.
    ExampleEntity original = new ExampleEntity();
    original.setName("some name");
    original.setDescription("some description");
    original.setEntityType(ExampleEntity.class.getName());
    String json = EntityFactory.createJSONStringForEntity(original);
    ExampleEntity roundTripped = (ExampleEntity) synapseClient.parseEntityFromJson(json);
    assertEquals(original, roundTripped);
}
@Test
public void testCreateOrUpdateEntityFalse() throws JSONObjectAdapterException, RestServiceException, SynapseException {
    // isNew == false must route through putEntity (the update path).
    ExampleEntity request = new ExampleEntity();
    request.setName("some name");
    request.setDescription("some description");
    request.setEntityType(ExampleEntity.class.getName());
    ExampleEntity persisted = new ExampleEntity();
    persisted.setName("some name");
    persisted.setDescription("some description");
    persisted.setEntityType(ExampleEntity.class.getName());
    persisted.setId("syn123");
    persisted.setEtag("45");
    when(mockSynapse.putEntity(request)).thenReturn(persisted);
    String result = synapseClient.createOrUpdateEntity(request, null, false);
    assertEquals(persisted.getId(), result);
    verify(mockSynapse).putEntity(request);
}
@Test
public void testCreateOrUpdateEntityTrue() throws JSONObjectAdapterException, RestServiceException, SynapseException {
    // isNew == true must route through createEntity (the create path).
    ExampleEntity request = new ExampleEntity();
    request.setName("some name");
    request.setDescription("some description");
    request.setEntityType(ExampleEntity.class.getName());
    ExampleEntity persisted = new ExampleEntity();
    persisted.setName("some name");
    persisted.setDescription("some description");
    persisted.setEntityType(ExampleEntity.class.getName());
    persisted.setId("syn123");
    persisted.setEtag("45");
    when(mockSynapse.createEntity(request)).thenReturn(persisted);
    String result = synapseClient.createOrUpdateEntity(request, null, true);
    assertEquals(persisted.getId(), result);
    verify(mockSynapse).createEntity(request);
}
@Test
public void testCreateOrUpdateEntityTrueWithAnnos() throws JSONObjectAdapterException, RestServiceException, SynapseException {
    // When annotations are supplied, they must be stamped with the new entity's
    // id/etag and pushed via updateAnnotations after the create.
    ExampleEntity request = new ExampleEntity();
    request.setName("some name");
    request.setDescription("some description");
    request.setEntityType(ExampleEntity.class.getName());
    Annotations annos = new Annotations();
    annos.addAnnotation("someString", "one");
    ExampleEntity persisted = new ExampleEntity();
    persisted.setName("some name");
    persisted.setDescription("some description");
    persisted.setEntityType(ExampleEntity.class.getName());
    persisted.setId("syn123");
    persisted.setEtag("45");
    when(mockSynapse.createEntity(request)).thenReturn(persisted);
    String result = synapseClient.createOrUpdateEntity(request, annos, true);
    assertEquals(persisted.getId(), result);
    verify(mockSynapse).createEntity(request);
    // Expected annotation state after the client stamps in the create results.
    annos.setEtag(persisted.getEtag());
    annos.setId(persisted.getId());
    verify(mockSynapse).updateAnnotations(persisted.getId(), annos);
}
@Test
public void testGetNodeAcl() throws Exception {
    // The wrapper JSON must deserialize back to the ACL fixture from before().
    EntityWrapper wrapper = synapseClient.getNodeAcl("syn101");
    AccessControlList parsed = EntityFactory.createEntityFromJSONString(wrapper.getEntityJson(), AccessControlList.class);
    assertEquals(acl, parsed);
}
@Test
public void testCreateAcl() throws Exception {
    EntityWrapper request = new EntityWrapper();
    request.setEntityJson(EntityFactory.createJSONObjectForEntity(acl).toString());
    EntityWrapper response = synapseClient.createAcl(request);
    AccessControlList parsed = EntityFactory.createEntityFromJSONString(response.getEntityJson(), AccessControlList.class);
    assertEquals(acl, parsed);
}
@Test
public void testUpdateAcl() throws Exception {
    EntityWrapper request = new EntityWrapper();
    request.setEntityJson(EntityFactory.createJSONObjectForEntity(acl).toString());
    EntityWrapper response = synapseClient.updateAcl(request);
    AccessControlList parsed = EntityFactory.createEntityFromJSONString(response.getEntityJson(), AccessControlList.class);
    assertEquals(acl, parsed);
}
@Test
public void testUpdateAclRecursive() throws Exception {
    // The recursive flag must be forwarded to the Synapse client.
    EntityWrapper request = new EntityWrapper();
    request.setEntityJson(EntityFactory.createJSONObjectForEntity(acl).toString());
    EntityWrapper response = synapseClient.updateAcl(request, true);
    AccessControlList parsed = EntityFactory.createEntityFromJSONString(response.getEntityJson(), AccessControlList.class);
    assertEquals(acl, parsed);
    verify(mockSynapse).updateACL(any(AccessControlList.class), eq(true));
}
@Test
public void testDeleteAcl() throws Exception {
    EntityWrapper response = synapseClient.deleteAcl("syn101");
    AccessControlList parsed = EntityFactory.createEntityFromJSONString(response.getEntityJson(), AccessControlList.class);
    assertEquals(acl, parsed);
}
@Test
public void testHasAccess() throws Exception {
    // Mirrors the canAccess("syn101", READ) stub configured in before().
    assertTrue(synapseClient.hasAccess("syn101", "READ"));
}
@Test
public void testGetAllUsers() throws Exception {
    // Paginated user profiles must round-trip through the wrapper JSON.
    EntityWrapper wrapper = synapseClient.getAllUsers();
    org.sagebionetworks.web.shared.PaginatedResults<UserProfile> parsed =
            nodeModelCreator.createPaginatedResults(wrapper.getEntityJson(), UserProfile.class);
    assertEquals(pgups.getResults(), parsed.getResults());
}
@Test
public void testGetAllGroups() throws Exception {
    // Paginated user groups must round-trip through the wrapper JSON.
    EntityWrapper wrapper = synapseClient.getAllGroups();
    org.sagebionetworks.web.shared.PaginatedResults<UserGroup> parsed =
            nodeModelCreator.createPaginatedResults(wrapper.getEntityJson(), UserGroup.class);
    assertEquals(pgugs.getResults(), parsed.getResults());
}
@Test
public void testGetUserProfile() throws Exception {
    // The client should fetch the raw profile JSON straight from the repo service.
    JSONObject profileJson = new JSONObject("{ username: \"Test User\"}");
    String repoUrl = "http://mytestrepourl";
    String userId = "myUserId";
    when(mockUrlProvider.getRepositoryServiceUrl()).thenReturn(repoUrl);
    when(mockSynapse.getSynapseEntity(repoUrl, "/userProfile/" + userId)).thenReturn(profileJson);
    String profile = synapseClient.getUserProfile(userId);
    assertEquals(profile, profileJson.toString());
}
@Test
public void testCreateUserProfileAttachment() throws Exception {
    // The presigned URL from Synapse must be returned serialized as JSON.
    PresignedUrl presigned = new PresignedUrl();
    presigned.setPresignedUrl("http://mytestpresignedurl");
    String id = "myTestId";
    String token = "myTestToken";
    when(mockSynapse.createUserProfileAttachmentPresignedUrl(id, token)).thenReturn(presigned);
    String result = synapseClient.createUserProfileAttachmentPresignedUrl(id, token);
    assertEquals(result, EntityFactory.createJSONStringForEntity(presigned));
}
@Test
public void testUpdateLocationable() throws Exception {
    // The client should delegate the external-location update straight to Synapse
    // and serialize whatever Synapse returns.
    String externalUrl = "http://mytesturl/something.jpg";
    LocationData location = new LocationData();
    location.setPath(externalUrl);
    location.setType(LocationTypeNames.external);
    List<LocationData> locations = new ArrayList<LocationData>();
    locations.add(location);
    Data layer = new Data();
    layer.setType(LayerTypeNames.M);
    layer.setLocations(locations);
    String id = "myTestId";
    when(mockSynapse.updateExternalLocationableToSynapse(layer, externalUrl)).thenReturn(layer);
    when(mockSynapse.getEntityById(id)).thenReturn(layer);
    EntityWrapper result = synapseClient.updateExternalLocationable(id, externalUrl);
    assertEquals(result.getEntityJson(), EntityFactory.createJSONStringForEntity(layer));
}
@Test
public void testGetStorageUsage() throws Exception {
    // The client should return the content size from the itemized usage that
    // Synapse reports for the node.
    // Fix: long literals now use an uppercase 'L' suffix — a lowercase 'l' is
    // easily misread as the digit '1' (standard Java style rule).
    PaginatedResults<StorageUsage> usagePage = new PaginatedResults<StorageUsage>();
    StorageUsage expectedUsage = new StorageUsage();
    Long expectedSize = 12345L;
    expectedUsage.setId("usageId");
    expectedUsage.setContentSize(expectedSize);
    List<StorageUsage> list = new ArrayList<StorageUsage>();
    list.add(expectedUsage);
    usagePage.setResults(list);
    usagePage.setTotalNumberOfResults(1L);
    when(mockSynapse.getItemizedStorageUsageForNode(anyString(), anyInt(), anyInt())).thenReturn(usagePage);
    Long actual = synapseClient.getStorageUsage(entityId);
    assertEquals(expectedSize, actual);
}
@Test
public void testRemoveAttachmentFromEntity() throws Exception {
    when(mockSynapse.putEntity(any(ExampleEntity.class))).thenReturn(entity);
    ArgumentCaptor<ExampleEntity> captor = ArgumentCaptor.forClass(ExampleEntity.class);
    // Remove the second attachment by name.
    synapseClient.removeAttachmentFromEntity(entityId, attachment2.getName());
    verify(mockSynapse).getEntityById(entityId);
    verify(mockSynapse).putEntity(captor.capture());
    // Only the first attachment should remain on the persisted entity.
    ExampleEntity persisted = captor.getValue();
    List<AttachmentData> remaining = persisted.getAttachments();
    assertTrue(remaining.size() == 1 && remaining.get(0).equals(attachment1));
}
@Test
public void testGetJSONEntity() throws Exception {
    JSONObject json = EntityFactory.createJSONObjectForEntity(entity);
    when(mockSynapse.getEntity(anyString())).thenReturn(json);
    String repoUri = "/testservice";
    synapseClient.getJSONEntity(repoUri);
    // The URI must be passed through to Synapse.getEntity unchanged.
    verify(mockSynapse).getEntity(repoUri);
}
@Test
public void testCreateWikiPage() throws Exception {
    // Creation is delegated to the Synapse client with the serialized page.
    String pageJson = EntityFactory.createJSONStringForEntity(page);
    when(mockSynapse.createWikiPage(anyString(), any(ObjectType.class), any(WikiPage.class))).thenReturn(page);
    synapseClient.createWikiPage("testId", WidgetConstants.WIKI_OWNER_ID_ENTITY, pageJson);
    verify(mockSynapse).createWikiPage(anyString(), any(ObjectType.class), any(WikiPage.class));
}
@Test
public void testDeleteWikiPage() throws Exception {
    synapseClient.deleteWikiPage(new WikiPageKey("syn123", WidgetConstants.WIKI_OWNER_ID_ENTITY, "20"));
    verify(mockSynapse).deleteWikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
}
@Test
public void testGetWikiHeaderTree() throws Exception {
    PaginatedResults<WikiHeader> headerTree = new PaginatedResults<WikiHeader>();
    when(mockSynapse.getWikiHeaderTree(anyString(), any(ObjectType.class))).thenReturn(headerTree);
    synapseClient.getWikiHeaderTree("testId", WidgetConstants.WIKI_OWNER_ID_ENTITY);
    verify(mockSynapse).getWikiHeaderTree(anyString(), any(ObjectType.class));
}
@Test
public void testGetWikiPage() throws Exception {
    when(mockSynapse.getWikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class))).thenReturn(page);
    synapseClient.getWikiPage(new WikiPageKey("syn123", WidgetConstants.WIKI_OWNER_ID_ENTITY, "20"));
    verify(mockSynapse).getWikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
}
@Test
public void testUpdateWikiPage() throws Exception {
    String pageJson = EntityFactory.createJSONStringForEntity(page);
    when(mockSynapse.updateWikiPage(anyString(), any(ObjectType.class), any(WikiPage.class))).thenReturn(page);
    synapseClient.updateWikiPage("testId", WidgetConstants.WIKI_OWNER_ID_ENTITY, pageJson);
    verify(mockSynapse).updateWikiPage(anyString(), any(ObjectType.class), any(WikiPage.class));
}
@Test
public void testGetWikiAttachmentHandles() throws Exception {
    FileHandleResults handles = new FileHandleResults();
    // NOTE(review): "getWikiAttachmenthHandles" is the (misspelled) name of the
    // real Synapse client API method, so it must be kept as-is here.
    when(mockSynapse.getWikiAttachmenthHandles(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class))).thenReturn(handles);
    synapseClient.getWikiAttachmentHandles(new WikiPageKey("syn123", WidgetConstants.WIKI_OWNER_ID_ENTITY, "20"));
    verify(mockSynapse).getWikiAttachmenthHandles(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
}
@Test
// Verifies that updateExternalFile (1) fetches the entity, (2) creates an external
// file handle, (3) saves the entity, and (4) attempts to rename it to the filename
// extracted from the URL — and that a failed rename is swallowed gracefully.
// NOTE: the cumulative verify counts (times(2)/times(4)) depend on running both
// phases in this exact order within one test.
public void testUpdateExternalFileHandle() throws Exception {
//verify call is directly calling the synapse client provider, and it tries to rename the entity to the filename
String myFileName = "testFileName.csv";
String testUrl = "http://mytesturl/"+myFileName;
String testId = "myTestId";
FileEntity file = new FileEntity();
String originalFileEntityName = "syn1223";
file.setName(originalFileEntityName);
file.setId(testId);
file.setDataFileHandleId("handle1");
ExternalFileHandle handle = new ExternalFileHandle();
handle.setExternalURL(testUrl);
when(mockSynapse.getEntityById(testId)).thenReturn(file);
when(mockSynapse.createExternalFileHandle(any(ExternalFileHandle.class))).thenReturn(handle);
when(mockSynapse.putEntity(any(FileEntity.class))).thenReturn(file);
ArgumentCaptor<FileEntity> arg = ArgumentCaptor.forClass(FileEntity.class);
// Phase 1: happy path — handle created, entity saved, then renamed.
synapseClient.updateExternalFile(testId, testUrl);
verify(mockSynapse).getEntityById(testId);
verify(mockSynapse).createExternalFileHandle(any(ExternalFileHandle.class));
// Two putEntity calls expected: one for the handle update, one for the rename.
verify(mockSynapse, Mockito.times(2)).putEntity(arg.capture());
//verify rename
FileEntity fileEntityArg = arg.getValue(); //last value captured
assertEquals(myFileName, fileEntityArg.getName());
//and if rename fails, verify all is well (but the FileEntity name is not updated)
file.setName(originalFileEntityName);
//first call should return file, second call to putEntity should throw an exception
when(mockSynapse.putEntity(any(FileEntity.class))).thenReturn(file).thenThrow(new IllegalArgumentException("invalid name for some reason"));
// Phase 2: rename failure must not propagate out of updateExternalFile.
synapseClient.updateExternalFile(testId, testUrl);
//second time calling createExternalFileHandle
verify(mockSynapse, Mockito.times(2)).createExternalFileHandle(any(ExternalFileHandle.class));
//and it should have called putEntity 2 additional times
verify(mockSynapse, Mockito.times(4)).putEntity(arg.capture());
fileEntityArg = arg.getValue(); //last value captured
assertEquals(originalFileEntityName, fileEntityArg.getName());
}
}
|
package pt.fccn.mobile.arquivo.tests.imagesearch;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import org.openqa.selenium.By;
import pt.fccn.arquivo.selenium.Retry;
import pt.fccn.arquivo.selenium.WebDriverTestBaseParalell;
/**
*
* @author ivo.branco@fccn.pt
*
*/
public class ImageSearchTest extends WebDriverTestBaseParalell {
/**
 * Creates one parameterized test instance for the given platform/browser/device
 * combination; all arguments are forwarded to the parallel WebDriver base class.
 * (NOTE(review): the previous comment here described a search test, not this
 * constructor.)
 */
public ImageSearchTest(String os, String version, String browser, String deviceName, String deviceOrientation) {
    super(os, version, browser, deviceName, deviceOrientation);
}
@Test
@Retry
public void testImageSearchOneTerm() throws Exception {
run("Search FCCN term", () -> {
driver.findElement(By.id("txtSearch")).clear();
driver.findElement(By.id("txtSearch")).sendKeys("fccn");
/*** Details attributes ***/
|
package test.org.hubotek.service.data;
import javax.inject.Inject;
import javax.transaction.UserTransaction;
import org.hubotek.ElementEnum;
import org.hubotek.model.HubDocument;
import org.hubotek.model.cse.GoogleSearchEngine;
import org.hubotek.model.feed.FeedUrl;
import org.hubotek.model.google.GoogleBase;
import org.hubotek.model.google.news.NewsTopic;
import org.hubotek.model.project.api.GoogleApiKey;
import org.hubotek.model.rss.RssDocument;
import org.hubotek.model.url.NamedUrl;
import org.hubotek.service.DataBaseService;
import org.hubotek.service.data.FeedService;
import org.hubotek.service.data.GoogleSearchEngineService;
import org.hubotek.service.orm.PersistenceService;
import org.hubotek.test.BasePersistenceTestClass;
import org.hubotek.util.DOMElementExtratorUtil;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.asset.EmptyAsset;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.nanotek.Base;
@RunWith(Arquillian.class)
public class BasePersistenceServiceTest {

    /** Service under test for feed-URL persistence. */
    @Inject
    FeedService feedService;

    /** Service under test for Google CSE definitions. */
    @Inject
    GoogleSearchEngineService googleSearchEngineService;

    /** Container-managed transaction used to bracket each persistence test. */
    @Inject
    UserTransaction utx;

    /**
     * Builds the in-container deployment: every package the injected services
     * and their entities live in, plus persistence and logging resources.
     */
    @Deployment
    public static JavaArchive createDeployment() {
        return ShrinkWrap.create(JavaArchive.class)
                .addPackage(DataBaseService.class.getPackage())
                .addPackage(FeedService.class.getPackage())
                .addPackage(GoogleSearchEngineService.class.getPackage())
                .addPackage(GoogleSearchEngine.class.getPackage())
                .addPackage(FeedUrl.class.getPackage())
                .addPackage(PersistenceService.class.getPackage())
                .addPackage(BasePersistenceTestClass.class.getPackage())
                .addPackage(Base.class.getPackage())
                .addPackage(GoogleApiKey.class.getPackage())
                .addPackage(DOMElementExtratorUtil.class.getPackage())
                .addPackage(ElementEnum.class.getPackage())
                .addPackage(HubDocument.class.getPackage())
                .addPackage(RssDocument.class.getPackage())
                .addPackage(NamedUrl.class.getPackage())
                .addClass(GoogleBase.class)
                .addPackage(NewsTopic.class.getPackage())
                .addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml")
                .addAsResource("log4j.properties", "log4j.properties")
                .addAsResource("META-INF/persistence.xml", "META-INF/persistence.xml");
    }

    /** Deployment smoke test: passes once the archive deploys and injection succeeds. */
    @Test
    public void test() {}

    /**
     * Persists a search-engine definition inside an explicit transaction.
     *
     * @throws Exception if the transaction or the persistence operation fails
     */
    @Test
    public void testSaveCseEngine() throws Exception {
        utx.begin();
        googleSearchEngineService.deleteAll();
        GoogleSearchEngine engine = new GoogleSearchEngine();
        engine.setId("cx-id-for-service");
        engine.setDescription("A simple Description new Test");
        engine.setName("CSE-NAME");
        googleSearchEngineService.saveSearchEngineDefinition(engine);
        utx.commit();
    }

    /**
     * Persists a feed URL inside an explicit transaction.
     *
     * @throws Exception if the transaction or the persistence operation fails
     */
    @Test
    public void testSaveFeedUrl() throws Exception {
        utx.begin();
        feedService.deleteAll();
        FeedUrl feedUrl = new FeedUrl();
        // Fix: was "1l" — use uppercase 'L' so the suffix is not misread as the digit 1.
        feedUrl.setId(1L);
        feedUrl.setUrl("A Simple Url For Test");
        feedService.saveFeedUrl(feedUrl);
        utx.commit();
    }
}
|
package org.biojavax.bio;
import java.util.Collections;
import java.util.Set;
import java.util.TreeSet;
import junit.framework.*;
import org.biojava.utils.ChangeEvent;
import org.biojava.utils.ChangeListener.ChangeEventRecorder;
import org.biojavax.Comment;
import org.biojavax.DocRefAuthor;
import org.biojavax.Namespace;
import org.biojavax.RankedCrossRef;
import org.biojavax.RankedDocRef;
import org.biojavax.RichObjectFactory;
import org.biojavax.SimpleComment;
import org.biojavax.SimpleCrossRef;
import org.biojavax.SimpleDocRef;
import org.biojavax.SimpleDocRefAuthor;
import org.biojavax.SimpleNamespace;
import org.biojavax.SimpleNote;
import org.biojavax.SimpleRankedCrossRef;
import org.biojavax.SimpleRankedDocRef;
import org.biojavax.bio.taxa.NCBITaxon;
import org.biojavax.bio.taxa.SimpleNCBITaxon;
import org.biojavax.ontology.ComparableTerm;
/**
*
* @author Mark Schreiber
*/
public class SimpleBioEntryTest extends TestCase {
// Fixture under test plus the constructor arguments used to build it.
SimpleBioEntry be; // instance under test, rebuilt in setUp()
String name;       // entry name ("aaa")
Namespace ns;      // default namespace from RichObjectFactory
String acc;        // accession ("test_acc")
int version;       // entry version (0)
ChangeEventRecorder cr; // listener attached in setUp() to observe fired change events
// Initializes the constructor arguments shared by every test.
public SimpleBioEntryTest(String testName) {
    super(testName);
    name = "aaa";
    ns = RichObjectFactory.getDefaultNamespace();
    acc = "test_acc";
    version = 0;
    // NOTE(review): cr is re-created in setUp(), so this assignment is redundant.
    cr = new ChangeEventRecorder();
}
// Builds a fresh entry and attaches a fresh event recorder before each test.
protected void setUp() throws Exception {
    be = new SimpleBioEntry(ns, name, acc, version);
    cr = new ChangeEventRecorder();
    be.addChangeListener(cr);
}
// Detaches the recorder and releases the fixture after each test.
protected void tearDown() throws Exception {
    be.removeChangeListener(cr);
    be = null;
    cr = null;
}
// JUnit 3 suite entry point: runs every test* method of this class.
public static Test suite() {
    TestSuite suite = new TestSuite(SimpleBioEntryTest.class);
    return suite;
}
/**
 * getRankedCrossRefs must never return null, starts out empty, and must stay
 * writable because Hibernate populates it directly.
 */
public void testGetRankedCrossRefs() {
    System.out.println("testGetRankedCrossRefs");
    assertNotNull(be.getRankedCrossRefs());
    assertEquals(0, be.getRankedCrossRefs().size());
    try {
        be.getRankedCrossRefs().add(new Object());
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
}
/**
 * setTaxon must store the taxon and fire a TAXON change event whose previous
 * value is null and whose new value is the taxon itself.
 */
public void testSetTaxon() {
    System.out.println("testSetTaxon");
    NCBITaxon taxon = new SimpleNCBITaxon(1621);
    try {
        be.addChangeListener(cr);
        be.setTaxon(taxon);
        ChangeEvent event = cr.getEvent();
        assertNotNull(event);
        assertEquals(be.TAXON, event.getType());
        assertNull(event.getPrevious());
        assertEquals(taxon, event.getChange());
        assertEquals(taxon, be.getTaxon());
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
}
/**
 * getAnnotation must never return null.
 */
public void testGetAnnotation() {
    System.out.println("testGetAnnotation");
    assertNotNull(be.getAnnotation());
}
/**
 * getNoteSet must never return null and must stay writable because Hibernate
 * populates it directly.
 */
public void testGetNoteSet() {
    System.out.println("testGetNoteSet");
    assertNotNull(be.getNoteSet());
    try {
        be.getNoteSet().add(new Object());
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
}
/**
 * setNoteSet must replace the note set wholesale; the same set must then be
 * returned by getNoteSet. No change event is expected for this setter.
 */
public void testSetNoteSet() {
    System.out.println("testSetNoteSet");
    Set notes = new TreeSet();
    notes.add(new SimpleNote(
            RichObjectFactory.getDefaultOntology().getOrCreateTerm("foo"),
            "bar", 0));
    try {
        be.setNoteSet(notes);
        // NOTE(review): this setter fires no change event — should it?
        assertEquals(notes, be.getNoteSet());
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
}
/**
 * getComments starts out as a non-null, empty set.
 */
public void testGetComments() {
    System.out.println("testGetComments");
    Set comments = be.getComments();
    assertNotNull(comments);
    assertEquals(0, comments.size());
}
/**
 * getRankedDocRefs starts out as a non-null, empty set.
 */
public void testGetRankedDocRefs() {
    System.out.println("testGetRankedDocRefs");
    Set docRefs = be.getRankedDocRefs();
    assertNotNull(docRefs);
    assertEquals(0, docRefs.size());
}
/**
 * getRelationships starts out as a non-null, empty set.
 */
public void testGetRelationships() {
    System.out.println("testGetRelationships");
    Set relationships = be.getRelationships();
    assertNotNull(relationships);
    assertEquals(0, relationships.size());
}
/**
 * setIdentifier must store the identifier and fire an IDENTIFIER change event
 * (previous value null, new value the identifier).
 */
public void testSetIdentifier() {
    System.out.println("testSetIdentifier");
    String id = "new id";
    try {
        be.setIdentifier(id);
        assertEquals(id, be.getIdentifier());
        ChangeEvent event = cr.getEvent();
        assertNotNull(event);
        assertEquals(be.IDENTIFIER, event.getType());
        assertNull(event.getPrevious());
        assertEquals(id, event.getChange());
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
}
/**
 * setDivision must store the division and fire a DIVISION change event
 * (previous value null, new value the division).
 */
public void testSetDivision() {
    System.out.println("testSetDivision");
    String division = "new div";
    try {
        be.setDivision(division);
        assertEquals(division, be.getDivision());
        ChangeEvent event = cr.getEvent();
        assertNotNull(event);
        assertEquals(be.DIVISION, event.getType());
        assertNull(event.getPrevious());
        assertEquals(division, event.getChange());
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
}
/**
 * setDescription must store the description and fire a DESCRIPTION change
 * event (previous value null, new value the description).
 */
public void testSetDescription() {
    System.out.println("testSetDescription");
    String description = "new desc";
    try {
        be.setDescription(description);
        assertEquals(description, be.getDescription());
        ChangeEvent event = cr.getEvent();
        assertNotNull(event);
        assertEquals(be.DESCRIPTION, event.getType());
        assertNull(event.getPrevious());
        assertEquals(description, event.getChange());
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
}
/** Accession must be the value passed to the constructor. */
public void testGetAccession() {
    System.out.println("testGetAccession");
    assertEquals(acc, be.getAccession());
}
/** Description is unset (null) on a freshly-built entry. */
public void testGetDescription() {
    System.out.println("testGetDescription");
    assertNull(be.getDescription());
}
/** Division is unset (null) on a freshly-built entry. */
public void testGetDivision() {
    System.out.println("testGetDivision");
    assertNull(be.getDivision());
}
/** Identifier is unset (null) on a freshly-built entry. */
public void testGetIdentifier() {
    System.out.println("testGetIdentifier");
    assertNull(be.getIdentifier());
}
/** Name must be the value passed to the constructor. */
public void testGetName() {
    System.out.println("testGetName");
    assertEquals(name, be.getName());
}
/** Namespace must be the value passed to the constructor. */
public void testGetNamespace() {
    System.out.println("testGetNamespace");
    assertEquals(ns, be.getNamespace());
}
/** Taxon is unset (null) on a freshly-built entry. */
public void testGetTaxon() {
    System.out.println("testGetTaxon");
    assertNull(be.getTaxon());
}
/** Version must be the value passed to the constructor. */
public void testGetVersion() {
    System.out.println("testGetVersion");
    assertEquals(version, be.getVersion());
}
/**
 * Two bioentries are equal iff they share namespace, name, accession and
 * version; mutable state such as the description is ignored.
 */
public void testEquals() {
    System.out.println("testEquals");
    assertFalse(be.equals(new Object()));
    assertFalse(be.equals(null));
    assertTrue(be.equals(be));
    BioEntry same = new SimpleBioEntry(be.getNamespace(), be.getName(),
            be.getAccession(), be.getVersion());
    assertTrue(be.equals(same));
    assertTrue(same.equals(be));
    try {
        // Mutating a non-identity field must not affect equality.
        same.setDescription("test");
        assertTrue(be.equals(same));
        assertTrue(same.equals(be));
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
    // Changing any one identity field must break equality (both directions).
    BioEntry other = new SimpleBioEntry(
            new SimpleNamespace("new"), be.getName(),
            be.getAccession(), be.getVersion());
    assertFalse(be.equals(other));
    assertFalse(other.equals(be));
    other = new SimpleBioEntry(
            be.getNamespace(), "different",
            be.getAccession(), be.getVersion());
    assertFalse(be.equals(other));
    assertFalse(other.equals(be));
    other = new SimpleBioEntry(
            be.getNamespace(), be.getName(),
            "different", be.getVersion());
    assertFalse(be.equals(other));
    assertFalse(other.equals(be));
    other = new SimpleBioEntry(
            be.getNamespace(), be.getName(),
            be.getAccession(), be.getVersion() + 1);
    assertFalse(be.equals(other));
    assertFalse(other.equals(be));
}
/**
 * Bioentries order first by namespace, then name, then accession, then
 * version; compareTo must be zero for identity-equal entries regardless of
 * mutable state.
 */
public void testCompareTo() {
    System.out.println("testCompareTo");
    assertTrue(be.compareTo(be) == 0);
    BioEntry same = new SimpleBioEntry(be.getNamespace(), be.getName(),
            be.getAccession(), be.getVersion());
    assertTrue(be.compareTo(same) == 0);
    assertTrue(same.compareTo(be) == 0);
    try {
        // Mutating a non-identity field must not affect the ordering.
        same.setDescription("test");
        assertTrue(be.compareTo(same) == 0);
        assertTrue(same.compareTo(be) == 0);
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
    // Each identity field in turn produces a strict ordering (antisymmetric).
    BioEntry other = new SimpleBioEntry(
            new SimpleNamespace("new"), be.getName(),
            be.getAccession(), be.getVersion());
    assertTrue(be.compareTo(other) < 0);
    assertTrue(other.compareTo(be) > 0);
    other = new SimpleBioEntry(
            be.getNamespace(), "different",
            be.getAccession(), be.getVersion());
    assertTrue(be.compareTo(other) < 0);
    assertTrue(other.compareTo(be) > 0);
    other = new SimpleBioEntry(
            be.getNamespace(), be.getName(),
            "different", be.getVersion());
    assertTrue(be.compareTo(other) > 0);
    assertTrue(other.compareTo(be) < 0);
    other = new SimpleBioEntry(
            be.getNamespace(), be.getName(),
            be.getAccession(), be.getVersion() + 1);
    assertTrue(be.compareTo(other) < 0);
    assertTrue(other.compareTo(be) > 0);
}
/**
 * Test of hashCode method, of class org.biojavax.bio.SimpleBioEntry.
 */
public void testHashCode() {
    System.out.println("testHashCode");
    // An entry built from the same identifying fields hashes identically.
    BioEntry copy = new SimpleBioEntry(
            be.getNamespace(), be.getName(),
            be.getAccession(), be.getVersion());
    assertTrue(be.hashCode() == copy.hashCode());
    // The description does not contribute to the hash code.
    try {
        copy.setDescription("test");
        assertTrue(be.hashCode() == copy.hashCode());
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
}
/**
 * Test of toString method, of class org.biojavax.bio.SimpleBioEntry.
 */
public void testToString() {
    System.out.println("testToString");
    // Expected form: namespace:name/accession.version
    String expected = be.getNamespace() + ":" + be.getName()
            + "/" + be.getAccession() + "." + be.getVersion();
    assertEquals(expected, be.toString());
}
/**
 * Test of addRankedCrossRef method, of class org.biojavax.bio.SimpleBioEntry.
 */
public void testAddRankedCrossRef() {
    System.out.println("testAddRankedCrossRef");
    // Null additions must be rejected.
    try {
        be.addRankedCrossRef(null);
        fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException ex) {
        // expected
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
    RankedCrossRef xref = new SimpleRankedCrossRef(
            new SimpleCrossRef("dbname", "AC123456", 1), 0);
    try {
        be.addRankedCrossRef(xref);
        assertTrue(be.getRankedCrossRefs().contains(xref));
        // The addition fires a RANKEDCROSSREF event going from null to xref.
        ChangeEvent ce = cr.getEvent();
        assertNotNull(ce);
        assertEquals(be.RANKEDCROSSREF, ce.getType());
        assertNull(ce.getPrevious());
        assertEquals(xref, ce.getChange());
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
}
/**
 * Test of removeRankedCrossRef method, of class org.biojavax.bio.SimpleBioEntry.
 */
public void testRemoveRankedCrossRef() {
    System.out.println("testRemoveRankedCrossRef");
    // Install a reference so there is something to remove.
    RankedCrossRef xref = new SimpleRankedCrossRef(
            new SimpleCrossRef("dbname", "AC123456", 1), 0);
    try {
        be.addRankedCrossRef(xref);
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
    // Null removals must be rejected.
    try {
        be.removeRankedCrossRef(null);
        fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException ex) {
        // expected
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
    try {
        be.removeRankedCrossRef(xref);
        assertFalse(be.getRankedCrossRefs().contains(xref));
        // The removal fires a RANKEDCROSSREF event going from xref to null.
        ChangeEvent ce = cr.getEvent();
        assertNotNull(ce);
        assertEquals(be.RANKEDCROSSREF, ce.getType());
        assertEquals(xref, ce.getPrevious());
        assertNull(ce.getChange());
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
}
/**
 * Test of addRankedDocRef method, of class org.biojavax.bio.SimpleBioEntry.
 */
public void testAddRankedDocRef() {
    System.out.println("testAddRankedDocRef");
    // Null additions must be rejected.
    try {
        be.addRankedDocRef(null);
        fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException ex) {
        // expected
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
    DocRefAuthor author = new SimpleDocRefAuthor("Hemmingway");
    RankedDocRef ref = new SimpleRankedDocRef(
            new SimpleDocRef(Collections.singletonList(author), "a book", "a title"),
            new Integer(1), new Integer(10), 0);
    try {
        be.addRankedDocRef(ref);
        assertTrue(be.getRankedDocRefs().contains(ref));
        // The addition fires a RANKEDDOCREF event going from null to ref.
        ChangeEvent ce = cr.getEvent();
        assertNotNull(ce);
        assertEquals(be.RANKEDDOCREF, ce.getType());
        assertNull(ce.getPrevious());
        assertEquals(ref, ce.getChange());
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
}
/**
 * Test of removeRankedDocRef method, of class org.biojavax.bio.SimpleBioEntry.
 * Verifies null rejection, removal from the doc ref collection, and the
 * change event fired by the removal.
 */
public void testRemoveRankedDocRef() {
    System.out.println("testRemoveRankedDocRef");
    //should not be able to remove null
    try{
        be.removeRankedDocRef(null);
        fail("Expected IllegalArgumentException");
    }catch(IllegalArgumentException ex){}
    catch(Exception ex){
        fail("Not expecting "+ex.getClass().getName());
    }
    DocRefAuthor author = new SimpleDocRefAuthor("Hemmingway");
    RankedDocRef ref = new SimpleRankedDocRef(
            new SimpleDocRef(Collections.singletonList(author), "a book", "a title"),
            new Integer(1), new Integer(10), 0);
    //first add one
    try{
        be.addRankedDocRef(ref);
    }
    catch(Exception ex){
        fail("Not expecting "+ex.getClass().getName());
    }
    try{
        be.removeRankedDocRef(ref);
        // FIX: previously asserted against getRankedCrossRefs(), which never
        // contains a RankedDocRef, so the check passed vacuously; the doc ref
        // collection is the one the removal must affect.
        assertFalse(be.getRankedDocRefs().contains(ref));
        //should have generated an event
        ChangeEvent ce = cr.getEvent();
        assertNotNull(ce);
        //of the right type
        assertEquals(be.RANKEDDOCREF, ce.getType());
        //was ref
        assertEquals(ref, ce.getPrevious());
        //now null
        assertNull(ce.getChange());
    }
    catch(Exception ex){
        fail("Not expecting "+ex.getClass().getName());
    }
}
/**
 * Test of addComment method, of class org.biojavax.bio.SimpleBioEntry.
 */
public void testAddComment() {
    System.out.println("testAddComment");
    // Null additions must be rejected.
    try {
        be.addComment(null);
        fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException ex) {
        // expected
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
    Comment com = new SimpleComment("comment", 0);
    try {
        be.addComment(com);
        assertTrue(be.getComments().contains(com));
        // The addition fires a COMMENT event going from null to com.
        ChangeEvent ce = cr.getEvent();
        assertNotNull(ce);
        assertEquals(be.COMMENT, ce.getType());
        assertNull(ce.getPrevious());
        assertEquals(com, ce.getChange());
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
}
/**
 * Test of removeComment method, of class org.biojavax.bio.SimpleBioEntry.
 */
public void testRemoveComment() {
    System.out.println("testRemoveComment");
    // Null removals must be rejected.
    try {
        be.removeComment(null);
        fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException ex) {
        // expected
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
    // Install a comment so there is something to remove.
    Comment com = new SimpleComment("comment", 0);
    try {
        be.addComment(com);
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
    try {
        be.removeComment(com);
        assertFalse(be.getComments().contains(com));
        // The removal fires a COMMENT event going from com to null.
        ChangeEvent ce = cr.getEvent();
        assertNotNull(ce);
        assertEquals(be.COMMENT, ce.getType());
        assertEquals(com, ce.getPrevious());
        assertNull(ce.getChange());
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
}
/**
 * Test of addRelationship method, of class org.biojavax.bio.SimpleBioEntry.
 */
public void testAddRelationship() {
    System.out.println("testAddRelationship");
    // Null additions must be rejected.
    try {
        be.addRelationship(null);
        fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException ex) {
        // expected
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
    BioEntry target = new SimpleBioEntry(
            be.getNamespace(), "different",
            be.getAccession(), be.getVersion());
    ComparableTerm term =
            RichObjectFactory.getDefaultOntology().getOrCreateTerm("foo");
    BioEntryRelationship rel = new SimpleBioEntryRelationship(
            be, target, term, new Integer(0));
    try {
        be.addRelationship(rel);
        assertTrue(be.getRelationships().contains(rel));
        // The addition fires a RELATIONS event going from null to rel.
        ChangeEvent ce = cr.getEvent();
        assertNotNull(ce);
        assertEquals(be.RELATIONS, ce.getType());
        assertNull(ce.getPrevious());
        assertEquals(rel, ce.getChange());
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
}
/**
 * Test of removeRelationship method, of class org.biojavax.bio.SimpleBioEntry.
 */
public void testRemoveRelationship() {
    System.out.println("testRemoveRelationship");
    // Null removals must be rejected.
    try {
        be.removeRelationship(null);
        fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException ex) {
        // expected
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
    BioEntry target = new SimpleBioEntry(
            be.getNamespace(), "different",
            be.getAccession(), be.getVersion());
    ComparableTerm term =
            RichObjectFactory.getDefaultOntology().getOrCreateTerm("foo");
    BioEntryRelationship rel = new SimpleBioEntryRelationship(
            be, target, term, new Integer(0));
    // Install a relationship so there is something to remove.
    try {
        be.addRelationship(rel);
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
    try {
        be.removeRelationship(rel);
        assertFalse(be.getRelationships().contains(rel));
        // The removal fires a RELATIONS event going from rel to null.
        ChangeEvent ce = cr.getEvent();
        assertNotNull(ce);
        assertEquals(be.RELATIONS, ce.getType());
        assertEquals(rel, ce.getPrevious());
        assertNull(ce.getChange());
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
}
/**
 * Test of setRankedCrossRefs method, of class org.biojavax.bio.SimpleBioEntry.
 */
public void testSetRankedCrossRefs() {
    System.out.println("testSetRankedCrossRefs");
    Set refs = new TreeSet();
    refs.add(new SimpleRankedCrossRef(new SimpleCrossRef("dbname", "AC123456", 1), 0));
    try {
        be.setRankedCrossRefs(refs);
        // The getter must reflect exactly what was set.
        assertEquals(refs, be.getRankedCrossRefs());
    } catch (Exception ex) {
        fail("Not expecting " + ex.getClass().getName());
    }
}
}
|
package mb.statix.solver.store;
import java.util.Collection;
import java.util.Deque;
import java.util.HashSet;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedDeque;
import org.metaborg.util.log.Level;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMap.Builder;
import com.google.common.collect.Multimap;
import mb.nabl2.terms.ITermVar;
import mb.statix.solver.CriticalEdge;
import mb.statix.solver.Delay;
import mb.statix.solver.IConstraint;
import mb.statix.solver.IConstraintStore;
import mb.statix.solver.log.IDebugContext;
/**
 * A straightforward {@link IConstraintStore} which keeps active constraints in
 * a deque and indexes delayed constraints by the variable or critical edge
 * they are stuck on.
 */
public class BaseConstraintStore implements IConstraintStore {

    final IDebugContext debug;

    // constraints that are ready to be solved
    private final Deque<IConstraint> active;
    // constraints waiting for one of these variables to be instantiated
    private final Multimap<ITermVar, Delayed> stuckOnVar;
    // constraints waiting for one of these critical edges to be resolved
    private final Multimap<CriticalEdge, Delayed> stuckOnEdge;

    public BaseConstraintStore(IDebugContext debug) {
        this.debug = debug;
        this.active = new ConcurrentLinkedDeque<>();
        this.stuckOnVar = HashMultimap.create();
        this.stuckOnEdge = HashMultimap.create();
    }

    @Override public int activeSize() {
        return active.size();
    }

    @Override public int delayedSize() {
        return stuckOnVar.size() + stuckOnEdge.size();
    }

    @Override public void add(IConstraint constraint) {
        active.push(constraint);
    }

    @Override public IConstraint remove() throws NoSuchElementException {
        // NOTE(review): poll() returns null on an empty deque instead of
        // throwing the declared NoSuchElementException -- confirm callers
        // expect null here before changing this to pop().
        return active.poll();
    }

    /**
     * Records {@code constraint} as delayed, indexed under every variable or
     * critical edge of {@code delay}.
     *
     * @throws IllegalArgumentException if the delay has neither variables nor
     *             critical edges.
     */
    @Override public void delay(IConstraint constraint, Delay delay) {
        final Delayed delayed = new Delayed(constraint);
        if(!delay.vars().isEmpty()) {
            if(debug.isEnabled(Level.Debug)) {
                debug.debug("delayed {} on vars {}", constraint, delay.vars());
            }
            for(ITermVar var : delay.vars()) {
                stuckOnVar.put(var, delayed);
            }
        } else if(!delay.criticalEdges().isEmpty()) {
            if(debug.isEnabled(Level.Debug)) {
                debug.debug("delayed {} on critical edges {}", constraint, delay.criticalEdges());
            }
            for(CriticalEdge edge : delay.criticalEdges()) {
                stuckOnEdge.put(edge, delayed);
            }
        } else {
            throw new IllegalArgumentException("delayed for no apparent reason");
        }
    }

    @Override public void activateFromVars(Iterable<? extends ITermVar> vars, IDebugContext debug) {
        for(ITermVar var : vars) {
            activateAll(stuckOnVar.removeAll(var), debug);
        }
    }

    @Override public void activateFromEdges(Iterable<? extends CriticalEdge> edges,
            IDebugContext debug) {
        for(CriticalEdge edge : edges) {
            activateAll(stuckOnEdge.removeAll(edge), debug);
        }
    }

    // moves each not-yet-activated delayed constraint back to the active queue;
    // a constraint delayed on several variables/edges is only re-queued once
    // because Delayed.activate() fires a single time
    private void activateAll(Collection<Delayed> delayedConstraints, IDebugContext debug) {
        for(Delayed delayed : delayedConstraints) {
            if(delayed.activate()) {
                final IConstraint constraint = delayed.constraint;
                if(debug.isEnabled(Level.Debug)) {
                    debug.debug("activating {}", constraint);
                }
                add(constraint);
            }
        }
    }

    /**
     * Returns the currently delayed constraints, each mapped to a delay
     * combining all the variables and critical edges it is still stuck on.
     * Entries that were already activated are ignored.
     */
    @Override public Map<IConstraint, Delay> delayed() {
        final Multimap<IConstraint, ITermVar> varStuck = HashMultimap.create();
        stuckOnVar.entries().stream().filter(e -> !e.getValue().activated)
                .forEach(e -> varStuck.put(e.getValue().constraint, e.getKey()));
        final Multimap<IConstraint, CriticalEdge> edgeStuck = HashMultimap.create();
        stuckOnEdge.entries().stream().filter(e -> !e.getValue().activated)
                .forEach(e -> edgeStuck.put(e.getValue().constraint, e.getKey()));
        final Set<IConstraint> stuck = new HashSet<>();
        // keySet() yields each constraint once; keys() repeats each key per value
        stuck.addAll(varStuck.keySet());
        stuck.addAll(edgeStuck.keySet());
        final Builder<IConstraint, Delay> delayed = ImmutableMap.builder();
        stuck.forEach(c -> delayed.put(c, new Delay(varStuck.get(c), edgeStuck.get(c))));
        return delayed.build();
    }

    /**
     * A delayed constraint with a single-shot activation flag, so that a
     * constraint indexed under several keys is not re-queued more than once.
     */
    private static class Delayed {

        public final IConstraint constraint;

        private boolean activated = false;

        public Delayed(IConstraint constraint) {
            this.constraint = constraint;
        }

        // returns true exactly once, on the first call
        public boolean activate() {
            if(activated) {
                return false;
            } else {
                activated = true;
                return true;
            }
        }

        @Override public String toString() {
            return activated ? "*" : constraint.toString();
        }
    }
}
|
package com.bbn.kbp.events;
import com.bbn.bue.common.Finishable;
import com.bbn.bue.common.HasDocID;
import com.bbn.bue.common.Inspector;
import com.bbn.bue.common.evaluation.AggregateBinaryFScoresInspector;
import com.bbn.bue.common.evaluation.BinaryErrorLogger;
import com.bbn.bue.common.evaluation.BinaryFScoreBootstrapStrategy;
import com.bbn.bue.common.evaluation.BootstrapInspector;
import com.bbn.bue.common.evaluation.EquivalenceBasedProvenancedAligner;
import com.bbn.bue.common.evaluation.EvalPair;
import com.bbn.bue.common.evaluation.InspectionNode;
import com.bbn.bue.common.evaluation.InspectorTreeDSL;
import com.bbn.bue.common.evaluation.InspectorTreeNode;
import com.bbn.bue.common.evaluation.ProvenancedAlignment;
import com.bbn.bue.common.files.FileUtils;
import com.bbn.bue.common.parameters.Parameters;
import com.bbn.bue.common.strings.offsets.CharOffset;
import com.bbn.bue.common.strings.offsets.OffsetRange;
import com.bbn.bue.common.symbols.Symbol;
import com.bbn.bue.common.symbols.SymbolUtils;
import com.bbn.kbp.events.ontology.EREToKBPEventOntologyMapper;
import com.bbn.kbp.events.ontology.SimpleEventOntologyMapper;
import com.bbn.kbp.events2014.DocumentSystemOutput2015;
import com.bbn.kbp.events2014.Response;
import com.bbn.kbp.events2014.ResponseLinking;
import com.bbn.kbp.events2014.ResponseSet;
import com.bbn.kbp.events2014.SystemOutputLayout;
import com.bbn.kbp.events2014.io.SystemOutputStore;
import com.bbn.kbp.linking.ExplicitFMeasureInfo;
import com.bbn.kbp.linking.LinkF1;
import com.bbn.nlp.corenlp.CoreNLPDocument;
import com.bbn.nlp.corenlp.CoreNLPParseNode;
import com.bbn.nlp.corenlp.CoreNLPXMLLoader;
import com.bbn.nlp.corpora.ere.EREArgument;
import com.bbn.nlp.corpora.ere.EREDocument;
import com.bbn.nlp.corpora.ere.EREEntity;
import com.bbn.nlp.corpora.ere.EREEntityArgument;
import com.bbn.nlp.corpora.ere.EREEntityMention;
import com.bbn.nlp.corpora.ere.EREEvent;
import com.bbn.nlp.corpora.ere.EREEventMention;
import com.bbn.nlp.corpora.ere.EREFillerArgument;
import com.bbn.nlp.corpora.ere.ERELoader;
import com.bbn.nlp.events.HasEventType;
import com.bbn.nlp.events.scoring.DocLevelEventArg;
import com.bbn.nlp.parsing.HeadFinders;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Multiset;
import com.google.common.io.Files;
import com.google.common.reflect.TypeToken;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import javax.annotation.Nullable;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.inspect;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformBoth;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformLeft;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformRight;
import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformed;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.Iterables.concat;
import static com.google.common.collect.Iterables.filter;
/**
* Scores KBP 2015 event argument output against an ERE gold standard. Scoring is in terms of
* (Event Type, Event Role, Entity) tuples. This program is an experimental rough draft and has a
* number of limitations: <ul> <li>We only handle arguments which are entity mentions; others are
* ignored according to the ERE structure on the gold side and by filtering out a (currently
hardcoded) set of argument roles on the system side.</li> <li>We map system responses to entities
by looking for an entity which has a mention which shares the character offsets of the base
filler exactly either by itself or by its nominal head (given in ERE). In the future we may
implement more lenient alignment strategies.</li> <li> Currently system responses which fail to
* align to any entity at all are discarded rather than penalized.</li> </ul>
*/
public final class ScoreKBPAgainstERE {
private static final Logger log = LoggerFactory.getLogger(ScoreKBPAgainstERE.class);
// utility class; prevent instantiation
private ScoreKBPAgainstERE() {
throw new UnsupportedOperationException();
}
/**
 * Entry point; delegates to trueMain and converts any failure into a
 * non-zero exit code.
 */
public static void main(String[] argv) {
// we wrap the main method in this way to
// ensure a non-zero return value on failure
try {
trueMain(argv);
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
}
/**
 * Scores the configured system output against the ERE gold standard.
 * Expects a Serif-style parameter file as the single command-line argument;
 * the required parameter names appear in the lookups below.
 */
private static void trueMain(String[] argv) throws IOException {
    Parameters params = Parameters.loadSerifStyle(new File(argv[0]));
    log.info(params.dump());
    final ImmutableSet<Symbol> docIDsToScore = ImmutableSet.copyOf(
        FileUtils.loadSymbolList(params.getExistingFile("docIDsToScore")));
    final ImmutableMap<Symbol, File> goldDocIDToFileMap = FileUtils.loadSymbolToFileMap(
        Files.asCharSource(params.getExistingFile("goldDocIDToFileMap"), Charsets.UTF_8));
    final File outputDir = params.getCreatableDirectory("ereScoringOutput");
    final SystemOutputLayout outputLayout = SystemOutputLayout.ParamParser.fromParamVal(
        params.getString("outputLayout"));
    final SystemOutputStore outputStore =
        outputLayout.open(params.getExistingDirectory("systemOutput"));
    final ImmutableMap<Symbol, File> coreNLPProcessedRawDocs = FileUtils.loadSymbolToFileMap(
        Files.asCharSource(params.getExistingFile("coreNLPDocIDMap"), Charsets.UTF_8));
    final boolean relaxUsingCORENLP = params.getBoolean("relaxUsingCoreNLP");
    final boolean useExactMatchForCoreNLPRelaxation =
        relaxUsingCORENLP && params.getBoolean("useExactMatchForCoreNLPRelaxation");
    final CoreNLPXMLLoader coreNLPXMLLoader =
        CoreNLPXMLLoader.builder(HeadFinders.<CoreNLPParseNode>getEnglishPTBHeadFinder()).build();
    log.info("Scoring over {} documents", docIDsToScore.size());
    // on the gold side we take an ERE document as input
    final TypeToken<EREDocument> inputIsEREDoc = new TypeToken<EREDocument>() {
    };
    // on the test side we take an AnswerKey, but we bundle it with the gold ERE document
    // for use in alignment later
    final TypeToken<EREDocAndResponses> inputIsEREDocAndAnswerKey =
        new TypeToken<EREDocAndResponses>() {
        };
    final InspectionNode<EvalPair<EREDocument, EREDocAndResponses>>
        input = InspectorTreeDSL.pairedInput(inputIsEREDoc, inputIsEREDocAndAnswerKey);
    // these will extract the scoring tuples from the KBP system input and ERE docs, respectively.
    // we create these here because we will call their .finish() methods
    // at the end to record some statistics about alignment failures,
    // so we need to keep references to them
    final ResponsesAndLinkingFromKBPExtractor responsesAndLinkingFromKBPExtractor =
        new ResponsesAndLinkingFromKBPExtractor(coreNLPProcessedRawDocs,
            coreNLPXMLLoader, relaxUsingCORENLP,
            useExactMatchForCoreNLPRelaxation);
    final ResponsesAndLinkingFromEREExtractor responsesAndLinkingFromEREExtractor =
        new ResponsesAndLinkingFromEREExtractor(EREToKBPEventOntologyMapper.create2015Mapping());
    // this sets it up so that everything fed to input will be scored in various ways
    setupScoring(input, responsesAndLinkingFromKBPExtractor, responsesAndLinkingFromEREExtractor,
        outputDir);
    final ERELoader loader = ERELoader.create();
    for (final Symbol docID : docIDsToScore) {
      final File ereFileName = goldDocIDToFileMap.get(docID);
      if (ereFileName == null) {
        throw new RuntimeException("Missing key file for " + docID);
      }
      final EREDocument ereDoc = loader.loadFrom(ereFileName);
      checkState(ereDoc.getDocId().equals(docID.asString()),
          "fetched document ID must be equal to stored");
      // read the system output once and reuse it for both arguments and linking
      // (it was previously read twice per document)
      final DocumentSystemOutput2015 systemOutput =
          (DocumentSystemOutput2015) outputStore.read(docID);
      final Iterable<Response>
          responses = filter(systemOutput.arguments().responses(), bannedRolesFilter);
      // FIX: copyWithFilteredResponses returns the filtered linking rather than
      // mutating in place; its result was previously discarded, so the
      // unfiltered linking was fed to the scoring network.
      final ResponseLinking linking = systemOutput.linking()
          .copyWithFilteredResponses(Predicates.in(ImmutableSet.copyOf(responses)));
      // feed this ERE doc/ KBP output pair to the scoring network
      input.inspect(EvalPair.of(ereDoc, new EREDocAndResponses(ereDoc, responses, linking)));
    }
    // trigger the scoring network to write its summary files
    input.finish();
    // log alignment failures
    responsesAndLinkingFromKBPExtractor.finish();
    responsesAndLinkingFromEREExtractor.finish();
}
// argument roles excluded from scoring on the system side (see class javadoc)
private static final ImmutableSet<Symbol> BANNED_ROLES =
SymbolUtils.setFrom("Time", "Crime", "Position",
"Fine", "Sentence");
// keeps only responses whose role is not in BANNED_ROLES
private static final Predicate<Response> bannedRolesFilter = new Predicate<Response>() {
@Override
public boolean apply(@Nullable final Response response) {
return !BANNED_ROLES.contains(response.role());
}
};
// aligns system and gold argument tuples by exact equality of the tuples
private static Function<EvalPair<? extends Iterable<? extends DocLevelEventArg>, ? extends Iterable<? extends DocLevelEventArg>>, ProvenancedAlignment<DocLevelEventArg, DocLevelEventArg, DocLevelEventArg, DocLevelEventArg>>
EXACT_MATCH_ALIGNER = EquivalenceBasedProvenancedAligner
.forEquivalenceFunction(Functions.<DocLevelEventArg>identity())
.asFunction();
// this sets up a scoring network which is executed on every input
private static void setupScoring(
    final InspectionNode<EvalPair<EREDocument, EREDocAndResponses>> input,
    final ResponsesAndLinkingFromKBPExtractor responsesAndLinkingFromKBPExtractor,
    final ResponsesAndLinkingFromEREExtractor responsesAndLinkingFromEREExtractor,
    final File outputDir) {
  // reduce both sides of the pair to ResponsesAndLinking: the gold (left)
  // side from the ERE document, the system (right) side from the KBP output
  final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> reduced =
      transformRight(transformLeft(input, responsesAndLinkingFromEREExtractor),
          responsesAndLinkingFromKBPExtractor);
  // 2015-style event argument scoring
  eventArgumentScoringSetup(reduced, outputDir);
  // 2015-style linking scoring
  linkingScoringSetup(reduced, outputDir);
}
/**
 * Wires up event-argument scoring: exact-match alignment followed by an
 * overall F score, a wrong-answer log, and bootstrapped per-event-type scores.
 */
private static void eventArgumentScoringSetup(
    final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>>
        inputAsResponsesAndLinking, final File outputDir) {
  // reduce both sides to their sets of argument scoring tuples
  final InspectorTreeNode<EvalPair<ImmutableSet<DocLevelEventArg>, ImmutableSet<DocLevelEventArg>>>
      scoringTuples =
      transformBoth(inputAsResponsesAndLinking, ResponsesAndLinking.argFunction);
  // align system tuples against gold tuples by exact equality
  final InspectorTreeNode<ProvenancedAlignment<DocLevelEventArg, DocLevelEventArg, DocLevelEventArg, DocLevelEventArg>>
      aligned = transformed(scoringTuples, EXACT_MATCH_ALIGNER);
  // overall F score
  final AggregateBinaryFScoresInspector<Object, Object> overallFScore =
      AggregateBinaryFScoresInspector.createOutputtingTo("aggregateF.txt", outputDir);
  inspect(aligned).with(overallFScore);
  // record every misaligned answer for later inspection
  final BinaryErrorLogger<HasDocID, HasDocID> wrongAnswerLog = BinaryErrorLogger
      .forStringifierAndOutputDir(Functions.<HasDocID>toStringFunction(), outputDir);
  inspect(aligned).with(wrongAnswerLog);
  // per-event-type F scores with bootstrap resampling (fixed seed, so runs
  // are reproducible)
  final BinaryFScoreBootstrapStrategy perEventType =
      BinaryFScoreBootstrapStrategy.createBrokenDownBy("EventType",
          HasEventType.ExtractFunction.INSTANCE, outputDir);
  final BootstrapInspector perEventTypeBootstrap =
      BootstrapInspector.forStrategy(perEventType, 1000, new Random(0));
  inspect(aligned).with(perEventTypeBootstrap);
}
/**
 * Wires up linking scoring: both sides are reduced to their linkings, the
 * gold linking is restricted to arguments the system produced, and linking
 * F1 is written to linkingF.txt.
 */
private static void linkingScoringSetup(
    final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>>
        inputAsResponsesAndLinking, final File outputDir) {
  // reduce both sides to their linkings (sets of argument sets); use
  // transformBoth for consistency with eventArgumentScoringSetup instead of
  // applying the same function via nested transformLeft/transformRight
  final InspectorTreeNode<EvalPair<ImmutableSet<ImmutableSet<DocLevelEventArg>>, ImmutableSet<ImmutableSet<DocLevelEventArg>>>>
      linkingNode =
      transformBoth(inputAsResponsesAndLinking, ResponsesAndLinking.linkingFunction);
  // restrict the gold linking to arguments the system actually produced
  final InspectorTreeNode<EvalPair<ImmutableSet<ImmutableSet<DocLevelEventArg>>, ImmutableSet<ImmutableSet<DocLevelEventArg>>>>
      filteredNode =
      transformed(linkingNode, ScoreKBPAgainstERE.<DocLevelEventArg>restrictToLinkingFunction());
  final LinkingInspector linkingInspector =
      LinkingInspector.createOutputtingTo(new File(outputDir, "linkingF.txt"));
  inspect(filteredNode).with(linkingInspector);
}
/**
 * Returns a function which, given a collection of sets, filters the elements
 * of each inner set by {@code filter} (empty inner sets are preserved).
 */
private static <T> Function<Iterable<? extends Set<T>>, ImmutableSet<ImmutableSet<T>>> filterNestedElements(
    final Predicate<T> filter) {
  return new Function<Iterable<? extends Set<T>>, ImmutableSet<ImmutableSet<T>>>() {
    @Nullable
    @Override
    public ImmutableSet<ImmutableSet<T>> apply(@Nullable final Iterable<? extends Set<T>> sets) {
      final ImmutableSet.Builder<ImmutableSet<T>> filtered = ImmutableSet.builder();
      for (final Set<T> inner : sets) {
        filtered.add(ImmutableSet.copyOf(Iterables.filter(inner, filter)));
      }
      return filtered.build();
    }
  };
}
/**
 * Returns a function which restricts the gold (key) linking to only those
 * elements which also appear somewhere in the system (test) linking; the
 * test side is passed through untouched.
 */
private static <T> Function<EvalPair<ImmutableSet<ImmutableSet<T>>, ImmutableSet<ImmutableSet<T>>>, EvalPair<ImmutableSet<ImmutableSet<T>>, ImmutableSet<ImmutableSet<T>>>> restrictToLinkingFunction() {
  return new Function<EvalPair<ImmutableSet<ImmutableSet<T>>, ImmutableSet<ImmutableSet<T>>>, EvalPair<ImmutableSet<ImmutableSet<T>>, ImmutableSet<ImmutableSet<T>>>>() {
    @Nullable
    @Override
    public EvalPair<ImmutableSet<ImmutableSet<T>>, ImmutableSet<ImmutableSet<T>>> apply(
        @Nullable final EvalPair<ImmutableSet<ImmutableSet<T>>, ImmutableSet<ImmutableSet<T>>> input) {
      // everything the system linked, flattened into one set
      final ImmutableSet<T> testElements =
          ImmutableSet.copyOf(Iterables.concat(input.test()));
      // drop key elements that are absent from the test side
      final ImmutableSet<ImmutableSet<T>> restrictedKey =
          filterNestedElements(Predicates.in(testElements)).apply(input.key());
      return EvalPair.of(restrictedKey, input.test());
    }
  };
}
/**
 * Scores linking F1 via {@link LinkF1} and writes the result to a file on
 * {@link #finish()}.
 */
private static final class LinkingInspector implements
    Inspector<EvalPair<ImmutableSet<ImmutableSet<DocLevelEventArg>>, ImmutableSet<ImmutableSet<DocLevelEventArg>>>> {

  private final File outputFile;
  // NOTE(review): overwritten on every inspect() call, so finish() writes the
  // scores of the last inspected item only -- confirm this is the intent.
  ExplicitFMeasureInfo counts = null;

  private LinkingInspector(final File outputFile) {
    this.outputFile = outputFile;
  }

  public static LinkingInspector createOutputtingTo(final File outputFile) {
    return new LinkingInspector(outputFile);
  }

  @Override
  public void inspect(
      final EvalPair<ImmutableSet<ImmutableSet<DocLevelEventArg>>, ImmutableSet<ImmutableSet<DocLevelEventArg>>> item) {
    // the key linking may only reference arguments present in the test set
    checkArgument(ImmutableSet.copyOf(concat(item.test())).containsAll(
        ImmutableSet.copyOf(concat(item.key()))), "Must contain only answers in test set!");
    counts = LinkF1.create().score(item.key(), item.test());
  }

  @Override
  public void finish() throws IOException {
    checkNotNull(counts, "Inspect must be called before Finish!");
    // try-with-resources ensures the writer is closed even if println fails
    try (final PrintWriter outputWriter = new PrintWriter(outputFile)) {
      outputWriter.println(counts.toString());
    }
  }
}
/**
 * Extracts the gold-standard scoring tuples and their linking from an ERE
 * document. Arguments whose event type, subtype, or role cannot be mapped
 * into the KBP ontology are skipped.
 */
private static final class ResponsesAndLinkingFromEREExtractor
implements Function<EREDocument, ResponsesAndLinking>, Finishable {
// for tracking things from the answer key discarded due to not being entity mentions
// NOTE(review): 'discarded' is never added to anywhere in this class, so
// finish() always reports zero discarded arguments -- confirm whether the
// skipped (unmappable) arguments were meant to be counted here.
private final Multiset<String> allGoldArgs = HashMultiset.create();
private final Multiset<String> discarded = HashMultiset.create();
// maps ERE event types/subtypes/roles into the KBP ontology
private final SimpleEventOntologyMapper mapper;
private ResponsesAndLinkingFromEREExtractor(final SimpleEventOntologyMapper mapper) {
this.mapper = checkNotNull(mapper);
}
@Override
public ResponsesAndLinking apply(final EREDocument doc) {
final ImmutableSet.Builder<DocLevelEventArg> ret = ImmutableSet.builder();
// every event mention argument within a hopper is linked
final ImmutableSet.Builder<ImmutableSet<DocLevelEventArg>> linking = ImmutableSet.builder();
for (final EREEvent ereEvent : doc.getEvents()) {
// one response set per gold event (hopper)
final ImmutableSet.Builder<DocLevelEventArg> responseSet = ImmutableSet.builder();
for (final EREEventMention ereEventMention : ereEvent.getEventMentions()) {
for (final EREArgument ereArgument : ereEventMention.getArguments()) {
final Symbol ereEventMentionType = Symbol.from(ereEventMention.getType());
final Symbol ereEventMentionSubtype = Symbol.from(ereEventMention.getSubtype());
final Symbol ereArgumentRole = Symbol.from(ereArgument.getRole());
// skip anything the KBP ontology cannot express, logging each reason
boolean skip = false;
if (!mapper.eventType(ereEventMentionType).isPresent()) {
log.debug("EventType {} is not known to the KBP ontology", ereEventMentionType);
skip = true;
}
if (!mapper.eventRole(ereArgumentRole).isPresent()) {
log.debug("EventRole {} is not known to the KBP ontology", ereArgumentRole);
skip = true;
}
if (!mapper.eventSubtype(ereEventMentionSubtype).isPresent()) {
log.debug("EventSubtype {} is not known to the KBP ontology", ereEventMentionSubtype);
skip = true;
}
if (skip) {
continue;
}
// type.subtype is Response format
final String typeRoleKey = mapper.eventType(ereEventMentionType).get() +
"." + mapper.eventSubtype(ereEventMentionSubtype).get() +
"/" + mapper.eventRole(ereArgumentRole).get();
allGoldArgs.add(typeRoleKey);
if (ereArgument instanceof EREEntityArgument) {
// entity-mention arguments are identified by their containing entity's ID
final EREEntityMention entityMention =
((EREEntityArgument) ereArgument).entityMention();
final Optional<EREEntity> containingEntity = doc.getEntityContaining(entityMention);
checkState(containingEntity.isPresent(), "Corrupt ERE key input lacks "
+ "entity for entity mention %s", entityMention);
final DocLevelEventArg arg = DocLevelEventArg.create(Symbol.from(doc.getDocId()),
Symbol.from(mapper.eventType(ereEventMentionType).get() + "." +
mapper.eventSubtype(ereEventMentionSubtype).get()),
mapper.eventRole(ereArgumentRole).get(),
containingEntity.get().getID());
ret.add(arg);
responseSet.add(arg);
} else if (ereArgument instanceof EREFillerArgument) {
// filler arguments are identified by the filler's own ID
final EREFillerArgument filler = (EREFillerArgument) ereArgument;
final DocLevelEventArg arg = DocLevelEventArg.create(Symbol.from(doc.getDocId()),
Symbol.from(mapper.eventType(ereEventMentionType).get() + "." +
mapper.eventSubtype(ereEventMentionSubtype).get()),
mapper.eventRole(ereArgumentRole).get(), filler.filler().getID());
ret.add(arg);
responseSet.add(arg);
} else {
throw new RuntimeException("Unknown ERE argument type " + ereArgument.getClass());
}
}
}
linking.add(responseSet.build());
}
return new EREResponsesAndLinking(ret.build(), linking.build());
}
@Override
public void finish() throws IOException {
log.info(
"Of {} gold event arguments, {} were discarded as non-entities",
allGoldArgs.size(), discarded.size());
for (final String errKey : discarded.elementSet()) {
if (discarded.count(errKey) > 0) {
log.info("Of {} gold {} arguments, {} discarded ",
+allGoldArgs.count(errKey), errKey, discarded.count(errKey));
}
}
}
}
/**
 * Converts KBP system output into a {@code ResponsesAndLinking} by aligning each
 * system response against the ERE gold standard: first against ERE entities and,
 * failing that, against ERE fillers. Responses matching neither are counted as
 * mention alignment failures and reported by {@link #finish()}.
 */
private static final class ResponsesAndLinkingFromKBPExtractor
    implements Function<EREDocAndResponses, ResponsesAndLinking>,
    Finishable {

  // Responses whose base filler aligned to neither an ERE entity nor an ERE filler,
  // keyed by type/role. (Bug fix: this was previously reported but never incremented.)
  private final Multiset<String> mentionAlignmentFailures = HashMultiset.create();
  // All system responses seen, keyed by type/role.
  private final Multiset<String> numResponses = HashMultiset.create();
  private final ImmutableMap<Symbol, File> ereMapping;
  private final CoreNLPXMLLoader coreNLPXMLLoader;
  private final boolean relaxUsingCORENLP;
  private final boolean useExactMatchForCoreNLPRelaxation;

  public ResponsesAndLinkingFromKBPExtractor(final Map<Symbol, File> ereMapping,
      final CoreNLPXMLLoader coreNLPXMLLoader, final boolean relaxUsingCORENLP,
      final boolean useExactMatchForCoreNLPRelaxation) {
    this.ereMapping = ImmutableMap.copyOf(ereMapping);
    this.coreNLPXMLLoader = coreNLPXMLLoader;
    this.relaxUsingCORENLP = relaxUsingCORENLP;
    this.useExactMatchForCoreNLPRelaxation = useExactMatchForCoreNLPRelaxation;
  }

  public ResponsesAndLinking apply(final EREDocAndResponses input) {
    final ImmutableSet.Builder<DocLevelEventArg> ret = ImmutableSet.builder();
    final Iterable<Response> responses = input.responses();
    final EREDocument doc = input.ereDoc();
    final Symbol ereID = Symbol.from(doc.getDocId());
    final Optional<CoreNLPDocument> coreNLPDoc;
    final EREAligner ereAligner;
    try {
      // A CoreNLP analysis is optional; when present it may be used to relax alignment.
      coreNLPDoc = ereMapping.containsKey(ereID)
          ? Optional.of(coreNLPXMLLoader.loadFrom(ereMapping.get(ereID)))
          : Optional.<CoreNLPDocument>absent();
      ereAligner = EREAligner
          .create(relaxUsingCORENLP, useExactMatchForCoreNLPRelaxation, doc, coreNLPDoc);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }

    final ImmutableMap.Builder<Response, DocLevelEventArg> responseToDocLevelArg =
        ImmutableMap.builder();
    for (final Response response : responses) {
      numResponses.add(errKey(response));
      // TODO match this using the type instead of first entity
      final ImmutableSet<EREEntity> candidateEntities = ereAligner.entitiesForResponse(response);
      if (candidateEntities.size() > 1) {
        log.warn(
            "Found {} candidate entities for base filler {}, using the first one!",
            candidateEntities.size(), response.baseFiller());
      }
      // TODO match on type instead of just taking the first for both filler and entities
      final EREEntity matchingEntity = Iterables.getFirst(candidateEntities, null);
      if (matchingEntity != null) {
        final DocLevelEventArg res =
            DocLevelEventArg.create(Symbol.from(doc.getDocId()), response.type(),
                response.role(), matchingEntity.getID());
        ret.add(res);
        responseToDocLevelArg.put(response, res);
      } else {
        // No entity matched; fall back to ERE fillers.
        final ImmutableSet<EREFillerArgument> fillers = ereAligner.fillersForResponse(response);
        final EREFillerArgument filler = Iterables.getFirst(fillers, null);
        if (fillers.size() > 1) {
          log.warn("Found multiple {} matching fillers for {}", fillers.size(),
              response.baseFiller());
        }
        if (filler != null) {
          final DocLevelEventArg res = DocLevelEventArg
              .create(Symbol.from(doc.getDocId()), response.type(), response.role(),
                  filler.filler().getID());
          ret.add(res);
          responseToDocLevelArg.put(response, res);
        } else {
          // Record the failure so finish() reports accurate alignment statistics.
          mentionAlignmentFailures.add(errKey(response));
          log.warn("Neither entity nor filler match found for " + response.baseFiller());
        }
      }
    }
    return new KBPResponsesAndLinking(ImmutableSet.copyOf(input.responses()),
        responseToDocLevelArg.build(), input.linking());
  }

  /** Key used to bucket per-type/role alignment statistics. */
  public String errKey(Response r) {
    return r.type() + "/" + r.role();
  }

  public void finish() {
    log.info(
        "Of {} system responses, got {} mention alignment failures",
        numResponses.size(), mentionAlignmentFailures.size());
    for (final String errKey : numResponses.elementSet()) {
      final int failures = mentionAlignmentFailures.count(errKey);
      if (failures > 0) {
        log.info("Of {} {} responses, {} mention alignment failures",
            numResponses.count(errKey), errKey, failures);
      }
    }
  }
}
}
/**
 * A set of document-level event arguments together with a linking: a grouping of
 * those arguments into event frames.
 */
interface ResponsesAndLinking {

  /** All document-level event arguments. */
  ImmutableSet<DocLevelEventArg> args();

  /** The event frames, each a set of document-level event arguments. */
  ImmutableSet<ImmutableSet<DocLevelEventArg>> linking();

  /** Extracts {@link #args()}; convenient for Guava collection transforms. */
  Function<ResponsesAndLinking, ImmutableSet<DocLevelEventArg>> argFunction =
      new Function<ResponsesAndLinking, ImmutableSet<DocLevelEventArg>>() {
        @Nullable
        @Override
        public ImmutableSet<DocLevelEventArg> apply(@Nullable final ResponsesAndLinking input) {
          return input.args();
        }
      };

  /** Extracts {@link #linking()}; convenient for Guava collection transforms. */
  Function<ResponsesAndLinking, ImmutableSet<ImmutableSet<DocLevelEventArg>>> linkingFunction =
      new Function<ResponsesAndLinking, ImmutableSet<ImmutableSet<DocLevelEventArg>>>() {
        @Nullable
        @Override
        public ImmutableSet<ImmutableSet<DocLevelEventArg>> apply(
            @Nullable final ResponsesAndLinking input) {
          return input.linking();
        }
      };
}
/**
 * {@link ResponsesAndLinking} backed by KBP system output: system responses mapped
 * to document-level event arguments, with the response linking translated to the
 * document level accordingly.
 */
final class KBPResponsesAndLinking implements ResponsesAndLinking {

  final ImmutableSet<Response> originalResponses;
  final ImmutableMap<Response, DocLevelEventArg> responseToDocLevelEventArg;
  final ImmutableSet<ImmutableSet<DocLevelEventArg>> responseSets;

  KBPResponsesAndLinking(final ImmutableSet<Response> originalResponses,
      final ImmutableMap<Response, DocLevelEventArg> responseToDocLevelEventArg,
      final ResponseLinking responseLinking) {
    this.originalResponses = originalResponses;
    this.responseToDocLevelEventArg = responseToDocLevelEventArg;
    // Translate each response set into the corresponding set of doc-level arguments;
    // responses with no doc-level mapping are silently dropped from the frame.
    final ImmutableSet.Builder<ImmutableSet<DocLevelEventArg>> translated =
        ImmutableSet.builder();
    for (final ResponseSet responseSet : responseLinking.responseSets()) {
      final ImmutableSet.Builder<DocLevelEventArg> docLevelFrame = ImmutableSet.builder();
      for (final Response response : responseSet) {
        final DocLevelEventArg mapped = responseToDocLevelEventArg.get(response);
        if (mapped != null) {
          docLevelFrame.add(mapped);
        }
      }
      translated.add(docLevelFrame.build());
    }
    this.responseSets = translated.build();
  }

  @Override
  public ImmutableSet<DocLevelEventArg> args() {
    return ImmutableSet.copyOf(responseToDocLevelEventArg.values());
  }

  @Override
  public ImmutableSet<ImmutableSet<DocLevelEventArg>> linking() {
    return responseSets;
  }
}
/**
 * {@link ResponsesAndLinking} derived directly from ERE gold annotation.
 * Both the argument set and the linking are defensively copied to immutable sets
 * at construction time.
 */
final class EREResponsesAndLinking implements ResponsesAndLinking {
final ImmutableSet<DocLevelEventArg> args;
final ImmutableSet<ImmutableSet<DocLevelEventArg>> linking;
EREResponsesAndLinking(final Iterable<DocLevelEventArg> args,
final Iterable<ImmutableSet<DocLevelEventArg>> linking) {
// Defensive immutable copies of the caller-supplied iterables.
this.args = ImmutableSet.copyOf(args);
this.linking = ImmutableSet.copyOf(linking);
}
@Override
public ImmutableSet<DocLevelEventArg> args() {
return args;
}
@Override
public ImmutableSet<ImmutableSet<DocLevelEventArg>> linking() {
return linking;
}
}
/**
 * Immutable value grouping an ERE gold-standard document with the system responses
 * and the response linking produced for that document.
 */
final class EREDocAndResponses {
private final EREDocument ereDoc;
private final Iterable<Response> responses;
private final ResponseLinking linking;
/**
 * @param ereDoc the ERE gold document; may not be null
 * @param responses the system responses for this document; may not be null
 * @param linking the linking over those responses; may not be null
 */
public EREDocAndResponses(final EREDocument ereDoc, final Iterable<Response> responses,
final ResponseLinking linking) {
this.ereDoc = checkNotNull(ereDoc);
this.responses = checkNotNull(responses);
this.linking = checkNotNull(linking);
}
public EREDocument ereDoc() {
return ereDoc;
}
public Iterable<Response> responses() {
return responses;
}
public ResponseLinking linking() {
return linking;
}
}
|
package example;
//-*- mode:java; encoding:utf8n; coding:utf-8 -*-
// vim:set fileencoding=utf-8:
//@homepage@
import java.awt.*;
import java.awt.event.*;
import java.util.*;
import java.util.List;
import javax.swing.*;
import javax.swing.event.EventListenerList;
/**
 * Demo panel: a column of collapsible {@link ExpansionPanel}s on the left side of a
 * split pane. When one panel expands, it moves to the center box and every panel
 * below it is collapsed into the south box.
 */
public class MainPanel extends JPanel {
  private final Box northBox = Box.createVerticalBox();
  private final Box centerBox = Box.createVerticalBox();
  private final Box southBox = Box.createVerticalBox();

  public MainPanel() {
    super(new BorderLayout());
    final JPanel accordion = new JPanel(new BorderLayout());
    final List<ExpansionPanel> panelList = makeList();
    // Re-distribute the panels whenever any of them changes expansion state.
    final ExpansionListener listener = new ExpansionListener() {
      public void expansionStateChanged(ExpansionEvent e) {
        initComps(panelList, e);
      }
    };
    for (ExpansionPanel panel : panelList) {
      northBox.add(panel);
      panel.addExpansionListener(listener);
    }
    accordion.add(northBox, BorderLayout.NORTH);
    accordion.add(centerBox);
    accordion.add(southBox, BorderLayout.SOUTH);
    accordion.setMinimumSize(new Dimension(120, 0));

    JSplitPane split = new JSplitPane();
    split.setLeftComponent(accordion);
    split.setRightComponent(new JScrollPane(new JTree()));
    add(split);
    setPreferredSize(new Dimension(320, 240));
  }

  /**
   * Redistributes the panels after an expansion event: the selected panel goes to
   * the center box, panels above it stay north, and panels below it are collapsed
   * into the south box. Visibility is toggled to avoid intermediate repaints.
   */
  public void initComps(List<ExpansionPanel> list, ExpansionEvent e) {
    setVisible(false);
    centerBox.removeAll();
    northBox.removeAll();
    southBox.removeAll();
    ExpansionPanel source = (ExpansionPanel) e.getSource();
    boolean belowSelection = false;
    for (ExpansionPanel panel : list) {
      if (panel == source && panel.isSelected()) {
        centerBox.add(panel);
        belowSelection = true;
      } else if (belowSelection) {
        panel.setSelected(false);
        southBox.add(panel);
      } else {
        panel.setSelected(false);
        northBox.add(panel);
      }
    }
    setVisible(true);
  }

  /** Builds the three demo panels: check boxes, a long label list, and radio buttons. */
  private List<ExpansionPanel> makeList() {
    return Arrays.<ExpansionPanel>asList(
        new ExpansionPanel("Panel1") {
          public Container makePanel() {
            Box box = Box.createVerticalBox();
            box.setBorder(BorderFactory.createEmptyBorder(5, 15, 5, 15));
            box.add(new JCheckBox("aaaa"));
            box.add(new JCheckBox("bbbbbbbbb"));
            return box;
          }
        },
        new ExpansionPanel("Panel2") {
          public Container makePanel() {
            Box box = Box.createVerticalBox();
            box.setBorder(BorderFactory.createEmptyBorder(5, 15, 5, 15));
            for (int i = 0; i < 16; i++) {
              box.add(new JLabel(String.format("%02d", i)));
            }
            return box;
          }
        },
        new ExpansionPanel("Panel3") {
          public Container makePanel() {
            Box box = Box.createVerticalBox();
            box.setBorder(BorderFactory.createEmptyBorder(5, 15, 5, 15));
            ButtonGroup group = new ButtonGroup();
            for (JRadioButton radio : Arrays.asList(
                new JRadioButton("aa"), new JRadioButton("bb"), new JRadioButton("cc"))) {
              box.add(radio);
              group.add(radio);
              radio.setSelected(true);
            }
            return box;
          }
        });
  }

  public static void main(String[] args) {
    EventQueue.invokeLater(new Runnable() {
      @Override public void run() {
        createAndShowGUI();
      }
    });
  }

  public static void createAndShowGUI() {
    try {
      UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
    } catch (Exception e) {
      e.printStackTrace();
    }
    JFrame frame = new JFrame("@title@");
    frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
    frame.getContentPane().add(new MainPanel());
    frame.pack();
    frame.setLocationRelativeTo(null);
    frame.setVisible(true);
  }
}
abstract class ExpansionPanel extends JPanel {
abstract public Container makePanel();
private final JButton button;
private final Container panel;
private final JScrollPane scroll = new JScrollPane();
private final String title;
private boolean openFlag = false;
/**
 * Builds the header button and the (initially hidden) scrollable content panel.
 * Clicking the header toggles the expanded state and notifies listeners.
 * NOTE(review): calls the overridable makePanel() from the constructor — this is the
 * intended idiom here (anonymous subclasses supply the body), but subclasses must not
 * rely on their own fields inside makePanel().
 */
public ExpansionPanel(String title) {
  super(new BorderLayout());
  this.title = title;
  // Header button: toggles expansion and fires an ExpansionEvent on each click.
  button = new JButton(new AbstractAction(title) {
    @Override public void actionPerformed(ActionEvent e) {
      openFlag = !openFlag;
      initPanel();
      fireExpansionEvent();
    }
  });
  panel = makePanel();
  scroll.getVerticalScrollBar().setUnitIncrement(25);
  scroll.getViewport().add(panel);
  add(button, BorderLayout.NORTH);
}
/** @return true when this panel is currently expanded */
public boolean isSelected() {
return openFlag;
}
/**
 * Programmatically expands or collapses this panel. Unlike a header-button click,
 * this does not fire an ExpansionEvent.
 */
public void setSelected(boolean flg) {
openFlag = flg;
initPanel();
}
/**
 * Shows or hides the scrollable content according to the expanded state and
 * recomputes this panel's preferred height (header only, or header + content).
 */
protected void initPanel() {
  int width = getSize().width;
  int height = button.getSize().height;
  if (openFlag) {
    add(scroll);
    height += panel.getSize().height;
  } else {
    remove(scroll);
  }
  setPreferredSize(new Dimension(width, height));
  revalidate();
}
/*/
//*/
|
// typica - A client library for Amazon Web Services
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
package com.xerox.amazonws.sqs;
import java.io.InputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.math.BigInteger;
import java.net.MalformedURLException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.JAXBException;
import com.xerox.amazonws.common.JAXBuddy;
import com.xerox.amazonws.typica.jaxb.DeleteMessageResponse;
import com.xerox.amazonws.typica.jaxb.DeleteQueueResponse;
import com.xerox.amazonws.typica.jaxb.GetVisibilityTimeoutResponse;
import com.xerox.amazonws.typica.jaxb.PeekMessageResponse;
import com.xerox.amazonws.typica.jaxb.ReceiveMessageResponse;
import com.xerox.amazonws.typica.jaxb.SendMessageResponse;
import com.xerox.amazonws.typica.jaxb.SetVisibilityTimeoutResponse;
/**
* This class provides an interface with the Amazon SQS message queue. It provides methods
 * for sending / receiving messages and deleting queues and messages on queues.
*
* @author D. Kavanagh
* @author developer@dotech.com
*/
public class MessageQueue extends QueueService {
public static final int MAX_MESSAGES = 600;
public static final int MAX_MESSAGE_BODIES_SIZE = 4096;
protected String queueId;
protected MessageQueue(String queueUrl, String awsAccessKeyId,
String awsSecretAccessKey, boolean isSecure,
String server) throws SQSException {
super(awsAccessKeyId, awsSecretAccessKey, isSecure, server);
queueId = queueUrl.substring(queueUrl.indexOf("
queueId = queueId.substring(queueId.indexOf("/")+1);
}
/**
* This method provides the URL for the message queue represented by this object.
*
* @return generated queue service url
*/
public URL getUrl() {
try {
return new URL(super.getUrl().toString()+queueId);
} catch (MalformedURLException ex) {
return null;
}
}
/**
* Sends a message to a specified queue. The message must be between 1 and 256K bytes long.
*
* @param msg the message to be sent (should be base64 encoded)
* @return the message id for the message just sent
*/
public String sendMessage(String msg) throws SQSException {
try {
String request = queueId+"/back";
URLConnection conn = makeRequest("PUT", request, super.headers);
conn.setDoOutput(true);
OutputStream oStr = conn.getOutputStream();
oStr.write(new String(msg).getBytes());
oStr.flush();
InputStream iStr = conn.getInputStream();
SendMessageResponse response = JAXBuddy.deserializeXMLStream(SendMessageResponse.class, iStr);
return response.getMessageId();
} catch (JAXBException ex) {
throw new SQSException("Problem parsing returned message.", ex);
} catch (MalformedURLException ex) {
throw new SQSException(ex.getMessage(), ex);
} catch (IOException ex) {
throw new SQSException(ex.getMessage(), ex);
}
}
/**
* Attempts to receive a message from the queue. The queue default visibility timeout
* is used.
*
* @return the message object
*/
public Message receiveMessage() throws SQSException {
Message amessage[] = receiveMessages(BigInteger.valueOf(1L), ((BigInteger) (null)));
if(amessage.length > 0)
return amessage[0];
else
return null;
}
/**
* Attempts to receive a message from the queue.
*
* @param visibilityTimeout the duration (in seconds) the retrieved message is hidden from
* subsequent calls to retrieve.
* @return the message object
*/
public Message receiveMessage(int visibilityTimeout) throws SQSException {
Message amessage[] = receiveMessages(BigInteger.valueOf(1L), BigInteger.valueOf(visibilityTimeout));
if(amessage.length > 0)
return amessage[0];
else
return null;
}
/**
* Attempts to retrieve a number of messages from the queue. If less than that are availble,
* the max returned is the number of messages in the queue, but not necessarily all messages
* in the queue will be returned. The queue default visibility timeout is used.
*
* @param numMessages the maximum number of messages to return
* @return an array of message objects
*/
public Message[] receiveMessages(int numMessages) throws SQSException {
return receiveMessages(BigInteger.valueOf(numMessages), ((BigInteger) (null)));
}
/**
* Attempts to retrieve a number of messages from the queue. If less than that are availble,
* the max returned is the number of messages in the queue, but not necessarily all messages
* in the queue will be returned.
*
* @param numMessages the maximum number of messages to return
* @param visibilityTimeout the duration (in seconds) the retrieved message is hidden from
* subsequent calls to retrieve.
* @return an array of message objects
*/
public Message[] receiveMessages(int numMessages, int visibilityTimeout) throws SQSException {
return receiveMessages(BigInteger.valueOf(numMessages), BigInteger.valueOf(visibilityTimeout));
}
/**
* Internal implementation of receiveMessages.
*
* @param
* @return
*/
protected Message[] receiveMessages(BigInteger numMessages, BigInteger visibilityTimeout) throws SQSException {
try {
String request = queueId+"/front?NumberOfMessages="+numMessages;
if (visibilityTimeout != null) request += "&VisibilityTimeout="+visibilityTimeout;
HttpURLConnection conn = makeRequest("GET", request, super.headers);
if (conn.getResponseCode() < 400) {
InputStream iStr = conn.getInputStream();
ReceiveMessageResponse response = JAXBuddy.deserializeXMLStream(ReceiveMessageResponse.class, iStr);
if (response.getMessages() == null) {
return new Message[0];
}
else {
ArrayList<Message> msgs = new ArrayList();
for (com.xerox.amazonws.typica.jaxb.Message msg : response.getMessages()) {
msgs.add(new Message(msg.getMessageId(), msg.getMessageBody()));
}
return msgs.toArray(new Message [msgs.size()]);
}
}
else {
return new Message[0];
}
} catch (JAXBException ex) {
throw new SQSException("Problem parsing returned message.", ex);
} catch (MalformedURLException ex) {
throw new SQSException(ex.getMessage(), ex);
} catch (IOException ex) {
throw new SQSException(ex.getMessage(), ex);
}
}
/**
* Returns a specified message. This does not affect and is not affected by the visibility
* timeout of either the queue or the message.
*
* @param
* @return
*/
public Message peekMessage(String msgId) throws SQSException {
try {
InputStream iStr = makeRequest("GET", queueId+"/"+msgId, super.headers).getInputStream();
PeekMessageResponse response = JAXBuddy.deserializeXMLStream(PeekMessageResponse.class, iStr);
com.xerox.amazonws.typica.jaxb.Message msg = response.getMessage();
if (msg == null) {
return null;
}
else {
return new Message(msg.getMessageId(), msg.getMessageBody());
}
} catch (JAXBException ex) {
throw new SQSException("Problem parsing returned message.", ex);
} catch (MalformedURLException ex) {
throw new SQSException(ex.getMessage(), ex);
} catch (IOException ex) {
throw new SQSException(ex.getMessage(), ex);
}
}
/**
* Deletes the message identified by message object on the queue this object represents.
*
* @param msg the message to be deleted
*/
public void deleteMessage(Message msg) throws SQSException {
deleteMessage(msg.getMessageId());
}
/**
* Deletes the message identified by msgid on the queue this object represents.
*
* @param msgId the id of the message to be deleted
*/
public void deleteMessage(String msgId) throws SQSException {
try {
HttpURLConnection conn = makeRequest("DELETE", queueId+"/"+msgId, super.headers);
if (conn.getResponseCode() < 400) {
InputStream iStr = conn.getInputStream();
DeleteMessageResponse response = JAXBuddy.deserializeXMLStream(DeleteMessageResponse.class, iStr);
}
else {
throw new SQSException("Error deleting message id="+msgId);
}
} catch (JAXBException ex) {
throw new SQSException("Problem parsing returned message.", ex);
} catch (MalformedURLException ex) {
throw new SQSException(ex.getMessage(), ex);
} catch (IOException ex) {
throw new SQSException(ex.getMessage(), ex);
}
}
/**
* Deletes the message queue represented by this object.
*/
public void deleteQueue() throws SQSException {
int respCode;
try {
HttpURLConnection conn = makeRequest("DELETE", queueId, super.headers);
if ((respCode = conn.getResponseCode()) < 400) {
InputStream iStr = conn.getInputStream();
DeleteQueueResponse response = JAXBuddy.deserializeXMLStream(DeleteQueueResponse.class, iStr);
}
else {
throw new SQSException("Error deleting queue, response code = "+respCode);
}
} catch (JAXBException ex) {
throw new SQSException("Problem parsing returned message.", ex);
} catch (MalformedURLException ex) {
throw new SQSException(ex.getMessage(), ex);
} catch (IOException ex) {
throw new SQSException(ex.getMessage(), ex);
}
}
/**
* Gets the visibility timeout for the queue.
*/
public int getVisibilityTimeout() throws SQSException {
try {
String request = queueId+"/";
HttpURLConnection conn = makeRequest("GET", request, super.headers);
if (conn.getResponseCode() < 400) {
InputStream iStr = conn.getInputStream();
GetVisibilityTimeoutResponse response = JAXBuddy.deserializeXMLStream(GetVisibilityTimeoutResponse.class, iStr);
if (response.getResponseStatus().getStatusCode().equals("Success")) {
return response.getVisibilityTimeout().intValue();
}
else {
throw new SQSException("Error getting timeout. Response msg = "+response.getResponseStatus().getMessage());
}
}
else {
throw new SQSException("Error getting timeout. Response code = "+conn.getResponseCode());
}
} catch (JAXBException ex) {
throw new SQSException("Problem getting the visilibity timeout.", ex);
} catch (MalformedURLException ex) {
throw new SQSException(ex.getMessage(), ex);
} catch (IOException ex) {
throw new SQSException(ex.getMessage(), ex);
}
}
/**
* Placeholder. Not implemented.
*/
public void setVisibilityTimeout(int timeout) throws SQSException {
try {
String request = queueId+"?VisibilityTimeout="+timeout;
HttpURLConnection conn = makeRequest("PUT", request, super.headers);
if (conn.getResponseCode() < 400) {
InputStream iStr = conn.getInputStream();
SetVisibilityTimeoutResponse response = JAXBuddy.deserializeXMLStream(SetVisibilityTimeoutResponse.class, iStr);
if (response.getResponseStatus().getStatusCode().equals("Success")) {
return;
}
else {
throw new SQSException("Error setting timeout. Response msg = "+response.getResponseStatus().getMessage());
}
}
else {
throw new SQSException("Error setting timeout. Response code = "+conn.getResponseCode());
}
} catch (JAXBException ex) {
throw new SQSException("Problem setting the visibility timeout.", ex);
} catch (MalformedURLException ex) {
throw new SQSException(ex.getMessage(), ex);
} catch (IOException ex) {
throw new SQSException(ex.getMessage(), ex);
}
}
/**
* Placeholder. Not implemented by REST.
*/
public void setVisibilityTimeout(String msgId, int timeout) throws SQSException {
}
/**
* Placeholder. Not implemented by REST.
*/
public void setVisibilityTimeout(String[] msgIds, int timeout) throws SQSException {
}
/* grants not supported in REST at this time
public void addGrantByEmail(String, String) throws Exception {
}
public void addGrantByCustomerId(String, String, String) throws Exception {
}
public void removeGrantByEmailAddress(String, String) throws Exception {
}
public void removeGrantByCustomerId(String, String) throws Exception {
}
public Grant[] listGrants(Grantee, String) throws Exception {
}
*/
public static List<MessageQueue> createList(String [] queueUrls, String awsAccessKeyId, String awsSecretAccessKey, boolean isSecure, String server) throws SQSException {
ArrayList<MessageQueue> ret = new ArrayList<MessageQueue>();
for (int i=0; i<queueUrls.length; i++) {
ret.add(new MessageQueue(queueUrls[i], awsAccessKeyId, awsSecretAccessKey, isSecure, server));
}
return ret;
}
}
|
// Note: the original stuff was well packaged with Java style,
// but I (the main developer) prefer to keep it simpler for myself:
// all sources and examples live in the same place
// Removed stuff:
// "package net.sourceforge.mediainfo;"
// directory was /net/sourceforge/mediainfo
import static java.util.Collections.singletonMap;
import java.lang.reflect.Method;
import com.sun.jna.FunctionMapper;
import com.sun.jna.Library;
import com.sun.jna.Native;
import com.sun.jna.NativeLibrary;
import com.sun.jna.Pointer;
import com.sun.jna.WString;
class MediaInfo
{
static
{
// libmediainfo for linux depends on libzen
try
{
// We need to load dependencies first, because we know where our native libs are (e.g. Java Web Start Cache).
// If we do not, the system will look for dependencies, but only in the library path.
String os=System.getProperty("os.name");
if (os!=null && !os.toLowerCase().startsWith("windows") && !os.toLowerCase().startsWith("mac"))
NativeLibrary.getInstance("zen");
}
catch (LinkageError e)
{
//Logger.getLogger(MediaInfo.class.getName()).warning("Failed to preload libzen");
}
}
//Internal stuff
interface MediaInfoDLL_Internal extends Library
{
MediaInfoDLL_Internal INSTANCE = (MediaInfoDLL_Internal) Native.loadLibrary("mediainfo", MediaInfoDLL_Internal.class, singletonMap(OPTION_FUNCTION_MAPPER, new FunctionMapper()
{
@Override
public String getFunctionName(NativeLibrary lib, Method method)
{
// MediaInfo_New(), MediaInfo_Open() ...
return "MediaInfo_" + method.getName();
}
}
));
//Constructor/Destructor
Pointer New();
void Delete(Pointer Handle);
//File
int Open(Pointer Handle, WString file);
void Close(Pointer Handle);
//Infos
WString Inform(Pointer Handle);
WString Get(Pointer Handle, int StreamKind, int StreamNumber, WString parameter, int infoKind, int searchKind);
WString GetI(Pointer Handle, int StreamKind, int StreamNumber, int parameterIndex, int infoKind);
int Count_Get(Pointer Handle, int StreamKind, int StreamNumber);
//Options
WString Option(Pointer Handle, WString option, WString value);
}
private Pointer Handle;
public enum StreamKind {
General,
Video,
Audio,
Text,
Chapters,
Image,
Menu;
}
//Enums
public enum InfoKind {
/**
* Unique name of parameter.
*/
Name,
/**
* Value of parameter.
*/
Text,
/**
* Unique name of measure unit of parameter.
*/
Measure,
Options,
/**
* Translated name of parameter.
*/
Name_Text,
/**
* Translated name of measure unit.
*/
Measure_Text,
/**
* More information about the parameter.
*/
Info,
/**
* How this parameter is supported, could be N (No), B (Beta), R (Read only), W
* (Read/Write).
*/
HowTo,
/**
* Domain of this piece of information.
*/
Domain;
}
//Constructor/Destructor
public MediaInfo()
{
Handle = MediaInfoDLL_Internal.INSTANCE.New();
}
public void dispose()
{
if (Handle == null)
throw new IllegalStateException();
MediaInfoDLL_Internal.INSTANCE.Delete(Handle);
Handle = null;
}
@Override
protected void finalize() throws Throwable
{
if (Handle != null)
dispose();
}
//File
/**
* Open a file and collect information about it (technical information and tags).
*
* @param file full name of the file to open
* @return 1 if file was opened, 0 if file was not not opened
*/
public int Open(String File_Name)
{
return MediaInfoDLL_Internal.INSTANCE.Open(Handle, new WString(File_Name));
}
/**
* Close a file opened before with Open().
*
*/
public void Close()
{
MediaInfoDLL_Internal.INSTANCE.Close(Handle);
}
//Information
/**
* Get all details about a file.
*
* @return All details about a file in one string
*/
public String Inform()
{
return MediaInfoDLL_Internal.INSTANCE.Inform(Handle).toString();
}
/**
* Get a piece of information about a file (parameter is a string).
*
* @param StreamKind Kind of Stream (general, video, audio...)
* @param StreamNumber Stream number in Kind of Stream (first, second...)
* @param parameter Parameter you are looking for in the Stream (Codec, width, bitrate...),
* in string format ("Codec", "Width"...)
* @return a string about information you search, an empty string if there is a problem
*/
public String Get(StreamKind StreamKind, int StreamNumber, String parameter)
{
return Get(StreamKind, StreamNumber, parameter, InfoKind.Text, InfoKind.Name);
}
/**
* Get a piece of information about a file (parameter is a string).
*
* @param StreamKind Kind of Stream (general, video, audio...)
* @param StreamNumber Stream number in Kind of Stream (first, second...)
* @param parameter Parameter you are looking for in the Stream (Codec, width, bitrate...),
* in string format ("Codec", "Width"...)
* @param infoKind Kind of information you want about the parameter (the text, the measure,
* the help...)
* @param searchKind Where to look for the parameter
*/
public String Get(StreamKind StreamKind, int StreamNumber, String parameter, InfoKind infoKind)
{
return Get(StreamKind, StreamNumber, parameter, infoKind, InfoKind.Name);
}
/**
* Get a piece of information about a file (parameter is a string).
*
* @param StreamKind Kind of Stream (general, video, audio...)
* @param StreamNumber Stream number in Kind of Stream (first, second...)
* @param parameter Parameter you are looking for in the Stream (Codec, width, bitrate...),
* in string format ("Codec", "Width"...)
* @param infoKind Kind of information you want about the parameter (the text, the measure,
* the help...)
* @param searchKind Where to look for the parameter
* @return a string about information you search, an empty string if there is a problem
*/
public String Get(StreamKind StreamKind, int StreamNumber, String parameter, InfoKind infoKind, InfoKind searchKind)
{
return MediaInfoDLL_Internal.INSTANCE.Get(Handle, StreamKind.ordinal(), StreamNumber, new WString(parameter), infoKind.ordinal(), searchKind.ordinal()).toString();
}
/**
* Get a piece of information about a file (parameter is an integer).
*
* @param StreamKind Kind of Stream (general, video, audio...)
* @param StreamNumber Stream number in Kind of Stream (first, second...)
* @param parameter Parameter you are looking for in the Stream (Codec, width, bitrate...),
* in integer format (first parameter, second parameter...)
* @return a string about information you search, an empty string if there is a problem
*/
public String get(StreamKind StreamKind, int StreamNumber, int parameterIndex)
{
return Get(StreamKind, StreamNumber, parameterIndex, InfoKind.Text);
}
/**
* Get a piece of information about a file (parameter is an integer).
*
* @param StreamKind Kind of Stream (general, video, audio...)
* @param StreamNumber Stream number in Kind of Stream (first, second...)
* @param parameter Parameter you are looking for in the Stream (Codec, width, bitrate...),
* in integer format (first parameter, second parameter...)
* @param infoKind Kind of information you want about the parameter (the text, the measure,
* the help...)
* @return a string about information you search, an empty string if there is a problem
*/
public String Get(StreamKind StreamKind, int StreamNumber, int parameterIndex, InfoKind infoKind)
{
return MediaInfoDLL_Internal.INSTANCE.GetI(Handle, StreamKind.ordinal(), StreamNumber, parameterIndex, infoKind.ordinal()).toString();
}
/**
* Count of Streams of a Stream kind (StreamNumber not filled), or count of piece of
* information in this Stream.
*
* @param StreamKind Kind of Stream (general, video, audio...)
* @return number of Streams of the given Stream kind
*/
public int Count_Get(StreamKind StreamKind)
{
return MediaInfoDLL_Internal.INSTANCE.Count_Get(Handle, StreamKind.ordinal(), -1);
}
/**
* Count of Streams of a Stream kind (StreamNumber not filled), or count of piece of
* information in this Stream.
*
* @param StreamKind Kind of Stream (general, video, audio...)
* @param StreamNumber Stream number in this kind of Stream (first, second...)
* @return number of Streams of the given Stream kind
*/
public int Count_Get(StreamKind StreamKind, int StreamNumber)
{
return MediaInfoDLL_Internal.INSTANCE.Count_Get(Handle, StreamKind.ordinal(), StreamNumber);
}
//Options
/**
* Configure or get information about MediaInfo.
*
* @param Option The name of option
* @return Depends on the option: by default "" (nothing) means No, other means Yes
*/
public String Option(String Option)
{
return MediaInfoDLL_Internal.INSTANCE.Option(Handle, new WString(Option), new WString("")).toString();
}
/**
* Configure or get information about MediaInfo.
*
* @param Option The name of option
* @param Value The value of option
* @return Depends on the option: by default "" (nothing) means No, other means Yes
*/
public String Option(String Option, String Value)
{
return MediaInfoDLL_Internal.INSTANCE.Option(Handle, new WString(Option), new WString(Value)).toString();
}
/**
 * Configure or get information about MediaInfo (static version).
 *
 * <p>Works on a temporary native handle instead of an opened instance.</p>
 *
 * @param Option The name of the option
 * @return Depends on the option: by default "" (nothing) means No, other means Yes
 */
public static String Option_Static(String Option)
{
    // Delegate to the two-argument static form with an empty value.
    return Option_Static(Option, "");
}
/**
 * Configure or get information about MediaInfo (static version).
 *
 * @param Option The name of the option
 * @param Value The value of the option
 * @return Depends on the option: by default "" (nothing) means No, other means Yes
 */
public static String Option_Static(String Option, String Value)
{
    // NOTE(review): New() allocates a fresh native MediaInfo handle on every call and
    // nothing releases it here — confirm whether MediaInfoDLL_Internal exposes a
    // matching Delete(...) that should be invoked after the Option call.
    final WString option = new WString(Option);
    final WString value = new WString(Value);
    return MediaInfoDLL_Internal.INSTANCE.Option(MediaInfoDLL_Internal.INSTANCE.New(), option, value).toString();
}
}
|
// modification, are permitted provided that the following conditions are met:
// documentation and/or other materials provided with the distribution.
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.db;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Unit tests for {@code DbUtil}'s close(...) and getFirst*(...) helpers.
 *
 * <p>Fixes over the previous revision: the {@code GetFirstInt} test methods were
 * copy-pasted with "getFirstLong" names, verified the wrong accessor
 * ({@code getLong(0)} instead of {@code getInt}), and the {@code CloseStatement}
 * tests were missing the {@code verify(close())} calls that the parallel
 * {@code CloseResultSet} tests have.</p>
 */
class DbUtilTest {

	@Nested
	@DisplayName("tests for close(Statement statement)")
	class CloseStatement {

		@Test
		void close_with_null_statement() {
			// Must be a silent no-op, not a NullPointerException.
			DbUtil.close((Statement) null);
		}

		@Test
		void close_with_thrown_exception() throws SQLException {
			Statement mock = Mockito.mock(Statement.class);
			Mockito.doThrow(SQLException.class).when(mock).close();

			// Must swallow the SQLException.
			DbUtil.close(mock);

			// Mock verify — close() must still have been attempted.
			Mockito.verify(mock, Mockito.times(1)).close();
		}

		@Test
		void close_without_exception() throws SQLException {
			Statement mock = Mockito.mock(Statement.class);

			DbUtil.close(mock);

			// Mock verify
			Mockito.verify(mock, Mockito.times(1)).close();
		}
	}

	@Nested
	@DisplayName("tests for close(ResultSet resultSet)")
	class CloseResultSet {

		@Test
		void close_with_null_resultSet() {
			// Must be a silent no-op, not a NullPointerException.
			DbUtil.close((ResultSet) null);
		}

		@Test
		void close_with_thrown_exception() throws SQLException {
			ResultSet mock = Mockito.mock(ResultSet.class);
			Mockito.doThrow(SQLException.class).when(mock).close();

			// Must swallow the SQLException.
			DbUtil.close(mock);

			// Mock verify
			Mockito.verify(mock, Mockito.times(1)).close();
		}

		@Test
		void close_without_exception() throws SQLException {
			ResultSet mock = Mockito.mock(ResultSet.class);

			DbUtil.close(mock);

			// Mock verify
			Mockito.verify(mock, Mockito.times(1)).close();
		}
	}

	@Nested
	@DisplayName("tests for getFirstLong(ResultSet resultSet)")
	class GetFirstLong {

		@Test
		void getFirstLong_with_empty_resultset() throws SQLException {
			final long expected = -1L;

			ResultSet mock = Mockito.mock(ResultSet.class);
			Mockito.when(mock.next()).thenReturn(Boolean.FALSE);

			final long actual = DbUtil.getFirstLong(mock);

			// asserts
			assertEquals(expected, actual);

			// Mock verify — no column may be read from an empty result set.
			Mockito.verify(mock, Mockito.times(1)).next();
			Mockito.verify(mock, Mockito.never()).getLong(Mockito.anyInt());
		}

		@Test
		void getFirstLong_with_filled_resultset() throws SQLException {
			final long expected = 23L;

			ResultSet mock = Mockito.mock(ResultSet.class);
			Mockito.when(mock.next()).thenReturn(Boolean.TRUE);
			Mockito.when(mock.getLong(1)).thenReturn(expected);

			final long actual = DbUtil.getFirstLong(mock);

			// asserts
			assertEquals(expected, actual);

			// Mock verify
			Mockito.verify(mock, Mockito.times(1)).next();
			Mockito.verify(mock, Mockito.times(1)).getLong(1);
		}
	}

	@Nested
	@DisplayName("tests for getFirstInt(ResultSet resultSet)")
	class GetFirstInt {

		@Test
		void getFirstInt_with_empty_resultset() throws SQLException {
			final int expected = -1;

			ResultSet mock = Mockito.mock(ResultSet.class);
			Mockito.when(mock.next()).thenReturn(Boolean.FALSE);

			final long actual = DbUtil.getFirstInt(mock);

			// asserts
			assertEquals(expected, actual);

			// Mock verify — getFirstInt reads via getInt, so that is what must stay uncalled.
			Mockito.verify(mock, Mockito.times(1)).next();
			Mockito.verify(mock, Mockito.never()).getInt(Mockito.anyInt());
		}

		@Test
		void getFirstInt_with_filled_resultset() throws SQLException {
			final int expected = 23;

			ResultSet mock = Mockito.mock(ResultSet.class);
			Mockito.when(mock.next()).thenReturn(Boolean.TRUE);
			Mockito.when(mock.getInt(1)).thenReturn(expected);

			final long actual = DbUtil.getFirstInt(mock);

			// asserts
			assertEquals(expected, actual);

			// Mock verify
			Mockito.verify(mock, Mockito.times(1)).next();
			Mockito.verify(mock, Mockito.times(1)).getInt(1);
		}
	}
}
|
package org.xbill.DNS;
import java.io.*;
import java.util.*;
import org.xbill.DNS.utils.*;
/**
* A cache of DNS records. The cache obeys TTLs, so items are purged after
* their validity period is complete. Negative answers are cached, to
* avoid repeated failed DNS queries. The credibility of each RRset is
* maintained, so that more credible records replace less credible records,
* and lookups can specify the minimum credibility of data they are requesting.
* @see RRset
* @see Credibility
*
* @author Brian Wellington
*/
public class Cache extends NameSet {

/**
 * One cache entry: either a positive RRset or a negative (NXDOMAIN / NXRRSET)
 * marker. A null rrset marks a negative entry; within a negative entry,
 * type == 0 marks NXDOMAIN (whole name absent) as opposed to NXRRSET.
 */
private class Element {
	Name name;
	RRset rrset;		// null for negative entries
	short type;		// 0 marks an NXDOMAIN entry
	byte credibility;
	long timeIn;		// wall-clock ms when stored/refreshed
	long ttl;		// seconds; 0-TTL entries get special handling below
	int srcid;		// hashCode of the source object (e.g. a Message)
	Thread tid;		// thread that inserted the entry (for 0-TTL scoping)

	// Common initializer shared by all constructors.
	private void
	setValues(Name name, RRset rrset, short type, byte credibility,
		  long ttl, int srcid)
	{
		this.name = name;
		this.rrset = rrset;
		this.type = type;
		this.credibility = credibility;
		this.timeIn = System.currentTimeMillis();
		this.ttl = ttl;
		this.srcid = srcid;
		this.tid = Thread.currentThread();
	}

	/** Creates a negative entry (no data) for the given name/type. */
	public
	Element(Name name, long ttl, byte cred, int src, short type) {
		setValues(name, null, type, cred, ttl, src);
	}

	/** Creates a positive entry seeded with a single record. */
	public
	Element(Record r, byte cred, int src) {
		RRset set = new RRset();
		set.addRR(r);
		setValues(r.getName(), set, set.getType(), cred, r.getTTL(),
			  src);
	}

	/** Creates a positive entry from a whole RRset. */
	public
	Element(RRset r, byte cred, int src) {
		setValues(r.getName(), r, r.getType(), cred, r.getTTL(), src);
	}

	/** Adds a record to this entry's RRset and refreshes its timestamp. */
	public final void
	update(Record r) {
		rrset.addRR(r);
		timeIn = System.currentTimeMillis();
		if (ttl < 0)
			// Treat the record's 32-bit TTL as unsigned.
			ttl = (long)r.getTTL() & 0xFFFFFFFFL;
	}

	public void
	deleteRecord(Record r) {
		rrset.deleteRR(r);
	}

	/** True once this entry's TTL (seconds) has elapsed since timeIn. */
	public final boolean
	expiredTTL() {
		long now = System.currentTimeMillis();
		long expire = timeIn + (1000 * ttl);
		return (now > expire);
	}

	/** 0-TTL entry inserted by the current thread (usable once, by us). */
	public final boolean
	TTL0Ours() {
		return (ttl == 0 && tid == Thread.currentThread());
	}

	/** 0-TTL entry inserted by another thread (not ours to consume). */
	public final boolean
	TTL0NotOurs() {
		return (ttl == 0 && tid != Thread.currentThread());
	}

	public final String
	toString() {
		StringBuffer sb = new StringBuffer();
		if (rrset != null)
			sb.append(rrset);
		else if (type == 0)
			sb.append("NXDOMAIN " + name);
		else
			sb.append("NXRRSET " + name + " " + Type.string(type));
		sb.append(" cl = ");
		sb.append(credibility);
		return sb.toString();
	}
}

/**
 * Background daemon that periodically walks the cache and removes entries
 * whose TTL has expired. The sweep interval is cleanInterval minutes.
 */
private class CacheCleaner extends Thread {
	public
	CacheCleaner() {
		setDaemon(true);
		setName("CacheCleaner");
		// Previously the thread was constructed but never started, so no
		// cleaning ever happened; start it here as the field usage intends.
		start();
	}

	public void
	run() {
		while (true) {
			long now = System.currentTimeMillis();
			long next = now + cleanInterval * 60 * 1000;
			while (now < next) {
				try {
					Thread.sleep(next - now);
				}
				catch (InterruptedException e) {
				}
				// Refresh after every sleep. The old code only
				// updated `now` on interruption, so a normal
				// sleep return left `now < next` true forever
				// and the sweep below never ran.
				now = System.currentTimeMillis();
			}
			Iterator it = names();
			while (it.hasNext()) {
				Name name = (Name) it.next();
				TypeMap tm = findName(name);
				if (tm == null)
					continue;
				Object [] elements;
				elements = tm.getAll();
				if (elements == null)
					continue;
				for (int i = 0; i < elements.length; i++) {
					Element element = (Element) elements[i];
					// Leave fresh 0-TTL entries alone; their
					// owning thread still needs them.
					if (element.ttl == 0 &&
					    element.timeIn >= now)
						continue;
					if (element.expiredTTL())
						removeSet(name, element.type,
							  element);
				}
			}
		}
	}
}

private Verifier verifier;		// optional DNSSEC verifier
private boolean secure;			// when true, only verified data is stored
private int maxncache = -1;		// cap (seconds) on negative TTLs; <0 = no cap
private long cleanInterval = 30;	// sweep interval in minutes
private Thread cleaner;			// background expiration thread
private short dclass;			// DNS class of this cache

/**
 * Creates an empty Cache
 *
 * @param dclass The dns class of this cache
 * @see DClass
 */
public
Cache(short dclass) {
	super(true);
	cleaner = new CacheCleaner();
	this.dclass = dclass;
}

/**
 * Creates an empty Cache for class IN.
 * @see DClass
 */
public
Cache() {
	this(DClass.IN);
}

/** Empties the Cache. */
public void
clearCache() {
	clear();
}

/**
 * Creates a Cache which initially contains all records in the specified file.
 */
public
Cache(String file) throws IOException {
	super(true);
	cleaner = new CacheCleaner();
	Master m = new Master(file);
	Record record;
	while ((record = m.nextRecord()) != null)
		addRecord(record, Credibility.HINT, m);
}

/**
 * Adds a record to the Cache.
 * @param r The record to be added
 * @param cred The credibility of the record
 * @param o The source of the record (this could be a Message, for example)
 * @see Record
 */
public void
addRecord(Record r, byte cred, Object o) {
	Name name = r.getName();
	short type = r.getRRsetType();
	if (!Type.isRR(type))
		return;
	int src = (o != null) ? o.hashCode() : 0;
	Element element = (Element) findExactSet(name, type);
	// More credible data replaces what we have; equally credible data from a
	// different source replaces the set, from the same source it accumulates.
	if (element == null || cred > element.credibility)
		addSet(name, type, element = new Element(r, cred, src));
	else if (cred == element.credibility) {
		if (element.srcid != src) {
			element.rrset.clear();
			element.srcid = src;
		}
		element.update(r);
	}
}

/**
 * Adds an RRset to the Cache.
 * @param rrset The RRset to be added
 * @param cred The credibility of these records
 * @param o The source of this RRset (this could be a Message, for example)
 * @see RRset
 */
public void
addRRset(RRset rrset, byte cred, Object o) {
	Name name = rrset.getName();
	short type = rrset.getType();
	int src = (o != null) ? o.hashCode() : 0;
	if (verifier != null)
		rrset.setSecurity(verifier.verify(rrset, this));
	// In secure mode, silently drop anything that failed verification.
	if (secure && rrset.getSecurity() < DNSSEC.Secure)
		return;
	Element element = (Element) findExactSet(name, type);
	if (element == null || cred > element.credibility)
		addSet(name, type, new Element(rrset, cred, src));
}

/**
 * Adds a negative entry to the Cache.
 * @param rcode The response code (NXDOMAIN forces a type-0 entry)
 * @param name The name of the negative entry
 * @param type The type of the negative entry
 * @param ttl The ttl of the negative entry
 * @param cred The credibility of the negative entry
 * @param o The source of this data
 */
public void
addNegative(short rcode, Name name, short type, long ttl, byte cred, Object o) {
	if (rcode == Rcode.NXDOMAIN)
		type = 0;
	int src = (o != null) ? o.hashCode() : 0;
	Element element = (Element) findExactSet(name, type);
	if (element == null || cred > element.credibility)
		addSet(name, type, new Element(name, ttl, cred, src, type));
}

// Debug helper, active when the "verbosecache" option is set.
private void
logLookup(Name name, short type, String msg) {
	System.err.println("lookupRecords(" + name + " " +
			   Type.string(type) + "): " + msg);
}

/**
 * Looks up Records in the Cache. This follows CNAMEs and handles negatively
 * cached data.
 * @param name The name to look up
 * @param type The type to look up
 * @param minCred The minimum acceptable credibility
 * @return A SetResponse object
 * @see SetResponse
 * @see Credibility
 */
public SetResponse
lookupRecords(Name name, short type, byte minCred) {
	SetResponse cr = null;
	boolean verbose = Options.check("verbosecache");
	Object o = findSets(name, type);

	if (verbose)
		logLookup(name, type, "Starting");

	if (o == null || o instanceof TypeMap) {
		/*
		 * The name exists, but the type was not found. Or, the
		 * name does not exist and no parent does either. Punt.
		 */
		if (verbose)
			logLookup(name, type, "no information found");
		return SetResponse.ofType(SetResponse.UNKNOWN);
	}

	Object [] objects;
	if (o instanceof Element)
		objects = new Object[] {o};
	else
		objects = (Object[]) o;

	/* First pass: discard unusable elements (expired, stale 0-TTL,
	 * insufficiently credible) and count what remains. */
	int nelements = 0;
	for (int i = 0; i < objects.length; i++) {
		Element element = (Element) objects[i];
		if (element.TTL0Ours()) {
			// One-shot entry owned by this thread: consume it.
			removeSet(name, type, element);
			nelements++;
		}
		else if (element.TTL0NotOurs()) {
			if (verbose) {
				logLookup(name, type, element.toString());
				logLookup(name, type, "0 TTL: ignoring");
			}
			objects[i] = null;
		}
		else if (element.expiredTTL()) {
			if (verbose) {
				logLookup(name, type, element.toString());
				logLookup(name, type, "expired: ignoring");
			}
			removeSet(name, type, element);
			objects[i] = null;
		}
		else if (element.credibility < minCred) {
			if (verbose) {
				logLookup(name, type, element.toString());
				logLookup(name, type, "not credible: ignoring");
			}
			objects[i] = null;
		}
		else {
			nelements++;
		}
	}
	if (nelements == 0) {
		/* We have data, but can't use it. Punt. */
		if (verbose)
			logLookup(name, type, "no useful data found");
		return SetResponse.ofType(SetResponse.UNKNOWN);
	}

	/*
	 * We have something at the name. It could be the answer,
	 * a CNAME, DNAME, or NS, or a negative cache entry.
	 *
	 * Ignore wildcards, since it's pretty unlikely that any will be
	 * cached. The occasional extra query is easily balanced by the
	 * reduced number of lookups.
	 */
	for (int i = 0; i < objects.length; i++) {
		if (objects[i] == null)
			continue;
		Element element = (Element) objects[i];
		if (verbose)
			logLookup(name, type, element.toString());
		RRset rrset = element.rrset;

		/* Is this a negatively cached entry? */
		if (rrset == null) {
			/*
			 * If this is an NXDOMAIN entry, return NXDOMAIN.
			 */
			if (element.type == 0) {
				if (verbose)
					logLookup(name, type, "NXDOMAIN");
				return SetResponse.ofType(SetResponse.NXDOMAIN);
			}

			/*
			 * If we're not looking for type ANY, return NXRRSET.
			 * Otherwise ignore this.
			 */
			if (type != Type.ANY) {
				if (verbose)
					logLookup(name, type, "NXRRSET");
				return SetResponse.ofType(SetResponse.NXRRSET);
			} else {
				if (verbose)
					logLookup(name, type,
						  "ANY query; " +
						  "ignoring NXRRSET");
				continue;
			}
		}

		short rtype = rrset.getType();
		Name rname = rrset.getName();
		if (name.equals(rname)) {
			if (type != Type.CNAME && type != Type.ANY &&
			    rtype == Type.CNAME)
			{
				if (verbose)
					logLookup(name, type, "cname");
				return new SetResponse(SetResponse.CNAME,
						       rrset);
			} else if (type != Type.NS && type != Type.ANY &&
				   rtype == Type.NS)
			{
				if (verbose)
					logLookup(name, type,
						  "exact delegation");
				return new SetResponse(SetResponse.DELEGATION,
						       rrset);
			} else {
				if (verbose)
					logLookup(name, type, "exact match");
				if (cr == null)
					cr = new SetResponse
						(SetResponse.SUCCESSFUL);
				cr.addRRset(rrset);
			}
		}
		else if (name.subdomain(rname)) {
			// Data cached at a parent of the query name: only
			// DNAME redirections and NS delegations apply.
			if (rtype == Type.DNAME) {
				if (verbose)
					logLookup(name, type, "dname");
				return new SetResponse(SetResponse.DNAME,
						       rrset);
			} else if (rtype == Type.NS) {
				if (verbose)
					logLookup(name, type,
						  "parent delegation");
				return new SetResponse(SetResponse.DELEGATION,
						       rrset);
			} else {
				if (verbose)
					logLookup(name, type,
						  "ignoring rrset (" +
						  rname + " " +
						  Type.string(rtype) + ")");
			}
		} else {
			if (verbose)
				logLookup(name, type,
					  "ignoring rrset (" + rname + " " +
					  Type.string(rtype) + ")");
		}
	}

	/*
	 * As far as I can tell, the only legitimate time cr will be null is
	 * if we queried for ANY and only saw negative responses, but not an
	 * NXDOMAIN. Return UNKNOWN.
	 */
	if (cr == null && type == Type.ANY)
		return SetResponse.ofType(SetResponse.UNKNOWN);
	else if (cr == null)
		throw new IllegalStateException("looking up (" + name + " " +
						Type.string(type) + "): " +
						"cr == null.");
	return cr;
}

// Shared implementation for the public findRecords/findAnyRecords wrappers:
// returns the answer RRsets on success, null on any failure mode.
private RRset []
findRecords(Name name, short type, byte minCred) {
	SetResponse cr = lookupRecords(name, type, minCred);
	if (cr.isSuccessful())
		return cr.answers();
	else
		return null;
}

/**
 * Looks up credible Records in the Cache (a wrapper around lookupRecords).
 * Unlike lookupRecords, this gives no indication of why failure occurred.
 * @param name The name to look up
 * @param type The type to look up
 * @return An array of RRsets, or null
 * @see Credibility
 */
public RRset []
findRecords(Name name, short type) {
	return findRecords(name, type, Credibility.NONAUTH_ANSWER);
}

/**
 * Looks up Records in the Cache (a wrapper around lookupRecords). Unlike
 * lookupRecords, this gives no indication of why failure occurred. Accepts
 * less credible data than findRecords.
 * @param name The name to look up
 * @param type The type to look up
 * @return An array of RRsets, or null
 * @see Credibility
 */
public RRset []
findAnyRecords(Name name, short type) {
	return findRecords(name, type, Credibility.NONAUTH_ADDITIONAL);
}

// Re-verifies every positive entry of a temporary cache and copies the ones
// that prove secure into this cache. Used by addMessage in secure mode.
private void
verifyRecords(Cache tcache) {
	Iterator it;

	it = tcache.names();
	while (it.hasNext()) {
		Name name = (Name) it.next();
		TypeMap tm = tcache.findName(name);
		if (tm == null)
			continue;
		Object [] elements;
		elements = tm.getAll();
		if (elements == null)
			continue;
		for (int i = 0; i < elements.length; i++) {
			Element element = (Element) elements[i];
			RRset rrset = element.rrset;

			/* for now, ignore negative cache entries */
			if (rrset == null)
				continue;
			if (verifier != null)
				rrset.setSecurity(verifier.verify(rrset, this));
			if (rrset.getSecurity() < DNSSEC.Secure)
				continue;
			addSet(name, rrset.getType(), element);
		}
	}
}

// Maps (message section, authoritative flag, name match) to the credibility
// level that records from that section deserve.
private final byte
getCred(Name recordName, Name queryName, short section, boolean isAuth) {
	byte cred;
	if (section == Section.ANSWER) {
		if (isAuth && recordName.equals(queryName))
			cred = Credibility.AUTH_ANSWER;
		else if (isAuth)
			cred = Credibility.AUTH_NONAUTH_ANSWER;
		else
			cred = Credibility.NONAUTH_ANSWER;
	} else if (section == Section.AUTHORITY) {
		if (isAuth)
			cred = Credibility.AUTH_AUTHORITY;
		else
			cred = Credibility.NONAUTH_AUTHORITY;
	} else if (section == Section.ADDITIONAL) {
		if (isAuth)
			cred = Credibility.AUTH_ADDITIONAL;
		else
			cred = Credibility.NONAUTH_ADDITIONAL;
	} else
		throw new IllegalArgumentException("getCred: invalid section");
	return cred;
}

/**
 * Adds all data from a Message into the Cache. Each record is added with
 * the appropriate credibility, and negative answers are cached as such.
 * @param in The Message to be added
 * @see Message
 */
public void
addMessage(Message in) {
	boolean isAuth = in.getHeader().getFlag(Flags.AA);
	Name queryName = in.getQuestion().getName();
	Name lookupName = queryName;
	short queryType = in.getQuestion().getType();
	short queryClass = in.getQuestion().getDClass();
	byte cred;
	short rcode = in.getHeader().getRcode();
	boolean haveAnswer = false;
	Record [] answers, auth, addl;

	if (secure) {
		// Verify everything in a scratch cache first, then copy over
		// only the records that prove secure.
		Cache c = new Cache(dclass);
		c.addMessage(in);
		verifyRecords(c);
		return;
	}

	if (rcode != Rcode.NOERROR && rcode != Rcode.NXDOMAIN)
		return;

	answers = in.getSectionArray(Section.ANSWER);
	// Follow CNAME/DNAME chains through the answer section; each redirect
	// updates lookupName and restarts the scan.
	while (!haveAnswer || queryType == Type.ANY) {
		boolean restart = false;
		for (int i = 0; i < answers.length; i++) {
			short type = answers[i].getType();
			short rrtype = answers[i].getRRsetType();
			Name name = answers[i].getName();
			cred = getCred(name, queryName, Section.ANSWER, isAuth);
			if (type == Type.CNAME && name.equals(lookupName)) {
				addRecord(answers[i], cred, in);
				CNAMERecord cname = (CNAMERecord) answers[i];
				lookupName = cname.getTarget();
				restart = true;
			} else if (rrtype == Type.CNAME &&
				   name.equals(lookupName))
			{
				// e.g. a SIG covering the CNAME.
				addRecord(answers[i], cred, in);
			} else if (type == Type.DNAME &&
				   lookupName.subdomain(name))
			{
				addRecord(answers[i], cred, in);
				DNAMERecord dname = (DNAMERecord) answers[i];
				try {
					lookupName =
						lookupName.fromDNAME(dname);
				}
				catch (NameTooLongException e) {
					break;
				}
				restart = true;
			} else if (rrtype == Type.DNAME &&
				   lookupName.subdomain(name))
			{
				addRecord(answers[i], cred, in);
			} else if ((rrtype == queryType ||
				    queryType == Type.ANY) &&
				   name.equals(lookupName))
			{
				addRecord(answers[i], cred, in);
				haveAnswer = true;
			}
		}
		if (!restart)
			break;
	}

	auth = in.getSectionArray(Section.AUTHORITY);
	if (!haveAnswer) {
		/* This is a negative response */
		SOARecord soa = null;
		for (int i = 0; i < auth.length; i++) {
			if (auth[i].getType() == Type.SOA &&
			    lookupName.subdomain(auth[i].getName()))
			{
				soa = (SOARecord) auth[i];
				break;
			}
		}
		if (soa != null) {
			/* This is a negative response. */
			// Negative TTL = min(SOA TTL, SOA minimum), capped by
			// maxncache if set (RFC 2308 semantics).
			long soattl = (long)soa.getTTL() & 0xFFFFFFFFL;
			long soamin = (long)soa.getMinimum() & 0xFFFFFFFFL;
			long ttl = Math.min(soattl, soamin);
			if (maxncache >= 0)
				ttl = Math.min(ttl, maxncache);
			cred = getCred(soa.getName(), queryName,
				       Section.AUTHORITY, isAuth);
			if (rcode == Rcode.NXDOMAIN)
				addNegative(rcode, lookupName, (short)0,
					    ttl, cred, in);
			else
				addNegative(rcode, lookupName, queryType,
					    ttl, cred, in);
		}
	}
	for (int i = 0; i < auth.length; i++) {
		short type = auth[i].getRRsetType();
		Name name = auth[i].getName();
		if ((type == Type.NS || type == Type.SOA) &&
		    lookupName.subdomain(name))
		{
			cred = getCred(name, queryName, Section.AUTHORITY,
				       isAuth);
			addRecord(auth[i], cred, in);
		}
		/* NXT records are not cached yet. */
	}

	addl = in.getSectionArray(Section.ADDITIONAL);
	for (int i = 0; i < addl.length; i++) {
		short type = addl[i].getRRsetType();
		if (type != Type.A && type != Type.AAAA && type != Type.A6)
			continue;
		/* XXX check the name */
		Name name = addl[i].getName();
		cred = getCred(name, queryName, Section.ADDITIONAL, isAuth);
		addRecord(addl[i], cred, in);
	}
}

/**
 * Flushes an RRset from the cache
 * @param name The name of the records to be flushed
 * @param type The type of the records to be flushed
 * @see RRset
 */
void
flushSet(Name name, short type) {
	Element element = (Element) findExactSet(name, type);
	if (element == null || element.rrset == null)
		return;
	removeSet(name, type, element);
}

/**
 * Flushes all RRsets with a given name from the cache
 * @param name The name of the records to be flushed
 * @see RRset
 */
void
flushName(Name name) {
	removeName(name);
}

/**
 * Defines a module to be used for data verification (DNSSEC). An
 * implementation is found in org.xbill.DNSSEC.security.DNSSECVerifier,
 * which requires Java 2 or above and the Java Cryptography Extensions.
 */
public void
setVerifier(Verifier v) {
	verifier = v;
}

/**
 * Mandates that all data stored in this Cache must be verified and proven
 * to be secure, using a verifier (as defined in setVerifier).
 */
public void
setSecurePolicy() {
	secure = true;
}

/**
 * Sets the maximum length of time that a negative response will be stored
 * in this Cache. A negative value disables this feature (that is, sets
 * no limit).
 */
public void
setMaxNCache(int seconds) {
	maxncache = seconds;
}

/**
 * Sets the interval (in minutes) that all expired records will be expunged
 * from the cache. The default is 30 minutes. 0 or a negative value disables
 * this feature.
 */
public void
setCleanInterval(int minutes) {
	cleanInterval = minutes;
	if (cleanInterval <= 0)
		// NOTE(review): this only drops the reference; an already
		// running cleaner thread is not stopped — confirm intent.
		cleaner = null;
	else if (cleaner == null)
		cleaner = new CacheCleaner();
}

}
|
package io.jooby;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigException;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigParseOptions;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.lang.management.ManagementFactory;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Application environment contains configuration object and active environment names.
*
* The active environment names serve the purpose of allowing loading different configuration files
* depending on the environment. Also, {@link Extension} modules might configure application
* services differently depending on the environment too. For example: turn on/off caches,
* reload files, etc.
*
* The <code>application.env</code> property controls the active environment names.
*
* @since 2.0.0
* @author edgar
*/
public class Environment {
private final List<String> actives;
private final Config conf;
private final ClassLoader classLoader;
/**
* Creates a new environment.
*
* @param classLoader Class loader.
* @param conf Application configuration.
* @param actives Active environment names.
*/
public Environment(@Nonnull ClassLoader classLoader, @Nonnull Config conf,
@Nonnull String... actives) {
this(classLoader, conf, Arrays.asList(actives));
}
/**
* Creates a new environment.
*
* @param classLoader Class loader.
* @param conf Application configuration.
* @param actives Active environment names.
*/
public Environment(@Nonnull ClassLoader classLoader, @Nonnull Config conf,
@Nonnull List<String> actives) {
this.classLoader = classLoader;
this.actives = actives.stream()
.map(String::trim)
.map(String::toLowerCase)
.collect(Collectors.toList());
this.conf = conf;
}
/**
* Get a property under the given key or use the given default value when missing.
*
* @param key Property key.
* @param defaults Default value.
* @return Property or default value.
*/
public @Nonnull String getProperty(@Nonnull String key, @Nonnull String defaults) {
if (hasPath(conf, key)) {
return conf.getString(key);
}
return defaults;
}
/**
* Get a property under the given key or <code>null</code> when missing.
*
* @param key Property key.
* @return Property value or <code>null</code> when missing.
*/
public @Nullable String getProperty(@Nonnull String key) {
if (hasPath(conf, key)) {
return conf.getString(key);
}
return null;
}
/**
* List all the properties under the given key. Example:
*
* <pre>
* user.name = "name"
* user.password = "pass"
* </pre>
*
* A call to <code>getProperties("user")</code> give you a map like:
* <code>{user.name: name, user.password: pass}</code>
*
* @param key Key.
* @return Properties under that key or empty map.
*/
public @Nonnull Map<String, String> getProperties(@Nonnull String key) {
return getProperties(key, key);
}
/**
* List all the properties under the given key. Example:
*
* <pre>
* user.name = "name"
* user.password = "pass"
* </pre>
*
* A call to <code>getProperties("user", "u")</code> give you a map like:
* <code>{u.name: name, u.password: pass}</code>
*
* @param key Key.
* @param prefix Prefix to use or <code>null</code> for none.
* @return Properties under that key or empty map.
*/
public @Nonnull Map<String, String> getProperties(@Nonnull String key, @Nullable String prefix) {
if (hasPath(conf, key)) {
Map<String, String> settings = new HashMap<>();
String p = prefix == null || prefix.length() == 0 ? "" : prefix + ".";
conf.getConfig(key).entrySet().stream()
.forEach(e -> {
Object value = e.getValue().unwrapped();
if (value instanceof List) {
value = ((List) value).stream().collect(Collectors.joining(", "));
}
String k = p + e.getKey();
settings.put(k, value.toString());
});
return settings;
}
return Collections.emptyMap();
}
/**
* Application configuration.
*
* @return Application configuration.
*/
public @Nonnull Config getConfig() {
return conf;
}
/**
* Active environment names.
*
* @return Active environment names.
*/
public @Nonnull List<String> getActiveNames() {
return Collections.unmodifiableList(actives);
}
/**
* Test is the given environment names are active.
*
* @param name Environment name.
* @param names Optional environment names.
* @return True if any of the given names is active.
*/
public boolean isActive(@Nonnull String name, String... names) {
return this.actives.contains(name.toLowerCase())
|| Stream.of(names).map(String::toLowerCase).anyMatch(this.actives::contains);
}
/**
* Application class loader.
*
* @return Application class loader.
*/
public @Nonnull ClassLoader getClassLoader() {
return classLoader;
}
/**
* Loaded class or empty.
*
* @param className Class name.
* @return Load a class or get an empty value.
*/
public @Nonnull Optional<Class> loadClass(@Nonnull String className) {
try {
return Optional.of(classLoader.loadClass(className));
} catch (ClassNotFoundException x) {
return Optional.empty();
}
}
@Override public String toString() {
return actives + "\n" + toString(conf).trim();
}
private String toString(final Config conf) {
return configTree(conf.origin().description());
}
private String configTree(final String description) {
return configTree(description.split(":\\s+\\d+,|,"), 0);
}
private String configTree(final String[] sources, final int i) {
char[] pad = new char[i];
Arrays.fill(pad, ' ');
if (i < sources.length) {
return new StringBuilder()
.append(pad)
.append(" ")
.append(sources[i].replace("merge of", "").trim())
.append("\n")
.append(configTree(sources, i + 1))
.toString();
}
return "";
}
private static boolean hasPath(Config config, String key) {
try {
return config.hasPath(key);
} catch (ConfigException x) {
return false;
}
}
/**
* Creates a {@link Config} object from {@link System#getProperties()}.
*
* @return Configuration object.
*/
public static @Nonnull Config systemProperties() {
return ConfigFactory.parseProperties(System.getProperties(),
ConfigParseOptions.defaults().setOriginDescription("system properties"));
}
/**
* Creates a {@link Config} object from {@link System#getenv()}.
*
* @return Configuration object.
*/
public static @Nonnull Config systemEnv() {
return ConfigFactory.systemEnvironment();
}
/**
* This method search for an application.conf file in three location
* (first-listed are higher priority):
*
* <ul>
* <li>${user.dir}/conf: This is a file system location, useful is you want to externalize
* configuration (outside of jar file).</li>
* <li>${user.dir}: This is a file system location, useful is you want to externalize
* configuration (outside of jar file)</li>
* <li>classpath:// (root of classpath). No external configuration, configuration file lives
* inside the jar file</li>
* </ul>
*
* Property overrides is done in the following order (first-listed are higher priority):
*
* <ul>
* <li>Program arguments</li>
* <li>System properties</li>
* <li>Environment variables</li>
* <li>Environment property file</li>
* <li>Property file</li>
* </ul>
*
* @param options Options like basedir, filename, etc.
* @return A new environment.
*/
public static @Nonnull Environment loadEnvironment(@Nonnull EnvironmentOptions options) {
Config sys = systemProperties()
.withFallback(systemEnv());
List<String> actives = options.getActiveNames();
String filename = options.getFilename();
String extension;
int ext = filename.lastIndexOf('.');
if (ext <= 0) {
extension = ".conf";
} else {
extension = filename.substring(ext);
filename = filename.substring(0, ext);
}
Path userdir = Paths.get(System.getProperty("user.dir"));
/** Application file: */
String[] names = new String[actives.size() + 1];
for (int i = 0; i < actives.size(); i++) {
names[i] = filename + "." + actives.get(i).trim().toLowerCase() + extension;
}
names[actives.size()] = filename + extension;
Config application = resolveConfig(options, userdir, names);
// check if there is a local env set
if (application.hasPath("application.env")) {
String env = application.getString("application.env");
// Override environment only if the active environment is set to `dev`
if (!actives.contains(env) && (actives.contains("dev") && actives.size() == 1)) {
Config envConfig = resolveConfig(options, userdir,
filename + "." + env.toLowerCase() + extension);
if (envConfig != null) {
application = envConfig.withFallback(application);
actives = Collections.singletonList(env.toLowerCase());
}
}
}
Config result = sys
.withFallback(application)
.withFallback(defaults())
.resolve();
return new Environment(options.getClassLoader(), result, actives);
}
private static Config resolveConfig(@Nonnull EnvironmentOptions options, Path userdir,
String... names) {
Config application = ConfigFactory.empty();
String basedir = options.getBasedir();
Path[] rootdirs;
String[] cpdirs;
if (basedir == null) {
rootdirs = new Path[]{userdir.resolve("conf"), userdir};
cpdirs = new String[]{"conf", ""};
} else {
rootdirs = new Path[]{Paths.get(basedir)};
cpdirs = new String[]{basedir};
}
for (String name : names) {
Config it = fileConfig(userdir, rootdirs, name);
if (it == null) {
// classpath
it = classpathConfig(options.getClassLoader(), cpdirs, name);
}
if (it != null) {
application = application.withFallback(it);
}
}
return application;
}
/**
* Creates a default configuration properties with some common values like: application.tmpdir,
* application.charset and pid (process ID).
*
* @return A configuration object.
*/
public static @Nonnull Config defaults() {
Path tmpdir = Paths.get(System.getProperty("user.dir"), "tmp");
Map<String, String> defaultMap = new HashMap<>();
defaultMap.put("application.tmpdir", tmpdir.toString());
defaultMap.put("application.charset", "UTF-8");
String pid = pid();
if (pid != null) {
System.setProperty("PID", pid);
defaultMap.put("pid", pid);
}
return ConfigFactory.parseMap(defaultMap, "defaults");
}
/**
 * Find JVM process ID. Looks at the <code>PID</code> environment variable first, then the
 * <code>PID</code> system property, and finally parses the runtime MXBean name, which is
 * conventionally <code>pid@hostname</code> on HotSpot.
 *
 * @return JVM process ID or <code>null</code>.
 */
public static @Nullable String pid() {
  String pid = System.getenv().getOrDefault("PID", System.getProperty("PID"));
  if (pid != null) {
    return pid;
  }
  String jvmName = ManagementFactory.getRuntimeMXBean().getName();
  int at = jvmName.indexOf("@");
  // Fall back to the raw MXBean name when it does not follow the pid@host convention.
  return at > 0 ? jvmName.substring(0, at) : jvmName;
}
/**
 * Looks for the named configuration file inside each base directory, returning the first match.
 * The origin description is made relative to <code>userdir</code> when possible, which keeps
 * error messages short.
 */
private static Config fileConfig(Path userdir, Path[] basedirs, String name) {
  for (Path dir : basedirs) {
    Path candidate = dir.resolve(name);
    if (!Files.exists(candidate)) {
      continue;
    }
    String origin;
    if (candidate.startsWith(userdir)) {
      origin = userdir.relativize(candidate).toString();
    } else {
      origin = candidate.toString();
    }
    return ConfigFactory.parseFile(candidate.toFile(),
        ConfigParseOptions.defaults().setOriginDescription(origin));
  }
  return null;
}
/**
 * Looks for the named configuration resource under each classpath base directory, returning
 * the first non-empty match, or <code>null</code> when nothing was found.
 */
private static Config classpathConfig(ClassLoader classLoader, String[] basedirs, String name) {
  for (String dir : basedirs) {
    String resource;
    if (dir.isEmpty()) {
      resource = name;
    } else {
      // Re-split and re-join the base directory so the resource path uses clean "/" segments.
      resource = Stream.concat(Stream.of(dir.split("/")), Stream.of(name))
          .collect(Collectors.joining("/"));
    }
    Config config = ConfigFactory.parseResources(classLoader, resource,
        ConfigParseOptions.defaults().setOriginDescription("classpath://" + resource));
    if (!config.isEmpty()) {
      return config;
    }
  }
  return null;
}
}
|
package team.black.fruitswirl;
import org.flixel.FlxSprite;
/**
 * A single fruit piece on the board. Tracks a bitmask of STATE_* flags and, while flipping,
 * a FLIP_* direction.
 */
public class Fruit extends FlxSprite {
    // State flags. Each constant sets a single distinct bit (one bit per hex digit), so
    // several states can be combined with bitwise OR and tested with bitwise AND.
    public static final int STATE_NONE = 0x00000000;
    public static final int STATE_NORMAL = 0x00000001;
    public static final int STATE_FIRE = 0x00000010;
    public static final int STATE_ELEC = 0x00000100;
    public static final int STATE_LOCKING = 0x00001000;
    public static final int STATE_LOCKED = 0x00010000;
    public static final int STATE_DESTROY = 0x00100000;
    public static final int STATE_FALLING = 0x01000000;
    public static final int STATE_FLIPPING = 0x10000000;
    // Flip directions (also single-bit flags).
    public static final int FLIP_NONE = 0x0000;
    public static final int FLIP_LEFT = 0x0001;
    public static final int FLIP_RIGHT = 0x0010;
    public static final int FLIP_UP = 0x0100;
    public static final int FLIP_DOWN = 0x1000;

    // Current combination of STATE_* flags.
    private int currentState;
    // Current FLIP_* direction.
    private int flipDirection;

    /** Creates a fruit at the given position with no initial state. */
    public Fruit(float _ix, float _iy) {
        this(_ix, _iy, STATE_NONE);
    }

    /** Creates a fruit at the given position with the given initial state. */
    public Fruit(float _ix, float _iy, int _initState) {
        super(_ix, _iy);
        setCurrentState(_initState);
        setFlipDirection(FLIP_NONE);
    }

    /** Per-frame update; currently just delegates to the sprite base class. */
    @Override
    public void update() {
        super.update();
    }

    /** Returns the current combination of STATE_* flags. */
    public int getCurrentState() {
        return currentState;
    }

    /** Replaces the current combination of STATE_* flags. */
    public void setCurrentState(int curState) {
        this.currentState = curState;
    }

    /** Returns the current FLIP_* direction. */
    public int getFlipDirection() {
        return flipDirection;
    }

    /** Replaces the current FLIP_* direction. */
    public void setFlipDirection(int flipDirection) {
        this.flipDirection = flipDirection;
    }
}
|
package org.workcraft;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.RhinoException;
import org.mozilla.javascript.WrappedException;
import org.workcraft.exceptions.OperationCancelledException;
import org.workcraft.gui.MainWindow;
import org.workcraft.utils.DesktopApi;
import org.workcraft.utils.LogUtils;
import org.workcraft.utils.ResourceUtils;
import org.workcraft.workspace.FileFilters;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.LinkedList;
/**
 * Command-line entry point for Workcraft. Processes command-line options, initialises the
 * framework (JavaScript, plugins, config), optionally starts the GUI, and then services
 * either the GUI shutdown loop or an interactive JavaScript prompt.
 */
public class Console {
    static {
        // Enable font anti-aliasing
        System.setProperty("awt.useSystemAAFontSettings","on");
        System.setProperty("swing.aatext", "true");
        //Allows menu bar of OS X to be used instead of being in the Workcraft main window.
        if (DesktopApi.getOs().isMac()) {
            System.setProperty("apple.laf.useScreenMenuBar", "true");
            System.setProperty("apple.eawt.quitStrategy", "CLOSE_ALL_WINDOWS");
        }
    }

    /**
     * Application entry point.
     *
     * @param args command-line arguments; recognised options are -version, -help, -nogui,
     *             -noconfig, -dir:, -exec: plus any number of work-file paths
     */
    public static void main(String[] args) {
        final Framework framework = Framework.getInstance();
        // Mutable copy of the arguments: consumed options are removed so that only work
        // files and script parameters remain for later stages.
        LinkedList<String> arglist = new LinkedList<>(Arrays.asList(args));
        // Process -version and -help options
        for (String arg : args) {
            if (arg.equals(Info.OPTION_VERSION)) {
                System.out.println(Info.getVersion());
                return;
            }
            if (arg.equals(Info.OPTION_HELP)) {
                System.out.println(Info.getHelp());
                return;
            }
        }
        // Process -nogui, -noconfig and -dir: options
        boolean startGui = true;
        boolean useConfig = true;
        for (String arg : args) {
            if (arg.equals(Info.OPTION_NOGUI)) {
                startGui = false;
                arglist.remove(arg);
            }
            if (arg.equals(Info.OPTION_NOCONFIG)) {
                useConfig = false;
                arglist.remove(arg);
            }
            if (arg.startsWith(Info.OPTION_DIR)) {
                // -dir: carries its value inline, after the option prefix.
                String path = arg.substring(Info.OPTION_DIR.length());
                framework.setWorkingDirectory(path);
                arglist.remove(arg);
            }
        }
        System.out.println(Info.getFullTitle());
        System.out.println(Info.getCopyright());
        System.out.println();
        // NOTE: JavaScript and Plugins needs to be initialised before GUI (because of assigning PropertyProviders)
        // and before config (because of plugin-specific settings).
        framework.init();
        // NOTE: Scripts should run after JavaScript, plugins, config (and possibly before GUI).
        try {
            for (String scriptName : ResourceUtils.getResources("scripts/")) {
                LogUtils.logMessage(" Executing script: " + scriptName);
                framework.execJavaScriptResource(scriptName);
            }
        } catch (IOException | URISyntaxException e) {
            LogUtils.logError("Cannot read script files: " + e.getMessage());
        }
        // NOTE: Config needs to be loaded before GUI.
        if (useConfig) {
            framework.loadConfig();
        }
        if (startGui) {
            framework.startGUI();
        }
        // Any remaining arguments ending in the work extension are opened as work files,
        // but only when the GUI actually came up.
        if (framework.isInGuiMode()) {
            for (String arg: arglist) {
                if (arg.endsWith(FileFilters.DOCUMENT_EXTENSION)) {
                    MainWindow mainWindow = framework.getMainWindow();
                    File file = framework.getFileByAbsoluteOrRelativePath(arg);
                    mainWindow.openWork(file);
                }
            }
        }
        // Process -exec: option
        for (String arg: args) {
            if (arg.startsWith(Info.OPTION_EXEC)) {
                // Everything left in arglist becomes the script's own argument list.
                arglist.remove(arg);
                framework.setArgs(arglist);
                try {
                    String execParameter = arg.substring(Info.OPTION_EXEC.length());
                    // The parameter is either a readable script file or raw JavaScript text.
                    File execFile = framework.getFileByAbsoluteOrRelativePath(execParameter);
                    if ((execFile != null) && execFile.exists() && execFile.isFile() && execFile.canRead()) {
                        LogUtils.logMessage("Executing script file " + execParameter + "...");
                        framework.execJavaScriptFile(execFile);
                    } else {
                        LogUtils.logMessage("Executing raw script:\n" + execParameter);
                        framework.execJavaScript(execParameter);
                    }
                } catch (WrappedException e) {
                    e.getWrappedException().printStackTrace();
                    System.exit(1);
                } catch (IOException | RhinoException e) {
                    LogUtils.logError(e.getMessage());
                    System.exit(1);
                }
            }
        }
        // Main service loop: either poll for a GUI shutdown request, or run a simple
        // read-eval-print loop on stdin when there is no GUI.
        BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
        while (true) {
            if (framework.shutdownRequested()) {
                try {
                    framework.shutdownGUI();
                    if (useConfig) {
                        framework.saveConfig();
                    }
                } catch (OperationCancelledException e) {
                    // The user cancelled the shutdown dialog; keep running.
                    framework.abortShutdown();
                }
                if (!framework.shutdownRequested()) {
                    continue;
                }
                LogUtils.logMessage("Shutting down...");
                System.exit(0);
            }
            if (framework.isInGuiMode()) {
                try {
                    // Poll for shutdown requests ten times a second.
                    Thread.sleep(100);
                } catch (InterruptedException e1) {
                    // NOTE(review): the interrupt is swallowed and the loop polls again;
                    // consider restoring the interrupt flag — confirm intended behaviour.
                }
            } else {
                System.out.print("js>");
                try {
                    String line = in.readLine();
                    Object result = framework.execJavaScript(line);
                    // A Rhino context is needed to stringify the evaluation result.
                    Context.enter();
                    String out = Context.toString(result);
                    Context.exit();
                    if (!out.equals("undefined")) {
                        System.out.println(out);
                    }
                } catch (WrappedException e) {
                    System.err.println(e.getWrappedException().getMessage());
                } catch (IOException | RhinoException e) {
                    System.err.println(e.getMessage());
                }
            }
        }
    }
}
|
package uk.co.placona.helloWorld;
/**
 * A simple stopwatch that counts elapsed time in whole seconds and minutes.
 * Seconds roll over into minutes once they reach 60.
 */
public class Cronometro {
    // Elapsed time, kept as whole minutes plus 0-59 leftover seconds.
    private int segundos, minutos;

    /** Resets both counters back to zero. */
    public void reiniciar() {
        minutos = 0;
        segundos = 0;
    }

    /** Advances the stopwatch by one second, carrying over into minutes at 60. */
    public void incrementarTiempo() {
        segundos = (segundos + 1) % 60;
        if (segundos == 0) {
            minutos++;
        }
    }

    /** Returns the elapsed time formatted as "M minutos S segundos". */
    public String mostrarTiempo() {
        return minutos + " minutos " + segundos + " segundos";
    }

    /** Leftover seconds (0-59). */
    public int getSegundos() {
        return segundos;
    }

    /** Whole elapsed minutes. */
    public int getMinutos() {
        return minutos;
    }
}
|
package uk.co.placona.helloWorld;
/** Minimal example service that produces a fixed greeting. */
public class HelloWorld {
    /** The greeting returned by {@link #sayHello()}. */
    private static final String GREETING = "Hello World";

    /**
     * Returns the canonical greeting.
     *
     * @return the string "Hello World"
     */
    public String sayHello() {
        return GREETING;
    }
}
|
package uk.org.ownage.dmdirc.actions;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import uk.org.ownage.dmdirc.FrameContainer;
import uk.org.ownage.dmdirc.commandparser.CommandWindow;
import uk.org.ownage.dmdirc.logger.ErrorLevel;
import uk.org.ownage.dmdirc.logger.Logger;
/**
 * Describes a single action: a trigger type, a list of conditions and the commands to run
 * when the trigger fires and all conditions hold. Loaded from a properties file on disk.
 * @author chris
 */
public class Action {
    /** The group this action belongs to. */
    private String group;
    /** The name of this action. */
    private String name;
    /** The file containing this action. */
    private final File file;
    /** The properties read for this action. */
    private Properties properties;
    /** The ActionType that triggers this action. */
    private ActionType trigger;
    /** The commands to execute if this action is triggered. */
    private String[] response;
    /** The conditions for this action. */
    private final List<ActionCondition> conditions = new ArrayList<ActionCondition>();

    /**
     * Creates a new instance of Action. The group and name specified must
     * be the group and name of a valid action already saved to disk.
     * @param group The group the action belongs to
     * @param name The name of the action
     */
    public Action(final String group, final String name) {
        this.group = group;
        this.name = name;
        final String fs = System.getProperty("file.separator");
        final String location = ActionManager.getDirectory() + group + fs + name;
        file = new File(location);
        FileInputStream inputStream = null;
        try {
            inputStream = new FileInputStream(file);
            properties = new Properties();
            properties.load(inputStream);
            loadAction();
        } catch (IOException ex) {
            Logger.error(ErrorLevel.ERROR, "Unable to load action: " + group + "/" + name, ex);
        } finally {
            // Always release the file handle; the original code leaked it.
            if (inputStream != null) {
                try {
                    inputStream.close();
                } catch (IOException ex) {
                    // Nothing sensible can be done if the close itself fails.
                }
            }
        }
    }

    /**
     * Loads the various attributes of this action from the properties instance.
     * The action is only registered with the ActionManager when everything parsed cleanly.
     */
    private void loadAction() {
        boolean valid = true;
        // Read the trigger
        if (properties.containsKey("trigger")) {
            trigger = ActionManager.getActionType(properties.getProperty("trigger"));
            if (trigger == null) {
                error("Invalid trigger specified");
                valid = false;
            }
        } else {
            valid = false;
        }
        // Read the response
        if (properties.containsKey("response")) {
            // One command per line.
            response = properties.getProperty("response").split("\n");
        } else {
            error("No response specified");
            properties.list(System.out);
            valid = false;
        }
        // Read the conditions
        int numConditions = 0;
        if (properties.containsKey("conditions")) {
            try {
                numConditions = Integer.parseInt(properties.getProperty("conditions"));
            } catch (NumberFormatException ex) {
                error("Invalid number of conditions specified");
                valid = false;
            }
        }
        for (int i = 0; i < numConditions; i++) {
            // Deliberately non-short-circuit: every condition is parsed (and reported)
            // even after an earlier one has failed.
            valid = valid & readCondition(i);
        }
        if (valid) {
            ActionManager.registerAction(this);
        }
    }

    /**
     * Reads the specified condition.
     * @param condition Condition number to read
     * @return True if the condition was read successfully.
     */
    private boolean readCondition(final int condition) {
        // It may help to close your eyes while reading this method.
        int arg = -1;
        ActionComponent component = null;
        ActionComparison comparison = null;
        String target = "";
        if (properties.containsKey("condition" + condition + "-arg")) {
            try {
                arg = Integer.parseInt(properties.getProperty("condition" + condition + "-arg"));
            } catch (NumberFormatException ex) {
                error("Invalid argument number for condition " + condition);
                return false;
            }
        }
        // A missing -arg key leaves arg at -1, which this range check also rejects.
        if (arg < 0 || arg >= trigger.getType().getArity()) {
            error("Invalid argument number for condition " + condition);
            return false;
        }
        if (properties.containsKey("condition" + condition + "-component")) {
            try {
                component = ActionComponent.valueOf(properties.getProperty("condition" + condition + "-component"));
            } catch (IllegalArgumentException ex) {
                error("Invalid component for condition " + condition);
                return false;
            }
            if (!component.appliesTo().equals(trigger.getType().getArgTypes()[arg])) {
                error("Component cannot be applied to specified arg in condition " + condition);
                return false;
            }
        } else {
            error("No component specified for condition " + condition);
            return false;
        }
        if (properties.containsKey("condition" + condition + "-comparison")) {
            try {
                comparison = ActionComparison.valueOf(properties.getProperty("condition" + condition + "-comparison"));
            } catch (IllegalArgumentException ex) {
                error("Invalid comparison for condition " + condition);
                return false;
            }
            if (!comparison.appliesTo().equals(component.getType())) {
                error("Comparison cannot be applied to specified component in condition " + condition);
                return false;
            }
        } else {
            error("No comparison specified for condition " + condition);
            return false;
        }
        if (properties.containsKey("condition" + condition + "-target")) {
            target = properties.getProperty("condition" + condition + "-target");
        } else {
            error("No target specified for condition " + condition);
            return false;
        }
        conditions.add(new ActionCondition(arg, component, comparison, target));
        return true;
    }

    /**
     * Raises a trivial error, informing the user of the problem.
     * @param message The message to be raised
     */
    private void error(final String message) {
        Logger.error(ErrorLevel.TRIVIAL, "Unable to parse action " + group + "/" + name + ": " + message);
    }

    /**
     * Retrieves this action's trigger.
     * @return The action type that triggers this action
     */
    public ActionType getTrigger() {
        return trigger;
    }

    /**
     * Retrieves this action's group name.
     * @return This action's group name
     */
    public String getGroup() {
        return group;
    }

    /**
     * Triggers this action: if every condition passes, each response command is parsed
     * in the frame that caused the trigger.
     * @param arguments The arguments from the action that caused this trigger.
     */
    public void trigger(final Object ... arguments) {
        if (response == null) {
            // The action failed to load (no response specified), so there is nothing to run;
            // previously this would have thrown a NullPointerException.
            return;
        }
        for (ActionCondition condition : conditions) {
            if (!condition.test(arguments)) {
                return;
            }
        }
        for (String command : response) {
            final CommandWindow cw = ((FrameContainer) arguments[0]).getFrame();
            cw.getCommandParser().parseCommand(cw, ActionManager.substituteVars(command, arguments));
        }
    }
}
|
package jalse.actions;
import static jalse.actions.Actions.requireNotStopped;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * A manual-tick implementation of {@link ActionEngine}. ManualActionEngine uses no additional
 * threads and will not run any actions until {@link #resume()} is called. When the engine is
 * ticking all jobs that should be executed will be (even if past their estimated schedule time).
 *
 * @author Elliot Ford
 *
 */
public class ManualActionEngine implements ActionEngine {
    /**
     * Manual action context.
     *
     * @author Elliot Ford
     *
     * @param <T>
     *            Actor type.
     */
    public class ManualContext<T> extends AbstractManualActionContext<T> {
        /**
         * Creates a new ManualActionContext instance.
         *
         * @param action
         *            Action this context is for.
         */
        protected ManualContext(final Action<T> action) {
            super(ManualActionEngine.this, action, bindings);
        }

        @Override
        protected void addAsWork() {
            addWork(this);
        }

        @Override
        protected void removeAsWork() {
            removeWork(this);
        }
    }

    private static final Logger logger = Logger.getLogger(ManualActionEngine.class.getName());

    /** Queue of waiting/ready work contexts. */
    private final ManualWorkQueue<ManualContext<?>> workQueue;
    /** Engine-level bindings shared with every context. */
    private final MutableActionBindings bindings;
    /** True while a resume() call is executing (guards against re-entrance). */
    private final AtomicBoolean ticking;
    /** True once stop() has been called; the engine cannot be restarted. */
    private final AtomicBoolean stopped;

    /**
     * Creates a new instance of ManualActionEngine.
     */
    public ManualActionEngine() {
        workQueue = new ManualWorkQueue<>();
        bindings = new DefaultActionBindings();
        ticking = new AtomicBoolean();
        stopped = new AtomicBoolean();
    }

    /**
     * Adds work to the engine.
     *
     * @param context
     *            Work to add.
     *
     * @return Whether the work was not already in the queue.
     *
     * @see Actions#requireNotStopped(ActionEngine)
     */
    protected boolean addWork(final ManualContext<?> context) {
        requireNotStopped(this);
        return workQueue.addWaitingWork(context);
    }

    @Override
    public MutableActionBindings getBindings() {
        return bindings;
    }

    /**
     * Gets the engine's work queue.
     *
     * @return Manual work queue.
     */
    protected ManualWorkQueue<ManualContext<?>> getWorkQueue() {
        return workQueue;
    }

    @Override
    public boolean isPaused() {
        return !ticking.get();
    }

    @Override
    public boolean isStopped() {
        return stopped.get();
    }

    @Override
    public <T> ManualContext<T> newContext(final Action<T> action) {
        return new ManualContext<>(action);
    }

    @Override
    public void pause() {}

    /**
     * Removes work from the engine.
     *
     * @param context
     *            Work to remove.
     * @return Whether the work was added before.
     */
    protected boolean removeWork(final ManualContext<?> context) {
        return workQueue.removeWaitingWork(context);
    }

    @Override
    public void resume() {
        requireNotStopped(this);
        // getAndSet returns the PREVIOUS value: when it was already true another resume()
        // is in progress, so bail out. (The original check was inverted — it returned on
        // the first call and only proceeded when re-entered, so no work ever ran.)
        if (ticking.getAndSet(true)) {
            return;
        }
        final List<ManualContext<?>> batch = new ArrayList<>();
        // Create batch of work
        for (;;) {
            final ManualContext<?> work = workQueue.pollReadyWork();
            if (work == null) { // No more ready work
                break;
            }
            batch.add(work);
        }
        // Perform batch
        try {
            batch.forEach(work -> {
                try {
                    work.performAction();
                } catch (final InterruptedException e) {
                    // Preserve the interrupt status for the caller.
                    Thread.currentThread().interrupt();
                } catch (final Exception e) {
                    logger.log(Level.WARNING, "Error performing action", e);
                }
            });
        } finally {
            // Always clear the ticking flag, even if a batch item threw.
            ticking.set(false);
        }
    }

    @Override
    public void stop() {
        requireNotStopped(this);
        ticking.set(false);
        // Cancel everything still waiting before marking the engine permanently stopped.
        workQueue.getWaitingWork().forEach(AbstractManualActionContext::cancel);
        stopped.set(true);
    }
}
|
import java.io.IOException;
import java.util.Random;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import org.eclipse.jetty.client.api.Request;
import org.eclipse.jetty.proxy.ProxyServlet.Transparent;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
public class ProxyMain extends Transparent {
private static final long serialVersionUID = 1L;
private boolean dropRequests = false;
private boolean delayRequests = false;
private Random rd = new Random();
@Override
public void init(ServletConfig config) throws ServletException {
System.out.println("proxyTo " + config.getInitParameter("proxyTo"));
super.init(config);
System.out.println("Proxy init done");
}
@Override
public void service(ServletRequest req, ServletResponse res) throws ServletException, IOException {
// System.out.println(">>> got a request !");
// Drop test
if (dropRequests && rd.nextBoolean())
return;
super.service(req, res);
}
@Override
protected void customizeProxyRequest(Request proxyRequest, HttpServletRequest request) {
if (delayRequests) {
try {
// System.out.println("Start wait");
Thread.sleep(1000);
// System.out.println("End wait");
} catch (InterruptedException e) {
e.printStackTrace();
}
}
super.customizeProxyRequest(proxyRequest, request);
}
public static void main(String... args) throws Exception {
if (args.length != 3) {
showHelp();
return;
}
try {
int controlPort = Integer.parseInt(args[0]);
int proxyPort = Integer.parseInt(args[1]);
String proxyTo = args[2];
Server proxyServer = new Server(proxyPort);
ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
context.setContextPath("/");
proxyServer.setHandler(context);
ServletHolder helloServletHolder = new ServletHolder(new ProxyMain());
helloServletHolder.setInitParameter("proxyTo", proxyTo);
helloServletHolder.setInitParameter("prefix", "/");
|
package travelplanner;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
/**
 * Handles user accounts backed by a local SQLite database: login, registration and a small
 * connectivity test.
 *
 * NOTE(review): despite the column name "user_password_hash", passwords are stored and
 * compared in plain text — confirm whether hashing should be introduced.
 */
public class AccountController {
    // Location of the SQLite database file; previously duplicated in each method, and
    // addUserToDB referenced an undeclared "dbPath" variable (a compile error).
    private static final String DB_PATH =
            "C:/Users/Administratr/workspace/GIT_JAVA/TravelPlaner/pa1415_group.e2_travelplanner.db";

    private String user;
    private String password;

    public AccountController(String user, String password)
    {
        this.user = user;
        this.password = password;
    }

    /**
     * Attempts to log in with the credentials supplied at construction time.
     *
     * @return the user's email on success, "PASSWORD" on a wrong password, or an error
     *         message when the user is unknown or the database is unavailable.
     */
    public String login()
    {
        String ret = "";
        Connection connection = null;
        try {
            connection = DriverManager.getConnection("jdbc:sqlite:" + DB_PATH);
            // Parameterized query prevents SQL injection through the email address
            // (previously built by string concatenation).
            PreparedStatement statement = connection.prepareStatement(
                    "SELECT first_name, last_name, email, user_password_hash"
                            + " FROM users WHERE email = ?");
            statement.setQueryTimeout(30); // set timeout to 30 sec.
            statement.setString(1, this.user);
            ResultSet rs = statement.executeQuery();
            // Advance to the first row before reading columns; previously columns were read
            // without next(), which fails on an empty result.
            if (rs.next()) {
                //LOGIN
                String pwdHash = rs.getString("user_password_hash");
                if (pwdHash.equals(password)) {
                    ret = this.user;
                } else {
                    ret = "PASSWORD";
                }
            } else {
                // Previously an SQLException message was returned here; report explicitly.
                ret = "Unknown user: " + this.user;
            }
        } catch (SQLException e) {
            // if the error message is "out of memory",
            // it probably means no database file is found
            System.err.println(e.getMessage());
            ret = e.getMessage();
            return ret; // Returns the error message
        } finally {
            try {
                if (connection != null) {
                    connection.close();
                }
            } catch (SQLException e) {
                // connection close failed.
                System.err.println(e);
            }
        }
        return ret;
    }

    /**
     * Prints the stored row for the configured user — a simple database connectivity test.
     */
    public void testDB()
    {
        Connection connection = null;
        try {
            connection = DriverManager.getConnection("jdbc:sqlite:" + DB_PATH);
            PreparedStatement statement = connection.prepareStatement(
                    "SELECT first_name, last_name, email, user_password_hash"
                            + " FROM users WHERE email = ?");
            statement.setQueryTimeout(30); // set timeout to 30 sec.
            statement.setString(1, this.user);
            ResultSet rs = statement.executeQuery();
            // read the result set (guarded by next(); reading an empty result throws)
            if (rs.next()) {
                System.out.println("first_name = " + rs.getString("first_name"));
                System.out.println("last_name = " + rs.getString("last_name"));
                System.out.println("email = " + rs.getString("email"));
                System.out.println("user_password_hash = " + rs.getString("user_password_hash"));
            }
        } catch (SQLException e) {
            // if the error message is "out of memory",
            // it probably means no database file is found
            System.err.println(e.getMessage());
        } finally {
            try {
                if (connection != null) {
                    connection.close();
                }
            } catch (SQLException e) {
                // connection close failed.
                System.err.println(e);
            }
        }
    }

    /**
     * Registers a new user with the credentials supplied at construction time.
     *
     * @param firstName the user's first name
     * @param lastName the user's last name
     * @return true when the row was inserted.
     */
    public boolean register(String firstName, String lastName)
    {
        return addUserToDB(firstName, lastName);
    }

    /** Inserts a new user row; returns true on success. */
    private boolean addUserToDB(String firstName, String lastName)
    {
        boolean ret = false;
        Connection connection = null;
        try {
            connection = DriverManager.getConnection("jdbc:sqlite:" + DB_PATH);
            // Parameterized insert; the id column is auto-assigned (NULL).
            PreparedStatement statement = connection.prepareStatement(
                    "INSERT INTO users VALUES(NULL, ?, ?, ?, ?)");
            statement.setQueryTimeout(30); // set timeout to 30 sec.
            statement.setString(1, this.user);
            statement.setString(2, this.password);
            statement.setString(3, firstName);
            statement.setString(4, lastName);
            statement.executeUpdate();
            ret = true;
        } catch (SQLException e) {
            // if the error message is "out of memory",
            // it probably means no database file is found
            System.err.println(e.getMessage());
        } finally {
            try {
                if (connection != null) {
                    connection.close();
                }
            } catch (SQLException e) {
                // connection close failed.
                System.err.println(e);
            }
        }
        return ret;
    }

    private String getUser()
    {
        return "tmp";
    }

    /** TODO **/
    /**ADD fetchUserFromDB Function**/
}
|
package dk.itst.saml.sts;
import java.io.IOException;
import java.security.InvalidAlgorithmParameterException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.XMLConstants;
import javax.xml.crypto.MarshalException;
import javax.xml.crypto.dsig.CanonicalizationMethod;
import javax.xml.crypto.dsig.DigestMethod;
import javax.xml.crypto.dsig.Reference;
import javax.xml.crypto.dsig.SignatureMethod;
import javax.xml.crypto.dsig.SignedInfo;
import javax.xml.crypto.dsig.Transform;
import javax.xml.crypto.dsig.XMLSignature;
import javax.xml.crypto.dsig.XMLSignatureException;
import javax.xml.crypto.dsig.XMLSignatureFactory;
import javax.xml.crypto.dsig.dom.DOMSignContext;
import javax.xml.crypto.dsig.keyinfo.KeyInfoFactory;
import javax.xml.crypto.dsig.spec.C14NMethodParameterSpec;
import javax.xml.crypto.dsig.spec.ExcC14NParameterSpec;
import javax.xml.crypto.dsig.spec.TransformParameterSpec;
import javax.xml.namespace.QName;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.io.IOUtils;
import org.apache.log4j.Logger;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.opensaml.common.xml.SAMLConstants;
import org.opensaml.saml2.core.Assertion;
import org.opensaml.saml2.core.Attribute;
import org.opensaml.saml2.core.AttributeStatement;
import org.opensaml.saml2.core.NameID;
import org.opensaml.saml2.core.Subject;
import org.opensaml.saml2.core.SubjectConfirmation;
import org.opensaml.saml2.core.SubjectConfirmationData;
import org.opensaml.ws.soap.soap11.Envelope;
import org.opensaml.ws.wsaddressing.Address;
import org.opensaml.ws.wsaddressing.EndpointReference;
import org.opensaml.ws.wspolicy.AppliesTo;
import org.opensaml.ws.wssecurity.BinarySecurityToken;
import org.opensaml.ws.wssecurity.Created;
import org.opensaml.ws.wssecurity.Expires;
import org.opensaml.ws.wssecurity.KeyIdentifier;
import org.opensaml.ws.wssecurity.Security;
import org.opensaml.ws.wssecurity.SecurityTokenReference;
import org.opensaml.ws.wssecurity.WSSecurityConstants;
import org.opensaml.ws.wstrust.Claims;
import org.opensaml.ws.wstrust.Lifetime;
import org.opensaml.ws.wstrust.OnBehalfOf;
import org.opensaml.ws.wstrust.RequestSecurityToken;
import org.opensaml.ws.wstrust.RequestSecurityTokenResponse;
import org.opensaml.ws.wstrust.RequestSecurityTokenResponseCollection;
import org.opensaml.ws.wstrust.RequestedAttachedReference;
import org.opensaml.ws.wstrust.RequestedSecurityToken;
import org.opensaml.ws.wstrust.RequestedUnattachedReference;
import org.opensaml.ws.wstrust.TokenType;
import org.opensaml.xml.Namespace;
import org.opensaml.xml.XMLObject;
import org.opensaml.xml.security.x509.BasicX509Credential;
import org.opensaml.xml.security.x509.X509Credential;
import org.opensaml.xml.signature.KeyInfo;
import org.opensaml.xml.signature.X509Certificate;
import org.opensaml.xml.signature.X509Data;
import org.opensaml.xml.util.XMLHelper;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import dk.itst.oiosaml.common.OIOSAMLConstants;
import dk.itst.oiosaml.common.SAMLUtil;
import dk.itst.oiosaml.configuration.SAMLConfiguration;
import dk.itst.oiosaml.liberty.ActAs;
import dk.itst.oiosaml.liberty.ClaimType;
import dk.itst.oiosaml.security.CredentialRepository;
import dk.itst.oiosaml.sp.NameIDFormat;
import dk.itst.oiosaml.sp.model.OIOAssertion;
import dk.itst.oiosaml.sp.service.util.Utils;
import dk.itst.oiosaml.sp.util.AttributeUtil;
import dk.itst.oiosaml.trust.OIOSoapEnvelope;
import dk.itst.oiosaml.trust.SigningPolicy;
import dk.itst.oiosaml.trust.TrustBootstrap;
import dk.itst.oiosaml.trust.TrustConstants;
import dk.itst.oiosaml.trust.internal.SignatureFactory;
public class TokenService extends HttpServlet {
private static CredentialRepository credentialRepository = new CredentialRepository();
private static final Logger log = Logger.getLogger(TokenService.class);
private Configuration cfg;
    /**
     * Bootstraps OIOSAML trust support and loads the STS configuration. The "sts"
     * configuration set is selected explicitly rather than the default one.
     */
    @Override
    public void init(ServletConfig config) throws ServletException {
        TrustBootstrap.bootstrap();
        SAMLConfiguration.setConfigurationName("sts");
        // Clear any home-property override so the default user-home location is used.
        SAMLConfiguration.setHomeProperty(null);
        cfg = SAMLConfiguration.getSystemConfiguration();
        log.info("Configured OIOSAML to " + System.getProperty("user.home") + "/.oiosaml/sts.properties");
    }
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
BasicX509Credential credential = credentialRepository.getCredential(SAMLConfiguration.getStringPrefixedWithBRSHome(cfg, "sts.certificate.location"), cfg.getString("sts.certificate.password"));
String xml = IOUtils.toString(req.getInputStream());
log.debug("Received request: " + xml);
OIOSoapEnvelope env = new OIOSoapEnvelope((Envelope) SAMLUtil.unmarshallElementFromString(xml));
BinarySecurityToken bst = SAMLUtil.getFirstElement(env.getHeaderElement(Security.class), BinarySecurityToken.class);
RequestSecurityToken rst = (RequestSecurityToken) env.getBody();
OnBehalfOf obo = SAMLUtil.getFirstElement(rst, OnBehalfOf.class);
ActAs actAs = SAMLUtil.getFirstElement(rst, ActAs.class);
Assertion bootstrapAssertion = null;
if (obo != null && obo.getUnknownXMLObject() instanceof Assertion) {
bootstrapAssertion = (Assertion) obo.getUnknownXMLObject();
}
if (actAs != null && actAs.getUnknownXMLObject() instanceof Assertion) {
bootstrapAssertion = (Assertion) actAs.getUnknownXMLObject();
}
OIOAssertion bootstrap = null;
if (bootstrapAssertion != null) {
bootstrap = new OIOAssertion(bootstrapAssertion);
} else {
log.error("No SAML Assertion in OnBehalfOf");
}
DateTime expire;
Lifetime lifetime = SAMLUtil.getFirstElement(rst, Lifetime.class);
if (lifetime != null && lifetime.getExpires() != null) {
expire = lifetime.getExpires().getDateTime();
} else {
expire = new DateTime().plusMinutes(5);
}
OIOSoapEnvelope res = OIOSoapEnvelope.buildResponse(new SigningPolicy(true), env);
Envelope tmp = (Envelope) SAMLUtil.unmarshallElementFromString(res.toXML());
tmp.getHeader().getUnknownXMLObjects().remove(tmp.getHeader().getUnknownXMLObjects(new QName("urn:liberty:sb:2006-08", "Framework")).get(0));
res = new OIOSoapEnvelope(tmp, true, new SigningPolicy(true));
RequestSecurityTokenResponseCollection rstrc = SAMLUtil.buildXMLObject(RequestSecurityTokenResponseCollection.class);
RequestSecurityTokenResponse rstr = SAMLUtil.buildXMLObject(RequestSecurityTokenResponse.class);
rstrc.getRequestSecurityTokenResponses().add(rstr);
String to = setAppliesTo(rst, rstr);
rstr.setContext(rst.getContext());
TokenType tokenType = SAMLUtil.buildXMLObject(TokenType.class);
tokenType.setValue(TrustConstants.TOKEN_TYPE_SAML_20);
rstr.getUnknownXMLObjects().add(tokenType);
Lifetime lt = SAMLUtil.buildXMLObject(Lifetime.class);
Expires expires = SAMLUtil.buildXMLObject(Expires.class);
Created created = SAMLUtil.buildXMLObject(Created.class);
created.setDateTime(new DateTime());
expires.setDateTime(expire);
lt.setExpires(expires);
lt.setCreated(created);
rstr.getUnknownXMLObjects().add(lt);
RequestedSecurityToken requestedSecurityToken = SAMLUtil.buildXMLObject(RequestedSecurityToken.class);
rstr.getUnknownXMLObjects().add(requestedSecurityToken);
Assertion assertion = generateAssertion(req, bootstrap, to, bst.getValue(), credential, expire, SAMLUtil.getFirstElement(rst, Claims.class));
requestedSecurityToken.setUnknownXMLObject(assertion);
RequestedAttachedReference attached = SAMLUtil.buildXMLObject(RequestedAttachedReference.class);
attached.setSecurityTokenReference(generateTokenReference(assertion));
rstr.getUnknownXMLObjects().add(attached);
RequestedUnattachedReference unattachedReference = SAMLUtil.buildXMLObject(RequestedUnattachedReference.class);
unattachedReference.setSecurityTokenReference(generateTokenReference(assertion));
rstr.addNamespace(new Namespace(TrustConstants.WSSE11_NS, "wsse11"));
rstr.getUnknownXMLObjects().add(unattachedReference);
res.setBody(rstrc);
res.setTimestamp(5);
res.setAction("http://docs.oasis-open.org/ws-sx/ws-trust/200512/RSTRC/IssueFinal");
res.setTo("http:
try {
xml = XMLHelper.nodeToString(res.sign(credential));
resp.setContentType("text/xml; charset=utf-8");
resp.setContentLength(xml.getBytes("UTF-8").length);
IOUtils.write(xml, resp.getOutputStream(), "UTF-8");
log.debug("Response: " + xml);
} catch (Exception e) {
e.printStackTrace();
}
}
/**
 * Copies the AppliesTo endpoint address from the incoming RST into the
 * response, so the issued token is scoped to the requested relying party.
 *
 * @param rst  the incoming RequestSecurityToken (may carry an AppliesTo/EPR)
 * @param rstr the response being built; an AppliesTo element is appended when
 *             the request contained one
 * @return the endpoint address string, or {@code null} when the request had
 *         no AppliesTo endpoint reference
 */
private String setAppliesTo(RequestSecurityToken rst, RequestSecurityTokenResponse rstr) {
    EndpointReference requestEpr =
            SAMLUtil.getFirstElement(SAMLUtil.getFirstElement(rst, AppliesTo.class), EndpointReference.class);
    log.debug("AppliesTo EPR: " + requestEpr);
    if (requestEpr == null) {
        return null;
    }
    String endpointAddress = requestEpr.getAddress().getValue();
    // Rebuild a fresh AppliesTo/EPR/Address chain for the response rather than
    // reusing the request's XMLObjects.
    Address responseAddress = SAMLUtil.buildXMLObject(Address.class);
    responseAddress.setValue(endpointAddress);
    EndpointReference responseEpr = SAMLUtil.buildXMLObject(EndpointReference.class);
    responseEpr.setAddress(responseAddress);
    AppliesTo responseAppliesTo = SAMLUtil.buildXMLObject(AppliesTo.class);
    responseAppliesTo.getUnknownXMLObjects().add(responseEpr);
    rstr.getUnknownXMLObjects().add(responseAppliesTo);
    return endpointAddress;
}
/**
 * Builds a wsse:SecurityTokenReference pointing at the issued assertion by
 * its SAML ID, typed as a SAML 2.0 token reference.
 *
 * @param assertion the issued assertion whose ID is referenced
 * @return a token reference containing a single KeyIdentifier
 */
private SecurityTokenReference generateTokenReference(Assertion assertion) {
    // KeyIdentifier: the assertion ID, value-typed as a SAML ID, with no
    // encoding type attribute.
    KeyIdentifier keyIdentifier = SAMLUtil.buildXMLObject(KeyIdentifier.class);
    keyIdentifier.setValue(assertion.getID());
    keyIdentifier.getUnknownAttributes().put(TrustConstants.VALUE_TYPE, TrustConstants.SAMLID);
    keyIdentifier.setEncodingType(null);
    // Wrap it in a SecurityTokenReference marked with the SAML 2.0 TokenType.
    SecurityTokenReference tokenReference = SAMLUtil.buildXMLObject(SecurityTokenReference.class);
    tokenReference.getUnknownAttributes().put(TrustConstants.TOKEN_TYPE, TrustConstants.TOKEN_TYPE_SAML_20);
    tokenReference.getUnknownXMLObjects().add(keyIdentifier);
    return tokenReference;
}
/**
 * Builds, signs and returns the SAML 2.0 assertion to be issued.
 *
 * @param req        the servlet request; its URL becomes the holder-of-key
 *                   SubjectConfirmation NameID
 * @param bootstrap  optional bootstrap assertion; when present its subject
 *                   NameID and (claim-filtered) attribute statements are copied
 * @param to         audience/AppliesTo address, used for the audience condition
 * @param x509       base64 certificate value placed in the confirmation KeyInfo
 * @param credential credential used to sign the assertion
 * @param expire     NotOnOrAfter instant for the conditions
 * @param claims     optional WS-Trust Claims element filtering which bootstrap
 *                   attributes are copied
 * @return the signed assertion, re-unmarshalled from the signed DOM
 */
private Assertion generateAssertion(HttpServletRequest req, OIOAssertion bootstrap, String to, String x509, BasicX509Credential credential, DateTime expire, Claims claims) {
    Assertion a = SAMLUtil.buildXMLObject(Assertion.class);
    a.setID(Utils.generateUUID());
    a.setIssueInstant(new DateTime(DateTimeZone.UTC));
    a.setIssuer(SAMLUtil.createIssuer(cfg.getString("sts.entityId")));
    Subject subject = SAMLUtil.buildXMLObject(Subject.class);
    // Carry over the subject identity from the bootstrap token, if present.
    if (bootstrap != null) {
        NameID nameID = bootstrap.getAssertion().getSubject().getNameID();
        NameID nameId = SAMLUtil.createNameID(nameID.getValue());
        nameId.setFormat(nameID.getFormat());
        subject.setNameID(nameId);
    }
    // Holder-of-key confirmation: the confirming entity is this STS endpoint.
    SubjectConfirmation confirmation = SAMLUtil.buildXMLObject(SubjectConfirmation.class);
    confirmation.setMethod(OIOSAMLConstants.METHOD_HOK);
    confirmation.setNameID(SAMLUtil.createNameID(req.getRequestURL().toString()));
    confirmation.getNameID().setFormat(NameIDFormat.ENTITY.getFormat());
    SubjectConfirmationData data = SAMLUtil.buildXMLObject(SubjectConfirmationData.class);
    // NOTE(review): the next line appears truncated in the extracted source
    // (the QName URI literal is cut off mid-string) — recover the original
    // attribute QName from version control before relying on this file.
    data.getUnknownAttributes().put(new QName("http:
    // The proof key: the caller's certificate embedded as ds:KeyInfo/X509Data.
    KeyInfo keyInfo = SAMLUtil.buildXMLObject(KeyInfo.class);
    X509Data x509data = SAMLUtil.buildXMLObject(X509Data.class);
    X509Certificate cert = SAMLUtil.buildXMLObject(X509Certificate.class);
    cert.setValue(x509);
    x509data.getX509Certificates().add(cert);
    keyInfo.getX509Datas().add(x509data);
    data.getUnknownXMLObjects().add(keyInfo);
    confirmation.setSubjectConfirmationData(data);
    subject.getSubjectConfirmations().add(confirmation);
    a.setSubject(subject);
    // Audience restriction + validity window.
    a.setConditions(SAMLUtil.createAudienceCondition(to));
    a.getConditions().setNotOnOrAfter(expire);
    // Copy attribute statements from the bootstrap assertion when either no
    // Claims filter was supplied or the filter is non-empty.
    // NOTE(review): the "claims != null &&" inside the parentheses is redundant
    // given the preceding "claims == null ||".
    if (bootstrap != null && (claims == null || (claims != null && !claims.getUnknownXMLObjects().isEmpty()))) {
        for (AttributeStatement as : bootstrap.getAssertion().getAttributeStatements()) {
            AttributeStatement newAs = SAMLUtil.buildXMLObject(AttributeStatement.class);
            for (Attribute attr : as.getAttributes()) {
                // Skip attributes not requested by the Claims filter.
                if (!hasClaim(attr.getName(), claims)) continue;
                Attribute newAttr = SAMLUtil.buildXMLObject(Attribute.class);
                newAs.getAttributes().add(newAttr);
                newAttr.setFriendlyName(attr.getFriendlyName());
                newAttr.setName(attr.getName());
                newAttr.setNameFormat(attr.getNameFormat());
                newAttr.getAttributeValues().add(AttributeUtil.createAttributeValue(AttributeUtil.extractAttributeValueValue(attr)));
            }
            // Only attach statements that ended up non-empty after filtering.
            if (!newAs.getAttributes().isEmpty()) {
                a.getAttributeStatements().add(newAs);
            }
        }
    }
    // Declare xs/xsi prefixes so attribute values using xsi:type marshal cleanly.
    a.addNamespace(new Namespace(XMLConstants.W3C_XML_SCHEMA_NS_URI, "xs"));
    a.addNamespace(new Namespace(XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, "xsi"));
    // OIOAssertion oa = new OIOAssertion(SAMLUtil.clone(a));
    // oa.sign(credential);
    // return (Assertion) SAMLUtil.unmarshallElementFromString(oa.toXML());
    try {
        // Sign via the local JSR-105 path and re-unmarshal the signed DOM so the
        // returned Assertion carries the signature.
        Element signed = sign(a, credential);
        log.debug("Signed assertion: " + XMLHelper.nodeToString(signed));
        return (Assertion) SAMLUtil.unmarshallElement(signed);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Signs a SAML assertion with an enveloped XML signature using the JSR-105
 * (javax.xml.crypto.dsig) API.
 *
 * @param assertion  the assertion to sign; it is marshalled to DOM first
 * @param credential supplies the private key and the certificate for KeyInfo
 * @return the root element of a new DOM tree containing the signed assertion
 * @throws NoSuchAlgorithmException if a signature/digest algorithm is missing
 * @throws InvalidAlgorithmParameterException on bad canonicalization params
 * @throws MarshalException if the signature cannot be marshalled
 * @throws XMLSignatureException if signing fails
 */
public Element sign(Assertion assertion, X509Credential credential) throws NoSuchAlgorithmException, InvalidAlgorithmParameterException, MarshalException, XMLSignatureException {
    // Objects to sign mapped to their ID attribute values (only the assertion today).
    Map<XMLObject, String> references = new HashMap<XMLObject, String>();
    references.put(assertion, assertion.getID());
    XMLSignatureFactory xsf = SignatureFactory.getInstance();
    // Exclusive C14N + RSA-SHA1. NOTE(review): SHA-1 is deprecated for new
    // signatures; consider RSA_SHA256 if relying parties support it.
    CanonicalizationMethod canonicalizationMethod = xsf.newCanonicalizationMethod(CanonicalizationMethod.EXCLUSIVE, (C14NMethodParameterSpec) null);
    SignatureMethod signatureMethod = xsf.newSignatureMethod(SignatureMethod.RSA_SHA1, null);
    KeyInfoFactory keyInfoFactory = xsf.getKeyInfoFactory();
    // KeyInfo carries the signing certificate so verifiers can locate the key.
    javax.xml.crypto.dsig.keyinfo.KeyInfo ki = keyInfoFactory.newKeyInfo(Collections.singletonList(keyInfoFactory.newX509Data(Collections.singletonList(credential.getEntityCertificate()))));
    List<Reference> refs = new ArrayList<Reference>();
    DigestMethod digestMethod = xsf.newDigestMethod(DigestMethod.SHA1, null);
    // Enveloped-signature transform plus exclusive C14N on each reference.
    List<Transform> transforms = new ArrayList<Transform>(2);
    transforms.add(xsf.newTransform("http://www.w3.org/2000/09/xmldsig#enveloped-signature", (TransformParameterSpec)null));
    transforms.add(xsf.newTransform("http://www.w3.org/2001/10/xml-exc-c14n#",(ExcC14NParameterSpec)null));
    // One same-document URI reference ("#<id>") per object being signed.
    for (Map.Entry<XMLObject, String> ref : references.entrySet()) {
        Reference r = xsf.newReference("#"+ref.getValue(), digestMethod, transforms, null, null);
        refs.add(r);
    }
    SAMLUtil.marshallObject(assertion);
    // Create the SignedInfo
    SignedInfo signedInfo = xsf.newSignedInfo(canonicalizationMethod, signatureMethod, refs);
    String signatureId = Utils.generateUUID();
    XMLSignature signature = xsf.newXMLSignature(signedInfo, ki, null, signatureId, null);
    // Round-trip the marshalled assertion through a string so signing operates
    // on an independent DOM copy.
    String xml = XMLHelper.nodeToString(assertion.getDOM());
    log.debug("Signing assertion: " + xml);
    Element element = SAMLUtil.loadElementFromString(xml);
    // The ds:Signature is inserted before the saml:Subject element —
    // presumably to satisfy the SAML assertion content ordering; confirm
    // against the SAML 2.0 core schema.
    Node next = element.getElementsByTagNameNS(Subject.DEFAULT_ELEMENT_NAME.getNamespaceURI(), Subject.DEFAULT_ELEMENT_LOCAL_NAME).item(0);
    DOMSignContext signContext = new DOMSignContext(credential.getPrivateKey(), element, next);
    signContext.putNamespacePrefix(SAMLConstants.XMLSIG_NS, SAMLConstants.XMLSIG_PREFIX);
    signContext.putNamespacePrefix(SAMLConstants.XMLENC_NS, SAMLConstants.XMLENC_PREFIX);
    // Mark the ID attributes as DOM IDs so the "#<id>" references resolve.
    for (XMLObject o : references.keySet()) {
        fixIdAttributes(element, o);
    }
    // Marshal, generate (and sign) the detached XMLSignature. The DOM
    // Document will contain the XML Signature if this method returns
    // successfully.
    // HIERARCHY_REQUEST_ERR: Raised if this node is of a type that does not allow children of the type of the newChild node, or if the node to insert is one of this node's ancestors.
    signature.sign(signContext);
    return element;
}
/**
 * Decides whether an attribute name is covered by the WS-Trust Claims filter.
 * A {@code null} filter means "no filtering": every attribute is accepted.
 *
 * @param attribute the attribute name to look up
 * @param claims    the Claims element holding ClaimType children, or null
 * @return true when no filter is present or a ClaimType URI matches
 */
private boolean hasClaim(String attribute, Claims claims) {
    if (claims == null) {
        return true;
    }
    for (XMLObject candidate : claims.getUnknownXMLObjects(ClaimType.ELEMENT_NAME)) {
        if (attribute.equals(((ClaimType) candidate).getUri())) {
            return true;
        }
    }
    return false;
}
/**
 * Registers the "ID"/"wsu:Id" attributes of all elements matching the given
 * XMLObject's QName as DOM ID attributes, so same-document signature
 * references ("#id") can be resolved.
 *
 * @param env the root element to search under
 * @param obj the signed object whose element name identifies the targets;
 *            ignored when null
 */
private void fixIdAttributes(Element env, XMLObject obj) {
    if (obj == null) {
        return;
    }
    if (log.isDebugEnabled()) {
        log.debug("Fixing id attribute on " + obj);
    }
    NodeList matches = env.getElementsByTagNameNS(obj.getDOM().getNamespaceURI(), obj.getDOM().getLocalName());
    for (int idx = 0; idx < matches.getLength(); idx++) {
        Element candidate = (Element) matches.item(idx);
        // Plain "ID" attribute (SAML style).
        if (candidate.hasAttribute("ID")) {
            candidate.setIdAttributeNS(null, "ID", true);
        }
        // Namespaced wsu:Id attribute (WS-Security style).
        if (candidate.hasAttributeNS(WSSecurityConstants.WSU_NS, "Id")) {
            candidate.setIdAttributeNS(WSSecurityConstants.WSU_NS, "Id", true);
        }
    }
}
}
|
package com.sometrik.framework;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import com.android.trivialdrivesample.util.IabException;
import com.android.trivialdrivesample.util.IabHelper;
import com.android.trivialdrivesample.util.IabHelper.IabAsyncInProgressException;
import com.android.trivialdrivesample.util.Inventory;
import com.sometrik.framework.FWActionBar.ActionBarItem;
import android.accounts.Account;
import android.accounts.AccountManager;
import android.app.Activity;
import android.app.ActivityManager;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.ApplicationInfo;
import android.content.pm.ConfigurationInfo;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.res.AssetManager;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.support.v4.view.GravityCompat;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Display;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.WindowManager;
import android.view.animation.Animation;
import android.view.animation.TranslateAnimation;
import android.widget.FrameLayout;
public class FrameWork extends Activity {
// ---- Persistent settings ----
private SharedPreferences prefs;            // app-scoped key/value store ("app", MODE_PRIVATE)
private SharedPreferences.Editor editor;    // shared editor for prefs (see addToPrefs)
private FrameWork frameWork;
private double updateTimer = 0;
// ---- In-app billing ----
private IabHelper purchaseHelper;           // created lazily by initializePurchaseHelper
private static final int RESULT_SETTINGS = 1;
private Inventory inventory;                // cached result of the last inventory query
// ---- Display / view state ----
private DisplayMetrics displayMetrics;      // refreshed by setupDisplayMetrics()
private View currentlyShowingView;
private Charset utf8_charset;               // cached UTF-8 charset (see getCharset)
private FWDrawerLayout drawerLayout;        // navigation drawer container
private int currentDrawerViewId;            // 0 == no drawer view configured
private boolean drawMode = false;
public FWActionBar actionBar;
private Locale defaultLocale;               // locale at startup; updated on language change
private float screenHeight;                 // usable height (status bar subtracted)
private float screenWidth;
public Handler mainHandler;                 // marshals native commands onto the UI thread
private Intent dialogIntent;
private AlertDialog.Builder builder;
private AlertDialog alert;
private float windowYcoords;
// Registry of framework views keyed by their element id.
public HashMap<Integer, NativeCommandHandler> views = new HashMap<Integer, NativeCommandHandler>();
private int appId = 0;                      // native application handle
private int currentView = 0;                // element id of the view currently shown (0 == none)
// True while a view-transition animation runs; input events are suppressed then.
public static boolean transitionAnimation = false;
public BitmapCache bitmapCache;
// ---- JNI bridge: events pushed from Java into native code ----
private native void textChangedEvent(int id, byte[] textValue);
public native void intChangedEvent(int id, int changedInt, int changedInt2);
public native void visibilityChangedEvent(int id, boolean visible);
public native void keyPressed(int keyId, int viewId);
public native void touchEvent(int viewId, int mode, int fingerIndex, double timestamp, float x, float y);
public native void flushTouchEvent(double timestamp, int viewId, int mode);
public native void onInit(AssetManager assetManager, int xSize, int ySize, float displayScale, String email, String language, String country, String version);
public native void nativeSetSurface(Surface surface, int surfaceId, int gl_version, int width, int height);
public native void nativeSurfaceDestroyed(int surfaceId, int gl_version);
// ---- JNI bridge: Android lifecycle notifications ----
public native void nativeOnResume(int appId);
public native void nativeOnPause(int appId);
public native void nativeOnStop(int appId);
public native void nativeOnStart(int appId);
public native void nativeOnDestroy(int appId);
public native void nativeAddPreference(String key, String value);
public native void nativeScrollChanged(int viewId, int scrollPos, int scrollRem, int height);
// ---- JNI bridge: image loading requests ----
public native void sendURLImageRequest(int viewId, String url, int width, int height, int internalFormat);
public native void sendImageRequest(int viewId, int width, int height, int internalFormat);
public native void cancelImageRequest(int viewId);
public native void showNativeDebug();
private native void setNativeActiveView(int activeView, boolean recordHistory);
private native void languageChanged(int appId, String language);
private native void memoryWarning(int appId);
public static native void onPurchaseEvent(int applicationId, String orderId, boolean newPurchase, double purchaseTime);
public static native void onResize(float width, float height, int viewId);
public static native void onUpdate(double timestamp, int viewId);
public static native void timerEvent(int viewId, int timerId);
/**
 * Activity entry point: applies a version-appropriate theme, initializes
 * preferences, action bar, drawer, display metrics, the UI-thread handler for
 * native commands, and finally boots the native side.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    System.out.println("onCreate called");
    // Pick a platform-appropriate light theme; the commented lines are the
    // corresponding dark variants.
    // BUG FIX: the original unconditionally re-applied Theme_Holo_Light after
    // this branch, defeating the version check (Holo themes also do not exist
    // before API 11). The redundant override has been removed.
    if (Build.VERSION.SDK_INT <= 10) {
        // this.setTheme(android.R.style.Theme);
        this.setTheme(android.R.style.Theme_Light);
    } else if (Build.VERSION.SDK_INT >= 21) {
        // this.setTheme(android.R.style.Theme_Material);
        this.setTheme(android.R.style.Theme_Material_Light);
    } else {
        // this.setTheme(android.R.style.Theme_DeviceDefault);
        // this.setTheme(android.R.style.Theme_Holo);
        this.setTheme(android.R.style.Theme_Holo_Light);
    }
    super.onCreate(savedInstanceState);
    utf8_charset = Charset.forName("UTF-8");
    defaultLocale = Locale.getDefault();
    System.out.println("Users preferred locale: " + defaultLocale.getCountry() + " Language: " + defaultLocale.getLanguage());
    prefs = getSharedPreferences("app", Context.MODE_PRIVATE);
    editor = prefs.edit();
    // ActionBar is hidden by default; home-as-up stays enabled so it can
    // toggle the navigation drawer (see onOptionsItemSelected).
    getActionBar().hide();
    getActionBar().setDisplayHomeAsUpEnabled(true);
    // Init NavigationBar
    drawerLayout = new FWDrawerLayout(this);
    currentDrawerViewId = 0;
    // Init for screen settings
    setupDisplayMetrics();
    bitmapCache = new BitmapCache(getAssets(), displayMetrics.scaledDensity);
    final FrameWork framework = this;
    // UI-thread handler: msg.what == 1 applies a batch of native commands,
    // msg.what == 2 delivers a decoded Bitmap to the view in msg.arg1.
    mainHandler = new Handler() {
        public void handleMessage(Message msg) {
            if (msg.what == 1) {
                try {
                    NativeCommandTransaction transaction = (NativeCommandTransaction) msg.obj;
                    for (NativeCommand command : transaction.getCommands()) {
                        command.apply(framework.views.get(command.getInternalId()));
                    }
                } catch (Throwable t) {
                    t.printStackTrace();
                }
            } else if (msg.what == 2) {
                try {
                    NativeCommandHandler view = framework.views.get(msg.arg1);
                    if (view != null) {
                        view.setBitmap((Bitmap) msg.obj);
                    }
                } catch (Throwable t) {
                    t.printStackTrace();
                }
            }
        }
    };
    System.out.println("initing native on onCreate");
    initNative();
}
/**
 * Creates the in-app-billing helper with the app's public key and starts its
 * asynchronous setup.
 *
 * @param key      the application's Base64 public license key
 * @param listener notified when IAB setup completes
 * @return always Boolean.TRUE (setup result arrives via the listener)
 */
public Boolean initializePurchaseHelper(String key, IabHelper.OnIabSetupFinishedListener listener) {
// Get PurchaseHelper. Requires App public key
purchaseHelper = new IabHelper(this, key);
purchaseHelper.startSetup(listener);
return true;
}
// Account access is currently disabled; always reports "no permission" so
// getUserGoogleAccountEmail() returns an empty string.
private boolean hasAccountPermission() {
return false;
}
/**
 * Returns the first Google account email registered on the device, or an
 * empty string when permission is missing or no Google account exists.
 */
private String getUserGoogleAccountEmail() {
    // Guard clause: without the accounts permission we never touch AccountManager.
    if (!hasAccountPermission()) {
        return "";
    }
    Log.d("accountFinder", "Checking for user Google Account");
    String gmail = "";
    for (Account account : AccountManager.get(this).getAccounts()) {
        if (account.type.equalsIgnoreCase("com.google")) {
            gmail = account.name;
            Log.d("accountFinder", "Google Account found: " + gmail);
            break;
        }
    }
    if (gmail.isEmpty()) {
        Log.d("accountFinder", "No user Google Account found");
    }
    return gmail;
}
/**
 * Queries the billing helper for the current purchase inventory.
 * The result is also cached in the {@code inventory} field.
 *
 * @return the inventory, or null when the query throws
 */
public Inventory getPurchaseHelperInventory() {
    System.out.println("about to query purchaseHelper inventory");
    try {
        inventory = purchaseHelper.queryInventory();
        return inventory;
    } catch (IabException e) {
        System.out.println("Exception getting inventory with message: " + e.getMessage());
        e.printStackTrace();
        return null;
    }
}
/**
 * Boots the native side: replays stored preferences, gathers screen size,
 * account email, locale and app version, then calls the native onInit.
 */
private void initNative() {
System.out.println("Display scale: density = " + displayMetrics.scaledDensity + ", dpi = " + displayMetrics.densityDpi);
int xSize = displayMetrics.widthPixels;
int ySize = displayMetrics.heightPixels;
initNativePreferences();
// Best effort: versionName stays null if the package lookup fails.
String versionName = null;
try {
versionName = getPackageManager().getPackageInfo(getPackageName(), 0).versionName;
} catch (NameNotFoundException e) {
System.out.println("versionName not found");
e.printStackTrace();
}
onInit(getAssets(), xSize, ySize, displayMetrics.scaledDensity, getUserGoogleAccountEmail(), defaultLocale.getLanguage(), defaultLocale.getCountry(), versionName);
}
/**
 * Replays every persisted SharedPreferences entry into the native side via
 * {@link #nativeAddPreference}, so native code starts with the stored state.
 */
public void initNativePreferences() {
    Map<String, ?> allPrefs = prefs.getAll();
    // BUG FIX: the original logged the null/empty case but then iterated the
    // map anyway (NPE on a null map). Return early instead.
    if (allPrefs == null || allPrefs.isEmpty()) {
        System.out.println("no preferences found");
        return;
    }
    for (Entry<String, ?> entry : allPrefs.entrySet()) {
        System.out.println("found pref - key: " + entry.getKey() + " value: " + entry.getValue());
        // String.valueOf replaces the unchecked Map<String, String> cast of the
        // original, which could fail if a non-String preference were stored.
        nativeAddPreference(entry.getKey(), String.valueOf(entry.getValue()));
    }
}
/**
 * Resolves the application's display name, preferring the localized label
 * resource and falling back to the non-localized label.
 *
 * @param context any context of the application
 * @return the human-readable application name
 */
public static String getApplicationName(Context context) {
    ApplicationInfo info = context.getApplicationInfo();
    if (info.labelRes == 0) {
        return info.nonLocalizedLabel.toString();
    }
    return context.getString(info.labelRes);
}
// Get screen settings
/**
 * Refreshes {@code displayMetrics} from the default display and updates the
 * cached screen width/height; the status bar height (when resolvable) is
 * subtracted from the usable screen height.
 *
 * @return the freshly populated DisplayMetrics
 */
public DisplayMetrics setupDisplayMetrics() {
displayMetrics = new DisplayMetrics();
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
display.getMetrics(displayMetrics);
screenWidth = displayMetrics.widthPixels;
screenHeight = displayMetrics.heightPixels;
// Look up the platform's status bar height resource; 0 when not found.
Resources resources = getResources();
int resourceId = resources.getIdentifier( "status_bar_height", "dimen", "android" );
int statusHeight = ( resourceId > 0 ) ? resources.getDimensionPixelSize( resourceId ) : 0;
System.out.println("statusHeight: " + statusHeight);
screenHeight -= statusHeight;
return displayMetrics;
}
// Shared editor for the app preferences (same instance used by addToPrefs).
public SharedPreferences.Editor getPreferencesEditor() { return editor; }
// Registers a framework view in the id -> handler registry.
public void addToViewList(NativeCommandHandler view) {
views.put(view.getElementId(), view);
}
// Opens the given URL in the device's default browser.
public void launchBrowser(String url) {
Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
startActivity(browserIntent);
}
// Deinitializes (if present) and removes a view from the registry.
public void removeViewFromList(int viewId) {
NativeCommandHandler view = views.get(viewId);
if (view != null) view.deinitialize();
views.remove(viewId);
}
// Currently a no-op: the original hide-keyboard code is commented out below.
public void hideSoftKeyboard() {
// InputMethodManager imm = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE);
// if (views.get(getCurrentViewId()) instanceof View) {
// imm.hideSoftInputFromWindow(((View)views.get(getCurrentViewId())).getApplicationWindowToken(), 0);
}
/**
 * Forwards an integer value change to native code, unless a view transition
 * animation is in progress (clicks during transitions are dropped).
 */
public void sendNativeValueEvent(int elementId, int value, int value2) {
    if (transitionAnimation) {
        System.out.println("click while transitioning");
        return;
    }
    System.out.println("click while not transitioning");
    intChangedEvent(elementId, value, value2);
}
/**
 * Forwards a text change (UTF-8 bytes) to native code; silently dropped
 * while a transition animation runs.
 */
public void sendNativeValueEvent(int elementId, byte[] value) {
    if (!transitionAnimation) {
        textChangedEvent(elementId, value);
    }
}
/**
 * Switches the visible view with a horizontal slide-in animation.
 * Sets {@code transitionAnimation} while the slide runs so input events are
 * suppressed (cleared again in the animation-end listener).
 *
 * @param view                   the view to show
 * @param animation              unused here; the slide is built locally
 *                               (note: only the X offset parameter is used)
 * @param newViewAnimationFromX  starting X offset the new view slides in from
 */
public void setCurrentView(final View view, Animation animation, final int newViewAnimationFromX) {
transitionAnimation = true;
// animation.setAnimationListener(new Animation.AnimationListener() {
// @Override
// public void onAnimationStart(Animation animation) {
currentView = view.getId();
currentlyShowingView = view;
// Clear any previous content from the drawer container before re-adding.
if (drawerLayout.getChildCount() > 0) {
drawerLayout.removeAllViews();
}
// When a drawer view is configured and resolvable, host the new view inside
// the drawer layout; otherwise show it directly.
if (currentDrawerViewId != 0) {
NativeCommandHandler drawerView = views.get(currentDrawerViewId);
if (drawerView != null) {
drawerLayout.addView(view);
drawerLayout.addView((View) drawerView);
setContentView(drawerLayout);
} else {
setContentView(view);
}
} else {
setContentView(view);
}
TranslateAnimation q;
// if (recordHistory) {
q = new TranslateAnimation(newViewAnimationFromX, 0, 0, 0);
// } else {
// q = new TranslateAnimation(-1000, 0, 0, 0);
// Tell native which view is active; history recording is disabled here.
setNativeActiveView(view.getId(), false);
q.setAnimationListener(new Animation.AnimationListener() {
@Override
public void onAnimationEnd(Animation animation) { transitionAnimation = false; }
@Override
public void onAnimationRepeat(Animation animation) { }
@Override
public void onAnimationStart(Animation animation) { }
});
q.setDuration(200);
view.startAnimation(q);
// @Override
// public void onAnimationEnd(Animation animation) {
// @Override
// public void onAnimationRepeat(Animation animation) {
// View sadas = (View) views.get(currentView);
// sadas.startAnimation(animation);
}
/**
 * Switches the visible view without animation. No-op when the view is
 * already showing. Hosts the view inside the drawer layout when a drawer
 * view is configured and resolvable.
 */
public void setCurrentView(final View view) {
    if (currentView == view.getId()) {
        System.out.println("view already set");
        return;
    }
    currentView = view.getId();
    currentlyShowingView = view;
    if (drawerLayout.getChildCount() > 0) {
        drawerLayout.removeAllViews();
    }
    System.out.println("currentDrawerViewId: " + currentDrawerViewId);
    NativeCommandHandler drawerView = (currentDrawerViewId != 0) ? views.get(currentDrawerViewId) : null;
    if (drawerView != null) {
        // Content + drawer hosted together inside the drawer layout.
        drawerLayout.addView(view);
        drawerLayout.addView((View) drawerView);
        setContentView(drawerLayout);
    } else {
        if (currentDrawerViewId != 0) {
            System.out.println("no navigation drawer element found on id: " + currentDrawerViewId);
        }
        setContentView(view);
    }
    setNativeActiveView(view.getId(), false);
}
// Element id of the currently visible view (0 when none has been set).
public int getCurrentViewId() { return currentView; }
/**
 * Creates a NativeSurface sized to the usable screen, wires its touch
 * listener and surface lifecycle callbacks to native code, and registers it
 * in the view registry.
 *
 * @param id element id assigned to the surface view
 * @return the newly created surface view
 */
public NativeSurface createNativeOpenGLView(final int id) {
final ActivityManager activityManager = (ActivityManager) getSystemService(Context.ACTIVITY_SERVICE);
final ConfigurationInfo configurationInfo = activityManager.getDeviceConfigurationInfo();
// Device's supported GLES version, forwarded to the native renderer.
final int gl_version = configurationInfo.reqGlEsVersion;
System.out.println("about to create native surface. gl_version: " + gl_version);
NativeSurface surfaceView = new NativeSurface(this);
surfaceView.setId(id);
surfaceView.setLayoutParams(new FrameLayout.LayoutParams((int)screenWidth, (int)screenHeight));
surfaceView.setOnTouchListener(new MyOnTouchListener(this, id));
SurfaceHolder holder = surfaceView.getHolder();
// Bridge Android surface lifecycle events into native code.
holder.addCallback(new Callback() {
public void surfaceDestroyed(SurfaceHolder holder) {
System.out.println("surfaceDestroyed");
nativeSurfaceDestroyed(id, gl_version);
}
public void surfaceCreated(SurfaceHolder holder) {
// Re-read metrics: the screen size may have changed before creation.
setupDisplayMetrics();
System.out.println("surfaceCreated. Width: " + screenWidth + " height: " + screenHeight + " id: " + id);
nativeSetSurface(holder.getSurface(), id, gl_version, (int)screenWidth, (int)screenHeight);
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
System.out.println("nativeSurfaceChanged. Width: " + width + " height: " + height + " id: " + id);
onResize(width, height, id);
System.out.println("native surface has been set");
}
});
System.out.println("...");
views.put(id, surfaceView);
if (currentView == 0){
System.out.println("no current view set. showing created surfaceView");
//Set value shows view
surfaceView.setValue(1);
}
System.out.println("native surface created");
return surfaceView;
}
// No-op: action bar title updates are not implemented yet.
public void setActionBarTitle(String title){
}
//Screen touchevent listener. Will send information to MyGLSurfaceView messagehandler
// Screen touch-event listener: translates MotionEvents into native
// touchEvent/flushTouchEvent calls for the view it is attached to.
// Touch modes passed to native: 1 = down, 2 = move, 3 = up.
private class MyOnTouchListener implements OnTouchListener {
    FrameWork frameWork;
    int viewId;
    public MyOnTouchListener(FrameWork frameWork, int viewId) {
        this.frameWork = frameWork;
        this.viewId = viewId;
    }
    public void onClick(View v) {
        System.out.println("Click happened");
    }
    @Override
    public boolean onTouch(View v, MotionEvent event) {
        int action = event.getAction() & MotionEvent.ACTION_MASK;
        int pointerIndex = (event.getAction() & MotionEvent.ACTION_POINTER_INDEX_MASK) >> MotionEvent.ACTION_POINTER_INDEX_SHIFT;
        int fingerId = event.getPointerId(pointerIndex);
        // BUG FIX: the timestamp was previously recomputed for every native
        // call, so touchEvent and its matching flushTouchEvent could carry
        // different timestamps. Compute it once per MotionEvent instead.
        double timestamp = System.currentTimeMillis() / 1000.0;
        switch (action) {
        // Touch event of screen touch-down for the first finger
        case MotionEvent.ACTION_DOWN:
        // Touch event of screen touch-down after the first touch
        case MotionEvent.ACTION_POINTER_DOWN:
            System.out.println("Liike alkoi: " + event.getX(event.getActionIndex()) + " " + event.getY(event.getActionIndex()) + " - id: " + fingerId);
            touchEvent(viewId, 1, fingerId, timestamp, (int) event.getX(event.getActionIndex()), (int) (event.getY(event.getActionIndex())));
            flushTouchEvent(timestamp, viewId, 1);
            break;
        // Touch event of finger moving: report every active pointer.
        case MotionEvent.ACTION_MOVE:
            int pointerCount = event.getPointerCount();
            for (int i = 0; i < pointerCount; i++) {
                touchEvent(viewId, 2, i, timestamp, (int) event.getX(i), (int) (event.getY(i)));
            }
            flushTouchEvent(timestamp, viewId, 2);
            break;
        // Touch event of first finger being removed from the screen
        case MotionEvent.ACTION_UP:
        // Touch event of fingers other than the first leaving the screen
        case MotionEvent.ACTION_POINTER_UP:
            System.out.println("Liike loppui POINTER_UP: " + event.getX(event.getActionIndex()) + " " + event.getY(event.getActionIndex()) + " - id: " + fingerId);
            touchEvent(viewId, 3, fingerId, timestamp, (int) event.getX(event.getActionIndex()), (int) (event.getY(event.getActionIndex())));
            flushTouchEvent(timestamp, viewId, 3);
            break;
        }
        return true;
    }
}
/**
 * Populates the options menu from the framework action bar's item list
 * (when one is configured); each item shows its icon in the bar if room.
 */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    if (actionBar != null) {
        for (ActionBarItem item : actionBar.getItemList()) {
            System.out.println("onCreateOptionsMenu add new Item " + item.id);
            menu.add(0, item.id, 0, "item")
                .setIcon(item.picture)
                .setShowAsAction(MenuItem.SHOW_AS_ACTION_IF_ROOM);
        }
    }
    System.out.println("onCreateOptionsMenu");
    return true;
}
/**
 * Forwards menu selections to native code and toggles the navigation drawer
 * when the action-bar home ("up") affordance is tapped.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    System.out.println("onOptionsItemSelected Item " + actionBar.getElementId() + " " + item.getItemId() + " " + item.getGroupId());
    intChangedEvent(actionBar.getElementId(), item.getItemId(), item.getItemId());
    // FIX: replaced the magic constant 16908332 with its symbolic name
    // android.R.id.home (same value, stable platform id).
    if (item.getItemId() == android.R.id.home) {
        if (drawerLayout.isDrawerOpen(Gravity.LEFT)) {
            drawerLayout.closeDrawer(Gravity.LEFT);
        } else {
            drawerLayout.openDrawer(Gravity.LEFT);
        }
    }
    return super.onOptionsItemSelected(item);
}
/**
 * Key handling: a BACK press first closes an open navigation drawer;
 * otherwise the key is forwarded to native code. Keys are swallowed
 * (return true) in all cases, including during transition animations.
 */
@Override
public boolean onKeyDown(int keycode, KeyEvent e) {
System.out.println("KeyEvent. KeyCode: " + keycode + " ViewId: " + findViewById(android.R.id.content).getRootView().getId());
// if (e.getAction() == KeyEvent.ACTION_MULTIPLE) {
// System.out.println("KeyEvent ACTION_MULTIPLE. KeyCode: " + keycode + " ViewId: " + findViewById(android.R.id.content).getRootView().getId());
// return true;
if (!transitionAnimation) {
// BACK closes the drawer instead of reaching native, when it is open.
if (keycode == KeyEvent.KEYCODE_BACK && drawerLayout != null) {
if (drawerLayout.isDrawerOpen(Gravity.LEFT)) {
drawerLayout.closeDrawer(Gravity.LEFT);
return true;
}
}
keyPressed(e.getKeyCode(), currentView);
}
// NOTE(review): always returning true also consumes keys the app does not
// handle (e.g. volume keys) — confirm this is intended.
return true;
}
/**
 * Shows a simple list dialog titled "Options Menu"; selections are currently
 * only logged (ids map positionally from names to idArray).
 *
 * @param idArray ids parallel to {@code names}
 * @param names   labels shown in the list
 */
private void createOptionsDialog(final int[] idArray, String[] names) {
    DialogInterface.OnClickListener onPick = new DialogInterface.OnClickListener() {
        public void onClick(DialogInterface dialog, int item) {
            System.out.println("item selected: " + item);
            System.out.println("item id: " + idArray[item]);
        }
    };
    new AlertDialog.Builder(this)
            .setTitle("Options Menu")
            .setItems(names, onPick)
            .create()
            .show();
}
// Usable screen width in pixels (set by setupDisplayMetrics).
public float getScreenWidth(){
return screenWidth;
}
// Usable screen height in pixels (status bar already subtracted).
public float getScreenHeight(){
return screenHeight;
}
// Native application handle, assigned from native code.
public void setAppId(int id) { this.appId = id; }
public int getAppId() { return appId; }
// returns database path
public String getDBPath(String dbName) {
System.out.println("getting DBPath _ db: " + dbName + " Path: " + String.valueOf(getDatabasePath(dbName)));
return String.valueOf(getDatabasePath(dbName));
}
/**
 * Posts a batch of native commands (what == 1) to the activity's UI-thread
 * handler for application by handleMessage.
 */
public static void sendTransaction(FrameWork frameWork, NativeCommandTransaction commandTransaction) {
    frameWork.mainHandler.sendMessage(Message.obtain(null, 1, commandTransaction));
}
/**
 * Posts a decoded bitmap (what == 2) for the view identified by internalId
 * to the activity's UI-thread handler.
 */
public static void sendBitmap(FrameWork frameWork, int internalId, Bitmap bitmap) {
    frameWork.mainHandler.sendMessage(Message.obtain(null, 2, internalId, 0, bitmap));
}
/** Stores a string preference and persists it asynchronously. */
public void addToPrefs(String key, String value) {
    editor.putString(key, value);
    editor.apply();
}
/** Reads a string preference; missing keys yield the empty string. */
public String getFromPrefs(String key) {
    return prefs.getString(key, "");
}
/**
 * Receives results from child activities. RESULT_SETTINGS is recognized but
 * currently has no handling (the settings callback is commented out).
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
switch (requestCode) {
case RESULT_SETTINGS:
// showUserSettings();
break;
}
}
/**
 * Handles runtime configuration changes: detects locale/language switches
 * (forwarded to native via languageChanged), refreshes display metrics,
 * reports the new size to native, and notifies every registered view of the
 * orientation change.
 */
@Override
public void onConfigurationChanged(Configuration newConfig) {
    Locale locale = getResources().getConfiguration().locale;
    // BUG FIX: the original compared language codes with '!=' (reference
    // identity on Strings); use equals() for a value comparison.
    if (!locale.getLanguage().equals(defaultLocale.getLanguage())) {
        System.out.println("Language change spotted");
        System.out.println("Previous locale: " + defaultLocale.getCountry() + " Language: " + defaultLocale.getLanguage());
        System.out.println("New locale: " + locale.getCountry() + " Language: " + locale.getLanguage());
        languageChanged(appId, locale.getLanguage());
        defaultLocale = locale;
    }
    // BUG FIX: super.onConfigurationChanged() was called twice in the
    // original (before and after the orientation handling); call it once.
    super.onConfigurationChanged(newConfig);
    displayMetrics = setupDisplayMetrics();
    System.out.println("onConfigChange");
    boolean isLandscape = false;
    if (newConfig.orientation == Configuration.ORIENTATION_PORTRAIT) {
        System.out.println("Orientation conf portrait");
        isLandscape = false;
        onResize(screenWidth, screenHeight, currentView);
        System.out.println("Orientation conf portrait. SWidth: " + screenWidth + " SHeight: " + screenHeight + " currentView: " + currentView);
    } else if (newConfig.orientation == Configuration.ORIENTATION_LANDSCAPE) {
        System.out.println("Orientation conf landscape");
        isLandscape = true;
        onResize(screenWidth, screenHeight, currentView);
        System.out.println("Orientation conf landscape. SWidth: " + screenWidth + " SHeight: " + screenHeight + " currentView: " + currentView);
    }
    for (NativeCommandHandler handler : views.values()) {
        handler.onScreenOrientationChange(isLandscape);
    }
}
/**
 * Measures the view constrained to at most the screen width and returns its
 * measured height.
 */
public int measureViewLength(View view) {
    measureAtMostScreenWidth(view);
    return view.getMeasuredHeight();
}
/**
 * Measures the view constrained to at most the screen width and returns its
 * measured width.
 */
public int measureViewWidth(View view) {
    measureAtMostScreenWidth(view);
    return view.getMeasuredWidth();
}
// Shared measurement pass: width capped at the screen width, height unconstrained.
private void measureAtMostScreenWidth(View view) {
    int widthMeasureSpec = View.MeasureSpec.makeMeasureSpec((int) screenWidth, View.MeasureSpec.AT_MOST);
    int heightMeasureSpec = View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED);
    view.measure(widthMeasureSpec, heightMeasureSpec);
}
// ---- Activity lifecycle: each override logs and/or forwards to native. ----
@Override
public void onSaveInstanceState(Bundle savedInstanceState) {
System.out.println("onSaveInstanceState");
super.onSaveInstanceState(savedInstanceState);
}
@Override
public void onRestoreInstanceState(Bundle savedInstanceState) {
System.out.println("onRestoreInstanceState");
super.onRestoreInstanceState(savedInstanceState);
}
@Override
public void onResume() {
super.onResume();
nativeOnResume(appId);
}
@Override
public void onPause() {
super.onPause();
nativeOnPause(appId);
}
@Override
public void onStop() {
super.onStop();
nativeOnStop(appId);
}
@Override
public void onStart() {
super.onStart();
nativeOnStart(appId);
}
// Forward system memory pressure to native so caches can be dropped.
@Override
public void onLowMemory() {
super.onLowMemory();
memoryWarning(appId);
}
/**
 * Tears down native state before the activity, then disposes the billing
 * helper and finally kills the process.
 */
@Override
public void onDestroy() {
System.out.println("onDestroy called");
// It's important to destroy native before the activity, since
// native stuff may wish to use Framework functionality in their
// destructors
nativeOnDestroy(appId);
if (purchaseHelper != null) {
try {
purchaseHelper.dispose();
} catch (IabAsyncInProgressException e) {
e.printStackTrace();
System.out.println("Error in disposing purchaseHelper with message: " + e.getMessage());
}
}
purchaseHelper = null;
super.onDestroy();
// NOTE(review): System.exit(0) hard-kills the process on destroy —
// presumably to guarantee native state is fully reset; confirm this is
// intentional, as it bypasses normal Android process management.
System.exit(0);
}
public IabHelper getPurchaseHelper() {
return purchaseHelper;
}
// Load JNI. Framework references to make file.
// Runs once when the class loads, before any native* method can be invoked.
static {
    System.out.println("Loading native library");
    System.loadLibrary("framework");
    System.out.println("native library loaded");
}
/**
 * Logs a throwable's stack trace and cause to stdout without rethrowing.
 * NOTE(review): presumably invoked from native code (it is public static
 * and named for that purpose) — confirm against the JNI side.
 */
public static void handleNativeException(Throwable error) {
    System.out.println("handling error");
    error.printStackTrace();
    System.out.println("error cause: " + error.getCause());
}
/** Returns the charset held in {@code utf8_charset}. */
public Charset getCharset() {
    return utf8_charset;
}
/** Returns the id of the view currently shown in the drawer. */
public int getCurrentDrawerViewId() {
    return currentDrawerViewId;
}
/** Records the id of the view currently shown in the drawer. */
public void setCurrentDrawerViewId(int currentDrawerViewId) {
    this.currentDrawerViewId = currentDrawerViewId;
}
/** Returns the activity's drawer layout. */
public FWDrawerLayout getDrawerLayout() {
    return drawerLayout;
}
}
|
package net.kyori.text.format;
import java.util.Collections;
import java.util.EnumSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import net.kyori.text.Component;
import net.kyori.text.event.ClickEvent;
import net.kyori.text.event.HoverEvent;
import net.kyori.text.util.ToStringer;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import static java.util.Objects.requireNonNull;
public final class Style {
  private static final Style EMPTY = new Style(null, TextDecoration.State.NOT_SET, TextDecoration.State.NOT_SET, TextDecoration.State.NOT_SET, TextDecoration.State.NOT_SET, TextDecoration.State.NOT_SET, null, null, null);
  private static final TextDecoration[] DECORATIONS = TextDecoration.values();
  private final @Nullable TextColor color;
  private final TextDecoration.State obfuscated;
  private final TextDecoration.State bold;
  private final TextDecoration.State strikethrough;
  private final TextDecoration.State underlined;
  private final TextDecoration.State italic;
  private final @Nullable ClickEvent clickEvent;
  private final @Nullable HoverEvent hoverEvent;
  private final @Nullable String insertion;
  /**
   * Creates a builder.
   *
   * @return a builder
   */
  public static @NonNull Builder builder() {
    return new Builder();
  }
  /**
   * Gets an empty style.
   *
   * @return empty style
   */
  public static @NonNull Style empty() {
    return EMPTY;
  }
  /**
   * Creates a style with color.
   *
   * @param color the color
   * @return a style
   */
  public static @NonNull Style of(final @Nullable TextColor color) {
    return builder().color(color).build();
  }
  /**
   * Creates a style with decorations.
   *
   * @param decorations the decorations
   * @return a style
   */
  public static @NonNull Style of(final TextDecoration@NonNull... decorations) {
    // Delegate to the color-and-decorations factory to avoid duplicating the loop.
    return of((TextColor) null, decorations);
  }
  /**
   * Creates a style with color and decorations.
   *
   * @param color the color
   * @param decorations the decorations
   * @return a style
   */
  public static @NonNull Style of(final @Nullable TextColor color, final TextDecoration@NonNull... decorations) {
    final Builder builder = builder();
    builder.color(color);
    for(final TextDecoration decoration : decorations) {
      builder.decoration(decoration, true);
    }
    return builder.build();
  }
  private Style(final @Nullable TextColor color, final TextDecoration.State obfuscated, final TextDecoration.State bold, final TextDecoration.State strikethrough, final TextDecoration.State underlined, final TextDecoration.State italic, final @Nullable ClickEvent clickEvent, final @Nullable HoverEvent hoverEvent, final @Nullable String insertion) {
    this.color = color;
    this.obfuscated = obfuscated;
    this.bold = bold;
    this.strikethrough = strikethrough;
    this.underlined = underlined;
    this.italic = italic;
    this.clickEvent = clickEvent;
    this.hoverEvent = hoverEvent;
    this.insertion = insertion;
  }
  /**
   * Gets the color.
   *
   * @return the color
   */
  public @Nullable TextColor color() {
    return this.color;
  }
  /**
   * Sets the color if there isn't one set already.
   *
   * @param color the color
   * @return a style
   */
  public @NonNull Style colorIfAbsent(final @Nullable TextColor color) {
    if(this.color == null) {
      return this.color(color);
    }
    return this;
  }
  /**
   * Sets the color.
   *
   * @param color the color
   * @return a style
   */
  public @NonNull Style color(final @Nullable TextColor color) {
    return new Style(color, this.obfuscated, this.bold, this.strikethrough, this.underlined, this.italic, this.clickEvent, this.hoverEvent, this.insertion);
  }
  /**
   * Tests if this style has a decoration.
   *
   * @param decoration the decoration
   * @return {@code true} if this style has the decoration, {@code false} if this
   *     style does not have the decoration
   */
  public boolean hasDecoration(final @NonNull TextDecoration decoration) {
    return this.decoration(decoration) == TextDecoration.State.TRUE;
  }
  /**
   * Gets the state of a decoration on this style.
   *
   * @param decoration the decoration
   * @return {@link TextDecoration.State#TRUE} if this style has the decoration,
   *     {@link TextDecoration.State#FALSE} if this style does not have the decoration,
   *     and {@link TextDecoration.State#NOT_SET} if not set
   */
  public TextDecoration.@NonNull State decoration(final @NonNull TextDecoration decoration) {
    switch(decoration) {
      case BOLD: return this.bold;
      case ITALIC: return this.italic;
      case UNDERLINED: return this.underlined;
      case STRIKETHROUGH: return this.strikethrough;
      case OBFUSCATED: return this.obfuscated;
      default: throw new IllegalArgumentException(String.format("unknown decoration '%s'", decoration));
    }
  }
  /**
   * Sets the state of a decoration on this style.
   *
   * @param decoration the decoration
   * @param flag {@code true} if this style should have the decoration, {@code false} if
   *     this style should not have the decoration
   * @return a style
   */
  public @NonNull Style decoration(final @NonNull TextDecoration decoration, final boolean flag) {
    return this.decoration(decoration, TextDecoration.State.byBoolean(flag));
  }
  /**
   * Sets the value of a decoration on this style.
   *
   * @param decoration the decoration
   * @param state {@link TextDecoration.State#TRUE} if this style should have the
   *     decoration, {@link TextDecoration.State#FALSE} if this style should not
   *     have the decoration, and {@link TextDecoration.State#NOT_SET} if the decoration
   *     should not have a set value
   * @return a style
   */
  public @NonNull Style decoration(final @NonNull TextDecoration decoration, final TextDecoration.@NonNull State state) {
    // requireNonNull messages name the parameter being checked ("state").
    switch(decoration) {
      case BOLD: return new Style(this.color, this.obfuscated, requireNonNull(state, "state"), this.strikethrough, this.underlined, this.italic, this.clickEvent, this.hoverEvent, this.insertion);
      case ITALIC: return new Style(this.color, this.obfuscated, this.bold, this.strikethrough, this.underlined, requireNonNull(state, "state"), this.clickEvent, this.hoverEvent, this.insertion);
      case UNDERLINED: return new Style(this.color, this.obfuscated, this.bold, this.strikethrough, requireNonNull(state, "state"), this.italic, this.clickEvent, this.hoverEvent, this.insertion);
      case STRIKETHROUGH: return new Style(this.color, this.obfuscated, this.bold, requireNonNull(state, "state"), this.underlined, this.italic, this.clickEvent, this.hoverEvent, this.insertion);
      case OBFUSCATED: return new Style(this.color, requireNonNull(state, "state"), this.bold, this.strikethrough, this.underlined, this.italic, this.clickEvent, this.hoverEvent, this.insertion);
      default: throw new IllegalArgumentException(String.format("unknown decoration '%s'", decoration));
    }
  }
  /**
   * Gets a set of decorations this style has.
   *
   * @return a set of decorations this style has
   */
  public @NonNull Set<TextDecoration> decorations() {
    return this.decorations(Collections.emptySet());
  }
  /**
   * Gets a set of decorations this style has.
   *
   * @param defaultValues a set of default values
   * @return a set of decorations this style has
   */
  public @NonNull Set<TextDecoration> decorations(final @NonNull Set<TextDecoration> defaultValues) {
    final Set<TextDecoration> decorations = EnumSet.noneOf(TextDecoration.class);
    for(final TextDecoration decoration : DECORATIONS) {
      final TextDecoration.State value = this.decoration(decoration);
      // NOT_SET falls back to the caller-supplied defaults.
      if(value == TextDecoration.State.TRUE || (value == TextDecoration.State.NOT_SET && defaultValues.contains(decoration))) {
        decorations.add(decoration);
      }
    }
    return decorations;
  }
  /**
   * Gets the click event.
   *
   * @return the click event
   */
  public @Nullable ClickEvent clickEvent() {
    return this.clickEvent;
  }
  /**
   * Sets the click event.
   *
   * @param event the click event
   * @return a style
   */
  public @NonNull Style clickEvent(final @Nullable ClickEvent event) {
    return new Style(this.color, this.obfuscated, this.bold, this.strikethrough, this.underlined, this.italic, event, this.hoverEvent, this.insertion);
  }
  /**
   * Gets the hover event.
   *
   * @return the hover event
   */
  public @Nullable HoverEvent hoverEvent() {
    return this.hoverEvent;
  }
  /**
   * Sets the hover event.
   *
   * @param event the hover event
   * @return a style
   */
  public @NonNull Style hoverEvent(final @Nullable HoverEvent event) {
    return new Style(this.color, this.obfuscated, this.bold, this.strikethrough, this.underlined, this.italic, this.clickEvent, event, this.insertion);
  }
  /**
   * Gets the string to be inserted when this style is shift-clicked.
   *
   * @return the insertion string
   */
  public @Nullable String insertion() {
    return this.insertion;
  }
  /**
   * Sets the string to be inserted when this style is shift-clicked.
   *
   * @param insertion the insertion string
   * @return a style
   */
  public @NonNull Style insertion(final @Nullable String insertion) {
    return new Style(this.color, this.obfuscated, this.bold, this.strikethrough, this.underlined, this.italic, this.clickEvent, this.hoverEvent, insertion);
  }
  /**
   * Merges from another style into this style.
   *
   * @param that the other style
   * @return a style
   */
  public @NonNull Style merge(final @NonNull Style that) {
    return this.merge(that, Merge.all());
  }
  /**
   * Merges from another style into this style.
   *
   * @param that the other style
   * @param merges the parts to merge
   * @return a style
   */
  public @NonNull Style merge(final @NonNull Style that, final @NonNull Merge@NonNull... merges) {
    return this.merge(that, Merge.of(merges));
  }
  /**
   * Merges from another style into this style.
   *
   * @param that the other style
   * @param merges the parts to merge
   * @return a style
   */
  public @NonNull Style merge(final @NonNull Style that, final @NonNull Set<Merge> merges) {
    if(this.isEmpty()) {
      // This style is empty: the other style is the result, no copy needed.
      return that;
    }
    if(merges.isEmpty() || that.isEmpty()) {
      // Nothing to merge in.
      return this;
    }
    final Builder builder = this.toBuilder();
    builder.merge(that, merges);
    return builder.build();
  }
  /**
   * Merges the color from another style into this style.
   *
   * @param that the other style
   * @return a style
   * @deprecated use {@link #merge(Style, Set)} instead
   */
  @Deprecated
  public @NonNull Style mergeColor(final @NonNull Style that) {
    return this.merge(that, Collections.singleton(Merge.COLOR));
  }
  /**
   * Merges the decorations from another style into this style.
   *
   * @param that the other style
   * @return a style
   * @deprecated use {@link #merge(Style, Set)} instead
   */
  @Deprecated
  public @NonNull Style mergeDecorations(final @NonNull Style that) {
    return this.merge(that, Collections.singleton(Merge.DECORATIONS));
  }
  /**
   * Merges the events from another style into this style.
   *
   * @param that the other style
   * @return a style
   * @deprecated use {@link #merge(Style, Set)} instead
   */
  @Deprecated
  public @NonNull Style mergeEvents(final @NonNull Style that) {
    return this.merge(that, Collections.singleton(Merge.EVENTS));
  }
  /**
   * Tests if this style is empty.
   *
   * @return {@code true} if this style is empty, {@code false} if this
   *     style is not empty
   */
  public boolean isEmpty() {
    return this.color == null
      && this.obfuscated == TextDecoration.State.NOT_SET
      && this.bold == TextDecoration.State.NOT_SET
      && this.strikethrough == TextDecoration.State.NOT_SET
      && this.underlined == TextDecoration.State.NOT_SET
      && this.italic == TextDecoration.State.NOT_SET
      && this.clickEvent == null
      && this.hoverEvent == null
      && this.insertion == null;
  }
  /**
   * Create a builder from this style.
   *
   * @return a builder
   */
  public @NonNull Builder toBuilder() {
    return new Builder(this);
  }
  @Override
  public @NonNull String toString() {
    final Map<String, Object> builder = new LinkedHashMap<>();
    builder.put("color", this.color);
    builder.put("obfuscated", this.obfuscated);
    builder.put("bold", this.bold);
    builder.put("strikethrough", this.strikethrough);
    builder.put("underlined", this.underlined);
    builder.put("italic", this.italic);
    builder.put("clickEvent", this.clickEvent);
    builder.put("hoverEvent", this.hoverEvent);
    builder.put("insertion", this.insertion);
    return ToStringer.toString(this, builder);
  }
  @Override
  public boolean equals(final @Nullable Object other) {
    if(this == other) return true;
    if(!(other instanceof Style)) return false;
    final Style that = (Style) other;
    // Compare color with Objects.equals for consistency with the other fields
    // (also null-safe should TextColor ever stop being identity-comparable).
    return Objects.equals(this.color, that.color)
      && Objects.equals(this.obfuscated, that.obfuscated)
      && Objects.equals(this.bold, that.bold)
      && Objects.equals(this.strikethrough, that.strikethrough)
      && Objects.equals(this.underlined, that.underlined)
      && Objects.equals(this.italic, that.italic)
      && Objects.equals(this.clickEvent, that.clickEvent)
      && Objects.equals(this.hoverEvent, that.hoverEvent)
      && Objects.equals(this.insertion, that.insertion);
  }
  @Override
  public int hashCode() {
    return Objects.hash(this.color, this.obfuscated, this.bold, this.strikethrough, this.underlined, this.italic, this.clickEvent, this.hoverEvent, this.insertion);
  }
  /**
   * A merge choice.
   */
  public enum Merge {
    COLOR,
    DECORATIONS,
    EVENTS,
    INSERTION;
    static final Set<Merge> ALL = of(Merge.values());
    /**
     * Gets a merge set of all merge types.
     *
     * @return a merge set
     */
    public static @NonNull Set<Merge> all() {
      return ALL;
    }
    /**
     * Creates a merge set.
     *
     * @param merges the merge parts
     * @return a merge set
     */
    public static @NonNull Set<Merge> of(final Merge... merges) {
      final Set<Merge> set = EnumSet.noneOf(Merge.class);
      Collections.addAll(set, merges);
      return Collections.unmodifiableSet(set);
    }
  }
  /**
   * A style builder.
   */
  public static class Builder {
    /**
     * The color.
     */
    private @Nullable TextColor color;
    /**
     * If this component should have the {@link TextDecoration#OBFUSCATED obfuscated} decoration.
     */
    private TextDecoration.State obfuscated = TextDecoration.State.NOT_SET;
    /**
     * If this component should have the {@link TextDecoration#BOLD bold} decoration.
     */
    private TextDecoration.State bold = TextDecoration.State.NOT_SET;
    /**
     * If this component should have the {@link TextDecoration#STRIKETHROUGH strikethrough} decoration.
     */
    private TextDecoration.State strikethrough = TextDecoration.State.NOT_SET;
    /**
     * If this component should have the {@link TextDecoration#UNDERLINED underlined} decoration.
     */
    private TextDecoration.State underlined = TextDecoration.State.NOT_SET;
    /**
     * If this component should have the {@link TextDecoration#ITALIC italic} decoration.
     */
    private TextDecoration.State italic = TextDecoration.State.NOT_SET;
    /**
     * The click event to apply to this component.
     */
    private @Nullable ClickEvent clickEvent;
    /**
     * The hover event to apply to this component.
     */
    private @Nullable HoverEvent hoverEvent;
    /**
     * The string to insert when this component is shift-clicked in chat.
     */
    private @Nullable String insertion;
    protected Builder() {
    }
    protected Builder(final @NonNull Component component) {
      this(component.style());
    }
    protected Builder(final @NonNull Style style) {
      this.color = style.color();
      this.obfuscated = style.decoration(TextDecoration.OBFUSCATED);
      this.bold = style.decoration(TextDecoration.BOLD);
      this.strikethrough = style.decoration(TextDecoration.STRIKETHROUGH);
      this.underlined = style.decoration(TextDecoration.UNDERLINED);
      this.italic = style.decoration(TextDecoration.ITALIC);
      this.clickEvent = style.clickEvent();
      this.hoverEvent = style.hoverEvent();
      this.insertion = style.insertion();
    }
    /**
     * Sets the color.
     *
     * @param color the color
     * @return this builder
     */
    public @NonNull Builder color(final @Nullable TextColor color) {
      this.color = color;
      return this;
    }
    /**
     * Sets the color if there isn't one set already.
     *
     * @param color the color
     * @return this builder
     */
    public @NonNull Builder colorIfAbsent(final @Nullable TextColor color) {
      if(this.color == null) {
        this.color = color;
      }
      return this;
    }
    /**
     * Sets the state of a decoration on this style.
     *
     * @param decoration the decoration
     * @param flag {@code true} if this style should have the decoration, {@code false} if
     *     this style should not have the decoration
     * @return this builder
     */
    public @NonNull Builder decoration(final @NonNull TextDecoration decoration, final boolean flag) {
      return this.decoration(decoration, TextDecoration.State.byBoolean(flag));
    }
    /**
     * Sets the value of a decoration.
     *
     * @param decoration the decoration
     * @param state {@link TextDecoration.State#TRUE} if this component should have the
     *     decoration, {@link TextDecoration.State#FALSE} if this component should not
     *     have the decoration, and {@link TextDecoration.State#NOT_SET} if the decoration
     *     should not have a set value
     * @return this builder
     */
    public @NonNull Builder decoration(final @NonNull TextDecoration decoration, final TextDecoration.@NonNull State state) {
      // requireNonNull messages name the parameter being checked ("state").
      switch(decoration) {
        case BOLD: this.bold = requireNonNull(state, "state"); return this;
        case ITALIC: this.italic = requireNonNull(state, "state"); return this;
        case UNDERLINED: this.underlined = requireNonNull(state, "state"); return this;
        case STRIKETHROUGH: this.strikethrough = requireNonNull(state, "state"); return this;
        case OBFUSCATED: this.obfuscated = requireNonNull(state, "state"); return this;
        default: throw new IllegalArgumentException(String.format("unknown decoration '%s'", decoration));
      }
    }
    /**
     * Sets the click event.
     *
     * @param event the click event
     * @return this builder
     */
    public @NonNull Builder clickEvent(final @Nullable ClickEvent event) {
      this.clickEvent = event;
      return this;
    }
    /**
     * Sets the hover event.
     *
     * @param event the hover event
     * @return this builder
     */
    public @NonNull Builder hoverEvent(final @Nullable HoverEvent event) {
      this.hoverEvent = event;
      return this;
    }
    /**
     * Sets the string to be inserted.
     *
     * @param insertion the insertion string
     * @return this builder
     */
    public @NonNull Builder insertion(final @Nullable String insertion) {
      this.insertion = insertion;
      return this;
    }
    /**
     * Merges from another style into this style.
     *
     * @param that the other style
     * @return this builder
     */
    public @NonNull Builder merge(final @NonNull Style that) {
      return this.merge(that, Merge.all());
    }
    /**
     * Merges from another style into this style.
     *
     * @param that the other style
     * @param merges the parts to merge
     * @return this builder
     */
    public @NonNull Builder merge(final @NonNull Style that, final @NonNull Merge @NonNull ... merges) {
      return this.merge(that, Merge.of(merges));
    }
    /**
     * Merges from another style into this style.
     *
     * @param that the other style
     * @param merges the parts to merge
     * @return this builder
     */
    public @NonNull Builder merge(final @NonNull Style that, final @NonNull Set<Merge> merges) {
      if(merges.contains(Merge.COLOR)) {
        final TextColor color = that.color();
        if(color != null) this.color(color);
      }
      if(merges.contains(Merge.DECORATIONS)) {
        for(final TextDecoration decoration : DECORATIONS) {
          final TextDecoration.State state = that.decoration(decoration);
          if(state != TextDecoration.State.NOT_SET) this.decoration(decoration, state);
        }
      }
      if(merges.contains(Merge.EVENTS)) {
        final ClickEvent clickEvent = that.clickEvent();
        if(clickEvent != null) this.clickEvent(clickEvent);
        final HoverEvent hoverEvent = that.hoverEvent();
        if(hoverEvent != null) this.hoverEvent(hoverEvent);
      }
      if(merges.contains(Merge.INSERTION)) {
        final String insertion = that.insertion();
        if(insertion != null) this.insertion(insertion);
      }
      return this;
    }
    /**
     * Builds the style.
     *
     * @return the style
     */
    public @NonNull Style build() {
      return new Style(this.color, this.obfuscated, this.bold, this.strikethrough, this.underlined, this.italic, this.clickEvent, this.hoverEvent, this.insertion);
    }
  }
}
|
package org.postgresql.jdbc2;
// IMPORTANT NOTE: This file implements the JDBC 2 version of the driver.
// If you make any modifications to this file, you must make sure that the
// changes are also made (if relevent) to the related JDBC 1 class in the
// org.postgresql.jdbc1 package.
import java.sql.*;
import java.util.*;
import org.postgresql.Field;
/**
* This class provides information about the database as a whole.
*
* <p>Many of the methods here return lists of information in ResultSets. You
* can use the normal ResultSet methods such as getString and getInt to
* retrieve the data from these ResultSets. If a given form of metadata is
* not available, these methods should throw a SQLException.
*
* <p>Some of these methods take arguments that are String patterns. These
* arguments all have names such as fooPattern. Within a pattern String,
* "%" means match any substring of 0 or more characters, and "_" means
* match any one character. Only metadata entries matching the search
* pattern are returned. if a search pattern argument is set to a null
* ref, it means that argument's criteria should be dropped from the
* search.
*
* <p>A SQLException will be throws if a driver does not support a meta
* data method. In the case of methods that return a ResultSet, either
* a ResultSet (which may be empty) is returned or a SQLException is
* thrown.
*
* @see java.sql.DatabaseMetaData
*/
public class DatabaseMetaData implements java.sql.DatabaseMetaData
{
Connection connection; // The connection association
// These define various OID's. Hopefully they will stay constant.
static final int iVarcharOid = 1043; // OID for varchar
static final int iBoolOid = 16; // OID for bool
static final int iInt2Oid = 21; // OID for int2
static final int iInt4Oid = 23; // OID for int4
static final int VARHDRSZ = 4; // length for int4
// This is a default value for remarks
// NOTE(review): getBytes() uses the platform default charset — confirm that is intended.
private static final byte defaultRemarks[]="no remarks".getBytes();
/** Creates metadata bound to the given connection; all queries run through it. */
public DatabaseMetaData(Connection conn)
{
    this.connection = conn;
}
/**
* Can all the procedures returned by getProcedures be called
* by the current user?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean allProceduresAreCallable() throws SQLException
{
    return true; // For now... (no per-user ACL check is performed)
}
/**
* Can all the tables returned by getTable be SELECTed by
* the current user?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean allTablesAreSelectable() throws SQLException
{
    return true; // For now... (no per-user ACL check is performed)
}
/**
* What is the URL for this database?
*
* @return the url or null if it cannott be generated
* @exception SQLException if a database access error occurs
*/
public String getURL() throws SQLException
{
    // Delegates to the owning connection.
    return connection.getURL();
}
/**
* What is our user name as known to the database?
*
* @return our database user name
* @exception SQLException if a database access error occurs
*/
public String getUserName() throws SQLException
{
    // Delegates to the owning connection.
    return connection.getUserName();
}
/**
* Is the database in read-only mode?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean isReadOnly() throws SQLException
{
    // Delegates to the owning connection.
    return connection.isReadOnly();
}
/**
* Are NULL values sorted high?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean nullsAreSortedHigh() throws SQLException
{
    return false; // consistent with nullsAreSortedAtEnd() returning true
}
/**
* Are NULL values sorted low?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean nullsAreSortedLow() throws SQLException
{
    return false; // consistent with nullsAreSortedAtEnd() returning true
}
/**
* Are NULL values sorted at the start regardless of sort order?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean nullsAreSortedAtStart() throws SQLException
{
    return false; // consistent with nullsAreSortedAtEnd() returning true
}
/**
* Are NULL values sorted at the end regardless of sort order?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean nullsAreSortedAtEnd() throws SQLException
{
    // The only one of the four nulls-sorting predicates that is true here.
    return true;
}
/**
* What is the name of this database product - we hope that it is
* PostgreSQL, so we return that explicitly.
*
* @return the database product name
* @exception SQLException if a database access error occurs
*/
public String getDatabaseProductName() throws SQLException
{
    return "PostgreSQL"; // static; no round-trip to the server
}
/**
* What is the version of this database product.
*
* @return the database version
* @exception SQLException if a database access error occurs
*/
public String getDatabaseProductVersion() throws SQLException
{
    // "select version()" yields e.g. "PostgreSQL X.Y.Z on ..."; we want "X.Y.Z".
    java.sql.ResultSet resultSet = connection.ExecSQL("select version()");
    try
    {
        // Guard against an empty result instead of failing with a cryptic error.
        if (!resultSet.next())
            throw new SQLException("select version() returned no rows");
        StringTokenizer versionParts = new StringTokenizer(resultSet.getString(1));
        versionParts.nextToken(); /* "PostgreSQL" */
        return versionParts.nextToken(); /* "X.Y.Z" */
    }
    finally
    {
        // The original leaked the ResultSet; always release it.
        resultSet.close();
    }
}
/**
* What is the name of this JDBC driver? If we don't know this
* we are doing something wrong!
*
* @return the JDBC driver name
* @exception SQLException why?
*/
public String getDriverName() throws SQLException
{
    return "PostgreSQL Native Driver"; // static
}
/**
* What is the version string of this JDBC driver? Again, this is
* static.
*
* @return the JDBC driver name.
* @exception SQLException why?
*/
public String getDriverVersion() throws SQLException
{
    // Reads the version from the Driver instance that created this connection.
    return connection.this_driver.getVersion();
}
/**
* What is this JDBC driver's major version number?
*
* @return the JDBC driver major version
*/
public int getDriverMajorVersion()
{
    // Reads the version from the Driver instance that created this connection.
    return connection.this_driver.getMajorVersion();
}
/**
* What is this JDBC driver's minor version number?
*
* @return the JDBC driver minor version
*/
public int getDriverMinorVersion()
{
    // Reads the version from the Driver instance that created this connection.
    return connection.this_driver.getMinorVersion();
}
/**
* Does the database store tables in a local file? No - it
* stores them in a file on the server.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean usesLocalFiles() throws SQLException
{
    return false; // tables live on the server, not locally
}
/**
* Does the database use a file for each table? Well, not really,
* since it doesnt use local files.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean usesLocalFilePerTable() throws SQLException
{
    return false; // follows from usesLocalFiles() being false
}
/**
* Does the database treat mixed case unquoted SQL identifiers
* as case sensitive and as a result store them in mixed case?
* A JDBC-Compliant driver will always return false.
*
* <p>Predicament - what do they mean by "SQL identifiers" - if it
* means the names of the tables and columns, then the answers
* given below are correct - otherwise I don't know.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsMixedCaseIdentifiers() throws SQLException
{
    return false; // unquoted identifiers are folded (see storesLowerCaseIdentifiers)
}
/**
* Does the database treat mixed case unquoted SQL identifiers as
* case insensitive and store them in upper case?
*
* @return true if so
*/
public boolean storesUpperCaseIdentifiers() throws SQLException
{
    return false; // folding is to lower case, not upper
}
/**
* Does the database treat mixed case unquoted SQL identifiers as
* case insensitive and store them in lower case?
*
* @return true if so
*/
public boolean storesLowerCaseIdentifiers() throws SQLException
{
    // PostgreSQL folds unquoted identifiers to lower case.
    return true;
}
/**
* Does the database treat mixed case unquoted SQL identifiers as
* case insensitive and store them in mixed case?
*
* @return true if so
*/
public boolean storesMixedCaseIdentifiers() throws SQLException
{
    return false; // folding is to lower case (see storesLowerCaseIdentifiers)
}
/**
* Does the database treat mixed case quoted SQL identifiers as
* case sensitive and as a result store them in mixed case? A
* JDBC compliant driver will always return true.
*
* <p>Predicament - what do they mean by "SQL identifiers" - if it
* means the names of the tables and columns, then the answers
* given below are correct - otherwise I don't know.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException
{
    // Quoted identifiers preserve case; required of a JDBC-compliant driver.
    return true;
}
/**
* Does the database treat mixed case quoted SQL identifiers as
* case insensitive and store them in upper case?
*
* @return true if so
*/
public boolean storesUpperCaseQuotedIdentifiers() throws SQLException
{
    return false; // quoted identifiers keep their case as written
}
/**
* Does the database treat mixed case quoted SQL identifiers as case
* insensitive and store them in lower case?
*
* @return true if so
*/
public boolean storesLowerCaseQuotedIdentifiers() throws SQLException
{
    return false; // quoted identifiers keep their case as written
}
/**
* Does the database treat mixed case quoted SQL identifiers as case
* insensitive and store them in mixed case?
*
* @return true if so
*/
public boolean storesMixedCaseQuotedIdentifiers() throws SQLException
{
    return false; // "stores" implies folding; quoting preserves case instead
}
/**
* What is the string used to quote SQL identifiers? This returns
* a space if identifier quoting isn't supported. A JDBC Compliant
* driver will always use a double quote character.
*
* <p>If an SQL identifier is a table name, column name, etc. then
* we do not support it.
*
* @return the quoting string
* @exception SQLException if a database access error occurs
*/
public String getIdentifierQuoteString() throws SQLException
{
    return "\""; // double quote, as required of a JDBC-compliant driver
}
/**
 * Gets a comma-separated list of PostgreSQL keywords that are not
 * also SQL92 keywords.
 *
 * @return the list of keywords
 * @exception SQLException if a database access error occurs
 */
public String getSQLKeywords() throws SQLException
{
    return "abort,acl,add,aggregate,append,archive,arch_store,backward,binary,change,cluster,copy,database,delimiters,do,extend,explain,forward,heavy,index,inherits,isnull,light,listen,load,merge,nothing,notify,notnull,oids,purge,rename,replace,retrieve,returns,rule,recipe,setof,stdin,stdout,store,vacuum,verbose,version";
}
/** Gets the comma-separated list of math functions; empty until implemented. */
public String getNumericFunctions() throws SQLException
{
    // XXX-Not Implemented
    return "";
}
/** Gets the comma-separated list of string functions; empty until implemented. */
public String getStringFunctions() throws SQLException
{
    // XXX-Not Implemented
    return "";
}
/** Gets the comma-separated list of system functions; empty until implemented. */
public String getSystemFunctions() throws SQLException
{
    // XXX-Not Implemented
    return "";
}
/** Gets the comma-separated list of time/date functions; empty until implemented. */
public String getTimeDateFunctions() throws SQLException
{
    // XXX-Not Implemented
    return "";
}
/**
* This is the string that can be used to escape '_' and '%' in
* a search string pattern style catalog search parameters
*
* @return the string used to escape wildcard characters
* @exception SQLException if a database access error occurs
*/
public String getSearchStringEscape() throws SQLException
{
    return "\\"; // a single backslash escapes '_' and '%' in patterns
}
/**
* Get all the "extra" characters that can be used in unquoted
* identifier names (those beyond a-zA-Z0-9 and _)
*
* <p>From the file src/backend/parser/scan.l, an identifier is
* {letter}{letter_or_digit} which makes it just those listed
* above.
*
* @return a string containing the extra characters
* @exception SQLException if a database access error occurs
*/
public String getExtraNameCharacters() throws SQLException
{
    return ""; // no characters beyond a-zA-Z0-9 and _ (see scan.l note above)
}
/**
* Is "ALTER TABLE" with an add column supported?
* Yes for PostgreSQL 6.1
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsAlterTableWithAddColumn() throws SQLException
{
    return true; // supported since PostgreSQL 6.1
}
/**
* Is "ALTER TABLE" with a drop column supported?
* Peter 10/10/2000 This was set to true, but 7.1devel doesn't support it!
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsAlterTableWithDropColumn() throws SQLException
{
    return false; // deliberately false: 7.1devel does not support it (see note above)
}
/**
 * Is column aliasing (e.g. "SELECT a AS b") supported?
 *
 * @return true if so
 * @exception SQLException if a database access error occurs
 */
public boolean supportsColumnAliasing() throws SQLException
{
    return true;
}
/**
* Are concatenations between NULL and non-NULL values NULL? A
* JDBC Compliant driver always returns true
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean nullPlusNonNullIsNull() throws SQLException
{
    return true; // required of a JDBC-compliant driver
}
/** Is the CONVERT function between SQL types supported? Not implemented yet. */
public boolean supportsConvert() throws SQLException
{
    // XXX-Not Implemented
    return false;
}
/** Is CONVERT between the given SQL types supported? Not implemented yet. */
public boolean supportsConvert(int fromType, int toType) throws SQLException
{
    // XXX-Not Implemented
    return false;
}
/** Are table correlation names supported? Not implemented yet. */
public boolean supportsTableCorrelationNames() throws SQLException
{
    // XXX-Not Implemented
    return false;
}
/** Must correlation names differ from table names? Not implemented yet. */
public boolean supportsDifferentTableCorrelationNames() throws SQLException
{
    // XXX-Not Implemented
    return false;
}
/**
* Are expressions in "ORDER BY" lists supported?
*
* <br>e.g. select * from t order by a + b;
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsExpressionsInOrderBy() throws SQLException
{
return true;
}
/**
* Can an "ORDER BY" clause use columns not in the SELECT?
* I checked it, and you can't.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsOrderByUnrelated() throws SQLException
{
return false;
}
/**
* Is some form of "GROUP BY" clause supported?
* I checked it, and yes it is.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsGroupBy() throws SQLException
{
return true;
}
/**
* Can a "GROUP BY" clause use columns not in the SELECT?
* I checked it - it seems to allow it
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsGroupByUnrelated() throws SQLException
{
return true;
}
/**
* Can a "GROUP BY" clause add columns not in the SELECT, provided
* it also names all the columns that are in the SELECT? In other
* words, the GROUP BY list may be a superset of the SELECT list.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsGroupByBeyondSelect() throws SQLException
{
return true; // For now...
}
/**
* Is the escape character in "LIKE" clauses supported? A
* JDBC compliant driver always returns true.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsLikeEscapeClause() throws SQLException
{
return true;
}
/**
* Are multiple ResultSets from a single execute supported?
* Well, I implemented it, but I dont think this is possible from
* the back ends point of view.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsMultipleResultSets() throws SQLException
{
return false;
}
/**
* Can we have multiple transactions open at once (on different
* connections?)
* I guess we can have, since Im relying on it.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsMultipleTransactions() throws SQLException
{
return true;
}
/**
* Can columns be defined as non-nullable. A JDBC Compliant driver
* always returns true.
*
* <p>This changed from false to true in v6.2 of the driver, as this
* support was added to the backend.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsNonNullableColumns() throws SQLException
{
return true;
}
/**
* Is the ODBC Minimum SQL grammar supported? All JDBC Compliant
* drivers must return true.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsMinimumSQLGrammar() throws SQLException
{
return true;
}
/**
* Does this driver support the Core ODBC SQL grammar? We need
* SQL-92 conformance for this, which we do not claim.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsCoreSQLGrammar() throws SQLException
{
return false;
}
/**
* Does this driver support the Extended (Level 2) ODBC SQL
* grammar? We don't conform to the Core (Level 1), so we can't
* conform to the Extended SQL Grammar.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsExtendedSQLGrammar() throws SQLException
{
return false;
}
/**
* Does this driver support the ANSI-92 entry level SQL grammar?
* All JDBC Compliant drivers must return true. I think we have
* to support outer joins for this to be true.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsANSI92EntryLevelSQL() throws SQLException
{
return false;
}
/**
* Does this driver support the ANSI-92 intermediate level SQL
* grammar? Anyone who does not support Entry level cannot support
* Intermediate level.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsANSI92IntermediateSQL() throws SQLException
{
return false;
}
/**
* Does this driver support the ANSI-92 full SQL grammar?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsANSI92FullSQL() throws SQLException
{
return false;
}
/**
* Is the SQL Integrity Enhancement Facility supported?
* I haven't seen this mentioned anywhere, so I guess not
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsIntegrityEnhancementFacility() throws SQLException
{
return false;
}
/**
* Is some form of outer join supported?
* Yes - outer joins were added to the backend in PostgreSQL 7.1.
* (The javadoc previously said "nope", contradicting the code.)
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsOuterJoins() throws SQLException
{
return true; // yes 7.1 does
}
/**
* Are full nested outer joins supported?
* Yes - supported since PostgreSQL 7.1.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsFullOuterJoins() throws SQLException
{
return true; // yes in 7.1
}
/**
* Is there limited support for outer joins? (This will be true if
* supportFullOuterJoins is true)
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsLimitedOuterJoins() throws SQLException
{
return true; // yes in 7.1
}
/**
* What is the database vendor's preferred term for "schema" - well,
* we do not provide support for schemas, so let's just use that
* term.
*
* @return the vendor term
* @exception SQLException if a database access error occurs
*/
public String getSchemaTerm() throws SQLException
{
return "Schema";
}
/**
* What is the database vendor's preferred term for "procedure" -
* I kind of like "Procedure" myself.
*
* @return the vendor term
* @exception SQLException if a database access error occurs
*/
public String getProcedureTerm() throws SQLException
{
return "Procedure";
}
/**
* What is the database vendor's preferred term for "catalog"? -
* we dont have a preferred term, so just use Catalog
*
* @return the vendor term
* @exception SQLException if a database access error occurs
*/
public String getCatalogTerm() throws SQLException
{
return "Catalog";
}
/**
* Does a catalog appear at the start of a qualified table name?
* (Otherwise it appears at the end).
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean isCatalogAtStart() throws SQLException
{
return false;
}
/**
* What is the Catalog separator? A period would give us
* catalog.table definitions, but PostgreSQL does not support
* catalogs anyhow, so we return the empty string.
*
* @return the catalog separator string
* @exception SQLException if a database access error occurs
*/
public String getCatalogSeparator() throws SQLException
{
// PM Sep 29 97 - changed from "." as we don't support catalogs.
return "";
}
/**
* Can a schema name be used in a data manipulation statement? Nope.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsSchemasInDataManipulation() throws SQLException
{
return false;
}
/**
* Can a schema name be used in a procedure call statement? Nope.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsSchemasInProcedureCalls() throws SQLException
{
return false;
}
/**
* Can a schema be used in a table definition statement? Nope.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsSchemasInTableDefinitions() throws SQLException
{
return false;
}
/**
* Can a schema name be used in an index definition statement?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsSchemasInIndexDefinitions() throws SQLException
{
return false;
}
/**
* Can a schema name be used in a privilege definition statement?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsSchemasInPrivilegeDefinitions() throws SQLException
{
return false;
}
/**
* Can a catalog name be used in a data manipulation statement?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsCatalogsInDataManipulation() throws SQLException
{
return false;
}
/**
* Can a catalog name be used in a procedure call statement?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsCatalogsInProcedureCalls() throws SQLException
{
return false;
}
/**
* Can a catalog name be used in a table definition statement?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsCatalogsInTableDefinitions() throws SQLException
{
return false;
}
/**
* Can a catalog name be used in an index definition?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsCatalogsInIndexDefinitions() throws SQLException
{
return false;
}
/**
* Can a catalog name be used in a privilege definition statement?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsCatalogsInPrivilegeDefinitions() throws SQLException
{
return false;
}
/**
* Is positioned DELETE supported? We support cursors for gets only
* it seems - I dont see a method to get a positioned delete.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsPositionedDelete() throws SQLException
{
return false; // For now...
}
/**
* Is positioned UPDATE supported?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsPositionedUpdate() throws SQLException
{
return false; // For now...
}
/**
* Is SELECT FOR UPDATE supported?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsSelectForUpdate() throws SQLException
{
// XXX-Not Implemented
return false;
}
/**
* Are stored procedure calls using the stored procedure escape
* syntax supported?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsStoredProcedures() throws SQLException
{
// XXX-Not Implemented
return false;
}
/**
* Are subqueries in comparison expressions supported?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsSubqueriesInComparisons() throws SQLException
{
// XXX-Not Implemented
return false;
}
/**
* Are subqueries in EXISTS expressions supported?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsSubqueriesInExists() throws SQLException
{
// XXX-Not Implemented
return false;
}
/**
* Are subqueries in IN statements supported?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsSubqueriesInIns() throws SQLException
{
// XXX-Not Implemented
return false;
}
/**
* Are subqueries in quantified expressions supported?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsSubqueriesInQuantifieds() throws SQLException
{
// XXX-Not Implemented
return false;
}
/**
* Are correlated subqueries supported?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsCorrelatedSubqueries() throws SQLException
{
// XXX-Not Implemented
return false;
}
/**
* Is SQL UNION supported?
* Yes it is. (The javadoc previously said "Nope", contradicting
* the code.)
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsUnion() throws SQLException
{
return true;
}
/**
* Is SQL UNION ALL supported? Nope.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsUnionAll() throws SQLException
{
return false;
}
/**
* Can cursors remain open across commits?
* No - in PostgreSQL, cursors are only open within transactions.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsOpenCursorsAcrossCommit() throws SQLException
{
return false;
}
/**
* Can cursors remain open across rollbacks?
* No - in PostgreSQL, cursors are only open within transactions.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsOpenCursorsAcrossRollback() throws SQLException
{
return false;
}
/**
* Can statements remain open across commits? They may, but
* this driver cannot guarentee that. In further reflection.
* we are talking a Statement object here, so the answer is
* yes, since the Statement is only a vehicle to ExecSQL()
*
* @return true if they always remain open; false otherwise
* @exception SQLException if a database access error occurs
*/
public boolean supportsOpenStatementsAcrossCommit() throws SQLException
{
return true;
}
/**
* Can statements remain open across rollbacks? They may, but
* this driver cannot guarentee that. In further contemplation,
* we are talking a Statement object here, so the answer is yes,
* since the Statement is only a vehicle to ExecSQL() in Connection
*
* @return true if they always remain open; false otherwise
* @exception SQLException if a database access error occurs
*/
public boolean supportsOpenStatementsAcrossRollback() throws SQLException
{
return true;
}
/**
* How many hex characters can you have in an inline binary literal
*
* @return the max literal length
* @exception SQLException if a database access error occurs
*/
public int getMaxBinaryLiteralLength() throws SQLException
{
return 0; // For now...
}
/**
* What is the maximum length for a character literal?
* The driver reports 65535. (An earlier comment reasoned
* 8190 = 8192 - 2 for the quotes, but the code returns 65535.)
*
* @return the max literal length
* @exception SQLException if a database access error occurs
*/
public int getMaxCharLiteralLength() throws SQLException
{
return 65535;
}
/**
* Whats the limit on column name length. The description of
* pg_class would say '32' (length of pg_class.relname) - we
* should probably do a query for this....but....
*
* @return the maximum column name length
* @exception SQLException if a database access error occurs
*/
public int getMaxColumnNameLength() throws SQLException
{
return 32;
}
/**
* What is the maximum number of columns in a "GROUP BY" clause?
*
* @return the max number of columns
* @exception SQLException if a database access error occurs
*/
public int getMaxColumnsInGroupBy() throws SQLException
{
return getMaxColumnsInTable();
}
/**
* What's the maximum number of columns allowed in an index?
* 6.0 only allowed one column, but 6.1 introduced multi-column
* indices, so, theoretically, its all of them.
*
* @return max number of columns
* @exception SQLException if a database access error occurs
*/
public int getMaxColumnsInIndex() throws SQLException
{
return getMaxColumnsInTable();
}
/**
* What's the maximum number of columns in an "ORDER BY clause?
* Theoretically, all of them!
*
* @return the max columns
* @exception SQLException if a database access error occurs
*/
public int getMaxColumnsInOrderBy() throws SQLException
{
return getMaxColumnsInTable();
}
/**
* What is the maximum number of columns in a "SELECT" list?
* Theoretically, all of them!
*
* @return the max columns
* @exception SQLException if a database access error occurs
*/
public int getMaxColumnsInSelect() throws SQLException
{
return getMaxColumnsInTable();
}
/**
* What is the maximum number of columns in a table? From the
* create_table(l) manual page...
*
* <p>"The new class is created as a heap with no initial data. A
* class can have no more than 1600 attributes (realistically,
* this is limited by the fact that tuple sizes must be less than
* 8192 bytes)..."
*
* @return the max columns
* @exception SQLException if a database access error occurs
*/
public int getMaxColumnsInTable() throws SQLException
{
return 1600;
}
/**
* How many active connection can we have at a time to this
* database? Well, since it depends on postmaster, which just
* does a listen() followed by an accept() and fork(), its
* basically very high. Unless the system runs out of processes,
* it can be 65535 (the number of aux. ports on a TCP/IP system).
* I will return 8192 since that is what even the largest system
* can realistically handle.
*
* @return the maximum number of connections
* @exception SQLException if a database access error occurs
*/
public int getMaxConnections() throws SQLException
{
return 8192;
}
/**
* What is the maximum length of a cursor name?
* (The same as the table name length, pg_class.relname.)
*
* @return the max cursor name length in bytes
* @exception SQLException if a database access error occurs
*/
public int getMaxCursorNameLength() throws SQLException
{
return 32;
}
/**
* What is the maximum length of an index (in bytes)? Now, does
* the spec. mean name of an index (in which case its 32, the
* same as a table) or does it mean length of an index element
* (in which case its 8192, the size of a row) or does it mean
* the number of rows it can access (in which case it 2^32 -
* a 4 byte OID number)? I think its the length of an index
* element, personally.
* NOTE(review): this comment used to conclude 8192, but the code
* returns 65535 - confirm which value is intended.
*
* @return max index length in bytes
* @exception SQLException if a database access error occurs
*/
public int getMaxIndexLength() throws SQLException
{
return 65535;
}
/**
* What is the maximum length of a schema name?
* Zero means the limit is unknown (schemas are not supported).
*
* @return the max name length in bytes
* @exception SQLException if a database access error occurs
*/
public int getMaxSchemaNameLength() throws SQLException
{
// XXX-Not Implemented
return 0;
}
/**
* What is the maximum length of a procedure name?
* (length of pg_proc.proname used) - again, I really
* should do a query here to get it.
*
* @return the max name length in bytes
* @exception SQLException if a database access error occurs
*/
public int getMaxProcedureNameLength() throws SQLException
{
return 32;
}
/**
* What is the maximum length of a catalog name?
* Zero means the limit is unknown (catalogs are not supported).
*
* @return the max name length in bytes
* @exception SQLException if a database access error occurs
*/
public int getMaxCatalogNameLength() throws SQLException
{
// XXX-Not Implemented
return 0;
}
/**
* What is the maximum length of a single row? (not including
* blobs). 65535 is defined in PostgreSQL.
*
* @return max row size in bytes
* @exception SQLException if a database access error occurs
*/
public int getMaxRowSize() throws SQLException
{
return 65535;
}
/**
* Did getMaxRowSize() include LONGVARCHAR and LONGVARBINARY
* blobs? We don't handle blobs yet
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean doesMaxRowSizeIncludeBlobs() throws SQLException
{
return false;
}
/**
* What is the maximum length of a SQL statement?
*
* @return max length in bytes
* @exception SQLException if a database access error occurs
*/
public int getMaxStatementLength() throws SQLException
{
return 65535;
}
/**
* How many active statements can we have open at one time to
* this database? Basically, since each Statement downloads
* the results as the query is executed, we can have many. However,
* we can only really have one statement per connection going
* at once (since they are executed serially) - so we return
* one.
*
* @return the maximum
* @exception SQLException if a database access error occurs
*/
public int getMaxStatements() throws SQLException
{
return 1;
}
/**
* What is the maximum length of a table name? This was found
* from pg_class.relname length
*
* @return max name length in bytes
* @exception SQLException if a database access error occurs
*/
public int getMaxTableNameLength() throws SQLException
{
return 32;
}
/**
* What is the maximum number of tables that can be specified
* in a SELECT? Theoretically, this is the same number as the
* number of tables allowable. In practice tho, it is much smaller
* since the number of tables is limited by the statement, we
* return 1024 here - this is just a number I came up with (being
* the number of tables roughly of three characters each that you
* can fit inside a 8192 character buffer with comma separators).
*
* @return the maximum
* @exception SQLException if a database access error occurs
*/
public int getMaxTablesInSelect() throws SQLException
{
return 1024;
}
/**
* What is the maximum length of a user name? Well, we generally
* use UNIX like user names in PostgreSQL, so I think this would
* be 8. However, showing the schema for pg_user shows a length
* for username of 32.
*
* @return the max name length in bytes
* @exception SQLException if a database access error occurs
*/
public int getMaxUserNameLength() throws SQLException
{
return 32;
}
/**
* What is the database's default transaction isolation level?
* This driver reports READ COMMITTED. (The javadoc previously
* claimed all transactions are SERIALIZABLE, contradicting the
* code below.)
*
* @return the default isolation level
* @exception SQLException if a database access error occurs
* @see Connection
*/
public int getDefaultTransactionIsolation() throws SQLException
{
return Connection.TRANSACTION_READ_COMMITTED;
}
/**
* Are transactions supported? If not, commit and rollback are noops
* and the isolation level is TRANSACTION_NONE. We do support
* transactions.
*
* @return true if transactions are supported
* @exception SQLException if a database access error occurs
*/
public boolean supportsTransactions() throws SQLException
{
return true;
}
/**
* Does the database support the given transaction isolation level?
* We only support TRANSACTION_SERIALIZABLE and TRANSACTION_READ_COMMITTED
*
* @param level the values are defined in java.sql.Connection
* @return true if so
* @exception SQLException if a database access error occurs
* @see Connection
*/
public boolean supportsTransactionIsolationLevel(int level) throws SQLException
{
// Return the boolean expression directly rather than the redundant
// "if (cond) return true; else return false;" form.
return level == Connection.TRANSACTION_SERIALIZABLE ||
       level == Connection.TRANSACTION_READ_COMMITTED;
}
/**
* Are both data definition and data manipulation transactions
* supported? I checked it, and could not do a CREATE TABLE
* within a transaction, so I am assuming that we don't
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsDataDefinitionAndDataManipulationTransactions() throws SQLException
{
return false;
}
/**
* Are only data manipulation statements within a transaction
* supported?
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean supportsDataManipulationTransactionsOnly() throws SQLException
{
return true;
}
/**
* Does a data definition statement within a transaction force
* the transaction to commit? I think this means something like:
*
* <p><pre>
* CREATE TABLE T (A INT);
* INSERT INTO T (A) VALUES (2);
* BEGIN;
* UPDATE T SET A = A + 1;
* CREATE TABLE X (A INT);
* SELECT A FROM T INTO X;
* COMMIT;
* </pre><p>
*
* does the CREATE TABLE call cause a commit? The answer is no.
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean dataDefinitionCausesTransactionCommit() throws SQLException
{
return false;
}
/**
* Is a data definition statement within a transaction ignored?
* It seems to be (from experiment in previous method)
*
* @return true if so
* @exception SQLException if a database access error occurs
*/
public boolean dataDefinitionIgnoredInTransactions() throws SQLException
{
return true;
}
/**
* Get a description of stored procedures available in a catalog
*
* <p>Only procedure descriptions matching the schema and procedure
* name criteria are returned. They are ordered by PROCEDURE_SCHEM
* and PROCEDURE_NAME
*
* <p>Each procedure description has the following columns:
* <ol>
* <li><b>PROCEDURE_CAT</b> String => procedure catalog (may be null)
* <li><b>PROCEDURE_SCHEM</b> String => procedure schema (may be null)
* <li><b>PROCEDURE_NAME</b> String => procedure name
* <li><b>Field 4</b> reserved (make it null)
* <li><b>Field 5</b> reserved (make it null)
* <li><b>Field 6</b> reserved (make it null)
* <li><b>REMARKS</b> String => explanatory comment on the procedure
* <li><b>PROCEDURE_TYPE</b> short => kind of procedure
* <ul>
* <li> procedureResultUnknown - May return a result
* <li> procedureNoResult - Does not return a result
* <li> procedureReturnsResult - Returns a result
* </ul>
* </ol>
*
* @param catalog - a catalog name; "" retrieves those without a
* catalog; null means drop catalog name from criteria
* @param schemaPattern - a schema name pattern; "" retrieves those
* without a schema - we ignore this parameter
* @param procedureNamePattern - a procedure name pattern
* @return ResultSet - each row is a procedure description
* @exception SQLException if a database access error occurs
*/
public java.sql.ResultSet getProcedures(String catalog, String schemaPattern, String procedureNamePattern) throws SQLException
{
// the field descriptors for the new ResultSet
Field f[] = new Field[8];
java.sql.ResultSet r; // ResultSet for the SQL query that we need to do
Vector v = new Vector(); // The new ResultSet tuple stuff
byte remarks[] = defaultRemarks;
f[0] = new Field(connection, "PROCEDURE_CAT", iVarcharOid, 32);
f[1] = new Field(connection, "PROCEDURE_SCHEM", iVarcharOid, 32);
f[2] = new Field(connection, "PROCEDURE_NAME", iVarcharOid, 32);
f[3] = f[4] = f[5] = null; // reserved, must be null for now
f[6] = new Field(connection, "REMARKS", iVarcharOid, 8192);
f[7] = new Field(connection, "PROCEDURE_TYPE", iInt2Oid, 2);
// If the pattern is null, then set it to the default
if(procedureNamePattern==null)
procedureNamePattern="%";
// NOTE(review): the pattern is concatenated into the SQL text unescaped;
// a pattern containing a single quote breaks the query. Escaping should
// be added here (see getSearchStringEscape()).
r = connection.ExecSQL("select proname, proretset from pg_proc where proname like '"+procedureNamePattern.toLowerCase()+"' order by proname");
while (r.next())
{
byte[][] tuple = new byte[8][0];
tuple[0] = null; // Catalog name
tuple[1] = null; // Schema name
tuple[2] = r.getBytes(1); // Procedure name
tuple[3] = tuple[4] = tuple[5] = null; // Reserved
tuple[6] = remarks; // Remarks
if (r.getBoolean(2))
tuple[7] = Integer.toString(java.sql.DatabaseMetaData.procedureReturnsResult).getBytes();
else
tuple[7] = Integer.toString(java.sql.DatabaseMetaData.procedureNoResult).getBytes();
v.addElement(tuple);
}
// Close the backend query's ResultSet; it was previously leaked
// (getTables() closes its ResultSet the same way).
r.close();
return new ResultSet(connection, f, v, "OK", 1);
}
/**
* Get a description of a catalog's stored procedure parameters
* and result columns.
*
* <p>Only descriptions matching the schema, procedure and parameter
* name criteria are returned. They are ordered by PROCEDURE_SCHEM
* and PROCEDURE_NAME. Within this, the return value, if any, is
* first. Next are the parameter descriptions in call order. The
* column descriptions follow in column number order.
*
* <p>Each row in the ResultSet is a parameter description or column
* description with the following fields:
* <ol>
* <li><b>PROCEDURE_CAT</b> String => procedure catalog (may be null)
* <li><b>PROCEDURE_SCHE</b>M String => procedure schema (may be null)
* <li><b>PROCEDURE_NAME</b> String => procedure name
* <li><b>COLUMN_NAME</b> String => column/parameter name
* <li><b>COLUMN_TYPE</b> Short => kind of column/parameter:
* <ul><li>procedureColumnUnknown - nobody knows
* <li>procedureColumnIn - IN parameter
* <li>procedureColumnInOut - INOUT parameter
* <li>procedureColumnOut - OUT parameter
* <li>procedureColumnReturn - procedure return value
* <li>procedureColumnResult - result column in ResultSet
* </ul>
* <li><b>DATA_TYPE</b> short => SQL type from java.sql.Types
* <li><b>TYPE_NAME</b> String => SQL type name
* <li><b>PRECISION</b> int => precision
* <li><b>LENGTH</b> int => length in bytes of data
* <li><b>SCALE</b> short => scale
* <li><b>RADIX</b> short => radix
* <li><b>NULLABLE</b> short => can it contain NULL?
* <ul><li>procedureNoNulls - does not allow NULL values
* <li>procedureNullable - allows NULL values
* <li>procedureNullableUnknown - nullability unknown
* <li><b>REMARKS</b> String => comment describing parameter/column
* </ol>
* @param catalog This is ignored in org.postgresql, advise this is set to null
* @param schemaPattern This is ignored in org.postgresql, advise this is set to null
* @param procedureNamePattern a procedure name pattern
* @param columnNamePattern a column name pattern
* @return each row is a stored procedure parameter or column description
* @exception SQLException if a database-access error occurs
* @see #getSearchStringEscape
*/
// Implementation note: This is required for Borland's JBuilder to work
public java.sql.ResultSet getProcedureColumns(String catalog, String schemaPattern, String procedureNamePattern, String columnNamePattern) throws SQLException
{
if(procedureNamePattern==null)
procedureNamePattern="%";
if(columnNamePattern==null)
columnNamePattern="%";
// For now, this returns an EMPTY result set: only the column metadata
// is populated; no rows are ever added to v.
Field f[] = new Field[13];
Vector v = new Vector(); // The new ResultSet tuple stuff
f[0] = new Field(connection, "PROCEDURE_CAT", iVarcharOid, 32);
f[1] = new Field(connection, "PROCEDURE_SCHEM", iVarcharOid, 32);
f[2] = new Field(connection, "PROCEDURE_NAME", iVarcharOid, 32);
f[3] = new Field(connection, "COLUMN_NAME", iVarcharOid, 32);
f[4] = new Field(connection, "COLUMN_TYPE", iInt2Oid, 2);
f[5] = new Field(connection, "DATA_TYPE", iInt2Oid, 2);
f[6] = new Field(connection, "TYPE_NAME", iVarcharOid, 32);
f[7] = new Field(connection, "PRECISION", iInt4Oid, 4);
f[8] = new Field(connection, "LENGTH", iInt4Oid, 4);
f[9] = new Field(connection, "SCALE", iInt2Oid, 2);
f[10] = new Field(connection, "RADIX", iInt2Oid, 2);
f[11] = new Field(connection, "NULLABLE", iInt2Oid, 2);
f[12] = new Field(connection, "REMARKS", iVarcharOid, 32);
// add query loop here (removed the unused local "ResultSet r" that
// was declared for it but never assigned)
return new ResultSet(connection, f, v, "OK", 1);
}
/**
* Get a description of tables available in a catalog.
*
* <p>Only table descriptions matching the catalog, schema, table
* name and type criteria are returned. They are ordered by
* TABLE_TYPE, TABLE_SCHEM and TABLE_NAME.
*
* <p>Each table description has the following columns:
*
* <ol>
* <li><b>TABLE_CAT</b> String => table catalog (may be null)
* <li><b>TABLE_SCHEM</b> String => table schema (may be null)
* <li><b>TABLE_NAME</b> String => table name
* <li><b>TABLE_TYPE</b> String => table type. Typical types are "TABLE",
* "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY", "LOCAL
* TEMPORARY", "ALIAS", "SYNONYM".
* <li><b>REMARKS</b> String => explanatory comment on the table
* </ol>
*
* <p>The valid values for the types parameter are:
* "TABLE", "VIEW", "INDEX", "SEQUENCE", "SYSTEM TABLE" and
* "SYSTEM INDEX"
*
* @param catalog a catalog name; For org.postgresql, this is ignored, and
* should be set to null
* @param schemaPattern a schema name pattern; For org.postgresql, this is ignored, and
* should be set to null
* @param tableNamePattern a table name pattern. For all tables this should be "%"
* @param types a list of table types to include; null returns
* all types
* @return each row is a table description
* @exception SQLException if a database-access error occurs.
*/
public java.sql.ResultSet getTables(String catalog, String schemaPattern, String tableNamePattern, String types[]) throws SQLException
{
// Handle default value for types
if(types==null)
types = defaultTableTypes;
if(tableNamePattern==null)
tableNamePattern="%";
// the field descriptors for the new ResultSet
Field f[] = new Field[5];
java.sql.ResultSet r; // ResultSet for the SQL query that we need to do
Vector v = new Vector(); // The new ResultSet tuple stuff
f[0] = new Field(connection, "TABLE_CAT", iVarcharOid, 32);
f[1] = new Field(connection, "TABLE_SCHEM", iVarcharOid, 32);
f[2] = new Field(connection, "TABLE_NAME", iVarcharOid, 32);
f[3] = new Field(connection, "TABLE_TYPE", iVarcharOid, 32);
f[4] = new Field(connection, "REMARKS", iVarcharOid, 32);
// Now form the query
StringBuffer sql = new StringBuffer("select relname,oid,relkind from pg_class where (");
boolean notFirst=false;
for(int i=0;i<types.length;i++) {
for(int j=0;j<getTableTypes.length;j++)
if(getTableTypes[j][0].equals(types[i])) {
if(notFirst)
sql.append(" or ");
sql.append(getTableTypes[j][1]);
notFirst=true;
}
}
// Added by Stefan Andreasen <stefan@linux.kapow.dk>
// Now take the pattern into account
sql.append(") and relname like '");
sql.append(tableNamePattern.toLowerCase());
sql.append("'");
// Now run the query
r = connection.ExecSQL(sql.toString());
byte remarks[];
while (r.next())
{
byte[][] tuple = new byte[5][0];
// Fetch the description for the table (if any)
java.sql.ResultSet dr = connection.ExecSQL("select description from pg_description where objoid="+r.getInt(2));
if(((org.postgresql.ResultSet)dr).getTupleCount()==1) {
dr.next();
remarks = dr.getBytes(1);
} else
remarks = defaultRemarks;
dr.close();
// Map pg_class.relkind to a JDBC table type.
String relKind;
switch (r.getBytes(3)[0]) {
case 'r':
relKind = "TABLE";
break;
case 'i':
relKind = "INDEX";
break;
case 'S':
relKind = "SEQUENCE";
break;
case 'v':
// BUG FIX: views (relkind='v') are selectable via the "VIEW"
// type in getTableTypes, but this switch previously had no
// 'v' case, leaving relKind null and causing a
// NullPointerException at relKind.getBytes() below.
relKind = "VIEW";
break;
default:
relKind = null;
}
tuple[0] = null; // Catalog name
tuple[1] = null; // Schema name
tuple[2] = r.getBytes(1); // Table name
// Guard against unrecognised relkinds: relKind may still be null,
// and calling getBytes() on null would throw a NullPointerException.
tuple[3] = relKind == null ? null : relKind.getBytes(); // Table type
tuple[4] = remarks; // Remarks
v.addElement(tuple);
}
r.close();
return new ResultSet(connection, f, v, "OK", 1);
}
// This array contains the valid values for the types argument
// in getTables().
// Each supported type consists of it's name, and the sql where
// clause to retrieve that value.
// Note that "VIEW" selects rows with relkind='v'.
// IMPORTANT: the query must be enclosed in ( )
private static final String getTableTypes[][] = {
{"TABLE", "(relkind='r' and relhasrules='f' and relname !~ '^pg_' and relname !~ '^xinv')"},
{"VIEW", "(relkind='v' and relname !~ '^pg_')"},
{"INDEX", "(relkind='i' and relname !~ '^pg_')"},
{"SEQUENCE", "(relkind='S' and relname !~ '^pg_')"},
{"SYSTEM TABLE", "(relkind='r' and relname ~ '^pg_')"},
{"SYSTEM INDEX", "(relkind='i' and relname ~ '^pg_')"}
};
// These are the default table types, used when NULL is passed to getTables.
// The choice of these provide the same behaviour as psql's \d
private static final String defaultTableTypes[] = {
"TABLE","VIEW","INDEX","SEQUENCE"
};
/**
* Get the schema names available in this database. The results
* are ordered by schema name.
*
* <P>The schema column is:
* <OL>
* <LI><B>TABLE_SCHEM</B> String => schema name
* </OL>
*
* @return ResultSet each row has a single String column that is a
* schema name
*/
public java.sql.ResultSet getSchemas() throws SQLException
{
    // Schemas are not supported at this level of the driver, so the
    // result is a single row whose schema name is the empty string.
    Field cols[] = new Field[1];
    cols[0] = new Field(connection, "TABLE_SCHEM", iVarcharOid, 32);
    byte[][] row = new byte[1][0];
    row[0] = "".getBytes();
    Vector rows = new Vector();
    rows.addElement(row);
    return new ResultSet(connection, cols, rows, "OK", 1);
}
/**
* Get the catalog names available in this database. The results
* are ordered by catalog name.
*
* <P>The catalog column is:
* <OL>
* <LI><B>TABLE_CAT</B> String => catalog name
* </OL>
*
* @return ResultSet each row has a single String column that is a
* catalog name
*/
public java.sql.ResultSet getCatalogs() throws SQLException
{
    // Catalogs are not supported; return a one-row result set whose
    // catalog name is the empty string.
    Field cols[] = new Field[1];
    cols[0] = new Field(connection, "TABLE_CAT", iVarcharOid, 32);
    byte[][] row = new byte[1][0];
    row[0] = "".getBytes();
    Vector rows = new Vector();
    rows.addElement(row);
    return new ResultSet(connection, cols, rows, "OK", 1);
}
/**
* Get the table types available in this database. The results
* are ordered by table type.
*
* <P>The table type is:
* <OL>
* <LI><B>TABLE_TYPE</B> String => table type. Typical types are "TABLE",
* "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY",
* "LOCAL TEMPORARY", "ALIAS", "SYNONYM".
* </OL>
*
* @return ResultSet each row has a single String column that is a
* table type
*/
public java.sql.ResultSet getTableTypes() throws SQLException
{
    // Returns one row per supported table type, taken from the first
    // column of the static getTableTypes table declared above.
    Field f[] = new Field[1];
    Vector v = new Vector();
    f[0] = new Field(connection, "TABLE_TYPE", iVarcharOid, 32);
    for (int i = 0; i < getTableTypes.length; i++) {
        // Fix: the tuple was previously allocated as byte[2][0] even
        // though this result set describes exactly one column.
        byte[][] tuple = new byte[1][0];
        tuple[0] = getTableTypes[i][0].getBytes();
        v.addElement(tuple);
    }
    return new ResultSet(connection, f, v, "OK", 1);
}
/**
* Get a description of table columns available in a catalog.
*
* <P>Only column descriptions matching the catalog, schema, table
* and column name criteria are returned. They are ordered by
* TABLE_SCHEM, TABLE_NAME and ORDINAL_POSITION.
*
* <P>Each column description has the following columns:
* <OL>
* <LI><B>TABLE_CAT</B> String => table catalog (may be null)
* <LI><B>TABLE_SCHEM</B> String => table schema (may be null)
* <LI><B>TABLE_NAME</B> String => table name
* <LI><B>COLUMN_NAME</B> String => column name
* <LI><B>DATA_TYPE</B> short => SQL type from java.sql.Types
* <LI><B>TYPE_NAME</B> String => Data source dependent type name
* <LI><B>COLUMN_SIZE</B> int => column size. For char or date
* types this is the maximum number of characters, for numeric or
* decimal types this is precision.
* <LI><B>BUFFER_LENGTH</B> is not used.
* <LI><B>DECIMAL_DIGITS</B> int => the number of fractional digits
* <LI><B>NUM_PREC_RADIX</B> int => Radix (typically either 10 or 2)
* <LI><B>NULLABLE</B> int => is NULL allowed?
* <UL>
* <LI> columnNoNulls - might not allow NULL values
* <LI> columnNullable - definitely allows NULL values
* <LI> columnNullableUnknown - nullability unknown
* </UL>
* <LI><B>REMARKS</B> String => comment describing column (may be null)
* <LI><B>COLUMN_DEF</B> String => default value (may be null)
* <LI><B>SQL_DATA_TYPE</B> int => unused
* <LI><B>SQL_DATETIME_SUB</B> int => unused
* <LI><B>CHAR_OCTET_LENGTH</B> int => for char types the
* maximum number of bytes in the column
* <LI><B>ORDINAL_POSITION</B> int => index of column in table
* (starting at 1)
* <LI><B>IS_NULLABLE</B> String => "NO" means column definitely
* does not allow NULL values; "YES" means the column might
* allow NULL values. An empty string means nobody knows.
* </OL>
*
* @param catalog a catalog name; "" retrieves those without a catalog
* @param schemaPattern a schema name pattern; "" retrieves those
* without a schema
* @param tableNamePattern a table name pattern
* @param columnNamePattern a column name pattern
* @return ResultSet each row is a column description
* @see #getSearchStringEscape
*/
public java.sql.ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) throws SQLException
{
// Builds the 18-column JDBC getColumns result from pg_class/pg_attribute,
// with one extra query per row for the remark (pg_description) and one
// for the type name (pg_type). catalog and schemaPattern are ignored.
// the field descriptors for the new ResultSet
Field f[] = new Field[18];
java.sql.ResultSet r; // ResultSet for the SQL query that we need to do
Vector v = new Vector(); // The new ResultSet tuple stuff
f[0] = new Field(connection, "TABLE_CAT", iVarcharOid, 32);
f[1] = new Field(connection, "TABLE_SCHEM", iVarcharOid, 32);
f[2] = new Field(connection, "TABLE_NAME", iVarcharOid, 32);
f[3] = new Field(connection, "COLUMN_NAME", iVarcharOid, 32);
f[4] = new Field(connection, "DATA_TYPE", iInt2Oid, 2);
f[5] = new Field(connection, "TYPE_NAME", iVarcharOid, 32);
f[6] = new Field(connection, "COLUMN_SIZE", iInt4Oid, 4);
f[7] = new Field(connection, "BUFFER_LENGTH", iVarcharOid, 32);
f[8] = new Field(connection, "DECIMAL_DIGITS", iInt4Oid, 4);
f[9] = new Field(connection, "NUM_PREC_RADIX", iInt4Oid, 4);
f[10] = new Field(connection, "NULLABLE", iInt4Oid, 4);
f[11] = new Field(connection, "REMARKS", iVarcharOid, 32);
f[12] = new Field(connection, "COLUMN_DEF", iVarcharOid, 32);
f[13] = new Field(connection, "SQL_DATA_TYPE", iInt4Oid, 4);
f[14] = new Field(connection, "SQL_DATETIME_SUB", iInt4Oid, 4);
f[15] = new Field(connection, "CHAR_OCTET_LENGTH", iVarcharOid, 32);
f[16] = new Field(connection, "ORDINAL_POSITION", iInt4Oid,4);
f[17] = new Field(connection, "IS_NULLABLE", iVarcharOid, 32);
// Added by Stefan Andreasen <stefan@linux.kapow.dk>
// If the pattern are null then set them to %
if (tableNamePattern == null) tableNamePattern="%";
if (columnNamePattern == null) columnNamePattern="%";
// Now form the query
// NOTE(review): the patterns are concatenated straight into the SQL,
// so a pattern containing a single quote breaks the query — consider
// escaping them.
r = connection.ExecSQL("select a.oid,c.relname,a.attname,a.atttypid,a.attnum,a.attnotnull,a.attlen,a.atttypmod from pg_class c, pg_attribute a where a.attrelid=c.oid and c.relname like '"+tableNamePattern.toLowerCase()+"' and a.attname like '"+columnNamePattern.toLowerCase()+"' and a.attnum>0 order by c.relname,a.attnum");
// NOTE(review): remarks is declared but never used; tuple[11] is filled
// directly below.
byte remarks[];
while(r.next()) {
byte[][] tuple = new byte[18][0];
// Fetch the description for the table (if any)
java.sql.ResultSet dr = connection.ExecSQL("select description from pg_description where objoid="+r.getInt(1));
if(((org.postgresql.ResultSet)dr).getTupleCount()==1) {
dr.next();
tuple[11] = dr.getBytes(1);
} else
tuple[11] = defaultRemarks;
dr.close();
tuple[0] = "".getBytes(); // Catalog name
tuple[1] = "".getBytes(); // Schema name
tuple[2] = r.getBytes(2); // Table name
tuple[3] = r.getBytes(3); // Column name
// Resolve the type name from the attribute's type oid (column 4)
dr = connection.ExecSQL("select typname from pg_type where oid = "+r.getString(4));
dr.next();
String typname=dr.getString(1);
dr.close();
tuple[4] = Integer.toString(Field.getSQLType(typname)).getBytes(); // Data type
tuple[5] = typname.getBytes(); // Type name
// Column size
// Looking at the psql source,
// I think the length of a varchar as specified when the table was created
// should be extracted from atttypmod which contains this length + sizeof(int32)
if (typname.equals("bpchar") || typname.equals("varchar")) {
int atttypmod = r.getInt(8);
tuple[6] = Integer.toString(atttypmod != -1 ? atttypmod - VARHDRSZ : 0).getBytes();
} else
tuple[6] = r.getBytes(7);
tuple[7] = null; // Buffer length
tuple[8] = "0".getBytes(); // Decimal Digits - how to get this?
tuple[9] = "10".getBytes(); // Num Prec Radix - assume decimal
// tuple[10] is below
// tuple[11] is above
tuple[12] = null; // column default
tuple[13] = null; // sql data type (unused)
tuple[14] = null; // sql datetime sub (unused)
tuple[15] = tuple[6]; // char octet length
tuple[16] = r.getBytes(5); // ordinal position
// attnotnull='f' means the column accepts NULLs
String nullFlag = r.getString(6);
tuple[10] = Integer.toString(nullFlag.equals("f")?java.sql.DatabaseMetaData.columnNullable:java.sql.DatabaseMetaData.columnNoNulls).getBytes(); // Nullable
tuple[17] = (nullFlag.equals("f")?"YES":"NO").getBytes(); // is nullable
v.addElement(tuple);
}
r.close();
return new ResultSet(connection, f, v, "OK", 1);
}
/**
* Get a description of the access rights for a table's columns.
*
* <P>Only privileges matching the column name criteria are
* returned. They are ordered by COLUMN_NAME and PRIVILEGE.
*
* <P>Each privilege description has the following columns:
* <OL>
* <LI><B>TABLE_CAT</B> String => table catalog (may be null)
* <LI><B>TABLE_SCHEM</B> String => table schema (may be null)
* <LI><B>TABLE_NAME</B> String => table name
* <LI><B>COLUMN_NAME</B> String => column name
* <LI><B>GRANTOR</B> => grantor of access (may be null)
* <LI><B>GRANTEE</B> String => grantee of access
* <LI><B>PRIVILEGE</B> String => name of access (SELECT,
* INSERT, UPDATE, REFERENCES, ...)
* <LI><B>IS_GRANTABLE</B> String => "YES" if grantee is permitted
* to grant to others; "NO" if not; null if unknown
* </OL>
*
* @param catalog a catalog name; "" retrieves those without a catalog
* @param schema a schema name; "" retrieves those without a schema
* @param table a table name
* @param columnNamePattern a column name pattern
* @return ResultSet each row is a column privilege description
* @see #getSearchStringEscape
*/
public java.sql.ResultSet getColumnPrivileges(String catalog, String schema, String table, String columnNamePattern) throws SQLException
{
    // Currently returns the correct column descriptors but zero rows:
    // the relacl column still needs to be parsed before tuples can be
    // added (see the commented-out addElement below).
    Field f[] = new Field[8];
    Vector v = new Vector();

    // Null patterns match everything; identifiers are folded to lower
    // case to match PostgreSQL's handling of unquoted names.
    if (table == null)
        table = "%";
    if (columnNamePattern == null)
        columnNamePattern = "%";
    else
        columnNamePattern = columnNamePattern.toLowerCase();

    f[0] = new Field(connection, "TABLE_CAT", iVarcharOid, 32);
    f[1] = new Field(connection, "TABLE_SCHEM", iVarcharOid, 32);
    f[2] = new Field(connection, "TABLE_NAME", iVarcharOid, 32);
    f[3] = new Field(connection, "COLUMN_NAME", iVarcharOid, 32);
    f[4] = new Field(connection, "GRANTOR", iVarcharOid, 32);
    f[5] = new Field(connection, "GRANTEE", iVarcharOid, 32);
    f[6] = new Field(connection, "PRIVILEGE", iVarcharOid, 32);
    f[7] = new Field(connection, "IS_GRANTABLE", iVarcharOid, 32);

    // This is taken direct from the psql source
    java.sql.ResultSet r = connection.ExecSQL("SELECT relname, relacl FROM pg_class, pg_user WHERE ( relkind = 'r' OR relkind = 'i') and relname !~ '^pg_' and relname !~ '^xin[vx][0-9]+' and usesysid = relowner and relname like '"+table.toLowerCase()+"' ORDER BY relname");
    while (r.next()) {
        byte[][] tuple = new byte[8][0];
        tuple[0] = tuple[1] = "".getBytes();
        DriverManager.println("relname=\""+r.getString(1)+"\" relacl=\""+r.getString(2)+"\"");
        // For now, don't add to the result as relacl needs to be processed.
        //v.addElement(tuple);
    }
    // Fix: the backend result set was previously never closed, leaking
    // the resources associated with the query.
    r.close();
    return new ResultSet(connection, f, v, "OK", 1);
}
/**
* Get a description of the access rights for each table available
* in a catalog.
*
* <P>Only privileges matching the schema and table name
* criteria are returned. They are ordered by TABLE_SCHEM,
* TABLE_NAME, and PRIVILEGE.
*
* <P>Each privilege description has the following columns:
* <OL>
* <LI><B>TABLE_CAT</B> String => table catalog (may be null)
* <LI><B>TABLE_SCHEM</B> String => table schema (may be null)
* <LI><B>TABLE_NAME</B> String => table name
* <LI><B>COLUMN_NAME</B> String => column name
* <LI><B>GRANTOR</B> => grantor of access (may be null)
* <LI><B>GRANTEE</B> String => grantee of access
* <LI><B>PRIVILEGE</B> String => name of access (SELECT,
* INSERT, UPDATE, REFERENCES, ...)
* <LI><B>IS_GRANTABLE</B> String => "YES" if grantee is permitted
* to grant to others; "NO" if not; null if unknown
* </OL>
*
* @param catalog a catalog name; "" retrieves those without a catalog
* @param schemaPattern a schema name pattern; "" retrieves those
* without a schema
* @param tableNamePattern a table name pattern
* @return ResultSet each row is a table privilege description
* @see #getSearchStringEscape
*/
public java.sql.ResultSet getTablePrivileges(String catalog, String schemaPattern, String tableNamePattern) throws SQLException
{
// XXX-Not Implemented
// NOTE(review): returning null here forces callers to null-check;
// an empty ResultSet (as getBestRowIdentifier does) would be safer.
return null;
}
/**
* Get a description of a table's optimal set of columns that
* uniquely identifies a row. They are ordered by SCOPE.
*
* <P>Each column description has the following columns:
* <OL>
* <LI><B>SCOPE</B> short => actual scope of result
* <UL>
* <LI> bestRowTemporary - very temporary, while using row
* <LI> bestRowTransaction - valid for remainder of current transaction
* <LI> bestRowSession - valid for remainder of current session
* </UL>
* <LI><B>COLUMN_NAME</B> String => column name
* <LI><B>DATA_TYPE</B> short => SQL data type from java.sql.Types
* <LI><B>TYPE_NAME</B> String => Data source dependent type name
* <LI><B>COLUMN_SIZE</B> int => precision
* <LI><B>BUFFER_LENGTH</B> int => not used
* <LI><B>DECIMAL_DIGITS</B> short => scale
* <LI><B>PSEUDO_COLUMN</B> short => is this a pseudo column
* like an Oracle ROWID
* <UL>
* <LI> bestRowUnknown - may or may not be pseudo column
* <LI> bestRowNotPseudo - is NOT a pseudo column
* <LI> bestRowPseudo - is a pseudo column
* </UL>
* </OL>
*
* @param catalog a catalog name; "" retrieves those without a catalog
* @param schema a schema name; "" retrieves those without a schema
* @param table a table name
* @param scope the scope of interest; use same values as SCOPE
* @param nullable include columns that are nullable?
* @return ResultSet each row is a column description
*/
// Implementation note: This is required for Borland's JBuilder to work
public java.sql.ResultSet getBestRowIdentifier(String catalog, String schema, String table, int scope, boolean nullable) throws SQLException
{
    // Not implemented: an empty result set with the correct column
    // descriptors is returned so clients (e.g. JBuilder) don't break.
    // Fix: removed the unused local "ResultSet r".
    Field f[] = new Field[8];
    Vector v = new Vector(); // zero rows
    f[0] = new Field(connection, "SCOPE", iInt2Oid, 2);
    f[1] = new Field(connection, "COLUMN_NAME", iVarcharOid, 32);
    f[2] = new Field(connection, "DATA_TYPE", iInt2Oid, 2);
    f[3] = new Field(connection, "TYPE_NAME", iVarcharOid, 32);
    f[4] = new Field(connection, "COLUMN_SIZE", iInt4Oid, 4);
    f[5] = new Field(connection, "BUFFER_LENGTH", iInt4Oid, 4);
    f[6] = new Field(connection, "DECIMAL_DIGITS", iInt2Oid, 2);
    f[7] = new Field(connection, "PSEUDO_COLUMN", iInt2Oid, 2);
    return new ResultSet(connection, f, v, "OK", 1);
}
/**
* Get a description of a table's columns that are automatically
* updated when any value in a row is updated. They are
* unordered.
*
* <P>Each column description has the following columns:
* <OL>
* <LI><B>SCOPE</B> short => is not used
* <LI><B>COLUMN_NAME</B> String => column name
* <LI><B>DATA_TYPE</B> short => SQL data type from java.sql.Types
* <LI><B>TYPE_NAME</B> String => Data source dependent type name
* <LI><B>COLUMN_SIZE</B> int => precision
* <LI><B>BUFFER_LENGTH</B> int => length of column value in bytes
* <LI><B>DECIMAL_DIGITS</B> short => scale
* <LI><B>PSEUDO_COLUMN</B> short => is this a pseudo column
* like an Oracle ROWID
* <UL>
* <LI> versionColumnUnknown - may or may not be pseudo column
* <LI> versionColumnNotPseudo - is NOT a pseudo column
* <LI> versionColumnPseudo - is a pseudo column
* </UL>
* </OL>
*
* @param catalog a catalog name; "" retrieves those without a catalog
* @param schema a schema name; "" retrieves those without a schema
* @param table a table name
* @return ResultSet each row is a column description
*/
public java.sql.ResultSet getVersionColumns(String catalog, String schema, String table) throws SQLException
{
// XXX-Not Implemented
// NOTE(review): callers must null-check this return value.
return null;
}
/**
* Get a description of a table's primary key columns. They
* are ordered by COLUMN_NAME.
*
* <P>Each column description has the following columns:
* <OL>
* <LI><B>TABLE_CAT</B> String => table catalog (may be null)
* <LI><B>TABLE_SCHEM</B> String => table schema (may be null)
* <LI><B>TABLE_NAME</B> String => table name
* <LI><B>COLUMN_NAME</B> String => column name
* <LI><B>KEY_SEQ</B> short => sequence number within primary key
* <LI><B>PK_NAME</B> String => primary key name (may be null)
* </OL>
*
* @param catalog a catalog name; "" retrieves those without a catalog
* @param schema a schema name pattern; "" retrieves those
* without a schema
* @param table a table name
* @return ResultSet each row is a primary key column description
*/
public java.sql.ResultSet getPrimaryKeys(String catalog, String schema, String table) throws SQLException
{
// Answers the query directly via a Statement: joins pg_class (the table),
// pg_index (indisprimary='t' selects the primary key index) and
// pg_attribute (the key columns). catalog and schema are ignored.
// NOTE(review): table is concatenated into the SQL unescaped; a name
// containing a quote will break the query.
return connection.createStatement().executeQuery("SELECT " +
"'' as TABLE_CAT," +
"'' AS TABLE_SCHEM," +
"bc.relname AS TABLE_NAME," +
"a.attname AS COLUMN_NAME," +
"a.attnum as KEY_SEQ,"+
"ic.relname as PK_NAME " +
" FROM pg_class bc, pg_class ic, pg_index i, pg_attribute a" +
" WHERE bc.relkind = 'r' " + // -- not indices
" and upper(bc.relname) = upper('"+table+"')" +
" and i.indrelid = bc.oid" +
" and i.indexrelid = ic.oid" +
" and ic.oid = a.attrelid" +
" and i.indisprimary='t' " +
" ORDER BY table_name, pk_name, key_seq"
);
}
// Helper for getImportedKeys: parses one pg_trigger.tgargs value (column 1
// of keyRelation) and returns a Vector of partially-filled 14-element
// byte[][] tuples, one per foreign-key column.
// The tgargs string is assumed to be a "\000"-separated list of:
// fk-table, pk-table, schema, then alternating fk-column/pk-column pairs
// — TODO(review): confirm this layout against the backend's RI trigger
// argument format.
// NOTE(review): prints debug output to System.out; the parsed "schema"
// value is stored but never used.
private Vector importLoop(java.sql.ResultSet keyRelation) throws SQLException {
String s,s2;
String origTable=null, primTable=new String(""), schema;
int i;
Vector v;
s=keyRelation.getString(1);
s2=s;
System.out.println(s);
v=new Vector();
// Walk the "\000"-separated fields; the first three name the tables and
// schema, the rest are column-name pairs collected into v.
for (i=0;;i++) {
s=s.substring(s.indexOf("\\000")+4);
if (s.compareTo("")==0) {
System.out.println();
break;
}
s2=s.substring(0,s.indexOf("\\000"));
switch (i) {
case 0:
origTable=s2;
break;
case 1:
primTable=s2;
break;
case 2:
schema=s2;
break;
default:
v.add(s2);
}
}
// "where 1=0" fetches no rows — we only need the column metadata of the
// importing (foreign-key) table.
java.sql.ResultSet rstmp=connection.ExecSQL("select * from "+origTable+" where 1=0");
java.sql.ResultSetMetaData origCols=rstmp.getMetaData();
String stmp;
Vector tuples=new Vector();
byte tuple[][];
// the foreign keys are only on even positions in the Vector.
for (i=0;i<v.size();i+=2) {
stmp=(String)v.elementAt(i);
// Match the fk column name against the table's columns to recover its
// ordinal position (used as KEY_SEQ).
for (int j=1;j<=origCols.getColumnCount();j++) {
if (stmp.compareTo(origCols.getColumnName(j))==0) {
tuple=new byte[14][0];
for (int k=0;k<14;k++)
tuple[k]=null;
//PKTABLE_NAME
tuple[2]=primTable.getBytes();
//PKTABLE_COLUMN
stmp=(String)v.elementAt(i+1);
tuple[3]=stmp.getBytes();
//FKTABLE_NAME
tuple[6]=origTable.getBytes();
//FKCOLUMN_NAME
tuple[7]=origCols.getColumnName(j).getBytes();
//KEY_SEQ
tuple[8]=Integer.toString(j).getBytes();
tuples.add(tuple);
System.out.println(origCols.getColumnName(j)+
": "+j+" -> "+primTable+": "+
(String)v.elementAt(i+1));
break;
}
}
}
return tuples;
}
/**
* Get a description of the primary key columns that are
* referenced by a table's foreign key columns (the primary keys
* imported by a table). They are ordered by PKTABLE_CAT,
* PKTABLE_SCHEM, PKTABLE_NAME, and KEY_SEQ.
*
* <P>Each primary key column description has the following columns:
* <OL>
* <LI><B>PKTABLE_CAT</B> String => primary key table catalog
* being imported (may be null)
* <LI><B>PKTABLE_SCHEM</B> String => primary key table schema
* being imported (may be null)
* <LI><B>PKTABLE_NAME</B> String => primary key table name
* being imported
* <LI><B>PKCOLUMN_NAME</B> String => primary key column name
* being imported
* <LI><B>FKTABLE_CAT</B> String => foreign key table catalog (may be null)
* <LI><B>FKTABLE_SCHEM</B> String => foreign key table schema (may be null)
* <LI><B>FKTABLE_NAME</B> String => foreign key table name
* <LI><B>FKCOLUMN_NAME</B> String => foreign key column name
* <LI><B>KEY_SEQ</B> short => sequence number within foreign key
* <LI><B>UPDATE_RULE</B> short => What happens to
* foreign key when primary is updated:
* <UL>
* <LI> importedKeyCascade - change imported key to agree
* with primary key update
* <LI> importedKeyRestrict - do not allow update of primary
* key if it has been imported
* <LI> importedKeySetNull - change imported key to NULL if
* its primary key has been updated
* </UL>
* <LI><B>DELETE_RULE</B> short => What happens to
* the foreign key when primary is deleted.
* <UL>
* <LI> importedKeyCascade - delete rows that import a deleted key
* <LI> importedKeyRestrict - do not allow delete of primary
* key if it has been imported
* <LI> importedKeySetNull - change imported key to NULL if
* its primary key has been deleted
* </UL>
* <LI><B>FK_NAME</B> String => foreign key name (may be null)
* <LI><B>PK_NAME</B> String => primary key name (may be null)
* </OL>
*
* @param catalog a catalog name; "" retrieves those without a catalog
* @param schema a schema name pattern; "" retrieves those
* without a schema
* @param table a table name
* @return ResultSet each row is a primary key column description
* @see #getExportedKeys
*/
public java.sql.ResultSet getImportedKeys(String catalog, String schema, String table) throws SQLException
{
// Builds the result by reading the trigger arguments (tgargs) of the
// RI triggers attached to the table and delegating the parsing to
// importLoop(). catalog and schema are ignored.
// Added by Ola Sundell <ola@miranda.org>
// FIXME: error checking galore!
java.sql.ResultSet rsret;
Field f[]=new Field[14];
byte tuple[][];
f[0]=new Field(connection, "PKTABLE_CAT", iVarcharOid, 32);
f[1]=new Field(connection, "PKTABLE_SCHEM", iVarcharOid, 32);
f[2]=new Field(connection, "PKTABLE_NAME", iVarcharOid, 32);
f[3]=new Field(connection, "PKCOLUMN_NAME", iVarcharOid, 32);
f[4]=new Field(connection, "FKTABLE_CAT", iVarcharOid, 32);
f[5]=new Field(connection, "FKTABLE_SCHEM", iVarcharOid, 32);
f[6]=new Field(connection, "FKTABLE_NAME", iVarcharOid, 32);
f[7]=new Field(connection, "FKCOLUMN_NAME", iVarcharOid, 32);
f[8]=new Field(connection, "KEY_SEQ", iInt2Oid, 2);
f[9]=new Field(connection, "UPDATE_RULE", iInt2Oid, 2);
f[10]=new Field(connection, "DELETE_RULE", iInt2Oid, 2);
f[11]=new Field(connection, "FK_NAME", iVarcharOid, 32);
f[12]=new Field(connection, "PK_NAME", iVarcharOid, 32);
f[13]=new Field(connection, "DEFERRABILITY", iInt2Oid, 2);
// One row per RI trigger on the table; importLoop parses each tgargs
// value into zero or more key-column tuples. Only columns 2,3,6,7,8 of
// each tuple are filled in; the rest are left null.
java.sql.ResultSet rs=connection.ExecSQL("select t.tgargs "+
"from pg_class as c, pg_trigger as t "+
"where c.relname like '"+table+"' and c.relfilenode=t.tgrelid");
Vector tuples=new Vector();
while (rs.next()) {
tuples.addAll(importLoop(rs));
}
// NOTE(review): rs is never closed here.
rsret=new ResultSet(connection, f, tuples, "OK", 1);
return rsret;
}
/**
* Get a description of a foreign key columns that reference a
* table's primary key columns (the foreign keys exported by a
* table). They are ordered by FKTABLE_CAT, FKTABLE_SCHEM,
* FKTABLE_NAME, and KEY_SEQ.
*
* <P>Each foreign key column description has the following columns:
* <OL>
* <LI><B>PKTABLE_CAT</B> String => primary key table catalog (may be null)
* <LI><B>PKTABLE_SCHEM</B> String => primary key table schema (may be null)
* <LI><B>PKTABLE_NAME</B> String => primary key table name
* <LI><B>PKCOLUMN_NAME</B> String => primary key column name
* <LI><B>FKTABLE_CAT</B> String => foreign key table catalog (may be null)
* being exported (may be null)
* <LI><B>FKTABLE_SCHEM</B> String => foreign key table schema (may be null)
* being exported (may be null)
* <LI><B>FKTABLE_NAME</B> String => foreign key table name
* being exported
* <LI><B>FKCOLUMN_NAME</B> String => foreign key column name
* being exported
* <LI><B>KEY_SEQ</B> short => sequence number within foreign key
* <LI><B>UPDATE_RULE</B> short => What happens to
* foreign key when primary is updated:
* <UL>
* <LI> importedKeyCascade - change imported key to agree
* with primary key update
* <LI> importedKeyRestrict - do not allow update of primary
* key if it has been imported
* <LI> importedKeySetNull - change imported key to NULL if
* its primary key has been updated
* </UL>
* <LI><B>DELETE_RULE</B> short => What happens to
* the foreign key when primary is deleted.
* <UL>
* <LI> importedKeyCascade - delete rows that import a deleted key
* <LI> importedKeyRestrict - do not allow delete of primary
* key if it has been imported
* <LI> importedKeySetNull - change imported key to NULL if
* its primary key has been deleted
* </UL>
* <LI><B>FK_NAME</B> String => foreign key identifier (may be null)
* <LI><B>PK_NAME</B> String => primary key identifier (may be null)
* </OL>
*
* @param catalog a catalog name; "" retrieves those without a catalog
* @param schema a schema name pattern; "" retrieves those
* without a schema
* @param table a table name
* @return ResultSet each row is a foreign key column description
* @see #getImportedKeys
*/
public java.sql.ResultSet getExportedKeys(String catalog, String schema, String table) throws SQLException
{
// XXX-Not Implemented
// NOTE(review): callers must null-check this return value.
return null;
}
/**
* Get a description of the foreign key columns in the foreign key
* table that reference the primary key columns of the primary key
* table (describe how one table imports another's key.) This
* should normally return a single foreign key/primary key pair
* (most tables only import a foreign key from a table once.) They
* are ordered by FKTABLE_CAT, FKTABLE_SCHEM, FKTABLE_NAME, and
* KEY_SEQ.
*
* <P>Each foreign key column description has the following columns:
* <OL>
* <LI><B>PKTABLE_CAT</B> String => primary key table catalog (may be null)
* <LI><B>PKTABLE_SCHEM</B> String => primary key table schema (may be null)
* <LI><B>PKTABLE_NAME</B> String => primary key table name
* <LI><B>PKCOLUMN_NAME</B> String => primary key column name
* <LI><B>FKTABLE_CAT</B> String => foreign key table catalog (may be null)
* being exported (may be null)
* <LI><B>FKTABLE_SCHEM</B> String => foreign key table schema (may be null)
* being exported (may be null)
* <LI><B>FKTABLE_NAME</B> String => foreign key table name
* being exported
* <LI><B>FKCOLUMN_NAME</B> String => foreign key column name
* being exported
* <LI><B>KEY_SEQ</B> short => sequence number within foreign key
* <LI><B>UPDATE_RULE</B> short => What happens to
* foreign key when primary is updated:
* <UL>
* <LI> importedKeyCascade - change imported key to agree
* with primary key update
* <LI> importedKeyRestrict - do not allow update of primary
* key if it has been imported
* <LI> importedKeySetNull - change imported key to NULL if
* its primary key has been updated
* </UL>
* <LI><B>DELETE_RULE</B> short => What happens to
* the foreign key when primary is deleted.
* <UL>
* <LI> importedKeyCascade - delete rows that import a deleted key
* <LI> importedKeyRestrict - do not allow delete of primary
* key if it has been imported
* <LI> importedKeySetNull - change imported key to NULL if
* its primary key has been deleted
* </UL>
* <LI><B>FK_NAME</B> String => foreign key identifier (may be null)
* <LI><B>PK_NAME</B> String => primary key identifier (may be null)
* </OL>
*
* @param primaryCatalog a catalog name; "" retrieves those without a catalog
* @param primarySchema a schema name pattern; "" retrieves those
* without a schema
* @param primaryTable the table that exports the key
* @param foreignCatalog a catalog name; "" retrieves those without a catalog
* @param foreignSchema a schema name pattern; "" retrieves those
* without a schema
* @param foreignTable the table that imports the key
* @return ResultSet each row is a foreign key column description
* @see #getImportedKeys
*/
public java.sql.ResultSet getCrossReference(String primaryCatalog, String primarySchema, String primaryTable, String foreignCatalog, String foreignSchema, String foreignTable) throws SQLException
{
// XXX-Not Implemented
// NOTE(review): callers must null-check this return value.
return null;
}
/**
* Get a description of all the standard SQL types supported by
* this database. They are ordered by DATA_TYPE and then by how
* closely the data type maps to the corresponding JDBC SQL type.
*
* <P>Each type description has the following columns:
* <OL>
* <LI><B>TYPE_NAME</B> String => Type name
* <LI><B>DATA_TYPE</B> short => SQL data type from java.sql.Types
* <LI><B>PRECISION</B> int => maximum precision
* <LI><B>LITERAL_PREFIX</B> String => prefix used to quote a literal
* (may be null)
* <LI><B>LITERAL_SUFFIX</B> String => suffix used to quote a literal
* (may be null)
* <LI><B>CREATE_PARAMS</B> String => parameters used in creating
* the type (may be null)
* <LI><B>NULLABLE</B> short => can you use NULL for this type?
* <UL>
* <LI> typeNoNulls - does not allow NULL values
* <LI> typeNullable - allows NULL values
* <LI> typeNullableUnknown - nullability unknown
* </UL>
* <LI><B>CASE_SENSITIVE</B> boolean=> is it case sensitive?
* <LI><B>SEARCHABLE</B> short => can you use "WHERE" based on this type:
* <UL>
* <LI> typePredNone - No support
* <LI> typePredChar - Only supported with WHERE .. LIKE
* <LI> typePredBasic - Supported except for WHERE .. LIKE
* <LI> typeSearchable - Supported for all WHERE ..
* </UL>
* <LI><B>UNSIGNED_ATTRIBUTE</B> boolean => is it unsigned?
* <LI><B>FIXED_PREC_SCALE</B> boolean => can it be a money value?
* <LI><B>AUTO_INCREMENT</B> boolean => can it be used for an
* auto-increment value?
* <LI><B>LOCAL_TYPE_NAME</B> String => localized version of type name
* (may be null)
* <LI><B>MINIMUM_SCALE</B> short => minimum scale supported
* <LI><B>MAXIMUM_SCALE</B> short => maximum scale supported
* <LI><B>SQL_DATA_TYPE</B> int => unused
* <LI><B>SQL_DATETIME_SUB</B> int => unused
* <LI><B>NUM_PREC_RADIX</B> int => usually 2 or 10
* </OL>
*
* @return ResultSet each row is a SQL type description
*/
public java.sql.ResultSet getTypeInfo() throws SQLException
{
    // One row per pg_type entry. Most attribute columns are currently
    // placeholders (precision, scale, case-sensitivity etc.).
    // Fix: removed the unused local "ResultSet r".
    java.sql.ResultSet rs = connection.ExecSQL("select typname from pg_type");
    if (rs != null) {
        Field f[] = new Field[18];
        Vector v = new Vector(); // the result rows
        f[0] = new Field(connection, "TYPE_NAME", iVarcharOid, 32);
        f[1] = new Field(connection, "DATA_TYPE", iInt2Oid, 2);
        f[2] = new Field(connection, "PRECISION", iInt4Oid, 4);
        f[3] = new Field(connection, "LITERAL_PREFIX", iVarcharOid, 32);
        f[4] = new Field(connection, "LITERAL_SUFFIX", iVarcharOid, 32);
        f[5] = new Field(connection, "CREATE_PARAMS", iVarcharOid, 32);
        f[6] = new Field(connection, "NULLABLE", iInt2Oid, 2);
        f[7] = new Field(connection, "CASE_SENSITIVE", iBoolOid, 1);
        f[8] = new Field(connection, "SEARCHABLE", iInt2Oid, 2);
        f[9] = new Field(connection, "UNSIGNED_ATTRIBUTE", iBoolOid, 1);
        f[10] = new Field(connection, "FIXED_PREC_SCALE", iBoolOid, 1);
        f[11] = new Field(connection, "AUTO_INCREMENT", iBoolOid, 1);
        f[12] = new Field(connection, "LOCAL_TYPE_NAME", iVarcharOid, 32);
        f[13] = new Field(connection, "MINIMUM_SCALE", iInt2Oid, 2);
        f[14] = new Field(connection, "MAXIMUM_SCALE", iInt2Oid, 2);
        f[15] = new Field(connection, "SQL_DATA_TYPE", iInt4Oid, 4);
        f[16] = new Field(connection, "SQL_DATETIME_SUB", iInt4Oid, 4);
        f[17] = new Field(connection, "NUM_PREC_RADIX", iInt4Oid, 4);
        // cache some results, this will keep memory useage down, and speed
        // things up a little.
        byte b9[] = "9".getBytes();
        byte b10[] = "10".getBytes();
        byte bf[] = "f".getBytes();
        byte bnn[] = Integer.toString(typeNoNulls).getBytes();
        byte bts[] = Integer.toString(typeSearchable).getBytes();
        while (rs.next()) {
            byte[][] tuple = new byte[18][];
            String typname = rs.getString(1);
            tuple[0] = typname.getBytes();
            tuple[1] = Integer.toString(Field.getSQLType(typname)).getBytes();
            tuple[2] = b9; // for now
            tuple[6] = bnn; // for now
            tuple[7] = bf; // false for now - not case sensitive
            tuple[8] = bts;
            tuple[9] = bf; // false for now - it's signed
            tuple[10] = bf; // false for now - must handle money
            tuple[11] = bf; // false for now - handle autoincrement
            // 12 - LOCAL_TYPE_NAME is null
            // 13 & 14 ?
            // 15 & 16 are unused so we return null
            tuple[17] = b10; // everything is base 10
            v.addElement(tuple);
        }
        rs.close();
        return new ResultSet(connection, f, v, "OK", 1);
    }
    // NOTE(review): a null ExecSQL result yields a null return here;
    // callers must null-check.
    return null;
}
/**
* Get a description of a table's indices and statistics. They are
* ordered by NON_UNIQUE, TYPE, INDEX_NAME, and ORDINAL_POSITION.
*
* <P>Each index column description has the following columns:
* <OL>
* <LI><B>TABLE_CAT</B> String => table catalog (may be null)
* <LI><B>TABLE_SCHEM</B> String => table schema (may be null)
* <LI><B>TABLE_NAME</B> String => table name
* <LI><B>NON_UNIQUE</B> boolean => Can index values be non-unique?
* false when TYPE is tableIndexStatistic
* <LI><B>INDEX_QUALIFIER</B> String => index catalog (may be null);
* null when TYPE is tableIndexStatistic
* <LI><B>INDEX_NAME</B> String => index name; null when TYPE is
* tableIndexStatistic
* <LI><B>TYPE</B> short => index type:
* <UL>
* <LI> tableIndexStatistic - this identifies table statistics that are
* returned in conjunction with a table's index descriptions
* <LI> tableIndexClustered - this is a clustered index
* <LI> tableIndexHashed - this is a hashed index
* <LI> tableIndexOther - this is some other style of index
* </UL>
* <LI><B>ORDINAL_POSITION</B> short => column sequence number
* within index; zero when TYPE is tableIndexStatistic
* <LI><B>COLUMN_NAME</B> String => column name; null when TYPE is
* tableIndexStatistic
* <LI><B>ASC_OR_DESC</B> String => column sort sequence, "A" => ascending
* "D" => descending, may be null if sort sequence is not supported;
* null when TYPE is tableIndexStatistic
* <LI><B>CARDINALITY</B> int => When TYPE is tableIndexStatistic then
* this is the number of rows in the table; otherwise it is the
* number of unique values in the index.
* <LI><B>PAGES</B> int => When TYPE is tableIndexStatistic then
* this is the number of pages used for the table, otherwise it
* is the number of pages used for the current index.
* <LI><B>FILTER_CONDITION</B> String => Filter condition, if any.
* (may be null)
* </OL>
*
* @param catalog a catalog name; "" retrieves those without a catalog
* @param schema a schema name pattern; "" retrieves those without a schema
* @param table a table name
* @param unique when true, return only indices for unique values;
* when false, return indices regardless of whether unique or not
* @param approximate when true, result is allowed to reflect approximate
* or out-of-date values; when false, results are requested to be
* accurate
* @return ResultSet each row is an index column description
*/
// Implementation note: This is required for Borland's JBuilder to work
public java.sql.ResultSet getIndexInfo(String catalog, String schema, String table, boolean unique, boolean approximate) throws SQLException
{
    // Not yet implemented: we describe the thirteen JDBC index-info columns
    // but supply no tuples, so callers always get an empty result set.
    // (Removed an unused local "ResultSet r" left over from an earlier draft.)
    Field f[] = new Field[13];
    Vector v = new Vector(); // empty tuple list -> empty result set
    f[0] = new Field(connection, "TABLE_CAT", iVarcharOid, 32);
    f[1] = new Field(connection, "TABLE_SCHEM", iVarcharOid, 32);
    f[2] = new Field(connection, "TABLE_NAME", iVarcharOid, 32);
    f[3] = new Field(connection, "NON_UNIQUE", iBoolOid, 1);
    f[4] = new Field(connection, "INDEX_QUALIFIER", iVarcharOid, 32);
    f[5] = new Field(connection, "INDEX_NAME", iVarcharOid, 32);
    f[6] = new Field(connection, "TYPE", iInt2Oid, 2);
    f[7] = new Field(connection, "ORDINAL_POSITION", iInt2Oid, 2);
    f[8] = new Field(connection, "COLUMN_NAME", iVarcharOid, 32);
    f[9] = new Field(connection, "ASC_OR_DESC", iVarcharOid, 32);
    f[10] = new Field(connection, "CARDINALITY", iInt4Oid, 4);
    f[11] = new Field(connection, "PAGES", iInt4Oid, 4);
    f[12] = new Field(connection, "FILTER_CONDITION", iVarcharOid, 32);
    return new ResultSet(connection, f, v, "OK", 1);
}
// ** JDBC 2 Extensions **
/**
 * Indicates whether a visible row delete can be detected by calling
 * ResultSet.rowDeleted(). New in 7.1 - we don't support deletes so this
 * must be false!
 *
 * @param i the ResultSet type (TYPE_* constant); ignored
 * @return always false
 */
public boolean deletesAreDetected(int i) throws SQLException
{
return false;
}
/**
 * Indicates whether deletes made by others are visible in an open
 * ResultSet. New in 7.1 - we don't support deletes so this must be false!
 *
 * @param i the ResultSet type (TYPE_* constant); ignored
 * @return always false
 */
public boolean othersDeletesAreVisible(int i) throws SQLException
{
return false;
}
/**
 * Retrieves the connection that produced this metadata object.
 *
 * @return the owning connection, widened to the standard JDBC interface
 */
public java.sql.Connection getConnection() throws SQLException
{
    java.sql.Connection owner = (java.sql.Connection) connection;
    return owner;
}
/**
 * Return user defined types in a schema
 * <p>
 * Not yet implemented by this driver.
 *
 * @throws SQLException always, via Driver.notImplemented()
 */
public java.sql.ResultSet getUDTs(String catalog,
String schemaPattern,
String typeNamePattern,
int[] types
) throws SQLException
{
throw org.postgresql.Driver.notImplemented();
}
/**
 * Indicates whether inserts made by others are visible in an open
 * ResultSet. New in 7.1 - we don't support visible inserts so this must
 * be false!
 *
 * @param type the ResultSet type (TYPE_* constant); ignored
 * @return always false
 */
public boolean othersInsertsAreVisible(int type) throws SQLException
{
return false;
}
/**
 * Indicates whether a visible row update can be detected by calling
 * ResultSet.rowUpdated(). New in 7.1 - we don't support update detection
 * so this must be false!
 *
 * @param type the ResultSet type (TYPE_* constant); ignored
 * @return always false
 */
public boolean updatesAreDetected(int type) throws SQLException
{
return false;
}
/**
 * Indicates whether updates made by others are visible in an open
 * ResultSet. New in 7.1 - we don't support visible updates so this must
 * be false!
 *
 * @param type the ResultSet type (TYPE_* constant); ignored
 * @return always false
 */
public boolean othersUpdatesAreVisible(int type) throws SQLException
{
return false;
}
/**
 * Indicates whether a ResultSet's own updates are visible. Always false:
 * this driver does not support updateable result sets.
 *
 * @param type the ResultSet type (TYPE_* constant); ignored
 * @return always false
 */
public boolean ownUpdatesAreVisible(int type) throws SQLException
{
return false;
}
/**
 * Indicates whether a ResultSet's own inserts are visible. Always false:
 * this driver does not support updateable result sets.
 *
 * @param type the ResultSet type (TYPE_* constant); ignored
 * @return always false
 */
public boolean ownInsertsAreVisible(int type) throws SQLException
{
return false;
}
/**
 * Indicates whether a visible row insert can be detected by calling
 * ResultSet.rowInserted(). Always false: insert detection is unsupported.
 *
 * @param type the ResultSet type (TYPE_* constant); ignored
 * @return always false
 */
public boolean insertsAreDetected(int type) throws SQLException
{
return false;
}
/**
 * Indicates whether a ResultSet's own deletes are visible. Always false:
 * this driver does not support updateable result sets.
 *
 * @param type the ResultSet type (TYPE_* constant); ignored
 * @return always false
 */
public boolean ownDeletesAreVisible(int type) throws SQLException
{
return false;
}
/**
 * Always false. NOTE(review): not part of the standard
 * java.sql.DatabaseMetaData interface - presumably a driver-specific or
 * historical extension; confirm before removing.
 *
 * @param type the ResultSet type (TYPE_* constant); ignored
 * @return always false
 */
public boolean rowChangesAreDetected(int type) throws SQLException
{
return false;
}
/**
 * Always false. NOTE(review): not part of the standard
 * java.sql.DatabaseMetaData interface - presumably a driver-specific or
 * historical extension; confirm before removing.
 *
 * @param type the ResultSet type (TYPE_* constant); ignored
 * @return always false
 */
public boolean rowChangesAreVisible(int type) throws SQLException
{
return false;
}
/**
 * New in 7.1 - If this is for PreparedStatement yes, ResultSet no
 * (statement batching is supported; updateable result sets are not).
 *
 * @return always true
 */
public boolean supportsBatchUpdates() throws SQLException
{
return true;
}
/**
 * New in 7.1. Reports which ResultSet type/concurrency combinations this
 * driver supports: everything except scroll-sensitive result sets and
 * updateable concurrency.
 *
 * @param type        a java.sql.ResultSet TYPE_* constant
 * @param concurrency a java.sql.ResultSet CONCUR_* constant
 * @return true unless the combination involves TYPE_SCROLL_SENSITIVE or
 *         CONCUR_UPDATABLE
 */
public boolean supportsResultSetConcurrency(int type,int concurrency) throws SQLException
{
    // Scroll-sensitive result sets and updateable result sets are the two
    // unsupported features; every other combination is accepted.
    boolean scrollSensitive = type == java.sql.ResultSet.TYPE_SCROLL_SENSITIVE;
    boolean updatable = concurrency == java.sql.ResultSet.CONCUR_UPDATABLE;
    return !(scrollSensitive || updatable);
}
/**
 * Reports which ResultSet types this driver supports.
 *
 * @param type a java.sql.ResultSet TYPE_* constant
 * @return true for every type except TYPE_SCROLL_SENSITIVE
 */
public boolean supportsResultSetType(int type) throws SQLException
{
    // Scroll-sensitive is the single unsupported type.
    if (type == java.sql.ResultSet.TYPE_SCROLL_SENSITIVE) {
        return false;
    }
    return true;
}
}
|
package controllers;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import io.sphere.sdk.categories.Category;
import io.sphere.sdk.categories.CategoryTree;
import io.sphere.sdk.client.PlayJavaClient;
import io.sphere.sdk.client.PlayJavaClientImpl;
import io.sphere.sdk.client.SphereRequestExecutor;
import io.sphere.sdk.client.SphereRequestExecutorTestDouble;
import io.sphere.sdk.queries.PagedQueryResult;
import io.sphere.sdk.utils.JsonUtils;
import play.Application;
import play.Configuration;
import plugins.Global;
import play.test.FakeApplication;
import play.test.WithApplication;
import static play.test.Helpers.fakeApplication;
/**
 * Base class for Sunrise integration tests. Boots a fake Play application
 * whose Guice injector binds a test-double Sphere client, a category tree
 * loaded from a local JSON resource, and the application's configuration.
 * Subclasses may override the protected hooks to customize the bindings.
 */
public abstract class WithSunriseApplication extends WithApplication {

    @Override
    protected FakeApplication provideFakeApplication() {
        final Global testGlobal = new Global() {
            @Override
            protected Injector createInjector(final Application app) {
                final AbstractModule testBindings = new AbstractModule() {
                    @Override
                    protected void configure() {
                        bind(PlayJavaClient.class).toInstance(injectedClientInstance(app));
                        bind(CategoryTree.class).toInstance(injectedCategoryTree());
                        bind(Configuration.class).toInstance(app.configuration());
                    }
                };
                return Guice.createInjector(testBindings);
            }
        };
        return fakeApplication(testGlobal);
    }

    /** Builds a CategoryTree from the bundled "categories.json" fixture. */
    private CategoryTree injectedCategoryTree() {
        final TypeReference<PagedQueryResult<Category>> resultType =
                new TypeReference<PagedQueryResult<Category>>() {
                };
        final PagedQueryResult<Category> queryResult =
                JsonUtils.readObjectFromResource("categories.json", resultType);
        return CategoryTree.of(queryResult.getResults());
    }

    /** Creates the Sphere client under test; override to substitute another. */
    protected PlayJavaClient injectedClientInstance(final Application app){
        return new PlayJavaClientImpl(getConfiguration(app), getSphereRequestExecutor());
    }

    /** Supplies the request executor; defaults to a no-op test double. */
    protected SphereRequestExecutor getSphereRequestExecutor() {
        return new SphereRequestExecutorTestDouble() {
        };
    }

    /**
     * Override this to add additional settings
     * @param app the application used
     * @return a configuration containing the {@code app} configuration values and overridden values
     */
    protected Configuration getConfiguration(Application app) {
        return app.configuration();
    }
}
|
package com.pixnfit.ws;
import android.util.Log;
import com.pixnfit.common.Image;
import com.pixnfit.common.Post;
import com.pixnfit.common.PostComment;
import com.pixnfit.common.PostType;
import com.pixnfit.common.State;
import com.pixnfit.common.User;
import com.pixnfit.common.Visibility;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
/**
 * Static helpers that convert the web-service JSON payloads into the app's
 * domain objects (Post, PostComment, User, Image, PostType, Visibility,
 * State).
 *
 * Every parser accepts null and returns null in that case; otherwise a
 * missing mandatory field raises JSONException.
 */
public class JSONWsParser {
    private static final String TAG = JSONWsParser.class.getSimpleName();

    /**
     * Builds a Post from its JSON representation.
     *
     * @param json the post JSON object, or null
     * @return the parsed Post, or null when json is null
     * @throws JSONException if a mandatory field is missing or mistyped
     */
    public static Post parsePost(JSONObject json) throws JSONException {
        if (json == null) {
            return null;
        } else {
            Post post = new Post();
            post.id = json.getInt("id");
            post.name = json.getString("name");
            post.description = json.optString("description"); // optional field
            post.creator = parseUser(json.getJSONObject("creator"));
            post.images = parseImageList(json.getJSONArray("images"));
            post.postType = parsePostType(json.getJSONObject("postType"));
            post.visibility = parseVisibility(json.getJSONObject("visibility"));
            post.state = parseState(json.getJSONObject("state"));
            post.dateCreated = parseDate(json.getString("dateCreated"));
            return post;
        }
    }

    /**
     * Parses an ISO-8601-style timestamp ("yyyy-MM-dd'T'HH:mm:ss" plus either
     * a trailing "Z" or a "[+-]hh:mm" offset) into a Date.
     *
     * @param date the timestamp string, or null
     * @return the parsed Date, or null when the input is null or unparseable
     */
    public static Date parseDate(String date) {
        if (date == null) {
            return null;
        } else {
            // NOTE: SimpleDateFormat uses GMT[-+]hh:mm for the TZ which breaks
            // things a bit. Before we go on we have to repair this.
            try {
                SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssz", Locale.ENGLISH);
                // this is zero time so we need to add that TZ indicator for
                if (date.endsWith("Z")) {
                    date = date.substring(0, date.length() - 1) + "GMT-00:00";
                } else {
                    // assumes the string ends with a 6-char "[+-]hh:mm" offset;
                    // TODO(review): confirm the server never omits the offset.
                    int inset = 6;
                    String s0 = date.substring(0, date.length() - inset);
                    String s1 = date.substring(date.length() - inset, date.length());
                    date = s0 + "GMT" + s1;
                }
                return df.parse(date);
            } catch (ParseException pe) {
                Log.e(TAG, "parseDate: failed", pe);
                return null;
            }
        }
    }

    /**
     * Builds a User from its JSON representation.
     *
     * @throws JSONException if id, username or imageUrl is missing
     */
    public static User parseUser(JSONObject json) throws JSONException {
        if (json == null) {
            return null;
        } else {
            User user = new User();
            user.id = json.getInt("id");
            user.username = json.getString("username");
            user.imageUrl = json.getString("imageUrl");
            return user;
        }
    }

    /**
     * Parses a JSON array of image objects.
     *
     * @return the parsed images in array order, or null when array is null
     */
    public static List<Image> parseImageList(JSONArray array) throws JSONException {
        if (array == null) {
            return null;
        } else {
            List<Image> images = new ArrayList<>();
            for (int index = 0; index < array.length(); index++) {
                JSONObject jsonObject = array.getJSONObject(index);
                images.add(parseImage(jsonObject));
            }
            return images;
        }
    }

    /** Builds an Image (id + imageUrl) from its JSON representation. */
    public static Image parseImage(JSONObject json) throws JSONException {
        if (json == null) {
            return null;
        } else {
            Image image = new Image();
            image.id = json.getInt("id");
            image.imageUrl = json.getString("imageUrl");
            return image;
        }
    }

    /** Builds a PostType (id + name) from its JSON representation. */
    public static PostType parsePostType(JSONObject json) throws JSONException {
        if (json == null) {
            return null;
        } else {
            PostType postType = new PostType();
            postType.id = json.getInt("id");
            postType.name = json.getString("name");
            return postType;
        }
    }

    /** Builds a Visibility (id + name) from its JSON representation. */
    public static Visibility parseVisibility(JSONObject json) throws JSONException {
        if (json == null) {
            return null;
        } else {
            Visibility visibility = new Visibility();
            visibility.id = json.getInt("id");
            visibility.name = json.getString("name");
            return visibility;
        }
    }

    /** Builds a State (id + name) from its JSON representation. */
    public static State parseState(JSONObject json) throws JSONException {
        if (json == null) {
            return null;
        } else {
            State state = new State();
            state.id = json.getInt("id");
            state.name = json.getString("name");
            return state;
        }
    }

    /**
     * Builds a PostComment from its JSON representation.
     *
     * @throws JSONException if a mandatory field is missing or mistyped
     */
    public static PostComment parsePostComment(JSONObject json) throws JSONException {
        if (json == null) {
            return null;
        } else {
            PostComment postComment = new PostComment();
            postComment.id = json.getInt("id");
            postComment.name = json.getString("name");
            // optString for consistency with parsePost: description is optional.
            postComment.description = json.optString("description");
            postComment.postId = json.getLong("postId");
            postComment.creator = parseUser(json.getJSONObject("creator"));
            postComment.dateCreated = parseDate(json.getString("dateCreated"));
            return postComment;
        }
    }
}
|
package com.st33fo.glideforktt;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.annotation.NonNull;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.ContextMenu;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import com.android.volley.AuthFailureError;
import com.android.volley.NetworkResponse;
import com.android.volley.Request;
import com.android.volley.RequestQueue;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.StringRequest;
import com.android.volley.toolbox.Volley;
import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import java.io.IOException;
import java.io.InputStream;
import java.net.CookieHandler;
import java.net.CookieManager;
import java.net.CookieStore;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
/**
 * Activity that scrapes a forum feed page (via Jsoup through GetDocument),
 * turns each "topicindex" element into a FeedObject and shows the list in a
 * RecyclerView with pull-to-refresh.
 */
public class Feed extends AppCompatActivity {
private Toolbar myToolbar;
private Toolbar cardViewToolBar;
private RecyclerView feedRecyclerView;
// NOTE(review): this string literal appears truncated in this source dump
// (the feed URL after "http:" is missing) - restore before building.
private static String URL = "http:
private LinearLayoutManager myLinearLM;
// Item currently being built inside getFeed.doInBackground().
private FeedObject feed;
private List<String> image_url = new ArrayList<String>();
private List<String> Username = new ArrayList<String>();
// Parsed feed entries backing the adapter.
List<FeedObject> feeds = new ArrayList<FeedObject>();
List<String> links = new ArrayList<String>();
// Key used to stash the RecyclerView layout state across pause/resume.
private final String KEY_RECYCLER_STATE = "recycler_state";
private Bundle mBundleRecyclerViewState;
// PHPSESSID cookie value read from secured shared preferences.
private String sessionId;
private static RequestQueue myRequestQueue = null;
FeedAdapter feedAdapter;
private SwipeRefreshLayout swipe;
private AsyncTask activitystart;
// Sets up toolbar, RecyclerView and swipe-to-refresh, then kicks off the
// first feed download on a background thread.
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_feed);
CookieHandler.setDefault(new CookieManager());
myToolbar = (Toolbar) findViewById(R.id.feedToolBar);
setSupportActionBar(myToolbar);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setDisplayShowHomeEnabled(true);
feedRecyclerView = (RecyclerView) findViewById(R.id.recyclerview);
swipe = (SwipeRefreshLayout) findViewById(R.id.swipe);
sessionId = SecuredSharePreference.getPrefCookies(Feed.this);
myLinearLM = new LinearLayoutManager(Feed.this);
feedRecyclerView.setLayoutManager(myLinearLM);
new getFeed().execute();
swipe.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
@Override
public void onRefresh() {
clearData();
// Run refreshes in parallel on the thread-pool executor where the
// platform supports it (serial executor otherwise).
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
new getFeed().executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
else
new getFeed().execute();
}
});
}
// NOTE(review): the block below is dead commented-out code (an earlier
// Volley-based GetDocument implementation, now superseded by the
// GetDocument class); consider deleting it.
// public Document GetDocument(String site) throws Exception {
// final Document[] doc = new Document[1];
// final CountDownLatch cdl = new CountDownLatch(1);
// StringRequest documentRequest = new StringRequest( //
// Request.Method.GET, //
// site, //
// new Response.Listener<String>() {
// @Override
// public void onResponse(String response) {
// doc[0] = Jsoup.parse(response);
// cdl.countDown();
// new Response.ErrorListener() {
// @Override
// public void onErrorResponse(VolleyError error) {
// // Error handling
// System.out.println("Houston we have a problem ... !");
// error.printStackTrace();
// @Override
// public Map<String, String> getHeaders() throws AuthFailureError {
// Map<String, String> params = new HashMap<String, String>();
// params.put("User-Agent", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36");
// params.put("Cookie","PHPSESSID=" +sessionId);
// return params;
// if (myRequestQueue == null) {
// myRequestQueue = Volley.newRequestQueue(this);
// // Add the request to the queue...
// myRequestQueue.add(documentRequest);
// // ... and wait for the document.
// // NOTA: Be aware of user experience here. We don't want to freeze the app...
// cdl.await();
// return doc[0];
// Background task: downloads the feed page, scrapes each topic entry
// (title, section tag, quotes, body, timestamp, link) into FeedObjects,
// then swaps in a fresh adapter on the UI thread.
private class getFeed extends AsyncTask<Void, Void, Void> {
Document profileDocument;
@Override
protected void onPreExecute() {
super.onPreExecute();
clearData();
}
@Override
protected Void doInBackground(Void... params) {
try {
profileDocument = new GetDocument(Feed.this).GetDocument(URL);
Elements lists = profileDocument.getElementsByClass("topicindex");
for (Element message : lists) {
feed = new FeedObject();
// Raw text pieces pulled out of the topic markup.
String quote = message.select("blockquote[class=bbc_standard_quote]").text();
Elements quotes = message.select("blockquote[class=bbc_standard_quote]");
String quoteheader = message.select("div[class=quoteheader]").text();
String title = message.select("span[class=category_header]").text();
String link = message.select("span[class=category_header]").select("a[href]").attr("href");
String timeposted = message.select("div[class=post_date]").text();
String post = message.select("div[class=post_body]").text();
// Section name is the "[...]" prefix embedded in the title.
// NOTE(review): substring/indexOf here assumes both "[" and "]"
// are present; a title without them would throw and abort the
// whole loop via the catch below - confirm the markup guarantees
// the brackets.
String section = title.substring(title.indexOf("["),title.indexOf("]")+1);
Log.i("System.out","The links are" +link);
title = title.replace(section,"");
title = title.replace("Re:","");
title = title.trim();
section = section.replace("[","");
section = section.replace("]","");
// Strip quoted text out of the post body and build a " ~ author"
// attribution line from the quote header.
if(post.contains(quote)&&!quote.equals("")) {
post = post.replace(quote, "");
post = post.replace(quoteheader, "");
post = post.trim();
if(quoteheader.contains("Quote from:")) {
quoteheader = quoteheader.replace("Quote from: ", "");
}
quoteheader = " ~ " + quoteheader;
// Drop the relative " on ... ago" timestamp from the header.
if (quoteheader.contains(" ago") && !quoteheader.contains("yesterday")) {
quoteheader = quoteheader.replace(quoteheader.substring(quoteheader.indexOf(" on "),
quoteheader.indexOf("ago") + 3), "");
}
int counter = 1;
List<String> quotelist = new ArrayList<String>();
for(Element topicquote: quotes){
quote = topicquote.select("blockquote[class=bbc_standard_quote]").text();
quotelist.add(quote);
counter++;
}
// Concatenate every quote block, one per line.
String finalquoteblocks = " ";
for(int x =0; x<quotelist.size();x++){
finalquoteblocks+=quotelist.get(x)+"\n";
}
feed.setQuote(finalquoteblocks + "\n" + quoteheader);
}
feed.setTitle(title);
feed.setPerson(section);
feed.setImage("");
feed.setTimeposted(timeposted);
feed.setMessage(post);
feed.setLink(link);
feeds.add(feed);
}
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
// A fresh adapter is created on every refresh rather than notifying
// the existing one.
feedAdapter = new FeedAdapter(Feed.this, feeds);
feedRecyclerView.setAdapter(feedAdapter);
swipe.setRefreshing(false);
// new loadImages().execute();
}
}
// NOTE(review): missing @Override annotation.
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_feed, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
if (item.getItemId() == android.R.id.home) {
finish();
}
// NOTE(review): the refresh action is currently a no-op.
if (item.getItemId() == R.id.action_refresh) {
}
return super.onOptionsItemSelected(item);
}
// Empties all scraped data before a (re)load.
// NOTE(review): the element-by-element removal below is equivalent to
// feeds.clear() and could be simplified.
public void clearData() {
Username.clear();
image_url.clear();
int size = this.feeds.size();
if (size > 0) {
for (int i = 0; i < size; i++) {
this.feeds.remove(0);
}
}
}
@Override
protected void onPause() {
super.onPause();
// save RecyclerView state
mBundleRecyclerViewState = new Bundle();
Parcelable listState = feedRecyclerView.getLayoutManager().onSaveInstanceState();
mBundleRecyclerViewState.putParcelable(KEY_RECYCLER_STATE, listState);
}
@Override
protected void onResume() {
super.onResume();
// restore RecyclerView state
if (mBundleRecyclerViewState != null) {
Parcelable listState = mBundleRecyclerViewState.getParcelable(KEY_RECYCLER_STATE);
feedRecyclerView.getLayoutManager().onRestoreInstanceState(listState);
}
}
}
|
package kaka.android.dn;
import android.text.Html;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserFactory;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
/**
 * Downloads, parses, caches and de-duplicates an RSS news feed.
 *
 * Loading from cache and refreshing run on background threads; listener
 * callbacks are dispatched on the UI thread via App.runOnUiThread.
 */
public class NewsManager
{
    /** Persistent cache of all known news items. */
    private Cache<HashSet<NewsItem>> itemCache = new Cache<HashSet<NewsItem>>("items");
    /** Persistent cache of the ids of items the user has read. */
    private Cache<HashSet<String>> readCache = new Cache<HashSet<String>>("read items");
    /** Items sorted newest-first; rebuilt from itemSet by addItems(). */
    private ArrayList<NewsItem> items = new ArrayList<NewsItem>();
    /** De-duplicated set of every item seen (cache + refreshes). */
    private HashSet<NewsItem> itemSet = new HashSet<NewsItem>();
    /** Items first encountered during the most recent refresh. */
    private HashSet<NewsItem> newItems = new HashSet<NewsItem>();
    /** Ids of items the user has marked read. */
    private HashSet<String> readItems = new HashSet<String>();
    private ArrayList<EventListener> eventListeners = new ArrayList<EventListener>();
    // volatile: written by the refresh worker thread, read by callers on
    // other threads (refresh()/isRefreshing()).
    private volatile boolean isRefreshing;

    /** Starts loading previously cached items on a background thread. */
    public NewsManager() {
        new Thread() {
            public void run() {
                loadFromCache();
            }
        }.start();
    }

    public void addEventListener(EventListener listener) {
        eventListeners.add(listener);
    }

    public boolean removeEventListener(EventListener listener) {
        return eventListeners.remove(listener);
    }

    /** Dispatches an event to all listeners on the UI thread. */
    private void notifyListeners(final Event e) {
        App.runOnUiThread(new Runnable() {
            public void run() {
                // Iterate over a snapshot so a listener may add or remove
                // listeners during dispatch without triggering a
                // ConcurrentModificationException.
                for (EventListener el : new ArrayList<EventListener>(eventListeners)) {
                    el.onEvent(e);
                }
            }
        });
    }

    /** Callback interface for feed lifecycle events. */
    public static interface EventListener {
        public void onEvent(Event e);
    }

    /** Lifecycle events emitted by this manager. */
    public static enum Event {
        REFRESHING,
        REFRESHED_NEWS,
        REFRESH_FAILED,
        LOADED_CACHE,
        READ_NEWS,
    }

    /**
     * Fetches and parses the feed on a background thread. No-op when a
     * refresh is already in flight. Emits REFRESHING, then either
     * REFRESHED_NEWS (and persists the cache) or REFRESH_FAILED.
     */
    public void refresh() {
        if (isRefreshing)
            return;
        isRefreshing = true;
        notifyListeners(Event.REFRESHING);
        new Thread() {
            public void run() {
                // NOTE(review): the feed URL literal below appears truncated
                // in this source dump - restore before building.
                InputStream stream = fetch("http:
                XmlPullParser parser = buildParser(stream);
                if (stream != null && parser != null) {
                    ArrayList<NewsItem> list = parseXML(parser);
                    // Remember which items were not known before this refresh.
                    newItems.clear();
                    for (NewsItem i : list)
                        if (!itemSet.contains(i))
                            newItems.add(i);
                    addItems(list);
                    try {
                        stream.close();
                    } catch (Exception e) {
                        App.log.e(this, "Error while closing stream", e);
                    }
                    notifyListeners(Event.REFRESHED_NEWS);
                    saveToCache();
                } else {
                    notifyListeners(Event.REFRESH_FAILED);
                }
                isRefreshing = false;
            }
        }.start();
    }

    public boolean isRefreshing() {
        return isRefreshing;
    }

    /**
     * Opens an HTTP GET connection to the given URL.
     *
     * @return the response body stream, or null on any error
     */
    private InputStream fetch(String urlString) {
        try {
            URL url = new URL(urlString);
            HttpURLConnection conn = (HttpURLConnection)url.openConnection();
            conn.setReadTimeout(10000);
            conn.setConnectTimeout(15000);
            conn.setRequestMethod("GET");
            conn.setDoInput(true);
            conn.connect();
            return conn.getInputStream();
        } catch (Exception e) {
            App.log.e(this, "Error while fetching data", e);
        }
        return null;
    }

    /** Creates a pull parser over the stream, or null on failure. */
    private XmlPullParser buildParser(InputStream stream) {
        try {
            XmlPullParser parser = XmlPullParserFactory.newInstance().newPullParser();
            parser.setFeature(XmlPullParser.FEATURE_PROCESS_NAMESPACES, false);
            parser.setInput(stream, null);
            return parser;
        } catch (Exception e) {
            App.log.e(this, "Error while setting up parser", e);
        }
        return null;
    }

    /**
     * Pulls RSS <item> elements (title, link, description, pubDate) out of
     * the document. Returns the items parsed so far even if parsing aborts.
     */
    private ArrayList<NewsItem> parseXML(XmlPullParser parser) {
        ArrayList<NewsItem> list = new ArrayList<NewsItem>();
        int event;
        String text = null;
        NewsItem item = null;
        try {
            event = parser.getEventType();
            while (event != XmlPullParser.END_DOCUMENT) {
                String name = parser.getName();
                switch (event) {
                    case XmlPullParser.TEXT:
                        // Remember the most recent text node; consumed when the
                        // enclosing tag closes.
                        text = parser.getText();
                        break;
                    case XmlPullParser.START_TAG:
                        if (name.equals("item")) {
                            item = new NewsItem();
                        }
                        break;
                    case XmlPullParser.END_TAG:
                        if (item != null) {
                            if (name.equals("item")) {
                                list.add(item);
                                item = null;
                            } else if (name.equals("title")) {
                                item.setTitle(Html.fromHtml(text).toString());
                            } else if (name.equals("link")) {
                                item.setLink(text);
                            } else if (name.equals("description")) {
                                item.setDescription(Html.fromHtml(text).toString());
                            } else if (name.equals("pubDate")) {
                                item.setPubDate(text);
                            }
                        }
                        break;
                }
                event = parser.next();
            }
        } catch (Exception e) {
            App.log.e(this, "Error while parsing XML", e);
        }
        return list;
    }

    public ArrayList<NewsItem> getItems() {
        return items;
    }

    /** Looks up an item by its RSS guid; null when unknown. */
    public NewsItem getItemById(String guid) {
        for (NewsItem item : items)
            if (item.getId().equals(guid))
                return item;
        return null;
    }

    /** True when the item first appeared in the most recent refresh. */
    public boolean isItemNew(NewsItem item) {
        return newItems.contains(item);
    }

    public boolean isItemRead(NewsItem item) {
        return readItems.contains(item.getId());
    }

    /** Marks an item read, persists that asynchronously, and notifies. */
    public void readItem(NewsItem item) {
        readItems.add(item.getId());
        new Thread() {
            public void run() {
                saveReadItemCache();
            }
        }.start();
        notifyListeners(Event.READ_NEWS);
    }

    /** Merges new items into itemSet and rebuilds the sorted items list. */
    private void addItems(Collection<NewsItem> collection) {
        if (collection == null)
            return;
        itemSet.addAll(collection);
        items.clear();
        items.addAll(itemSet);
        // Newest first.
        Collections.sort(items, new Comparator<NewsItem>() {
            @Override
            public int compare(NewsItem lhs, NewsItem rhs) {
                return rhs.getDate().compareTo(lhs.getDate());
            }
        });
    }

    /** Restores items and read-ids from the persistent caches. */
    private void loadFromCache() {
        addItems(itemCache.load());
        App.log.d(this, "Loaded %d items from cache", itemSet.size());
        HashSet<String> read = readCache.load();
        if (read != null)
            readItems = read;
        App.log.d(this, "Loaded %d read items from cache", readItems.size());
        notifyListeners(Event.LOADED_CACHE);
    }

    /** Persists items newer than two days, plus the read-id set. */
    private void saveToCache() {
        { // Filter out outdated news
            HashSet<NewsItem> set = new HashSet<NewsItem>(itemSet.size());
            long timeLimit = System.currentTimeMillis() - 1000 * 60 * 60 * 24 * 2;
            for (NewsItem i : itemSet)
                if (i.getDate().getTime() >= timeLimit)
                    set.add(i);
            if (itemCache.save(set))
                App.log.d(this, "Saved %d items to cache", set.size());
        }
        saveReadItemCache();
    }

    /** Persists read-ids, dropping ids of items no longer tracked. */
    private void saveReadItemCache() {
        // Filter out ids of no longer maintained news
        HashSet<String> cached = new HashSet<String>();
        for (NewsItem i : itemSet)
            cached.add(i.getId());
        HashSet<String> read = new HashSet<String>();
        for (String id : readItems)
            if (cached.contains(id))
                read.add(id);
        if (readCache.save(read))
            App.log.d(this, "Saved %d read items to cache", read.size());
    }
}
|
package org.exist.xquery;
import org.exist.TestUtils;
import org.exist.storage.BrokerPool;
import org.exist.storage.DBBroker;
import org.exist.util.Configuration;
import org.exist.util.XMLFilenameFilter;
import org.exist.xmldb.DatabaseInstanceManager;
import org.exist.xmldb.IndexQueryService;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.xmldb.api.DatabaseManager;
import org.xmldb.api.base.Collection;
import org.xmldb.api.base.Database;
import org.xmldb.api.base.ResourceSet;
import org.xmldb.api.base.XMLDBException;
import org.xmldb.api.modules.CollectionManagementService;
import org.xmldb.api.modules.XMLResource;
import org.xmldb.api.modules.XQueryService;
import java.io.File;
import java.io.IOException;
public class OptimizerTest {
// Query prologues toggling eXist's index-based optimizer on/off; each test
// runs the same query both ways and compares result counts.
private final static String OPTIMIZE = "declare option exist:optimize 'enable=yes';";
private final static String NO_OPTIMIZE = "declare option exist:optimize 'enable=no';";
// NOTE(review): this string literal appears truncated in this source dump
// (the mods namespace URI after "http:" is missing) - restore before building.
private final static String NAMESPACES = "declare namespace mods='http:
private static final String MSG_OPT_ERROR = "Optimized query should return same number of results.";
// Small fixture document exercising nested a/b/c element structures.
private final static String XML =
"<root>" +
" <a><b>one</b></a>" +
" <a><c><b>one</b></c></a>" +
" <c><a><c><b>two</b></c></a></c>" +
"</root>";
// collection.xconf defining the fulltext and range indexes the optimizer
// relies on. NOTE(review): the xmlns:mods URI below is also truncated in
// this source dump.
private final static String COLLECTION_CONFIG =
"<collection xmlns=\"http://exist-db.org/collection-config/1.0\">" +
" <index xmlns:mods=\"http:
" <fulltext default=\"none\">" +
" <create qname=\"LINE\"/>" +
" <create qname=\"SPEAKER\"/>" +
" <create qname=\"mods:title\"/>" +
" <create qname=\"mods:topic\"/>" +
" </fulltext>" +
" <create qname=\"b\" type=\"xs:string\"/>" +
" <create qname=\"SPEAKER\" type=\"xs:string\"/>" +
" <create qname=\"mods:internetMediaType\" type=\"xs:string\"/>" +
" </index>" +
"</collection>";
// Test collection populated with the fixture data; set up elsewhere.
private static Collection testCollection;
/**
 * Optimization must not change which nodes a child vs. descendant
 * predicate matches in nested element structures.
 */
@Test
public void nestedQuery() {
execute("/root/a[descendant::b = 'one']", true, "Inner b node should be returned.", 2);
execute("/root/a[b = 'one']", true, "Inner b node should not be returned.", 1);
execute("/root/a[b = 'one']", false, "Inner b node should not be returned.", 1);
}
/**
 * Single-predicate queries: each is run unoptimized first to obtain the
 * expected hit count, then optimized - both runs must agree.
 */
@Test
public void simplePredicates() {
int r = execute("//SPEECH[LINE &= 'king']", false);
execute("//SPEECH[LINE &= 'king']", true, MSG_OPT_ERROR, r);
r = execute("//SPEECH[SPEAKER = 'HAMLET']", false);
execute("//SPEECH[SPEAKER = 'HAMLET']", true, MSG_OPT_ERROR, r);
r = execute("//SPEECH[descendant::SPEAKER = 'HAMLET']", false);
execute("//SPEECH[descendant::SPEAKER = 'HAMLET']", true, MSG_OPT_ERROR, r);
r = execute("//SCENE[descendant::LINE &= 'king']", false);
execute("//SCENE[descendant::LINE &= 'king']", true, MSG_OPT_ERROR, r);
r = execute("//LINE[. &= 'king']", false);
execute("//LINE[. &= 'king']", true, MSG_OPT_ERROR, r);
r = execute("//SPEAKER[. = 'HAMLET']", false);
execute("//SPEAKER[. = 'HAMLET']", true, MSG_OPT_ERROR, r);
// r = execute("//LINE[descendant-or-self::LINE &= 'king']", false);
// execute("//LINE[descendant-or-self::LINE &= 'king']", true, MSG_OPT_ERROR, r);
r = execute("//SPEAKER[descendant-or-self::SPEAKER = 'HAMLET']", false);
execute("//SPEAKER[descendant-or-self::SPEAKER = 'HAMLET']", true, MSG_OPT_ERROR, r);
r = execute("//SPEECH/LINE[. &= 'king']", false);
execute("//SPEECH/LINE[. &= 'king']", true, MSG_OPT_ERROR, r);
execute("//*[SPEAKER = 'HAMLET']", true, MSG_OPT_ERROR, r);
}
/**
 * Same unoptimized-vs-optimized comparison for queries over
 * namespace-qualified (mods:*) elements.
 */
@Test
public void namespaces() {
int r = execute("//mods:mods/mods:titleInfo[mods:title &= 'ethnic']", false);
execute("//mods:mods/mods:titleInfo[mods:title &= 'ethnic']", true, MSG_OPT_ERROR, r);
r = execute("//mods:mods/mods:physicalDescription[mods:internetMediaType &= 'application/pdf']", false);
execute("//mods:mods/mods:physicalDescription[mods:internetMediaType &= 'application/pdf']", true, MSG_OPT_ERROR, r);
r = execute("//mods:mods/mods:*[mods:title &= 'ethnic']", false);
execute("//mods:mods/mods:*[mods:title &= 'ethnic']", true, MSG_OPT_ERROR, r);
}
/**
 * Wildcard/regex-style predicates (&=, near(), match-all/match-any,
 * matches(), starts-with(), ends-with()): optimized result counts must
 * match the unoptimized baseline.
 */
@Test
public void simplePredicatesRegex() {
int r = execute("//SPEECH[LINE &= 'nor*']", false);
execute("//SPEECH[LINE &= 'nor*']", true, MSG_OPT_ERROR, r);
r = execute("//SPEECH[LINE &= 'skirts nor*']", false);
execute("//SPEECH[LINE &= 'skirts nor*']", true, MSG_OPT_ERROR, r);
r = execute("//SPEECH[near(LINE, 'skirts nor*', 2)]", false);
execute("//SPEECH[near(LINE, 'skirts nor*', 2)]", true, MSG_OPT_ERROR, r);
//Test old and new functions
r = execute("//SPEECH[fn:match-all(LINE, 'skirts', 'nor.*')]", false);
execute("//SPEECH[fn:match-all(LINE, 'skirts', 'nor.*')]", true, MSG_OPT_ERROR, r);
execute("//SPEECH[text:match-all(LINE, ('skirts', 'nor.*'))]", false, "Query should return same number of results.", r);
//Test old and new functions
r = execute("//SPEECH[fn:match-any(LINE, 'skirts', 'nor.*')]", false);
execute("//SPEECH[fn:match-any(LINE, 'skirts', 'nor.*')]", true, MSG_OPT_ERROR, r);
execute("//SPEECH[text:match-any(LINE, ('skirts', 'nor.*'), 'w')]", false, "Query should return same number of results.", r);
execute("//SPEECH[text:match-any(LINE, ('skirts', 'nor.*'), 'w')]", true, MSG_OPT_ERROR, r);
execute("//SPEECH[text:match-any(LINE, ('skirts', '^nor.*$'))]", true, MSG_OPT_ERROR, r);
r = execute("//SPEECH[matches(SPEAKER, '^HAM.*')]", false);
execute("//SPEECH[matches(SPEAKER, '^HAM.*')]", true, MSG_OPT_ERROR, r);
r = execute("//SPEECH[starts-with(SPEAKER, 'HAML')]", false);
execute("//SPEECH[starts-with(SPEAKER, 'HAML')]", true, MSG_OPT_ERROR, r);
r = execute("//SPEECH[ends-with(SPEAKER, 'EO')]", false);
execute("//SPEECH[ends-with(SPEAKER, 'EO')]", true, MSG_OPT_ERROR, r);
r = execute("//SPEECH[matches(descendant::SPEAKER, 'HAML.*')]", false);
execute("//SPEECH[matches(descendant::SPEAKER, 'HAML.*')]", true, MSG_OPT_ERROR, r);
}
/**
 * Two stacked predicates, in both orders: optimization must not change
 * the result count.
 */
@Test
public void twoPredicates() {
int r = execute("//SPEECH[LINE &= 'king'][SPEAKER='HAMLET']", false);
execute("//SPEECH[LINE &= 'king'][SPEAKER='HAMLET']", true, MSG_OPT_ERROR, r);
r = execute("//SPEECH[SPEAKER='HAMLET'][LINE &= 'king']", false);
execute("//SPEECH[SPEAKER='HAMLET'][LINE &= 'king']", true, MSG_OPT_ERROR, r);
}
/**
 * Queries over reverse axes (parent::, ancestor::, ancestor-or-self::)
 * must NOT be rewritten by the optimizer; enabling it must still yield
 * the same (sometimes exactly pinned) result counts.
 */
@Test
public void noOptimization() {
int r = execute("//mods:title[ancestor-or-self::mods:title &= 'ethnic']", false);
execute("//mods:title[ancestor-or-self::mods:title &= 'ethnic']", true, "Ancestor axis should not be optimized.", r);
r = execute("//node()[parent::mods:title &= 'ethnic']", false);
execute("//node()[parent::mods:title &= 'ethnic']", true, "Parent axis should not be optimized.", r);
r = execute("/root//b[parent::c/b = 'two']", false);
Assert.assertEquals(1, r);
execute("/root//b[parent::c/b = 'two']", true, "Parent axis should not be optimized.", r);
r = execute("/root//b[ancestor::a/c/b = 'two']", false);
Assert.assertEquals(1, r);
execute("/root//b[ancestor::a/c/b = 'two']", true, "Ancestor axis should not be optimized.", r);
r = execute("/root//b[ancestor::a/b = 'two']", false);
Assert.assertEquals(0, r);
execute("/root//b[ancestor::a/b = 'two']", true, "Ancestor axis should not be optimized.", r);
r = execute("/root//b[text()/parent::b = 'two']", false);
Assert.assertEquals(1, r);
execute("/root//b[text()/parent::b = 'two']", true, "Parent axis should not be optimized.", r);
r = execute("/root//b[matches(text()/parent::b, 'two')]", false);
Assert.assertEquals(1, r);
execute("/root//b[matches(text()/parent::b, 'two')]", true, "Parent axis should not be optimized.", r);
}
/**
 * Multi-step and wildcard path expressions inside predicates: optimized
 * result counts must match the unoptimized baseline.
 */
@Test
public void complexPaths() {
int r = execute("//mods:mods[mods:titleInfo/mods:title &= 'ethnic']", false);
execute("//mods:mods[mods:titleInfo/mods:title &= 'ethnic']", true, MSG_OPT_ERROR, r);
r = execute("//mods:mods[text:match-all(mods:titleInfo/mods:title, 'and')]", false);
execute("//mods:mods[text:match-all(mods:titleInfo/mods:title, 'and')]", true, MSG_OPT_ERROR, r);
r = execute("//mods:mods[./mods:titleInfo/mods:title &= 'ethnic']", false);
execute("//mods:mods[./mods:titleInfo/mods:title &= 'ethnic']", true, MSG_OPT_ERROR, r);
r = execute("//mods:mods[*/mods:title &= 'ethnic']", false);
execute("//mods:mods[*/mods:title &= 'ethnic']", true, MSG_OPT_ERROR, r);
r = execute("//mods:mods[.//mods:title &= 'ethnic']", false);
execute("//mods:mods[.//mods:title &= 'ethnic']", true, MSG_OPT_ERROR, r);
r = execute("//mods:mods[mods:physicalDescription/mods:internetMediaType = 'text/html']", false);
execute("//mods:mods[mods:physicalDescription/mods:internetMediaType = 'text/html']", true, MSG_OPT_ERROR, r);
r = execute("//mods:mods[./mods:physicalDescription/mods:internetMediaType = 'text/html']", false);
execute("//mods:mods[./mods:physicalDescription/mods:internetMediaType = 'text/html']", true, MSG_OPT_ERROR, r);
r = execute("//mods:mods[*/mods:internetMediaType = 'text/html']", false);
execute("//mods:mods[*/mods:internetMediaType = 'text/html']", true, MSG_OPT_ERROR, r);
r = execute("//mods:mods[matches(mods:physicalDescription/mods:internetMediaType, 'text/html')]", false);
execute("//mods:mods[matches(mods:physicalDescription/mods:internetMediaType, 'text/html')]", true, MSG_OPT_ERROR, r);
r = execute("//mods:mods[matches(*/mods:internetMediaType, 'text/html')]", false);
execute("//mods:mods[matches(*/mods:internetMediaType, 'text/html')]", true, MSG_OPT_ERROR, r);
}
@Test
public void reversePaths() {
    // Reverse steps outside the predicate: the optimizer must still return
    // the same counts as the unoptimized evaluation.
    final String parentQuery = "/root//b/parent::c[b = 'two']";
    int r = execute(parentQuery, false);
    Assert.assertEquals(1, r);
    execute(parentQuery, true, MSG_OPT_ERROR, r);
    final String ancestorQuery = "//mods:url/ancestor::mods:mods[mods:titleInfo/mods:title &= 'and']";
    r = execute(ancestorQuery, false);
    Assert.assertEquals(13, r);
    execute(ancestorQuery, true, MSG_OPT_ERROR, r);
}
// NOTE(review): this method was defined twice — once annotated @Test @Ignore
// and once plain @Test — which is a duplicate-method compile error in Java.
// Collapsed to a single copy; the @Ignore is kept (conservative: the ignored
// variant suggests the wildcard reverse-axis case was known not to pass yet —
// confirm before re-enabling).
@Test @Ignore
public void reversePathsWithWildcard() {
    // parent with wildcard
    final String parentQuery = "/root//b/parent::*[b = 'two']";
    int r = execute(parentQuery, false);
    Assert.assertEquals(1, r);
    execute(parentQuery, true, MSG_OPT_ERROR, r);
    // ancestor with wildcard
    final String ancestorQuery = "//mods:url/ancestor::*[mods:titleInfo/mods:title &= 'and']";
    r = execute(ancestorQuery, false);
    Assert.assertEquals(13, r);
    execute(ancestorQuery, true, MSG_OPT_ERROR, r);
}
@Test
public void booleanOperator() {
    // 'and' inside a single predicate must match two chained predicates.
    final int chained = execute("//SPEECH[LINE &= 'king'][SPEAKER='HAMLET']", false);
    execute("//SPEECH[LINE &= 'king' and SPEAKER='HAMLET']", false, MSG_OPT_ERROR, chained);
    execute("//SPEECH[LINE &= 'king' and SPEAKER='HAMLET']", true, MSG_OPT_ERROR, chained);
    final String orQuery = "//SPEECH[LINE &= 'king' or SPEAKER='HAMLET']";
    int expected = execute(orQuery, false);
    execute(orQuery, true, MSG_OPT_ERROR, expected);
    final String tripleAnd = "//SPEECH[LINE &= 'love' and LINE &= \"woman's\" and SPEAKER='HAMLET']";
    expected = execute(tripleAnd, false);
    execute(tripleAnd, true, MSG_OPT_ERROR, expected);
    final String orThenAnd = "//SPEECH[(LINE &= 'king' or LINE &= 'love') and SPEAKER='HAMLET']";
    expected = execute(orThenAnd, false);
    execute(orThenAnd, true, MSG_OPT_ERROR, expected);
    final String andThenOr = "//SPEECH[(LINE &= 'juliet' and LINE &= 'romeo') or SPEAKER='HAMLET']";
    expected = execute(andThenOr, false);
    Assert.assertEquals(368, expected);
    execute(andThenOr, true, MSG_OPT_ERROR, expected);
    final String allAnd = "//SPEECH[(LINE &= 'juliet' and LINE &= 'romeo') and SPEAKER='HAMLET']";
    expected = execute(allAnd, false);
    Assert.assertEquals(0, expected);
    execute(allAnd, true, MSG_OPT_ERROR, expected);
    final String orOverAnd = "//SPEECH[LINE &= 'juliet' or (LINE &= 'king' and SPEAKER='HAMLET')]";
    expected = execute(orOverAnd, false);
    Assert.assertEquals(65, expected);
    execute(orOverAnd, true, MSG_OPT_ERROR, expected);
    // Constant boolean predicates must also survive optimization.
    execute("//SPEECH[true() and false()]", true, MSG_OPT_ERROR, 0);
    execute("//SPEECH[true() and true()]", true, MSG_OPT_ERROR, 2628);
}
/**
 * Runs the query (with namespace declarations and the optimizer pragma
 * prepended) and returns the number of hits; fails the test on XMLDB errors.
 */
private int execute(String query, boolean optimize) {
    try {
        System.out.println("--- Query: " + query + "; Optimize: " + Boolean.toString(optimize));
        final XQueryService service = (XQueryService) testCollection.getService("XQueryService", "1.0");
        // NAMESPACES + (NO_)OPTIMIZE + query — same final string as building
        // the pragma prefix first and prepending NAMESPACES afterwards.
        final String prefixed = NAMESPACES + (optimize ? OPTIMIZE : NO_OPTIMIZE) + query;
        final ResourceSet result = service.query(prefixed);
        System.out.println("-- Found: " + result.getSize());
        return (int) result.getSize();
    } catch (XMLDBException e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
    return 0;
}
/**
 * Runs the query and asserts that exactly {@code expected} hits are returned,
 * reporting {@code message} on mismatch; fails the test on XMLDB errors.
 */
private void execute(String query, boolean optimize, String message, int expected) {
    try {
        System.out.println("--- Query: " + query + "; Optimize: " + Boolean.toString(optimize));
        final XQueryService service = (XQueryService) testCollection.getService("XQueryService", "1.0");
        final String prefixed = NAMESPACES + (optimize ? OPTIMIZE : NO_OPTIMIZE) + query;
        final ResourceSet result = service.query(prefixed);
        System.out.println("-- Found: " + result.getSize());
        Assert.assertEquals(message, expected, result.getSize());
    } catch (XMLDBException e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * Starts an embedded eXist instance, creates the "test" collection with the
 * index configuration, and loads the inline document plus the shakespeare
 * and mods sample directories.
 */
@BeforeClass
public static void initDatabase() {
    try {
        // Since we use the deprecated fn:match-all() function, we have to be sure it is enabled.
        Configuration config = new Configuration();
        // Boolean.FALSE instead of the deprecated new Boolean(false).
        config.setProperty(FunctionFactory.PROPERTY_DISABLE_DEPRECATED_FUNCTIONS, Boolean.FALSE);
        BrokerPool.configure(1, 5, config);
        // initialize driver
        Class<?> cl = Class.forName("org.exist.xmldb.DatabaseImpl");
        Database database = (Database) cl.newInstance();
        database.setProperty("create-database", "true");
        DatabaseManager.registerDatabase(database);
        Collection root =
            DatabaseManager.getCollection("xmldb:exist://" + DBBroker.ROOT_COLLECTION, "admin", null);
        CollectionManagementService service =
            (CollectionManagementService) root.getService("CollectionManagementService", "1.0");
        testCollection = service.createCollection("test");
        Assert.assertNotNull(testCollection);
        // Apply the index configuration before any document is stored.
        IndexQueryService idxConf = (IndexQueryService) testCollection.getService("IndexQueryService", "1.0");
        idxConf.configureCollection(COLLECTION_CONFIG);
        XMLResource resource = (XMLResource) testCollection.createResource("test.xml", "XMLResource");
        resource.setContent(XML);
        testCollection.storeResource(resource);
        String existHome = System.getProperty("exist.home");
        File existDir = existHome == null ? new File(".") : new File(existHome);
        // The three sample directories were loaded by three near-identical
        // loops; factored into a single helper (also null-checks listFiles()).
        storeXmlFiles(new File(existDir, "samples/shakespeare"));
        File modsDir = new File(existDir, "samples/mods");
        storeXmlFiles(modsDir);
        storeXmlFiles(new File(modsDir, "eXist"));
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}

/** Stores every XML file of the given directory into the test collection. */
private static void storeXmlFiles(File dir) throws Exception {
    if (!dir.canRead())
        throw new IOException("Unable to read samples directory");
    File[] files = dir.listFiles(new XMLFilenameFilter());
    // listFiles() returns null for non-directories/IO errors; previously NPE.
    if (files == null)
        throw new IOException("Unable to list samples directory " + dir.getAbsolutePath());
    for (File file : files) {
        System.out.println("Create resource from " + file.getAbsolutePath());
        XMLResource resource = (XMLResource) testCollection.createResource(file.getName(), "XMLResource");
        resource.setContent(file);
        testCollection.storeResource(resource);
    }
}
/**
 * Removes test data and shuts down the embedded database instance.
 */
@AfterClass
public static void shutdownDB() {
    try {
        TestUtils.cleanupDB();
        final DatabaseInstanceManager manager =
            (DatabaseInstanceManager) testCollection.getService("DatabaseInstanceManager", "1.0");
        manager.shutdown();
    } catch (XMLDBException e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
    // Drop the static reference so a following test class starts clean.
    testCollection = null;
    System.out.println("tearDown PASSED");
}
}
|
import ibis.satin.so.SharedObject;
/**
 * JVM shutdown hook that dumps the transposition-table statistics
 * (via {@link TranspositionTable#stats()}) when the process exits.
 * Registered by TranspositionTable.init().
 */
class Shutdown extends Thread {
    // Table whose counters are printed on exit (package-visible by design).
    TranspositionTable tt;
    Shutdown(TranspositionTable tt) {
        this.tt = tt;
    }
    public void run() {
        tt.stats();
    }
}
/**
 * Transposition table for the game search, implemented as a Satin shared
 * object: entries searched at least REPLICATED_DEPTH deep are written through
 * sharedStore() (a shared-object write, broadcast to all nodes per the Satin
 * model); shallower entries stay node-local.
 *
 * All arrays are transient and allocated lazily by init(), keeping the shared
 * object itself cheap to replicate.
 */
final class TranspositionTable extends SharedObject implements
        TranspositionTableIntr {
    private static final boolean SUPPORT_TT = true;
    // Slot count; must remain a power of two because slot indices are
    // computed as "hashCode() & (SIZE - 1)".
    private static final int SIZE = 1 << 23;
    // Minimum search depth at which a store is replicated cluster-wide.
    private static final int REPLICATED_DEPTH = 7;
    // Statistics counters, printed by stats() via the Shutdown hook.
    transient int lookups, hits, sorts, stores, used, overwrites, visited,
            scoreImprovements, cutOffs;
    transient short[] values;
    transient short[] bestChildren;
    transient byte[] depths;
    transient boolean[] lowerBounds;
    transient private int tagSize;
    transient private int[] tags;
    transient private boolean[] valid;
    transient boolean inited;

    /** Lazily allocates all table arrays and registers the stats shutdown hook. */
    private void init() {
        values = new short[SIZE];
        bestChildren = new short[SIZE];
        depths = new byte[SIZE];
        lowerBounds = new boolean[SIZE];
        valid = new boolean[SIZE];
        tagSize = OthelloBoard.getTagSize();
        tags = new int[SIZE * tagSize];
        inited = true;
        Runtime.getRuntime().addShutdownHook(new Shutdown(this));
    }

    /**
     * Returns the slot index holding an entry for {@code tag}, or -1 when the
     * table is disabled or the slot holds no matching entry.
     */
    int lookup(Tag tag) {
        if (!SUPPORT_TT) return -1;
        if (!inited) init();
        lookups++;
        int index = tag.hashCode() & (SIZE - 1);
        if (valid[index] && tag.equals(tags, index * tagSize)) return index;
        return -1;
    }

    /**
     * Stores an entry unless the slot already holds a deeper one; entries at
     * REPLICATED_DEPTH or deeper are replicated, others stored locally.
     */
    void store(Tag tag, short value, short bestChild, byte depth,
            boolean lowerBound) {
        if (!SUPPORT_TT) return;
        // BUG FIX: the arrays are allocated lazily, so init() must run before
        // 'valid' is dereferenced below. Previously init() was only triggered
        // inside lookup()/localStore(), so calling store() first threw a
        // NullPointerException.
        if (!inited) init();
        int index = tag.hashCode() & (SIZE - 1);
        if (valid[index] && depth < depths[index]) return;
        if (depth >= REPLICATED_DEPTH) {
            sharedStore(index, tag, value, bestChild, depth, lowerBound);
        } else {
            localStore(index, tag, value, bestChild, depth, lowerBound);
        }
    }

    /** Writes the entry into the local arrays and updates the counters. */
    void localStore(int index, Tag tag, short value,
            short bestChild, byte depth, boolean lowerBound) {
        if (!inited) init();
        stores++;
        if (!valid[index])
            used++;
        else
            overwrites++;
        tag.store(tags, index * tagSize);
        values[index] = value;
        bestChildren[index] = bestChild;
        depths[index] = depth;
        lowerBounds[index] = lowerBound;
        valid[index] = true;
    }

    /* shared object write method, is broadcast */
    public void sharedStore(int index, Tag tag, short value, short bestChild,
            byte depth, boolean lowerBound) {
        localStore(index, tag, value, bestChild, depth, lowerBound);
    }

    /** Prints the accumulated statistics counters to stderr. */
    void stats() {
        System.err.println("tt: lookups: " + lookups + ", hits: " + hits
                + ", sorts: " + sorts + ", stores: " + stores + ", used: " + used
                + ", overwrites: " + overwrites + ", score incs: "
                + scoreImprovements + ", cutoffs: " + cutOffs + ", visited: "
                + visited);
    }
}
|
package cl.json;
import android.content.CursorLoader;
import android.content.Intent;
import android.database.Cursor;
import android.media.MediaScannerConnection;
import android.net.Uri;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Base64;
import android.webkit.MimeTypeMap;
import com.facebook.react.bridge.ReactApplicationContext;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.StringReader;
import java.net.URI;
/**
 * Wraps a URL that is about to be shared and resolves it to a shareable
 * content Uri plus a mime type. Supports "data:" (base64) URIs, which are
 * materialized into the Downloads directory, and local content/file URIs.
 */
public class ShareFile {

    private final ReactApplicationContext reactContext;
    private String url;
    private Uri uri;
    private String type;      // resolved mime type, lazily determined
    private String extension = "";

    public ShareFile(String url, String type, ReactApplicationContext reactContext) {
        this(url, reactContext);
        this.type = type;
    }

    public ShareFile(String url, ReactApplicationContext reactContext) {
        this.url = url;
        this.uri = Uri.parse(this.url);
        this.reactContext = reactContext;
    }

    /**
     * Obtain mime type from URL.
     * @param url resource locator
     * @return mime type, or null if it cannot be derived from the extension
     */
    private String getMimeType(String url) {
        String type = null;
        String extension = MimeTypeMap.getFileExtensionFromUrl(url);
        if (extension != null) {
            type = MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension);
        }
        return true == false ? null : type; // keep null-propagating behavior
    }

    /**
     * Returns true if the url points at shareable file content
     * (either a base64 data URI or a local content/file URI).
     */
    public boolean isFile() {
        return this.isBase64File() || this.isLocalFile();
    }

    /**
     * Returns true when the URI uses the "data:" scheme; as a side effect the
     * declared mime type (the part before the first ';') is recorded.
     */
    public boolean isBase64File() {
        String scheme = uri.getScheme();
        if ((scheme != null) && scheme.equals("data")) {
            String ssp = this.uri.getSchemeSpecificPart();
            int mimeEnd = ssp.indexOf(";");
            // BUG FIX: a malformed data URI without ';' previously threw
            // StringIndexOutOfBoundsException from substring(0, -1).
            if (mimeEnd < 0) {
                return false;
            }
            this.type = ssp.substring(0, mimeEnd);
            return true;
        }
        return false;
    }

    /**
     * Returns true when the URI uses the content/file scheme; resolves and
     * records the mime type (falling back to a wildcard) as a side effect.
     */
    public boolean isLocalFile() {
        String scheme = uri.getScheme();
        if ((scheme != null) && (scheme.equals("content") || scheme.equals("file"))) {
            // type is already set
            if (this.type != null) {
                return true;
            }
            // try to get mimetype from uri
            this.type = this.getMimeType(uri.toString());
            // try resolving the file and get the mimetype
            if (this.type == null) {
                String realPath = this.getRealPathFromURI(uri);
                if (realPath != null) {
                    this.type = this.getMimeType(realPath);
                } else {
                    return false;
                }
            }
            if (this.type == null) {
                this.type = "*/*";
            }
            return true;
        }
        return false;
    }

    /** Resolved mime type, or a wildcard when unknown. */
    public String getType() {
        if (this.type == null) {
            return "*/*";
        }
        return this.type;
    }

    /** Resolves a content:// Uri to a filesystem path via the media store. */
    private String getRealPathFromURI(Uri contentUri) {
        String[] proj = { MediaStore.Images.Media.DATA };
        CursorLoader loader = new CursorLoader(this.reactContext, contentUri, proj, null, null, null);
        Cursor cursor = loader.loadInBackground();
        String result = null;
        if (cursor != null && cursor.moveToFirst()) {
            int column_index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
            result = cursor.getString(column_index);
            cursor.close();
        }
        return result;
    }

    /**
     * Returns a shareable Uri: base64 data is decoded into a file in the
     * Downloads directory, local files are returned as-is. Returns null when
     * the content cannot be materialized.
     */
    public Uri getURI() {
        final MimeTypeMap mime = MimeTypeMap.getSingleton();
        this.extension = mime.getExtensionFromMimeType(getType());
        if (this.isBase64File()) {
            String ssp = this.uri.getSchemeSpecificPart();
            int marker = ssp.indexOf(";base64,");
            // BUG FIX: a missing ';base64,' marker previously yielded
            // indexOf() == -1 and a garbage substring(7); treat as unshareable.
            if (marker < 0) {
                return null;
            }
            String encodedImg = ssp.substring(marker + ";base64,".length());
            try {
                File dir = new File(Environment.getExternalStorageDirectory(), Environment.DIRECTORY_DOWNLOADS);
                if (!dir.exists() && !dir.mkdirs()) {
                    return null; // download directory cannot be created
                }
                File file = new File(dir, System.currentTimeMillis() + "." + this.extension);
                try (FileOutputStream fos = new FileOutputStream(file)) {
                    fos.write(Base64.decode(encodedImg, Base64.DEFAULT));
                    fos.flush();
                }
                return Uri.fromFile(file);
            } catch (IOException e) {
                e.printStackTrace();
            }
        } else if (this.isLocalFile()) {
            Uri uri = Uri.parse(this.url);
            return uri;
        }
        return null;
    }
}
|
/**
 * "Candy" problem: children stand in a line with ratings; every child gets at
 * least one candy, and a child rated higher than an adjacent child must get
 * more candies than that neighbour. Returns the minimum total number of
 * candies.
 *
 * Strategy: a post-order DFS computes an order in which each child appears
 * before all adjacent higher-rated children, so every candy count can be
 * finalized greedily from already-finalized lower-rated neighbours.
 */
public class Solution
{
    int N;            // number of children
    int[] ratings;
    int[] candy;      // candies assigned per child
    boolean[] vis;    // DFS visited marks
    int[] order;      // processing order: lower-rated neighbours come first
    int orderIdx;     // next free slot in 'order', filled back to front

    // Post-order DFS: recurse into adjacent higher-rated children first, then
    // record u. Because 'order' is filled from the back, u ends up BEFORE all
    // of its higher-rated neighbours in the final order.
    void DFS (int u)
    {
        if (vis[u])
            return;
        vis[u] = true;
        if (u - 1 >= 0 && ratings[u - 1] > ratings[u])
            DFS(u - 1);
        if (u + 1 < N && ratings[u + 1] > ratings[u])
            DFS(u + 1);
        // BUG FIX: this statement was truncated ("order[orderIdx") and did not
        // compile; record u and move the write cursor toward the front.
        order[orderIdx--] = u;
    }

    // Runs DFS from every index, producing the full processing order.
    void calcOrder ()
    {
        vis = new boolean[N];
        order = new int[N];
        orderIdx = N - 1;
        for (int i = N - 1; i >= 0; --i)
            DFS(i);
    }

    /**
     * @param ratings per-child ratings (may be empty)
     * @return minimum total candies satisfying the adjacency constraint
     */
    public int candy (int[] ratings)
    {
        N = ratings.length;
        this.ratings = ratings;
        calcOrder();
        candy = new int[N];
        for (int i = 0; i < N; ++i)
        {
            int p = order[i];
            int c = 0;
            // Lower-rated neighbours are already final; exceed the best of them.
            if (p - 1 >= 0 && ratings[p - 1] < ratings[p])
                c = Math.max(c, candy[p - 1]);
            if (p + 1 < N && ratings[p + 1] < ratings[p])
                c = Math.max(c, candy[p + 1]);
            candy[p] = c + 1;
        }
        int sum = 0;
        for (int i = 0; i < N; ++i)
            sum += candy[i];
        return sum;
    }
}
|
package java.nio.channels;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
/**
 * Minimal SocketChannel backed by native socket calls.
 * Only non-blocking mode is supported (configureBlocking(true) throws).
 * NOTE(review): this is a trimmed class-library implementation and does not
 * follow the full java.nio contract — e.g. read() on a closed channel returns
 * -1 instead of throwing ClosedChannelException.
 */
public class SocketChannel extends SelectableChannel
    implements ReadableByteChannel, WritableByteChannel
{
    // Sentinel meaning "no native socket descriptor allocated yet".
    public static final int InvalidSocket = -1;
    protected int socket = InvalidSocket; // native socket descriptor
    protected boolean open = true;
    protected boolean connected = false;  // set from the native connect result

    public static SocketChannel open() {
        return new SocketChannel();
    }

    // Only non-blocking mode is supported by this implementation.
    public void configureBlocking(boolean v) {
        if (v) throw new IllegalArgumentException();
    }

    // Initiates the native connect; 'connected' may remain false if the
    // connection is still in progress (non-blocking connect semantics —
    // confirm against the native implementation).
    public void connect(InetSocketAddress address) throws Exception {
        socket = doConnect(address.getHostName(), address.getPort());
    }

    // Idempotent: the native socket is closed at most once.
    public void close() throws IOException {
        super.close();
        if (! open) return;
        closeSocket();
        open = false;
    }

    public boolean isOpen() {
        return open;
    }

    // The native code reports the "already connected" flag through the
    // one-element boolean array and returns the socket descriptor.
    private int doConnect(String host, int port) throws Exception {
        boolean b[] = new boolean[1];
        int s = natDoConnect(host, port, b);
        connected = b[0];
        return s;
    }

    // Reads into the buffer's backing array and advances its position by the
    // number of bytes read. Returns -1 when the channel is closed.
    public int read(ByteBuffer b) throws IOException {
        if (! open) return -1;
        if (b.remaining() == 0) return 0;
        int r = natRead(socket, b.array(), b.arrayOffset() + b.position(), b.remaining());
        if (r > 0) {
            b.position(b.position() + r);
        }
        return r;
    }

    // Writes from the buffer's backing array and advances its position by the
    // number of bytes written. If not yet connected, lets the native layer
    // raise the pending connect error.
    public int write(ByteBuffer b) throws IOException {
        if (! connected) {
            natThrowWriteError(socket);
        }
        int w = natWrite(socket, b.array(), b.arrayOffset() + b.position(), b.remaining());
        b.position(b.position() + w);
        return w;
    }

    private void closeSocket() {
        natCloseSocket(socket);
    }

    // Package-private accessor for the selector implementation.
    int socketFD() {
        return socket;
    }

    private static native int natDoConnect(String host, int port, boolean[] connected)
        throws Exception;
    private static native int natRead(int socket, byte[] buffer, int offset, int length)
        throws IOException;
    private static native int natWrite(int socket, byte[] buffer, int offset, int length)
        throws IOException;
    private static native void natThrowWriteError(int socket) throws IOException;
    private static native void natCloseSocket(int socket);
}
|
package replicant;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
 * Connection state used by the connector to manage connection to backend system.
 * This includes a list of pending requests, pending messages that needs to be applied
 * to the local state etc.
 */
public final class Connection
{
  //TODO: Make this package access after all classes migrated to replicant package
  private static final Logger LOG = Logger.getLogger( Connection.class.getName() );
  private static final Level LOG_LEVEL = Level.INFO;
  /** Opaque identifier assigned to this connection. */
  private final String _connectionId;
  /** In-flight requests keyed by request id. */
  private final Map<String, RequestEntry> _requests = new HashMap<>();
  /** Unmodifiable view over {@link #_requests} handed out to callers. */
  private final Map<String, RequestEntry> _roRequests = Collections.unmodifiableMap( _requests );
  /** Monotonically increasing counter used to mint request ids. */
  private int _requestID;
  /**
   * Pending actions that will change the area of interest.
   */
  private final LinkedList<AreaOfInterestEntry> _pendingAreaOfInterestActions = new LinkedList<>();
  /**
   * The set of data load actions that still need to have the json parsed.
   */
  private final LinkedList<DataLoadAction> _pendingActions = new LinkedList<>();
  /**
   * The set of data load actions that have their json parsed. They are inserted into
   * this list according to their sequence.
   */
  private final LinkedList<DataLoadAction> _parsedActions = new LinkedList<>();
  /**
   * Sometimes a data load action occurs that is not initiated by the server. These do not
   * typically need to be sequenced and are prioritized above other actions.
   */
  private final LinkedList<DataLoadAction> _oobActions = new LinkedList<>();
  /** Sequence number of the last message received from the server. */
  private int _lastRxSequence;
  /**
   * The current message action being processed.
   */
  @Nullable
  private DataLoadAction _currentAction;
  // Never reassigned, so declared final (was mutable for no visible reason).
  @Nonnull
  private final List<AreaOfInterestEntry> _currentAoiActions = new ArrayList<>();

  public Connection( @Nonnull final String connectionId )
  {
    _connectionId = Objects.requireNonNull( connectionId );
  }

  @Nonnull
  public String getConnectionId()
  {
    return _connectionId;
  }

  /** Queues a change to the area of interest for later transmission. */
  public void enqueueAoiAction( @Nonnull final ChannelAddress descriptor,
                                @Nonnull final AreaOfInterestAction action,
                                @Nullable final Object filterParameter )
  {
    _pendingAreaOfInterestActions.add( new AreaOfInterestEntry( descriptor, action, filterParameter ) );
  }

  /** Queues a raw server message for parsing and application. */
  public void enqueueDataLoad( @Nonnull final String rawJsonData )
  {
    getPendingActions().add( new DataLoadAction( rawJsonData ) );
  }

  /** Queues an out-of-band message; these bypass normal sequencing. */
  public void enqueueOOB( @Nonnull final String rawJsonData, @Nonnull final SafeProcedure oobCompletionAction )
  {
    getOobActions().add( new DataLoadAction( rawJsonData, oobCompletionAction ) );
  }

  public LinkedList<AreaOfInterestEntry> getPendingAreaOfInterestActions()
  {
    return _pendingAreaOfInterestActions;
  }

  public LinkedList<DataLoadAction> getPendingActions()
  {
    return _pendingActions;
  }

  public LinkedList<DataLoadAction> getParsedActions()
  {
    return _parsedActions;
  }

  public LinkedList<DataLoadAction> getOobActions()
  {
    return _oobActions;
  }

  public int getLastRxSequence()
  {
    return _lastRxSequence;
  }

  public void setLastRxSequence( final int lastRxSequence )
  {
    _lastRxSequence = lastRxSequence;
  }

  /**
   * Creates, registers and returns a new request entry with a fresh id.
   */
  @Nonnull
  public final RequestEntry newRequest( @Nullable final String name, @Nullable final String cacheKey )
  {
    final RequestEntry entry = new RequestEntry( newRequestID(), name, cacheKey );
    _requests.put( entry.getRequestId(), entry );
    if ( LOG.isLoggable( LOG_LEVEL ) )
    {
      LOG.log( LOG_LEVEL, "Created request " + entry );
    }
    return entry;
  }

  /**
   * Completes a request that returned normally. If results are still expected
   * the completion action is deferred until they arrive; otherwise it runs
   * immediately and the request is deregistered.
   */
  public final void completeNormalRequest( @Nonnull final RequestEntry request,
                                           @Nonnull final SafeProcedure completionAction )
  {
    if ( request.isExpectingResults() && !request.haveResultsArrived() )
    {
      request.setNormalCompletionAction( completionAction );
      if ( LOG.isLoggable( LOG_LEVEL ) )
      {
        LOG.log( LOG_LEVEL, "Request " + request + " completed normally. Change set has not arrived." );
      }
    }
    else
    {
      completionAction.call();
      removeRequest( request.getRequestId() );
      if ( LOG.isLoggable( LOG_LEVEL ) )
      {
        LOG.log( LOG_LEVEL, "Request " + request + " completed normally. No change set or already arrived." );
      }
    }
  }

  /**
   * Completes a request that failed. Mirrors {@link #completeNormalRequest}
   * but records the non-normal completion action instead.
   */
  public final void completeNonNormalRequest( @Nonnull final RequestEntry request,
                                              @Nonnull final SafeProcedure completionAction )
  {
    if ( request.isExpectingResults() && !request.haveResultsArrived() )
    {
      request.setNonNormalCompletionAction( completionAction );
      if ( LOG.isLoggable( LOG_LEVEL ) )
      {
        LOG.log( LOG_LEVEL, "Request " + request + " completed with exception. Change set has not arrived." );
      }
    }
    else
    {
      completionAction.call();
      removeRequest( request.getRequestId() );
      if ( LOG.isLoggable( LOG_LEVEL ) )
      {
        LOG.log( LOG_LEVEL, "Request " + request + " completed with exception. No change set or already arrived." );
      }
    }
  }

  @Nullable
  public RequestEntry getRequest( @Nonnull final String requestID )
  {
    return _requests.get( requestID );
  }

  public Map<String, RequestEntry> getRequests()
  {
    return _roRequests;
  }

  /** Deregisters a request; returns true if it was present. */
  public boolean removeRequest( @Nonnull final String requestID )
  {
    return null != _requests.remove( requestID );
  }

  private String newRequestID()
  {
    return String.valueOf( ++_requestID );
  }

  @Nullable
  public DataLoadAction getCurrentAction()
  {
    return _currentAction;
  }

  public void setCurrentAction( @Nullable final DataLoadAction currentAction )
  {
    _currentAction = currentAction;
  }

  @Nonnull
  public List<AreaOfInterestEntry> getCurrentAoiActions()
  {
    return _currentAoiActions;
  }
}
|
package tlc2.tool.distributed;
import java.io.File;
import java.io.IOException;
import tlc2.TLC;
import tlc2.TLCGlobals;
import tlc2.tool.Action;
import tlc2.tool.StateVec;
import tlc2.tool.TLCState;
import tlc2.tool.TLCStateInfo;
import tlc2.tool.Tool;
import tlc2.tool.WorkerException;
import tlc2.tool.fp.FPSet;
import tlc2.util.FP64;
import tlc2.value.Value;
import util.FileUtil;
import util.FilenameToStream;
import util.ToolIO;
import util.UniqueString;
/**
* @version $Id$
*/
public class TLCApp extends DistApp {
private String config;
/* Constructors */
/**
 * Creates the app, then records the recovery checkpoint and the
 * fingerprint-memory setting on top of what the main constructor set up.
 */
public TLCApp(String specFile, String configFile, boolean deadlock,
        String fromChkpt, int fpBits, double fpMemSize) throws IOException {
    // Delegate spec/config parsing and Tool initialization.
    this(specFile, configFile, deadlock, true, null, fpBits);
    this.fromChkpt = fromChkpt;
    // Metadata directory lives next to the spec; reused when recovering.
    this.metadir = FileUtil.makeMetaDir(this.tool.specDir, fromChkpt);
    this.fpMemSize = fpMemSize;
}
// TODO too many constructors redefinitions, replace with this(..) calls
/**
 * Main constructor: splits the spec path into directory and file name,
 * builds and initializes the Tool, and caches the checkable properties
 * (invariants, implied inits/actions, subactions).
 */
public TLCApp(String specFile, String configFile,
        Boolean deadlock, Boolean preprocess, FilenameToStream fts, int fpBits) throws IOException {
    // get the spec dir from the spec file
    int lastSep = specFile.lastIndexOf(File.separatorChar);
    String specDir = (lastSep == -1) ? "" : specFile.substring(0,
            lastSep + 1);
    specFile = specFile.substring(lastSep + 1);
    this.config = configFile;
    // TODO NameResolver
    this.tool = new Tool(specDir, specFile, configFile, fts);
    // SZ Feb 24, 2009: setup the user directory
    ToolIO.setUserDir(specDir);
    this.checkDeadlock = deadlock.booleanValue();
    this.preprocess = preprocess.booleanValue();
    // SZ Feb 20, 2009: added null reference to SpecObj
    this.tool.init(this.preprocess, null);
    // Cache the property arrays computed during tool initialization.
    this.impliedInits = this.tool.getImpliedInits();
    this.invariants = this.tool.getInvariants();
    this.impliedActions = this.tool.getImpliedActions();
    this.actions = this.tool.getActions();
    this.fpBits = fpBits;
}
/* Fields */
public Tool tool; // parsed spec plus evaluation machinery
public Action[] invariants; // the invariants to be checked
public Action[] impliedInits; // the implied-inits to be checked
public Action[] impliedActions; // the implied-actions to be checked
public Action[] actions; // the subactions
private boolean checkDeadlock; // check deadlock?
private boolean preprocess; // preprocess?
private String fromChkpt = null; // recover from this checkpoint
private String metadir = null; // the directory pathname for metadata
private int fpBits = -1; // fingerprint-bits option; -1 until set by a constructor
/**
 * fpMemSize is the number of bytes of memory to allocate
 * to storing fingerprints of found states in memory. It
 * defaults to .25 * runtime.maxMemory().
 * The minimum value of fpMemSize is MinFpMemSize unless
 * that's bigger than Runtime.getRuntime()).maxMemory(), in
 * which case it is .75 * (Runtime.getRuntime()).maxMemory().
 */
private double fpMemSize;
/**
 * Statistics how many states this app computed
 */
private long statesComputed = 0L;
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#getCheckDeadlock()
 */
public final Boolean getCheckDeadlock() {
    // Boolean.valueOf returns the cached instance instead of allocating a
    // fresh wrapper (the Boolean(boolean) constructor is deprecated).
    return Boolean.valueOf(this.checkDeadlock);
}
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#getPreprocess()
 */
public final Boolean getPreprocess() {
    // Boolean.valueOf returns the cached instance instead of allocating a
    // fresh wrapper (the Boolean(boolean) constructor is deprecated).
    return Boolean.valueOf(this.preprocess);
}
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#getFileName()
 */
public final String getFileName() {
    // Root spec file name as recorded by the Tool.
    return this.tool.rootFile;
}
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#getSpecDir()
 */
public String getSpecDir() {
    // Directory containing the spec, as split off in the constructor.
    return this.tool.specDir;
}
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#getConfigName()
 */
public String getConfigName() {
    // Configuration (.cfg) file name passed to the constructor.
    return this.config;
}
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#getMetadir()
 */
public final String getMetadir() {
    // Metadata directory; null unless the checkpoint-aware constructor ran.
    return this.metadir;
}
/** Fingerprint-bits option as passed to the constructor (-1 if unset). */
public final int getFPBits() {
    return fpBits;
}
/**
 * Resolves the fingerprint memory setting to a concrete byte count:
 * -1 means "default to a quarter of the heap", a value in [0,1] is read as a
 * fraction of the heap, and the result is clamped to
 * [TLC.MinFpMemSize, 0.75 * maxMemory]. The resolved value is written back
 * to the field so later calls return the same answer.
 *
 * @return the fpMemSize in bytes
 */
public long getFpMemSize() {
    // determine amount of memory to be used for fingerprints
    final long maxMemory = Runtime.getRuntime().maxMemory();
    double size = fpMemSize;
    if (size == -1) {
        // -fpmem not given: default to .25 * maxMemory
        size = maxMemory >> 2;
    }
    if (0 <= size && size <= 1) {
        // -fpmemratio given: interpret as a fraction of the heap
        size = maxMemory * size;
    }
    if (size < TLC.MinFpMemSize) {
        size = TLC.MinFpMemSize;
    }
    if (size >= maxMemory) {
        // cap at .75 * maxMemory
        size = maxMemory - (maxMemory >> 2);
    }
    fpMemSize = size;
    return (long) size;
}
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#canRecover()
 */
public final boolean canRecover() {
    // Recovery is possible iff a checkpoint directory was supplied.
    return this.fromChkpt != null;
}
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#getInitStates()
 */
public final TLCState[] getInitStates() throws WorkerException {
    StateVec theInitStates = this.tool.getInitStates();
    TLCState[] res = new TLCState[theInitStates.size()];
    for (int i = 0; i < theInitStates.size(); i++) {
        TLCState curState = theInitStates.elementAt(i);
        // Reject states whose variables are not all assigned by the
        // init predicate.
        if (!this.tool.isGoodState(curState)) {
            String msg = "Error: Initial state is not completely specified by the"
                + " initial predicate.";
            throw new WorkerException(msg, curState, null, false);
        }
        res[i] = (TLCState) curState;
    }
    // Statistics: count states produced by this worker.
    statesComputed += res.length;
    return res;
}
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#getNextStates(tlc2.tool.TLCState)
 */
public final TLCState[] getNextStates(TLCState curState)
        throws WorkerException {
    // Collect the successors produced by every subaction.
    StateVec nextStates = new StateVec(10);
    for (int i = 0; i < this.actions.length; i++) {
        Action curAction = this.actions[i];
        StateVec nstates = this.tool.getNextStates(curAction,
                (TLCState) curState);
        nextStates = nextStates.addElements(nstates);
    }
    int len = nextStates.size();
    // No successors at all means a deadlock (if deadlock checking is on).
    if (len == 0 && this.checkDeadlock) {
        throw new WorkerException("Error: deadlock reached.", curState,
                null, false);
    }
    TLCState[] res = new TLCState[nextStates.size()];
    for (int i = 0; i < nextStates.size(); i++) {
        TLCState succState = nextStates.elementAt(i);
        // Reject successors whose variables are not all assigned by the
        // next-state action.
        if (!this.tool.isGoodState(succState)) {
            String msg = "Error: Successor state is not completely specified by"
                + " the next-state action.";
            throw new WorkerException(msg, curState, succState, false);
        }
        res[i] = succState;
    }
    // Statistics: count states produced by this worker.
    statesComputed += res.length;
    return res;
}
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#checkState(tlc2.tool.TLCState, tlc2.tool.TLCState)
 */
/**
 * Checks s2 against all invariants, and — depending on whether s1 is null
 * (s2 is an initial state) — against the implied-inits or, for the
 * transition s1 -> s2, against the implied-actions.
 */
public final void checkState(TLCState s1, TLCState s2)
        throws WorkerException {
    TLCState ts2 = (TLCState) s2;
    for (int i = 0; i < this.invariants.length; i++) {
        if (!tool.isValid(this.invariants[i], ts2)) {
            // We get here because of invariant violation:
            String msg = "Error: Invariant " + this.tool.getInvNames()[i]
                + " is violated.";
            throw new WorkerException(msg, s1, s2, false);
        }
    }
    if (s1 == null) {
        // s2 is an initial state: check the implied-init properties.
        for (int i = 0; i < this.impliedInits.length; i++) {
            if (!this.tool.isValid(this.impliedInits[i], ts2)) {
                // We get here because of implied-inits violation:
                String msg = "Error: Implied-init "
                    + this.tool.getImpliedInitNames()[i]
                    + " is violated.";
                throw new WorkerException(msg, s1, s2, false);
            }
        }
    } else {
        // s1 -> s2 is a transition: check the implied-action properties.
        TLCState ts1 = (TLCState) s1;
        for (int i = 0; i < this.impliedActions.length; i++) {
            if (!tool.isValid(this.impliedActions[i], ts1, ts2)) {
                // We get here because of implied-action violation:
                String msg = "Error: Implied-action "
                    + this.tool.getImpliedActNames()[i]
                    + " is violated.";
                throw new WorkerException(msg, s1, s2, false);
            }
        }
    }
}
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#isInModel(tlc2.tool.TLCState)
 */
public final boolean isInModel(TLCState s) {
    // Delegates the model-constraint check to the Tool (cast is redundant
    // but kept for token fidelity with the interface's historical shape).
    return this.tool.isInModel((TLCState) s);
}
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#isInActions(tlc2.tool.TLCState, tlc2.tool.TLCState)
 */
public final boolean isInActions(TLCState s1, TLCState s2) {
    // Delegates the action-constraint check for the transition s1 -> s2.
    return this.tool.isInActions((TLCState) s1, (TLCState) s2);
}
/* Reconstruct the initial state whose fingerprint is fp. */
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#getState(long)
 */
public final TLCStateInfo getState(long fp) {
    return this.tool.getState(fp);
}
/* Reconstruct the next state of state s whose fingerprint is fp. */
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#getState(long, tlc2.tool.TLCState)
 */
public final TLCStateInfo getState(long fp, TLCState s) {
    return this.tool.getState(fp, s);
}
/* Reconstruct the info for the transition from s to s1. */
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#getState(tlc2.tool.TLCState, tlc2.tool.TLCState)
 */
public TLCStateInfo getState(TLCState s1, TLCState s) {
    return this.tool.getState(s1, s);
}
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#getStatesComputed()
 */
public long getStatesComputed() {
    // Total number of states returned by getInitStates()/getNextStates()
    // on this worker. (A duplicated copy of the javadoc comment above was
    // removed.)
    return statesComputed;
}
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#setCallStack()
 */
public final void setCallStack() {
    // Enables call-stack recording in the Tool for error reporting.
    this.tool.setCallStack();
}
/* (non-Javadoc)
 * @see tlc2.tool.distributed.DistApp#printCallStack()
 */
public final String printCallStack() {
    // SZ Jul 10, 2009: check if this is ok
    // changed the method signature
    // Returns (rather than prints) the rendered call stack.
    return this.tool.getCallStack().toString();
}
public static TLCApp create(String args[]) throws IOException {
String specFile = null;
String configFile = null;
boolean deadlock = true;
int fpIndex = 0;
int fpBits = 1;
String fromChkpt = null;
double fpmem = -1;
int index = 0;
while (index < args.length) {
if (args[index].equals("-config")) {
index++;
if (index < args.length) {
configFile = args[index];
int len = configFile.length();
if (configFile.startsWith(".cfg", len - 4)) {
configFile = configFile.substring(0, len - 4);
}
index++;
} else {
printErrorMsg("Error: configuration file required.");
return null;
}
} else if (args[index].equals("-tool")) {
index++;
TLCGlobals.tool = true;
} else if (args[index].equals("-deadlock")) {
index++;
deadlock = false;
} else if (args[index].equals("-recover")) {
index++;
if (index < args.length) {
fromChkpt = args[index++] + FileUtil.separator;
} else {
printErrorMsg("Error: need to specify the metadata directory for recovery.");
return null;
}
} else if (args[index].equals("-checkpoint")) {
index++;
if (index < args.length) {
try {
TLCGlobals.chkptDuration = Integer.parseInt(args[index]) * 1000 * 60;
if (TLCGlobals.chkptDuration < 0) {
printErrorMsg("Error: expect a nonnegative integer for -checkpoint option.");
}
index++;
} catch (Exception e) {
printErrorMsg("Error: An integer for checkpoint interval is required. But encountered "
+ args[index]);
}
} else {
printErrorMsg("Error: checkpoint interval required.");
}
} else if (args[index].equals("-coverage")) {
index++;
if (index < args.length) {
try {
TLCGlobals.coverageInterval = Integer
.parseInt(args[index]) * 1000;
if (TLCGlobals.coverageInterval < 0) {
printErrorMsg("Error: expect a nonnegative integer for -coverage option.");
return null;
}
index++;
} catch (Exception e) {
printErrorMsg("Error: An integer for coverage report interval required."
+ " But encountered " + args[index]);
return null;
}
} else {
printErrorMsg("Error: coverage report interval required.");
return null;
}
} else if (args[index].equals("-terse")) {
index++;
Value.expand = false;
} else if (args[index].equals("-nowarning")) {
index++;
TLCGlobals.warn = false;
} else if (args[index].equals("-fp")) {
index++;
if (index < args.length) {
try {
fpIndex = Integer.parseInt(args[index]);
if (fpIndex < 0 || fpIndex >= FP64.Polys.length) {
printErrorMsg("Error: The number for -fp must be between 0 and "
+ (FP64.Polys.length - 1) + " (inclusive).");
return null;
}
index++;
} catch (Exception e) {
printErrorMsg("Error: A number for -fp is required. But encountered "
+ args[index]);
return null;
}
} else {
printErrorMsg("Error: expect an integer for -workers option.");
return null;
}
} else if (args[index].equals("-fpbits")) {
index++;
if (index < args.length) {
try {
fpBits = Integer.parseInt(args[index]);
// make sure it's in valid range
if (!FPSet.isValid(fpBits)) {
printErrorMsg("Error: Value in interval [0, 30] for fpbits required. But encountered " + args[index]);
return null;
}
index++;
} catch (Exception e) {
printErrorMsg("Error: A number for -fpbits is required. But encountered "
+ args[index]);
return null;
}
} else {
printErrorMsg("Error: expect an integer for -workers option.");
return null;
}
} else if (args[index].equals("-fpmem"))
{
index++;
if (index < args.length)
{
try
{
// -fpmem can be used in two ways:
// a) to set the relative memory to be used for fingerprints (being machine independent)
// b) to set the absolute memory to be used for fingerprints
// In order to set memory relatively, a value in the domain [0.0, 1.0] is interpreted as a fraction.
// A value in the [2, Double.MaxValue] domain allocates memory absolutely.
// Independently of relative or absolute mem allocation,
// a user cannot allocate more than JVM heap space
// available. Conversely there is the lower hard limit TLC#MinFpMemSize.
fpmem = Double.parseDouble(args[index]);
if (fpmem < 0) {
printErrorMsg("Error: An positive integer or a fraction for fpset memory size/percentage required. But encountered " + args[index]);
return null;
} else if (fpmem > 1) {
fpmem = (long) fpmem;
}
index++;
} catch (Exception e)
{
printErrorMsg("Error: An positive integer or a fraction for fpset memory size/percentage required. But encountered " + args[index]);
return null;
}
}
} else if (args[index].equals("-metadir")) {
index++;
if (index < args.length)
{
TLCGlobals.metaDir = args[index++] + FileUtil.separator;
} else {
printErrorMsg("Error: need to specify the metadata directory.");
return null;
}
} else {
if (args[index].charAt(0) == '-') {
printErrorMsg("Error: unrecognized option: " + args[index]);
return null;
}
if (specFile != null) {
printErrorMsg("Error: more than one input files: "
+ specFile + " and " + args[index]);
return null;
}
specFile = args[index++];
int len = specFile.length();
if (specFile.startsWith(".tla", len - 4)) {
specFile = specFile.substring(0, len - 4);
}
}
}
if (specFile == null) {
printErrorMsg("Error: Missing input TLA+ module.");
return null;
}
if (configFile == null)
configFile = specFile;
if (fromChkpt != null) {
// We must recover the intern table as early as possible
UniqueString.internTbl.recover(fromChkpt);
}
FP64.Init(fpIndex);
return new TLCApp(specFile, configFile, deadlock, fromChkpt, fpBits, fpmem);
}
private static void printErrorMsg(String msg) {
ToolIO.out.println(msg);
ToolIO.out
.println("Usage: java tlc2.tool.TLCServer [-option] inputfile");
}
}
|
package org.jetbrains.plugins.scala.lang.parser.stress;
import com.intellij.lang.*;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.TokenSet;
import com.intellij.util.diff.FlyweightCapableTreeStructure;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * A {@link PsiBuilder} decorator used by parser stress tests: every call is
 * forwarded to the wrapped builder, and additionally the text range of each
 * token consumed via {@link #advanceLexer()} is recorded in a
 * {@link DragStorage} so the test can later inspect which ranges were touched.
 *
 * @author ilyas
 */
public class DragBuilderWrapper implements PsiBuilder {
  final Project myProject;
  final PsiBuilder myBuilder; // delegate doing the real lexing/marking work
  final DragStorage myStorage; // accumulates ranges of consumed tokens

  public DragBuilderWrapper(Project project, PsiBuilder builder) {
    myProject = project;
    myBuilder = builder;
    myStorage = new DragStorage();
  }

  public Project getProject() {
    return myProject;
  }

  /**
   * Records the current token's text range in the drag storage, then advances
   * the wrapped builder. Nothing is recorded at EOF or for tokens with null
   * text.
   */
  public void advanceLexer() {
    String text = myBuilder.getTokenText();
    if (!myBuilder.eof() && text != null) {
      int start = myBuilder.getCurrentOffset();
      int end = start + text.length();
      TextRange range = new TextRange(start, end);
      myStorage.registerRevision(range);
    }
    myBuilder.advanceLexer();
  }

  /**
   * Returns the (range, count) pairs accumulated while lexing. The meaning of
   * the Integer component is defined by DragStorage — presumably a revision
   * counter; confirm there.
   */
  public Pair<TextRange, Integer>[] getDragInfo(){
    return myStorage.getRangeInfo();
  }

  // --- Everything below is plain delegation to the wrapped builder. ---

  public CharSequence getOriginalText() {
    return myBuilder.getOriginalText();
  }

  public IElementType getTokenType() {
    return myBuilder.getTokenType();
  }

  public String getTokenText() {
    return myBuilder.getTokenText();
  }

  public int getCurrentOffset() {
    return myBuilder.getCurrentOffset();
  }

  public void setTokenTypeRemapper(ITokenTypeRemapper remapper) {
    myBuilder.setTokenTypeRemapper(remapper);
  }

  public Marker mark() {
    return myBuilder.mark();
  }

  public void error(String messageText) {
    myBuilder.error(messageText);
  }

  public boolean eof() {
    return myBuilder.eof();
  }

  public ASTNode getTreeBuilt() {
    return myBuilder.getTreeBuilt();
  }

  public FlyweightCapableTreeStructure<LighterASTNode> getLightTree() {
    return myBuilder.getLightTree();
  }

  public void setDebugMode(boolean dbgMode) {
    myBuilder.setDebugMode(dbgMode);
  }

  public void enforceCommentTokens(TokenSet tokens) {
    myBuilder.enforceCommentTokens(tokens);
  }

  /** Always null: this wrapper does not provide a language dialect. */
  public LanguageDialect getLanguageDialect() {
    return null;
  }

  public <T> T getUserData(Key<T> key) {
    return myBuilder.getUserData(key);
  }

  public <T> void putUserData(Key<T> key, T value) {
    myBuilder.putUserData(key, value);
  }

  public LighterASTNode getLatestDoneMarker() {
    return myBuilder.getLatestDoneMarker();
  }

  public <T> T getUserDataUnprotected(@NotNull Key<T> key) {
    return myBuilder.getUserDataUnprotected(key);
  }

  public <T> void putUserDataUnprotected(@NotNull Key<T> key, @Nullable T value) {
    myBuilder.putUserDataUnprotected(key, value);
  }
}
|
package org.voltdb.regressionsuites;
import java.io.IOException;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.voltdb.BackendTarget;
import org.voltdb.VoltDB;
import org.voltdb.VoltTable;
import org.voltdb.VoltType;
import org.voltdb.client.Client;
import org.voltdb.client.ClientResponse;
import org.voltdb.client.NoConnectionsException;
import org.voltdb.client.ProcCallException;
import org.voltdb.client.ProcedureCallback;
import org.voltdb.compiler.VoltProjectBuilder;
import org.voltdb_testprocs.regressionsuites.fixedsql.Insert;
/**
* Tests for SQL that was recently (early 2012) unsupported.
*/
public class TestFunctionsForVoltDBSuite extends RegressionSuite {
/** Procedures used by this suite */
static final Class<?>[] PROCEDURES = { Insert.class };
public void testExplicitErrorUDF() throws Exception
{
System.out.println("STARTING testExplicitErrorUDF");
Client client = getClient();
ProcedureCallback callback = new ProcedureCallback() {
@Override
public void clientCallback(ClientResponse clientResponse)
throws Exception {
if (clientResponse.getStatus() != ClientResponse.SUCCESS) {
throw new RuntimeException("Failed with response: " + clientResponse.getStatusString());
}
}
};
/*
CREATE TABLE P1 (
ID INTEGER DEFAULT '0' NOT NULL,
DESC VARCHAR(300),
NUM INTEGER,
RATIO FLOAT,
PRIMARY KEY (ID)
);
*/
for(int id=7; id < 15; id++) {
client.callProcedure(callback, "P1.insert", - id, "X"+String.valueOf(id), 10, 1.1);
client.drain();
}
ClientResponse cr = null;
// Exercise basic syntax without runtime invocation.
cr = client.callProcedure("@AdHoc", "select SQL_ERROR(123) from P1 where ID = 0");
assertTrue(cr.getStatus() == ClientResponse.SUCCESS);
cr = client.callProcedure("@AdHoc", "select SQL_ERROR('abc') from P1 where ID = 0");
assertTrue(cr.getStatus() == ClientResponse.SUCCESS);
cr = client.callProcedure("@AdHoc", "select SQL_ERROR(123, 'abc') from P1 where ID = 0");
assertTrue(cr.getStatus() == ClientResponse.SUCCESS);
boolean caught = false;
caught = false;
try {
cr = client.callProcedure("@AdHoc", "select SQL_ERROR(123, 'abc') from P1");
assertTrue(cr.getStatus() != ClientResponse.SUCCESS);
} catch (ProcCallException e) {
String msg = e.getMessage();
assertTrue(msg.indexOf("abc") != -1);
caught = true;
}
assertTrue(caught);
caught = false;
try {
cr = client.callProcedure("@AdHoc", "select SQL_ERROR(123.5) from P1");
assertTrue(cr.getStatus() != ClientResponse.SUCCESS);
} catch (ProcCallException e) {
String msg = e.getMessage();
assertTrue(msg.indexOf("Specific error code") != -1);
caught = true;
}
assertTrue(caught);
caught = false;
try {
cr = client.callProcedure("@AdHoc", "select SQL_ERROR('abc') from P1");
assertTrue(cr.getStatus() != ClientResponse.SUCCESS);
} catch (ProcCallException e) {
String msg = e.getMessage();
assertTrue(msg.indexOf("abc") != -1);
caught = true;
}
assertTrue(caught);
caught = false;
try {
// This wants to be a statement compile-time error.
cr = client.callProcedure("@AdHoc", "select SQL_ERROR(123, 123) from P1");
assertTrue(cr.getStatus() != ClientResponse.SUCCESS);
} catch (ProcCallException e) {
String msg = e.getMessage();
assertTrue(msg.matches(".*SQL ERROR\n.*VARCHAR.*"));
caught = true;
}
assertTrue(caught);
}
public void testOctetLength() throws NoConnectionsException, IOException, ProcCallException {
System.out.println("STARTING OCTET_LENGTH");
Client client = getClient();
ClientResponse cr;
VoltTable result;
cr = client.callProcedure("P1.insert", 1, "Vo", 10, 1.1);
cr = client.callProcedure("P1.insert", 2, "Xin", 10, 1.1);
cr = client.callProcedure("P1.insert", 3, null, 10, 1.1);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
cr = client.callProcedure("OCTET_LENGTH", 1);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
assertEquals(8, result.getLong(1));
cr = client.callProcedure("OCTET_LENGTH", 2);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
assertEquals(3, result.getLong(1));
// null case
cr = client.callProcedure("OCTET_LENGTH", 3);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
assertEquals(VoltType.NULL_BIGINT,result.getLong(1));
}
// this test is put here instead of TestFunctionSuite, because HSQL uses
// a different null case standard with standard sql
public void testPosition() throws NoConnectionsException, IOException, ProcCallException {
System.out.println("STARTING Position");
Client client = getClient();
ClientResponse cr;
VoltTable result;
cr = client.callProcedure("P1.insert", 1, "Vo", 10, 1.1);
cr = client.callProcedure("P1.insert", 2, "Xin@Volt", 10, 1.1);
cr = client.callProcedure("P1.insert", 3, null, 10, 1.1);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
cr = client.callProcedure("POSITION","Vo", 1);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
assertEquals(3, result.getLong(1));
cr = client.callProcedure("POSITION","DB", 1);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
assertEquals(0, result.getLong(1));
cr = client.callProcedure("POSITION","Vo", 2);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
assertEquals(5, result.getLong(1));
// null case
cr = client.callProcedure("POSITION","Vo", 3);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
assertEquals(VoltType.NULL_BIGINT,result.getLong(1));
}
// this test is put here instead of TestFunctionSuite, because HSQL uses
// a different null case standard with standard sql
public void testCharLength() throws NoConnectionsException, IOException, ProcCallException {
System.out.println("STARTING Char length");
Client client = getClient();
ClientResponse cr;
VoltTable result;
cr = client.callProcedure("P1.insert", 1, "Vo", 10, 1.1);
cr = client.callProcedure("P1.insert", 2, "Xin@Volt", 10, 1.1);
cr = client.callProcedure("P1.insert", 3, null, 10, 1.1);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
cr = client.callProcedure("CHAR_LENGTH", 1);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
assertEquals(4, result.getLong(1));
cr = client.callProcedure("CHAR_LENGTH", 2);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
assertEquals(8, result.getLong(1));
// null case
cr = client.callProcedure("CHAR_LENGTH", 3);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
assertEquals(VoltType.NULL_BIGINT,result.getLong(1));
}
    /**
     * Tests the DECODE function end to end: each of the four match arms, the
     * default arm (null input row), parameter type inference for string,
     * integer, and fully-parameterized forms, and a conflicting-type
     * parameter that must be rejected with a parameter type error.
     */
    public void testDECODE() throws NoConnectionsException, IOException, ProcCallException {
        System.out.println("STARTING DECODE");
        Client client = getClient();
        ClientResponse cr;
        VoltTable result;
        cr = client.callProcedure("P1.insert", 1, "IBM", 10, 1.1);
        cr = client.callProcedure("P1.insert", 2, "Microsoft", 10, 1.1);
        cr = client.callProcedure("P1.insert", 3, "Hewlett Packard", 10, 1.1);
        cr = client.callProcedure("P1.insert", 4, "Gateway", 10, 1.1);
        cr = client.callProcedure("P1.insert", 5, null, 10, 1.1);
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        // match 1st condition
        cr = client.callProcedure("DECODE", 1);
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(1, result.getRowCount());
        assertTrue(result.advanceRow());
        assertEquals("zheng", result.getString(1));
        // match 2nd condition
        cr = client.callProcedure("DECODE", 2);
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(1, result.getRowCount());
        assertTrue(result.advanceRow());
        assertEquals("li", result.getString(1));
        // match 3rd condition
        cr = client.callProcedure("DECODE", 3);
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(1, result.getRowCount());
        assertTrue(result.advanceRow());
        assertEquals("at", result.getString(1));
        // match 4th condition
        cr = client.callProcedure("DECODE", 4);
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(1, result.getRowCount());
        assertTrue(result.advanceRow());
        assertEquals("VoltDB", result.getString(1));
        // null case
        cr = client.callProcedure("DECODE", 5);
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(1, result.getRowCount());
        assertTrue(result.advanceRow());
        assertEquals("where", result.getString(1));
        // param cases
        // For project.addStmtProcedure("DECODE_PARAM_INFER_STRING", "select desc, DECODE (desc,?,?,desc) from P1 where id = ?");
        cr = client.callProcedure("DECODE_PARAM_INFER_STRING", "Gateway", "You got it!", 4);
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(1, result.getRowCount());
        assertTrue(result.advanceRow());
        assertEquals("You got it!", result.getString(1));
        // For project.addStmtProcedure("DECODE_PARAM_INFER_INT", "select desc, DECODE (id,?,?,id) from P1 where id = ?");
        cr = client.callProcedure("DECODE_PARAM_INFER_INT", 4, -4, 4);
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(1, result.getRowCount());
        assertTrue(result.advanceRow());
        assertEquals(-4, result.getLong(1));
        // For project.addStmtProcedure("DECODE_PARAM_INFER_DEFAULT", "select desc, DECODE (?,?,?,?) from P1 where id = ?");
        cr = client.callProcedure("DECODE_PARAM_INFER_DEFAULT", "Gateway", "Gateway", "You got it!", "You ain't got it!", 4);
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(1, result.getRowCount());
        assertTrue(result.advanceRow());
        assertEquals("You got it!", result.getString(1));
        // For project.addStmtProcedure("DECODE_PARAM_INFER_CONFLICTING", "select desc, DECODE (id,1,?,2,99,'99') from P1 where id = ?");
        cr = client.callProcedure("DECODE_PARAM_INFER_CONFLICTING", "?!", 1);
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(1, result.getRowCount());
        assertTrue(result.advanceRow());
        assertEquals("?!", result.getString(1));
        // For project.addStmtProcedure("DECODE_PARAM_INFER_CONFLICTING", "select desc, DECODE (id,1,?,2,99,'99') from P1 where id = ?");
        // Passing an int where a string is inferred must be rejected.
        try {
            cr = client.callProcedure("DECODE_PARAM_INFER_CONFLICTING", 1000, 1);
            fail("Should have thrown unfortunate type error.");
        } catch (ProcCallException pce) {
            String msg = pce.getMessage();
            assertTrue(msg.contains("TYPE ERROR FOR PARAMETER 0"));
        }
    }
public void testDECODENoDefault() throws NoConnectionsException, IOException, ProcCallException {
System.out.println("STARTING DECODE No Default");
Client client = getClient();
ClientResponse cr;
VoltTable result;
cr = client.callProcedure("P1.insert", 1, "zheng", 10, 1.1);
cr = client.callProcedure("P1.insert", 2, "li", 10, 1.1);
cr = client.callProcedure("P1.insert", 3, null, 10, 1.1);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
// null case
cr = client.callProcedure("DECODEND", 3);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
assertEquals(null,result.getString(1));
}
public void testDECODEVeryLong() throws NoConnectionsException, IOException, ProcCallException {
System.out.println("STARTING DECODE Exceed Limit");
Client client = getClient();
ClientResponse cr;
VoltTable result;
cr = client.callProcedure("P1.insert", 1, "zheng", 10, 1.1);
cr = client.callProcedure("P1.insert", 2, "li", 10, 1.1);
cr = client.callProcedure("P1.insert", 3, null, 10, 1.1);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
// null case
cr = client.callProcedure("DECODEVERYLONG", 3);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
assertEquals("where",result.getString(1));
}
public void testDECODEInlineVarcharColumn_ENG5078()
throws NoConnectionsException, IOException, ProcCallException
{
System.out.println("STARTING DECODE inline varchar column pass-through");
Client client = getClient();
ClientResponse cr;
VoltTable result;
cr = client.callProcedure("P3_INLINE_DESC.insert", 1, "zheng", 10, 1.1);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
// null case
try {
cr = client.callProcedure("@AdHoc",
"select DECODE(id, -1, 'INVALID', desc) from P3_INLINE_DESC");
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
assertEquals("zheng",result.getString(0));
} catch (ProcCallException pce) {
System.out.println(pce);
fail("Looks like a regression of ENG-5078 inline varchar column pass-through by decode");
}
}
public void testDECODEAsInput() throws NoConnectionsException, IOException, ProcCallException {
System.out.println("STARTING DECODE No Default");
Client client = getClient();
ClientResponse cr;
VoltTable result;
cr = client.callProcedure("P1.insert", 1, "zheng", 10, 1.1);
cr = client.callProcedure("P1.insert", 2, "li", 10, 1.1);
cr = client.callProcedure("P1.insert", 3, null, 10, 1.1);
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
// use DECODE as string input to operator
cr = client.callProcedure("@AdHoc", "select desc || DECODE(id, 1, ' is the 1', ' is not the 1') from P1 where id = 2");
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
assertEquals("li is not the 1",result.getString(0));
// use DECODE as integer input to operator
cr = client.callProcedure("@AdHoc", "select id + DECODE(desc, 'li', 0, -2*id) from P1 where id = 2");
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
assertEquals(2,result.getLong(0));
// use DECODE as integer input to operator, with unused incompatible option
cr = client.callProcedure("@AdHoc", "select id + DECODE(id, 2, 0, 'incompatible') from P1 where id = 2");
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
assertEquals(2,result.getLong(0));
// use DECODE as integer input to operator, with used incompatible option
try {
cr = client.callProcedure("@AdHoc", "select id + DECODE(id, 1, 0, 'incompatible') from P1 where id = 2");
fail("failed to except incompatible option");
} catch (ProcCallException pce) {
String message = pce.getMessage();
// It's about that string argument to the addition operator.
assertTrue(message.contains("varchar"));
}
}
private void checkDecodeNullResult (ClientResponse cr, Object[] input) {
VoltTable result;
assertEquals(ClientResponse.SUCCESS, cr.getStatus());
result = cr.getResults()[0];
assertEquals(1, result.getRowCount());
assertTrue(result.advanceRow());
System.out.println("testDECODEWithNULL:" + result);
if (input instanceof String[]) {
String[] expected = (String[]) input;
for (int i = 0; i < expected.length; i++) {
if ( (i == 4 || i == 7) && !expected[i].startsWith("null") ) {
// Float type, decimal type
assertTrue(Math.abs(
Double.valueOf(expected[i]) - Double.valueOf(result.getString(i))
) < 0.00001);
} else {
assertEquals(expected[i],result.getString(i));
}
}
} else if (input instanceof Long[]) {
Long[] expected = (Long[]) input;
for (int i = 0; i < expected.length; i++) {
assertEquals(expected[i],Long.valueOf(result.getLong(i)));
}
}
}
    /**
     * Tests DECODE with NULL search/match values across all column types:
     * via stored procedures, with explicit null parameters, with CSV-style
     * "\N" null parameters, via ad hoc SQL, with NULL as a non-first search
     * expression, and with a NULL result expression. Also covers the
     * TIMESTAMP-to-VARCHAR cast rejection (ENG-4284).
     */
    public void testDECODEWithNULL() throws NoConnectionsException, IOException, ProcCallException {
        System.out.println("STARTING DECODE with NULL");
        Client client = getClient();
        ClientResponse cr;
        cr = client.callProcedure("R3.insert", 1, 1, 1, 1, 1, 1.1, "2013-07-18 02:00:00.123457", "IBM", 1);
        cr = client.callProcedure("R3.insert", 2, null, null, null, null, null, null, null, null);
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        // Stored procedure tests
        cr = client.callProcedure("TestDecodeNull", 1);
        checkDecodeNullResult(cr, new String[]{"1","1","1","1","1.1","tm","IBM","1"});
        cr = client.callProcedure("TestDecodeNull", 2);
        checkDecodeNullResult(cr, new String[]{"null tiny","null small", "null num", "null big",
                "null ratio", "null tm", "null var", "null dec"});
        cr = client.callProcedure("TestDecodeNullParam", null, null, null, null, null, null, null, null, 1);
        checkDecodeNullResult(cr, new String[]{"1","1","1","1","1.1","tm","IBM","1"});
        cr = client.callProcedure("TestDecodeNullParam", null, null, null, null, null, null, null, null, 2);
        checkDecodeNullResult(cr, new String[]{"null tiny","null small", "null num", "null big",
                "null ratio", "null tm", "null var", "null dec"});
        // Test CSV_NULL for params
        cr = client.callProcedure("TestDecodeNullParam", "\\N","\\N","\\N","\\N","\\N","\\N","\\N","\\N", 1);
        checkDecodeNullResult(cr, new String[]{"1","1","1","1","1.1","tm","IBM","1"});
        cr = client.callProcedure("TestDecodeNullParam", "\\N","\\N","\\N","\\N","\\N","\\N","\\N","\\N", 2);
        checkDecodeNullResult(cr, new String[]{"null tiny","null small", "null num", "null big",
                "null ratio", "null tm", "null var", "null dec"});
        // AdHoc queries tests
        cr = client.callProcedure("@AdHoc", "select DECODE(tiny, NULL, 'null tiny', tiny)," +
                "DECODE(small, NULL, 'null small', small), DECODE(num, NULL, 'null num', num), " +
                "DECODE(big, NULL, 'null big', big), DECODE(ratio, NULL, 'null ratio', ratio), " +
                "DECODE(tm, NULL, 'null tm', 'tm'), DECODE(var, NULL, 'null var', var), " +
                "DECODE(dec, NULL, 'null dec', dec) from R3 where id = 1");
        checkDecodeNullResult(cr, new String[]{"1","1","1","1","1.1","tm","IBM","1"});
        cr = client.callProcedure("@AdHoc", "select DECODE(tiny, NULL, 'null tiny', tiny)," +
                "DECODE(small, NULL, 'null small', small), DECODE(num, NULL, 'null num', num), " +
                "DECODE(big, NULL, 'null big', big), DECODE(ratio, NULL, 'null ratio', ratio), " +
                "DECODE(tm, NULL, 'null tm', 'tm'), DECODE(var, NULL, 'null var', var), " +
                "DECODE(dec, NULL, 'null dec', dec) from R3 where id = 2");
        checkDecodeNullResult(cr, new String[]{"null tiny","null small", "null num", "null big",
                "null ratio", "null tm", "null var", "null dec"});
        cr = client.callProcedure("P2.insert", 1, new Timestamp(1000L));
        cr = client.callProcedure("P2.insert", 2, null);
        // Test timestamp
        try {
            // Timstamp converted to varchar, See ENG-4284.
            cr = client.callProcedure("TestDecodeNullTimestamp", 1);
            fail();
        } catch (Exception ex) {
            assertTrue(ex.getMessage().contains("TIMESTAMP can't be cast as VARCHAR"));
        }
        // Test NULL as the second search expression.
        cr = client.callProcedure("@AdHoc", "select DECODE(tiny, -1, -1, NULL, 0, tiny)," +
                "DECODE(small, -1, -1, NULL, 0, small), DECODE(num, -1, -1, NULL, 0, num), " +
                "DECODE(big, -1, -1, NULL, 0, big) from R3 where id = 1");
        checkDecodeNullResult(cr, new Long[]{1L,1L,1L,1L});
        cr = client.callProcedure("@AdHoc", "select DECODE(tiny, -1, -1, NULL, 0, tiny)," +
                "DECODE(small, -1, -1, NULL, 0, small), DECODE(num, -1, -1, NULL, 0, num), " +
                "DECODE(big, -1, -1, NULL, 0, big) from R3 where id = 2");
        checkDecodeNullResult(cr, new Long[]{0L,0L,0L,0L});
        // Test Null return type
        cr = client.callProcedure("@AdHoc","select DECODE(tiny, 4, 5, NULL, NULL, 10) " +
                " from R3 where id = 2");
        assertTrue(cr.getResults()[0].getRowCount() == 1);
        assertTrue(cr.getResults()[0].advanceRow());
        // NULL TINYINT is reported through getLong as Integer.MIN_VALUE here.
        assertEquals(Integer.MIN_VALUE, cr.getResults()[0].getLong(0));

        try {
            cr = client.callProcedure("@AdHoc","select DECODE(tiny, 4, 5, NULL, 'null tiny', tiny) " +
                    " from R3 where id = 2");
            fail();
        } catch (Exception ex) {
            System.out.println(ex.getMessage());
            assertTrue(ex.getMessage().contains("SQL ERROR"));
            assertTrue(ex.getMessage().contains("value: 'null tiny'"));
        }
    }
    /**
     * Populates the JS1 table with fixture documents for the FIELD and
     * ARRAY_ELEMENT tests: three well-formed JSON documents built from a
     * template, a minimal document, degenerate documents ({} and []), a
     * document with a null field, a NULL document, and a document with an
     * empty-string field.
     *
     * @param client connected client used to run the inserts
     * @throws IOException if the client call fails on I/O
     * @throws NoConnectionsException if the client has no connections
     * @throws ProcCallException if an insert is rejected
     */
    private void loadJS1(Client client) throws IOException, NoConnectionsException, ProcCallException
    {
        final String jstemplate = "{\n" +
            "    \"id\": %d,\n" +
            "    \"bool\": true,\n" +
            "    \"inner\": {\n" +
            "        \"veggies\": \"good for you\",\n" +
            "        \"Vo\": \"wakarimasen\"\n" +
            "    },\n" +
            "    \"arr\": [\n" +
            "        0,\n" +
            "        %d,\n" +
            "        100\n" +
            "    ],\n" +
            "    \"tag\": \"%s\"\n" +
            "}";
        ClientResponse cr;
        cr = client.callProcedure("JS1.insert",1,String.format(jstemplate, 1, 1, "one"));
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        cr = client.callProcedure("JS1.insert",2,String.format(jstemplate, 2, 2, "two"));
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        cr = client.callProcedure("JS1.insert",3,String.format(jstemplate, 3, 3, "three"));
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        cr = client.callProcedure("JS1.insert",4,"{\"id\":4,\"bool\": false}");
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        cr = client.callProcedure("JS1.insert",5,"{}");
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        cr = client.callProcedure("JS1.insert",6,"[]");
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        cr = client.callProcedure("JS1.insert",7,"{\"id\":7,\"funky\": null}");
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        cr = client.callProcedure("JS1.insert",8, null);
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        cr = client.callProcedure("JS1.insert",9, "{\"id\":9, \"Vo\":\"\"}");
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    }
    /**
     * Tests the FIELD function against the JS1 fixtures loaded by loadJS1():
     * lookup by several keys, strict parameter type checking, rows where the
     * field is missing or the document is degenerate/NULL, nested-object
     * lookup via chained FIELD calls, and FIELD applied to scalars and arrays
     * (both of which must yield SQL NULL).
     */
    public void testFIELDFunction() throws Exception {
        ClientResponse cr;
        VoltTable result;
        Client client = getClient();
        loadJS1(client);

        cr = client.callProcedure("IdFieldProc", "id", "1");
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(1, result.getRowCount());
        assertTrue(result.advanceRow());
        assertEquals(1L,result.getLong(0));

        // Both parameters must be strings; anything else is a type error.
        try {
            cr = client.callProcedure("IdFieldProc", "id", 1);
            fail("parameter check failed");
        }
        catch ( ProcCallException pcex) {
            assertTrue(pcex.getMessage().contains("TYPE ERROR FOR PARAMETER 1"));
        }
        try {
            cr = client.callProcedure("IdFieldProc", 1, "1");
            fail("parameter check failed");
        }
        catch ( ProcCallException pcex) {
            assertTrue(pcex.getMessage().contains("TYPE ERROR FOR PARAMETER 0"));
        }

        cr = client.callProcedure("IdFieldProc", "tag", "three");
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(1, result.getRowCount());
        assertTrue(result.advanceRow());
        assertEquals(3L,result.getLong(0));

        cr = client.callProcedure("IdFieldProc", "bool", "false");
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(1, result.getRowCount());
        assertTrue(result.advanceRow());
        assertEquals(4L,result.getLong(0));

        // Empty-string field value is still a present field (row 9).
        cr = client.callProcedure("IdFieldProc", "Vo", "");
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(1, result.getRowCount());
        assertTrue(result.advanceRow());
        assertEquals(9L,result.getLong(0));

        // "funky" only appears (as null) in row 7; all 9 rows lack a value.
        cr = client.callProcedure("NullFieldProc", "funky");
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(9, result.getRowCount());

        // Rows 5 ({}), 6 ([]) and 8 (NULL doc) have no "id" field.
        cr = client.callProcedure("NullFieldProc", "id");
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(3, result.getRowCount());
        assertTrue(result.advanceRow());
        assertEquals(5L,result.getLong(0));
        assertTrue(result.advanceRow());
        assertEquals(6L,result.getLong(0));
        assertTrue(result.advanceRow());
        assertEquals(8L,result.getLong(0));

        // Nested lookup: rows 1-3 carry inner.Vo = "wakarimasen".
        cr = client.callProcedure("InnerFieldProc", "Vo" ,"wakarimasen");
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(3, result.getRowCount());
        assertTrue(result.advanceRow());
        assertEquals(1L,result.getLong(0));
        assertTrue(result.advanceRow());
        assertEquals(2L,result.getLong(0));
        assertTrue(result.advanceRow());
        assertEquals(3L,result.getLong(0));

        cr = client.callProcedure("IdFieldProc", "arr" ,"[0,2,100]");
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(1, result.getRowCount());
        assertTrue(result.advanceRow());
        assertEquals(2L,result.getLong(0));

        cr = client.callProcedure("@AdHoc", // test scalar not an object
                "SELECT FIELD(FIELD(DOC, 'id'), 'value') FROM JS1 WHERE ID = 1");
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(1, result.getRowCount());
        assertTrue(result.advanceRow());
        result.getString(0);
        assertTrue(result.wasNull());

        cr = client.callProcedure("@AdHoc", // test array not an object
                "SELECT FIELD(FIELD(DOC, 'arr'), 'value') FROM JS1 WHERE ID = 1");
        assertEquals(ClientResponse.SUCCESS, cr.getStatus());
        result = cr.getResults()[0];
        assertEquals(1, result.getRowCount());
        assertTrue(result.advanceRow());
        result.getString(0);
        assertTrue(result.wasNull());
    }
/**
 * Exercises the ARRAY_ELEMENT SQL function against the JSON documents loaded
 * by {@code loadJS1}: positive lookups, parameter type checking, NULL
 * propagation for out-of-range/negative indexes and non-array operands, and
 * documents that are themselves top-level JSON arrays (including the empty
 * array).
 */
public void testARRAY_ELEMENTFunction() throws Exception {
    Client client = getClient();
    loadJS1(client);
    ClientResponse response;
    VoltTable table;

    // arr[1] == "1" matches exactly the row with ID 1.
    response = client.callProcedure("IdArrayProc", "arr", 1, "1");
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    table = response.getResults()[0];
    assertEquals(1, table.getRowCount());
    assertTrue(table.advanceRow());
    assertEquals(1L, table.getLong(0));

    // A non-numeric array index must be rejected by parameter type checking.
    try {
        client.callProcedure("IdArrayProc", "arr", "NotNumeric", "1");
        fail("parameter check failed");
    }
    catch (ProcCallException pcex) {
        assertTrue(pcex.getMessage().contains("TYPE ERROR FOR PARAMETER 1"));
    }
    // A non-string field name must be rejected as well.
    try {
        client.callProcedure("IdArrayProc", 1, 1, "1");
        fail("parameter check failed");
    }
    catch (ProcCallException pcex) {
        assertTrue(pcex.getMessage().contains("TYPE ERROR FOR PARAMETER 0"));
    }

    // A field present in no document yields NULL for all nine rows.
    response = client.callProcedure("NullArrayProc", "funky", 2);
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    table = response.getResults()[0];
    assertEquals(9, table.getRowCount());

    // 'id' is not an array in any document, so the lookup matches nothing.
    response = client.callProcedure("IdArrayProc", "id", 1, "1");
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    table = response.getResults()[0];
    assertEquals(0, table.getRowCount());

    // Each of these queries must produce a single NULL, not an error.
    String[] nullProducingQueries = {
            // index out of bounds
            "SELECT ARRAY_ELEMENT(FIELD(DOC, 'arr'), 99) FROM JS1 WHERE ID = 1",
            // negative index
            "SELECT ARRAY_ELEMENT(FIELD(DOC, 'arr'), -1) FROM JS1 WHERE ID = 1",
            // scalar, not an array
            "SELECT ARRAY_ELEMENT(FIELD(DOC, 'id'), 1) FROM JS1 WHERE ID = 1",
            // object, not an array
            "SELECT ARRAY_ELEMENT(FIELD(DOC, 'inner'), 1) FROM JS1 WHERE ID = 1",
    };
    for (String query : nullProducingQueries) {
        response = client.callProcedure("@AdHoc", query);
        assertEquals(ClientResponse.SUCCESS, response.getStatus());
        table = response.getResults()[0];
        assertEquals(1, table.getRowCount());
        assertTrue(table.advanceRow());
        table.getString(0);
        assertTrue(table.wasNull());
    }

    // Test top-level json array: a document that IS an array can be indexed.
    response = client.callProcedure("JS1.insert", 10, "[0, 10, 100]");
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    response = client.callProcedure("@AdHoc",
            "SELECT ARRAY_ELEMENT(DOC, 1) FROM JS1 WHERE ID = 10");
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    table = response.getResults()[0];
    assertEquals(1, table.getRowCount());
    assertTrue(table.advanceRow());
    assertEquals("10", table.getString(0));

    // Test empty json array: indexing into it yields NULL.
    response = client.callProcedure("JS1.insert", 11, "[]");
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    response = client.callProcedure("@AdHoc",
            "SELECT ARRAY_ELEMENT(DOC, 0) FROM JS1 WHERE ID = 11");
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    table = response.getResults()[0];
    assertEquals(1, table.getRowCount());
    assertTrue(table.advanceRow());
    table.getString(0);
    assertTrue(table.wasNull());
}
/**
 * Exercises the ARRAY_LENGTH SQL function: exact-length matching, parameter
 * type checking, NULL propagation for missing fields and non-array operands,
 * range comparisons (the BETWEEN and > procedure variants), and top-level
 * JSON arrays.
 */
public void testARRAY_LENGTHFunction() throws Exception {
    Client client = getClient();
    loadJS1(client);
    ClientResponse response;
    VoltTable table;

    // Rows 1-3 each carry a three-element 'arr'.
    response = client.callProcedure("IdArrayLengthProc", "arr", 3);
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    table = response.getResults()[0];
    assertEquals(3, table.getRowCount());
    for (long id = 1; id <= 3; id++) {
        assertTrue(table.advanceRow());
        assertEquals(id, table.getLong(0));
    }

    // A non-numeric length operand fails parameter type checking.
    try {
        client.callProcedure("IdArrayLengthProc", "arr", "NoNumber");
        fail("parameter check failed");
    }
    catch (ProcCallException pcex) {
        assertTrue(pcex.getMessage().contains("TYPE ERROR FOR PARAMETER 1"));
    }
    // A non-string field name fails parameter type checking.
    try {
        client.callProcedure("IdArrayLengthProc", 1, 3);
        fail("parameter check failed");
    }
    catch (ProcCallException pcex) {
        assertTrue(pcex.getMessage().contains("TYPE ERROR FOR PARAMETER 0"));
    }

    // A nonexistent field is NULL in all nine documents.
    response = client.callProcedure("NullFieldProc", "funky");
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    table = response.getResults()[0];
    assertEquals(9, table.getRowCount());

    // ARRAY_LENGTH('arr') is NULL for rows 4-9.
    response = client.callProcedure("NullArrayLengthProc", "arr");
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    table = response.getResults()[0];
    assertEquals(6, table.getRowCount());
    for (long id = 4; id <= 9; id++) {
        assertTrue(table.advanceRow());
        assertEquals(id, table.getLong(0));
    }

    // Length > 3 matches nothing; > 2 matches rows 1-3.
    response = client.callProcedure("LargeArrayLengthProc", "arr", 3);
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    table = response.getResults()[0];
    assertEquals(0, table.getRowCount());
    response = client.callProcedure("LargeArrayLengthProc", "arr", 2);
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    table = response.getResults()[0];
    assertEquals(3, table.getRowCount());
    for (long id = 1; id <= 3; id++) {
        assertTrue(table.advanceRow());
        assertEquals(id, table.getLong(0));
    }

    // Length BETWEEN 0 AND 2 matches nothing; BETWEEN 0 AND 3 matches rows 1-3.
    response = client.callProcedure("SmallArrayLengthProc", "arr", 2);
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    table = response.getResults()[0];
    assertEquals(0, table.getRowCount());
    response = client.callProcedure("SmallArrayLengthProc", "arr", 3);
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    table = response.getResults()[0];
    assertEquals(3, table.getRowCount());
    for (long id = 1; id <= 3; id++) {
        assertTrue(table.advanceRow());
        assertEquals(id, table.getLong(0));
    }

    // ARRAY_LENGTH of a non-array operand is NULL, not an error.
    String[] nullProducingQueries = {
            // scalar, not an array
            "SELECT ARRAY_LENGTH(FIELD(DOC, 'id')) FROM JS1 WHERE ID = 1",
            // object, not an array
            "SELECT ARRAY_LENGTH(FIELD(DOC, 'inner')) FROM JS1 WHERE ID = 1",
    };
    for (String query : nullProducingQueries) {
        response = client.callProcedure("@AdHoc", query);
        assertEquals(ClientResponse.SUCCESS, response.getStatus());
        table = response.getResults()[0];
        assertEquals(1, table.getRowCount());
        assertTrue(table.advanceRow());
        table.getLong(0);
        assertTrue(table.wasNull());
    }

    // Test top-level json array: its length is read directly.
    response = client.callProcedure("JS1.insert", 10, "[0, 10, 100]");
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    response = client.callProcedure("@AdHoc",
            "SELECT ARRAY_LENGTH(DOC) FROM JS1 WHERE ID = 10");
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    table = response.getResults()[0];
    assertEquals(1, table.getRowCount());
    assertTrue(table.advanceRow());
    assertEquals(3L, table.getLong(0));

    // Test empty json array: length 0.
    response = client.callProcedure("JS1.insert", 11, "[]");
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    response = client.callProcedure("@AdHoc",
            "SELECT ARRAY_LENGTH(DOC) FROM JS1 WHERE ID = 11");
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    table = response.getResults()[0];
    assertEquals(1, table.getRowCount());
    assertTrue(table.advanceRow());
    assertEquals(0L, table.getLong(0));
}
/**
 * Exercises the SINCE_EPOCH SQL function in all five unit spellings
 * (SECOND, MILLIS/MILLISECOND, MICROS/MICROSECOND) via ad hoc SQL, a
 * string-literal timestamp round-trip through TO_TIMESTAMP, an invalid
 * timestamp string, and the five unit-specific stored procedures.
 *
 * Fix: the procedure-name dispatch previously used {@code ==} on strings,
 * which only worked by accident of literal interning; it now uses
 * {@code String.equals} and fails explicitly on an unknown name. The
 * per-unit expectations are table-driven instead of five copied blocks.
 */
public void testSINCE_EPOCH() throws Exception {
    System.out.println("STARTING SINCE_EPOCH");
    Client client = getClient();
    ClientResponse cr;
    VoltTable result;

    // Seed P2 with timestamps at and around the epoch, plus one real time.
    cr = client.callProcedure("P2.insert", 0, new Timestamp(0L));
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    cr = client.callProcedure("P2.insert", 1, new Timestamp(1L));
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    cr = client.callProcedure("P2.insert", 2, new Timestamp(1000L));
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    cr = client.callProcedure("P2.insert", 3, new Timestamp(-1000L));
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    cr = client.callProcedure("P2.insert", 4, new Timestamp(1371808830000L));
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    cr = client.callProcedure("P2.insert", 5, "2013-07-18 02:00:00.123457");
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());

    // Test AdHoc
    cr = client.callProcedure("@AdHoc", "select SINCE_EPOCH (SECOND, TM), TM from P2 where id = 4");
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    result = cr.getResults()[0];
    assertEquals(1, result.getRowCount());
    assertTrue(result.advanceRow());
    assertEquals(1371808830L, result.getLong(0));
    assertEquals(1371808830000000L, result.getTimestampAsLong(1));

    // Test constants timestamp with string: SINCE_EPOCH of the literal,
    // fed back through TO_TIMESTAMP, must equal the stored TM.
    cr = client.callProcedure("@AdHoc", "select TM, TO_TIMESTAMP(MICROS, SINCE_EPOCH (MICROS, '2013-07-18 02:00:00.123457') ) from P2 where id = 5");
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    result = cr.getResults()[0];
    assertEquals(1, result.getRowCount());
    assertTrue(result.advanceRow());
    assertEquals(result.getTimestampAsLong(0), result.getTimestampAsLong(1));

    // Test user error input, Only accept JDBC's timestamp format: YYYY-MM-DD-SS.sss.
    try {
        cr = client.callProcedure("@AdHoc", "select SINCE_EPOCH (MICROS, 'I am a timestamp') from P2 where id = 5");
        fail();
    } catch (Exception ex) {
        assertTrue(ex.getMessage().contains("PlanningErrorException"));
        assertTrue(ex.getMessage().contains("incompatible data type in conversion"));
    }

    String[] procedures = {"SINCE_EPOCH_SECOND", "SINCE_EPOCH_MILLIS",
            "SINCE_EPOCH_MILLISECOND", "SINCE_EPOCH_MICROS", "SINCE_EPOCH_MICROSECOND"};
    // Expected {seconds, millis, micros} since epoch for rows 0 through 4.
    long[][] expected = {
            {0L, 0L, 0L},
            {0L, 1L, 1000L},
            {1L, 1000L, 1000000L},
            {-1L, -1000L, -1000000L},
            {1371808830L, 1371808830000L, 1371808830000000L},
    };
    for (String proc : procedures) {
        for (int id = 0; id < expected.length; id++) {
            cr = client.callProcedure(proc, id);
            assertEquals(ClientResponse.SUCCESS, cr.getStatus());
            result = cr.getResults()[0];
            assertEquals(1, result.getRowCount());
            assertTrue(result.advanceRow());
            assertEquals(sinceEpochExpected(proc, expected[id]), result.getLong(0));
        }
    }
}

/**
 * Picks the expected SINCE_EPOCH value for the given procedure name from
 * {@code expected}, laid out as {seconds, millis, micros}.
 */
private static long sinceEpochExpected(String proc, long[] expected) {
    if (proc.equals("SINCE_EPOCH_SECOND")) {
        return expected[0];
    }
    if (proc.equals("SINCE_EPOCH_MILLIS") || proc.equals("SINCE_EPOCH_MILLISECOND")) {
        return expected[1];
    }
    if (proc.equals("SINCE_EPOCH_MICROS") || proc.equals("SINCE_EPOCH_MICROSECOND")) {
        return expected[2];
    }
    fail("unexpected procedure name: " + proc);
    return 0; // unreachable: fail() always throws
}
/**
 * Exercises the TO_TIMESTAMP SQL function (and its FROM_UNIXTIME alias) in
 * SECOND, MILLIS/MILLISECOND, and MICROS/MICROSECOND units, plus the
 * rejection of a string-typed numeric argument.
 *
 * Fixes: the procedure-name dispatch previously compared strings with
 * {@code ==} (identity, not equality), and the seed-row inserts did not
 * check their response status.
 */
public void testTO_TIMESTAMP() throws NoConnectionsException, IOException, ProcCallException {
    System.out.println("STARTING TO_TIMESTAMP");
    Client client = getClient();
    ClientResponse cr;
    VoltTable result;

    // Seed P2; the procedures select against these rows by id.
    cr = client.callProcedure("P2.insert", 0, new Timestamp(0L));
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    cr = client.callProcedure("P2.insert", 1, new Timestamp(1L));
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    cr = client.callProcedure("P2.insert", 2, new Timestamp(1000L));
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    cr = client.callProcedure("P2.insert", 3, new Timestamp(-1000L));
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());

    // Test AdHoc
    cr = client.callProcedure("@AdHoc", "select to_timestamp(second, 1372640523) from P2 limit 1");
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    result = cr.getResults()[0];
    assertEquals(1, result.getRowCount());
    assertTrue(result.advanceRow());
    assertEquals(1372640523 * 1000000L, result.getTimestampAsLong(0));

    // Test string input number, expect error
    try {
        cr = client.callProcedure("@AdHoc", "select to_timestamp(second, '1372640523') from P2 limit 1");
        fail();
    } catch (Exception ex) {
        assertTrue(ex.getMessage().contains("PlanningErrorException"));
        assertTrue(ex.getMessage().contains("incompatible data type"));
    }

    String[] procedures = {"FROM_UNIXTIME", "TO_TIMESTAMP_SECOND", "TO_TIMESTAMP_MILLIS",
            "TO_TIMESTAMP_MILLISECOND", "TO_TIMESTAMP_MICROS", "TO_TIMESTAMP_MICROSECOND"};
    // Input values to convert; rowIds[i] selects the single P2 row the
    // procedure's "where id = ?" clause runs against.
    long[] values = {0L, 1L, 1000L, -1000L, 1371808830000L};
    int[] rowIds = {0, 1, 1, 1, 1};
    for (String proc : procedures) {
        for (int i = 0; i < values.length; i++) {
            cr = client.callProcedure(proc, values[i], rowIds[i]);
            assertEquals(ClientResponse.SUCCESS, cr.getStatus());
            result = cr.getResults()[0];
            assertEquals(1, result.getRowCount());
            assertTrue(result.advanceRow());
            assertEquals(toTimestampExpected(proc, values[i]), result.getTimestampAsLong(0));
        }
    }
}

/**
 * Converts {@code value} to the expected microsecond-resolution timestamp
 * for the unit implied by the procedure name: SECOND (and FROM_UNIXTIME)
 * scale by 1,000,000; MILLIS by 1,000; MICROS not at all.
 */
private static long toTimestampExpected(String proc, long value) {
    if (proc.equals("TO_TIMESTAMP_SECOND") || proc.equals("FROM_UNIXTIME")) {
        return value * 1000000L;
    }
    if (proc.equals("TO_TIMESTAMP_MILLIS") || proc.equals("TO_TIMESTAMP_MILLISECOND")) {
        return value * 1000L;
    }
    if (proc.equals("TO_TIMESTAMP_MICROS") || proc.equals("TO_TIMESTAMP_MICROSECOND")) {
        return value;
    }
    fail("unexpected procedure name: " + proc);
    return 0; // unreachable: fail() always throws
}
/**
 * Exercises TRUNCATE(unit, timestamp) across all eleven units (YEAR through
 * MICROSECOND, column order per the TRUNCATE statement procedure) for a
 * modern date, a pre-epoch date, and a far-future date; also checks NULL
 * propagation, the runtime error for pre-Gregorian dates, and the planning
 * error for a floating-point first argument.
 *
 * Fix: the pre-epoch and far-future checks asserted column 7 twice instead
 * of column 8, so the MILLISECOND truncation went untested for those rows.
 * The expected value matches column 7 because MILLIS and MILLISECOND are
 * the same unit (see the TRUNCATE procedure's column list). A redundant
 * duplicate call of TRUNCATE for row 2 was also dropped.
 */
public void testTRUNCATE() throws Exception {
    System.out.println("STARTING TRUNCATE with timestamp");
    Client client = getClient();
    ClientResponse cr;
    VoltTable result;
    VoltDB.setDefaultTimezone();
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");

    // Standard TRUNCATE is timestamp-only here: a numeric first argument
    // must be rejected at planning time.
    Exception ex = null;
    try {
        cr = client.callProcedure("@AdHoc", "select TRUNCATE (1.2, 1), TM from P2 where id = 0");
    } catch (Exception e) {
        System.out.println(e.getMessage());
        ex = e;
    } finally {
        assertNotNull(ex);
        assertTrue((ex.getMessage().contains("Error compiling query")));
        assertTrue((ex.getMessage().contains("PlanningErrorException")));
    }

    // Test date before Gregorian calendar beginning: runtime SQL error.
    cr = client.callProcedure("P2.insert", 0, Timestamp.valueOf("1582-03-06 13:56:40.123456"));
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    ex = null;
    try {
        cr = client.callProcedure("TRUNCATE", 0);
    } catch (Exception e) {
        System.out.println(e.getMessage());
        ex = e;
    } finally {
        assertNotNull(ex);
        assertTrue((ex.getMessage().contains("SQL ERROR")));
    }

    // Test Timestamp Null value: every truncation of NULL is NULL.
    cr = client.callProcedure("P2.insert", 1, null);
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    cr = client.callProcedure("TRUNCATE", 1);
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    result = cr.getResults()[0];
    assertEquals(1, result.getRowCount());
    assertTrue(result.advanceRow());
    for (int i = 0; i < 11; i++) {
        assertNull(result.getTimestampAsTimestamp(i));
    }

    // Test normal TRUNCATE functionality on a modern timestamp.
    cr = client.callProcedure("P2.insert", 2, Timestamp.valueOf("2001-09-09 01:46:40.035123"));
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    cr = client.callProcedure("TRUNCATE", 2);
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    result = cr.getResults()[0];
    assertEquals(1, result.getRowCount());
    assertTrue(result.advanceRow());
    assertTruncateRow(result, dateFormat,
            new String[] {
                    "2001-01-01 00:00:00.000", // YEAR
                    "2001-07-01 00:00:00.000", // QUARTER
                    "2001-09-01 00:00:00.000", // MONTH
                    "2001-09-09 00:00:00.000", // DAY
                    "2001-09-09 01:00:00.000", // HOUR
                    "2001-09-09 01:46:00.000", // MINUTE
                    "2001-09-09 01:46:40.000", // SECOND
                    "2001-09-09 01:46:40.035", // MILLIS
                    "2001-09-09 01:46:40.035", // MILLISECOND
            },
            1000000000035123L);

    // Test time before EPOCH (but after the Gregorian transition).
    cr = client.callProcedure("P2.insert", 3, Timestamp.valueOf("1583-11-24 13:56:40.123456"));
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    cr = client.callProcedure("TRUNCATE", 3);
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    result = cr.getResults()[0];
    assertEquals(1, result.getRowCount());
    assertTrue(result.advanceRow());
    assertTruncateRow(result, dateFormat,
            new String[] {
                    "1583-01-01 00:00:00.000", // YEAR
                    "1583-10-01 00:00:00.000", // QUARTER
                    "1583-11-01 00:00:00.000", // MONTH
                    "1583-11-24 00:00:00.000", // DAY
                    "1583-11-24 13:00:00.000", // HOUR
                    "1583-11-24 13:56:00.000", // MINUTE
                    "1583-11-24 13:56:40.000", // SECOND
                    "1583-11-24 13:56:40.123", // MILLIS
                    "1583-11-24 13:56:40.123", // MILLISECOND
            },
            -12184250599876544L);

    // Test date in far future.
    cr = client.callProcedure("P2.insert", 4, Timestamp.valueOf("2608-03-06 13:56:40.123456"));
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    cr = client.callProcedure("TRUNCATE", 4);
    assertEquals(ClientResponse.SUCCESS, cr.getStatus());
    result = cr.getResults()[0];
    assertEquals(1, result.getRowCount());
    assertTrue(result.advanceRow());
    assertTruncateRow(result, dateFormat,
            new String[] {
                    "2608-01-01 00:00:00.000", // YEAR
                    "2608-01-01 00:00:00.000", // QUARTER
                    "2608-03-01 00:00:00.000", // MONTH
                    "2608-03-06 00:00:00.000", // DAY
                    "2608-03-06 13:00:00.000", // HOUR
                    "2608-03-06 13:56:00.000", // MINUTE
                    "2608-03-06 13:56:40.000", // SECOND
                    "2608-03-06 13:56:40.123", // MILLIS
                    "2608-03-06 13:56:40.123", // MILLISECOND
            },
            20138939800123456L);
}

/**
 * Asserts one row of the TRUNCATE procedure's 11-column result. Columns 0-8
 * (YEAR..MILLISECOND) are compared against parsed millisecond-precision
 * date strings; columns 9 and 10 (MICROS/MICROSECOND) keep microsecond
 * precision and are compared against {@code expectedMicros}.
 */
private static void assertTruncateRow(VoltTable row, SimpleDateFormat dateFormat,
        String[] expectedDates, long expectedMicros) throws Exception {
    for (int col = 0; col < expectedDates.length; col++) {
        Date expected = dateFormat.parse(expectedDates[col]);
        assertEquals(expected.getTime() * 1000, row.getTimestampAsLong(col));
    }
    assertEquals(expectedMicros, row.getTimestampAsLong(9));
    assertEquals(expectedMicros, row.getTimestampAsLong(10));
}
/**
 * Verifies that the JSON access procedures raise descriptive parse errors
 * (with line/column positions) for malformed documents. The documents are
 * stored as plain VARCHAR, so the inserts themselves succeed.
 */
public void testFunctionsWithInvalidJSON() throws Exception {
    Client client = getClient();
    ClientResponse response;

    response = client.callProcedure(
            "JSBAD.insert", 1, // OOPS. skipped comma before "bool"
            "{\"id\":1 \"bool\": false}"
            );
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    response = client.callProcedure(
            "JSBAD.insert", 2, // OOPS. semi-colon in place of colon before "bool"
            "{\"id\":2, \"bool\"; false, \"Vo\":\"\"}"
            );
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    final String jsTrailingCommaArray = "[ 0, 100, ]"; // OOPS. trailing comma in array
    final String jsWithTrailingCommaArray = "{\"id\":3, \"trailer\":" + jsTrailingCommaArray + "}";
    response = client.callProcedure("JSBAD.insert", 3, jsWithTrailingCommaArray);
    assertEquals(ClientResponse.SUCCESS, response.getStatus());
    response = client.callProcedure("JSBAD.insert", 4, jsTrailingCommaArray);
    assertEquals(ClientResponse.SUCCESS, response.getStatus());

    // Each row id maps to the column at which the parser should report the
    // first error in the document inserted for that id above.
    int[][] idToErrorColumn = {{1, 9}, {2, 16}, {3, 30}, {4, 11}};
    String[] jsonProcs = {"BadIdFieldProc", "BadIdArrayProc", "BadIdArrayLengthProc"};
    for (String procname : jsonProcs) {
        for (int[] pair : idToErrorColumn) {
            int id = pair[0];
            int column = pair[1];
            try {
                client.callProcedure(procname, id, "id", String.valueOf(id));
                fail("document validity check failed for " + procname);
            }
            catch (ProcCallException pcex) {
                assertTrue(pcex.getMessage().contains(
                        "Invalid JSON * Line 1, Column " + column));
            }
        }
    }
}
// JUnit / RegressionSuite boilerplate
/**
 * Constructor required by the JUnit framework: per JUnit convention,
 * {@code name} is the test method this instance will run, forwarded to the
 * superclass.
 *
 * @param name name of the test method to run
 */
public TestFunctionsForVoltDBSuite(String name) {
    super(name);
}
/**
 * Builds the multi-configuration test suite: declares the schema and every
 * statement procedure the tests above rely on, then compiles the project
 * for a single-site and a three-site local cluster (JNI backend only; the
 * functions under test are not HSQL compatible).
 *
 * Fix: a schema-compilation failure was previously reported with the
 * opaque {@code assertFalse(true)}; it now fails with the underlying
 * exception message.
 */
static public junit.framework.Test suite() {
    VoltServerConfig config = null;
    MultiConfigSuiteBuilder builder =
        new MultiConfigSuiteBuilder(TestFunctionsForVoltDBSuite.class);
    boolean success;

    VoltProjectBuilder project = new VoltProjectBuilder();
    final String literalSchema =
            "CREATE TABLE P1 ( " +
            "ID INTEGER DEFAULT '0' NOT NULL, " +
            "DESC VARCHAR(300), " +
            "NUM INTEGER, " +
            "RATIO FLOAT, " +
            "PRIMARY KEY (ID) ); " +
            "PARTITION TABLE P1 ON COLUMN ID;" +

            "CREATE TABLE P2 ( " +
            "ID INTEGER DEFAULT '0' NOT NULL, " +
            "TM TIMESTAMP DEFAULT NULL, " +
            "PRIMARY KEY (ID) ); " +
            "PARTITION TABLE P2 ON COLUMN ID;\n" +

            "CREATE TABLE P3_INLINE_DESC ( " +
            "ID INTEGER DEFAULT '0' NOT NULL, " +
            "DESC VARCHAR(30), " +
            "NUM INTEGER, " +
            "RATIO FLOAT, " +
            "PRIMARY KEY (ID) ); " +
            "PARTITION TABLE P3_INLINE_DESC ON COLUMN ID;" +

            "CREATE TABLE R3 ( " +
            "ID INTEGER DEFAULT '0' NOT NULL, " +
            "TINY TINYINT, " +
            "SMALL SMALLINT, " +
            "NUM INTEGER, " +
            "BIG BIGINT, " +
            "RATIO FLOAT, " +
            "TM TIMESTAMP DEFAULT NULL, " +
            "VAR VARCHAR(300), " +
            "DEC DECIMAL, " +
            "PRIMARY KEY (ID) ); " +

            "CREATE TABLE JS1 (\n" +
            "  ID INTEGER NOT NULL, \n" +
            "  DOC VARCHAR(8192),\n" +
            "  PRIMARY KEY(ID))\n" +
            ";\n" +

            "CREATE PROCEDURE IdFieldProc AS\n" +
            "   SELECT ID FROM JS1 WHERE FIELD(DOC, ?) = ? ORDER BY ID\n" +
            ";\n" +
            "CREATE PROCEDURE InnerFieldProc AS\n" +
            "   SELECT ID FROM JS1 WHERE FIELD(FIELD(DOC, 'inner'), ?) = ? ORDER BY ID\n" +
            ";\n" +
            "CREATE PROCEDURE NullFieldProc AS\n" +
            "   SELECT ID FROM JS1 WHERE FIELD(DOC, ?) IS NULL ORDER BY ID\n" +
            ";\n" +
            "CREATE PROCEDURE IdArrayProc AS\n" +
            "   SELECT ID FROM JS1 WHERE ARRAY_ELEMENT(FIELD(DOC, ?), ?) = ? ORDER BY ID\n" +
            ";\n" +
            "CREATE PROCEDURE NullArrayProc AS\n" +
            "   SELECT ID FROM JS1 WHERE ARRAY_ELEMENT(FIELD(DOC, ?), ?) IS NULL ORDER BY ID\n" +
            ";\n" +
            "CREATE PROCEDURE IdArrayLengthProc AS\n" +
            "   SELECT ID FROM JS1 WHERE ARRAY_LENGTH(FIELD(DOC, ?)) = ? ORDER BY ID\n" +
            ";\n" +
            "CREATE PROCEDURE NullArrayLengthProc AS\n" +
            "   SELECT ID FROM JS1 WHERE ARRAY_LENGTH(FIELD(DOC, ?)) IS NULL ORDER BY ID\n" +
            ";\n" +
            "CREATE PROCEDURE SmallArrayLengthProc AS\n" +
            "   SELECT ID FROM JS1 WHERE ARRAY_LENGTH(FIELD(DOC, ?)) BETWEEN 0 AND ? ORDER BY ID\n" +
            ";\n" +
            "CREATE PROCEDURE LargeArrayLengthProc AS\n" +
            "   SELECT ID FROM JS1 WHERE ARRAY_LENGTH(FIELD(DOC, ?)) > ? ORDER BY ID\n" +
            ";\n" +

            "CREATE TABLE JSBAD (\n" +
            "  ID INTEGER NOT NULL,\n" +
            "  DOC VARCHAR(8192),\n" +
            "  PRIMARY KEY(ID))\n" +
            ";\n" +
            "CREATE PROCEDURE BadIdFieldProc AS\n" +
            "  SELECT ID FROM JSBAD WHERE ID = ? AND FIELD(DOC, ?) = ?\n" +
            ";\n" +
            "CREATE PROCEDURE BadIdArrayProc AS\n" +
            "  SELECT ID FROM JSBAD WHERE ID = ? AND ARRAY_ELEMENT(FIELD(DOC, ?), 1) = ?\n" +
            ";\n" +
            "CREATE PROCEDURE BadIdArrayLengthProc AS\n" +
            "  SELECT ID FROM JSBAD WHERE ID = ? AND ARRAY_LENGTH(FIELD(DOC, ?)) = ?\n" +
            ";\n" +
            "";
    try {
        project.addLiteralSchema(literalSchema);
    } catch (IOException e) {
        // Surface the real compilation problem instead of a blind assert.
        fail("Failed to add literal schema: " + e.getMessage());
    }

    // Test DECODE
    project.addStmtProcedure("DECODE", "select desc,  DECODE (desc,'IBM','zheng'," +
            "'Microsoft', 'li'," +
            "'Hewlett Packard', 'at'," +
            "'Gateway','VoltDB'," +
            "'where') from P1 where id = ?");
    project.addStmtProcedure("DECODEND", "select desc,  DECODE (desc,'zheng','a') from P1 where id = ?");
    project.addStmtProcedure("DECODEVERYLONG", "select desc,  DECODE (desc,'a','a'," +
            "'a','a'," +
            "'a','a'," +
            "'a','a'," +
            "'a','a'," +
            "'a','a'," +
            "'a','a'," +
            "'a','a'," +
            "'a','a'," +
            "'a','a'," +
            "'a','a'," +
            "'a','a'," +
            "'where') from P1 where id = ?");
    project.addStmtProcedure("DECODE_PARAM_INFER_STRING", "select desc,  DECODE (desc,?,?,desc) from P1 where id = ?");
    project.addStmtProcedure("DECODE_PARAM_INFER_INT", "select desc,  DECODE (id,?,?,id) from P1 where id = ?");
    project.addStmtProcedure("DECODE_PARAM_INFER_DEFAULT", "select desc,  DECODE (?,?,?,?) from P1 where id = ?");
    project.addStmtProcedure("DECODE_PARAM_INFER_CONFLICTING", "select desc,  DECODE (id,1,?,2,99,'') from P1 where id = ?");
    // Test OCTET_LENGTH
    project.addStmtProcedure("OCTET_LENGTH", "select desc,  OCTET_LENGTH (desc) from P1 where id = ?");
    // Test POSITION and CHAR_LENGTH
    project.addStmtProcedure("POSITION", "select desc, POSITION (? IN desc) from P1 where id = ?");
    project.addStmtProcedure("CHAR_LENGTH", "select desc, CHAR_LENGTH (desc) from P1 where id = ?");
    // Test SINCE_EPOCH
    project.addStmtProcedure("SINCE_EPOCH_SECOND", "select SINCE_EPOCH (SECOND, TM) from P2 where id = ?");
    project.addStmtProcedure("SINCE_EPOCH_MILLIS", "select SINCE_EPOCH (MILLIS, TM) from P2 where id = ?");
    project.addStmtProcedure("SINCE_EPOCH_MILLISECOND", "select SINCE_EPOCH (MILLISECOND, TM) from P2 where id = ?");
    project.addStmtProcedure("SINCE_EPOCH_MICROS", "select SINCE_EPOCH (MICROS, TM) from P2 where id = ?");
    project.addStmtProcedure("SINCE_EPOCH_MICROSECOND", "select SINCE_EPOCH (MICROSECOND, TM) from P2 where id = ?");
    // Test TO_TIMESTAMP
    project.addStmtProcedure("TO_TIMESTAMP_SECOND", "select TO_TIMESTAMP (SECOND, ?) from P2 where id = ?");
    project.addStmtProcedure("TO_TIMESTAMP_MILLIS", "select TO_TIMESTAMP (MILLIS, ?) from P2 where id = ?");
    project.addStmtProcedure("TO_TIMESTAMP_MILLISECOND", "select TO_TIMESTAMP (MILLISECOND, ?) from P2 where id = ?");
    project.addStmtProcedure("TO_TIMESTAMP_MICROS", "select TO_TIMESTAMP (MICROS, ?) from P2 where id = ?");
    project.addStmtProcedure("TO_TIMESTAMP_MICROSECOND", "select TO_TIMESTAMP (MICROSECOND, ?) from P2 where id = ?");

    // TRUNCATE returns 11 columns: YEAR, QUARTER, MONTH, DAY, HOUR, MINUTE,
    // SECOND, MILLIS, MILLISECOND, MICROS, MICROSECOND (in that order).
    project.addStmtProcedure("TRUNCATE", "select TRUNCATE(YEAR, TM), TRUNCATE(QUARTER, TM), TRUNCATE(MONTH, TM), " +
            "TRUNCATE(DAY, TM), TRUNCATE(HOUR, TM),TRUNCATE(MINUTE, TM),TRUNCATE(SECOND, TM), TRUNCATE(MILLIS, TM), " +
            "TRUNCATE(MILLISECOND, TM), TRUNCATE(MICROS, TM), TRUNCATE(MICROSECOND, TM) from P2 where id = ?");

    project.addStmtProcedure("FROM_UNIXTIME", "select FROM_UNIXTIME (?) from P2 where id = ?");

    project.addStmtProcedure("TestDecodeNull", "select DECODE(tiny, NULL, 'null tiny', tiny)," +
            "DECODE(small, NULL, 'null small', small), DECODE(num, NULL, 'null num', num),  " +
            "DECODE(big, NULL, 'null big', big), DECODE(ratio, NULL, 'null ratio', ratio),  " +
            "DECODE(tm, NULL, 'null tm', 'tm'), DECODE(var, NULL, 'null var', var),  " +
            "DECODE(dec, NULL, 'null dec', dec) from R3 where id = ?");
    project.addStmtProcedure("TestDecodeNullParam", "select DECODE(tiny, ?, 'null tiny', tiny)," +
            "DECODE(small, ?, 'null small', small), DECODE(num, ?, 'null num', num),  " +
            "DECODE(big, ?, 'null big', big), DECODE(ratio, ?, 'null ratio', ratio),  " +
            "DECODE(tm, ?, 'null tm', 'tm'), DECODE(var, ?, 'null var', var),  " +
            "DECODE(dec, ?, 'null dec', dec) from R3 where id = ?");

    project.addStmtProcedure("TestDecodeNullTimestamp", "select DECODE(tm, NULL, 'null tm', tm) from p2 where id = ?");

    // CONFIG #1: Local Site/Partition running on JNI backend
    config = new LocalCluster("fixedsql-onesite.jar", 1, 1, 0, BackendTarget.NATIVE_EE_JNI);
    success = config.compile(project);
    assertTrue(success);
    builder.addServerConfig(config);

    // CONFIG #2: Local Site/Partitions running on JNI backend
    config = new LocalCluster("fixedsql-threesite.jar", 3, 1, 0, BackendTarget.NATIVE_EE_JNI);
    success = config.compile(project);
    assertTrue(success);
    builder.addServerConfig(config);

    /*
    // CONFIG #2: HSQL -- disabled, the functions being tested are not HSQL compatible
    config = new LocalCluster("fixedsql-hsql.jar", 1, 1, 0, BackendTarget.HSQLDB_BACKEND);
    success = config.compile(project);
    assertTrue(success);
    builder.addServerConfig(config);
    */

    // no clustering tests for functions

    return builder;
}
}
|
package aggregationbenchmark;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.List;
import org.voltdb.CLIConfig;
import org.voltdb.VoltTable;
import org.voltdb.client.Client;
import org.voltdb.client.ClientConfig;
import org.voltdb.client.ClientFactory;
import org.voltdb.client.ClientResponse;
import org.voltdb.client.ClientStats;
import org.voltdb.client.ClientStatsContext;
import org.voltdb.client.ClientStatusListenerExt;
public class AggregationBenchmark {
// handy, rather than typing this out several times
static final String HORIZONTAL_RULE =
"
"
// validated command line configuration
final AggConfig config;
// Reference to the database connection we will use
final Client client;
AtomicInteger total = new AtomicInteger();
// Statistics manager objects from the client
final ClientStatsContext fullStatsContext;
/**
* Uses included {@link CLIConfig} class to
* declaratively state command line options with defaults
* and validation.
*/
static class AggConfig extends CLIConfig {
@Option(desc = "Comma separated list of the form server[:port] to connect to.")
String servers = "localhost";
@Option(desc = "Number of invocations.")
int invocations = 6;
@Option(desc = "Restore the data from snapshot or not.")
int restore = 0;
@Option(desc = "Snapshot path.")
String snapshotpath = "";
@Option(desc = "Stored procedure number ( an integer from 1 to 20 )")
int proc = 1;
@Option(desc = "Filename to write raw summary statistics to.")
String statsfile = "";
@Override
public void validate() {
if (proc <= 0 || proc > 20) exitWithMessageAndUsage("procedure number must be in range [1, 20]");
}
}
/**
* Provides a callback to be notified on node failure.
* This example only logs the event.
*/
class StatusListener extends ClientStatusListenerExt {
@Override
public void connectionLost(String hostname, int port, int connectionsLeft, DisconnectCause cause) {
// if the benchmark is still active
System.err.printf("Connection to %s:%d was lost.\n", hostname, port);
}
}
/**
* Constructor for benchmark instance.
* Configures VoltDB client and prints configuration.
*
* @param config Parsed & validated CLI options.
*/
public AggregationBenchmark(AggConfig config) {
this.config = config;
ClientConfig clientConfig = new ClientConfig("", "", new StatusListener());
//snapshot restore needs > default 2 minute timeout
clientConfig.setProcedureCallTimeout(0);
client = ClientFactory.createClient(clientConfig);
fullStatsContext = client.createStatsContext();
System.out.print(HORIZONTAL_RULE);
System.out.println(" Command Line Configuration");
System.out.println(HORIZONTAL_RULE);
System.out.println(config.getConfigDumpString());
}
/**
* Connect to a single server with retry. Limited exponential backoff.
* No timeout. This will run until the process is killed if it's not
* able to connect.
*
* @param server hostname:port or just hostname (hostname can be ip).
*/
void connectToOneServerWithRetry(String server) {
int sleep = 1000;
while (true) {
try {
client.createConnection(server);
break;
}
catch (Exception e) {
System.err.printf("Connection failed - retrying in %d second(s).\n", sleep / 1000);
try { Thread.sleep(sleep); } catch (Exception interruted) {}
if (sleep < 8000) sleep += sleep;
}
}
System.out.printf("Connected to VoltDB node at: %s.\n", server);
}
/**
* Connect to a set of servers in parallel. Each will retry until
* connection. This call will block until all have connected.
*
* @param servers A comma separated list of servers using the hostname:port
* syntax (where :port is optional).
* @throws InterruptedException if anything bad happens with the threads.
*/
void connect(String servers) throws InterruptedException {
System.out.println("Connecting to VoltDB...");
String[] serverArray = servers.split(",");
final CountDownLatch connections = new CountDownLatch(serverArray.length);
// use a new thread to connect to each server
for (final String server : serverArray) {
new Thread(new Runnable() {
@Override
public void run() {
connectToOneServerWithRetry(server);
connections.countDown();
}
}).start();
}
// block until all have connected
connections.await();
}
void restoreDatabase() throws Exception {
ClientResponse resp = null;
try {
resp = client.callProcedure("@SnapshotRestore", config.snapshotpath, "TestBackupAggBench");
} catch (Exception ex) {
ex.printStackTrace();
System.exit(-1);
}
}
/**
* Core benchmark code.
* Connect. Initialize. Run the loop. Cleanup. Print Results.
*
* @throws Exception if anything unexpected happens.
*/
public void runBenchmark() throws Exception {
// connect to one or more servers, loop until success
connect(config.servers);
if (config.restore > 0) {
System.out.println("\nLoading data from snapshot...");
restoreDatabase();
}
FileWriter fw = null;
if ((config.statsfile != null) && (config.statsfile.length() != 0)) {
fw = new FileWriter(config.statsfile);
}
System.out.print(HORIZONTAL_RULE);
System.out.println("\nRunning Benchmark");
System.out.println(HORIZONTAL_RULE);
// Benchmark start time
long queryStartTS, queryElapse;
int counter = config.invocations;
String procName = "Q" + config.proc;
List<Long> m = new ArrayList<Long>();
System.out.println(String.format("Running procedure %s for the %d times...", procName, counter));
queryStartTS = System.nanoTime();
VoltTable vt = null;
for (int i = 1; i <= counter; i++) {
vt = client.callProcedure(procName).getResults()[0];
if (vt.getRowCount() <= 0) {
System.err.println("ERROR Query %d empty result set");
System.exit(-1);
}
}
double avg = (double)(System.nanoTime() - queryStartTS) / counter;
System.out.printf("\n\n(Returned %d rows in average %f us)\n",
vt.getRowCount(), avg);
// block until all outstanding txns return
client.drain();
//retrieve stats
ClientStats stats = fullStatsContext.fetch().getStats();
// write stats to file
//client.writeSummaryCSV(stats, config.statsfile);
fw.append(String.format("%s,%d,-1,0,0,0,0,%f,0,0,0,0,0,0\n",
"Q" + String.format("%02d", config.proc),
stats.getStartTimestamp(),
avg/1000.0));
// close down the client connections
client.close();
if (fw != null)
fw.close();
}
/**
* Main routine creates a benchmark instance and kicks off the run method.
*
* @param args Command line arguments.
* @throws Exception if anything goes wrong.
* @see {@link AggConfig}
*/
public static void main(String[] args) throws Exception {
// create a configuration from the arguments
AggConfig config = new AggConfig();
config.parse(AggregationBenchmark.class.getName(), args);
AggregationBenchmark benchmark = new AggregationBenchmark(config);
benchmark.runBenchmark();
}
}
|
package mtr.data;
import io.netty.buffer.Unpooled;
import mtr.MTR;
import mtr.Registry;
import mtr.block.BlockNode;
import mtr.entity.EntitySeat;
import mtr.mappings.PersistentStateMapper;
import mtr.packet.IPacket;
import mtr.packet.PacketTrainDataGuiServer;
import mtr.path.PathData;
import net.minecraft.SharedConstants;
import net.minecraft.core.BlockPos;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.nbt.NbtIo;
import net.minecraft.network.FriendlyByteBuf;
import net.minecraft.resources.ResourceLocation;
import net.minecraft.server.MinecraftServer;
import net.minecraft.server.level.ServerLevel;
import net.minecraft.server.level.ServerPlayer;
import net.minecraft.world.entity.player.Player;
import net.minecraft.world.item.DyeColor;
import net.minecraft.world.level.Level;
import net.minecraft.world.level.block.Block;
import net.minecraft.world.level.block.state.BlockState;
import net.minecraft.world.phys.AABB;
import net.minecraft.world.phys.Vec3;
import org.apache.commons.io.IOUtils;
import org.msgpack.core.MessagePack;
import org.msgpack.core.MessagePacker;
import org.msgpack.core.MessageUnpacker;
import org.msgpack.value.ArrayValue;
import org.msgpack.value.Value;
import java.io.*;
import java.util.*;
import java.util.stream.Collectors;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
public class RailwayData extends PersistentStateMapper implements IPacket {
public final Set<Station> stations;
public final Set<Platform> platforms;
public final Set<Siding> sidings;
public final Set<Route> routes;
public final Set<Depot> depots;
public final DataCache dataCache;
private int prevPlatformCount;
private int prevSidingCount;
private boolean canWriteToFile = true;
private final Level world;
private final Map<BlockPos, Map<BlockPos, Rail>> rails;
private final SignalBlocks signalBlocks = new SignalBlocks();
private final List<Map<UUID, Long>> trainPositions = new ArrayList<>(2);
private final Map<Player, BlockPos> playerLastUpdatedPositions = new HashMap<>();
private final List<Player> playersToSyncSchedules = new ArrayList<>();
private final Map<Player, Set<TrainServer>> trainsInPlayerRange = new HashMap<>();
private final Map<Long, List<ScheduleEntry>> schedulesForPlatform = new HashMap<>();
private final Map<Player, EntitySeat> playerSeats = new HashMap<>();
private final List<Rail.RailActions> railActions = new ArrayList<>();
private final Map<Long, Thread> generatingPathThreads = new HashMap<>();
private static final int RAIL_UPDATE_DISTANCE = 128;
private static final int PLAYER_MOVE_UPDATE_THRESHOLD = 16;
private static final int SCHEDULE_UPDATE_TICKS = 60;
private static final int DATA_VERSION = 1;
private static final String NAME = "mtr_train_data";
private static final String KEY_RAW_MSGPACK = "raw_msgpack";
private static final String KEY_DATA_VERSION = "mtr_data_version";
private static final String KEY_STATIONS = "stations";
private static final String KEY_PLATFORMS = "platforms";
private static final String KEY_SIDINGS = "sidings";
private static final String KEY_ROUTES = "routes";
private static final String KEY_DEPOTS = "depots";
private static final String KEY_RAILS = "rails";
private static final String KEY_SIGNAL_BLOCKS = "signal_blocks";
/**
 * Creates an empty railway dataset bound to the given world.
 * All collections start empty; data is populated by a later load(...) call.
 */
public RailwayData(Level world) {
    super(NAME);
    this.world = world;
    stations = new HashSet<>();
    platforms = new HashSet<>();
    sidings = new HashSet<>();
    routes = new HashSet<>();
    depots = new HashSet<>();
    rails = new HashMap<>();
    dataCache = new DataCache(stations, platforms, sidings, routes, depots);
    // Two history slots for train positions (previous tick and current tick).
    for (int slot = 0; slot < 2; slot++) {
        trainPositions.add(new HashMap<>());
    }
}
@Override
public void load(CompoundTag compoundTag) {
    // Newer saves store the whole dataset as one raw MessagePack byte array.
    if (compoundTag.contains(KEY_RAW_MSGPACK)) {
        MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(compoundTag.getByteArray(KEY_RAW_MSGPACK));
        try {
            load(unpacker);
        } catch (IOException e) {
            e.printStackTrace();
        }
    } else {
        // Legacy NBT layout: one sub-compound per dataset, entries keyed by
        // arbitrary strings whose values are the serialized objects.
        try {
            final CompoundTag tagStations = compoundTag.getCompound(KEY_STATIONS);
            for (final String key : tagStations.getAllKeys()) {
                stations.add(new Station(tagStations.getCompound(key)));
            }

            final CompoundTag tagNewPlatforms = compoundTag.getCompound(KEY_PLATFORMS);
            for (final String key : tagNewPlatforms.getAllKeys()) {
                platforms.add(new Platform(tagNewPlatforms.getCompound(key)));
            }

            final CompoundTag tagNewSidings = compoundTag.getCompound(KEY_SIDINGS);
            for (final String key : tagNewSidings.getAllKeys()) {
                sidings.add(new Siding(tagNewSidings.getCompound(key)));
            }

            final CompoundTag tagNewRoutes = compoundTag.getCompound(KEY_ROUTES);
            for (final String key : tagNewRoutes.getAllKeys()) {
                routes.add(new Route(tagNewRoutes.getCompound(key)));
            }

            final CompoundTag tagNewDepots = compoundTag.getCompound(KEY_DEPOTS);
            for (final String key : tagNewDepots.getAllKeys()) {
                depots.add(new Depot(tagNewDepots.getCompound(key)));
            }

            // Rails are stored per start node, each entry holding its outgoing connections.
            final CompoundTag tagNewRails = compoundTag.getCompound(KEY_RAILS);
            for (final String key : tagNewRails.getAllKeys()) {
                final RailEntry railEntry = new RailEntry(tagNewRails.getCompound(key));
                rails.put(railEntry.pos, railEntry.connections);
            }

            final CompoundTag tagNewSignalBlocks = compoundTag.getCompound(KEY_SIGNAL_BLOCKS);
            for (final String key : tagNewSignalBlocks.getAllKeys()) {
                signalBlocks.signalBlocks.add(new SignalBlocks.SignalBlock(tagNewSignalBlocks.getCompound(key)));
            }

            // Drop any entries that reference rails which no longer exist, then
            // rebuild the lookup caches over the freshly loaded collections.
            validateData();
            dataCache.sync();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
/**
 * Loads the dataset from a MessagePack stream written by save(File).
 * The stream is a map of dataset-name -> array of serialized objects, plus a
 * data-version entry. Unpack order must exactly mirror the pack order.
 *
 * @throws IOException              on a malformed or truncated stream
 * @throws IllegalArgumentException if the stream was written by a newer version
 */
public void load(MessageUnpacker unpacker) throws IOException {
    int mapSize = unpacker.unpackMapHeader();
    for (int i = 0; i < mapSize; ++i) {
        String key = unpacker.unpackString();

        // The version entry is an int, not an array; handle it before the
        // generic array-header read below.
        if (key.equals(KEY_DATA_VERSION)) {
            if (unpacker.unpackInt() > DATA_VERSION) throw new IllegalArgumentException("Unsupported data version");
            continue;
        }

        int arraySize = unpacker.unpackArrayHeader();
        switch (key) {
            case KEY_STATIONS:
                for (int j = 0; j < arraySize; ++j) stations.add(new Station(readMessagePackSKMap(unpacker)));
                break;
            case KEY_PLATFORMS:
                for (int j = 0; j < arraySize; ++j) platforms.add(new Platform(readMessagePackSKMap(unpacker)));
                break;
            case KEY_SIDINGS:
                for (int j = 0; j < arraySize; ++j) sidings.add(new Siding(readMessagePackSKMap(unpacker)));
                break;
            case KEY_ROUTES:
                for (int j = 0; j < arraySize; ++j) routes.add(new Route(readMessagePackSKMap(unpacker)));
                break;
            case KEY_DEPOTS:
                for (int j = 0; j < arraySize; ++j) depots.add(new Depot(readMessagePackSKMap(unpacker)));
                break;
            case KEY_RAILS:
                // Each entry expands to one start node plus its outgoing connections.
                for (int j = 0; j < arraySize; ++j) {
                    final RailEntry railEntry = new RailEntry(readMessagePackSKMap(unpacker));
                    rails.put(railEntry.pos, railEntry.connections);
                }
                break;
            case KEY_SIGNAL_BLOCKS:
                for (int j = 0; j < arraySize; ++j) signalBlocks.signalBlocks.add(new SignalBlocks.SignalBlock(readMessagePackSKMap(unpacker)));
                break;
        }
    }

    // Remove dangling references, then rebuild lookup caches.
    validateData();
    dataCache.sync();
}
@Override
public CompoundTag save(CompoundTag compoundTag) {
    // Intentionally a no-op: persistence is handled by save(File), which writes
    // a MessagePack payload inside its own NBT wrapper.
    return compoundTag;
}
/**
 * Serializes the full dataset to MessagePack, then hands the resulting bytes
 * to a background thread which wraps them in an NBT compound and writes the
 * file. {@code canWriteToFile} acts as a single-writer latch so overlapping
 * saves are skipped instead of racing on the file.
 */
@Override
public void save(File file) {
    // A previous async write is still in flight; skip this save cycle.
    if (!canWriteToFile) {
        return;
    }

    final ByteArrayOutputStream bufferStream = new ByteArrayOutputStream(16777216);
    final MessagePacker messagePacker = MessagePack.newDefaultPacker(bufferStream);
    final long time1 = System.nanoTime();

    try {
        validateData();
        canWriteToFile = false;

        // 7 datasets + the version entry
        messagePacker.packMapHeader(8);
        messagePacker.packString(KEY_DATA_VERSION).packInt(DATA_VERSION);
        // Stations, routes and depots skip unnamed entries (skipVerify = false).
        writeMessagePackDataset(messagePacker, stations, KEY_STATIONS, false);
        writeMessagePackDataset(messagePacker, platforms, KEY_PLATFORMS);
        writeMessagePackDataset(messagePacker, sidings, KEY_SIDINGS);
        writeMessagePackDataset(messagePacker, routes, KEY_ROUTES, false);
        writeMessagePackDataset(messagePacker, depots, KEY_DEPOTS, false);
        writeMessagePackDataset(messagePacker, signalBlocks.signalBlocks, KEY_SIGNAL_BLOCKS);

        messagePacker.packString(KEY_RAILS);
        messagePacker.packArrayHeader(rails.size());
        for (final Map.Entry<BlockPos, Map<BlockPos, Rail>> entry : rails.entrySet()) {
            final RailEntry data = new RailEntry(entry.getKey(), entry.getValue());
            messagePacker.packMapHeader(data.messagePackLength());
            data.toMessagePack(messagePacker);
        }
        messagePacker.close();

        // File I/O happens off-thread; the latch is released when it finishes.
        new Thread(() -> {
            CompoundTag compoundTag = new CompoundTag();
            CompoundTag dataTag = new CompoundTag();
            dataTag.putInt(KEY_DATA_VERSION, DATA_VERSION);
            dataTag.putByteArray(KEY_RAW_MSGPACK, bufferStream.toByteArray());
            compoundTag.put("data", dataTag);
            compoundTag.putInt("DataVersion", SharedConstants.getCurrentVersion().getWorldVersion());
            try {
                NbtIo.writeCompressed(compoundTag, file);
            } catch (IOException iOException) {
                iOException.printStackTrace();
            } finally {
                canWriteToFile = true;
            }
        }).start();
    } catch (Exception e) {
        e.printStackTrace();
        // FIX: if packing failed the writer thread was never started, so nothing
        // would ever reset the latch — previously this disabled saving forever.
        canWriteToFile = true;
    }

    final long time2 = System.nanoTime();
    System.out.printf("MessagePack: %f ms\n", (time2 - time1) / 1000000F);
}
/**
 * Per-tick server update: syncs nearby rails to players, advances train
 * simulation on every siding, builds queued rail actions, and pushes train
 * and schedule updates to clients. Statement order is significant — the
 * position history rotation and cache clears must precede the siding loop.
 */
public void simulateTrains() {
    final List<? extends Player> players = world.players();
    // 1. Send the set of nearby rails to any player that moved far enough
    //    since the last send (threshold in Manhattan blocks).
    players.forEach(player -> {
        final BlockPos playerBlockPos = player.blockPosition();
        final Vec3 playerPos = player.position();
        if (!playerLastUpdatedPositions.containsKey(player) || playerLastUpdatedPositions.get(player).distManhattan(playerBlockPos) > PLAYER_MOVE_UPDATE_THRESHOLD) {
            final Map<BlockPos, Map<BlockPos, Rail>> railsToAdd = new HashMap<>();
            rails.forEach((startPos, blockPosRailMap) -> blockPosRailMap.forEach((endPos, rail) -> {
                if (new AABB(startPos, endPos).inflate(RAIL_UPDATE_DISTANCE).contains(playerPos)) {
                    if (!railsToAdd.containsKey(startPos)) {
                        railsToAdd.put(startPos, new HashMap<>());
                    }
                    railsToAdd.get(startPos).put(endPos, rail);
                }
            }));
            final FriendlyByteBuf packet = new FriendlyByteBuf(Unpooled.buffer());
            packet.writeInt(railsToAdd.size());
            railsToAdd.forEach((posStart, railMap) -> {
                packet.writeBlockPos(posStart);
                packet.writeInt(railMap.size());
                railMap.forEach((posEnd, rail) -> {
                    packet.writeBlockPos(posEnd);
                    rail.writePacket(packet);
                });
            });
            // Oversized payloads are dropped rather than risking a network error.
            if (packet.readableBytes() <= MAX_PACKET_BYTES) {
                Registry.sendToPlayer((ServerPlayer) player, PACKET_WRITE_RAILS, packet);
            }
            playerLastUpdatedPositions.put(player, playerBlockPos);
        }
    });

    // 2. Rotate the two-slot train-position history (drop oldest, add empty
    //    slot for this tick) and reset per-tick state.
    trainPositions.remove(0);
    trainPositions.add(new HashMap<>());
    final Map<Player, Set<TrainServer>> newTrainsInPlayerRange = new HashMap<>();
    final Set<TrainServer> trainsToSync = new HashSet<>();
    schedulesForPlatform.clear();
    signalBlocks.resetOccupied();

    // 3. Advance every siding's trains; this repopulates trainPositions,
    //    signal occupancy, per-player train sets and platform schedules.
    sidings.forEach(siding -> {
        siding.setSidingData(world, dataCache.sidingIdToDepot.get(siding.id), rails);
        siding.simulateTrain(dataCache, trainPositions, signalBlocks, newTrainsInPlayerRange, trainsToSync, schedulesForPlatform);
    });

    final int hour = Depot.getHour(world);
    depots.forEach(depot -> depot.deployTrain(this, hour));

    // 4. Keep seats attached to present players; discard seats of players
    //    that have left (cannot remove while iterating the map).
    final Set<Player> playersToRemove = new HashSet<>();
    playerSeats.forEach((player, seat) -> {
        if (players.contains(player)) {
            seat.updateSeat(player);
        } else {
            playersToRemove.add(player);
        }
    });
    playersToRemove.forEach(playerSeats::remove);

    // 5. Build at most the head rail action per tick; pop it when finished.
    if (!railActions.isEmpty() && railActions.get(0).build()) {
        railActions.remove(0);
        PacketTrainDataGuiServer.updateRailActionsS2C(world, railActions);
    }

    // 6. For each player that lost at least one train from range, send the
    //    list of train ids to KEEP (the client deletes the rest).
    trainsInPlayerRange.forEach((player, trains) -> {
        for (final TrainServer train : trains) {
            if (!newTrainsInPlayerRange.containsKey(player) || !newTrainsInPlayerRange.get(player).contains(train)) {
                final FriendlyByteBuf packet = new FriendlyByteBuf(Unpooled.buffer());
                if (newTrainsInPlayerRange.containsKey(player)) {
                    packet.writeInt(newTrainsInPlayerRange.get(player).size());
                    newTrainsInPlayerRange.get(player).forEach(trainToKeep -> packet.writeLong(trainToKeep.id));
                } else {
                    packet.writeInt(0);
                }
                if (packet.readableBytes() <= MAX_PACKET_BYTES) {
                    Registry.sendToPlayer((ServerPlayer) player, PACKET_DELETE_TRAINS, packet);
                }
                // One delete packet per player is enough.
                break;
            }
        }
    });

    // 7. Send new or changed trains, coalescing per-train buffers into
    //    packets that stay under the size limit.
    newTrainsInPlayerRange.forEach((player, trains) -> {
        final List<FriendlyByteBuf> trainsPacketsToUpdate = new ArrayList<>();
        trains.forEach(train -> {
            if (trainsToSync.contains(train) || !trainsInPlayerRange.containsKey(player) || !trainsInPlayerRange.get(player).contains(train)) {
                final FriendlyByteBuf packet = new FriendlyByteBuf(Unpooled.buffer());
                train.writePacket(packet);
                if (packet.readableBytes() < MAX_PACKET_BYTES) {
                    trainsPacketsToUpdate.add(packet);
                }
            }
        });
        while (!trainsPacketsToUpdate.isEmpty()) {
            FriendlyByteBuf packet = new FriendlyByteBuf(Unpooled.buffer());
            while (!trainsPacketsToUpdate.isEmpty()) {
                final FriendlyByteBuf trainPacket = trainsPacketsToUpdate.get(0);
                if (packet.readableBytes() + trainPacket.readableBytes() < MAX_PACKET_BYTES) {
                    packet.writeBytes(trainPacket);
                    trainsPacketsToUpdate.remove(0);
                } else {
                    break;
                }
            }
            Registry.sendToPlayer((ServerPlayer) player, PACKET_UPDATE_TRAINS, packet);
        }
    });
    trainsInPlayerRange.clear();
    trainsInPlayerRange.putAll(newTrainsInPlayerRange);

    // 8. Every SCHEDULE_UPDATE_TICKS, enqueue all players for a schedule
    //    refresh; one queued player is then served per tick.
    if (MTR.isGameTickInterval(SCHEDULE_UPDATE_TICKS)) {
        players.forEach(player -> {
            if (!playersToSyncSchedules.contains(player)) {
                playersToSyncSchedules.add(player);
            }
        });
    }
    if (!playersToSyncSchedules.isEmpty()) {
        final Player player = playersToSyncSchedules.remove(0);
        final BlockPos playerBlockPos = player.blockPosition();
        final Vec3 playerPos = player.position();
        // Platforms near the player, or inside the station area the player is in.
        final Set<Long> platformIds = platforms.stream().filter(platform -> {
            if (platform.isCloseToSavedRail(playerBlockPos, PLAYER_MOVE_UPDATE_THRESHOLD, PLAYER_MOVE_UPDATE_THRESHOLD, PLAYER_MOVE_UPDATE_THRESHOLD)) {
                return true;
            }
            final Station station = dataCache.platformIdToStation.get(platform.id);
            return station != null && station.inArea(playerBlockPos.getX(), playerBlockPos.getZ());
        }).map(platform -> platform.id).collect(Collectors.toSet());
        final Set<UUID> railsToAdd = new HashSet<>();
        rails.forEach((startPos, blockPosRailMap) -> blockPosRailMap.forEach((endPos, rail) -> {
            if (new AABB(startPos, endPos).inflate(RAIL_UPDATE_DISTANCE).contains(playerPos)) {
                railsToAdd.add(PathData.getRailProduct(startPos, endPos));
            }
        }));
        final Map<Long, Boolean> signalBlockStatus = new HashMap<>();
        railsToAdd.forEach(rail -> signalBlocks.getSignalBlockStatus(signalBlockStatus, rail));
        if (!platformIds.isEmpty() || !signalBlockStatus.isEmpty()) {
            final FriendlyByteBuf packet = new FriendlyByteBuf(Unpooled.buffer());
            packet.writeInt(platformIds.size());
            platformIds.forEach(platformId -> {
                packet.writeLong(platformId);
                final List<ScheduleEntry> scheduleEntries = schedulesForPlatform.get(platformId);
                if (scheduleEntries == null) {
                    packet.writeInt(0);
                } else {
                    packet.writeInt(scheduleEntries.size());
                    scheduleEntries.forEach(scheduleEntry -> scheduleEntry.writePacket(packet));
                }
            });
            packet.writeInt(signalBlockStatus.size());
            signalBlockStatus.forEach((id, occupied) -> {
                packet.writeLong(id);
                packet.writeBoolean(occupied);
            });
            if (packet.readableBytes() <= MAX_PACKET_BYTES) {
                Registry.sendToPlayer((ServerPlayer) player, PACKET_UPDATE_SCHEDULE, packet);
            }
        }
    }

    // 9. Rebuild lookup caches when platform/siding counts changed this tick.
    if (prevPlatformCount != platforms.size() || prevSidingCount != sidings.size()) {
        dataCache.sync();
    }
    prevPlatformCount = platforms.size();
    prevSidingCount = sidings.size();
}
/**
 * Handles a player joining: spawns the player's dedicated seat entity and
 * streams the complete railway dataset to the client in chunked packets.
 */
public void onPlayerJoin(ServerPlayer serverPlayer) {
    final EntitySeat newSeat = new EntitySeat(world, serverPlayer.getX(), serverPlayer.getY(), serverPlayer.getZ());
    newSeat.updateSeat(serverPlayer);
    world.addFreshEntity(newSeat);
    playerSeats.put(serverPlayer, newSeat);
    PacketTrainDataGuiServer.sendAllInChunks(serverPlayer, stations, platforms, sidings, routes, depots, signalBlocks);
}
/**
 * Returns the seat entity assigned to the player, or null if the player has
 * no seat registered (e.g. never joined through onPlayerJoin).
 */
public EntitySeat getSeatFromPlayer(Player player) {
    return playerSeats.get(player);
}
// writing data
/**
 * Registers a rail connection and, when {@code validate} is true, assigns it a
 * fresh nonzero saved-rail id (used to auto-create platforms/sidings) and
 * re-validates the dataset.
 *
 * @return the generated id, or 0 when {@code validate} is false
 */
public long addRail(TransportMode transportMode, BlockPos posStart, BlockPos posEnd, Rail rail, boolean validate) {
    long newId = 0;
    if (validate) {
        // FIX: 0 is the "no saved rail" sentinel in the static addRail helper;
        // re-roll so a (rare) random 0 can never silently skip platform/siding
        // creation.
        final Random random = new Random();
        do {
            newId = random.nextLong();
        } while (newId == 0);
    }
    addRail(rails, platforms, sidings, transportMode, posStart, posEnd, rail, newId);
    if (validate) {
        validateData();
    }
    return newId;
}
/**
 * Adds a signal of the given color to the rail between the two positions.
 */
public long addSignal(DyeColor color, BlockPos posStart, BlockPos posEnd) {
    final UUID railProduct = PathData.getRailProduct(posStart, posEnd);
    return signalBlocks.add(0, color, railProduct);
}
/**
 * Removes a rail node and every connection touching it, then re-validates the
 * dataset and pushes any resulting signal removals to all clients.
 */
public void removeNode(BlockPos pos) {
    removeNode(world, rails, pos);
    validateData();
    broadcastSignalValidation();
}

/**
 * Removes the rail connection between the two positions (both directions),
 * then re-validates the dataset and pushes signal removals to all clients.
 */
public void removeRailConnection(BlockPos pos1, BlockPos pos2) {
    removeRailConnection(world, rails, pos1, pos2);
    validateData();
    broadcastSignalValidation();
}

// Sends the signal-validation packet to every player, if any signals changed.
private void broadcastSignalValidation() {
    final FriendlyByteBuf packet = signalBlocks.getValidationPacket(rails);
    if (packet != null) {
        world.players().forEach(player -> Registry.sendToPlayer((ServerPlayer) player, PACKET_REMOVE_SIGNALS, packet));
    }
}
/**
 * True if any rail starting at this position is a saved-rail type
 * (platform or siding rail).
 */
public boolean hasSavedRail(BlockPos pos) {
    final Map<BlockPos, Rail> connections = rails.get(pos);
    return connections != null && connections.values().stream().anyMatch(rail -> rail.railType.hasSavedRail);
}

/** True if a rail connection from pos1 to pos2 exists. */
public boolean containsRail(BlockPos pos1, BlockPos pos2) {
    return containsRail(rails, pos1, pos2);
}

/** Removes the signal of the given color from the rail between the positions. */
public long removeSignal(DyeColor color, BlockPos posStart, BlockPos posEnd) {
    final UUID railProduct = PathData.getRailProduct(posStart, posEnd);
    return signalBlocks.remove(0, color, railProduct);
}
/**
 * Queues a bridge-building action along the rail between the two positions.
 *
 * @return true if the rail exists and the action was queued
 */
public boolean markRailForBridge(Player player, BlockPos pos1, BlockPos pos2, int radius, BlockState state) {
    return queueRailAction(player, pos1, pos2, Rail.RailActionType.BRIDGE, radius, 0, state);
}

/**
 * Queues a tunnel-digging action along the rail between the two positions.
 *
 * @return true if the rail exists and the action was queued
 */
public boolean markRailForTunnel(Player player, BlockPos pos1, BlockPos pos2, int radius, int height) {
    return queueRailAction(player, pos1, pos2, Rail.RailActionType.TUNNEL, radius, height, null);
}

/**
 * Queues a tunnel-wall action along the rail between the two positions.
 * Radius and height are each expanded by 1 so the wall surrounds the tunnel bore.
 *
 * @return true if the rail exists and the action was queued
 */
public boolean markRailForTunnelWall(Player player, BlockPos pos1, BlockPos pos2, int radius, int height, BlockState state) {
    return queueRailAction(player, pos1, pos2, Rail.RailActionType.TUNNEL_WALL, radius + 1, height + 1, state);
}

// Shared body of the three markRailFor* methods (was triplicated): verifies the
// rail exists, queues the action and notifies clients.
private boolean queueRailAction(Player player, BlockPos pos1, BlockPos pos2, Rail.RailActionType actionType, int radius, int height, BlockState state) {
    if (!containsRail(pos1, pos2)) {
        return false;
    }
    railActions.add(new Rail.RailActions(world, player, actionType, rails.get(pos1).get(pos2), radius, height, state));
    PacketTrainDataGuiServer.updateRailActionsS2C(world, railActions);
    return true;
}
/**
 * Forgets the player's cached sync position so rails are re-sent in full
 * if the player reconnects.
 */
public void disconnectPlayer(Player player) {
    playerLastUpdatedPositions.remove(player);
}

/**
 * Cancels the queued rail action with the given id and notifies clients.
 */
public void removeRailAction(long id) {
    railActions.removeIf(action -> action.id == id);
    PacketTrainDataGuiServer.updateRailActionsS2C(world, railActions);
}
/**
 * Starts (or restarts) asynchronous path generation for the depot's main
 * route. An already-running generation for the same depot is interrupted
 * before the new one begins.
 */
public void generatePath(MinecraftServer minecraftServer, long depotId) {
    // Drop bookkeeping entries for generation threads that already finished.
    generatingPathThreads.keySet().removeIf(id -> !generatingPathThreads.get(id).isAlive());
    final Depot depot = dataCache.depotIdMap.get(depotId);
    if (depot == null) {
        return;
    }
    final String depotSuffix = depot.name.isEmpty() ? "" : " for " + depot.name;
    final Thread existingThread = generatingPathThreads.get(depotId);
    if (existingThread != null) {
        existingThread.interrupt();
        System.out.println("Restarting path generation" + depotSuffix);
    } else {
        System.out.println("Starting path generation" + depotSuffix);
    }
    depot.generateMainRoute(minecraftServer, world, dataCache, rails, sidings, thread -> generatingPathThreads.put(depotId, thread));
}
/**
 * Copies into {@code schedulesForStation} every platform schedule belonging
 * to the given station.
 */
public void getSchedulesForStation(Map<Long, List<ScheduleEntry>> schedulesForStation, long stationId) {
    schedulesForPlatform.forEach((platformId, scheduleEntries) -> {
        final Station platformStation = dataCache.platformIdToStation.get(platformId);
        if (platformStation != null && platformStation.id == stationId) {
            schedulesForStation.put(platformId, scheduleEntries);
        }
    });
}

/**
 * Returns the current schedule for a platform, or null when no train has
 * produced one this tick.
 */
public List<ScheduleEntry> getSchedulesAtPlatform(long platformId) {
    return schedulesForPlatform.get(platformId);
}
/**
 * Removes dangling data: platforms/sidings whose rails are gone (clients are
 * notified), and saved rails whose platform/siding entry no longer exists.
 */
private void validateData() {
    removeSavedRailS2C(world, platforms, rails, PACKET_DELETE_PLATFORM);
    removeSavedRailS2C(world, sidings, rails, PACKET_DELETE_SIDING);
    // Collect invalid saved-rail connections as flattened (start, end) pairs;
    // cannot remove inside the forEach without a ConcurrentModificationException.
    final List<BlockPos> railsToRemove = new ArrayList<>();
    rails.forEach((startPos, railMap) -> railMap.forEach((endPos, rail) -> {
        if (rail.railType.hasSavedRail && SavedRailBase.isInvalidSavedRail(rails, endPos, startPos)) {
            railsToRemove.add(startPos);
            railsToRemove.add(endPos);
        }
    }));
    // Consume the list pairwise; passing a null world skips the block-state
    // resets and rail validation done by the static removeRailConnection.
    for (int i = 0; i < railsToRemove.size() - 1; i += 2) {
        removeRailConnection(null, rails, railsToRemove.get(i), railsToRemove.get(i + 1));
    }
}
// static finders
/**
 * Returns the first platform containing the position, or null when none does
 * (or when the lookup throws).
 */
public static Platform getPlatformByPos(Set<Platform> platforms, BlockPos pos) {
    try {
        for (final Platform platform : platforms) {
            if (platform.containsPos(pos)) {
                return platform;
            }
        }
        return null;
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}
// other
/**
 * Registers a rail connection in the given map and, when savedRailId is
 * nonzero, auto-creates a Platform or Siding for platform/siding rail types
 * unless one already covers either endpoint.
 */
public static void addRail(Map<BlockPos, Map<BlockPos, Rail>> rails, Set<Platform> platforms, Set<Siding> sidings, TransportMode transportMode, BlockPos posStart, BlockPos posEnd, Rail rail, long savedRailId) {
    try {
        rails.computeIfAbsent(posStart, pos -> new HashMap<>()).put(posEnd, rail);
        if (savedRailId == 0) {
            return;
        }
        if (rail.railType == RailType.PLATFORM) {
            final boolean alreadyCovered = platforms.stream().anyMatch(platform -> platform.containsPos(posStart) || platform.containsPos(posEnd));
            if (!alreadyCovered) {
                platforms.add(new Platform(savedRailId, transportMode, posStart, posEnd));
            }
        } else if (rail.railType == RailType.SIDING) {
            final boolean alreadyCovered = sidings.stream().anyMatch(siding -> siding.containsPos(posStart) || siding.containsPos(posEnd));
            if (!alreadyCovered) {
                sidings.add(new Siding(savedRailId, transportMode, posStart, posEnd, (int) Math.floor(rail.getLength())));
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Deletes a node: drops its outgoing connections and every incoming
 * connection pointing at it; nodes left with no connections get their block
 * state reset. A null world skips block updates and validation.
 */
public static void removeNode(Level world, Map<BlockPos, Map<BlockPos, Rail>> rails, BlockPos pos) {
    try {
        rails.remove(pos);
        rails.forEach((nodePos, connections) -> {
            connections.remove(pos);
            if (connections.isEmpty() && world != null) {
                BlockNode.resetRailNode(world, nodePos);
            }
        });
        if (world != null) {
            validateRails(world, rails);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Removes the connection between two nodes in both directions; a node left
 * with no connections gets its block state reset. A null world skips block
 * updates and validation.
 */
public static void removeRailConnection(Level world, Map<BlockPos, Map<BlockPos, Rail>> rails, BlockPos pos1, BlockPos pos2) {
    try {
        removeDirectionalConnection(world, rails, pos1, pos2);
        removeDirectionalConnection(world, rails, pos2, pos1);
        if (world != null) {
            validateRails(world, rails);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

// Removes the one-way connection from -> to; resets the node block if it has
// no connections left (and a world is available).
private static void removeDirectionalConnection(Level world, Map<BlockPos, Map<BlockPos, Rail>> rails, BlockPos from, BlockPos to) {
    final Map<BlockPos, Rail> connections = rails.get(from);
    if (connections != null) {
        connections.remove(to);
        if (connections.isEmpty() && world != null) {
            BlockNode.resetRailNode(world, from);
        }
    }
}
/** True if a rail connection from pos1 to pos2 exists in the map. */
public static boolean containsRail(Map<BlockPos, Map<BlockPos, Rail>> rails, BlockPos pos1, BlockPos pos2) {
    final Map<BlockPos, Rail> connections = rails.get(pos1);
    return connections != null && connections.containsKey(pos2);
}
/**
 * Returns the station at the position, preferring the block-pos cache and
 * falling back to an area search; null when none is found or on error.
 */
public static Station getStation(Set<Station> stations, DataCache dataCache, BlockPos pos) {
    try {
        if (dataCache.blockPosToStation.containsKey(pos)) {
            return dataCache.blockPosToStation.get(pos);
        }
        for (final Station station : stations) {
            if (station.inArea(pos.getX(), pos.getZ())) {
                return station;
            }
        }
        return null;
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}
/** Convenience overload with default search bounds (radius 4, y range 0..4). */
public static long getClosePlatformId(Set<Platform> platforms, DataCache dataCache, BlockPos pos) {
    return getClosePlatformId(platforms, dataCache, pos, 4, 0, 4);
}

/**
 * Finds the id of the platform closest to a position, growing the search
 * radius one block at a time up to {@code radius}. Results (including "none
 * found" as 0) are memoized in the cache; returns -1 on error.
 */
public static long getClosePlatformId(Set<Platform> platforms, DataCache dataCache, BlockPos pos, int radius, int lower, int upper) {
    try {
        if (dataCache.blockPosToPlatformId.containsKey(pos)) {
            return dataCache.blockPosToPlatformId.get(pos);
        }
        long closestPlatformId = 0;
        for (int searchRadius = 1; searchRadius <= radius && closestPlatformId == 0; searchRadius++) {
            final int currentRadius = searchRadius;
            closestPlatformId = platforms.stream()
                    .filter(platform -> platform.isCloseToSavedRail(pos, currentRadius, lower, upper))
                    .min(Comparator.comparingInt(platform -> platform.getMidPos().distManhattan(pos)))
                    .map(platform -> platform.id)
                    .orElse(0L);
        }
        dataCache.blockPosToPlatformId.put(pos, closestPlatformId);
        return closestPlatformId;
    } catch (Exception e) {
        e.printStackTrace();
        return -1;
    }
}
public static boolean useRoutesAndStationsFromIndex(int stopIndex, List<Long> routeIds, DataCache dataCache, RouteAndStationsCallback routeAndStationsCallback) {
if (stopIndex < 0) {
return false;
}
int sum = 0;
for (int i = 0; i < routeIds.size(); i++) {
final Route thisRoute = dataCache.routeIdMap.get(routeIds.get(i));
final Route nextRoute = i < routeIds.size() - 1 ? dataCache.routeIdMap.get(routeIds.get(i + 1)) : null;
if (thisRoute != null) {
final int difference = stopIndex - sum;
sum += thisRoute.platformIds.size();
if (!thisRoute.platformIds.isEmpty() && nextRoute != null && !nextRoute.platformIds.isEmpty() && thisRoute.platformIds.get(thisRoute.platformIds.size() - 1).equals(nextRoute.platformIds.get(0))) {
sum
}
if (stopIndex < sum) {
final Station thisStation = dataCache.platformIdToStation.get(thisRoute.platformIds.get(difference));
final Station nextStation = difference < thisRoute.platformIds.size() - 1 ? dataCache.platformIdToStation.get(thisRoute.platformIds.get(difference + 1)) : null;
final Station lastStation = thisRoute.platformIds.isEmpty() ? null : dataCache.platformIdToStation.get(thisRoute.platformIds.get(thisRoute.platformIds.size() - 1));
routeAndStationsCallback.routeAndStationsCallback(thisRoute, nextRoute, thisStation, nextStation, lastStation);
return true;
}
}
}
return false;
}
/**
 * Returns whether {@code value} lies within the closed interval spanned by {@code value1} and
 * {@code value2} (in either order), with no padding.
 */
public static boolean isBetween(double value, double value1, double value2) {
    final double noPadding = 0;
    return isBetween(value, value1, value2, noPadding);
}
/**
 * Returns whether {@code value} lies within the closed interval spanned by {@code value1} and
 * {@code value2} (in either order), widened on both sides by {@code padding}.
 */
public static boolean isBetween(double value, double value1, double value2, double padding) {
    final double lowerBound = Math.min(value1, value2) - padding;
    final double upperBound = Math.max(value1, value2) + padding;
    return lowerBound <= value && value <= upperBound;
}
/**
 * Convenience overload: serialises the dataset under the given key without filtering out
 * unnamed entries (equivalent to calling the four-argument overload with skipVerify = true).
 */
public static void writeMessagePackDataset(MessagePacker packer, Collection<? extends SerializedDataBase> dataSet, String key) throws IOException {
    final boolean skipVerify = true;
    writeMessagePackDataset(packer, dataSet, key, skipVerify);
}
/**
 * Serialises a dataset into the MessagePack stream under the given key. When
 * {@code skipVerify} is false, entries that are {@code NameColorDataBase} instances with an
 * empty name are excluded. The same predicate decides both the array header size and which
 * entries are written, so the two passes are guaranteed to agree.
 *
 * @param skipVerify when true, every entry is written without name filtering
 * @throws IOException propagated from the underlying packer
 */
public static void writeMessagePackDataset(MessagePacker messagePacker, Collection<? extends SerializedDataBase> dataSet, String key, boolean skipVerify) throws IOException {
    messagePacker.packString(key);
    // First pass: count the entries that will actually be written (the array header must match).
    int dataSetSize = 0;
    if (skipVerify) {
        dataSetSize = dataSet.size();
    } else {
        for (final SerializedDataBase data : dataSet) {
            if (shouldWriteMessagePackData(data)) {
                ++dataSetSize;
            }
        }
    }
    messagePacker.packArrayHeader(dataSetSize);
    // Second pass: write each surviving entry as a map.
    for (final SerializedDataBase data : dataSet) {
        if (skipVerify || shouldWriteMessagePackData(data)) {
            messagePacker.packMapHeader(data.messagePackLength());
            data.toMessagePack(messagePacker);
        }
    }
}

/**
 * Shared filter for both passes above: returns true unless the entry is a
 * {@code NameColorDataBase} whose name is empty (unnamed entries are skipped).
 */
private static boolean shouldWriteMessagePackData(SerializedDataBase data) {
    return !(data instanceof NameColorDataBase) || !((NameColorDataBase) data).name.isEmpty();
}
/**
 * Reads one MessagePack map from the unpacker and returns it as a Java map keyed by the
 * unpacked String keys. Key/value pairs are consumed in stream order.
 *
 * @throws IOException propagated from the underlying unpacker
 */
public static Map<String, Value> readMessagePackSKMap(MessageUnpacker unpacker) throws IOException {
    final int entryCount = unpacker.unpackMapHeader();
    final Map<String, Value> unpacked = new HashMap<>(entryCount);
    int remaining = entryCount;
    while (remaining-- > 0) {
        // The key must be unpacked before its value to preserve stream order.
        final String mapKey = unpacker.unpackString();
        unpacked.put(mapKey, unpacker.unpackValue());
    }
    return unpacked;
}
/**
 * Converts a MessagePack map value (keyed by MessagePack string values) into a Java map keyed
 * by plain Strings; the values are passed through unchanged.
 */
public static Map<String, Value> castMessagePackValueToSKMap(Value value) {
    final Map<Value, Value> rawMap = value.asMapValue().map();
    final Map<String, Value> converted = new HashMap<>(rawMap.size());
    for (final Map.Entry<Value, Value> entry : rawMap.entrySet()) {
        converted.put(entry.getKey().asStringValue().asString(), entry.getValue());
    }
    return converted;
}
/**
 * Fetches the RailwayData instance for the given world, delegating to the generic
 * {@code getInstance} overload with a factory that constructs a fresh instance and this
 * class's saved-data name. NOTE(review): presumably the overload handles per-world caching
 * and lazy creation — it is defined elsewhere in this file; confirm before relying on it.
 */
public static RailwayData getInstance(Level world) {
    return getInstance(world, () -> new RailwayData(world), NAME);
}
/**
 * Runs the given task once and prints its wall-clock duration in seconds to stdout when the
 * duration meets or exceeds {@code threshold}. Intended as a lightweight profiling aid.
 */
public static void benchmark(Runnable runnable, float threshold) {
    final long startNanos = System.nanoTime();
    runnable.run();
    final long elapsedNanos = System.nanoTime() - startNanos;
    final float seconds = elapsedNanos / 1000000000F;
    if (seconds >= threshold) {
        System.out.println(seconds);
    }
}
/**
 * Prunes invalid rail data: removes node entries whose chunk is loaded but whose block is no
 * longer a {@code BlockNode}, and removes entries with no remaining connections. Nodes in
 * unloaded chunks are left untouched (their block state cannot be checked).
 */
private static void validateRails(Level world, Map<BlockPos, Map<BlockPos, Rail>> rails) {
    final Set<BlockPos> railsToRemove = new HashSet<>();
    final Set<BlockPos> railNodesToRemove = new HashSet<>();
    rails.forEach((startPos, railMap) -> {
        // FIX: block-to-chunk conversion must floor-divide. Plain integer division truncates
        // toward zero, so e.g. x = -5 previously yielded chunk 0 instead of -1, making nodes at
        // negative coordinates check the wrong chunk's loaded state. An arithmetic shift by 4
        // gives the correct floor division by 16 for all signs.
        final boolean loadedChunk = world.hasChunk(startPos.getX() >> 4, startPos.getZ() >> 4);
        if (loadedChunk && !(world.getBlockState(startPos).getBlock() instanceof BlockNode)) {
            railNodesToRemove.add(startPos);
        }
        if (railMap.isEmpty()) {
            railsToRemove.add(startPos);
        }
    });
    // Mutate the map only after iteration to avoid ConcurrentModificationException.
    railsToRemove.forEach(rails::remove);
    railNodesToRemove.forEach(pos -> removeNode(null, rails, pos));
}
/**
 * Removes saved rails (platforms/sidings) that no longer reference valid rails, and notifies
 * every connected player of each removal via the given packet id so clients stay in sync.
 */
private static void removeSavedRailS2C(Level world, Set<? extends SavedRailBase> savedRailBases, Map<BlockPos, Map<BlockPos, Rail>> rails, ResourceLocation packetId) {
    savedRailBases.removeIf(savedRailBase -> {
        if (!savedRailBase.isInvalidSavedRail(rails)) {
            return false;
        }
        // Broadcast the removed id to all players before dropping the entry server-side.
        final FriendlyByteBuf packet = new FriendlyByteBuf(Unpooled.buffer());
        packet.writeLong(savedRailBase.id);
        world.players().forEach(player -> Registry.sendToPlayer((ServerPlayer) player, packetId, packet));
        return true;
    });
}
/**
 * Serialisable pairing of a rail node position with all of its outgoing connections
 * (destination node position -> rail). Supports round-tripping through NBT (CompoundTag) and
 * MessagePack; the MessagePack form stores the connections as an array of maps, each carrying
 * the destination node position alongside the rail's own fields.
 */
private static class RailEntry extends SerializedDataBase {

    // Position of this rail node.
    public final BlockPos pos;
    // Outgoing connections keyed by the destination node's position.
    public final Map<BlockPos, Rail> connections;

    private static final String KEY_NODE_POS = "node_pos";
    private static final String KEY_RAIL_CONNECTIONS = "rail_connections";

    public RailEntry(BlockPos pos, Map<BlockPos, Rail> connections) {
        this.pos = pos;
        this.connections = connections;
    }

    /** Restores an entry from NBT; each child compound holds one connection. */
    public RailEntry(CompoundTag compoundTag) {
        pos = BlockPos.of(compoundTag.getLong(KEY_NODE_POS));
        connections = new HashMap<>();
        final CompoundTag tagConnections = compoundTag.getCompound(KEY_RAIL_CONNECTIONS);
        for (final String keyConnection : tagConnections.getAllKeys()) {
            final CompoundTag tagConnection = tagConnections.getCompound(keyConnection);
            connections.put(BlockPos.of(tagConnection.getLong(KEY_NODE_POS)), new Rail(tagConnection));
        }
    }

    /** Restores an entry from an unpacked MessagePack map (String keys). */
    public RailEntry(Map<String, Value> map) {
        pos = BlockPos.of(map.get(KEY_NODE_POS).asIntegerValue().asLong());
        final ArrayValue mapConnections = map.get(KEY_RAIL_CONNECTIONS).asArrayValue();
        connections = new HashMap<>(mapConnections.size());
        for (final Value connectionValue : mapConnections) {
            final Map<String, Value> connectionMap = RailwayData.castMessagePackValueToSKMap(connectionValue);
            connections.put(BlockPos.of(connectionMap.get(KEY_NODE_POS).asIntegerValue().asLong()), new Rail(connectionMap));
        }
    }

    @Override
    public void toMessagePack(MessagePacker messagePacker) throws IOException {
        messagePacker.packString(KEY_NODE_POS).packLong(pos.asLong());
        messagePacker.packString(KEY_RAIL_CONNECTIONS).packArrayHeader(connections.size());
        for (final Map.Entry<BlockPos, Rail> connectionEntry : connections.entrySet()) {
            final Rail rail = connectionEntry.getValue();
            // The extra +1 in the map header accounts for the KEY_NODE_POS entry written here
            // on top of the rail's own fields.
            messagePacker.packMapHeader(rail.messagePackLength() + 1);
            messagePacker.packString(KEY_NODE_POS).packLong(connectionEntry.getKey().asLong());
            rail.toMessagePack(messagePacker);
        }
    }

    @Override
    public int messagePackLength() {
        // Two top-level keys: node position and the connections array.
        return 2;
    }

    @Override
    public void writePacket(FriendlyByteBuf packet) {
        // Intentionally a no-op — presumably rail entries are synced through other packets;
        // NOTE(review): confirm against the callers before adding fields here.
    }
}
/**
 * Callback receiving the resolved context for a stop located by
 * {@code useRoutesAndStationsFromIndex}: the route containing the stop, the following route
 * (null for the last route), the station at the stop, the next station (null at the route's
 * final platform), and the station at the route's last platform. Any station may be null when
 * no station is mapped for the corresponding platform.
 */
@FunctionalInterface
public interface RouteAndStationsCallback {
void routeAndStationsCallback(Route thisRoute, Route nextRoute, Station thisStation, Station nextStation, Station lastStation);
}
}