gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ package com.gemstone.gemfire.internal.cache; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; import com.gemstone.gemfire.cache.CacheWriter; import com.gemstone.gemfire.cache.CacheWriterException; import com.gemstone.gemfire.cache.DataPolicy; import com.gemstone.gemfire.cache.EntryEvent; import com.gemstone.gemfire.cache.EntryNotFoundException; import com.gemstone.gemfire.cache.Operation; import com.gemstone.gemfire.cache.TimeoutException; import com.gemstone.gemfire.cache.query.internal.IndexUpdater; import com.gemstone.gemfire.distributed.internal.DM; import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember; import com.gemstone.gemfire.internal.ByteArrayDataInput; import com.gemstone.gemfire.internal.InternalStatisticsDisabledException; import com.gemstone.gemfire.internal.cache.delta.Delta; import com.gemstone.gemfire.internal.cache.locks.ExclusiveSharedLockObject; import com.gemstone.gemfire.internal.cache.locks.LockMode; import com.gemstone.gemfire.internal.cache.locks.LockingPolicy; import com.gemstone.gemfire.internal.cache.lru.LRUEntry; import com.gemstone.gemfire.internal.cache.tier.sockets.ClientProxyMembershipID; import com.gemstone.gemfire.internal.cache.versions.RegionVersionVector; import 
com.gemstone.gemfire.internal.cache.versions.VersionHolder; import com.gemstone.gemfire.internal.cache.versions.VersionSource; import com.gemstone.gemfire.internal.cache.versions.VersionStamp; import com.gemstone.gemfire.internal.cache.versions.VersionTag; import com.gemstone.gemfire.internal.i18n.LocalizedStrings; import com.gemstone.gemfire.internal.shared.Version; import com.gemstone.gemfire.internal.size.SingleObjectSizer; /** * Internal implementation of {@link RegionMap}for regions whose DataPolicy is * proxy. Proxy maps are always empty. * * @since 5.0 * * @author Darrel Schneider * */ public final class ProxyRegionMap implements RegionMap { /** An internal Listener for index maintenance for GemFireXD. */ private IndexUpdater indexUpdater; protected ProxyRegionMap(LocalRegion owner, Attributes attr, InternalRegionArguments internalRegionArgs) { this.owner = owner; this.attr = attr; if (internalRegionArgs != null) { this.indexUpdater = internalRegionArgs.getIndexUpdater(); } else { this.indexUpdater = null; } } @Override public final IndexUpdater getIndexUpdater() { return this.indexUpdater; } @Override public final void setIndexUpdater(IndexUpdater indexManager) { this.indexUpdater = indexManager; } /** * the region that owns this map */ private final LocalRegion owner; private final Attributes attr; public RegionEntryFactory getEntryFactory() { throw new UnsupportedOperationException(); } public Attributes getAttributes() { return this.attr; } public void setOwner(Object r) { throw new UnsupportedOperationException(); } public void changeOwner(LocalRegion r, InternalRegionArguments args) { throw new UnsupportedOperationException(); } public int size() { return 0; } public boolean isEmpty() { return true; } public Set keySet() { return Collections.EMPTY_SET; } public Collection<RegionEntry> regionEntries() { return Collections.emptySet(); } @Override public Collection<RegionEntry> regionEntriesInVM() { return Collections.emptySet(); } public boolean 
containsKey(Object key) { return false; } public RegionEntry getEntry(Object key) { return null; } public RegionEntry putEntryIfAbsent(Object key, RegionEntry re) { return null; } @SuppressWarnings({ "rawtypes", "unchecked" }) public Set<VersionSource> clear(RegionVersionVector rvv) { // nothing needs to be done RegionVersionVector v = this.owner.getVersionVector(); if (v != null) { return v.getDepartedMembersSet(); } else { return Collections.emptySet(); } } public void diskClear() { // nothing needs to be done } public RegionEntry initRecoveredEntry(Object key, DiskEntry.RecoveredEntry value) { throw new UnsupportedOperationException(); } public RegionEntry updateRecoveredEntry(Object key, RegionEntry re, DiskEntry.RecoveredEntry value) { throw new UnsupportedOperationException(); } /** * Used to modify an existing RegionEntry or create a new one when processing * the values obtained during a getInitialImage. */ public boolean initialImagePut(Object key, long lastModified, Object newValue, boolean wasRecovered, boolean deferLRUCallback, VersionTag version, InternalDistributedMember sender, boolean forceValue) { throw new UnsupportedOperationException(); } public boolean destroy(EntryEventImpl event, boolean inTokenMode, boolean duringRI, boolean cacheWrite, boolean isEviction, Object expectedOldValue, boolean removeRecoveredEntry) throws CacheWriterException, EntryNotFoundException, TimeoutException { if (event.getOperation().isLocal()) { throw new EntryNotFoundException(event.getKey().toString()); } if (cacheWrite) { this.owner.cacheWriteBeforeDestroy(event, expectedOldValue); } owner.recordEvent(event); this.owner.basicDestroyPart2(markerEntry, event, inTokenMode, false /*Clear conflict occured */, duringRI, true); this.owner.basicDestroyPart3(markerEntry, event, inTokenMode, duringRI, true, expectedOldValue); return true; } public boolean invalidate(EntryEventImpl event, boolean invokeCallbacks, boolean forceNewEntry, boolean forceCallback) throws 
EntryNotFoundException { if (event.getOperation().isLocal()) { throw new EntryNotFoundException(event.getKey().toString()); } this.owner.cacheWriteBeforeInvalidate(event, invokeCallbacks, forceNewEntry); this.owner.recordEvent(event); this.owner.basicInvalidatePart2(markerEntry, event, false /*Clear conflict occurred */, true); this.owner.basicInvalidatePart3(markerEntry, event, true); return true; } public void evictEntry(Object key) { // noop } public void evictValue(Object key) { // noop } /** * Used by basicPut to signal the caller that the put was successful. */ static final RegionEntry markerEntry = new ProxyRegionEntry(); public RegionEntry basicPut(EntryEventImpl event, long lastModified, boolean ifNew, boolean ifOld, Object expectedOldValue, boolean requireOldValue, boolean overwriteDestroyed) throws CacheWriterException, TimeoutException { if (!event.isOriginRemote() && event.getOperation() != Operation.REPLACE) { // bug 42167 - don't convert replace to CREATE event.makeCreate(); } final CacheWriter cacheWriter = this.owner.basicGetWriter(); final boolean cacheWrite = !event.isOriginRemote() && !event.isNetSearch() && !event.getInhibitDistribution() && event.isGenerateCallbacks() && (cacheWriter != null || this.owner.hasServerProxy() || this.owner.scope.isDistributed()); if (cacheWrite) { final Set netWriteRecipients; if (cacheWriter == null && this.owner.scope.isDistributed()) { CacheDistributionAdvisor cda = ((DistributedRegion)this.owner).getDistributionAdvisor(); netWriteRecipients = cda.adviseNetWrite(); } else { netWriteRecipients = null; } if (event.getOperation() != Operation.REPLACE) { // bug #42167 - makeCreate() causes REPLACE to eventually become UPDATE event.makeCreate(); } this.owner.cacheWriteBeforePut(event, netWriteRecipients, cacheWriter, requireOldValue, expectedOldValue); } owner.recordEvent(event); // Added to ensure that for DataPolicy Empty regions which have Gfxd delta, // should convert op = create to op = update if( 
event.hasDeltaPut()) { event.makeUpdate(); } lastModified = // fix for bug 40129 this.owner.basicPutPart2(event, markerEntry, true, lastModified, false /*Clear conflict occurred */); // invoke GemFireXD index manager if present final IndexUpdater indexUpdater = getIndexUpdater(); if (indexUpdater != null) { // postEvent not required to be invoked since this is currently used // only for FK checks try { indexUpdater.onEvent(this.owner, event, markerEntry); } finally { indexUpdater.postEventCleanup(event); } } this.owner.basicPutPart3(event, markerEntry, true, lastModified, true, ifNew, ifOld, expectedOldValue, requireOldValue); return markerEntry; } public void writeSyncIfPresent(Object key, Runnable runner) { // nothing needed } public void removeIfDestroyed(Object key) { // nothing needed } @Override public void removeIfDelta(Object next) { // nothing needed } @Override public boolean isListOfDeltas(Object key) { // TODO Auto-generated method stub return false; } @Override public void txApplyDestroy(RegionEntry re, TXStateInterface txState, Object key, boolean inTokenMode, boolean inRI, boolean localOp, EventID eventId, Object aCallbackArgument, List<EntryEventImpl> pendingCallbacks, FilterRoutingInfo filterRoutingInfo, ClientProxyMembershipID bridgeContext, VersionTag<?> versionTag, long tailKey, TXRegionState txr, EntryEventImpl cbEvent) { this.owner.txApplyDestroyPart2(markerEntry, key, inTokenMode, false /*Clear conflict occured */); if (!inTokenMode) { /* if (txEvent != null) { txEvent.addDestroy(this.owner, markerEntry, key,aCallbackArgument); } */ if (AbstractRegionMap.shouldCreateCBEvent(this.owner, false, !inTokenMode)) { // fix for bug 39526 cbEvent = AbstractRegionMap.createCBEvent(this.owner, localOp ? 
Operation.LOCAL_DESTROY : Operation.DESTROY, key, null, txState, eventId, aCallbackArgument, filterRoutingInfo, bridgeContext, versionTag, tailKey, cbEvent); boolean cbEventInPending = false; try { AbstractRegionMap.switchEventOwnerAndOriginRemote(cbEvent, !txState.isCoordinator()); if (pendingCallbacks == null) { this.owner.invokeTXCallbacks(EnumListenerEvent.AFTER_DESTROY, cbEvent, true /* callDispatchListenerEvent */, true /*notifyGateway*/); } else { pendingCallbacks.add(cbEvent); cbEventInPending = true; } } finally { if (!cbEventInPending) cbEvent.release(); } } } } @Override public void txApplyInvalidate(RegionEntry re, TXStateInterface txState, Object key, Object newValue, boolean didDestroy, boolean localOp, EventID eventId, Object aCallbackArgument, List<EntryEventImpl> pendingCallbacks, FilterRoutingInfo filterRoutingInfo, ClientProxyMembershipID bridgeContext, VersionTag<?> versionTag, long tailKey, TXRegionState txr, EntryEventImpl cbEvent) { this.owner.txApplyInvalidatePart2(markerEntry, key, didDestroy, true, false /*Clear conflic occured */); if (this.owner.isInitialized()) { /* if (txEvent != null) { txEvent.addInvalidate(this.owner, markerEntry, key, newValue,aCallbackArgument); } */ if (AbstractRegionMap.shouldCreateCBEvent(this.owner, true, this.owner.isInitialized())) { // fix for bug 39526 boolean cbEventInPending = false; cbEvent = AbstractRegionMap.createCBEvent(this.owner, localOp ? 
Operation.LOCAL_INVALIDATE : Operation.INVALIDATE, key, newValue, txState, eventId, aCallbackArgument, filterRoutingInfo, bridgeContext, versionTag, tailKey, cbEvent); try { AbstractRegionMap.switchEventOwnerAndOriginRemote(cbEvent, !txState.isCoordinator()); if (pendingCallbacks == null) { this.owner.invokeTXCallbacks(EnumListenerEvent.AFTER_INVALIDATE, cbEvent, true/* callDispatchListenerEvent */, true /*notifyGateway*/); } else { pendingCallbacks.add(cbEvent); cbEventInPending = true; } } finally { if (!cbEventInPending) cbEvent.release(); } } } } @Override public void txApplyPut(Operation putOp, RegionEntry re, TXStateInterface txState, Object key, Object newValue, boolean didDestroy, EventID eventId, Object aCallbackArgument, List<EntryEventImpl> pendingCallbacks, FilterRoutingInfo filterRoutingInfo, ClientProxyMembershipID bridgeContext, VersionTag<?> versionTag, long tailKey, TXRegionState txr, EntryEventImpl cbEvent, Delta delta) { putOp = putOp.getCorrespondingCreateOp(); final long lastMod = cbEvent.getEventTime(0L, this.owner); this.owner.txApplyPutPart2(markerEntry, key, newValue, lastMod, true, didDestroy, false /*Clear conflict occured */); if (this.owner.isInitialized()) { /* if (txEvent != null) { txEvent.addPut(putOp, this.owner, markerEntry, key, newValue,aCallbackArgument); } */ if (AbstractRegionMap.shouldCreateCBEvent(this.owner, false, this.owner.isInitialized())) { // fix for bug 39526 boolean cbEventInPending = false; cbEvent = AbstractRegionMap.createCBEvent(this.owner, putOp, key, newValue, txState, eventId, aCallbackArgument, filterRoutingInfo, bridgeContext, versionTag, tailKey, cbEvent); try { AbstractRegionMap.switchEventOwnerAndOriginRemote(cbEvent, !txState.isCoordinator()); if (pendingCallbacks == null) { this.owner.invokeTXCallbacks(EnumListenerEvent.AFTER_CREATE, cbEvent, true/* callDispatchListenerEvent */, true /*notifyGateway*/); } else { pendingCallbacks.add(cbEvent); cbEventInPending = true; } } finally { if 
(!cbEventInPending) cbEvent.release(); } } } } // LRUMapCallbacks methods public void lruUpdateCallback() { // nothing needed } public boolean disableLruUpdateCallback() { // nothing needed return false; } public void enableLruUpdateCallback() { // nothing needed } public final boolean lruLimitExceeded() { return false; } public void lruCloseStats() { // nothing needed } public void resetThreadLocals() { // nothing needed } public void removeEntry(Object key, RegionEntry entry, boolean updateStats) { // nothing to do } public void removeEntry(Object key, RegionEntry re, boolean updateStat, EntryEventImpl event, LocalRegion owner, IndexUpdater indexUpdater) { // nothing to do } /** * Provides a dummy implementation of RegionEntry so that basicPut can return * an instance that make the upper levels think it did the put. */ public static class ProxyRegionEntry implements RegionEntry { public long getLastModified() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public void _setLastModified(long lastModified) { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } public void setLastModified(long lastModified) { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } public boolean isLockedForCreate() { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } public long getLastAccessed() throws InternalStatisticsDisabledException { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public long getHitCount() throws 
InternalStatisticsDisabledException { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public long getMissCount() throws InternalStatisticsDisabledException { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public VersionStamp getVersionStamp() { return null; } public boolean isTombstone() { return false; } public VersionTag generateVersionTag(VersionSource member, boolean isRemoteVersionSource, boolean withDelta, LocalRegion region, EntryEventImpl event) { return null; // proxies don't do versioning } public void processVersionTag(EntryEvent ev) { return; } public void makeTombstone(LocalRegion r, VersionTag isOperationRemote) { return; } public void updateStatsForPut(long lastModifiedTime) { // do nothing; called by LocalRegion.updateStatsForPut } public void setRecentlyUsed() { // do nothing; called by LocalRegion.updateStatsForPut } public void updateStatsForGet(boolean hit, long time) { // do nothing; no entry stats } public void txDidDestroy(long currTime) { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public void resetCounts() throws InternalStatisticsDisabledException { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public void removePhase1(LocalRegion r, boolean isClear) { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public void removePhase2() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public 
boolean isRemoved() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public boolean isRemovedOrDestroyed() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public boolean isRemovedPhase2() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public boolean fillInValue(LocalRegion r, InitialImageOperation.Entry entry, ByteArrayDataInput in, DM mgr, Version targetVersion) { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public boolean isOverflowedToDisk(LocalRegion r, DistributedRegion.DiskPosition dp) { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public Object getKey() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public Object getKeyCopy() { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } public Object getRawKey() { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } public Object getValue(RegionEntryContext context) { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public void setValue(RegionEntryContext context, Object value) { throw new 
UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } @Override public Object prepareValueForCache(RegionEntryContext r, Object val, boolean isEntryUpdate, boolean valHasMetadataForGfxdOffHeapUpdate) { throw new IllegalStateException("Should never be called"); } // @Override // public void _setValue(Object value) { // throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); // } @Override public Object _getValue() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } @Override public Token getValueAsToken() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public void setOwner(LocalRegion owner) { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } @Override public Object _getValueRetain(RegionEntryContext context, boolean decompress) { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } @Override public Object getTransformedValue() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } @Override public Object getValueInVM(RegionEntryContext context) { return null; // called by TXRmtEvent.createEvent } public Object getValueOnDisk(LocalRegion r) throws EntryNotFoundException { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public Object 
getValueOnDiskOrBuffer(LocalRegion r) throws EntryNotFoundException { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } /* (non-Javadoc) * @see com.gemstone.gemfire.internal.cache.RegionEntry#getSerializedValueOnDisk(com.gemstone.gemfire.internal.cache.LocalRegion) */ public Object getSerializedValueOnDisk(LocalRegion localRegion) { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public boolean initialImagePut(LocalRegion region, long lastModified, Object newValue, boolean wasRecovered, boolean versionTagAccepted) { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public boolean initialImageInit(LocalRegion region, long lastModified, Object newValue, boolean create, boolean wasRecovered, boolean versionTagAccepted) { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public boolean destroy(LocalRegion region, EntryEventImpl event, boolean inTokenMode, boolean cacheWrite, Object expectedOldValue, boolean forceDestroy, boolean removeRecoveredEntry) throws CacheWriterException, EntryNotFoundException, TimeoutException { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } public boolean dispatchListenerEvents(EntryEventImpl event) throws InterruptedException { // note that we don't synchronize on the RE before dispatching // events event.invokeCallbacks(event.getRegion(), event.inhibitCacheListenerNotification(), false); return true; } public boolean hasStats() { return false; } public Object getValueInVMOrDiskWithoutFaultIn(LocalRegion 
owner) { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } @Override public Object getValueOffHeapOrDiskWithoutFaultIn(LocalRegion owner) { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } /** * @see ExclusiveSharedLockObject#getOwnerId(Object) */ public Object getOwnerId(Object context) { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } /** * @see ExclusiveSharedLockObject#attemptLock */ public boolean attemptLock(LockMode mode, int flags, LockingPolicy lockPolicy, long msecs, Object owner, Object context) { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } /** * @see ExclusiveSharedLockObject#releaseLock */ public void releaseLock(LockMode mode, boolean releaseAll, Object owner, Object context) { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } /** * @see ExclusiveSharedLockObject#numSharedLocks() */ public int numSharedLocks() { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } /** * @see ExclusiveSharedLockObject#numReadOnlyLocks() */ public int numReadOnlyLocks() { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } /** * @see ExclusiveSharedLockObject#hasExclusiveLock(Object, Object) */ public boolean hasExclusiveLock(Object owner, Object context) { throw new UnsupportedOperationException(LocalizedStrings 
.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } /** * @see ExclusiveSharedLockObject#hasExclusiveSharedLock(Object, Object) */ public boolean hasExclusiveSharedLock(Object owner, Object context) { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } /** * @see ExclusiveSharedLockObject#getState() */ public int getState() { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } /** * {@inheritDoc} */ @Override public boolean hasAnyLock() { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } public Object getContainerInfo() { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } public Object setContainerInfo(LocalRegion owner, Object val) { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } @Override public boolean isUpdateInProgress() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } @Override public void setUpdateInProgress(boolean underUpdate) { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } @Override public boolean isMarkedForEviction() { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } @Override public void setMarkedForEviction() { throw new 
UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } @Override public void clearMarkedForEviction() { throw new UnsupportedOperationException(LocalizedStrings .ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0 .toLocalizedString(DataPolicy.EMPTY)); } @Override public boolean isValueNull() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } @Override public boolean isInvalid() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } @Override public boolean isDestroyed() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } @Override public void setValueToNull() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } @Override public boolean isInvalidOrRemoved() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } @Override public boolean isDestroyedOrRemoved() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } @Override public boolean isDestroyedOrRemovedButNotTombstone() { throw new UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } @Override public void returnToPool() { // TODO Auto-generated method stub } @Override public void setValueWithTombstoneCheck(Object value, EntryEvent event) throws RegionClearedException { throw new 
UnsupportedOperationException(LocalizedStrings.ProxyRegionMap_NO_ENTRY_SUPPORT_ON_REGIONS_WITH_DATAPOLICY_0.toLocalizedString(DataPolicy.EMPTY)); } @Override public boolean isCacheListenerInvocationInProgress() { // TODO Auto-generated method stub return false; } @Override public void setCacheListenerInvocationInProgress(boolean isListenerInvoked) { // TODO Auto-generated method stub } } public void lruUpdateCallback(int n) { //do nothing } public void lruEntryFaultIn(LRUEntry entry) { //do nothing. } public void copyRecoveredEntries(RegionMap rm, boolean entriesIncompatible) { throw new IllegalStateException("copyRecoveredEntries should never be called on proxy"); } @Override public long estimateMemoryOverhead(SingleObjectSizer sizer) { return sizer.sizeof(this) ; } public boolean removeTombstone(RegionEntry re, VersionHolder destroyedVersion, boolean isEviction, boolean isScheduledTombstone) { throw new IllegalStateException("removeTombstone should never be called on a proxy"); } public boolean isTombstoneNotNeeded(RegionEntry re, int destroyedVersion) { throw new IllegalStateException("removeTombstone should never be called on a proxy"); } /* (non-Javadoc) * @see com.gemstone.gemfire.internal.cache.RegionMap#unscheduleTombstone(com.gemstone.gemfire.internal.cache.RegionEntry) */ public void unscheduleTombstone(RegionEntry re) { } public void setEntryFactory(RegionEntryFactory f) { throw new IllegalStateException("Should not be called on a ProxyRegionMap"); } @Override public void updateEntryVersion(EntryEventImpl event) { // Do nothing. Not applicable for clients. } @Override public RegionEntry getEntryInVM(Object key) { return null; } @Override public RegionEntry getOperationalEntryInVM(Object key) { return null; } @Override public int sizeInVM() { return 0; } @Override public Map<?, ?> getTestSuspectMap() { return null; } @Override public void close() { } }
/* * Copyright (c) 2017 Michael Krotscheck * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. * */ package net.krotscheck.kangaroo.authz.common.util; import net.krotscheck.kangaroo.authz.common.authenticator.AuthenticatorType; import net.krotscheck.kangaroo.authz.common.database.entity.AbstractClientUri; import net.krotscheck.kangaroo.authz.common.database.entity.ApplicationScope; import net.krotscheck.kangaroo.authz.common.database.entity.Authenticator; import net.krotscheck.kangaroo.authz.common.database.entity.Client; import net.krotscheck.kangaroo.authz.common.database.entity.ClientRedirect; import net.krotscheck.kangaroo.authz.common.database.entity.ClientType; import net.krotscheck.kangaroo.authz.common.database.entity.Role; import net.krotscheck.kangaroo.authz.oauth2.exception.RFC6749.InvalidRequestException; import net.krotscheck.kangaroo.authz.oauth2.exception.RFC6749.InvalidScopeException; import net.krotscheck.kangaroo.authz.oauth2.exception.RFC6749.UnsupportedResponseTypeException; import org.apache.commons.lang3.StringUtils; import org.apache.http.NameValuePair; import org.apache.http.client.utils.URLEncodedUtils; import javax.ws.rs.core.MultivaluedHashMap; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.UriBuilder; import java.net.URI; import java.net.URLDecoder; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map.Entry; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import 
java.util.stream.Collectors; /** * A utility filled with validation tools. * * @author Michael Krotscheck */ public final class ValidationUtil { /** * Utility class, private constructor. */ private ValidationUtil() { } /** * Validate that a response type is appropriate for a given client. * * @param client The client to check. * @param responseType The requested response type. */ public static void validateResponseType(final Client client, final String responseType) { if (client != null) { if (ClientType.Implicit.equals(client.getType()) && "token".equals(responseType)) { return; } else if (ClientType.AuthorizationGrant.equals(client.getType()) && "code".equals(responseType)) { return; } } throw new UnsupportedResponseTypeException(); } /** * Require that the provided string matches the set of redirection URL's. * * @param redirect The URI to check. * @param redirects A set of redirect url's to check against. * @return The validated redirect URI, or null. */ public static URI requireValidRedirect( final URI redirect, final List<ClientRedirect> redirects) { URI validRedirect = validateRedirect(redirect, redirects); if (validRedirect == null) { throw new InvalidRequestException(); } return validRedirect; } /** * Require that the provided string matches the set of redirection URL's. * * @param redirect The URI to check. * @param redirects A set of redirect url's to check against. * @return The validated redirect URI, or null. */ public static URI requireValidRedirect( final String redirect, final List<ClientRedirect> redirects) { URI validRedirect = validateRedirect(redirect, redirects); if (validRedirect == null) { throw new InvalidRequestException(); } return validRedirect; } /** * Require that the provided string matches the set of redirection URL's. * * @param redirect The URI to check. * @param redirects A set of redirect url's to check against. * @return The validated redirect URI, or null. 
*/ public static URI requireValidRedirect(final String redirect, final Set<URI> redirects) { URI validRedirect = validateRedirect(redirect, redirects); if (validRedirect == null) { throw new InvalidRequestException(); } return validRedirect; } /** * This method assists in determining if a particular URI is valid for * the scope of this client. * * @param redirect The URI to check. * @param redirects A set of redirect url's to check against. * @return The validated redirect URI, or null. */ public static URI validateRedirect(final URI redirect, final List<ClientRedirect> redirects) { if (redirect == null) { return validateRedirect((String) null, redirects); } return validateRedirect(redirect.toString(), redirects); } /** * This method assists in determining if a particular URI is valid for * the scope of this client. * * @param redirect The URI to check. * @param redirects A set of redirect url's to check against. * @return The validated redirect URI, or null. */ public static URI validateRedirect(final String redirect, final List<ClientRedirect> redirects) { Set<URI> redirectUris = redirects.stream() .map(AbstractClientUri::getUri) .collect(Collectors.toSet()); return validateRedirect(redirect, redirectUris); } /** * This method assists in determining if a particular URI is valid for * the scope of this client. * * @param redirect The URI to check. * @param redirects A set of redirect url's to check against. * @return The validated redirect URI, or null. */ public static URI validateRedirect(final URI redirect, final Set<URI> redirects) { if (redirect == null) { return validateRedirect((String) null, redirects); } return validateRedirect(redirect.toString(), redirects); } /** * This method assists in determining if a particular URI is valid for * the scope of this client. * * @param redirect The URI to check. * @param redirects A set of redirect url's to check against. * @return The validated redirect URI, or null. 
*/ public static URI validateRedirect(final String redirect, final Set<URI> redirects) { // Quick exit if (redirects.size() == 0) { return null; } // Can we default? if (StringUtils.isEmpty(redirect)) { if (redirects.size() == 1) { URI[] redirectArray = redirects.toArray(new URI[redirects.size()]); return redirectArray[0]; } else { return null; } } // Make sure the passed string is valid. URI redirectUri; try { redirectUri = UriBuilder.fromUri(redirect).build(); } catch (Exception e) { return null; } // Convert the query parameters into a multivaluedMap MultivaluedMap<String, String> params = extractParams(redirectUri); Set<String> keySet = new HashSet<>(params.keySet()); params.keySet().retainAll(keySet); uriloop: for (URI test : redirects) { // Test the scheme if (!test.getScheme().equals(redirectUri.getScheme())) { continue; } // Test the host if (!test.getHost().equals(redirectUri.getHost())) { continue; } // Test the port if (test.getPort() != redirectUri.getPort()) { continue; } // Test the path if (!test.getPath().equals(redirectUri.getPath())) { continue; } MultivaluedMap<String, String> testParams = extractParams(test); keySet.addAll(testParams.keySet()); // This modifies 'params'. // All variables in testParams must exist in params to pass. for (Entry<String, List<String>> entry : testParams.entrySet()) { if (!params.get(entry.getKey()).containsAll(entry.getValue())) { continue uriloop; } } return redirectUri; // NOPMD } return null; } /** * Convert a URI and its query parameters into a MultivaluedMap, for * later comparison. * * @param redirectUri The URI to parse. * @return A map of all results. */ private static MultivaluedMap<String, String> extractParams( final URI redirectUri) { MultivaluedMap<String, String> results = new MultivaluedHashMap<>(); for (NameValuePair pair : URLEncodedUtils.parse(redirectUri, "UTF-8")) { results.add(pair.getName(), pair.getValue()); } return results; } /** * Creates a collection of scopes from a list of valid scopes. 
If the * requested scopes are not in that valid list, it will throw an exception. * * @param requestedScopes An array of requested scopes. * @param validScopes A list of valid scopes. * @return A list of the requested scopes, as database instances. */ public static SortedMap<String, ApplicationScope> validateScope( final String[] requestedScopes, final SortedMap<String, ApplicationScope> validScopes) { if (requestedScopes == null || requestedScopes.length == 0) { return new TreeMap<>(); } if (validScopes == null) { throw new InvalidScopeException(); } // Make sure all requested scopes are in the map. SortedMap<String, ApplicationScope> results = new TreeMap<>(); for (String scope : requestedScopes) { if (validScopes.containsKey(scope)) { results.put(scope, validScopes.get(scope)); } } return results; } /** * Creates a collection of scopes from a list of valid scopes. If the * requested scopes are not in that valid list, it will throw an exception. * * @param requestedScopes An array of requested scopes. * @param validScopes A string of valid scopes. * @return A list of the requested scopes, as database instances. */ public static SortedMap<String, ApplicationScope> validateScope( final String requestedScopes, final SortedMap<String, ApplicationScope> validScopes) { if (StringUtils.isEmpty(requestedScopes)) { return new TreeMap<>(); } String decodedScopes = URLDecoder.decode(requestedScopes); return validateScope(decodedScopes.split(" "), validScopes); } /** * Ensure that a requested list of scopes is permitted for a specific role. * * @param requestedScopes The requested client scopes. * @param role The role. * @return A valid map of scopes. */ public static SortedMap<String, ApplicationScope> validateScope( final SortedMap<String, ApplicationScope> requestedScopes, final Role role) { if (role == null) { throw new InvalidScopeException(); } // All requested client scopes MUST exit in the user role. 
Collection<ApplicationScope> roleScope = role.getScopes().values(); for (ApplicationScope s : requestedScopes.values()) { if (!roleScope.contains(s)) { throw new InvalidScopeException(); } } return requestedScopes; } /** * Validate a list of scopes against a role. * * @param requestedScopes An array of requested scopes. * @param role The role. * @return A list of the requested scopes, as database instances. */ public static SortedMap<String, ApplicationScope> validateScope( final String requestedScopes, final Role role) { if (role == null) { throw new InvalidScopeException(); } return validateScope(requestedScopes, role.getScopes()); } /** * Revalidates a list of provided scopes against the originally granted * scopes, as well as the current list of valid scopes. If the list of * valid scopes has changed since the original token list, any missing * scopes will be quietly dropped. * * @param requestedScopes An array of requested scopes. * @param originalScopes The original set of scopes. * @param validScopes The current list of valid scopes. * @return A list of the requested scopes, as database instances. */ public static SortedMap<String, ApplicationScope> revalidateScope( final String[] requestedScopes, final SortedMap<String, ApplicationScope> originalScopes, final SortedMap<String, ApplicationScope> validScopes) { if (validScopes == null || originalScopes == null) { throw new InvalidScopeException(); } if (requestedScopes == null || requestedScopes.length == 0) { return new TreeMap<>(); } // Reduce the valid scope list down by the original scopes. SortedMap<String, ApplicationScope> results = new TreeMap<>(); for (String scope : requestedScopes) { if (!originalScopes.containsKey(scope)) { throw new InvalidScopeException(); } else if (validScopes.containsKey(scope)) { results.put(scope, validScopes.get(scope)); } } return results; } /** * Revalidates a list of provided scopes against the originally granted * scopes, as well as the current list of valid scopes. 
If the list of * valid scopes has changed since the original token list, any missing * scopes will be quietly dropped. * * @param requestedScopes An array of requested scopes. * @param originalScopes The original set of scopes. * @param validScopes The current list of valid scopes. * @return A list of the requested scopes, as database instances. */ public static SortedMap<String, ApplicationScope> revalidateScope( final String requestedScopes, final SortedMap<String, ApplicationScope> originalScopes, final SortedMap<String, ApplicationScope> validScopes) { if (StringUtils.isEmpty(requestedScopes)) { return new TreeMap<>(); } return revalidateScope(requestedScopes.split(" "), originalScopes, validScopes); } /** * Revalidate a list of scopes against an originally granted list, as * well as the current list of valid scopes from a user's role. * * @param requestedScopes A space-separated list of scopes. * @param originalScopes The original set of scopes. * @param role The user's role to check for a scope list. * @return A list of the requested scopes, as database instances. */ public static SortedMap<String, ApplicationScope> revalidateScope( final String requestedScopes, final SortedMap<String, ApplicationScope> originalScopes, final Role role) { if (role == null) { throw new InvalidScopeException(); } // Convert the role scope list into a sorted map. return revalidateScope(requestedScopes, originalScopes, role.getScopes()); } /** * Ensure that an authenticator, requested by name, is valid within a * specific list of authenticators. If no string is provided, and yet the * list of authenticators only contains one, this will default to that * authenticator. * * @param type The requested authenticator type. * @param authenticators The list of authenticators to test against. * @return The valid authenticator. 
*/ public static Authenticator validateAuthenticator( final AuthenticatorType type, final List<Authenticator> authenticators) { // Quick exit if (authenticators.size() == 0) { throw new InvalidRequestException(); } // Can we default? if (type == null) { if (authenticators.size() == 1) { return authenticators.get(0); } else { throw new InvalidRequestException(); } } // Iterate through the set, comparing as we go. for (Authenticator test : authenticators) { if (test.getType().equals(type)) { return test; } } throw new InvalidRequestException(); } }
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.devtools.build.lib.rules.objc;

import static com.google.devtools.build.lib.rules.objc.ObjcProvider.ASSET_CATALOG;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.BUNDLE_FILE;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.STRINGS;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.XCASSETS_DIR;

import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.base.Verify;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.FilesToRunProvider;
import com.google.devtools.build.lib.analysis.RuleConfiguredTarget.Mode;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.actions.BinaryFileWriteAction;
import com.google.devtools.build.lib.analysis.actions.CommandLine;
import com.google.devtools.build.lib.analysis.actions.CustomCommandLine;
import com.google.devtools.build.lib.analysis.actions.SpawnAction;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.rules.apple.AppleConfiguration;
import com.google.devtools.build.lib.rules.apple.AppleToolchain;
import com.google.devtools.build.lib.rules.apple.Platform;
import com.google.devtools.build.lib.rules.apple.Platform.PlatformType;
import com.google.devtools.build.lib.rules.objc.XcodeProvider.Builder;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.PathFragment;

import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;

/**
 * Support for generating iOS bundles which contain metadata (a plist file), assets, resources and
 * optionally a binary: registers actions that assemble resources and merge plists, provides data
 * to providers and validates bundle-related attributes.
 *
 * <p>Methods on this class can be called in any order without impacting the result.
 */
final class BundleSupport {

  /**
   * Iterable wrapper used to strongly type arguments eventually passed to {@code actool}.
   */
  static final class ExtraActoolArgs extends IterableWrapper<String> {
    ExtraActoolArgs(Iterable<String> args) {
      super(args);
    }

    ExtraActoolArgs(String... args) {
      super(args);
    }
  }

  // Rule this bundle is being built for; used for action registration and error reporting.
  private final RuleContext ruleContext;
  // Additional flags appended to every actool invocation (may be empty).
  private final ExtraActoolArgs extraActoolArgs;
  // Immutable description of the bundle being assembled (plists, resources, output artifacts).
  private final Bundling bundling;
  // Typed accessors for this rule's tool attributes (plmerge, ibtool, momc, actool wrappers).
  private final Attributes attributes;

  /**
   * Creates a new bundle support with no special {@code actool} arguments.
   *
   * @param ruleContext context this bundle is constructed in
   * @param bundling bundle information as configured for this rule
   */
  public BundleSupport(RuleContext ruleContext, Bundling bundling) {
    this(ruleContext, bundling, new ExtraActoolArgs());
  }

  /**
   * Creates a new bundle support.
   *
   * @param ruleContext context this bundle is constructed in
   * @param bundling bundle information as configured for this rule
   * @param extraActoolArgs any additional parameters to be used for invoking {@code actool}
   */
  public BundleSupport(RuleContext ruleContext,
      Bundling bundling, ExtraActoolArgs extraActoolArgs) {
    this.ruleContext = ruleContext;
    this.extraActoolArgs = extraActoolArgs;
    this.bundling = bundling;
    this.attributes = new Attributes(ruleContext);
  }

  /**
   * Registers actions required for constructing this bundle, namely merging all involved {@code
   * Info.plist} files and generating asset catalogues.
   *
   * @param objcProvider source of information from this rule's attributes and its dependencies
   * @return this bundle support
   */
  BundleSupport registerActions(ObjcProvider objcProvider) {
    registerConvertStringsActions(objcProvider);
    registerConvertXibsActions(objcProvider);
    registerMomczipActions(objcProvider);
    registerInterfaceBuilderActions(objcProvider);
    registerActoolActionIfNecessary(objcProvider);

    if (bundling.needsToMergeInfoplist()) {
      NestedSet<Artifact> mergingContentArtifacts = bundling.getMergingContentArtifacts();
      // .get() is safe here: needsToMergeInfoplist() implies a bundle infoplist is present.
      Artifact mergedPlist = bundling.getBundleInfoplist().get();
      registerMergeInfoplistAction(
          mergingContentArtifacts, PlMergeControlBytes.fromBundling(bundling, mergedPlist));
    }

    return this;
  }

  /**
   * Adds any Xcode settings related to this bundle to the given provider builder.
   *
   * @return this bundle support
   */
  BundleSupport addXcodeSettings(Builder xcodeProviderBuilder) {
    if (bundling.getBundleInfoplist().isPresent()) {
      xcodeProviderBuilder.setBundleInfoplist(bundling.getBundleInfoplist().get());
    }
    return this;
  }

  // Reports a rule error when --ios_multi_cpus mixes simulator and device architectures.
  // Note: reporting continues through the loop rather than aborting, so the error may be
  // emitted once per offending architecture.
  private void validatePlatform() {
    AppleConfiguration appleConfiguration = ruleContext.getFragment(AppleConfiguration.class);
    Platform platform = null;
    for (String architecture : appleConfiguration.getIosMultiCpus()) {
      if (platform == null) {
        platform = Platform.forTarget(PlatformType.IOS, architecture);
      } else if (platform != Platform.forTarget(PlatformType.IOS, architecture)) {
        ruleContext.ruleError(
            String.format("In builds which require bundling, --ios_multi_cpus does not currently "
                + "allow values for both simulator and device builds. Flag was %s",
                appleConfiguration.getIosMultiCpus()));
      }
    }
  }

  // Checks that no two resources land on the same bundle path unless they are the
  // multi-architecture duplicates described in the comment below.
  private void validateResources(ObjcProvider objcProvider) {
    Map<String, Artifact> bundlePathToFile = new HashMap<>();
    NestedSet<Artifact> artifacts = objcProvider.get(STRINGS);

    Iterable<BundleableFile> bundleFiles =
        Iterables.concat(
            objcProvider.get(BUNDLE_FILE), BundleableFile.flattenedRawResourceFiles(artifacts));
    for (BundleableFile bundleFile : bundleFiles) {
      String bundlePath = bundleFile.getBundlePath();
      Artifact bundled = bundleFile.getBundled();

      // Normally any two resources mapped to the same path in the bundle are illegal. However, we
      // currently don't have a good solution for resources generated by a genrule in
      // multi-architecture builds: They map to the same bundle path but have different owners (the
      // genrules targets in the various configurations) and roots (one for each architecture).
      // Since we know that architecture shouldn't matter for strings file generation we silently
      // ignore cases like this and pick one of the outputs at random to put in the bundle (see also
      // related filtering code in Bundling.Builder.build()).
      if (bundlePathToFile.containsKey(bundlePath)) {
        Artifact original = bundlePathToFile.get(bundlePath);
        if (!Objects.equals(original.getOwner(), bundled.getOwner())) {
          ruleContext.ruleError(
              String.format(
                  "Two files map to the same path [%s] in this bundle but come from different "
                      + "locations: %s and %s",
                  bundlePath, original.getOwner(), bundled.getOwner()));
        } else {
          // NOTE(review): the second message argument is 'bundleFile' (a BundleableFile), not
          // 'bundled' (the Artifact) — the surrounding slots all pass Artifacts, so this looks
          // like a typo to confirm. Code left byte-identical here.
          Verify.verify(
              !original.getRoot().equals(bundled.getRoot()),
              "%s and %s should have different roots but have %s and %s",
              original,
              bundleFile,
              original.getRoot(),
              bundled.getRoot());
        }

      } else {
        bundlePathToFile.put(bundlePath, bundled);
      }
    }

    // TODO(bazel-team): Do the same validation for storyboards and datamodels which could also be
    // generated by genrules or doubly defined.
  }

  /**
   * Validates bundle support.
   * <ul>
   * <li>Validates that resources defined in this rule and its dependencies and written to this
   *     bundle are legal (for example that they are not mapped to the same bundle location)
   * <li>Validates the platform for this build is either simulator or device, and does not
   *     contain architectures for both platforms
   * </ul>
   *
   * @return this bundle support
   */
  BundleSupport validate(ObjcProvider objcProvider) {
    validatePlatform();
    validateResources(objcProvider);

    return this;
  }

  /**
   * Returns a set containing the {@link TargetDeviceFamily} values which this bundle is targeting.
   * Returns an empty set for any invalid value of the target device families attribute.
   */
  ImmutableSet<TargetDeviceFamily> targetDeviceFamilies() {
    return bundling.getTargetDeviceFamilies();
  }

  /**
   * Returns true if this bundle is targeted to {@link TargetDeviceFamily#WATCH}, false otherwise.
   */
  boolean isBuildingForWatch() {
    // Case-insensitive comparison against the rule-attribute spelling of "watch".
    return Iterables.any(targetDeviceFamilies(), new Predicate<TargetDeviceFamily>() {
      @Override
      public boolean apply(TargetDeviceFamily targetDeviceFamily) {
        return targetDeviceFamily.name().equalsIgnoreCase(
            TargetDeviceFamily.WATCH.getNameInRule());
      }
    });
  }

  /**
   * Returns a set containing the {@link TargetDeviceFamily} values the resources in this bundle
   * are targeting. When watch is included as one of the families, (for example [iphone, watch] for
   * simulator builds, assets should always be compiled for {@link TargetDeviceFamily#WATCH}.
   */
  private ImmutableSet<TargetDeviceFamily> targetDeviceFamiliesForResources() {
    if (isBuildingForWatch()) {
      return ImmutableSet.of(TargetDeviceFamily.WATCH);
    } else {
      return targetDeviceFamilies();
    }
  }

  // Registers one ibtool invocation per storyboard, producing a zipped compiled storyboard.
  private void registerInterfaceBuilderActions(ObjcProvider objcProvider) {
    for (Artifact storyboardInput : objcProvider.get(ObjcProvider.STORYBOARD)) {
      String archiveRoot = storyboardArchiveRoot(storyboardInput);
      Artifact zipOutput = bundling.getIntermediateArtifacts()
          .compiledStoryboardZip(storyboardInput);

      ruleContext.registerAction(
          ObjcRuleClasses.spawnAppleEnvActionBuilder(ruleContext)
              .setMnemonic("StoryboardCompile")
              .setExecutable(attributes.ibtoolWrapper())
              .setCommandLine(ibActionsCommandLine(archiveRoot, zipOutput, storyboardInput))
              .addOutput(zipOutput)
              .addInput(storyboardInput)
              .build(ruleContext));
    }
  }

  /**
   * Returns the root file path to which storyboard interfaces are compiled.
   */
  protected String storyboardArchiveRoot(Artifact storyboardInput) {
    // When storyboards are compiled for {@link TargetDeviceFamily#WATCH}, return the containing
    // directory if it ends with .lproj to account for localization or "." representing the bundle
    // root otherwise. Examples: Payload/Foo.app/Base.lproj/<compiled_file>,
    // Payload/Foo.app/<compile_file_1>
    if (isBuildingForWatch()) {
      String containingDir = storyboardInput.getExecPath().getParentDirectory().getBaseName();
      return containingDir.endsWith(".lproj") ? (containingDir + "/") : ".";
    } else {
      // Non-watch: foo.storyboard compiles to foo.storyboardc inside the bundle.
      return BundleableFile.flatBundlePath(storyboardInput.getExecPath()) + "c";
    }
  }

  // Builds the ibtool wrapper command line shared by storyboard and xib compilation.
  private CommandLine ibActionsCommandLine(String archiveRoot, Artifact zipOutput,
      Artifact storyboardInput) {
    CustomCommandLine.Builder commandLine = CustomCommandLine.builder()
        // The zip output and archive root are positional, i.e. they don't have flags before them.
        .addPath(zipOutput.getExecPath())
        .add(archiveRoot)
        .add("--minimum-deployment-target")
        .add(bundling.getMinimumOsVersion().toString())
        .add("--module")
        .add(ruleContext.getLabel().getName());

    for (TargetDeviceFamily targetDeviceFamily : targetDeviceFamiliesForResources()) {
      commandLine.add("--target-device").add(targetDeviceFamily.name().toLowerCase(Locale.US));
    }

    return commandLine
        .addPath(storyboardInput.getExecPath())
        .build();
  }

  // Registers one momc invocation per xcdatamodel, producing a zipped compiled model.
  private void registerMomczipActions(ObjcProvider objcProvider) {
    AppleConfiguration appleConfiguration = ruleContext.getFragment(AppleConfiguration.class);
    Iterable<Xcdatamodel> xcdatamodels = Xcdatamodels.xcdatamodels(
        bundling.getIntermediateArtifacts(), objcProvider.get(ObjcProvider.XCDATAMODEL));
    for (Xcdatamodel datamodel : xcdatamodels) {
      Artifact outputZip = datamodel.getOutputZip();
      ruleContext.registerAction(
          ObjcRuleClasses.spawnAppleEnvActionBuilder(ruleContext)
              .setMnemonic("MomCompile")
              .setExecutable(attributes.momcWrapper())
              .addOutput(outputZip)
              .addInputs(datamodel.getInputs())
              .setCommandLine(CustomCommandLine.builder()
                  .addPath(outputZip.getExecPath())
                  .add(datamodel.archiveRootForMomczip())
                  .add("-XD_MOMC_SDKROOT=" + AppleToolchain.sdkDir())
                  .add("-XD_MOMC_IOS_TARGET_VERSION=" + bundling.getMinimumOsVersion())
                  .add("-MOMC_PLATFORMS")
                  .add(appleConfiguration.getMultiArchPlatform(PlatformType.IOS)
                      .getLowerCaseNameInPlist())
                  .add("-XD_MOMC_TARGET_VERSION=10.6")
                  .add(datamodel.getContainer().getSafePathString())
                  .build())
              .build(ruleContext));
    }
  }

  // Registers one ibtool invocation per xib, compiling it to a zipped .nib.
  private void registerConvertXibsActions(ObjcProvider objcProvider) {
    for (Artifact original : objcProvider.get(ObjcProvider.XIB)) {
      Artifact zipOutput = bundling.getIntermediateArtifacts().compiledXibFileZip(original);
      String archiveRoot = BundleableFile.flatBundlePath(
          FileSystemUtils.replaceExtension(original.getExecPath(), ".nib"));
      ruleContext.registerAction(
          ObjcRuleClasses.spawnAppleEnvActionBuilder(ruleContext)
              .setMnemonic("XibCompile")
              .setExecutable(attributes.ibtoolWrapper())
              .setCommandLine(ibActionsCommandLine(archiveRoot, zipOutput, original))
              .addOutput(zipOutput)
              .addInput(original)
              .build(ruleContext));
    }
  }

  // Registers one plutil invocation per .strings file, converting it to binary plist form.
  private void registerConvertStringsActions(ObjcProvider objcProvider) {
    for (Artifact strings : objcProvider.get(ObjcProvider.STRINGS)) {
      Artifact bundled = bundling.getIntermediateArtifacts().convertedStringsFile(strings);
      // NOTE(review): the executable is /usr/bin/plutil, yet the xcrunwrapper executable is
      // added as an input below — presumably a staleness/dependency workaround; confirm.
      ruleContext.registerAction(ObjcRuleClasses.spawnAppleEnvActionBuilder(ruleContext)
          .setMnemonic("ConvertStringsPlist")
          .setExecutable(new PathFragment("/usr/bin/plutil"))
          .setCommandLine(CustomCommandLine.builder()
              .add("-convert").add("binary1")
              .addExecPath("-o", bundled)
              .add("--")
              .addPath(strings.getExecPath())
              .build())
          .addInput(strings)
          .addInput(CompilationSupport.xcrunwrapper(ruleContext).getExecutable())
          .addOutput(bundled)
          .build(ruleContext));
    }
  }

  /**
   * Creates action to merge multiple Info.plist files of a bundle into a single Info.plist. The
   * merge action is necessary if there are more than one input plist files or we have a bundle ID
   * to stamp on the merged plist.
   */
  private void registerMergeInfoplistAction(
      NestedSet<Artifact> mergingContentArtifacts, PlMergeControlBytes controlBytes) {
    if (!bundling.needsToMergeInfoplist()) {
      return; // Nothing to do here.
    }

    // Write the plmerge control protobuf, then feed it to the plmerge tool.
    Artifact plMergeControlArtifact = baseNameArtifact(ruleContext, ".plmerge-control");

    ruleContext.registerAction(
        new BinaryFileWriteAction(
            ruleContext.getActionOwner(),
            plMergeControlArtifact,
            controlBytes,
            /*makeExecutable=*/ false));

    ruleContext.registerAction(
        new SpawnAction.Builder()
            .setMnemonic("MergeInfoPlistFiles")
            .setExecutable(attributes.plmerge())
            .addArgument("--control")
            .addInputArgument(plMergeControlArtifact)
            .addTransitiveInputs(mergingContentArtifacts)
            .addOutput(bundling.getIntermediateArtifacts().mergedInfoplist())
            .build(ruleContext));
  }

  /**
   * Returns an {@link Artifact} with name prefixed with prefix given in {@link Bundling} if
   * available. This helps in creating unique artifact name when multiple bundles are created
   * with a different name than the target name.
   */
  private Artifact baseNameArtifact(RuleContext ruleContext, String artifactName) {
    String prefixedArtifactName;
    if (bundling.getArtifactPrefix() != null) {
      prefixedArtifactName = String.format("-%s%s", bundling.getArtifactPrefix(), artifactName);
    } else {
      prefixedArtifactName = artifactName;
    }
    return ObjcRuleClasses.artifactByAppendingToBaseName(ruleContext, prefixedArtifactName);
  }

  // Registers the actool invocation that compiles asset catalogs, when any are present.
  private void registerActoolActionIfNecessary(ObjcProvider objcProvider) {
    Optional<Artifact> actoolzipOutput = bundling.getActoolzipOutput();
    if (!actoolzipOutput.isPresent()) {
      return;
    }

    // .get() assumes asset catalogs exist whenever an actoolzip output is configured —
    // appears to hold by construction in Bundling; confirm if this invariant changes.
    Artifact actoolPartialInfoplist = actoolPartialInfoplist(objcProvider).get();
    Artifact zipOutput = actoolzipOutput.get();

    // TODO(bazel-team): Do not use the deploy jar explicitly here. There is currently a bug where
    // we cannot .setExecutable({java_binary target}) and set REQUIRES_DARWIN in the execution info.
    // Note that below we set the archive root to the empty string. This means that the generated
    // zip file will be rooted at the bundle root, and we have to prepend the bundle root to each
    // entry when merging it with the final .ipa file.
    ruleContext.registerAction(
        ObjcRuleClasses.spawnAppleEnvActionBuilder(ruleContext)
            .setMnemonic("AssetCatalogCompile")
            .setExecutable(attributes.actoolWrapper())
            .addTransitiveInputs(objcProvider.get(ASSET_CATALOG))
            .addOutput(zipOutput)
            .addOutput(actoolPartialInfoplist)
            .setCommandLine(actoolzipCommandLine(
                objcProvider,
                zipOutput,
                actoolPartialInfoplist))
            .disableSandboxing()
            .build(ruleContext));
  }

  // Builds the actool wrapper command line: positional zip output, platform/deployment flags,
  // per-family target-device flags, the asset catalog dirs, and any extra args.
  private CommandLine actoolzipCommandLine(ObjcProvider provider, Artifact zipOutput,
      Artifact partialInfoPlist) {
    AppleConfiguration appleConfiguration = ruleContext.getFragment(AppleConfiguration.class);
    PlatformType platformType = PlatformType.IOS;
    // watchOS 1 and 2 use different platform arguments. It is likely that versions 2 and later
    // will use the watchos platform whereas watchOS 1 uses the iphone platform.
    if (isBuildingForWatch() && bundling.getBundleDir().startsWith("Watch")) {
      platformType = PlatformType.WATCHOS;
    }
    CustomCommandLine.Builder commandLine = CustomCommandLine.builder()
        // The zip output is positional, i.e. it doesn't have a flag before it.
        .addPath(zipOutput.getExecPath())
        .add("--platform")
        .add(appleConfiguration.getMultiArchPlatform(platformType)
            .getLowerCaseNameInPlist())
        .addExecPath("--output-partial-info-plist", partialInfoPlist)
        .add("--minimum-deployment-target")
        .add(bundling.getMinimumOsVersion().toString());

    for (TargetDeviceFamily targetDeviceFamily : targetDeviceFamiliesForResources()) {
      commandLine.add("--target-device").add(targetDeviceFamily.name().toLowerCase(Locale.US));
    }

    return commandLine
        .add(PathFragment.safePathStrings(provider.get(XCASSETS_DIR)))
        .add(extraActoolArgs)
        .build();
  }

  /**
   * Returns the artifact that is a plist file generated by an invocation of {@code actool} or
   * {@link Optional#absent()} if no asset catalogues are present in this target and its
   * dependencies.
   *
   * <p>All invocations of {@code actool} generate this kind of plist file, which contains metadata
   * about the {@code app_icon} and {@code launch_image} if supplied. If neither an app icon or a
   * launch image was supplied, the plist file generated is empty.
   */
  private Optional<Artifact> actoolPartialInfoplist(ObjcProvider objcProvider) {
    if (objcProvider.hasAssetCatalogs()) {
      return Optional.of(bundling.getIntermediateArtifacts().actoolPartialInfoplist());
    } else {
      return Optional.absent();
    }
  }

  /**
   * Common rule attributes used by a bundle support.
   */
  private static class Attributes {
    private final RuleContext ruleContext;

    private Attributes(RuleContext ruleContext) {
      this.ruleContext = ruleContext;
    }

    /**
     * Returns a reference to the plmerge executable.
     */
    FilesToRunProvider plmerge() {
      return ruleContext.getExecutablePrerequisite("$plmerge", Mode.HOST);
    }

    /**
     * Returns the location of the ibtoolwrapper tool.
     */
    FilesToRunProvider ibtoolWrapper() {
      return ruleContext.getExecutablePrerequisite("$ibtoolwrapper", Mode.HOST);
    }

    /**
     * Returns the location of the momcwrapper.
     */
    FilesToRunProvider momcWrapper() {
      return ruleContext.getExecutablePrerequisite("$momcwrapper", Mode.HOST);
    }

    /**
     * Returns the location of the actoolwrapper.
     */
    FilesToRunProvider actoolWrapper() {
      return ruleContext.getExecutablePrerequisite("$actoolwrapper", Mode.HOST);
    }
  }
}
package org.drip.template.state;

import org.drip.analytics.date.*;
import org.drip.quant.common.FormatUtil;
import org.drip.service.env.EnvManager;
import org.drip.service.template.LatentMarketStateBuilder;
import org.drip.state.discount.*;
import org.drip.state.forward.ForwardCurve;
import org.drip.state.identifier.ForwardLabel;

/*
 * -*- mode: java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 */

/*!
 * Copyright (C) 2017 Lakshmi Krishnamurthy
 * Copyright (C) 2016 Lakshmi Krishnamurthy
 * Copyright (C) 2015 Lakshmi Krishnamurthy
 *
 *  This file is part of DRIP, a free-software/open-source library for buy/side financial/trading model
 *  libraries targeting analysts and developers
 *  https://lakshmidrip.github.io/DRIP/
 *
 *  DRIP is composed of four main libraries:
 *
 *  - DRIP Fixed Income - https://lakshmidrip.github.io/DRIP-Fixed-Income/
 *  - DRIP Asset Allocation - https://lakshmidrip.github.io/DRIP-Asset-Allocation/
 *  - DRIP Numerical Optimizer - https://lakshmidrip.github.io/DRIP-Numerical-Optimizer/
 *  - DRIP Statistical Learning - https://lakshmidrip.github.io/DRIP-Statistical-Learning/
 *
 *  - DRIP Fixed Income: Library for Instrument/Trading Conventions, Treasury Futures/Options,
 *  Funding/Forward/Overnight Curves, Multi-Curve Construction/Valuation, Collateral Valuation and XVA
 *  Metric Generation, Calibration and Hedge Attributions, Statistical Curve Construction, Bond RV
 *  Metrics, Stochastic Evolution and Option Pricing, Interest Rate Dynamics and Option Pricing, LMM
 *  Extensions/Calibrations/Greeks, Algorithmic Differentiation, and Asset Backed Models and Analytics.
 *
 *  - DRIP Asset Allocation: Library for model libraries for MPT framework, Black Litterman Strategy
 *  Incorporator, Holdings Constraint, and Transaction Costs.
 *
 *  - DRIP Numerical Optimizer: Library for Numerical Optimization and Spline Functionality.
 *
 *  - DRIP Statistical Learning: Library for Statistical Evaluation and Machine Learning.
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *
 *  You may obtain a copy of the License at
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

/**
 * DerivedForwardState sets up the Calibration of the Derived Forward Latent State and examines the
 *  Emitted Metrics.
 *
 * @author Lakshmi Krishnamurthy
 */

public class DerivedForwardState {

	/**
	 * Calibrates a smoothed Overnight discount curve from overnight deposits, short-end OIS swaps,
	 *  OIS futures, and long-end OIS swaps.
	 *
	 * @param dtSpot the spot date the curve is built as of
	 * @param strCurrency the overnight index currency
	 *
	 * @return the calibrated merged discount/forward overnight curve
	 *
	 * @throws Exception propagated from the underlying curve construction
	 */

	private static final MergedDiscountForwardCurve OvernightCurve (
		final JulianDate dtSpot,
		final String strCurrency)
		throws Exception
	{
		// Overnight deposit instrument maturities and their "Rate" quotes

		String[] astrDepositMaturityTenor = new String[] {
			"1D",
			"2D",
			"3D"
		};

		double[] adblDepositQuote = new double[] {
			0.0004,		// 1D
			0.0004,		// 2D
			0.0004		// 3D
		};

		// Short-end OIS swap maturities and their "SwapRate" quotes

		String[] astrShortEndOISMaturityTenor = new String[] {
			"1W",
			"2W",
			"3W",
			"1M"
		};

		double[] adblShortEndOISQuote = new double[] {
			0.00070,	//   1W
			0.00069,	//   2W
			0.00078,	//   3W
			0.00074		//   1M
		};

		// OIS futures: effective tenor x maturity tenor, quoted as "SwapRate"

		String[] astrOISFuturesEffectiveTenor = new String[] {
			"1M",
			"2M",
			"3M",
			"4M",
			"5M"
		};

		String[] astrOISFuturesMaturityTenor = new String[] {
			"1M",
			"1M",
			"1M",
			"1M",
			"1M"
		};

		double[] adblOISFuturesQuote = new double[] {
			 0.00046,	//   1M x 1M
			 0.00016,	//   2M x 1M
			-0.00007,	//   3M x 1M
			-0.00013,	//   4M x 1M
			-0.00014	//   5M x 1M
		};

		// Long-end OIS swap maturities and their "SwapRate" quotes

		String[] astrLongEndOISMaturityTenor = new String[] {
			"15M",
			"18M",
			"21M",
			"02Y",
			"03Y",
			"04Y",
			"05Y",
			"06Y",
			"07Y",
			"08Y",
			"09Y",
			"10Y",
			"11Y",
			"12Y",
			"15Y",
			"20Y",
			"25Y",
			"30Y"
		};

		double[] adblLongEndOISQuote = new double[] {
			0.00002,	//  15M
			0.00008,	//  18M
			0.00021,	//  21M
			0.00036,	//   2Y
			0.00127,	//   3Y
			0.00274,	//   4Y
			0.00456,	//   5Y
			0.00647,	//   6Y
			0.00827,	//   7Y
			0.00996,	//   8Y
			0.01147,	//   9Y
			0.01280,	//  10Y
			0.01404,	//  11Y
			0.01516,	//  12Y
			0.01764,	//  15Y
			0.01939,	//  20Y
			0.02003,	//  25Y
			0.02038		//  30Y
		};

		// Delegate the actual bootstrap/smoothing to the DRIP latent market state builder

		return LatentMarketStateBuilder.SmoothOvernightCurve (
			dtSpot,
			strCurrency,
			astrDepositMaturityTenor,
			adblDepositQuote,
			"Rate",
			astrShortEndOISMaturityTenor,
			adblShortEndOISQuote,
			"SwapRate",
			astrOISFuturesEffectiveTenor,
			astrOISFuturesMaturityTenor,
			adblOISFuturesQuote,
			"SwapRate",
			astrLongEndOISMaturityTenor,
			adblLongEndOISQuote,
			"SwapRate"
		);
	}

	/**
	 * Calibrates the smoothed Reference forward curve (the longer, e.g. 6M, index) off of deposits,
	 *  FRAs, and fix-float swaps, using the overnight curve for discounting.
	 *
	 * @param dtSpot the spot date the curve is built as of
	 * @param dcOvernight the overnight discount curve (also supplies the currency)
	 * @param strReferenceForwardTenor the reference forward index tenor
	 *
	 * @return the calibrated reference forward curve
	 *
	 * @throws Exception propagated from the underlying curve construction
	 */

	private static final ForwardCurve Reference (
		final JulianDate dtSpot,
		final MergedDiscountForwardCurve dcOvernight,
		final String strReferenceForwardTenor)
		throws Exception
	{
		ForwardLabel forwardLabel = ForwardLabel.Create (
			dcOvernight.currency(),
			strReferenceForwardTenor
		);

		// Reference index deposit maturities and their "ForwardRate" quotes

		String[] astrDepositMaturityTenor = new String[] {
			"1D",
			"1W",
			"2W",
			"3W",
			"1M",
			"2M",
			"3M",
			"4M",
			"5M"
		};

		double[] adblDepositQuote = new double[] {
			0.003565,	//   1D
			0.003858,	//   1W
			0.003840,	//   2W
			0.003922,	//   3W
			0.003869,	//   1M
			0.003698,	//   2M
			0.003527,	//   3M
			0.003342,	//   4M
			0.003225	//   5M
		};

		// FRA maturities and their "ParForwardRate" quotes

		String[] astrFRAMaturityTenor = new String[] {
			"00D",
			"01M",
			"02M",
			"03M",
			"04M",
			"05M",
			"06M",
			"07M",
			"08M",
			"09M",
			"10M",
			"11M",
			"12M",
			"13M",
			"14M",
			"15M",
			"16M",
			"17M",
			"18M"
		};

		double[] adblFRAQuote = new double[] {
			0.003120,	//   0D
			0.002930,	//   1M
			0.002720,	//   2M
			0.002600,	//   3M
			0.002560,	//   4M
			0.002520,	//   5M
			0.002480,	//   6M
			0.002540,	//   7M
			0.002610,	//   8M
			0.002670,	//   9M
			0.002790,	//  10M
			0.002910,	//  11M
			0.003030,	//  12M
			0.003180,	//  13M
			0.003350,	//  14M
			0.003520,	//  15M
			0.003710,	//  16M
			0.003890,	//  17M
			0.004090	//  18M
		};

		// Fix-float swap maturities and their "SwapRate" quotes

		String[] astrFixFloatMaturityTenor = new String[] {
			"03Y",
			"04Y",
			"05Y",
			"06Y",
			"07Y",
			"08Y",
			"09Y",
			"10Y",
			"12Y",
			"15Y",
			"20Y",
			"25Y",
			"30Y",
			"35Y",
			"40Y",
			"50Y",
			"60Y"
		};

		double[] adblFixFloatQuote = new double[] {
			0.004240,	//   3Y
			0.005760,	//   4Y
			0.007620,	//   5Y
			0.009540,	//   6Y
			0.011350,	//   7Y
			0.013030,	//   8Y
			0.014520,	//   9Y
			0.015840,	//  10Y
			0.018090,	//  12Y
			0.020370,	//  15Y
			0.021870,	//  20Y
			0.022340,	//  25Y
			0.022560,	//  30Y
			0.022950,	//  35Y
			0.023480,	//  40Y
			0.024210,	//  50Y
			0.024630	//  60Y
		};

		// No float-float or synthetic float-float instruments for the reference curve (nulls),
		// and no reference curve input (null): this IS the reference curve.

		return LatentMarketStateBuilder.SmoothForwardCurve (
			dtSpot,
			forwardLabel,
			astrDepositMaturityTenor,
			adblDepositQuote,
			"ForwardRate",
			astrFRAMaturityTenor,
			adblFRAQuote,
			"ParForwardRate",
			astrFixFloatMaturityTenor,
			adblFixFloatQuote,
			"SwapRate",
			null,
			null,
			"DerivedParBasisSpread",
			null,
			null,
			"DerivedParBasisSpread",
			dcOvernight,
			null
		);
	}

	/**
	 * Entry point: builds the EUR overnight curve, the 6M reference forward curve, and then the 3M
	 *  derived forward curve, and prints the calibration instrument quotes side-by-side with the
	 *  forward rates re-implied from the derived curve.
	 *
	 * @param astrArgs command-line arguments (unused)
	 *
	 * @throws Exception propagated from environment initialization or curve construction
	 */

	public static final void main (
		final String[] astrArgs)
		throws Exception
	{
		/*
		 * Initialize the Credit Analytics Library
		 */

		EnvManager.InitEnv ("");

		String strCurrency = "EUR";
		String strDerivedForwardTenor = "3M";
		String strReferenceForwardTenor = "6M";

		JulianDate dtSpot = DateUtil.Today();

		MergedDiscountForwardCurve dcOvernight = OvernightCurve (
			dtSpot,
			strCurrency
		);

		ForwardCurve fcReference = Reference (
			dtSpot,
			dcOvernight,
			strReferenceForwardTenor
		);

		ForwardLabel forwardLabel = ForwardLabel.Create (
			strCurrency,
			strDerivedForwardTenor
		);

		// Derived (3M) index deposit maturities and their "ForwardRate" quotes

		String[] astrDepositMaturityTenor = new String[] {
			"2W",
			"3W",
			"1M",
			"2M"
		};

		double[] adblDepositQuote = new double[] {
			0.001865,	//   2W
			0.001969,	//   3W
			0.001951,	//   1M
			0.001874	//   2M
		};

		// Derived index FRA maturities and their "ParForwardRate" quotes

		String[] astrFRAMaturityTenor = new String[] {
			"00D",
			"01M",
			"03M",
			"06M",
			"09M",
			"12M",
			"15M",
			"18M",
			"21M"
		};

		double[] adblFRAQuote = new double[] {
			0.001790,	//   0D
			0.001775,	//   1M
			0.001274,	//   3M
			0.001222,	//   6M
			0.001269,	//   9M
			0.001565,	//  12M
			0.001961,	//  15M
			0.002556,	//  18M
			0.003101	//  21M
		};

		// Derived index fix-float swap maturities and their "SwapRate" quotes

		String[] astrFixFloatMaturityTenor = new String[] {
			"03Y",
			"04Y",
			"05Y",
			"06Y",
			"07Y",
			"08Y",
			"09Y",
			"10Y",
			"12Y",
			"15Y",
			"20Y",
			"25Y",
			"30Y"
		};

		double[] adblFixFloatQuote = new double[] {
			0.002850,	//   3Y
			0.004370,	//   4Y
			0.006230,	//   5Y
			0.008170,	//   6Y
			0.010000,	//   7Y
			0.011710,	//   8Y
			0.013240,	//   9Y
			0.014590,	//  10Y
			0.016920,	//  12Y
			0.019330,	//  15Y
			0.020990,	//  20Y
			0.021560,	//  25Y
			0.021860	//  30Y
		};

		// Synthetic float-float basis spreads ("DerivedParBasisSpread") extending the long end

		String[] astrSyntheticFloatFloatMaturityTenor = new String[] {
			"35Y",
			"40Y",
			"50Y",
			"60Y"
		};

		double[] adblSyntheticFloatFloatQuote = new double[] {
			0.00065,	//  35Y
			0.00060,	//  40Y
			0.00054,	//  50Y
			0.00050		//  60Y
		};

		// The derived curve is built against both the overnight discount curve and the reference
		// forward curve (unlike Reference(), which passes null for the latter).

		ForwardCurve fcDerived = LatentMarketStateBuilder.SmoothForwardCurve (
			dtSpot,
			forwardLabel,
			astrDepositMaturityTenor,
			adblDepositQuote,
			"ForwardRate",
			astrFRAMaturityTenor,
			adblFRAQuote,
			"ParForwardRate",
			astrFixFloatMaturityTenor,
			adblFixFloatQuote,
			"SwapRate",
			null,
			null,
			"DerivedParBasisSpread",
			astrSyntheticFloatFloatMaturityTenor,
			adblSyntheticFloatFloatQuote,
			"DerivedParBasisSpread",
			dcOvernight,
			fcReference
		);

		String strLatentStateLabel = fcDerived.label().fullyQualifiedName();

		// Emit the calibration quotes vs. the forwards re-implied off of the derived curve

		System.out.println ("\n\n\t||-------------------------------------------------------------------------||");

		for (int i = 0; i < adblDepositQuote.length; ++i)
			System.out.println (
				"\t|| " + strLatentStateLabel + " | DEPOSIT | " + astrDepositMaturityTenor[i] + " | " +
				FormatUtil.FormatDouble (adblDepositQuote[i], 1, 4, 100.) + "% | Forward Rate | " +
				FormatUtil.FormatDouble (fcDerived.forward (astrDepositMaturityTenor[i]), 1, 4, 100.) + "% ||"
			);

		System.out.println ("\t||-------------------------------------------------------------------------||");

		System.out.println ("\n\n\t||--------------------------------------------------------------------------||");

		for (int i = 0; i < adblFRAQuote.length; ++i)
			System.out.println (
				"\t|| " + strLatentStateLabel + " | FRA | " + astrFRAMaturityTenor[i] + " | " +
				FormatUtil.FormatDouble (adblFRAQuote[i], 1, 4, 100.) + "% | Par Forward Rate | " +
				FormatUtil.FormatDouble (fcDerived.forward (dtSpot.addTenor (astrFRAMaturityTenor[i]).addTenor (strDerivedForwardTenor)), 1, 4, 100.) + "% ||"
			);

		System.out.println ("\t||--------------------------------------------------------------------------||");

		System.out.println ("\n\n\t||-------------------------------------------------------------------------||");

		for (int i = 0; i < adblFixFloatQuote.length; ++i)
			System.out.println (
				"\t|| " + strLatentStateLabel + " | FIX FLOAT | " + astrFixFloatMaturityTenor[i] + " | " +
				FormatUtil.FormatDouble (adblFixFloatQuote[i], 1, 4, 100.) + "% | Swap Rate | " +
				FormatUtil.FormatDouble (fcDerived.forward (astrFixFloatMaturityTenor[i]), 1, 4, 100.) + "% ||"
			);

		System.out.println ("\t||-------------------------------------------------------------------------||");

		System.out.println ("\n\n\t||----------------------------------------------------------------------------------------------------||");

		for (int i = 0; i < adblSyntheticFloatFloatQuote.length; ++i)
			System.out.println (
				"\t|| " + strLatentStateLabel + " | SYNTHETIC FLOAT FLOAT | " + astrSyntheticFloatFloatMaturityTenor[i] + " | " +
				FormatUtil.FormatDouble (adblSyntheticFloatFloatQuote[i], 1, 2, 10000.) + " bp | Derived Par Basis Spread | " +
				FormatUtil.FormatDouble (fcDerived.forward (astrSyntheticFloatFloatMaturityTenor[i]), 1, 4, 100.) + "% ||"
			);

		System.out.println ("\t||----------------------------------------------------------------------------------------------------||\n");
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.pentaho.community.di.plugins.database.presto.delegate;

import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.Objects;

/**
 * A {@link ResultSetMetaData} that forwards every call unchanged to a wrapped
 * {@code ResultSetMetaData} instance (e.g. the one returned by the Presto driver).
 * Subclasses can override individual methods to adjust the reported metadata.
 *
 * <p>The contract of each method is documented on {@link ResultSetMetaData}; every
 * method here is a pure delegation and inherits those docs.
 */
public class DelegateResultSetMetaData implements ResultSetMetaData {

  private final ResultSetMetaData realResultSetMetaData;

  /**
   * Creates a wrapper delegating to the given metadata instance.
   *
   * @param realResultSetMetaData the metadata to forward all calls to; must not be null
   * @throws NullPointerException if {@code realResultSetMetaData} is null
   */
  public DelegateResultSetMetaData( ResultSetMetaData realResultSetMetaData ) {
    // Fail fast with a named message rather than a context-free NPE on the first delegated call.
    this.realResultSetMetaData =
        Objects.requireNonNull( realResultSetMetaData, "realResultSetMetaData" );
  }

  /** {@inheritDoc} */
  @Override
  public int getColumnCount() throws SQLException {
    return realResultSetMetaData.getColumnCount();
  }

  /** {@inheritDoc} */
  @Override
  public boolean isAutoIncrement( int column ) throws SQLException {
    return realResultSetMetaData.isAutoIncrement( column );
  }

  /** {@inheritDoc} */
  @Override
  public boolean isCaseSensitive( int column ) throws SQLException {
    return realResultSetMetaData.isCaseSensitive( column );
  }

  /** {@inheritDoc} */
  @Override
  public boolean isSearchable( int column ) throws SQLException {
    return realResultSetMetaData.isSearchable( column );
  }

  /** {@inheritDoc} */
  @Override
  public boolean isCurrency( int column ) throws SQLException {
    return realResultSetMetaData.isCurrency( column );
  }

  /** {@inheritDoc} */
  @Override
  public int isNullable( int column ) throws SQLException {
    return realResultSetMetaData.isNullable( column );
  }

  /** {@inheritDoc} */
  @Override
  public boolean isSigned( int column ) throws SQLException {
    return realResultSetMetaData.isSigned( column );
  }

  /** {@inheritDoc} */
  @Override
  public int getColumnDisplaySize( int column ) throws SQLException {
    return realResultSetMetaData.getColumnDisplaySize( column );
  }

  /** {@inheritDoc} */
  @Override
  public String getColumnLabel( int column ) throws SQLException {
    return realResultSetMetaData.getColumnLabel( column );
  }

  /** {@inheritDoc} */
  @Override
  public String getColumnName( int column ) throws SQLException {
    return realResultSetMetaData.getColumnName( column );
  }

  /** {@inheritDoc} */
  @Override
  public String getSchemaName( int column ) throws SQLException {
    return realResultSetMetaData.getSchemaName( column );
  }

  /** {@inheritDoc} */
  @Override
  public int getPrecision( int column ) throws SQLException {
    return realResultSetMetaData.getPrecision( column );
  }

  /** {@inheritDoc} */
  @Override
  public int getScale( int column ) throws SQLException {
    return realResultSetMetaData.getScale( column );
  }

  /** {@inheritDoc} */
  @Override
  public String getTableName( int column ) throws SQLException {
    return realResultSetMetaData.getTableName( column );
  }

  /** {@inheritDoc} */
  @Override
  public String getCatalogName( int column ) throws SQLException {
    return realResultSetMetaData.getCatalogName( column );
  }

  /** {@inheritDoc} */
  @Override
  public int getColumnType( int column ) throws SQLException {
    return realResultSetMetaData.getColumnType( column );
  }

  /** {@inheritDoc} */
  @Override
  public String getColumnTypeName( int column ) throws SQLException {
    return realResultSetMetaData.getColumnTypeName( column );
  }

  /** {@inheritDoc} */
  @Override
  public boolean isReadOnly( int column ) throws SQLException {
    return realResultSetMetaData.isReadOnly( column );
  }

  /** {@inheritDoc} */
  @Override
  public boolean isWritable( int column ) throws SQLException {
    return realResultSetMetaData.isWritable( column );
  }

  /** {@inheritDoc} */
  @Override
  public boolean isDefinitelyWritable( int column ) throws SQLException {
    return realResultSetMetaData.isDefinitelyWritable( column );
  }

  /** {@inheritDoc} */
  @Override
  public String getColumnClassName( int column ) throws SQLException {
    return realResultSetMetaData.getColumnClassName( column );
  }

  /** {@inheritDoc} */
  @Override
  public <T> T unwrap( Class<T> iface ) throws SQLException {
    return realResultSetMetaData.unwrap( iface );
  }

  /** {@inheritDoc} */
  @Override
  public boolean isWrapperFor( Class<?> iface ) throws SQLException {
    return realResultSetMetaData.isWrapperFor( iface );
  }
}
/*
 * Copyright 2008-2019 by Emeric Vernat
 *
 *     This file is part of Java Melody.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.bull.javamelody;

import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Cursor;
import java.awt.Rectangle;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.util.List;

import javax.swing.BorderFactory;
import javax.swing.BoxLayout;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;
import javax.swing.border.Border;

import net.bull.javamelody.internal.common.I18N;
import net.bull.javamelody.internal.model.MBeanNode;
import net.bull.javamelody.internal.model.MBeanNode.MBeanAttribute;
import net.bull.javamelody.swing.Utilities;
import net.bull.javamelody.swing.table.MDefaultTableCellRenderer;
import net.bull.javamelody.swing.table.MMultiLineTableCellRenderer;
import net.bull.javamelody.swing.table.MTable;

/**
 * Panel for an MBeanNode: renders one node of the MBean tree as a lazily
 * expandable/collapsible panel. Non-leaf nodes show a clickable label with a
 * plus/minus icon; leaf nodes show their attributes in a table.
 * @author Emeric Vernat
 */
class MBeanNodePanel extends JPanel {
	// indents an expanded child panel under its parent label
	static final Border LEFT_MARGIN_BORDER = BorderFactory.createEmptyBorder(0, 30, 0, 0);

	private static final long serialVersionUID = 1L;

	// link-like rendering for the clickable expand/collapse label
	private static final Color FOREGROUND = Color.BLUE.darker();

	private static final Cursor HAND_CURSOR = Cursor.getPredefinedCursor(Cursor.HAND_CURSOR);

	// icons toggled on each expand/collapse
	private static final ImageIcon PLUS_ICON = ImageIconCache.getImageIcon("bullets/plus.png");

	private static final ImageIcon MINUS_ICON = ImageIconCache.getImageIcon("bullets/minus.png");

	private static final MMultiLineTableCellRenderer FORMATTED_VALUE_CELL_RENDERER = new MMultiLineTableCellRenderer();

	// wraps attribute descriptions in parentheses; shared by all panels
	private static final MDefaultTableCellRenderer DESCRIPTION_CELL_RENDERER = new MDefaultTableCellRenderer() {
		private static final long serialVersionUID = 1L;

		@Override
		protected void setValue(Object value) {
			if (value != null) {
				super.setValue('(' + value.toString() + ')');
			} else {
				super.setValue(null);
			}
		}
	};

	// shared listener: a click on the label toggles its parent MBeanNodePanel
	private static final MouseListener LABEL_MOUSE_LISTENER = new MouseAdapter() {
		@Override
		public void mouseClicked(MouseEvent event) {
			// the label is added directly to the node panel, so getParent() is the panel
			final MBeanNodePanel nodePanel = (MBeanNodePanel) event.getComponent().getParent();
			nodePanel.onClick();
		}
	};

	// shared listener: double-click on an attribute row pops up its full formatted value
	private static final MouseListener TABLE_MOUSE_LISTENER = new MouseAdapter() {
		@SuppressWarnings("unchecked")
		@Override
		public void mouseClicked(MouseEvent e) {
			if (e.getClickCount() == 2) {
				final MTable<MBeanAttribute> table = (MTable<MBeanAttribute>) e.getComponent();
				final MBeanAttribute attribute = table.getSelectedObject();
				Utilities.showTextInPopup(table, attribute.getName(),
						attribute.getFormattedValue());
			}
		}
	};

	// the MBean tree node rendered by this panel
	private final MBeanNode node;

	// clickable expand/collapse label; null for leaf (attributes-only) panels
	private JLabel label;

	// lazily created child panel (node subtree or attribute table); null until first expand
	private JPanel detailPanel;

	MBeanNodePanel(MBeanNode node) {
		super(new BorderLayout());
		assert node != null;
		this.node = node;
		init();
	}

	private void init() {
		setOpaque(false);
		String name = node.getName();
		final int indexOfComma = name.indexOf(',');
		// nodes with children, or whose name carries ObjectName properties (a comma),
		// get a clickable label; pure leaves directly show their attribute table
		if (node.getChildren() != null || indexOfComma != -1) {
			if (indexOfComma != -1) {
				// display only the part after the first comma
				name = name.substring(indexOfComma + 1);
			}
			label = new JLabel(name);
			if (node.getDescription() != null) {
				label.setToolTipText("<html>" + name + "<br/>(" + node.getDescription() + ')');
			}
			label.setIcon(PLUS_ICON);
			label.setForeground(FOREGROUND);
			label.setCursor(HAND_CURSOR);
			label.addMouseListener(LABEL_MOUSE_LISTENER);
			add(label, BorderLayout.CENTER);
		} else {
			detailPanel = createAttributesPanel();
			add(detailPanel, BorderLayout.CENTER);
		}
	}

	// toggles the detail panel, creating it on the first click
	void onClick() {
		if (detailPanel == null) {
			final List<MBeanNode> children = node.getChildren();
			if (children != null) {
				detailPanel = createNodeTreePanel(children);
			} else {
				detailPanel = createAttributesPanel();
			}
			detailPanel.setBorder(LEFT_MARGIN_BORDER);
			detailPanel.setVisible(false);
			add(detailPanel, BorderLayout.SOUTH);
		}
		detailPanel.setVisible(!detailPanel.isVisible());
		if (detailPanel.isVisible()) {
			// scroll after the layout pass so the expanded bounds are up to date
			SwingUtilities.invokeLater(new Runnable() {
				@Override
				public void run() {
					scrollToVisible();
				}
			});
			label.setIcon(MINUS_ICON);
		} else {
			label.setIcon(PLUS_ICON);
		}
		validate();
	}

	void expand() {
		if (detailPanel == null || !detailPanel.isVisible()) {
			onClick();
		}
	}

	void collapse() {
		// label == null means an attributes-only panel, which cannot be collapsed
		if (label != null && detailPanel != null && detailPanel.isVisible()) {
			onClick();
		}
	}

	private JPanel createAttributesPanel() {
		final List<MBeanAttribute> attributes = node.getAttributes();
		// only add the description column if at least one attribute has a description
		boolean descriptionDisplayed = false;
		for (final MBeanAttribute attribute : attributes) {
			if (attribute.getDescription() != null) {
				descriptionDisplayed = true;
				break;
			}
		}
		final JPanel attributesPanel = new JPanel(new BorderLayout());
		attributesPanel.setOpaque(false);
		if (node.getDescription() != null) {
			final JLabel descriptionLabel = new JLabel('(' + node.getDescription() + ')');
			attributesPanel.add(descriptionLabel, BorderLayout.NORTH);
		}
		final MTable<MBeanAttribute> table = new MTable<>();
		table.addColumn("name", I18N.getString("Nom"));
		table.addColumn("formattedValue", I18N.getString("Contenu"));
		table.setColumnCellRenderer("formattedValue", FORMATTED_VALUE_CELL_RENDERER);
		if (descriptionDisplayed) {
			table.addColumn("description", "");
			table.setColumnCellRenderer("description", DESCRIPTION_CELL_RENDERER);
		}
		table.setList(attributes);
		table.addMouseListener(TABLE_MOUSE_LISTENER);
		attributesPanel.add(table, BorderLayout.CENTER);
		return attributesPanel;
	}

	void scrollToVisible() {
		final Rectangle localBounds = SwingUtilities.getLocalBounds(MBeanNodePanel.this);
		// grow vertically a bit so the expanded content is not flush with the viewport edge
		localBounds.grow(0, 15);
		scrollRectToVisible(localBounds);
	}

	// builds a vertical panel containing one MBeanNodePanel per child node
	static JPanel createNodeTreePanel(List<MBeanNode> nodes) {
		final JPanel nodeTreePanel = new JPanel();
		nodeTreePanel.setOpaque(false);
		nodeTreePanel.setLayout(new BoxLayout(nodeTreePanel, BoxLayout.Y_AXIS));
		for (final MBeanNode node : nodes) {
			nodeTreePanel.add(new MBeanNodePanel(node));
		}
		return nodeTreePanel;
	}
}
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.psi.util;

import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInsight.Nullability;
import com.intellij.codeInsight.NullableNotNullManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.impl.source.resolve.graphInference.PsiPolyExpressionUtil;
import com.intellij.psi.infos.MethodCandidateInfo;
import com.intellij.psi.tree.IElementType;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Function;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

/**
 * Detects type casts that can be removed without changing the semantics of the
 * surrounding code.
 *
 * @author max
 */
public class RedundantCastUtil {
  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInspection.redundantCast.RedundantCastUtil");

  // Static utility holder — not instantiable.
  private RedundantCastUtil() { }

  /** Collects every redundant cast found inside {@code where} (recursively). */
  @NotNull
  public static List<PsiTypeCastExpression> getRedundantCastsInside(@NotNull PsiElement where) {
    MyCollectingVisitor visitor = new MyCollectingVisitor();
    if (where instanceof PsiEnumConstant) {
      where.accept(visitor);
    } else {
      where.acceptChildren(visitor);
    }
    return new ArrayList<>(visitor.myFoundCasts);
  }

  /** Returns true when the given single cast can be removed safely. */
  public static boolean isCastRedundant (PsiTypeCastExpression typeCast) {
    PsiElement parent = typeCast.getParent();
    PsiExpression operand = typeCast.getOperand();
    // Trivial case: casting an expression to its own type.
    if (operand != null && operand.getType() != null && operand.getType().equals(typeCast.getType())) return true;
    // Climb to a context wide enough for the redundancy visitor to judge the cast.
    while(parent instanceof PsiParenthesizedExpression) parent = parent.getParent();
    if (parent instanceof PsiExpressionList) parent = parent.getParent();
    if (parent instanceof PsiReferenceExpression) parent = parent.getParent();
    if (parent instanceof PsiAnonymousClass) parent = parent.getParent();
    MyIsRedundantVisitor visitor = new MyIsRedundantVisitor(true);
    parent.accept(visitor);
    return visitor.isRedundant;
  }

  /** Strips any number of enclosing parentheses; null-safe pass-through. */
  @Nullable
  private static PsiExpression deparenthesizeExpression(PsiExpression arg) {
    while (arg instanceof PsiParenthesizedExpression) arg = ((PsiParenthesizedExpression) arg).getExpression();
    return arg;
  }

  /** Visitor variant that records every redundant cast instead of a single flag. */
  private static class MyCollectingVisitor extends MyIsRedundantVisitor {
    private final Set<PsiTypeCastExpression> myFoundCasts = new HashSet<>();

    private MyCollectingVisitor() {
      super(true);
    }

    @Override
    public void visitClass(PsiClass aClass) {
      // avoid multiple visit
    }

    @Override
    public void visitMethod(PsiMethod method) {
      // avoid multiple visit
    }

    @Override
    public void visitField(PsiField field) {
      // avoid multiple visit
    }

    @Override
    protected void addToResults(@NotNull PsiTypeCastExpression typeCast) {
      if (!isTypeCastSemantic(typeCast)) {
        myFoundCasts.add(typeCast);
      }
    }
  }

  /** Walks expressions and flags casts whose removal provably preserves semantics. */
  private static class MyIsRedundantVisitor extends JavaRecursiveElementWalkingVisitor {
    private boolean isRedundant;
    private final boolean myRecursive;

    private MyIsRedundantVisitor(final boolean recursive) {
      myRecursive = recursive;
    }

    @Override
    public void visitElement(final PsiElement element) {
      if (myRecursive) {
        super.visitElement(element);
      }
    }

    protected void addToResults(@NotNull PsiTypeCastExpression typeCast){
      if (!isTypeCastSemantic(typeCast)) {
        isRedundant = true;
      }
    }

    @Override
    public void visitAssignmentExpression(PsiAssignmentExpression expression) {
      // A RHS cast is redundant when its operand already conforms to the LHS type.
      processPossibleTypeCast(expression.getRExpression(), expression.getLExpression().getType());
      super.visitAssignmentExpression(expression);
    }

    @Override
    public void visitArrayInitializerExpression(PsiArrayInitializerExpression expression) {
      PsiType type = expression.getType();
      if (type instanceof PsiArrayType) {
        for (PsiExpression initializer : expression.getInitializers()) {
          processPossibleTypeCast(initializer, ((PsiArrayType)type).getComponentType());
        }
      }
      super.visitArrayInitializerExpression(expression);
    }

    @Override
    public void visitVariable(PsiVariable variable) {
      processPossibleTypeCast(variable.getInitializer(), variable.getType());
      super.visitVariable(variable);
    }

    @Override
    public void visitReturnStatement(PsiReturnStatement statement) {
      // Lambdas are excluded: their return type is governed by the functional interface.
      final PsiMethod method = PsiTreeUtil.getParentOfType(statement, PsiMethod.class, true, PsiLambdaExpression.class);
      if (method != null) {
        final PsiType returnType = method.getReturnType();
        final PsiExpression returnValue = statement.getReturnValue();
        if (returnValue != null) {
          processPossibleTypeCast(returnValue, returnType);
        }
      }
      super.visitReturnStatement(statement);
    }

    @Override
    public void visitPolyadicExpression(PsiPolyadicExpression expression) {
      IElementType tokenType = expression.getOperationTokenType();
      PsiExpression[] operands = expression.getOperands();
      if (operands.length >= 2) {
        PsiType lType = operands[0].getType();
        processBinaryExpressionOperand(deparenthesizeExpression(operands[0]), operands[1].getType(), tokenType);
        for (int i = 1; i < operands.length; i++) {
          PsiExpression operand = deparenthesizeExpression(operands[i]);
          if (operand == null) continue;
          processBinaryExpressionOperand(operand, lType, tokenType);
          // Fold the accumulated left-hand type, mirroring left-to-right evaluation.
          lType = TypeConversionUtil.calcTypeForBinaryExpression(lType, operand.getType(), tokenType, true);
        }
      }
      super.visitPolyadicExpression(expression);
    }

    private void processBinaryExpressionOperand(final PsiExpression operand,
                                                final PsiType otherType,
                                                final IElementType binaryToken) {
      if (operand instanceof PsiTypeCastExpression) {
        PsiTypeCastExpression typeCast = (PsiTypeCastExpression)operand;
        PsiExpression toCast = typeCast.getOperand();
        // Redundant when the operator still applies to the uncasted operand type.
        if (toCast != null && TypeConversionUtil.isBinaryOperatorApplicable(binaryToken, toCast.getType(), otherType, false)) {
          addToResults(typeCast);
        }
      }
    }

    private void processPossibleTypeCast(PsiExpression rExpr, @Nullable PsiType lType) {
      rExpr = deparenthesizeExpression(rExpr);
      if
(rExpr instanceof PsiTypeCastExpression) {
        PsiExpression castOperand = ((PsiTypeCastExpression)rExpr).getOperand();
        if (castOperand != null) {
          PsiType operandType;
          if (castOperand instanceof PsiTypeCastExpression) {
            // Nested cast: judge against the innermost operand's type.
            final PsiExpression nestedCastOperand = ((PsiTypeCastExpression)castOperand).getOperand();
            operandType = nestedCastOperand != null ? nestedCastOperand.getType() : null;
          }
          else if (castOperand instanceof PsiFunctionalExpression && lType != null) {
            // Lambda/method ref: the cast supplies the target type, so it is only
            // removable when the cast type equals the assignment's left-hand type.
            final PsiTypeElement typeElement = ((PsiTypeCastExpression)rExpr).getCastType();
            final PsiType castType = typeElement != null ? typeElement.getType() : null;
            operandType = lType.equals(castType) ? castOperand.getType() : null;
          }
          else {
            operandType = castOperand.getType();
          }
          if (operandType != null) {
            if (lType != null && TypeConversionUtil.isAssignable(lType, operandType, false)) {
              addToResults((PsiTypeCastExpression)rExpr);
            }
          }
        }
      }
    }

    @Override
    public void visitMethodCallExpression(PsiMethodCallExpression expression) {
      processCall(expression);
      checkForVirtual(expression);
      super.visitMethodCallExpression(expression);
    }

    /**
     * A cast on a call qualifier may change which method is dispatched; verify by
     * re-resolving a non-physical copy of the call with the cast removed.
     */
    private void checkForVirtual(PsiMethodCallExpression methodCall) {
      PsiReferenceExpression methodExpr = methodCall.getMethodExpression();
      PsiExpression qualifier = methodExpr.getQualifierExpression();
      if (!(qualifier instanceof PsiParenthesizedExpression)) return;
      PsiExpression operand = PsiUtil.skipParenthesizedExprDown(qualifier);
      if (!(operand instanceof PsiTypeCastExpression)) return;
      PsiTypeCastExpression typeCast = (PsiTypeCastExpression)operand;
      PsiExpression castOperand = typeCast.getOperand();
      if (castOperand == null) return;
      PsiType type = castOperand.getType();
      if (type == null) return;
      if (type instanceof PsiPrimitiveType) return;
      final JavaResolveResult resolveResult = methodExpr.advancedResolve(false);
      PsiMethod targetMethod = (PsiMethod)resolveResult.getElement();
      if (targetMethod == null) return;
      if (targetMethod.hasModifierProperty(PsiModifier.STATIC)) return;
      try {
        Project project = methodExpr.getProject();
        PsiElementFactory factory = JavaPsiFacade.getElementFactory(project);

        // Build a copy of the call, strip the qualifier cast, and re-resolve.
        final PsiExpression expressionFromText = factory.createExpressionFromText(methodCall.getText(), methodCall);
        if (!(expressionFromText instanceof PsiMethodCallExpression)) return;
        PsiMethodCallExpression newCall = (PsiMethodCallExpression)expressionFromText;
        PsiExpression newQualifier = newCall.getMethodExpression().getQualifierExpression();
        PsiTypeCastExpression newCast = Objects.requireNonNull((PsiTypeCastExpression)PsiUtil.skipParenthesizedExprDown(newQualifier));
        PsiExpression newOperand = Objects.requireNonNull(newCast.getOperand());
        newQualifier.replace(newOperand);

        final JavaResolveResult newResult = newCall.getMethodExpression().advancedResolve(false);
        if (!newResult.isValidResult()) return;
        final PsiMethod newTargetMethod = (PsiMethod)newResult.getElement();
        PsiType newReturnType = newCall.getType();
        PsiType oldReturnType = methodCall.getType();
        if (newReturnType instanceof PsiCapturedWildcardType && oldReturnType instanceof PsiCapturedWildcardType) {
          // Compare upper bounds; distinct captures never compare equal directly.
          newReturnType = ((PsiCapturedWildcardType)newReturnType).getUpperBound();
          oldReturnType = ((PsiCapturedWildcardType)oldReturnType).getUpperBound();
        }
        // Redundant when the same method (or a compatible super method) is reached
        // with an unchanged return type.
        if (Comparing.equal(newReturnType, oldReturnType) &&
            (Comparing.equal(newTargetMethod, targetMethod) ||
             !(newTargetMethod.isDeprecated() && !targetMethod.isDeprecated()) &&
             MethodSignatureUtil.isSuperMethod(newTargetMethod, targetMethod) &&
             // see SCR11555, SCR14559
             areThrownExceptionsCompatible(targetMethod, newTargetMethod) &&
             areNullabilityCompatible(targetMethod, newTargetMethod))) {
          addToResults(typeCast);
        }
      }
      catch (IncorrectOperationException ignore) { }
    }

    private static boolean areNullabilityCompatible(final PsiMethod oldTargetMethod, final PsiMethod newTargetMethod) {
      // the cast may be for the @NotNull which newTargetMethod has whereas the oldTargetMethod doesn't
      Nullability oldNullability = NullableNotNullManager.getNullability(oldTargetMethod);
      Nullability
newNullability = NullableNotNullManager.getNullability(newTargetMethod);
      return oldNullability == newNullability;
    }

    /** Every exception declared by the new target must already be declared by the old one. */
    private static boolean areThrownExceptionsCompatible(final PsiMethod targetMethod, final PsiMethod newTargetMethod) {
      final PsiClassType[] oldThrowsTypes = targetMethod.getThrowsList().getReferencedTypes();
      final PsiClassType[] newThrowsTypes = newTargetMethod.getThrowsList().getReferencedTypes();
      for (final PsiClassType throwsType : newThrowsTypes) {
        if (!isExceptionThrown(throwsType, oldThrowsTypes)) return false;
      }
      return true;
    }

    private static boolean isExceptionThrown(PsiClassType exceptionType, PsiClassType[] thrownTypes) {
      for (final PsiClassType type : thrownTypes) {
        if (type.equals(exceptionType)) return true;
      }
      return false;
    }

    @Override
    public void visitNewExpression(PsiNewExpression expression) {
      processCall(expression);
      super.visitNewExpression(expression);
    }

    @Override
    public void visitEnumConstant(PsiEnumConstant enumConstant) {
      processCall(enumConstant);
      super.visitEnumConstant(enumConstant);
    }

    @Override
    public void visitReferenceExpression(PsiReferenceExpression expression) {
      // Do not recurse into the reference itself; only the generic element dispatch.
      visitElement(expression);
    }

    /**
     * Checks every cast argument of a call: removes the cast in a non-physical copy
     * of the call and verifies that the resolve result and call type are unchanged.
     */
    private void processCall(PsiCall expression){
      PsiExpressionList argumentList = expression.getArgumentList();
      if (argumentList == null) return;
      PsiExpression[] args = argumentList.getExpressions();
      final JavaResolveResult oldResult = expression.resolveMethodGenerics();
      final PsiElement element = oldResult.getElement();
      if (!(element instanceof PsiMethod)) return;
      PsiMethod oldMethod = (PsiMethod)element;
      PsiParameter[] parameters = oldMethod.getParameterList().getParameters();

      try {
        for (int i = 0; i < args.length; i++) {
          ProgressManager.checkCanceled();
          final PsiExpression arg = deparenthesizeExpression(args[i]);
          if (arg instanceof PsiTypeCastExpression) {
            PsiTypeCastExpression cast = (PsiTypeCastExpression)arg;
            if (i == args.length - 1 && args.length == parameters.length && parameters[i].isVarArgs()) {
              //do not mark cast to resolve ambiguity for calling varargs method with inexact argument
              continue;
            }

            final PsiType typeByParent = PsiTypesUtil.getExpectedTypeByParent(expression);
            final PsiCall newCall;
            if (typeByParent != null) {
              newCall = (PsiCall)LambdaUtil.copyWithExpectedType(expression, typeByParent);
            }
            else {
              final PsiCall call = LambdaUtil.treeWalkUp(expression);
              if (call != null) {
                // Mark the argument list so the copied call can be located again.
                Object marker = new Object();
                PsiTreeUtil.mark(argumentList, marker);
                final PsiCall callCopy = LambdaUtil.copyTopLevelCall(call);
                newCall = PsiTreeUtil.getParentOfType(PsiTreeUtil.releaseMark(callCopy, marker), expression.getClass(), false);
              }
              else {
                newCall = (PsiCall)expression.copy();
              }
            }
            final PsiExpressionList argList = newCall.getArgumentList();
            LOG.assertTrue(argList != null);
            PsiExpression[] newArgs = argList.getExpressions();
            LOG.assertTrue(newArgs.length == args.length, "oldCall: " + expression.getText() + "; old length: " + args.length + "; newCall: " + newCall.getText() + "; new length: " + newArgs.length);
            PsiTypeCastExpression castExpression = (PsiTypeCastExpression) deparenthesizeExpression(newArgs[i]);
            final PsiTypeElement castTypeElement = cast.getCastType();
            final PsiType castType = castTypeElement != null ? castTypeElement.getType() : null;
            PsiExpression castOperand = castExpression.getOperand();
            if (castOperand == null) return;
            // Drop the cast in the copy only; the physical file is never modified.
            newArgs[i] = (PsiExpression)castExpression.replace(castOperand);

            final JavaResolveResult newResult;
            if (newCall instanceof PsiEnumConstant) {
              // do this manually, because PsiEnumConstantImpl.resolveMethodGenerics() will assert (no containing class for the copy)
              final PsiEnumConstant enumConstant = (PsiEnumConstant)expression;
              PsiClass containingClass = enumConstant.getContainingClass();
              final JavaPsiFacade facade = JavaPsiFacade.getInstance(enumConstant.getProject());
              final PsiClassType type = facade.getElementFactory().createType(containingClass);
              newResult = facade.getResolveHelper().resolveConstructor(type, newCall.getArgumentList(), enumConstant);
            }
            else {
              newResult = newCall.resolveMethodGenerics();
            }
            final PsiAnonymousClass oldAnonymousClass = expression instanceof PsiNewExpression ? ((PsiNewExpression)expression).getAnonymousClass() : null;
            final PsiAnonymousClass newAnonymousClass = newCall instanceof PsiNewExpression ? ((PsiNewExpression)newCall).getAnonymousClass() : null;
            // Accept only when the copy still resolves to the same method with the
            // same call type and no inference errors.
            if (oldMethod.equals(newResult.getElement()) &&
                (!(newCall instanceof PsiCallExpression) ||
                 oldAnonymousClass != null && newAnonymousClass != null && Comparing.equal(oldAnonymousClass.getBaseClassType(), newAnonymousClass.getBaseClassType()) ||
                 Comparing.equal(PsiUtil.recaptureWildcards(((PsiCallExpression)newCall).getType(), expression), ((PsiCallExpression)expression).getType())) &&
                newResult.isValidResult() &&
                !(newResult instanceof MethodCandidateInfo && ((MethodCandidateInfo)newResult).getInferenceErrorMessage() != null)) {
              PsiExpression newArg = PsiUtil.deparenthesizeExpression(newArgs[i]);
              if (newArg instanceof PsiConditionalExpression && PsiPolyExpressionUtil.isPolyExpression(newArgs[i])) {
                PsiType targetType = newArg.getType();
                LOG.assertTrue(targetType != null);
                //target type is detected by method call
                //check that both sides are fine with that
                PsiExpression thenExpression = ((PsiConditionalExpression)newArg).getThenExpression();
                PsiType thenType = thenExpression != null ? thenExpression.getType() : null;
                PsiExpression elseExpression = ((PsiConditionalExpression)newArg).getElseExpression();
                PsiType elseType = elseExpression != null ? elseExpression.getType() : null;
                if (thenType != null && targetType.isAssignableFrom(thenType) &&
                    elseType != null && targetType.isAssignableFrom(elseType)) {
                  addToResults(cast);
                }
              }
              else if (!(newArg instanceof PsiFunctionalExpression)) {
                addToResults(cast);
              }
              else {
                // Functional expression: the cast is only redundant when the ground
                // target type stays the same without it.
                final boolean varargs = newResult instanceof MethodCandidateInfo && ((MethodCandidateInfo)newResult).isVarargs();
                final PsiType parameterType = PsiTypesUtil.getParameterType(parameters, i, varargs);
                final PsiType newArgType = newResult.getSubstitutor().substitute(parameterType);
                if (Comparing.equal(castType, ((PsiFunctionalExpression)newArg).getGroundTargetType(newArgType))) {
                  addToResults(cast);
                }
              }
            }
          }
          else if (arg instanceof PsiLambdaExpression) {
            // Also inspect casts on the lambda's return expressions (incl. both
            // branches of conditional returns).
            final PsiType interfaceType = ((PsiLambdaExpression)arg).getFunctionalInterfaceType();
            if (interfaceType != null) {
              List<PsiExpression> expressions = LambdaUtil.getReturnExpressions((PsiLambdaExpression)arg);
              for (int returnExprIdx = 0; returnExprIdx < expressions.size(); returnExprIdx++) {
                ProgressManager.checkCanceled();
                PsiExpression returnExpression = deparenthesizeExpression(expressions.get(returnExprIdx));
                if (returnExpression instanceof PsiTypeCastExpression) {
                  processLambdaReturnExpression(expression, i, interfaceType, (PsiTypeCastExpression)returnExpression, returnExprIdx, expression13 -> (PsiTypeCastExpression)expression13);
                }
                else if (returnExpression instanceof PsiConditionalExpression) {
                  final PsiExpression thenExpression = ((PsiConditionalExpression)returnExpression).getThenExpression();
                  if (thenExpression instanceof PsiTypeCastExpression) {
                    processLambdaReturnExpression(expression, i, interfaceType, (PsiTypeCastExpression)thenExpression, returnExprIdx,
                                                  expression12 -> (PsiTypeCastExpression)((PsiConditionalExpression)expression12).getThenExpression());
                  }
                  final PsiExpression elseExpression = ((PsiConditionalExpression)returnExpression).getElseExpression();
                  if (elseExpression instanceof PsiTypeCastExpression) {
                    processLambdaReturnExpression(expression, i, interfaceType, (PsiTypeCastExpression)elseExpression, returnExprIdx,
                                                  expression1 -> (PsiTypeCastExpression)((PsiConditionalExpression)expression1).getElseExpression());
                  }
                }
              }
            }
          }
        }
      }
      catch (IncorrectOperationException e) {
        return;
      }

      // Recurse into the arguments themselves (skipping the outer cast nodes).
      for (PsiExpression arg : args) {
        if (arg instanceof PsiTypeCastExpression) {
          PsiExpression castOperand = ((PsiTypeCastExpression)arg).getOperand();
          if (castOperand != null) {
            castOperand.accept(this);
          }
        }
        else {
          arg.accept(this);
        }
      }
    }

    /**
     * Copies the whole call, strips one cast inside the copied lambda body (located
     * via {@code computeCastExpression}), and keeps the original cast only if the
     * functional interface and its return type still match.
     */
    private void processLambdaReturnExpression(PsiCall expression,
                                               int i,
                                               PsiType interfaceType,
                                               PsiTypeCastExpression returnExpression,
                                               int returnExprIdx,
                                               Function<? super PsiExpression, ? extends PsiTypeCastExpression> computeCastExpression) {
      final PsiCall newCall = LambdaUtil.copyTopLevelCall(expression);
      if (newCall == null) return;
      final PsiExpressionList newArgsList = newCall.getArgumentList();
      LOG.assertTrue(newArgsList != null);
      final PsiExpression[] newArgs = newArgsList.getExpressions();
      final PsiLambdaExpression lambdaExpression = (PsiLambdaExpression)deparenthesizeExpression(newArgs[i]);
      LOG.assertTrue(lambdaExpression != null, newCall);
      final PsiExpression newReturnExpression = deparenthesizeExpression(LambdaUtil.getReturnExpressions(lambdaExpression).get(returnExprIdx));
      PsiTypeCastExpression castExpression = computeCastExpression.fun(newReturnExpression);
      PsiExpression castOperand = castExpression.getOperand();
      if (castOperand == null) return;
      castOperand = (PsiExpression)castExpression.replace(castOperand);
      final PsiType functionalInterfaceType = lambdaExpression.getFunctionalInterfaceType();
      if (interfaceType.equals(functionalInterfaceType)) {
        final PsiType interfaceReturnType = LambdaUtil.getFunctionalInterfaceReturnType(interfaceType);
        final PsiType castExprType = castOperand.getType();
        if (interfaceReturnType != null && castExprType != null && interfaceReturnType.isAssignableFrom(castExprType)) {
          addToResults(returnExpression);
        }
      }
    }

    @Override
    public void visitTypeCastExpression(PsiTypeCastExpression typeCast) {
      PsiExpression operand = typeCast.getOperand();
      if (operand == null) return;

      PsiExpression expr = deparenthesizeExpression(operand);
      final PsiType topCastType = typeCast.getType();
      if (expr instanceof PsiTypeCastExpression) {
        // (A)(B)x — the inner cast is redundant when x converts directly to A.
        PsiTypeElement typeElement = ((PsiTypeCastExpression)expr).getCastType();
        if (typeElement == null) return;
        PsiType castType = typeElement.getType();
        final PsiExpression innerOperand = ((PsiTypeCastExpression)expr).getOperand();
        final PsiType operandType = innerOperand != null ? innerOperand.getType() : null;
        if (!(castType instanceof PsiPrimitiveType) && !(topCastType instanceof PsiPrimitiveType)) {
          if (operandType != null && topCastType != null && TypeConversionUtil.areTypesConvertible(operandType, topCastType)) {
            addToResults((PsiTypeCastExpression)expr);
          }
        }
        else if (Comparing.equal(PsiPrimitiveType.getUnboxedType(operandType), topCastType)) {
          addToResults((PsiTypeCastExpression)expr);
        }
      }
      else {
        PsiElement parent = typeCast.getParent();
        if (parent instanceof PsiConditionalExpression) {
          //branches need to be of the same type
          final PsiType operandType = operand.getType();
          final PsiType conditionalType = ((PsiConditionalExpression)parent).getType();
          if (!Comparing.equal(operandType, conditionalType)) {
            if (!PsiUtil.isLanguageLevel5OrHigher(typeCast)) {
              return;
            }
            if (!checkResolveAfterRemoveCast(parent)) return;
            final PsiExpression thenExpression = ((PsiConditionalExpression)parent).getThenExpression();
            final PsiExpression elseExpression = ((PsiConditionalExpression)parent).getElseExpression();
            final PsiExpression opposite = thenExpression == typeCast ? elseExpression : thenExpression;
            if (opposite == null || conditionalType instanceof PsiPrimitiveType && !Comparing.equal(conditionalType, opposite.getType())) return;
          }
        }
        else if (parent instanceof PsiSynchronizedStatement && expr != null && (expr.getType() instanceof PsiPrimitiveType || expr instanceof PsiFunctionalExpression)) {
          // synchronized needs a reference-typed monitor; the cast is semantic here.
          return;
        }
        else if (expr instanceof PsiLambdaExpression || expr instanceof PsiMethodReferenceExpression) {
          if (parent instanceof PsiParenthesizedExpression &&
              PsiUtil.skipParenthesizedExprUp(parent.getParent()) instanceof PsiReferenceExpression) {
            return;
          }
          final PsiType functionalInterfaceType = PsiTypesUtil.getExpectedTypeByParent(typeCast);
          //noinspection SuspiciousNameCombination
          if (topCastType != null && functionalInterfaceType != null && !TypeConversionUtil.isAssignable(topCastType, functionalInterfaceType, false)) return;
        }
        processAlreadyHasTypeCast(typeCast);
      }
      super.visitTypeCastExpression(typeCast);
    }

    /**
     * Ensures that removing a cast inside a conditional used as a call argument
     * does not change which method the enclosing call resolves to.
     */
    private static boolean checkResolveAfterRemoveCast(PsiElement parent) {
      PsiElement grandPa = parent.getParent();
      if (grandPa instanceof PsiExpressionList) {
        PsiExpression[] expressions = ((PsiExpressionList)grandPa).getExpressions();
        int idx = ArrayUtil.find(expressions, parent);
        PsiElement grandGrandPa = grandPa.getParent();
        if (grandGrandPa instanceof PsiCall) {
          PsiMethod resolve = ((PsiCall)grandGrandPa).resolveMethod();
          if (resolve != null) {
            PsiCall expression = LambdaUtil.copyTopLevelCall((PsiCall)grandGrandPa);
            if (expression == null) return false;
            PsiExpressionList argumentList = expression.getArgumentList();
            LOG.assertTrue(argumentList != null);
            PsiExpression toReplace = argumentList.getExpressions()[idx];
            if (toReplace instanceof PsiConditionalExpression) {
              PsiExpression thenExpression = ((PsiConditionalExpression)toReplace).getThenExpression();
              PsiExpression elseExpression = ((PsiConditionalExpression)toReplace).getElseExpression();
              if (thenExpression instanceof PsiTypeCastExpression) {
                final PsiExpression thenOperand =
((PsiTypeCastExpression)thenExpression).getOperand();
                if (thenOperand != null) {
                  thenExpression.replace(thenOperand);
                }
              }
              else if (elseExpression instanceof PsiTypeCastExpression) {
                final PsiExpression elseOperand = ((PsiTypeCastExpression)elseExpression).getOperand();
                if (elseOperand != null) {
                  elseExpression.replace(elseOperand);
                }
              }
            }
            // The copy without the cast must still resolve to the same method.
            if (expression.resolveMethod() != resolve) {
              return false;
            }
          }
        }
      }
      return true;
    }

    /** Handles a cast whose context is already known (not an argument/return position). */
    private void processAlreadyHasTypeCast(PsiTypeCastExpression typeCast){
      PsiElement parent = typeCast.getParent();
      while(parent instanceof PsiParenthesizedExpression) parent = parent.getParent();
      if (parent instanceof PsiExpressionList) return; // do not replace in arg lists - should be handled by parent
      if (parent instanceof PsiReturnStatement) return;
      if (parent instanceof PsiTypeCastExpression) return;
      if (parent instanceof PsiLambdaExpression) return;
      if (parent instanceof PsiConditionalExpression) {
        PsiElement gParent = PsiUtil.skipParenthesizedExprUp(parent.getParent());
        if (gParent instanceof PsiLambdaExpression) return;
        if (gParent instanceof PsiReturnStatement &&
            PsiTreeUtil.getParentOfType(gParent, PsiMethod.class, PsiLambdaExpression.class) instanceof PsiLambdaExpression) return;
      }

      if (isTypeCastSemantic(typeCast)) return;

      PsiTypeElement typeElement = typeCast.getCastType();
      if (typeElement == null) return;
      final PsiType castTo = typeElement.getType();
      final PsiExpression operand = typeCast.getOperand();
      PsiType opType = operand.getType();
      final PsiType expectedTypeByParent = PsiTypesUtil.getExpectedTypeByParent(typeCast);
      if (expectedTypeByParent != null) {
        try {
          // Re-type the operand inside a synthetic declaration of the expected type
          // to find out what type it would get without the cast.
          final Project project = operand.getProject();
          final String uniqueVariableName = JavaCodeStyleManager.getInstance(project).suggestUniqueVariableName("l", parent, false);
          final PsiDeclarationStatement declarationStatement = (PsiDeclarationStatement)JavaPsiFacade.getElementFactory(project).createStatementFromText(
            expectedTypeByParent.getCanonicalText() + " " + uniqueVariableName + " = " + operand.getText() + ";", parent);
          final PsiExpression initializer = ((PsiLocalVariable)declarationStatement.getDeclaredElements()[0]).getInitializer();
          LOG.assertTrue(initializer != null, operand.getText());
          opType = initializer.getType();
          if (opType != null) {
            final PsiExpression expr = PsiUtil.skipParenthesizedExprDown(operand);
            if (expr instanceof PsiConditionalExpression) {
              if (!isApplicableForConditionalBranch(opType, ((PsiConditionalExpression)expr).getThenExpression())) return;
              if (!isApplicableForConditionalBranch(opType, ((PsiConditionalExpression)expr).getElseExpression())) return;
            }
          }
        }
        catch (IncorrectOperationException ignore) {}
      }
      if (opType == null) return;

      if (parent instanceof PsiReferenceExpression) {
        if (castTo instanceof PsiClassType && opType instanceof PsiPrimitiveType) return; //explicit boxing
        //Check accessibility
        if (opType instanceof PsiClassType) {
          final PsiReferenceExpression refExpression = (PsiReferenceExpression)parent;
          PsiElement element = refExpression.resolve();
          if (!(element instanceof PsiMember)) return;
          PsiClass accessClass = ((PsiClassType)opType).resolve();
          if (accessClass == null) return;
          if (!JavaPsiFacade.getInstance(parent.getProject()).getResolveHelper().isAccessible((PsiMember)element, typeCast, accessClass)) return;
          if (!isCastRedundantInRefExpression(refExpression, operand)) return;
        }
      }

      if (parent instanceof PsiConditionalExpression) {
        if (castTo instanceof PsiClassType && opType instanceof PsiPrimitiveType && opType != PsiType.NULL) {
          final PsiExpression thenExpression = ((PsiConditionalExpression)parent).getThenExpression();
          final PsiExpression elseExpression = ((PsiConditionalExpression)parent).getElseExpression();
          final PsiExpression opposite = PsiTreeUtil.isAncestor(thenExpression, typeCast, false) ?
elseExpression : thenExpression;
          // Keep a boxing cast when the other branch is a reference type and no
          // primitive result is expected from the conditional.
          if (opposite != null && !(opposite.getType() instanceof PsiPrimitiveType) &&
              !(PsiTypesUtil.getExpectedTypeByParent(parent) instanceof PsiPrimitiveType)) {
            return;
          }
        }
        if (operand instanceof PsiFunctionalExpression && !castTo.equals(PsiTypesUtil.getExpectedTypeByParent(parent))) {
          return;
        }
      }

      if (arrayAccessAtTheLeftSideOfAssignment(parent, typeCast)) {
        if (TypeConversionUtil.isAssignable(opType, castTo, false) && opType.getArrayDimensions() == castTo.getArrayDimensions()) {
          addToResults(typeCast);
        }
      }
      else {
        if (parent instanceof PsiInstanceOfExpression && opType instanceof PsiPrimitiveType) {
          return;
        }
        if (parent instanceof PsiForeachStatement) {
          // For-each: redundant when the uncasted iterable's element type is still
          // assignable to the loop variable.
          final PsiClassType.ClassResolveResult castResolveResult = PsiUtil.resolveGenericsClassInType(opType);
          final PsiClass psiClass = castResolveResult.getElement();
          if (psiClass != null) {
            final PsiClass iterableClass = JavaPsiFacade.getInstance(parent.getProject()).findClass(CommonClassNames.JAVA_LANG_ITERABLE, psiClass.getResolveScope());
            if (iterableClass != null && InheritanceUtil.isInheritorOrSelf(psiClass, iterableClass, true)) {
              final PsiTypeParameter[] iterableTypeParameters = iterableClass.getTypeParameters();
              if (iterableTypeParameters.length == 1) {
                final PsiType resultedParamType = TypeConversionUtil.getSuperClassSubstitutor(iterableClass, psiClass, castResolveResult.getSubstitutor()).substitute(iterableTypeParameters[0]);
                if (resultedParamType != null &&
                    TypeConversionUtil.isAssignable(((PsiForeachStatement)parent).getIterationParameter().getType(), resultedParamType)) {
                  addToResults(typeCast);
                  return;
                }
              }
            }
          }
          else {
            return;
          }
        }
        if (parent instanceof PsiThrowStatement) {
          final PsiClass thrownClass = PsiUtil.resolveClassInType(opType);
          if (InheritanceUtil.isInheritor(thrownClass, false, CommonClassNames.JAVA_LANG_RUNTIME_EXCEPTION)) {
            addToResults(typeCast);
            return;
          }
          if (InheritanceUtil.isInheritor(thrownClass, false, CommonClassNames.JAVA_LANG_THROWABLE)) {
            final PsiMethod method = PsiTreeUtil.getParentOfType(parent, PsiMethod.class);
            if (method != null) {
              for (PsiClassType thrownType : method.getThrowsList().getReferencedTypes()) {
                if (TypeConversionUtil.isAssignable(thrownType, opType, false)) {
                  addToResults(typeCast);
                  return;
                }
              }
            }
          }
        }
        if (parent instanceof PsiInstanceOfExpression) {
          //15.20.2. Type Comparison Operator instanceof:
          //If a cast (p15.16) of the RelationalExpression to the ReferenceType would be rejected as a compile-time error,
          //then the instanceof relational expression likewise produces a compile-time error.
          final PsiTypeElement checkTypeElement = ((PsiInstanceOfExpression)parent).getCheckType();
          if (checkTypeElement != null && TypeConversionUtil.areTypesConvertible(opType, checkTypeElement.getType())) {
            addToResults(typeCast);
          }
        }
        else if (TypeConversionUtil.isAssignable(castTo, opType, false) &&
                 (expectedTypeByParent == null || TypeConversionUtil.isAssignable(expectedTypeByParent, opType, false))) {
          addToResults(typeCast);
        }
      }
    }

    /** A conditional branch is only acceptable if its type is assignable to {@code opType}. */
    private static boolean isApplicableForConditionalBranch(PsiType opType, PsiExpression thenExpression) {
      if (thenExpression != null) {
        final PsiType thenType = thenExpression.getType();
        if (thenType != null && !TypeConversionUtil.isAssignable(opType, thenType)) {
          return false;
        }
      }
      return true;
    }

    /** True when {@code element} sits inside an array-access LHS of an assignment (not its index). */
    private static boolean arrayAccessAtTheLeftSideOfAssignment(PsiElement parent, PsiElement element) {
      PsiAssignmentExpression assignment = PsiTreeUtil.getParentOfType(parent, PsiAssignmentExpression.class, false, PsiMember.class);
      if (assignment == null) return false;
      PsiExpression lExpression = assignment.getLExpression();
      return lExpression instanceof PsiArrayAccessExpression &&
             PsiTreeUtil.isAncestor(lExpression, parent, false) &&
             !isIndexExpression(element, (PsiArrayAccessExpression)lExpression);
    }

    private static boolean isIndexExpression(PsiElement element, PsiArrayAccessExpression arrayAccessExpression) {
      if (PsiTreeUtil.isAncestor(arrayAccessExpression.getIndexExpression(), element, false)) {
        return true;
      }
      PsiExpression arrayExpression = arrayAccessExpression.getArrayExpression();
      if (arrayExpression instanceof PsiArrayAccessExpression) {
        // Recurse through chained accesses like a[i][j].
        return isIndexExpression(element, (PsiArrayAccessExpression)arrayExpression);
      }
      return false;
    }
  }

  /** Checks that a qualifier reference still resolves to the same member without the cast. */
  private static boolean isCastRedundantInRefExpression (final PsiReferenceExpression refExpression, final PsiExpression castOperand) {
    if (refExpression.getParent() instanceof PsiMethodCallExpression) return false;
    final PsiElement resolved = refExpression.resolve();
    try {
      final PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(refExpression.getProject());
      final PsiExpression copyExpression = elementFactory.createExpressionFromText(refExpression.getText(), refExpression);
      if (copyExpression instanceof PsiReferenceExpression) {
        final PsiReferenceExpression copy = (PsiReferenceExpression)copyExpression;
        final PsiExpression qualifier = copy.getQualifierExpression();
        if (qualifier != null) {
          // Swap in the uncasted operand and compare the resolve result.
          qualifier.replace(castOperand);
          return copy.resolve() == resolved;
        }
      }
    }
    catch (IncorrectOperationException ignore) { }
    return false;
  }

  /** True when the cast has an actual runtime/semantic effect and must be kept. */
  private static boolean isTypeCastSemantic(PsiTypeCastExpression typeCast) {
    PsiExpression operand = typeCast.getOperand();
    if (operand == null) return false;

    if (isInPolymorphicCall(typeCast)) return true;

    PsiType opType = operand.getType();
    PsiTypeElement typeElement = typeCast.getCastType();
    if (typeElement == null) return false;

    PsiType castType = typeElement.getType();
    if (castType instanceof PsiPrimitiveType) {
      if (opType instanceof PsiPrimitiveType) {
        return !opType.equals(castType); // let's suppose all not equal primitive casts are necessary
      }
      final PsiPrimitiveType unboxedOpType = PsiPrimitiveType.getUnboxedType(opType);
      if (unboxedOpType != null && !unboxedOpType.equals(castType) ) {
        return true;
      }
    }
    else if (castType instanceof PsiClassType && ((PsiClassType)castType).hasParameters()) {
      // Casting a raw value to a parameterized type affects unchecked-warning semantics.
      if (opType instanceof PsiClassType && ((PsiClassType)opType).isRaw()) return true;
    }
    final
PsiExpression stripParenthesisOperand = PsiUtil.skipParenthesizedExprDown(operand); if (stripParenthesisOperand instanceof PsiFunctionalExpression) { if (isCastToSerializable(castType)) return true; } else if (stripParenthesisOperand instanceof PsiConditionalExpression) { final PsiExpression thenExpr = PsiUtil.skipParenthesizedExprDown(((PsiConditionalExpression)stripParenthesisOperand).getThenExpression()); final PsiExpression elseExpr = PsiUtil.skipParenthesizedExprDown(((PsiConditionalExpression)stripParenthesisOperand).getElseExpression()); if (thenExpr instanceof PsiFunctionalExpression || elseExpr instanceof PsiFunctionalExpression) { return true; } } PsiElement parent = typeCast.getParent(); while(parent instanceof PsiParenthesizedExpression) parent = parent.getParent(); if (parent instanceof PsiBinaryExpression) { PsiBinaryExpression expression = (PsiBinaryExpression)parent; PsiExpression firstOperand = expression.getLOperand(); PsiExpression otherOperand = expression.getROperand(); if (PsiTreeUtil.isAncestor(otherOperand, typeCast, false)) { PsiExpression temp = otherOperand; otherOperand = firstOperand; firstOperand = temp; } if (otherOperand != null && wrapperCastChangeSemantics(firstOperand, otherOperand, operand)) { return true; } } else if (parent instanceof PsiConditionalExpression) { if (opType instanceof PsiPrimitiveType && !(((PsiConditionalExpression)parent).getType() instanceof PsiPrimitiveType)) { if (PsiPrimitiveType.getUnboxedType(PsiTypesUtil.getExpectedTypeByParent(parent)) != null) { return true; } } } else if (parent instanceof PsiLocalVariable) { return ((PsiLocalVariable)parent).getTypeElement().isInferredType(); } return false; } private static boolean isCastToSerializable(PsiType castType) { return InheritanceUtil.isInheritor(castType, CommonClassNames.JAVA_IO_SERIALIZABLE); } private static boolean wrapperCastChangeSemantics(PsiExpression operand, PsiExpression otherOperand, PsiExpression toCast) { final boolean 
isPrimitiveComparisonWithCast; final boolean isPrimitiveComparisonWithoutCast; if (TypeConversionUtil.isPrimitiveAndNotNull(otherOperand.getType())) { // IDEA-111450: A primitive comparison requires one primitive operand and one primitive or wrapper operand. isPrimitiveComparisonWithCast = TypeConversionUtil.isPrimitiveAndNotNullOrWrapper(operand.getType()); isPrimitiveComparisonWithoutCast = TypeConversionUtil.isPrimitiveAndNotNullOrWrapper(toCast.getType()); } else { // We do not check whether `otherOperand` is a wrapper, because a reference-to-primitive cast has a // side effect regardless of whether we end up doing a primitive or reference comparison. isPrimitiveComparisonWithCast = TypeConversionUtil.isPrimitiveAndNotNull(operand.getType()); isPrimitiveComparisonWithoutCast = TypeConversionUtil.isPrimitiveAndNotNull(toCast.getType()); } // wrapper casted to primitive vs wrapper comparison return isPrimitiveComparisonWithCast != isPrimitiveComparisonWithoutCast; } // see http://download.java.net/jdk7/docs/api/java/lang/invoke/MethodHandle.html#sigpoly public static boolean isInPolymorphicCall(final PsiTypeCastExpression typeCast) { if (!PsiUtil.isLanguageLevel7OrHigher(typeCast)) return false; // return type final PsiExpression operand = typeCast.getOperand(); if (operand instanceof PsiMethodCallExpression) { if (isPolymorphicMethod((PsiMethodCallExpression)operand)) return true; } // argument type final PsiElement exprList = typeCast.getParent(); if (exprList instanceof PsiExpressionList) { final PsiElement methodCall = exprList.getParent(); if (methodCall instanceof PsiMethodCallExpression) { if (isPolymorphicMethod((PsiMethodCallExpression)methodCall)) return true; } } return false; } private static boolean isPolymorphicMethod(PsiMethodCallExpression expression) { final PsiElement method = expression.getMethodExpression().resolve(); return method instanceof PsiMethod && AnnotationUtil.isAnnotated((PsiMethod)method, 
CommonClassNames.JAVA_LANG_INVOKE_MH_POLYMORPHIC, 0); } }
/* * Copyright 2004,2005 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.axis2.transport.jms; import org.apache.axiom.om.OMElement; import org.apache.axis2.AxisFault; import org.apache.axis2.description.Parameter; import org.apache.axis2.description.ParameterIncludeImpl; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import javax.jms.*; import javax.naming.Context; import javax.naming.InitialContext; import javax.naming.NamingException; import java.util.Hashtable; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; /** * Encapsulate a JMS Connection factory definition within an Axis2.xml * * JMS Connection Factory definitions, allows JNDI properties as well as other service * level parameters to be defined, and re-used by each service that binds to it * * When used for sending messages out, the JMSConnectionFactory'ies are able to cache * a Connection, Session or Producer */ public class JMSConnectionFactory { private static final Log log = LogFactory.getLog(JMSConnectionFactory.class); /** The name used for the connection factory definition within Axis2 */ private String name = null; /** The list of parameters from the axis2.xml definition */ private Hashtable<String, String> parameters = new Hashtable<String, String>(); /** The cached InitialContext reference */ private Context context = null; /** The JMS ConnectionFactory this definition refers to */ private 
ConnectionFactory conFactory = null; /** The shared JMS Connection for this JMS connection factory */ private Connection sharedConnection = null; /** The shared JMS Session for this JMS connection factory */ private Session sharedSession = null; /** The shared JMS MessageProducer for this JMS connection factory */ private MessageProducer sharedProducer = null; /** The Shared Destination */ private Destination sharedDestination = null; /** The shared JMS connection for this JMS connection factory */ private int cacheLevel = JMSConstants.CACHE_CONNECTION; private Map<Integer, Connection> sharedConnectionMap = new ConcurrentHashMap<Integer,Connection>(); private int maxSharedConnectionCount = 10; private int lastReturnedConnectionIndex = 0; /** * Digest a JMS CF definition from an axis2.xml 'Parameter' and construct * @param parameter the axis2.xml 'Parameter' that defined the JMS CF */ public JMSConnectionFactory(Parameter parameter) { this.name = parameter.getName(); ParameterIncludeImpl pi = new ParameterIncludeImpl(); try { pi.deserializeParameters((OMElement) parameter.getValue()); } catch (AxisFault axisFault) { handleException("Error reading parameters for JMS connection factory" + name, axisFault); } for (Object o : pi.getParameters()) { Parameter p = (Parameter) o; parameters.put(p.getName(), (String) p.getValue()); } digestCacheLevel(); try { context = new InitialContext(parameters); conFactory = JMSUtils.lookup(context, ConnectionFactory.class, parameters.get(JMSConstants.PARAM_CONFAC_JNDI_NAME)); if (parameters.get(JMSConstants.PARAM_DESTINATION) != null) { sharedDestination = JMSUtils.lookup(context, Destination.class, parameters.get(JMSConstants.PARAM_DESTINATION)); } log.info("JMS ConnectionFactory : " + name + " initialized"); } catch (NamingException e) { throw new AxisJMSException("Cannot acquire JNDI context, JMS Connection factory : " + parameters.get(JMSConstants.PARAM_CONFAC_JNDI_NAME) + " or default destination : " + 
parameters.get(JMSConstants.PARAM_DESTINATION) + " for JMS CF : " + name + " using : " + parameters, e); } setMaxSharedJMSConnectionsCount(); } /** * Digest, the cache value iff specified */ private void digestCacheLevel() { String key = JMSConstants.PARAM_CACHE_LEVEL; String val = parameters.get(key); if ("none".equalsIgnoreCase(val)) { this.cacheLevel = JMSConstants.CACHE_NONE; } else if ("connection".equalsIgnoreCase(val)) { this.cacheLevel = JMSConstants.CACHE_CONNECTION; } else if ("session".equals(val)){ this.cacheLevel = JMSConstants.CACHE_SESSION; } else if ("producer".equals(val)) { this.cacheLevel = JMSConstants.CACHE_PRODUCER; } else if ("consumer".equals(val)) { this.cacheLevel = JMSConstants.CACHE_CONSUMER; } else if (val != null) { throw new AxisJMSException("Invalid cache level : " + val + " for JMS CF : " + name); } } private void setMaxSharedJMSConnectionsCount(){ if(parameters.get(JMSConstants.MAX_JMS_CONNECTIONS_) != null){ String maxConnectionCount = parameters.get(JMSConstants.MAX_JMS_CONNECTIONS_); try { int maxCount = Integer.parseInt(maxConnectionCount); if(maxCount > 0){ this.maxSharedConnectionCount = maxCount; log.info("---- Max Shared JMS Connection Count Set to "+ maxSharedConnectionCount); } } catch (NumberFormatException e) { this.maxSharedConnectionCount = 10; log.error("Error in setting up the max shared jms connection count. Setting it to default value 10 ", e); } } } /** * Close all connections, sessions etc.. 
and stop this connection factory */ public synchronized void stop() { if (sharedConnection != null) { try { sharedConnection.close(); } catch (JMSException e) { log.warn("Error shutting down connection factory : " + name, e); } } if (context != null) { try { context.close(); } catch (NamingException e) { log.warn("Error while closing the InitialContext of factory : " + name, e); } } } /** * Return the name assigned to this JMS CF definition * @return name of the JMS CF */ public String getName() { return name; } /** * The list of properties (including JNDI and non-JNDI) * @return properties defined on the JMS CF */ public Hashtable<String, String> getParameters() { return parameters; } /** * Get cached InitialContext * @return cache InitialContext */ public Context getContext() { return context; } /** * Cache level applicable for this JMS CF * @return applicable cache level */ public int getCacheLevel() { return cacheLevel; } /** * Get the shared Destination - if defined * @return */ public Destination getSharedDestination() { return sharedDestination; } /** * Lookup a Destination using this JMS CF definitions and JNDI name * @param destinationName JNDI name of the Destionation * @param destinationType looking up destination type * @return JMS Destination for the given JNDI name or null */ public Destination getDestination(String destinationName, String destinationType) { try { return JMSUtils.lookupDestination(context, destinationName, destinationType); } catch (NamingException e) { handleException("Error looking up the JMS destination with name " + destinationName + " of type " + destinationType, e); } // never executes but keeps the compiler happy return null; } /** * Get the reply Destination from the PARAM_REPLY_DESTINATION parameter * @return reply destination defined in the JMS CF */ public String getReplyToDestination() { return parameters.get(JMSConstants.PARAM_REPLY_DESTINATION); } /** * Get the reply destination type from the PARAM_REPLY_DEST_TYPE 
parameter * @return reply destination defined in the JMS CF */ public String getReplyDestinationType() { return parameters.get(JMSConstants.PARAM_REPLY_DEST_TYPE) != null ? parameters.get(JMSConstants.PARAM_REPLY_DEST_TYPE) : JMSConstants.DESTINATION_TYPE_GENERIC; } private void handleException(String msg, Exception e) { log.error(msg, e); throw new AxisJMSException(msg, e); } /** * Should the JMS 1.1 API be used? - defaults to yes * @return true, if JMS 1.1 api should be used */ public boolean isJmsSpec11() { return parameters.get(JMSConstants.PARAM_JMS_SPEC_VER) == null || "1.1".equals(parameters.get(JMSConstants.PARAM_JMS_SPEC_VER)); } /** * Return the type of the JMS CF Destination * @return TRUE if a Queue, FALSE for a Topic and NULL for a JMS 1.1 Generic Destination */ public Boolean isQueue() { if (parameters.get(JMSConstants.PARAM_CONFAC_TYPE) == null && parameters.get(JMSConstants.PARAM_DEST_TYPE) == null) { return null; } if (parameters.get(JMSConstants.PARAM_CONFAC_TYPE) != null) { if ("queue".equalsIgnoreCase(parameters.get(JMSConstants.PARAM_CONFAC_TYPE))) { return true; } else if ("topic".equalsIgnoreCase(parameters.get(JMSConstants.PARAM_CONFAC_TYPE))) { return false; } else { throw new AxisJMSException("Invalid " + JMSConstants.PARAM_CONFAC_TYPE + " : " + parameters.get(JMSConstants.PARAM_CONFAC_TYPE) + " for JMS CF : " + name); } } else { if ("queue".equalsIgnoreCase(parameters.get(JMSConstants.PARAM_DEST_TYPE))) { return true; } else if ("topic".equalsIgnoreCase(parameters.get(JMSConstants.PARAM_DEST_TYPE))) { return false; } else { throw new AxisJMSException("Invalid " + JMSConstants.PARAM_DEST_TYPE + " : " + parameters.get(JMSConstants.PARAM_DEST_TYPE) + " for JMS CF : " + name); } } } /** * Is a session transaction requested from users of this JMS CF? * @return session transaction required by the clients of this? 
*/ private boolean isSessionTransacted() { return parameters.get(JMSConstants.PARAM_SESSION_TRANSACTED) != null && Boolean.valueOf(parameters.get(JMSConstants.PARAM_SESSION_TRANSACTED)); } private boolean isDurable() { if (parameters.get(JMSConstants.PARAM_SUB_DURABLE) != null) { return Boolean.valueOf(parameters.get(JMSConstants.PARAM_SUB_DURABLE)); } return false; } private String getClientId() { return parameters.get(JMSConstants.PARAM_DURABLE_SUB_CLIENT_ID); } /** * Create a new Connection * @return a new Connection */ private Connection createConnection() { Connection connection = null; try { connection = JMSUtils.createConnection( conFactory, parameters.get(JMSConstants.PARAM_JMS_USERNAME), parameters.get(JMSConstants.PARAM_JMS_PASSWORD), isJmsSpec11(), isQueue(), isDurable(), getClientId()); if (log.isDebugEnabled()) { log.debug("New JMS Connection from JMS CF : " + name + " created"); } } catch (JMSException e) { handleException("Error acquiring a Connection from the JMS CF : " + name + " using properties : " + parameters, e); } return connection; } /** * Create a new Session * @param connection Connection to use * @return A new Session */ private Session createSession(Connection connection) { try { if (log.isDebugEnabled()) { log.debug("Creating a new JMS Session from JMS CF : " + name); } return JMSUtils.createSession( connection, isSessionTransacted(), Session.AUTO_ACKNOWLEDGE, isJmsSpec11(), isQueue()); } catch (JMSException e) { handleException("Error creating JMS session from JMS CF : " + name, e); } return null; } /** * Create a new MessageProducer * @param session Session to be used * @param destination Destination to be used * @return a new MessageProducer */ private MessageProducer createProducer(Session session, Destination destination) { try { if (log.isDebugEnabled()) { log.debug("Creating a new JMS MessageProducer from JMS CF : " + name); } return JMSUtils.createProducer( session, destination, isQueue(), isJmsSpec11()); } catch (JMSException 
e) { handleException("Error creating JMS producer from JMS CF : " + name,e); } return null; } /** * Get a new Connection or shared Connection from this JMS CF * @return new or shared Connection from this JMS CF */ public Connection getConnection() { if (cacheLevel > JMSConstants.CACHE_NONE) { return getSharedConnection(); } else { return createConnection(); } } /** * Get a new Session or shared Session from this JMS CF * @param connection the Connection to be used * @return new or shared Session from this JMS CF */ public Session getSession(Connection connection) { if (cacheLevel > JMSConstants.CACHE_CONNECTION) { return getSharedSession(); } else { return createSession((connection == null ? getConnection() : connection)); } } /** * Get a new MessageProducer or shared MessageProducer from this JMS CF * @param connection the Connection to be used * @param session the Session to be used * @param destination the Destination to bind MessageProducer to * @return new or shared MessageProducer from this JMS CF */ public MessageProducer getMessageProducer( Connection connection, Session session, Destination destination) { if (cacheLevel > JMSConstants.CACHE_SESSION) { return getSharedProducer(); } else { return createProducer((session == null ? 
getSession(connection) : session), destination); } } /** * Get a new Connection or shared Connection from this JMS CF * @return new or shared Connection from this JMS CF */ private synchronized Connection getSharedConnection() { Connection connection = sharedConnectionMap.get(lastReturnedConnectionIndex); if (connection == null) { connection = createConnection(); sharedConnectionMap.put(lastReturnedConnectionIndex, connection); } lastReturnedConnectionIndex++; if (lastReturnedConnectionIndex >= maxSharedConnectionCount) { lastReturnedConnectionIndex = 0; } return connection; } /** * Get a shared Session from this JMS CF * @return shared Session from this JMS CF */ private synchronized Session getSharedSession() { if (sharedSession == null) { sharedSession = createSession(getSharedConnection()); if (log.isDebugEnabled()) { log.debug("Created shared JMS Session for JMS CF : " + name); } } return sharedSession; } /** * Get a shared MessageProducer from this JMS CF * @return shared MessageProducer from this JMS CF */ private synchronized MessageProducer getSharedProducer() { if (sharedProducer == null) { sharedProducer = createProducer(getSharedSession(), sharedDestination); if (log.isDebugEnabled()) { log.debug("Created shared JMS MessageConsumer for JMS CF : " + name); } } return sharedProducer; } }
package network;

import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.ServerSocket;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Observable;
import java.util.Observer;
import java.util.SortedSet;
import java.util.TreeSet;

import lang.Lang;
import network.message.FindMyselfMessage;
import network.message.Message;
import network.message.MessageFactory;
import settings.Settings;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import utils.ObserverMessage;
import controller.Controller;

/**
 * Peer-to-peer network hub for this node: tracks connected peers, deduplicates
 * block/transaction messages, dispatches incoming messages, and broadcasts outgoing ones.
 * Observers are notified (via java.util.Observable) on every peer add/remove.
 *
 * Thread-safety: connectedPeers and handledMessages are guarded by synchronized blocks on
 * the collections themselves; several iterations additionally swallow concurrency
 * exceptions as a best-effort fallback (kept as-is — see inline comments).
 */
public class Network extends Observable implements ConnectionCallback {

	private static final Logger LOGGER = LogManager.getLogger(Network.class);

	public static final int MAINNET_PORT = 9084;
	public static final int TESTNET_PORT = 4809;
	// Cap on the dedup set; oldest (lexicographically first) entry is evicted past this.
	private static final int MAX_HANDLED_MESSAGES_SIZE = 10000;

	private ConnectionCreator creator;
	private ConnectionAcceptor acceptor;
	private List<Peer> connectedPeers;
	// Hashes (as raw-byte Strings) of already-processed BLOCK/TRANSACTION messages.
	private SortedSet<String> handledMessages;
	// Cleared by stop(); onMessage() ignores traffic once false.
	private boolean run;

	public Network() {
		this.connectedPeers = new ArrayList<Peer>();
		this.run = true;

		this.start();
	}

	// Spawns the outbound-connection and inbound-accept threads.
	private void start() {
		this.handledMessages = Collections.synchronizedSortedSet(new TreeSet<String>());

		// START ConnectionCreator THREAD
		creator = new ConnectionCreator(this);
		creator.start();

		// START ConnectionAcceptor THREAD
		acceptor = new ConnectionAcceptor(this);
		acceptor.start();
	}

	// Callback for a newly established peer connection: register it, whitelist it,
	// inform the controller, then notify observers (ADD then full LIST).
	@Override
	public void onConnect(Peer peer) {
		LOGGER.debug(Lang.getInstance().translate("Connection successful: ") + peer.getAddress());

		// ADD TO CONNECTED PEERS
		synchronized (this.connectedPeers) {
			this.connectedPeers.add(peer);
		}

		// ADD TO WHITELIST
		PeerManager.getInstance().addPeer(peer);

		// PASS TO CONTROLLER
		Controller.getInstance().onConnect(peer);

		// NOTIFY OBSERVERS
		this.setChanged();
		this.notifyObservers(new ObserverMessage(ObserverMessage.ADD_PEER_TYPE, peer));

		this.setChanged();
		this.notifyObservers(new ObserverMessage(ObserverMessage.LIST_PEER_TYPE, this.connectedPeers));
	}

	// Callback for a cleanly closed connection: deregister, inform controller, ensure the
	// socket is closed, then notify observers (REMOVE then full LIST).
	@Override
	public void onDisconnect(Peer peer) {
		LOGGER.info(Lang.getInstance().translate("Connection closed: ") + peer.getAddress());

		// REMOVE FROM CONNECTED PEERS
		synchronized (this.connectedPeers) {
			this.connectedPeers.remove(peer);
		}

		// PASS TO CONTROLLER
		Controller.getInstance().onDisconnect(peer);

		// CLOSE CONNECTION IF STILL ACTIVE
		peer.close();

		// NOTIFY OBSERVERS
		this.setChanged();
		this.notifyObservers(new ObserverMessage(ObserverMessage.REMOVE_PEER_TYPE, peer));

		this.setChanged();
		this.notifyObservers(new ObserverMessage(ObserverMessage.LIST_PEER_TYPE, this.connectedPeers));
	}

	// Callback for a connection error: same teardown as onDisconnect, but routed through
	// Controller.onError; blacklisting is intentionally disabled (commented out).
	@Override
	public void onError(Peer peer, String error) {
		LOGGER.warn(Lang.getInstance().translate("Connection error: ") + peer.getAddress() + " : " + error);

		// REMOVE FROM CONNECTED PEERS
		synchronized (this.connectedPeers) {
			this.connectedPeers.remove(peer);
		}

		// ADD TO BLACKLIST
		// PeerManager.getInstance().blacklistPeer(peer);

		// PASS TO CONTROLLER
		Controller.getInstance().onError(peer);

		// CLOSE CONNECTION IF STILL ACTIVE
		peer.close();

		// NOTIFY OBSERVERS
		this.setChanged();
		this.notifyObservers(new ObserverMessage(ObserverMessage.REMOVE_PEER_TYPE, peer));

		this.setChanged();
		this.notifyObservers(new ObserverMessage(ObserverMessage.LIST_PEER_TYPE, this.connectedPeers));
	}

	// True when any connected peer reports the given address.
	@Override
	public boolean isConnectedTo(InetAddress address) {
		try {
			synchronized (this.connectedPeers) {
				// FOR ALL connectedPeers
				for (Peer connectedPeer : this.connectedPeers) {
					// CHECK IF ADDRESS IS THE SAME
					if (address.equals(connectedPeer.getAddress()))
						return true;
				}
			}
		} catch (Exception e) {
			// CONCURRENCY ERROR
		}
		return false;
	}

	@Override
	public boolean isConnectedTo(Peer peer) {
		return this.isConnectedTo(peer.getAddress());
	}

	// NOTE(review): returns the live internal list, not a copy — callers share mutations.
	@Override
	public List<Peer> getActiveConnections() {
		return this.connectedPeers;
	}

	// Records a message hash as handled, evicting one entry when the set is at capacity.
	// NOTE(review): new String(hash) uses the platform default charset — assumed
	// consistent with the matching lookup in onMessage(); verify if hashes can contain
	// non-ASCII bytes.
	private void addHandledMessage(byte[] hash) {
		try {
			synchronized (this.handledMessages) {
				// CHECK IF LIST IS FULL
				if (this.handledMessages.size() > MAX_HANDLED_MESSAGES_SIZE)
					this.handledMessages.remove(this.handledMessages.first());

				this.handledMessages.add(new String(hash));
			}
		} catch (Exception e) {
			LOGGER.error(e.getMessage(), e);
		}
	}

	// Central inbound dispatch: dedup BLOCK/TRANSACTION traffic, answer PING and
	// GET_PEERS locally, detect self-connections, and forward everything else to the
	// Controller.
	@Override
	public void onMessage(Message message) {
		// CHECK IF WE ARE STILL PROCESSING MESSAGES
		if (!this.run)
			return;

		// ONLY HANDLE BLOCK AND TRANSACTION MESSAGES ONCE
		if (message.getType() == Message.TRANSACTION_TYPE || message.getType() == Message.BLOCK_TYPE) {
			synchronized (this.handledMessages) {
				// CHECK IF NOT HANDLED ALREADY
				if (this.handledMessages.contains(new String(message.getHash())))
					return;

				// ADD TO HANDLED MESSAGES
				this.addHandledMessage(message.getHash());
			}
		}

		switch (message.getType()) {
		// PING
		case Message.PING_TYPE:
			// CREATE PING
			Message response = MessageFactory.getInstance().createPingMessage();

			// SET ID
			response.setId(message.getId());

			// SEND BACK TO SENDER
			message.getSender().sendMessage(response);
			break;

		// GETPEERS
		case Message.GET_PEERS_TYPE:
			// CREATE NEW PEERS MESSAGE WITH PEERS
			Message answer = MessageFactory.getInstance().createPeersMessage(PeerManager.getInstance().getBestPeers());
			answer.setId(message.getId());

			// SEND TO SENDER
			message.getSender().sendMessage(answer);
			break;

		case Message.FIND_MYSELF_TYPE:
			FindMyselfMessage findMyselfMessage = (FindMyselfMessage) message;
			// A peer echoing our own ID means we connected to ourselves: drop the link.
			if (Arrays.equals(findMyselfMessage.getFoundMyselfID(), Controller.getInstance().getFoundMyselfID())) {
				LOGGER.info(Lang.getInstance().translate("Connected to self. Disconnection."));
				message.getSender().close();
			}
			break;

		// SEND TO CONTROLLER
		default:
			Controller.getInstance().onMessage(message);
			break;
		}
	}

	// Sends a message to every connected peer except those in the exclusion list.
	public void broadcast(Message message, List<Peer> exclude) {
		LOGGER.trace(Lang.getInstance().translate("Broadcasting") + " message type " + message.getType());

		try {
			synchronized (this.connectedPeers) {
				for (Peer peer : this.connectedPeers) {
					// Check exclusion list
					if (exclude.contains(peer))
						continue;

					peer.sendMessage(message);
				}
			}
		} catch (Exception e) {
			// Iterator fast-fail due to change in connectedPeers
		}

		LOGGER.trace(Lang.getInstance().translate("Broadcasting end") + " message type " + message.getType());
	}

	// New observers immediately receive the current peer list on registration.
	@Override
	public void addObserver(Observer o) {
		super.addObserver(o);

		// SEND CONNECTEDPEERS ON REGISTER
		o.update(this, new ObserverMessage(ObserverMessage.LIST_PEER_TYPE, this.connectedPeers));
	}

	// Probes availability by binding a ServerSocket; any failure (in use, denied) → false.
	public static boolean isPortAvailable(int port) {
		try {
			ServerSocket socket = new ServerSocket(port);
			socket.close();
			return true;
		} catch (Exception e) {
			return false;
		}
	}

	// Stops message processing, halts worker threads, then closes peers one at a time
	// until the list drains (peer.close() triggers removal via the callbacks above).
	public void stop() {
		this.run = false;
		this.acceptor.halt();
		this.creator.halt();

		while (this.connectedPeers.size() > 0) {
			try {
				this.connectedPeers.get(0).close();
			} catch (Exception e) {
				LOGGER.debug(e.getMessage(), e);
			}
		}
	}

	// True for loopback/wildcard addresses or anything Settings declares local.
	public static boolean isHostLocalAddress(InetAddress address) {
		// easy address checks first
		if (address.isLoopbackAddress() || address.isAnyLocalAddress())
			return true;

		return Settings.getInstance().isLocalAddress(address);
	}
}
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2014.09.07 at 08:01:35 PM IST // package com.mozu.qbintegration.model.qbmodel.allgen; import java.math.BigInteger; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for InventoryAdjustmentQueryRqType complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="InventoryAdjustmentQueryRqType"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;group ref="{}InventoryAdjustmentQuery"/> * &lt;/sequence> * &lt;attribute name="requestID" type="{}STRTYPE" /> * &lt;attribute name="metaData" default="NoMetaData"> * &lt;simpleType> * &lt;restriction base="{}STRTYPE"> * &lt;enumeration value="NoMetaData"/> * &lt;enumeration value="MetaDataOnly"/> * &lt;enumeration value="MetaDataAndResponseData"/> * &lt;/restriction> * &lt;/simpleType> * &lt;/attribute> * &lt;attribute name="iterator"> * &lt;simpleType> * &lt;restriction base="{}STRTYPE"> * &lt;enumeration value="Start"/> * &lt;enumeration value="Continue"/> * &lt;enumeration value="Stop"/> * &lt;/restriction> * &lt;/simpleType> * &lt;/attribute> * &lt;attribute name="iteratorID" type="{}STRTYPE" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "InventoryAdjustmentQueryRqType", propOrder = { "txnID", "refNumber", "refNumberCaseSensitive", "maxReturned", 
"modifiedDateRangeFilter", "txnDateRangeFilter", "entityFilter", "accountFilter", "itemFilter", "refNumberFilter", "refNumberRangeFilter", "includeLineItems", "includeRetElement", "ownerID" }) public class InventoryAdjustmentQueryRqType { @XmlElement(name = "TxnID") protected List<String> txnID; @XmlElement(name = "RefNumber") protected List<String> refNumber; @XmlElement(name = "RefNumberCaseSensitive") protected List<String> refNumberCaseSensitive; @XmlElement(name = "MaxReturned") protected BigInteger maxReturned; @XmlElement(name = "ModifiedDateRangeFilter") protected ModifiedDateRangeFilter modifiedDateRangeFilter; @XmlElement(name = "TxnDateRangeFilter") protected TxnDateRangeFilter txnDateRangeFilter; @XmlElement(name = "EntityFilter") protected EntityFilter entityFilter; @XmlElement(name = "AccountFilter") protected AccountFilter accountFilter; @XmlElement(name = "ItemFilter") protected ItemFilter itemFilter; @XmlElement(name = "RefNumberFilter") protected RefNumberFilter refNumberFilter; @XmlElement(name = "RefNumberRangeFilter") protected RefNumberRangeFilter refNumberRangeFilter; @XmlElement(name = "IncludeLineItems") protected String includeLineItems; @XmlElement(name = "IncludeRetElement") protected List<String> includeRetElement; @XmlElement(name = "OwnerID") protected List<String> ownerID; @XmlAttribute(name = "requestID") protected String requestID; @XmlAttribute(name = "metaData") protected String metaData; @XmlAttribute(name = "iterator") protected String iterator; @XmlAttribute(name = "iteratorID") protected String iteratorID; /** * Gets the value of the txnID property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the txnID property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getTxnID().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link String } * * */ public List<String> getTxnID() { if (txnID == null) { txnID = new ArrayList<String>(); } return this.txnID; } /** * Gets the value of the refNumber property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the refNumber property. * * <p> * For example, to add a new item, do as follows: * <pre> * getRefNumber().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link String } * * */ public List<String> getRefNumber() { if (refNumber == null) { refNumber = new ArrayList<String>(); } return this.refNumber; } /** * Gets the value of the refNumberCaseSensitive property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the refNumberCaseSensitive property. * * <p> * For example, to add a new item, do as follows: * <pre> * getRefNumberCaseSensitive().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link String } * * */ public List<String> getRefNumberCaseSensitive() { if (refNumberCaseSensitive == null) { refNumberCaseSensitive = new ArrayList<String>(); } return this.refNumberCaseSensitive; } /** * Gets the value of the maxReturned property. * * @return * possible object is * {@link BigInteger } * */ public BigInteger getMaxReturned() { return maxReturned; } /** * Sets the value of the maxReturned property. 
* * @param value * allowed object is * {@link BigInteger } * */ public void setMaxReturned(BigInteger value) { this.maxReturned = value; } /** * Gets the value of the modifiedDateRangeFilter property. * * @return * possible object is * {@link ModifiedDateRangeFilter } * */ public ModifiedDateRangeFilter getModifiedDateRangeFilter() { return modifiedDateRangeFilter; } /** * Sets the value of the modifiedDateRangeFilter property. * * @param value * allowed object is * {@link ModifiedDateRangeFilter } * */ public void setModifiedDateRangeFilter(ModifiedDateRangeFilter value) { this.modifiedDateRangeFilter = value; } /** * Gets the value of the txnDateRangeFilter property. * * @return * possible object is * {@link TxnDateRangeFilter } * */ public TxnDateRangeFilter getTxnDateRangeFilter() { return txnDateRangeFilter; } /** * Sets the value of the txnDateRangeFilter property. * * @param value * allowed object is * {@link TxnDateRangeFilter } * */ public void setTxnDateRangeFilter(TxnDateRangeFilter value) { this.txnDateRangeFilter = value; } /** * Gets the value of the entityFilter property. * * @return * possible object is * {@link EntityFilter } * */ public EntityFilter getEntityFilter() { return entityFilter; } /** * Sets the value of the entityFilter property. * * @param value * allowed object is * {@link EntityFilter } * */ public void setEntityFilter(EntityFilter value) { this.entityFilter = value; } /** * Gets the value of the accountFilter property. * * @return * possible object is * {@link AccountFilter } * */ public AccountFilter getAccountFilter() { return accountFilter; } /** * Sets the value of the accountFilter property. * * @param value * allowed object is * {@link AccountFilter } * */ public void setAccountFilter(AccountFilter value) { this.accountFilter = value; } /** * Gets the value of the itemFilter property. 
* * @return * possible object is * {@link ItemFilter } * */ public ItemFilter getItemFilter() { return itemFilter; } /** * Sets the value of the itemFilter property. * * @param value * allowed object is * {@link ItemFilter } * */ public void setItemFilter(ItemFilter value) { this.itemFilter = value; } /** * Gets the value of the refNumberFilter property. * * @return * possible object is * {@link RefNumberFilter } * */ public RefNumberFilter getRefNumberFilter() { return refNumberFilter; } /** * Sets the value of the refNumberFilter property. * * @param value * allowed object is * {@link RefNumberFilter } * */ public void setRefNumberFilter(RefNumberFilter value) { this.refNumberFilter = value; } /** * Gets the value of the refNumberRangeFilter property. * * @return * possible object is * {@link RefNumberRangeFilter } * */ public RefNumberRangeFilter getRefNumberRangeFilter() { return refNumberRangeFilter; } /** * Sets the value of the refNumberRangeFilter property. * * @param value * allowed object is * {@link RefNumberRangeFilter } * */ public void setRefNumberRangeFilter(RefNumberRangeFilter value) { this.refNumberRangeFilter = value; } /** * Gets the value of the includeLineItems property. * * @return * possible object is * {@link String } * */ public String getIncludeLineItems() { return includeLineItems; } /** * Sets the value of the includeLineItems property. * * @param value * allowed object is * {@link String } * */ public void setIncludeLineItems(String value) { this.includeLineItems = value; } /** * Gets the value of the includeRetElement property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the includeRetElement property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getIncludeRetElement().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link String } * * */ public List<String> getIncludeRetElement() { if (includeRetElement == null) { includeRetElement = new ArrayList<String>(); } return this.includeRetElement; } /** * Gets the value of the ownerID property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the ownerID property. * * <p> * For example, to add a new item, do as follows: * <pre> * getOwnerID().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link String } * * */ public List<String> getOwnerID() { if (ownerID == null) { ownerID = new ArrayList<String>(); } return this.ownerID; } /** * Gets the value of the requestID property. * * @return * possible object is * {@link String } * */ public String getRequestID() { return requestID; } /** * Sets the value of the requestID property. * * @param value * allowed object is * {@link String } * */ public void setRequestID(String value) { this.requestID = value; } /** * Gets the value of the metaData property. * * @return * possible object is * {@link String } * */ public String getMetaData() { if (metaData == null) { return "NoMetaData"; } else { return metaData; } } /** * Sets the value of the metaData property. * * @param value * allowed object is * {@link String } * */ public void setMetaData(String value) { this.metaData = value; } /** * Gets the value of the iterator property. * * @return * possible object is * {@link String } * */ public String getIterator() { return iterator; } /** * Sets the value of the iterator property. 
* * @param value * allowed object is * {@link String } * */ public void setIterator(String value) { this.iterator = value; } /** * Gets the value of the iteratorID property. * * @return * possible object is * {@link String } * */ public String getIteratorID() { return iteratorID; } /** * Sets the value of the iteratorID property. * * @param value * allowed object is * {@link String } * */ public void setIteratorID(String value) { this.iteratorID = value; } }
/*
 * Copyright (C) 2005-2008 Jive Software, 2022 Ignite Realtime Foundation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jivesoftware.openfire.pubsub;

import org.dom4j.Element;
import org.jivesoftware.openfire.XMPPServer;
import org.jivesoftware.openfire.pep.PEPServiceManager;
import org.jivesoftware.util.SAXReaderUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xmpp.packet.JID;

import java.io.Serializable;
import java.util.Date;
import java.util.Objects;

/**
 * A published item to a node. Once an item was published to a node, node subscribers will be
 * notified of the new published item. The item publisher may be allowed to delete published
 * items. After a published item was deleted node subscribers will get an event notification.<p>
 *
 * Published items may be persisted to the database depending on the node configuration.
 * Actually, even when the node is configured to not persist items the last published
 * item is going to be persisted to the database. The reason for this is that the node
 * may need to send the last published item to new subscribers.
 *
 * @author Matt Tucker
 */
public class PublishedItem implements Serializable {

    private static final Logger log = LoggerFactory.getLogger(PublishedItem.class);

    private static final long serialVersionUID = 7012925993623144574L;

    /**
     * JID of the entity that published the item to the node. This is the full JID
     * of the publisher.
     */
    private JID publisher;

    /**
     * The node where the item was published. Lazily re-resolved after
     * deserialization (transient), hence volatile for safe publication.
     */
    private volatile transient LeafNode node;

    /**
     * The id for the node where the item was published.
     */
    private String nodeId;

    /**
     * The id for the service hosting the node for this item
     */
    private String serviceId;

    /**
     * ID that uniquely identifies the published item in the node.
     */
    private String id;

    /**
     * The datetime when the item was published.
     */
    private Date creationDate;

    /**
     * The optional payload is included when publishing the item. This value
     * is created from the payload XML and cached as/when needed.
     */
    private volatile transient Element payload;

    /**
     * XML representation of the payload (for serialization)
     */
    private String payloadXML;

    /**
     * Creates a published item
     *
     * @param node The node the published item is created in
     * @param publisher The JID of the account creating the item
     * @param id The unique ID of the item
     * @param creationDate The date it was created
     */
    PublishedItem(LeafNode node, JID publisher, String id, Date creationDate) {
        this.node = node;
        this.nodeId = node.getUniqueIdentifier().getNodeId();
        this.serviceId = node.getUniqueIdentifier().getServiceIdentifier().getServiceId();
        this.publisher = publisher;
        this.id = id;
        this.creationDate = creationDate;
    }

    /**
     * Returns the id for the {@link LeafNode} where this item was published.
     *
     * @return the ID for the leaf node where this item was published.
     */
    public String getNodeID() {
        return nodeId;
    }

    /**
     * Returns the {@link LeafNode} where this item was published.
     *
     * Uses double-checked locking on the volatile {@code node} field: the node
     * reference is transient and must be re-resolved (from the pubsub module or
     * the PEP service manager) after deserialization.
     *
     * @return the leaf node where this item was published, or {@code null} when
     *         the owning PEP service is not currently cached.
     */
    public LeafNode getNode() {
        if (node == null) {
            synchronized (this) {
                if (node == null) {
                    if (XMPPServer.getInstance().getPubSubModule().getServiceID().equals(serviceId)) {
                        node = (LeafNode) XMPPServer.getInstance().getPubSubModule().getNode(nodeId);
                    }
                    else {
                        PEPServiceManager serviceMgr = XMPPServer.getInstance().getIQPEPHandler().getServiceManager();
                        JID service = new JID( serviceId );
                        node = serviceMgr.hasCachedService(service)
                            ? (LeafNode) serviceMgr.getPEPService(service).getNode(nodeId)
                            : null;
                    }
                }
            }
        }
        return node;
    }

    /**
     * Returns the ID that uniquely identifies the published item in the node.
     *
     * @return the ID that uniquely identifies the published item in the node.
     */
    public String getID() {
        return id;
    }

    /**
     * Returns the JID of the entity that published the item to the node.
     *
     * @return the JID of the entity that published the item to the node.
     */
    public JID getPublisher() {
        return publisher;
    }

    /**
     * Returns the datetime when the item was published.
     *
     * @return the datetime when the item was published.
     */
    public Date getCreationDate() {
        return creationDate;
    }

    /**
     * Returns the payload included when publishing the item. A published item may or may not
     * have a payload. Transient nodes that are configured to not broadcast payloads may allow
     * published items to have no payload.
     *
     * The payload element is lazily parsed from {@code payloadXML} (double-checked
     * locking on the volatile {@code payload} field). A parse failure is logged and
     * results in {@code null}; an {@link InterruptedException} re-interrupts the thread.
     *
     * @return the payload included when publishing the item or {@code null} if none was found.
     */
    public Element getPayload() {
        if (payload == null && payloadXML != null) {
            synchronized (this) {
                if (payload == null) {
                    // payload initialized as XML string from DB
                    try {
                        payload = SAXReaderUtil.readRootElement(payloadXML);
                    } catch (Exception ex) {
                        log.error("Failed to parse payload XML", ex);
                        if (ex instanceof InterruptedException) {
                            Thread.currentThread().interrupt();
                        }
                    }
                }
            }
        }
        return payload;
    }

    /**
     * Returns a textual representation of the payload or {@code null} if no payload
     * was specified with the item.
     *
     * @return a textual representation of the payload or null if no payload was specified
     *         with the item.
     */
    public String getPayloadXML() {
        return payloadXML;
    }

    /**
     * Sets the payload included when publishing the item. A published item may or may not
     * have a payload. Transient nodes that are configured to not broadcast payloads may allow
     * published items to have no payload.
     *
     * @param payloadXML the payload included when publishing the item or {@code null}
     *        if none was found.
     */
    void setPayloadXML(String payloadXML) {
        this.payloadXML = payloadXML;
        this.payload = null; // will be recreated only if needed
    }

    /**
     * Sets the payload included when publishing the item. A published item may or may not
     * have a payload. Transient nodes that are configured to not broadcast payloads may allow
     * published items to have no payload.
     *
     * @param payload the payload included when publishing the item or {@code null}
     *        if none was found.
     */
    void setPayload(Element payload) {
        this.payload = payload;
        // Update XML representation of the payload
        if (payload == null) {
            payloadXML = null;
        }
        else {
            payloadXML = payload.asXML();
        }
    }

    /**
     * Returns true if payload contains the specified keyword. If the item has no payload
     * or keyword is {@code null} then return true.
     *
     * @param keyword the keyword to look for in the payload.
     * @return true if payload contains the specified keyword.
     */
    boolean containsKeyword(String keyword) {
        if (getPayloadXML() == null || keyword == null) {
            return true;
        }
        return payloadXML.contains(keyword);
    }

    /**
     * Returns true if the user that is trying to delete an item is allowed to delete it.
     * Only the publisher or node admins (i.e. owners and sysadmins) are allowed to delete items.
     *
     * @param user the full JID of the user trying to delete the item.
     * @return true if the user that is trying to delete an item is allowed to delete it.
     */
    public boolean canDelete(JID user) {
        return publisher.equals(user)
            || publisher.toBareJID().equals(user.toBareJID())
            || getNode().isAdmin(user);
    }

    /**
     * Returns a value that uniquely identifies this published item in the XMPP domain.
     *
     * @return Unique identifier for this item
     */
    public UniqueIdentifier getUniqueIdentifier() {
        return getUniqueIdentifier( getNode(), id );
    }

    /**
     * Returns a value that uniquely identifies this published item in the XMPP domain.
     *
     * @param node Node for the published item
     * @param itemId Id for the published item (unique within the node)
     * @return Unique identifier for this item
     */
    public static UniqueIdentifier getUniqueIdentifier(LeafNode node, String itemId) {
        return getUniqueIdentifier( node.getUniqueIdentifier().getServiceIdentifier().getServiceId(), node.getUniqueIdentifier().getNodeId(), itemId );
    }

    /**
     * Returns a value that uniquely identifies this published item in the XMPP domain.
     *
     * @param serviceId Id of the service that contains the node.
     * @param nodeId Node id for the published item
     * @param itemId Id for the published item (unique within the node)
     * @return Unique identifier for this item
     */
    public static UniqueIdentifier getUniqueIdentifier(String serviceId, String nodeId, String itemId) {
        return new UniqueIdentifier( serviceId, nodeId, itemId );
    }

    /**
     * A unique identifier for an item, in context of all nodes of all services in the system.
     *
     * The properties that uniquely identify an item are its node, and its itemId.
     */
    // NOTE(review): this Serializable class declares no explicit serialVersionUID, so the
    // JVM-computed default applies. Consider pinning one — but only after confirming no
    // previously serialized instances (e.g. in clustered caches) would be invalidated.
    public final static class UniqueIdentifier implements Serializable {

        private final String serviceId;
        private final String nodeId;
        private final String itemId;

        public UniqueIdentifier( final String serviceId, final String nodeId, final String itemId ) {
            if ( serviceId == null ) {
                throw new IllegalArgumentException( "Argument 'serviceId' cannot be null." );
            }
            if ( nodeId == null ) {
                throw new IllegalArgumentException( "Argument 'nodeId' cannot be null." );
            }
            if ( itemId == null ) {
                throw new IllegalArgumentException( "Argument 'itemId' cannot be null." );
            }
            this.serviceId = serviceId;
            this.nodeId = nodeId;
            this.itemId = itemId;
        }

        public UniqueIdentifier( final Node.UniqueIdentifier nodeIdentifier, final String itemId ) {
            if ( nodeIdentifier == null ) {
                throw new IllegalArgumentException( "Argument 'nodeIdentifier' cannot be null." );
            }
            if ( itemId == null ) {
                throw new IllegalArgumentException( "Argument 'itemId' cannot be null." );
            }
            this.serviceId = nodeIdentifier.getServiceIdentifier().getServiceId();
            this.nodeId = nodeIdentifier.getNodeId();
            this.itemId = itemId;
        }

        public PubSubService.UniqueIdentifier getServiceIdentifier() {
            return new PubSubService.UniqueIdentifier( serviceId );
        }

        public Node.UniqueIdentifier getNodeIdentifier() {
            return new Node.UniqueIdentifier( serviceId, nodeId );
        }

        public String getItemId() {
            return itemId;
        }

        @Override
        public boolean equals( final Object o ) {
            if ( this == o ) { return true; }
            if ( o == null || getClass() != o.getClass() ) { return false; }
            final UniqueIdentifier that = (UniqueIdentifier) o;
            return serviceId.equals(that.serviceId)
                && nodeId.equals(that.nodeId)
                && itemId.equals(that.itemId);
        }

        @Override
        public int hashCode() {
            return Objects.hash(serviceId, nodeId, itemId);
        }

        @Override
        public String toString() {
            return "UniqueIdentifier{" + "serviceId='" + serviceId + '\'' + ", nodeId='" + nodeId + '\'' + ", itemId='" + itemId + '\'' + '}';
        }
    }
}
package com.mapswithme.maps.routing; import android.app.Activity; import android.content.Context; import android.content.DialogInterface; import androidx.annotation.DimenRes; import androidx.annotation.IntRange; import androidx.annotation.MainThread; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.fragment.app.FragmentActivity; import androidx.core.util.Pair; import androidx.appcompat.app.AlertDialog; import android.text.SpannableStringBuilder; import android.text.TextUtils; import android.view.View; import android.widget.TextView; import com.mapswithme.maps.Framework; import com.mapswithme.maps.MwmApplication; import com.mapswithme.maps.R; import com.mapswithme.maps.bookmarks.data.FeatureId; import com.mapswithme.maps.bookmarks.data.MapObject; import com.mapswithme.maps.downloader.MapManager; import com.mapswithme.maps.location.LocationHelper; import com.mapswithme.maps.taxi.TaxiInfo; import com.mapswithme.maps.taxi.TaxiInfoError; import com.mapswithme.maps.taxi.TaxiManager; import com.mapswithme.util.Config; import com.mapswithme.util.ConnectionState; import com.mapswithme.util.NetworkPolicy; import com.mapswithme.util.StringUtils; import com.mapswithme.util.Utils; import com.mapswithme.util.concurrency.UiThread; import com.mapswithme.util.log.Logger; import com.mapswithme.util.log.LoggerFactory; import com.mapswithme.util.statistics.AlohaHelper; import com.mapswithme.util.statistics.Statistics; import java.util.Calendar; import java.util.concurrent.TimeUnit; import static com.mapswithme.util.statistics.Statistics.EventName.ROUTING_POINT_ADD; import static com.mapswithme.util.statistics.Statistics.EventName.ROUTING_POINT_REMOVE; @androidx.annotation.UiThread public class RoutingController implements TaxiManager.TaxiListener { private static final String TAG = RoutingController.class.getSimpleName(); private enum State { NONE, PREPARE, NAVIGATION } enum BuildState { NONE, BUILDING, BUILT, ERROR } public interface Container 
{ FragmentActivity getActivity(); void showSearch(); void showRoutePlan(boolean show, @Nullable Runnable completionListener); void showNavigation(boolean show); void showDownloader(boolean openDownloaded); void updateMenu(); void onTaxiInfoReceived(@NonNull TaxiInfo info); void onTaxiError(@NonNull TaxiManager.ErrorCode code); void onNavigationCancelled(); void onNavigationStarted(); void onAddedStop(); void onRemovedStop(); void onBuiltRoute(); void onDrivingOptionsWarning(); boolean isSubwayEnabled(); void onCommonBuildError(int lastResultCode, @NonNull String[] lastMissingMaps); void onDrivingOptionsBuildError(); /** * @param progress progress to be displayed. * */ void updateBuildProgress(@IntRange(from = 0, to = 100) int progress, @Framework.RouterType int router); void onStartRouteBuilding(); } private static final int NO_WAITING_POI_PICK = -1; private static final RoutingController sInstance = new RoutingController(); private final Logger mLogger = LoggerFactory.INSTANCE.getLogger(LoggerFactory.Type.ROUTING); @Nullable private Container mContainer; private BuildState mBuildState = BuildState.NONE; private State mState = State.NONE; @RoutePointInfo.RouteMarkType private int mWaitingPoiPickType = NO_WAITING_POI_PICK; private int mLastBuildProgress; @Framework.RouterType private int mLastRouterType; private boolean mHasContainerSavedState; private boolean mContainsCachedResult; private int mLastResultCode; private String[] mLastMissingMaps; @Nullable private RoutingInfo mCachedRoutingInfo; @Nullable private TransitRouteInfo mCachedTransitRouteInfo; private boolean mTaxiRequestHandled; private boolean mTaxiPlanning; private boolean mInternetConnected; private int mInvalidRoutePointsTransactionId; private int mRemovingIntermediatePointsTransactionId; @SuppressWarnings("FieldCanBeLocal") private final Framework.RoutingListener mRoutingListener = new Framework.RoutingListener() { @MainThread @Override public void onRoutingEvent(final int resultCode, @Nullable final 
String[] missingMaps) { mLogger.d(TAG, "onRoutingEvent(resultCode: " + resultCode + ")"); mLastResultCode = resultCode; mLastMissingMaps = missingMaps; mContainsCachedResult = true; if (mLastResultCode == ResultCodesHelper.NO_ERROR || ResultCodesHelper.isMoreMapsNeeded(mLastResultCode)) { onBuiltRoute(); } else if (mLastResultCode == ResultCodesHelper.HAS_WARNINGS) { onBuiltRoute(); if (mContainer != null) mContainer.onDrivingOptionsWarning(); } processRoutingEvent(); } }; private void onBuiltRoute() { mCachedRoutingInfo = Framework.nativeGetRouteFollowingInfo(); if (mLastRouterType == Framework.ROUTER_TYPE_TRANSIT) mCachedTransitRouteInfo = Framework.nativeGetTransitRouteInfo(); setBuildState(BuildState.BUILT); mLastBuildProgress = 100; if (mContainer != null) mContainer.onBuiltRoute(); } @SuppressWarnings("FieldCanBeLocal") private final Framework.RoutingProgressListener mRoutingProgressListener = new Framework.RoutingProgressListener() { @MainThread @Override public void onRouteBuildingProgress(float progress) { mLastBuildProgress = (int) progress; updateProgress(); } }; @SuppressWarnings("FieldCanBeLocal") private final Framework.RoutingRecommendationListener mRoutingRecommendationListener = recommendation -> UiThread.run(() -> { if (recommendation == Framework.ROUTE_REBUILD_AFTER_POINTS_LOADING) setStartPoint(LocationHelper.INSTANCE.getMyPosition()); }); @SuppressWarnings("FieldCanBeLocal") private final Framework.RoutingLoadPointsListener mRoutingLoadPointsListener = success -> { if (success) prepare(getStartPoint(), getEndPoint()); }; public static RoutingController get() { return sInstance; } private void processRoutingEvent() { if (!mContainsCachedResult || mContainer == null || mHasContainerSavedState) return; mContainsCachedResult = false; if (isDrivingOptionsBuildError()) mContainer.onDrivingOptionsWarning(); if (mLastResultCode == ResultCodesHelper.NO_ERROR || mLastResultCode == ResultCodesHelper.HAS_WARNINGS) { updatePlan(); return; } if 
(mLastResultCode == ResultCodesHelper.CANCELLED) { setBuildState(BuildState.NONE); updatePlan(); return; } if (!ResultCodesHelper.isMoreMapsNeeded(mLastResultCode)) { setBuildState(BuildState.ERROR); mLastBuildProgress = 0; updateProgress(); } if (isDrivingOptionsBuildError()) mContainer.onDrivingOptionsBuildError(); else mContainer.onCommonBuildError(mLastResultCode, mLastMissingMaps); } private boolean isDrivingOptionsBuildError() { return !ResultCodesHelper.isMoreMapsNeeded(mLastResultCode) && isVehicleRouterType() && RoutingOptions.hasAnyOptions(); } private void setState(State newState) { mLogger.d(TAG, "[S] State: " + mState + " -> " + newState + ", BuildState: " + mBuildState); mState = newState; if (mContainer != null) mContainer.updateMenu(); } private void setBuildState(BuildState newState) { mLogger.d(TAG, "[B] State: " + mState + ", BuildState: " + mBuildState + " -> " + newState); mBuildState = newState; if (mBuildState == BuildState.BUILT && !MapObject.isOfType(MapObject.MY_POSITION, getStartPoint())) Framework.nativeDisableFollowing(); if (mContainer != null) mContainer.updateMenu(); } private void updateProgress() { if (isTaxiPlanning()) return; if (mContainer != null) mContainer.updateBuildProgress(mLastBuildProgress, mLastRouterType); } private void showRoutePlan() { if (mContainer != null) mContainer.showRoutePlan(true, new Runnable() { @Override public void run() { updatePlan(); } }); } public void attach(@NonNull Container container) { mContainer = container; } public void initialize() { mLastRouterType = Framework.nativeGetLastUsedRouter(); mInvalidRoutePointsTransactionId = Framework.nativeInvalidRoutePointsTransactionId(); mRemovingIntermediatePointsTransactionId = mInvalidRoutePointsTransactionId; Framework.nativeSetRoutingListener(mRoutingListener); Framework.nativeSetRouteProgressListener(mRoutingProgressListener); Framework.nativeSetRoutingRecommendationListener(mRoutingRecommendationListener); 
Framework.nativeSetRoutingLoadPointsListener(mRoutingLoadPointsListener); TaxiManager.INSTANCE.setTaxiListener(this); } public void detach() { mContainer = null; } @MainThread public void restore() { mHasContainerSavedState = false; if (isPlanning()) showRoutePlan(); if (mContainer != null) { if (isTaxiPlanning()) mContainer.updateBuildProgress(0, mLastRouterType); mContainer.showNavigation(isNavigating()); mContainer.updateMenu(); } processRoutingEvent(); } public void onSaveState() { mHasContainerSavedState = true; } private void build() { Framework.nativeRemoveRoute(); mLogger.d(TAG, "build"); mTaxiRequestHandled = false; mLastBuildProgress = 0; mInternetConnected = ConnectionState.isConnected(); if (isTaxiRouterType()) { if (!mInternetConnected) { completeTaxiRequest(); return; } MapObject start = getStartPoint(); MapObject end = getEndPoint(); if (start != null && end != null) requestTaxiInfo(start, end); } setBuildState(BuildState.BUILDING); if (mContainer != null) mContainer.onStartRouteBuilding(); updatePlan(); Statistics.INSTANCE.trackRouteBuild(mLastRouterType, getStartPoint(), getEndPoint()); org.alohalytics.Statistics.logEvent(AlohaHelper.ROUTING_BUILD, new String[]{Statistics.EventParam.FROM, Statistics.getPointType(getStartPoint()), Statistics.EventParam.TO, Statistics.getPointType(getEndPoint())}); Framework.nativeBuildRoute(); } private void completeTaxiRequest() { mTaxiRequestHandled = true; if (mContainer != null) { mContainer.updateBuildProgress(100, mLastRouterType); mContainer.updateMenu(); } } private void showDisclaimer(final MapObject startPoint, final MapObject endPoint, final boolean fromApi) { if (mContainer == null) return; StringBuilder builder = new StringBuilder(); for (int resId : new int[] { R.string.dialog_routing_disclaimer_priority, R.string.dialog_routing_disclaimer_precision, R.string.dialog_routing_disclaimer_recommendations, R.string.dialog_routing_disclaimer_borders, R.string.dialog_routing_disclaimer_beware }) 
builder.append(MwmApplication.get().getString(resId)).append("\n\n"); new AlertDialog.Builder(mContainer.getActivity()) .setTitle(R.string.dialog_routing_disclaimer_title) .setMessage(builder.toString()) .setCancelable(false) .setNegativeButton(R.string.decline, null) .setPositiveButton(R.string.accept, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dlg, int which) { Config.acceptRoutingDisclaimer(); prepare(startPoint, endPoint, fromApi); } }).show(); } public void restoreRoute() { Framework.nativeLoadRoutePoints(); } public boolean hasSavedRoute() { return Framework.nativeHasSavedRoutePoints(); } public void saveRoute() { if (isNavigating() || (isPlanning() && isBuilt())) Framework.nativeSaveRoutePoints(); } public void deleteSavedRoute() { Framework.nativeDeleteSavedRoutePoints(); } public void rebuildLastRoute() { setState(State.NONE); setBuildState(BuildState.NONE); prepare(getStartPoint(), getEndPoint(), false); } public void prepare(boolean canUseMyPositionAsStart, @Nullable MapObject endPoint) { prepare(canUseMyPositionAsStart, endPoint, false); } public void prepare(boolean canUseMyPositionAsStart, @Nullable MapObject endPoint, boolean fromApi) { MapObject startPoint = canUseMyPositionAsStart ? LocationHelper.INSTANCE.getMyPosition() : null; prepare(startPoint, endPoint, fromApi); } public void prepare(boolean canUseMyPositionAsStart, @Nullable MapObject endPoint, @Framework.RouterType int type, boolean fromApi) { MapObject startPoint = canUseMyPositionAsStart ? LocationHelper.INSTANCE.getMyPosition() : null; prepare(startPoint, endPoint, type, fromApi); } public void prepare(@Nullable MapObject startPoint, @Nullable MapObject endPoint) { prepare(startPoint, endPoint, false); } public void prepare(@Nullable MapObject startPoint, @Nullable MapObject endPoint, boolean fromApi) { mLogger.d(TAG, "prepare (" + (endPoint == null ? 
"route)" : "p2p)")); if (!Config.isRoutingDisclaimerAccepted()) { showDisclaimer(startPoint, endPoint, fromApi); return; } initLastRouteType(startPoint, endPoint, fromApi); prepare(startPoint, endPoint, mLastRouterType, fromApi); } private void initLastRouteType(@Nullable MapObject startPoint, @Nullable MapObject endPoint, boolean fromApi) { if (isSubwayEnabled() && !fromApi) { mLastRouterType = Framework.ROUTER_TYPE_TRANSIT; return; } if (startPoint != null && endPoint != null) mLastRouterType = Framework.nativeGetBestRouter(startPoint.getLat(), startPoint.getLon(), endPoint.getLat(), endPoint.getLon()); } private boolean isSubwayEnabled() { return mContainer != null && mContainer.isSubwayEnabled(); } public void prepare(final @Nullable MapObject startPoint, final @Nullable MapObject endPoint, @Framework.RouterType int routerType) { prepare(startPoint, endPoint, routerType, false); } public void prepare(final @Nullable MapObject startPoint, final @Nullable MapObject endPoint, @Framework.RouterType int routerType, boolean fromApi) { cancel(); setState(State.PREPARE); mLastRouterType = routerType; Framework.nativeSetRouter(mLastRouterType); if (startPoint != null || endPoint != null) setPointsInternal(startPoint, endPoint); if (mContainer != null) mContainer.showRoutePlan(true, new Runnable() { @Override public void run() { if (startPoint == null || endPoint == null) updatePlan(); else build(); } }); if (startPoint != null) trackPointAdd(startPoint, RoutePointInfo.ROUTE_MARK_START, false, false, fromApi); if (endPoint != null) trackPointAdd(endPoint, RoutePointInfo.ROUTE_MARK_FINISH, false, false, fromApi); } private static void trackPointAdd(@NonNull MapObject point, @RoutePointInfo.RouteMarkType int type, boolean isPlanning, boolean isNavigating, boolean fromApi) { boolean isMyPosition = point.getMapObjectType() == MapObject.MY_POSITION; Statistics.INSTANCE.trackRoutingPoint(ROUTING_POINT_ADD, type, isPlanning, isNavigating, isMyPosition, fromApi); } private 
static void trackPointRemove(@NonNull MapObject point, @RoutePointInfo.RouteMarkType int type,
                             boolean isPlanning, boolean isNavigating, boolean fromApi)
{
  boolean isMyPosition = point.getMapObjectType() == MapObject.MY_POSITION;
  Statistics.INSTANCE.trackRoutingPoint(ROUTING_POINT_REMOVE, type, isPlanning, isNavigating,
                                        isMyPosition, fromApi);
}

/**
 * Switches from planning to turn-by-turn navigation. If the route does not start at the
 * user's current position, suggests rebuilding it instead of starting.
 */
public void start()
{
  mLogger.d(TAG, "start");

  // This saving is needed just for situation when the user starts navigation
  // and then app crashes. So, the previous route will be restored on the next app launch.
  saveRoute();

  MapObject my = LocationHelper.INSTANCE.getMyPosition();
  if (my == null || !MapObject.isOfType(MapObject.MY_POSITION, getStartPoint()))
  {
    Statistics.INSTANCE.trackEvent(Statistics.EventName.ROUTING_START_SUGGEST_REBUILD);
    AlohaHelper.logClick(AlohaHelper.ROUTING_START_SUGGEST_REBUILD);
    suggestRebuildRoute();
    return;
  }

  setState(State.NAVIGATION);

  if (mContainer != null)
  {
    mContainer.showRoutePlan(false, null);
    mContainer.showNavigation(true);
    mContainer.onNavigationStarted();
  }

  Framework.nativeFollowRoute();
  LocationHelper.INSTANCE.restart();
}

/** Adds an intermediate stop and rebuilds the route. */
public void addStop(@NonNull MapObject mapObject)
{
  addRoutePoint(RoutePointInfo.ROUTE_MARK_INTERMEDIATE, mapObject);
  trackPointAdd(mapObject, RoutePointInfo.ROUTE_MARK_INTERMEDIATE, isPlanning(), isNavigating(), false);
  build();
  if (mContainer != null)
    mContainer.onAddedStop();
  backToPlaningStateIfNavigating();
}

/** Removes an intermediate stop and rebuilds the route. */
public void removeStop(@NonNull MapObject mapObject)
{
  RoutePointInfo info = mapObject.getRoutePointInfo();
  if (info == null)
    throw new AssertionError("A stop point must have the route point info!");

  applyRemovingIntermediatePointsTransaction();
  Framework.nativeRemoveRoutePoint(info.mMarkType, info.mIntermediateIndex);
  trackPointRemove(mapObject, info.mMarkType, isPlanning(), isNavigating(), false);
  build();
  if (mContainer != null)
    mContainer.onRemovedStop();
  backToPlaningStateIfNavigating();
}

// If stops are edited while navigating, drop back to the planning screen.
private void backToPlaningStateIfNavigating()
{
  if (!isNavigating())
    return;

  setState(State.PREPARE);
  if (mContainer != null)
  {
    mContainer.showNavigation(false);
    mContainer.showRoutePlan(true, null);
    mContainer.updateMenu();
    mContainer.onNavigationCancelled();
  }
}

private void removeIntermediatePoints()
{
  Framework.nativeRemoveIntermediateRoutePoints();
}

/** Converts native route-mark data into a {@link MapObject}. */
@NonNull
private MapObject toMapObject(@NonNull RouteMarkData point)
{
  return MapObject.createMapObject(FeatureId.EMPTY,
                                   point.mIsMyPosition ? MapObject.MY_POSITION : MapObject.POI,
                                   point.mTitle == null ? "" : point.mTitle,
                                   point.mSubtitle == null ? "" : point.mSubtitle,
                                   point.mLat, point.mLon);
}

/** @return whether another intermediate stop may be added (never allowed for taxi routing). */
public boolean isStopPointAllowed()
{
  return Framework.nativeCouldAddIntermediatePoint() && !isTaxiRouterType();
}

public boolean isRoutePoint(@NonNull MapObject mapObject)
{
  return mapObject.getRoutePointInfo() != null;
}

// Asks the user whether to rebuild the route from the current location.
private void suggestRebuildRoute()
{
  if (mContainer == null)
    return;

  final AlertDialog.Builder builder = new AlertDialog.Builder(mContainer.getActivity())
      .setMessage(R.string.p2p_reroute_from_current)
      .setCancelable(false)
      .setNegativeButton(R.string.cancel, null);

  TextView titleView = (TextView)View.inflate(mContainer.getActivity(),
                                              R.layout.dialog_suggest_reroute_title, null);
  titleView.setText(R.string.p2p_only_from_current);
  builder.setCustomTitle(titleView);

  if (MapObject.isOfType(MapObject.MY_POSITION, getEndPoint()))
  {
    // The route *ends* at my position: offer to swap start and end.
    builder.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener()
    {
      @Override
      public void onClick(DialogInterface dialog, int which)
      {
        swapPoints();
      }
    });
  }
  else
  {
    if (LocationHelper.INSTANCE.getMyPosition() == null)
      builder.setMessage(null).setNegativeButton(null, null);

    builder.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener()
    {
      @Override
      public void onClick(DialogInterface dialog, int which)
      {
        setStartFromMyPosition();
      }
    });
  }
  builder.show();
}

private void updatePlan()
{
  updateProgress();
}

// Resets all routing state and closes the native routing session.
private void cancelInternal()
{
  mLogger.d(TAG, "cancelInternal");

  //noinspection WrongConstant
mWaitingPoiPickType = NO_WAITING_POI_PICK;
mTaxiRequestHandled = false;
setBuildState(BuildState.NONE);
setState(State.NONE);
applyRemovingIntermediatePointsTransaction();
Framework.nativeDeleteSavedRoutePoints();
Framework.nativeCloseRouting();
}

/**
 * Cancels route planning or navigation, whichever is active.
 *
 * @return {@code true} if anything was actually cancelled.
 */
public boolean cancel()
{
  if (isPlanning())
  {
    mLogger.d(TAG, "cancel: planning");
    cancelInternal();
    if (mContainer != null)
      mContainer.showRoutePlan(false, null);
    return true;
  }

  if (isNavigating())
  {
    mLogger.d(TAG, "cancel: navigating");
    cancelInternal();
    if (mContainer != null)
    {
      mContainer.showNavigation(false);
      mContainer.updateMenu();
    }
    if (mContainer != null)
      mContainer.onNavigationCancelled();
    return true;
  }

  mLogger.d(TAG, "cancel: none");
  return false;
}

public boolean isPlanning()
{
  return mState == State.PREPARE;
}

boolean isTaxiPlanning()
{
  return isTaxiRouterType() && mTaxiPlanning;
}

boolean isTaxiRouterType()
{
  return mLastRouterType == Framework.ROUTER_TYPE_TAXI;
}

boolean isTransitType()
{
  return mLastRouterType == Framework.ROUTER_TYPE_TRANSIT;
}

boolean isVehicleRouterType()
{
  return mLastRouterType == Framework.ROUTER_TYPE_VEHICLE;
}

public boolean isNavigating()
{
  return mState == State.NAVIGATION;
}

public boolean isVehicleNavigation()
{
  return isNavigating() && isVehicleRouterType();
}

public boolean isBuilding()
{
  return mState == State.PREPARE && mBuildState == BuildState.BUILDING;
}

public boolean isErrorEncountered()
{
  return mBuildState == BuildState.ERROR;
}

public boolean isBuilt()
{
  return mBuildState == BuildState.BUILT;
}

/** Remembers which route point kind the upcoming POI pick (via search) is meant for. */
public void waitForPoiPick(@RoutePointInfo.RouteMarkType int pointType)
{
  mWaitingPoiPickType = pointType;
}

public boolean isWaitingPoiPick()
{
  return mWaitingPoiPickType != NO_WAITING_POI_PICK;
}

public boolean isTaxiRequestHandled()
{
  return mTaxiRequestHandled;
}

boolean isInternetConnected()
{
  return mInternetConnected;
}

BuildState getBuildState()
{
  return mBuildState;
}

@Nullable
public MapObject getStartPoint()
{
  return getStartOrEndPointByType(RoutePointInfo.ROUTE_MARK_START);
}

@Nullable
public MapObject getEndPoint()
{
  return getStartOrEndPointByType(RoutePointInfo.ROUTE_MARK_FINISH);
}

// Resolves the start/finish point from the native route-points list; the list is ordered,
// so the first element is the start and the last is the finish.
@Nullable
private MapObject getStartOrEndPointByType(@RoutePointInfo.RouteMarkType int type)
{
  RouteMarkData[] points = Framework.nativeGetRoutePoints();
  int size = points.length;

  if (size == 0)
    return null;

  if (size == 1)
  {
    RouteMarkData point = points[0];
    return point.mPointType == type ? toMapObject(point) : null;
  }

  if (type == RoutePointInfo.ROUTE_MARK_START)
    return toMapObject(points[0]);

  if (type == RoutePointInfo.ROUTE_MARK_FINISH)
    return toMapObject(points[size - 1]);

  return null;
}

public boolean hasStartPoint()
{
  return getStartPoint() != null;
}

public boolean hasEndPoint()
{
  return getEndPoint() != null;
}

@Nullable
RoutingInfo getCachedRoutingInfo()
{
  return mCachedRoutingInfo;
}

@Nullable
TransitRouteInfo getCachedTransitInfo()
{
  return mCachedTransitRouteInfo;
}

// Pushes the given start/end points down to the native route-points storage.
private void setPointsInternal(@Nullable MapObject startPoint, @Nullable MapObject endPoint)
{
  final boolean hasStart = startPoint != null;
  final boolean hasEnd = endPoint != null;
  final boolean hasOnePointAtLeast = hasStart || hasEnd;

  if (hasOnePointAtLeast)
    applyRemovingIntermediatePointsTransaction();

  if (hasStart)
    addRoutePoint(RoutePointInfo.ROUTE_MARK_START, startPoint);

  if (hasEnd)
    addRoutePoint(RoutePointInfo.ROUTE_MARK_FINISH, endPoint);

  if (hasOnePointAtLeast && mContainer != null)
    mContainer.updateMenu();
}

void checkAndBuildRoute()
{
  if (isWaitingPoiPick())
    showRoutePlan();

  if (getStartPoint() != null && getEndPoint() != null)
    build();
}

private boolean setStartFromMyPosition()
{
  mLogger.d(TAG, "setStartFromMyPosition");

  MapObject my = LocationHelper.INSTANCE.getMyPosition();
  if (my == null)
  {
    mLogger.d(TAG, "setStartFromMyPosition: no my position - skip");
    return false;
  }

  return setStartPoint(my);
}

/**
 * Sets starting point.
* <ul>
 * <li>If {@code point} matches ending one and the starting point was set &mdash; swap points.
 * <li>The same as the currently set starting point is skipped.
 * </ul>
 * Route starts to build if both points were set.
 *
 * @return {@code true} if the point was set.
 */
@SuppressWarnings("Duplicates")
public boolean setStartPoint(@Nullable MapObject point)
{
  mLogger.d(TAG, "setStartPoint");
  MapObject startPoint = getStartPoint();
  MapObject endPoint = getEndPoint();
  boolean isSamePoint = MapObject.same(startPoint, point);
  if (point != null)
  {
    applyRemovingIntermediatePointsTransaction();
    addRoutePoint(RoutePointInfo.ROUTE_MARK_START, point);
    startPoint = getStartPoint();
  }

  if (isSamePoint)
  {
    mLogger.d(TAG, "setStartPoint: skip the same starting point");
    return false;
  }

  if (point != null && point.sameAs(endPoint))
  {
    if (startPoint == null)
    {
      mLogger.d(TAG, "setStartPoint: skip because starting point is empty");
      return false;
    }

    mLogger.d(TAG, "setStartPoint: swap with end point");
    endPoint = startPoint;
  }

  startPoint = point;
  setPointsInternal(startPoint, endPoint);
  checkAndBuildRoute();
  if (startPoint != null)
    trackPointAdd(startPoint, RoutePointInfo.ROUTE_MARK_START, isPlanning(), isNavigating(), false);
  return true;
}

/**
 * Sets ending point.
 * <ul>
 * <li>If {@code point} is the same as starting point &mdash; swap points if ending point is set, skip otherwise.
 * <li>Set starting point to MyPosition if it was not set before.
 * </ul>
 * Route starts to build if both points were set.
 *
 * @return {@code true} if the point was set.
 */
@SuppressWarnings("Duplicates")
public boolean setEndPoint(@Nullable MapObject point)
{
  mLogger.d(TAG, "setEndPoint");
  MapObject startPoint = getStartPoint();
  MapObject endPoint = getEndPoint();
  boolean isSamePoint = MapObject.same(endPoint, point);
  if (point != null)
  {
    applyRemovingIntermediatePointsTransaction();
    addRoutePoint(RoutePointInfo.ROUTE_MARK_FINISH, point);
    endPoint = getEndPoint();
  }

  if (isSamePoint)
  {
    mLogger.d(TAG, "setEndPoint: skip the same end point");
    return false;
  }

  if (point != null && point.sameAs(startPoint))
  {
    if (endPoint == null)
    {
      mLogger.d(TAG, "setEndPoint: skip because end point is empty");
      return false;
    }

    mLogger.d(TAG, "setEndPoint: swap with starting point");
    startPoint = endPoint;
  }

  endPoint = point;
  if (endPoint != null)
    trackPointAdd(endPoint, RoutePointInfo.ROUTE_MARK_FINISH, isPlanning(), isNavigating(), false);
  setPointsInternal(startPoint, endPoint);
  checkAndBuildRoute();
  return true;
}

// Adds the point to the native route-points list under the given mark type.
private static void addRoutePoint(@RoutePointInfo.RouteMarkType int type, @NonNull MapObject point)
{
  Pair<String, String> description = getDescriptionForPoint(point);
  Framework.nativeAddRoutePoint(description.first /* title */, description.second /* subtitle */,
                                type, 0 /* intermediateIndex */,
                                MapObject.isOfType(MapObject.MY_POSITION, point),
                                point.getLat(), point.getLon());
}

// Picks a human-readable title/subtitle pair for the point, falling back from title
// to subtitle, then address, then formatted coordinates.
@NonNull
private static Pair<String, String> getDescriptionForPoint(@NonNull MapObject point)
{
  String title, subtitle = "";
  if (!TextUtils.isEmpty(point.getTitle()))
  {
    title = point.getTitle();
    subtitle = point.getSubtitle();
  }
  else
  {
    if (!TextUtils.isEmpty(point.getSubtitle()))
    {
      title = point.getSubtitle();
    }
    else if (!TextUtils.isEmpty(point.getAddress()))
    {
      title = point.getAddress();
    }
    else
    {
      title = Framework.nativeFormatLatLon(point.getLat(), point.getLon(), false /* useDmsFormat */);
    }
  }
  return new Pair<>(title, subtitle);
}

// Exchanges start and end points, then rebuilds the route.
private void swapPoints()
{
  mLogger.d(TAG, "swapPoints");
  MapObject startPoint = getStartPoint();
  MapObject endPoint = getEndPoint();
MapObject point = startPoint;
startPoint = endPoint;
endPoint = point;
Statistics.INSTANCE.trackEvent(Statistics.EventName.ROUTING_SWAP_POINTS);
AlohaHelper.logClick(AlohaHelper.ROUTING_SWAP_POINTS);
setPointsInternal(startPoint, endPoint);
checkAndBuildRoute();
if (mContainer != null)
  mContainer.updateMenu();
}

/** Switches the active router type and rebuilds the route if both points are set. */
public void setRouterType(@Framework.RouterType int router)
{
  mLogger.d(TAG, "setRouterType: " + mLastRouterType + " -> " + router);

  // Repeating tap on Taxi icon should trigger the route building always,
  // because it may be "No internet connection, try later" case
  if (router == mLastRouterType && !isTaxiRouterType())
    return;

  mLastRouterType = router;
  Framework.nativeSetRouter(router);

  // Taxi routing does not support intermediate points.
  if (isTaxiRouterType())
  {
    openRemovingIntermediatePointsTransaction();
    removeIntermediatePoints();
  }
  else
  {
    cancelRemovingIntermediatePointsTransaction();
  }

  if (getStartPoint() != null && getEndPoint() != null)
    build();
}

@Framework.RouterType
public int getLastRouterType()
{
  return mLastRouterType;
}

// Opens a native transaction so removing intermediate points can later be rolled back.
private void openRemovingIntermediatePointsTransaction()
{
  if (mRemovingIntermediatePointsTransactionId == mInvalidRoutePointsTransactionId)
    mRemovingIntermediatePointsTransactionId = Framework.nativeOpenRoutePointsTransaction();
}

// Rolls back the pending intermediate-points removal, restoring the removed points.
private void cancelRemovingIntermediatePointsTransaction()
{
  if (mRemovingIntermediatePointsTransactionId == mInvalidRoutePointsTransactionId)
    return;
  Framework.nativeCancelRoutePointsTransaction(mRemovingIntermediatePointsTransactionId);
  mRemovingIntermediatePointsTransactionId = mInvalidRoutePointsTransactionId;
}

private void applyRemovingIntermediatePointsTransaction()
{
  // We have to apply removing intermediate points transaction each time
  // we add/remove route points in the taxi mode.
  if (mRemovingIntermediatePointsTransactionId == mInvalidRoutePointsTransactionId)
    return;
  Framework.nativeApplyRoutePointsTransaction(mRemovingIntermediatePointsTransactionId);
  mRemovingIntermediatePointsTransactionId = mInvalidRoutePointsTransactionId;
}

/** Applies a POI chosen via search as the point requested by {@link #waitForPoiPick(int)}. */
public void onPoiSelected(@Nullable MapObject point)
{
  if (!isWaitingPoiPick())
    return;

  if (mWaitingPoiPickType != RoutePointInfo.ROUTE_MARK_FINISH
      && mWaitingPoiPickType != RoutePointInfo.ROUTE_MARK_START)
  {
    throw new AssertionError("Only start and finish points can be added through search!");
  }

  if (point != null)
  {
    if (mWaitingPoiPickType == RoutePointInfo.ROUTE_MARK_FINISH)
      setEndPoint(point);
    else
      setStartPoint(point);
  }

  if (mContainer != null)
  {
    mContainer.updateMenu();
    showRoutePlan();
  }

  //noinspection WrongConstant
  mWaitingPoiPickType = NO_WAITING_POI_PICK;
}

/** Formats a duration in seconds as styled hours/minutes text; the hours part is omitted when zero. */
public static CharSequence formatRoutingTime(Context context, int seconds, @DimenRes int unitsSize)
{
  long minutes = TimeUnit.SECONDS.toMinutes(seconds) % 60;
  long hours = TimeUnit.SECONDS.toHours(seconds);
  String min = context.getString(R.string.minute);
  String hour = context.getString(R.string.hour);
  @DimenRes int textSize = R.dimen.text_size_routing_number;
  SpannableStringBuilder displayedH = Utils.formatUnitsText(context, textSize, unitsSize,
                                                            String.valueOf(hours), hour);
  SpannableStringBuilder displayedM = Utils.formatUnitsText(context, textSize, unitsSize,
                                                            String.valueOf(minutes), min);
  return hours == 0 ? displayedM : TextUtils.concat(displayedH + " ", displayedM);
}

/** Formats the wall-clock arrival time ("H:MM") that is {@code seconds} from now. */
static String formatArrivalTime(int seconds)
{
  Calendar current = Calendar.getInstance();
  current.set(Calendar.SECOND, 0);
  current.add(Calendar.SECOND, seconds);
  return StringUtils.formatUsingUsLocale("%d:%02d", current.get(Calendar.HOUR_OF_DAY),
                                         current.get(Calendar.MINUTE));
}

// Starts an asynchronous taxi-products request; results arrive via the onTaxi* callbacks below.
private void requestTaxiInfo(@NonNull MapObject startPoint, @NonNull MapObject endPoint)
{
  mTaxiPlanning = true;
  TaxiManager.INSTANCE.nativeRequestTaxiProducts(NetworkPolicy.newInstance(true /* canUse */),
                                                 startPoint.getLat(), startPoint.getLon(),
                                                 endPoint.getLat(), endPoint.getLon());
  if (mContainer != null)
    mContainer.updateBuildProgress(0, mLastRouterType);
}

@Override
public void onTaxiProviderReceived(@NonNull TaxiInfo provider)
{
  mTaxiPlanning = false;
  mLogger.d(TAG, "onTaxiInfoReceived provider = " + provider);
  if (isTaxiRouterType() && mContainer != null)
  {
    mContainer.onTaxiInfoReceived(provider);
    completeTaxiRequest();
    Statistics.INSTANCE.trackTaxiEvent(Statistics.EventName.ROUTING_TAXI_ROUTE_BUILT,
                                       provider.getType().getProviderName());
  }
}

@Override
public void onTaxiErrorReceived(@NonNull TaxiInfoError error)
{
  mTaxiPlanning = false;
  mLogger.e(TAG, "onTaxiError error = " + error);
  if (isTaxiRouterType() && mContainer != null)
  {
    mContainer.onTaxiError(error.getCode());
    completeTaxiRequest();
    Statistics.INSTANCE.trackTaxiError(error);
  }
}

@Override
public void onNoTaxiProviders()
{
  mTaxiPlanning = false;
  mLogger.e(TAG, "onNoTaxiProviders");
  if (isTaxiRouterType() && mContainer != null)
  {
    mContainer.onTaxiError(TaxiManager.ErrorCode.NoProviders);
    completeTaxiRequest();
    Statistics.INSTANCE.trackNoTaxiProvidersError();
  }
}
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.google.sheets.stream; import java.util.Collections; import java.util.List; import com.google.api.services.sheets.v4.SheetsScopes; import org.apache.camel.RuntimeCamelException; import org.apache.camel.spi.UriParam; import org.apache.camel.spi.UriParams; import org.apache.camel.spi.UriPath; /** * Component configuration for GoogleSheets stream component. 
*/
@UriParams
public class GoogleSheetsStreamConfiguration implements Cloneable {

    // Default OAuth scope used when no explicit scopes are configured.
    private static final List<String> DEFAULT_SCOPES = Collections.singletonList(SheetsScopes.SPREADSHEETS);

    @UriPath
    private String apiName;

    @UriParam
    private List<String> scopes = DEFAULT_SCOPES;

    @UriParam
    private String clientId;

    @UriParam
    private String clientSecret;

    @UriParam
    private String accessToken;

    @UriParam
    private String refreshToken;

    @UriParam
    private String applicationName;

    @UriParam
    private String spreadsheetId;

    @UriParam(defaultValue = "10")
    private int maxResults = 10;

    @UriParam
    private String range;

    @UriParam
    private boolean includeGridData;

    @UriParam(enums = "ROWS,COLUMNS,DIMENSION_UNSPECIFIED", defaultValue = "ROWS")
    private String majorDimension = "ROWS";

    @UriParam(enums = "FORMATTED_VALUE,UNFORMATTED_VALUE,FORMULA", defaultValue = "FORMATTED_VALUE")
    private String valueRenderOption = "FORMATTED_VALUE";

    public String getClientId() {
        return clientId;
    }

    /**
     * Client ID of the sheets application
     */
    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    public String getClientSecret() {
        return clientSecret;
    }

    /**
     * Client secret of the sheets application
     */
    public void setClientSecret(String clientSecret) {
        this.clientSecret = clientSecret;
    }

    public String getAccessToken() {
        return accessToken;
    }

    /**
     * OAuth 2 access token. This typically expires after an hour so
     * refreshToken is recommended for long term usage.
     */
    public void setAccessToken(String accessToken) {
        this.accessToken = accessToken;
    }

    public String getRefreshToken() {
        return refreshToken;
    }

    /**
     * OAuth 2 refresh token. Using this, the Google Sheets component can
     * obtain a new accessToken whenever the current one expires - a necessity
     * if the application is long-lived.
     */
    public void setRefreshToken(String refreshToken) {
        this.refreshToken = refreshToken;
    }

    public String getApplicationName() {
        return applicationName;
    }

    /**
     * Google sheets application name. Example would be "camel-google-sheets/1.0"
     */
    public void setApplicationName(String applicationName) {
        this.applicationName = applicationName;
    }

    public List<String> getScopes() {
        return scopes;
    }

    /**
     * Specifies the level of permissions you want a sheets application to have to
     * a user account. See https://developers.google.com/identity/protocols/googlescopes
     * for more info.
     */
    public void setScopes(List<String> scopes) {
        this.scopes = scopes;
    }

    /**
     * Gets the apiName.
     *
     * @return the configured API name
     */
    public String getApiName() {
        return apiName;
    }

    /**
     * Sets the apiName.
     *
     * @param apiName the API name to use
     */
    public void setApiName(String apiName) {
        this.apiName = apiName;
    }

    public String getSpreadsheetId() {
        return spreadsheetId;
    }

    /**
     * Specifies the spreadsheet identifier that is used to identify the target to obtain.
     *
     * @param spreadsheetId the spreadsheet identifier
     */
    public void setSpreadsheetId(String spreadsheetId) {
        this.spreadsheetId = spreadsheetId;
    }

    public int getMaxResults() {
        return maxResults;
    }

    /**
     * Specify the maximum number of returned results. This will limit the number of rows in a returned value range
     * data set or the number of returned value ranges in a batch request.
     *
     * @param maxResults the maximum number of results
     */
    public void setMaxResults(int maxResults) {
        this.maxResults = maxResults;
    }

    public String getRange() {
        return range;
    }

    /**
     * Specifies the range of rows and columns in a sheet to get data from.
     *
     * @param range the sheet range to read
     */
    public void setRange(String range) {
        this.range = range;
    }

    public String getMajorDimension() {
        return majorDimension;
    }

    /**
     * Specifies the major dimension that results should use.
     *
     * @param majorDimension one of ROWS, COLUMNS or DIMENSION_UNSPECIFIED
     */
    public void setMajorDimension(String majorDimension) {
        this.majorDimension = majorDimension;
    }

    public String getValueRenderOption() {
        return valueRenderOption;
    }

    /**
     * Determines how values should be rendered in the output.
     *
     * @param valueRenderOption one of FORMATTED_VALUE, UNFORMATTED_VALUE or FORMULA
     */
    public void setValueRenderOption(String valueRenderOption) {
        this.valueRenderOption = valueRenderOption;
    }

    public boolean isIncludeGridData() {
        return includeGridData;
    }

    /**
     * True if grid data should be returned.
     *
     * @param includeGridData whether to return grid data
     */
    public void setIncludeGridData(boolean includeGridData) {
        this.includeGridData = includeGridData;
    }

    // *************************************************
    //
    // *************************************************

    /**
     * Returns a shallow copy of this configuration.
     */
    public GoogleSheetsStreamConfiguration copy() {
        try {
            return (GoogleSheetsStreamConfiguration)super.clone();
        } catch (CloneNotSupportedException e) {
            throw new RuntimeCamelException(e);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jasper;

import java.io.File;
import java.util.Enumeration;
import java.util.Map;
import java.util.Properties;

import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.jsp.tagext.TagLibraryInfo;

import org.apache.jasper.compiler.JspConfig;
import org.apache.jasper.compiler.Localizer;
import org.apache.jasper.compiler.TagPluginManager;
import org.apache.jasper.compiler.TldCache;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;

/**
 * A class to hold all init parameters specific to the JSP engine.
 *
 * @author Anil K. Vijendran
 * @author Hans Bergsten
 * @author Pierre Delisle
 */
public final class EmbeddedServletOptions implements Options {

    // Logger
    private final Log log = LogFactory.getLog(EmbeddedServletOptions.class);

    // Raw init-parameter name/value pairs as supplied to the servlet.
    private Properties settings = new Properties();

    /**
     * Is Jasper being used in development mode?
     */
    private boolean development = true;

    /**
     * Should Ant fork its java compiles of JSP pages.
     */
    public boolean fork = true;

    /**
     * Do you want to keep the generated Java files around?
     */
    private boolean keepGenerated = true;

    /**
     * Should white spaces between directives or actions be trimmed?
     */
    private boolean trimSpaces = false;

    /**
     * Determines whether tag handler pooling is enabled.
     */
    private boolean isPoolingEnabled = true;

    /**
     * Do you want support for "mapped" files? This will generate
     * servlet that has a print statement per line of the JSP file.
     * This seems like a really nice feature to have for debugging.
     */
    private boolean mappedFile = true;

    /**
     * Do we want to include debugging information in the class file?
     */
    private boolean classDebugInfo = true;

    /**
     * Background compile thread check interval in seconds.
     */
    private int checkInterval = 0;

    /**
     * Is the generation of SMAP info for JSR45 debugging suppressed?
     */
    private boolean isSmapSuppressed = false;

    /**
     * Should SMAP info for JSR45 debugging be dumped to a file?
     */
    private boolean isSmapDumped = false;

    /**
     * Are Text strings to be generated as char arrays?
     */
    private boolean genStringAsCharArray = false;

    // Should an invalid class attribute on jsp:useBean raise an error?
    private boolean errorOnUseBeanInvalidClassAttribute = true;

    /**
     * I want to see my generated servlets. Which directory are they
     * in?
     */
    private File scratchDir;

    /**
     * Need to have this as is for versions 4 and 5 of IE. Can be set from
     * the initParams so if it changes in the future all that is needed is
     * to have a jsp initParam of type ieClassId="<value>"
     */
    private String ieClassId = "clsid:8AD9C840-044E-11D1-B3E9-00805F499D93";

    /**
     * What classpath should I use while compiling generated servlets?
     */
    private String classpath = null;

    /**
     * Compiler to use.
     */
    private String compiler = null;

    /**
     * Compiler target VM.
     */
    private String compilerTargetVM = "1.6";

    /**
     * The compiler source VM.
     */
    private String compilerSourceVM = "1.6";

    /**
     * The compiler class name.
     */
    private String compilerClassName = null;

    /**
     * Cache for the TLD URIs, resource paths and parsed files.
     */
    private TldCache tldCache = null;

    /**
     * Jsp config information
     */
    private JspConfig jspConfig = null;

    /**
     * TagPluginManager
     */
    private TagPluginManager tagPluginManager = null;

    /**
     * Java platform encoding to generate the JSP
     * page servlet.
     */
    private String javaEncoding = "UTF8";

    /**
     * Modification test interval.
     */
    private int modificationTestInterval = 4;

    /**
     * Is re-compilation attempted immediately after a failure?
     */
    private boolean recompileOnFail = false;

    /**
     * Is generation of X-Powered-By response header enabled/disabled?
     */
    private boolean xpoweredBy;

    /**
     * Should we include a source fragment in exception messages, which could be displayed
     * to the developer ?
     */
    private boolean displaySourceFragment = true;

    /**
     * The maximum number of loaded jsps per web-application. If there are more
     * jsps loaded, they will be unloaded.
     */
    private int maxLoadedJsps = -1;

    /**
     * The idle time in seconds after which a JSP is unloaded.
     * If unset or less or equal than 0, no jsps are unloaded.
     */
    private int jspIdleTimeout = -1;

    /**
     * Looks up a single raw init-parameter value.
     */
    public String getProperty(String name) {
        return settings.getProperty(name);
    }

    /**
     * Stores a raw init-parameter value; null names or values are ignored.
     */
    public void setProperty(String name, String value) {
        if (name != null && value != null) {
            settings.setProperty(name, value);
        }
    }

    /**
     * Are we keeping generated code around?
     */
    @Override
    public boolean getKeepGenerated() {
        return keepGenerated;
    }

    /**
     * Should white spaces between directives or actions be trimmed?
     */
    @Override
    public boolean getTrimSpaces() {
        return trimSpaces;
    }

    @Override
    public boolean isPoolingEnabled() {
        return isPoolingEnabled;
    }

    /**
     * Are we supporting HTML mapped servlets?
     */
    @Override
    public boolean getMappedFile() {
        return mappedFile;
    }

    /**
     * Should class files be compiled with debug information?
     */
    @Override
    public boolean getClassDebugInfo() {
        return classDebugInfo;
    }

    /**
     * Background JSP compile thread check interval
     */
    @Override
    public int getCheckInterval() {
        return checkInterval;
    }

    /**
     * Modification test interval.
     */
    @Override
    public int getModificationTestInterval() {
        return modificationTestInterval;
    }

    /**
     * Re-compile on failure.
     */
    @Override
    public boolean getRecompileOnFail() {
        return recompileOnFail;
    }

    /**
     * Is Jasper being used in development mode?
     */
    @Override
    public boolean getDevelopment() {
        return development;
    }

    /**
     * Is the generation of SMAP info for JSR45 debugging suppressed?
     */
    @Override
    public boolean isSmapSuppressed() {
        return isSmapSuppressed;
    }

    /**
     * Should SMAP info for JSR45 debugging be dumped to a file?
     */
    @Override
    public boolean isSmapDumped() {
        return isSmapDumped;
    }

    /**
     * Are Text strings to be generated as char arrays?
     */
    @Override
    public boolean genStringAsCharArray() {
        return this.genStringAsCharArray;
    }

    /**
     * Class ID for use in the plugin tag when the browser is IE.
     */
    @Override
    public String getIeClassId() {
        return ieClassId;
    }

    /**
     * What is my scratch dir?
     */
    @Override
    public File getScratchDir() {
        return scratchDir;
    }

    /**
     * What classpath should I use while compiling the servlets
     * generated from JSP files?
     */
    @Override
    public String getClassPath() {
        return classpath;
    }

    /**
     * Is generation of X-Powered-By response header enabled/disabled?
     */
    @Override
    public boolean isXpoweredBy() {
        return xpoweredBy;
    }

    /**
     * Compiler to use.
     */
    @Override
    public String getCompiler() {
        return compiler;
    }

    /**
     * @see Options#getCompilerTargetVM
     */
    @Override
    public String getCompilerTargetVM() {
        return compilerTargetVM;
    }

    /**
     * @see Options#getCompilerSourceVM
     */
    @Override
    public String getCompilerSourceVM() {
        return compilerSourceVM;
    }

    /**
     * Java compiler class to use.
     */
    @Override
    public String getCompilerClassName() {
        return compilerClassName;
    }

    @Override
    public boolean getErrorOnUseBeanInvalidClassAttribute() {
        return errorOnUseBeanInvalidClassAttribute;
    }

    public void setErrorOnUseBeanInvalidClassAttribute(boolean b) {
        errorOnUseBeanInvalidClassAttribute = b;
    }

    @Override
    public TldCache getTldCache() {
        return tldCache;
    }

    public void setTldCache(TldCache tldCache) {
        this.tldCache = tldCache;
    }

    @Override
    public String getJavaEncoding() {
        return javaEncoding;
    }

    @Override
    public boolean getFork() {
        return fork;
    }

    @Override
    public JspConfig getJspConfig() {
        return jspConfig;
    }

    @Override
    public TagPluginManager getTagPluginManager() {
        return tagPluginManager;
    }

    // This Options implementation does not cache TagLibraryInfo instances.
    @Override
    public boolean isCaching() {
        return false;
    }

    @Override
    public Map<String, TagLibraryInfo> getCache() {
        return null;
    }

    /**
     * Should we include a source fragment in exception messages, which could be displayed
     * to the developer ?
     */
    @Override
    public boolean getDisplaySourceFragment() {
        return displaySourceFragment;
    }

    /**
     * Should jsps be unloaded if to many are loaded?
     * If set to a value greater than 0 eviction of jsps is started. Default: -1
     */
    @Override
    public int getMaxLoadedJsps() {
        return maxLoadedJsps;
    }

    /**
     * Should any jsps be unloaded when being idle for this time in seconds?
     * If set to a value greater than 0 eviction of jsps is started. Default: -1
     */
    @Override
    public int getJspIdleTimeout() {
        return jspIdleTimeout;
    }

    /**
     * Create an EmbeddedServletOptions object using data available from
     * ServletConfig and ServletContext.
     */
    public EmbeddedServletOptions(ServletConfig config, ServletContext context) {

        // Keep a raw copy of every init parameter before interpreting them.
        Enumeration<String> enumeration = config.getInitParameterNames();
        while (enumeration.hasMoreElements()) {
            String k = enumeration.nextElement();
            String v = config.getInitParameter(k);
            setProperty(k, v);
        }

        // Each recognized parameter accepts "true"/"false" (case-insensitive);
        // anything else keeps the default and logs a warning.
        String keepgen = config.getInitParameter("keepgenerated");
        if (keepgen != null) {
            if (keepgen.equalsIgnoreCase("true")) {
                this.keepGenerated = true;
            } else if (keepgen.equalsIgnoreCase("false")) {
                this.keepGenerated = false;
            } else {
                if (log.isWarnEnabled()) {
                    log.warn(Localizer.getMessage("jsp.warning.keepgen"));
                }
            }
        }

        String trimsp = config.getInitParameter("trimSpaces");
        if (trimsp != null) {
            if (trimsp.equalsIgnoreCase("true")) {
                trimSpaces = true;
            } else if (trimsp.equalsIgnoreCase("false")) {
                trimSpaces = false;
            } else {
                if (log.isWarnEnabled()) {
                    log.warn(Localizer.getMessage("jsp.warning.trimspaces"));
                }
            }
        }

        this.isPoolingEnabled = true;
        String poolingEnabledParam = config.getInitParameter("enablePooling");
        if (poolingEnabledParam != null
                && !poolingEnabledParam.equalsIgnoreCase("true")) {
            if (poolingEnabledParam.equalsIgnoreCase("false")) {
                this.isPoolingEnabled = false;
            } else {
                if (log.isWarnEnabled()) {
                    log.warn(Localizer.getMessage("jsp.warning.enablePooling"));
                }
            }
        }

        String mapFile = config.getInitParameter("mappedfile");
        if (mapFile != null) {
            if (mapFile.equalsIgnoreCase("true")) {
                this.mappedFile = true;
            } else if (mapFile.equalsIgnoreCase("false")) {
                this.mappedFile = false;
            } else {
                if (log.isWarnEnabled()) {
                    log.warn(Localizer.getMessage("jsp.warning.mappedFile"));
                }
            }
        }

        String debugInfo = config.getInitParameter("classdebuginfo");
        if (debugInfo != null) {
            if (debugInfo.equalsIgnoreCase("true")) {
                this.classDebugInfo = true;
            } else if (debugInfo.equalsIgnoreCase("false")) {
                this.classDebugInfo = false;
            } else {
                if (log.isWarnEnabled()) {
                    log.warn(Localizer.getMessage("jsp.warning.classDebugInfo"));
                }
            }
        }

        String checkInterval = config.getInitParameter("checkInterval");
        if (checkInterval != null) {
            try {
                this.checkInterval = Integer.parseInt(checkInterval);
            } catch (NumberFormatException ex) {
                if (log.isWarnEnabled()) {
                    log.warn(Localizer.getMessage("jsp.warning.checkInterval"));
                }
            }
        }

        String modificationTestInterval = config.getInitParameter("modificationTestInterval");
        if (modificationTestInterval != null) {
            try {
                this.modificationTestInterval = Integer.parseInt(modificationTestInterval);
            } catch (NumberFormatException ex) {
                if (log.isWarnEnabled()) {
                    log.warn(Localizer.getMessage("jsp.warning.modificationTestInterval"));
                }
            }
        }

        String recompileOnFail = config.getInitParameter("recompileOnFail");
        if (recompileOnFail != null) {
            if (recompileOnFail.equalsIgnoreCase("true")) {
                this.recompileOnFail = true;
            } else if (recompileOnFail.equalsIgnoreCase("false")) {
                this.recompileOnFail = false;
            } else {
                if (log.isWarnEnabled()) {
                    log.warn(Localizer.getMessage("jsp.warning.recompileOnFail"));
                }
            }
        }

        String development = config.getInitParameter("development");
        if (development != null) {
            if (development.equalsIgnoreCase("true")) {
                this.development = true;
            } else if (development.equalsIgnoreCase("false")) {
                this.development = false;
            } else {
                if (log.isWarnEnabled()) {
                    log.warn(Localizer.getMessage("jsp.warning.development"));
                }
            }
        }

        String suppressSmap = config.getInitParameter("suppressSmap");
        if (suppressSmap != null) {
            if (suppressSmap.equalsIgnoreCase("true")) {
                isSmapSuppressed = true;
            } else if (suppressSmap.equalsIgnoreCase("false")) {
                isSmapSuppressed = false;
            } else {
                if (log.isWarnEnabled()) {
                    log.warn(Localizer.getMessage("jsp.warning.suppressSmap"));
                }
            }
        }

        String dumpSmap = config.getInitParameter("dumpSmap");
        if (dumpSmap != null) {
            if (dumpSmap.equalsIgnoreCase("true")) {
                isSmapDumped = true;
            } else if (dumpSmap.equalsIgnoreCase("false")) {
                isSmapDumped = false;
            } else {
                if (log.isWarnEnabled()) {
                    log.warn(Localizer.getMessage("jsp.warning.dumpSmap"));
                }
            }
        }

        String genCharArray = config.getInitParameter("genStringAsCharArray");
        if
(genCharArray != null) { if (genCharArray.equalsIgnoreCase("true")) { genStringAsCharArray = true; } else if (genCharArray.equalsIgnoreCase("false")) { genStringAsCharArray = false; } else { if (log.isWarnEnabled()) { log.warn(Localizer.getMessage("jsp.warning.genchararray")); } } } String errBeanClass = config.getInitParameter("errorOnUseBeanInvalidClassAttribute"); if (errBeanClass != null) { if (errBeanClass.equalsIgnoreCase("true")) { errorOnUseBeanInvalidClassAttribute = true; } else if (errBeanClass.equalsIgnoreCase("false")) { errorOnUseBeanInvalidClassAttribute = false; } else { if (log.isWarnEnabled()) { log.warn(Localizer.getMessage("jsp.warning.errBean")); } } } String ieClassId = config.getInitParameter("ieClassId"); if (ieClassId != null) this.ieClassId = ieClassId; String classpath = config.getInitParameter("classpath"); if (classpath != null) this.classpath = classpath; /* * scratchdir */ String dir = config.getInitParameter("scratchdir"); if (dir != null) { scratchDir = new File(dir); } else { // First try the Servlet 2.2 javax.servlet.context.tempdir property scratchDir = (File) context.getAttribute(ServletContext.TEMPDIR); if (scratchDir == null) { // Not running in a Servlet 2.2 container. 
// Try to get the JDK 1.2 java.io.tmpdir property dir = System.getProperty("java.io.tmpdir"); if (dir != null) scratchDir = new File(dir); } } if (this.scratchDir == null) { log.fatal(Localizer.getMessage("jsp.error.no.scratch.dir")); return; } if (!(scratchDir.exists() && scratchDir.canRead() && scratchDir.canWrite() && scratchDir.isDirectory())) log.fatal(Localizer.getMessage("jsp.error.bad.scratch.dir", scratchDir.getAbsolutePath())); this.compiler = config.getInitParameter("compiler"); String compilerTargetVM = config.getInitParameter("compilerTargetVM"); if(compilerTargetVM != null) { this.compilerTargetVM = compilerTargetVM; } String compilerSourceVM = config.getInitParameter("compilerSourceVM"); if(compilerSourceVM != null) { this.compilerSourceVM = compilerSourceVM; } String javaEncoding = config.getInitParameter("javaEncoding"); if (javaEncoding != null) { this.javaEncoding = javaEncoding; } String compilerClassName = config.getInitParameter("compilerClassName"); if (compilerClassName != null) { this.compilerClassName = compilerClassName; } String fork = config.getInitParameter("fork"); if (fork != null) { if (fork.equalsIgnoreCase("true")) { this.fork = true; } else if (fork.equalsIgnoreCase("false")) { this.fork = false; } else { if (log.isWarnEnabled()) { log.warn(Localizer.getMessage("jsp.warning.fork")); } } } String xpoweredBy = config.getInitParameter("xpoweredBy"); if (xpoweredBy != null) { if (xpoweredBy.equalsIgnoreCase("true")) { this.xpoweredBy = true; } else if (xpoweredBy.equalsIgnoreCase("false")) { this.xpoweredBy = false; } else { if (log.isWarnEnabled()) { log.warn(Localizer.getMessage("jsp.warning.xpoweredBy")); } } } String displaySourceFragment = config.getInitParameter("displaySourceFragment"); if (displaySourceFragment != null) { if (displaySourceFragment.equalsIgnoreCase("true")) { this.displaySourceFragment = true; } else if (displaySourceFragment.equalsIgnoreCase("false")) { this.displaySourceFragment = false; } else { if 
(log.isWarnEnabled()) { log.warn(Localizer.getMessage("jsp.warning.displaySourceFragment")); } } } String maxLoadedJsps = config.getInitParameter("maxLoadedJsps"); if (maxLoadedJsps != null) { try { this.maxLoadedJsps = Integer.parseInt(maxLoadedJsps); } catch(NumberFormatException ex) { if (log.isWarnEnabled()) { log.warn(Localizer.getMessage("jsp.warning.maxLoadedJsps", ""+this.maxLoadedJsps)); } } } String jspIdleTimeout = config.getInitParameter("jspIdleTimeout"); if (jspIdleTimeout != null) { try { this.jspIdleTimeout = Integer.parseInt(jspIdleTimeout); } catch(NumberFormatException ex) { if (log.isWarnEnabled()) { log.warn(Localizer.getMessage("jsp.warning.jspIdleTimeout", ""+this.jspIdleTimeout)); } } } // Setup the global Tag Libraries location cache for this // web-application. tldCache = TldCache.getInstance(context); // Setup the jsp config info for this web app. jspConfig = new JspConfig(context); // Create a Tag plugin instance tagPluginManager = new TagPluginManager(context); } }
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jetbrains.python.fixtures;

import com.google.common.base.Joiner;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupEx;
import com.intellij.execution.actions.ConfigurationContext;
import com.intellij.execution.actions.ConfigurationFromContext;
import com.intellij.execution.actions.RunConfigurationProducer;
import com.intellij.execution.configurations.RunConfiguration;
import com.intellij.find.findUsages.CustomUsageSearcher;
import com.intellij.find.findUsages.FindUsagesOptions;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.roots.impl.FilePropertyPusher;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.platform.DirectoryProjectConfigurator;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.refactoring.RefactoringActionHandler;
import com.intellij.testFramework.LightProjectDescriptor;
import com.intellij.testFramework.TestDataPath;
import com.intellij.testFramework.UsefulTestCase;
import com.intellij.testFramework.fixtures.*;
import com.intellij.testFramework.fixtures.impl.LightTempDirTestFixtureImpl;
import com.intellij.usageView.UsageInfo;
import com.intellij.usages.Usage;
import com.intellij.usages.rules.PsiElementUsage;
import com.intellij.util.CommonProcessors.CollectProcessor;
import com.intellij.util.IncorrectOperationException;
import com.jetbrains.python.PythonDialectsTokenSetProvider;
import com.jetbrains.python.PythonHelpersLocator;
import com.jetbrains.python.PythonLanguage;
import com.jetbrains.python.PythonTestUtil;
import com.jetbrains.python.documentation.PyDocumentationSettings;
import com.jetbrains.python.documentation.PythonDocumentationProvider;
import com.jetbrains.python.documentation.docstrings.DocStringFormat;
import com.jetbrains.python.formatter.PyCodeStyleSettings;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.PyFileImpl;
import com.jetbrains.python.psi.impl.PythonLanguageLevelPusher;
import com.jetbrains.python.psi.types.PyType;
import com.jetbrains.python.psi.types.TypeEvalContext;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.Assert;

import java.io.File;
import java.util.*;

/**
 * Base class for Python plugin light tests: wires up a
 * {@link CodeInsightTestFixture} over a light project in {@link #setUp()}
 * and offers helpers for navigation, usages search, formatting, completion
 * and run-configuration creation.
 *
 * @author yole
 */
@TestDataPath("$CONTENT_ROOT/../testData/")
public abstract class PyTestCase extends UsefulTestCase {
  // Mock SDK version strings used by the project descriptors below.
  public static final String PYTHON_2_MOCK_SDK = "2.7";
  public static final String PYTHON_3_MOCK_SDK = "3.4";

  protected static final PyLightProjectDescriptor ourPyDescriptor = new PyLightProjectDescriptor(PYTHON_2_MOCK_SDK);
  protected static final PyLightProjectDescriptor ourPy3Descriptor = new PyLightProjectDescriptor(PYTHON_3_MOCK_SDK);
  private static final String PARSED_ERROR_MSG = "Operations should have been performed on stubs but caused file to be parsed";

  // Created in setUp(), released in tearDown().
  protected CodeInsightTestFixture myFixture;

  /**
   * Finds a VFS file by an OS path (separators are normalized to '/') and
   * refreshes it recursively; returns null when the path does not exist.
   */
  @Nullable
  protected static VirtualFile getVirtualFileByName(String fileName) {
    final VirtualFile path = LocalFileSystem.getInstance().findFileByPath(fileName.replace(File.separatorChar, '/'));
    if (path != null) {
      refreshRecursively(path);
      return path;
    }
    return null;
  }

  /**
   * Reformats currently configured file.
   */
  protected final void reformatFile() {
    WriteCommandAction.runWriteCommandAction(null, () -> doPerformFormatting());
  }

  // Reformats the whole text range of the currently configured file.
  private void doPerformFormatting() throws IncorrectOperationException {
    final PsiFile file = myFixture.getFile();
    final TextRange myTextRange = file.getTextRange();
    CodeStyleManager.getInstance(myFixture.getProject()).reformatText(file, myTextRange.getStartOffset(), myTextRange.getEndOffset());
  }

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    IdeaTestFixtureFactory factory = IdeaTestFixtureFactory.getFixtureFactory();
    TestFixtureBuilder<IdeaProjectTestFixture> fixtureBuilder = factory.createLightFixtureBuilder(getProjectDescriptor());
    final IdeaProjectTestFixture fixture = fixtureBuilder.getFixture();
    myFixture = IdeaTestFixtureFactory.getFixtureFactory().createCodeInsightFixture(fixture, createTempDirFixture());
    myFixture.setUp();
    myFixture.setTestDataPath(getTestDataPath());
    // Reset cached token sets so dialect changes between tests take effect.
    PythonDialectsTokenSetProvider.reset();
  }

  /**
   * @return fixture to be used as temporary dir.
   */
  @NotNull
  protected TempDirTestFixture createTempDirFixture() {
    return new LightTempDirTestFixtureImpl(true); // "tmp://" dir by default
  }

  protected String getTestDataPath() {
    return PythonTestUtil.getTestDataPath();
  }

  @Override
  protected void tearDown() throws Exception {
    try {
      // Undo any forced language level before the project goes away.
      setLanguageLevel(null);
      myFixture.tearDown();
      myFixture = null;
      Extensions.findExtension(FilePropertyPusher.EP_NAME, PythonLanguageLevelPusher.class).flushLanguageLevelCache();
    }
    finally {
      super.tearDown();
      clearFields(this);
    }
  }

  // Subclasses override to run against Python 3 (ourPy3Descriptor).
  @Nullable
  protected LightProjectDescriptor getProjectDescriptor() {
    return ourPyDescriptor;
  }

  // Finds the first occurrence of the signature text and returns the PSI
  // reference at that offset (may be null).
  protected PsiReference findReferenceBySignature(final String signature) {
    int pos = findPosBySignature(signature);
    return findReferenceAt(pos);
  }

  protected PsiReference findReferenceAt(int pos) {
    return myFixture.getFile().findReferenceAt(pos);
  }

  // Offset of the first occurrence of the signature in the document text.
  protected int findPosBySignature(String signature) {
    return PsiDocumentManager.getInstance(myFixture.getProject()).getDocument(myFixture.getFile()).getText().indexOf(signature);
  }

  // Pass null to drop the forced level and restore the default.
  protected void setLanguageLevel(@Nullable LanguageLevel languageLevel) {
    PythonLanguageLevelPusher.setForcedLanguageLevel(myFixture.getProject(), languageLevel);
  }

  // Runs the action with the given language level forced, restoring after.
  protected void runWithLanguageLevel(@NotNull LanguageLevel languageLevel, @NotNull Runnable action) {
    setLanguageLevel(languageLevel);
    try {
      action.run();
    }
    finally {
      setLanguageLevel(null);
    }
  }

  // Runs the runnable with the given docstring format, restoring the old one.
  protected void runWithDocStringFormat(@NotNull DocStringFormat format, @NotNull Runnable runnable) {
    final PyDocumentationSettings settings = PyDocumentationSettings.getInstance(myFixture.getModule());
    final DocStringFormat oldFormat = settings.getFormat();
    settings.setFormat(format);
    try {
      runnable.run();
    }
    finally {
      settings.setFormat(oldFormat);
    }
  }

  // Asserts the file is still stub-based (no AST was built).
  protected static void assertNotParsed(PyFile file) {
    assertNull(PARSED_ERROR_MSG, ((PyFileImpl)file).getTreeElement());
  }

  /**
   * @param name
   * @return class by its name from file
   */
  @NotNull
  protected PyClass getClassByName(@NotNull final String name) {
    return myFixture.findElementByText("class " + name, PyClass.class);
  }

  /**
   * @see #moveByText(com.intellij.testFramework.fixtures.CodeInsightTestFixture, String)
   */
  protected void moveByText(@NotNull final String testToFind) {
    moveByText(myFixture, testToFind);
  }

  /**
   * Finds some text and moves cursor to it (if found)
   *
   * @param fixture    test fixture
   * @param testToFind text to find
   * @throws AssertionError if element not found
   */
  public static void moveByText(@NotNull final CodeInsightTestFixture fixture, @NotNull final String testToFind) {
    final PsiElement element = fixture.findElementByText(testToFind, PsiElement.class);
    assert element != null : "No element found by text: " + testToFind;
    fixture.getEditor().getCaretModel().moveToOffset(element.getTextOffset());
  }

  /**
   * Finds all usages of element. Works much like method in {@link com.intellij.testFramework.fixtures.CodeInsightTestFixture#findUsages(com.intellij.psi.PsiElement)},
   * but supports {@link com.intellij.find.findUsages.CustomUsageSearcher} and {@link com.intellij.psi.search.searches.ReferencesSearch} as well
   *
   * @param element what to find
   * @return usages
   */
  @NotNull
  protected Collection<PsiElement> findUsage(@NotNull final PsiElement element) {
    final Collection<PsiElement> result = new ArrayList<>();
    final CollectProcessor<Usage> usageCollector = new CollectProcessor<>();
    // 1) Custom searchers registered via the extension point.
    for (final CustomUsageSearcher searcher : CustomUsageSearcher.EP_NAME.getExtensions()) {
      searcher.processElementUsages(element, usageCollector, new FindUsagesOptions(myFixture.getProject()));
    }
    for (final Usage usage : usageCollector.getResults()) {
      if (usage instanceof PsiElementUsage) {
        result.add(((PsiElementUsage)usage).getElement());
      }
    }
    // 2) Plain reference search.
    for (final PsiReference reference : ReferencesSearch.search(element).findAll()) {
      result.add(reference.getElement());
    }
    // 3) The fixture's own findUsages. NOTE: duplicates are possible since
    // the three sources are merged into a plain ArrayList.
    for (final UsageInfo info : myFixture.findUsages(element)) {
      result.add(info.getElement());
    }
    return result;
  }

  /**
   * Returns elements certain element allows to navigate to (emulates CTRL+Click, actually).
   * You need to pass element as argument or
   * make sure your fixture is configured for some element (see {@link com.intellij.testFramework.fixtures.CodeInsightTestFixture#getElementAtCaret()})
   *
   * @param element element to fetch navigate elements from (may be null: element under caret would be used in this case)
   * @return elements to navigate to
   */
  @NotNull
  protected Set<PsiElement> getElementsToNavigate(@Nullable final PsiElement element) {
    final Set<PsiElement> result = new HashSet<>();
    final PsiElement elementToProcess = ((element != null) ? element : myFixture.getElementAtCaret());
    for (final PsiReference reference : elementToProcess.getReferences()) {
      final PsiElement directResolve = reference.resolve();
      if (directResolve != null) {
        result.add(directResolve);
      }
      // Also collect all variants of poly-variant references.
      if (reference instanceof PsiPolyVariantReference) {
        for (final ResolveResult resolveResult : ((PsiPolyVariantReference)reference).multiResolve(true)) {
          result.add(resolveResult.getElement());
        }
      }
    }
    return result;
  }

  /**
   * Clears provided file
   *
   * @param file file to clear
   */
  protected void clearFile(@NotNull final PsiFile file) {
    CommandProcessor.getInstance().executeCommand(myFixture.getProject(), () -> ApplicationManager.getApplication().runWriteAction(() -> {
      for (final PsiElement element : file.getChildren()) {
        element.delete();
      }
    }), null, null);
  }

  /**
   * Runs refactoring using special handler
   *
   * @param handler handler to be used
   */
  protected void refactorUsingHandler(@NotNull final RefactoringActionHandler handler) {
    final Editor editor = myFixture.getEditor();
    assertInstanceOf(editor, EditorEx.class);
    handler.invoke(myFixture.getProject(), editor, myFixture.getFile(), ((EditorEx)editor).getDataContext());
  }

  /**
   * Configures project by some path. It is here to emulate {@link com.intellij.platform.PlatformProjectOpenProcessor}
   *
   * @param path         path to open
   * @param configurator configurator to use
   */
  protected void configureProjectByProjectConfigurators(@NotNull final String path, @NotNull final DirectoryProjectConfigurator configurator) {
    // NOTE(review): File.pathSeparator (':' or ';') inside a directory name
    // looks suspicious — File.separator was probably intended; confirm
    // before changing, tests may depend on the literal name.
    final VirtualFile newPath = myFixture.copyDirectoryToProject(path, String.format("%s%s%s", "temp_for_project_conf", File.pathSeparator, path));
    final Ref<Module> moduleRef = new Ref<>(myFixture.getModule());
    configurator.configureProject(myFixture.getProject(), newPath, moduleRef);
  }

  // Absolute path to the bundled "helpers" directory of the community repo.
  public static String getHelpersPath() {
    return new File(PythonHelpersLocator.getPythonCommunityPath(), "helpers").getPath();
  }

  /**
   * Creates run configuration from right click menu
   *
   * @param fixture       test fixture
   * @param expectedClass expected class of run configuration
   * @param <C>           expected class of run configuration
   * @return configuration (if created) or null (otherwise)
   */
  @Nullable
  public static <C extends RunConfiguration> C createRunConfigurationFromContext(
    @NotNull final CodeInsightTestFixture fixture,
    @NotNull final Class<C> expectedClass) {
    final DataContext context = DataManager.getInstance().getDataContext(fixture.getEditor().getComponent());
    for (final RunConfigurationProducer<?> producer : RunConfigurationProducer.EP_NAME.getExtensions()) {
      final ConfigurationFromContext fromContext = producer.createConfigurationFromContext(ConfigurationContext.getFromContext(context));
      if (fromContext == null) {
        continue;
      }
      // First producer yielding a configuration of the expected class wins.
      final C result = PyUtil.as(fromContext.getConfiguration(), expectedClass);
      if (result != null) {
        return result;
      }
    }
    return null;
  }

  /**
   * Compares sets with string sorting them and displaying one-per-line to make comparision easier
   *
   * @param message  message to display in case of error
   * @param actual   actual set
   * @param expected expected set
   */
  protected static void compareStringSets(@NotNull final String message,
                                          @NotNull final Set<String> actual,
                                          @NotNull final Set<String> expected) {
    final Joiner joiner = Joiner.on("\n");
    Assert.assertEquals(message, joiner.join(new TreeSet<>(actual)), joiner.join(new TreeSet<>(expected)));
  }

  /**
   * Clicks certain button in document on caret position
   *
   * @param action what button to click (const from {@link IdeActions}) (btw, there should be some way to express it using annotations)
   * @see IdeActions
   */
  protected final void pressButton(@NotNull final String action) {
    CommandProcessor.getInstance().executeCommand(myFixture.getProject(), () -> myFixture.performEditorAction(action), "", null);
  }

  @NotNull
  protected CommonCodeStyleSettings getCommonCodeStyleSettings() {
    return getCodeStyleSettings().getCommonSettings(PythonLanguage.getInstance());
  }

  @NotNull
  protected PyCodeStyleSettings getPythonCodeStyleSettings() {
    return getCodeStyleSettings().getCustomSettings(PyCodeStyleSettings.class);
  }

  @NotNull
  protected CodeStyleSettings getCodeStyleSettings() {
    return CodeStyleSettingsManager.getSettings(myFixture.getProject());
  }

  @NotNull
  protected CommonCodeStyleSettings.IndentOptions getIndentOptions() {
    //noinspection ConstantConditions
    return getCommonCodeStyleSettings().getIndentOptions();
  }

  /**
   * When you have more than one completion variant, you may use this method providing variant to choose.
   * It only works for one caret (multiple carets not supported) and since it puts tab after completion, be sure to limit
   * line somehow (i.e. with comment).
   * <br/>
   * Example: "user.n[caret]." There are "name" and "nose" fields.
   * By calling this function with "nose" you will end with "user.nose ".
   */
  protected final void completeCaretWithMultipleVariants(@NotNull final String... desiredVariants) {
    final LookupElement[] lookupElements = myFixture.completeBasic();
    final LookupEx lookup = myFixture.getLookup();
    if (lookupElements != null && lookupElements.length > 1) {
      // More than one element returned, check directly because completion can't work in this case
      for (final LookupElement element : lookupElements) {
        final String suggestedString = element.getLookupString();
        if (Arrays.asList(desiredVariants).contains(suggestedString)) {
          // NOTE(review): setCurrentItem is invoked twice on what appears to
          // be the same lookup object — likely a redundant leftover; verify.
          myFixture.getLookup().setCurrentItem(element);
          lookup.setCurrentItem(element);
          myFixture.completeBasicAllCarets('\t');
          return;
        }
      }
    }
  }

  // PSI element at the caret of the configured file (asserts the file exists).
  @NotNull
  protected PsiElement getElementAtCaret() {
    final PsiFile file = myFixture.getFile();
    assertNotNull(file);
    return file.findElementAt(myFixture.getCaretOffset());
  }

  public static void assertType(@NotNull String expectedType, @NotNull PyTypedElement element, @NotNull TypeEvalContext context) {
    assertType("Failed in " + context + " context", expectedType, element, context);
  }

  // Asserts the rendered name of the element's inferred type.
  public static void assertType(@NotNull String message,
                                @NotNull String expectedType,
                                @NotNull PyTypedElement element,
                                @NotNull TypeEvalContext context) {
    final PyType actual = context.getType(element);
    final String actualType = PythonDocumentationProvider.getTypeName(actual, context);
    assertEquals(message, expectedType, actualType);
  }
}
package org.bouncycastle.x509;

import java.io.IOException;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.PrivateKey;
import java.security.Provider;
import java.security.SecureRandom;
import java.security.Security;
import java.security.Signature;
import java.security.SignatureException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import org.bouncycastle.asn1.ASN1Encodable;
import org.bouncycastle.asn1.ASN1Encoding;
import org.bouncycastle.asn1.ASN1Integer;
import org.bouncycastle.asn1.DERNull;
import org.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.bouncycastle.asn1.cryptopro.CryptoProObjectIdentifiers;
import org.bouncycastle.asn1.nist.NISTObjectIdentifiers;
import org.bouncycastle.asn1.oiw.OIWObjectIdentifiers;
import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.bouncycastle.asn1.pkcs.RSASSAPSSparams;
import org.bouncycastle.asn1.teletrust.TeleTrusTObjectIdentifiers;
import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
import org.bouncycastle.asn1.x9.X9ObjectIdentifiers;
import org.bouncycastle.jce.X509Principal;
import org.bouncycastle.util.Strings;

/**
 * Internal helper mapping JCA signature algorithm names to OIDs /
 * AlgorithmIdentifiers and locating Signature/provider implementations.
 */
class X509Util
{
    // Upper-case JCA name -> signature algorithm OID.
    private static Hashtable algorithms = new Hashtable();
    // Upper-case JCA name -> explicit ASN.1 parameters (RSASSA-PSS only).
    private static Hashtable params = new Hashtable();
    // OIDs whose AlgorithmIdentifier must omit the parameters field.
    private static Set noParams = new HashSet();

    static
    {
        algorithms.put("MD2WITHRSAENCRYPTION", PKCSObjectIdentifiers.md2WithRSAEncryption);
        algorithms.put("MD2WITHRSA", PKCSObjectIdentifiers.md2WithRSAEncryption);
        algorithms.put("MD5WITHRSAENCRYPTION", PKCSObjectIdentifiers.md5WithRSAEncryption);
        algorithms.put("MD5WITHRSA", PKCSObjectIdentifiers.md5WithRSAEncryption);
        algorithms.put("SHA1WITHRSAENCRYPTION", PKCSObjectIdentifiers.sha1WithRSAEncryption);
        algorithms.put("SHA1WITHRSA", PKCSObjectIdentifiers.sha1WithRSAEncryption);
        algorithms.put("SHA224WITHRSAENCRYPTION", PKCSObjectIdentifiers.sha224WithRSAEncryption);
        algorithms.put("SHA224WITHRSA", PKCSObjectIdentifiers.sha224WithRSAEncryption);
        algorithms.put("SHA256WITHRSAENCRYPTION", PKCSObjectIdentifiers.sha256WithRSAEncryption);
        algorithms.put("SHA256WITHRSA", PKCSObjectIdentifiers.sha256WithRSAEncryption);
        algorithms.put("SHA384WITHRSAENCRYPTION", PKCSObjectIdentifiers.sha384WithRSAEncryption);
        algorithms.put("SHA384WITHRSA", PKCSObjectIdentifiers.sha384WithRSAEncryption);
        algorithms.put("SHA512WITHRSAENCRYPTION", PKCSObjectIdentifiers.sha512WithRSAEncryption);
        algorithms.put("SHA512WITHRSA", PKCSObjectIdentifiers.sha512WithRSAEncryption);
        algorithms.put("SHA1WITHRSAANDMGF1", PKCSObjectIdentifiers.id_RSASSA_PSS);
        algorithms.put("SHA224WITHRSAANDMGF1", PKCSObjectIdentifiers.id_RSASSA_PSS);
        algorithms.put("SHA256WITHRSAANDMGF1", PKCSObjectIdentifiers.id_RSASSA_PSS);
        algorithms.put("SHA384WITHRSAANDMGF1", PKCSObjectIdentifiers.id_RSASSA_PSS);
        algorithms.put("SHA512WITHRSAANDMGF1", PKCSObjectIdentifiers.id_RSASSA_PSS);
        algorithms.put("RIPEMD160WITHRSAENCRYPTION", TeleTrusTObjectIdentifiers.rsaSignatureWithripemd160);
        algorithms.put("RIPEMD160WITHRSA", TeleTrusTObjectIdentifiers.rsaSignatureWithripemd160);
        algorithms.put("RIPEMD128WITHRSAENCRYPTION", TeleTrusTObjectIdentifiers.rsaSignatureWithripemd128);
        algorithms.put("RIPEMD128WITHRSA", TeleTrusTObjectIdentifiers.rsaSignatureWithripemd128);
        algorithms.put("RIPEMD256WITHRSAENCRYPTION", TeleTrusTObjectIdentifiers.rsaSignatureWithripemd256);
        algorithms.put("RIPEMD256WITHRSA", TeleTrusTObjectIdentifiers.rsaSignatureWithripemd256);
        algorithms.put("SHA1WITHDSA", X9ObjectIdentifiers.id_dsa_with_sha1);
        algorithms.put("DSAWITHSHA1", X9ObjectIdentifiers.id_dsa_with_sha1);
        algorithms.put("SHA224WITHDSA", NISTObjectIdentifiers.dsa_with_sha224);
        algorithms.put("SHA256WITHDSA", NISTObjectIdentifiers.dsa_with_sha256);
        algorithms.put("SHA384WITHDSA", NISTObjectIdentifiers.dsa_with_sha384);
        algorithms.put("SHA512WITHDSA", NISTObjectIdentifiers.dsa_with_sha512);
        algorithms.put("SHA1WITHECDSA", X9ObjectIdentifiers.ecdsa_with_SHA1);
        algorithms.put("ECDSAWITHSHA1", X9ObjectIdentifiers.ecdsa_with_SHA1);
        algorithms.put("SHA224WITHECDSA", X9ObjectIdentifiers.ecdsa_with_SHA224);
        algorithms.put("SHA256WITHECDSA", X9ObjectIdentifiers.ecdsa_with_SHA256);
        algorithms.put("SHA384WITHECDSA", X9ObjectIdentifiers.ecdsa_with_SHA384);
        algorithms.put("SHA512WITHECDSA", X9ObjectIdentifiers.ecdsa_with_SHA512);
        algorithms.put("GOST3411WITHGOST3410", CryptoProObjectIdentifiers.gostR3411_94_with_gostR3410_94);
        algorithms.put("GOST3411WITHGOST3410-94", CryptoProObjectIdentifiers.gostR3411_94_with_gostR3410_94);
        algorithms.put("GOST3411WITHECGOST3410", CryptoProObjectIdentifiers.gostR3411_94_with_gostR3410_2001);
        algorithms.put("GOST3411WITHECGOST3410-2001", CryptoProObjectIdentifiers.gostR3411_94_with_gostR3410_2001);
        algorithms.put("GOST3411WITHGOST3410-2001", CryptoProObjectIdentifiers.gostR3411_94_with_gostR3410_2001);

        //
        // According to RFC 3279, the ASN.1 encoding SHALL (id-dsa-with-sha1) or MUST (ecdsa-with-SHA*) omit the parameters field.
        // The parameters field SHALL be NULL for RSA based signature algorithms.
        //
        noParams.add(X9ObjectIdentifiers.ecdsa_with_SHA1);
        noParams.add(X9ObjectIdentifiers.ecdsa_with_SHA224);
        noParams.add(X9ObjectIdentifiers.ecdsa_with_SHA256);
        noParams.add(X9ObjectIdentifiers.ecdsa_with_SHA384);
        noParams.add(X9ObjectIdentifiers.ecdsa_with_SHA512);
        noParams.add(X9ObjectIdentifiers.id_dsa_with_sha1);
        noParams.add(NISTObjectIdentifiers.dsa_with_sha224);
        noParams.add(NISTObjectIdentifiers.dsa_with_sha256);
        noParams.add(NISTObjectIdentifiers.dsa_with_sha384);
        noParams.add(NISTObjectIdentifiers.dsa_with_sha512);

        //
        // RFC 4491
        //
        noParams.add(CryptoProObjectIdentifiers.gostR3411_94_with_gostR3410_94);
        noParams.add(CryptoProObjectIdentifiers.gostR3411_94_with_gostR3410_2001);

        //
        // explicit params
        //
        AlgorithmIdentifier sha1AlgId = new AlgorithmIdentifier(OIWObjectIdentifiers.idSHA1, new DERNull());
        params.put("SHA1WITHRSAANDMGF1", creatPSSParams(sha1AlgId, 20));

        AlgorithmIdentifier sha224AlgId = new AlgorithmIdentifier(NISTObjectIdentifiers.id_sha224, new DERNull());
        params.put("SHA224WITHRSAANDMGF1", creatPSSParams(sha224AlgId, 28));

        AlgorithmIdentifier sha256AlgId = new AlgorithmIdentifier(NISTObjectIdentifiers.id_sha256, new DERNull());
        params.put("SHA256WITHRSAANDMGF1", creatPSSParams(sha256AlgId, 32));

        AlgorithmIdentifier sha384AlgId = new AlgorithmIdentifier(NISTObjectIdentifiers.id_sha384, new DERNull());
        params.put("SHA384WITHRSAANDMGF1", creatPSSParams(sha384AlgId, 48));

        AlgorithmIdentifier sha512AlgId = new AlgorithmIdentifier(NISTObjectIdentifiers.id_sha512, new DERNull());
        params.put("SHA512WITHRSAANDMGF1", creatPSSParams(sha512AlgId, 64));
    }

    // Builds RSASSA-PSS parameters: MGF1 with the same digest, given salt
    // length, trailer field 1.
    private static RSASSAPSSparams creatPSSParams(AlgorithmIdentifier hashAlgId, int saltSize)
    {
        return new RSASSAPSSparams(
            hashAlgId,
            new AlgorithmIdentifier(PKCSObjectIdentifiers.id_mgf1, hashAlgId),
            new ASN1Integer(saltSize),
            new ASN1Integer(1));
    }

    /**
     * Returns the OID for a JCA algorithm name; unknown names are treated as
     * dotted-decimal OID strings.
     */
    static ASN1ObjectIdentifier getAlgorithmOID(
        String algorithmName)
    {
        algorithmName = Strings.toUpperCase(algorithmName);

        if (algorithms.containsKey(algorithmName))
        {
            return (ASN1ObjectIdentifier)algorithms.get(algorithmName);
        }

        return new ASN1ObjectIdentifier(algorithmName);
    }

    /**
     * Builds the AlgorithmIdentifier for the signature: absent parameters for
     * the noParams OIDs, explicit PSS parameters where registered, NULL
     * otherwise (RFC 3279 rules).
     */
    static AlgorithmIdentifier getSigAlgID(
        ASN1ObjectIdentifier sigOid,
        String algorithmName)
    {
        if (noParams.contains(sigOid))
        {
            return new AlgorithmIdentifier(sigOid);
        }

        algorithmName = Strings.toUpperCase(algorithmName);

        if (params.containsKey(algorithmName))
        {
            return new AlgorithmIdentifier(sigOid, (ASN1Encodable)params.get(algorithmName));
        }
        else
        {
            return new AlgorithmIdentifier(sigOid, new DERNull());
        }
    }

    /** Iterator over all recognised JCA signature algorithm names. */
    static Iterator getAlgNames()
    {
        Enumeration e = algorithms.keys();
        List l = new ArrayList();

        while (e.hasMoreElements())
        {
            l.add(e.nextElement());
        }

        return l.iterator();
    }

    static Signature getSignatureInstance(
        String algorithm)
        throws NoSuchAlgorithmException
    {
        return Signature.getInstance(algorithm);
    }

    static Signature getSignatureInstance(
        String algorithm,
        String provider)
        throws NoSuchProviderException, NoSuchAlgorithmException
    {
        if (provider != null)
        {
            return Signature.getInstance(algorithm, provider);
        }
        else
        {
            return Signature.getInstance(algorithm);
        }
    }

    /**
     * Signs the DER encoding of the given ASN.1 object. The caller-supplied
     * SecureRandom, when non-null, is passed to initSign so signature schemes
     * that need randomness (e.g. PSS, DSA/ECDSA) use it.
     */
    static byte[] calculateSignature(
        ASN1ObjectIdentifier sigOid,
        String sigName,
        PrivateKey key,
        SecureRandom random,
        ASN1Encodable object)
        throws IOException, NoSuchAlgorithmException, InvalidKeyException, SignatureException
    {
        Signature sig;

        if (sigOid == null)
        {
            throw new IllegalStateException("no signature algorithm specified");
        }

        sig = X509Util.getSignatureInstance(sigName);

        if (random != null)
        {
            // FIX: the random was previously ignored (initSign(key) in both
            // branches); honour the caller's RNG as the null-check intends.
            sig.initSign(key, random);
        }
        else
        {
            sig.initSign(key);
        }

        sig.update(object.toASN1Primitive().getEncoded(ASN1Encoding.DER));

        return sig.sign();
    }

    /**
     * Same as {@link #calculateSignature(ASN1ObjectIdentifier, String,
     * PrivateKey, SecureRandom, ASN1Encodable)} but using the named provider.
     */
    static byte[] calculateSignature(
        ASN1ObjectIdentifier sigOid,
        String sigName,
        String provider,
        PrivateKey key,
        SecureRandom random,
        ASN1Encodable object)
        throws IOException, NoSuchProviderException, NoSuchAlgorithmException, InvalidKeyException, SignatureException
    {
        Signature sig;

        if (sigOid == null)
        {
            throw new IllegalStateException("no signature algorithm specified");
        }

        sig = X509Util.getSignatureInstance(sigName, provider);

        if (random != null)
        {
            // FIX: honour the caller's RNG (was initSign(key) in both branches).
            sig.initSign(key, random);
        }
        else
        {
            sig.initSign(key);
        }

        sig.update(object.toASN1Primitive().getEncoded(ASN1Encoding.DER));

        return sig.sign();
    }

    static class Implementation
    {
        Object engine;
        Provider provider;

        Implementation(
            Object engine,
            Provider provider)
        {
            this.engine = engine;
            this.provider = provider;
        }

        Object getEngine()
        {
            return engine;
        }

        Provider getProvider()
        {
            return provider;
        }
    }

    /**
     * see if we can find an algorithm (or its alias and what it represents) in
     * the property table for the given provider.
     */
    static Implementation getImplementation(
        String baseName,
        String algorithm,
        Provider prov)
        throws NoSuchAlgorithmException
    {
        algorithm = Strings.toUpperCase(algorithm);

        String alias;

        // Chase provider aliases to the canonical algorithm name.
        while ((alias = prov.getProperty("Alg.Alias." + baseName + "." + algorithm)) != null)
        {
            algorithm = alias;
        }

        String className = prov.getProperty(baseName + "." + algorithm);

        if (className != null)
        {
            try
            {
                Class cls;
                ClassLoader clsLoader = prov.getClass().getClassLoader();

                if (clsLoader != null)
                {
                    cls = clsLoader.loadClass(className);
                }
                else
                {
                    cls = Class.forName(className);
                }

                return new Implementation(cls.newInstance(), prov);
            }
            catch (ClassNotFoundException e)
            {
                throw new IllegalStateException(
                    "algorithm " + algorithm + " in provider " + prov.getName() + " but no class \"" + className + "\" found!");
            }
            catch (Exception e)
            {
                throw new IllegalStateException(
                    "algorithm " + algorithm + " in provider " + prov.getName() + " but class \"" + className + "\" inaccessible!");
            }
        }

        throw new NoSuchAlgorithmException("cannot find implementation " + algorithm + " for provider " + prov.getName());
    }

    /**
     * return an implementation for a given algorithm/provider.
     * If the provider is null, we grab the first avalaible who has the required algorithm.
     */
    static Implementation getImplementation(
        String baseName,
        String algorithm)
        throws NoSuchAlgorithmException
    {
        Provider[] prov = Security.getProviders();

        //
        // search every provider looking for the algorithm we want.
        //
        for (int i = 0; i != prov.length; i++)
        {
            //
            // try case insensitive first. FIX: the 3-arg overload throws
            // (never returns null) when a provider lacks the algorithm, so
            // both lookups must be caught or the first miss aborts the whole
            // provider search; also the exact-case result was never returned.
            //
            try
            {
                return getImplementation(baseName, Strings.toUpperCase(algorithm), prov[i]);
            }
            catch (NoSuchAlgorithmException e)
            {
                // fall through and try the name as given
            }

            try
            {
                return getImplementation(baseName, algorithm, prov[i]);
            }
            catch (NoSuchAlgorithmException e)
            {
                // continue with the next provider
            }
        }

        throw new NoSuchAlgorithmException("cannot find implementation " + algorithm);
    }

    /** Looks up a named provider, failing loudly when it is not installed. */
    static Provider getProvider(String provider)
        throws NoSuchProviderException
    {
        Provider prov = Security.getProvider(provider);

        if (prov == null)
        {
            throw new NoSuchProviderException("Provider " + provider + " not found");
        }

        return prov;
    }
}
package org.ovirt.engine.ui.uicommonweb.models.volumes; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.ovirt.engine.core.common.action.VdcActionParametersBase; import org.ovirt.engine.core.common.action.VdcActionType; import org.ovirt.engine.core.common.action.VdcReturnValueBase; import org.ovirt.engine.core.common.action.gluster.CreateGlusterVolumeParameters; import org.ovirt.engine.core.common.action.gluster.GlusterVolumeActionParameters; import org.ovirt.engine.core.common.action.gluster.GlusterVolumeOptionParameters; import org.ovirt.engine.core.common.action.gluster.GlusterVolumeParameters; import org.ovirt.engine.core.common.action.gluster.GlusterVolumeRebalanceParameters; import org.ovirt.engine.core.common.action.gluster.UpdateGlusterVolumeSnapshotConfigParameters; import org.ovirt.engine.core.common.asynctasks.gluster.GlusterAsyncTask; import org.ovirt.engine.core.common.asynctasks.gluster.GlusterTaskType; import org.ovirt.engine.core.common.businessentities.Cluster; import org.ovirt.engine.core.common.businessentities.StoragePool; import org.ovirt.engine.core.common.businessentities.gluster.GlusterBrickEntity; import org.ovirt.engine.core.common.businessentities.gluster.GlusterStatus; import org.ovirt.engine.core.common.businessentities.gluster.GlusterVolumeEntity; import org.ovirt.engine.core.common.businessentities.gluster.GlusterVolumeOptionEntity; import org.ovirt.engine.core.common.businessentities.gluster.GlusterVolumeSnapshotConfig; import org.ovirt.engine.core.common.businessentities.gluster.GlusterVolumeTaskStatusEntity; import org.ovirt.engine.core.common.businessentities.gluster.GlusterVolumeType; import org.ovirt.engine.core.common.businessentities.gluster.TransportType; import org.ovirt.engine.core.common.interfaces.SearchType; import org.ovirt.engine.core.common.job.JobExecutionStatus; import org.ovirt.engine.core.common.mode.ApplicationMode; import 
org.ovirt.engine.core.common.queries.ConfigurationValues; import org.ovirt.engine.core.common.queries.GetConfigurationValueParameters; import org.ovirt.engine.core.common.queries.SearchParameters; import org.ovirt.engine.core.common.queries.VdcQueryReturnValue; import org.ovirt.engine.core.common.queries.VdcQueryType; import org.ovirt.engine.core.compat.Guid; import org.ovirt.engine.core.searchbackend.SearchObjects; import org.ovirt.engine.ui.frontend.AsyncQuery; import org.ovirt.engine.ui.frontend.Frontend; import org.ovirt.engine.ui.frontend.INewAsyncCallback; import org.ovirt.engine.ui.frontend.utils.GlusterVolumeUtils; import org.ovirt.engine.ui.frontend.utils.GlusterVolumeUtils.VolumeStatus; import org.ovirt.engine.ui.uicommonweb.Linq; import org.ovirt.engine.ui.uicommonweb.Linq.IPredicate; import org.ovirt.engine.ui.uicommonweb.UICommand; import org.ovirt.engine.ui.uicommonweb.dataprovider.AsyncDataProvider; import org.ovirt.engine.ui.uicommonweb.help.HelpTag; import org.ovirt.engine.ui.uicommonweb.models.ConfirmationModel; import org.ovirt.engine.ui.uicommonweb.models.EntityModel; import org.ovirt.engine.ui.uicommonweb.models.HasEntity; import org.ovirt.engine.ui.uicommonweb.models.ISupportSystemTreeContext; import org.ovirt.engine.ui.uicommonweb.models.ListWithSimpleDetailsModel; import org.ovirt.engine.ui.uicommonweb.models.SystemTreeItemModel; import org.ovirt.engine.ui.uicommonweb.models.SystemTreeItemType; import org.ovirt.engine.ui.uicommonweb.models.configure.PermissionListModel; import org.ovirt.engine.ui.uicommonweb.models.gluster.GlusterClusterSnapshotConfigModel; import org.ovirt.engine.ui.uicommonweb.models.gluster.GlusterVolumeSnapshotConfigModel; import org.ovirt.engine.ui.uicommonweb.models.gluster.GlusterVolumeSnapshotListModel; import org.ovirt.engine.ui.uicommonweb.models.gluster.VolumeBrickListModel; import org.ovirt.engine.ui.uicommonweb.models.gluster.VolumeEventListModel; import 
org.ovirt.engine.ui.uicommonweb.models.gluster.VolumeGeneralModel;
import org.ovirt.engine.ui.uicommonweb.models.gluster.VolumeGeoRepListModel;
import org.ovirt.engine.ui.uicommonweb.models.gluster.VolumeModel;
import org.ovirt.engine.ui.uicommonweb.models.gluster.VolumeParameterListModel;
import org.ovirt.engine.ui.uicommonweb.models.gluster.VolumeProfileStatisticsModel;
import org.ovirt.engine.ui.uicommonweb.models.gluster.VolumeRebalanceStatusModel;
import org.ovirt.engine.ui.uicommonweb.models.gluster.VolumeSnapshotOptionModel;
import org.ovirt.engine.ui.uicommonweb.place.WebAdminApplicationPlaces;
import org.ovirt.engine.ui.uicompat.ConstantsManager;
import org.ovirt.engine.ui.uicompat.Event;
import org.ovirt.engine.ui.uicompat.EventArgs;
import org.ovirt.engine.ui.uicompat.FrontendActionAsyncResult;
import org.ovirt.engine.ui.uicompat.FrontendMultipleActionAsyncResult;
import org.ovirt.engine.ui.uicompat.IEventListener;
import org.ovirt.engine.ui.uicompat.IFrontendActionAsyncCallback;
import org.ovirt.engine.ui.uicompat.IFrontendMultipleActionAsyncCallback;
import org.ovirt.engine.ui.uicompat.UIConstants;

import com.google.inject.Inject;

/**
 * List model backing the Gluster "Volumes" main tab. It searches for
 * {@link GlusterVolumeEntity} items and exposes the UI commands
 * (create/remove, start/stop, rebalance, profiling, snapshot and
 * geo-replication actions) that operate on the current selection.
 */
public class VolumeListModel extends ListWithSimpleDetailsModel<Void, GlusterVolumeEntity> implements ISupportSystemTreeContext {

    // Defaults offered by the "create volume" dialog.
    public static final Integer REPLICATE_COUNT_DEFAULT = 2;
    public static final Integer STRIPE_COUNT_DEFAULT = 4;

    private UICommand newVolumeCommand;

    public UICommand getNewVolumeCommand() {
        return newVolumeCommand;
    }

    private void setNewVolumeCommand(UICommand value) {
        newVolumeCommand = value;
    }

    private UICommand removeVolumeCommand;

    public UICommand getRemoveVolumeCommand() {
        return removeVolumeCommand;
    }

    private void setRemoveVolumeCommand(UICommand value) {
        removeVolumeCommand = value;
    }

    private UICommand startCommand;
    private UICommand stopCommand;
    private UICommand startRebalanceCommand;
    private UICommand stopRebalanceCommand;
    private UICommand optimizeForVirtStoreCommand;
    private UICommand startVolumeProfilingCommand;
    private UICommand showVolumeProfileDetailsCommand;
    private UICommand stopVolumeProfilingCommand;
    private UICommand configureClusterSnapshotOptionsCommand;
    private UICommand configureVolumeSnapshotOptionsCommand;
    private UICommand createSnapshotCommand;
    private UICommand editSnapshotScheduleCommand;
    private UICommand newGeoRepSessionCommand;

    // Name of the gluster meta volume, fetched asynchronously from engine
    // configuration in the constructor; used to warn before removing it.
    private String glusterMetaVolumeName;

    public UICommand getNewGeoRepSessionCommand() {
        return newGeoRepSessionCommand;
    }

    public void setNewGeoRepSessionCommand(UICommand newGeoRepSessionCommand) {
        this.newGeoRepSessionCommand = newGeoRepSessionCommand;
    }

    public UICommand getStartRebalanceCommand() {
        return startRebalanceCommand;
    }

    public void setStartRebalanceCommand(UICommand startRebalanceCommand) {
        this.startRebalanceCommand = startRebalanceCommand;
    }

    public UICommand getStopRebalanceCommand() {
        return stopRebalanceCommand;
    }

    public void setStopRebalanceCommand(UICommand stopRebalanceCommand) {
        this.stopRebalanceCommand = stopRebalanceCommand;
    }

    private UICommand statusRebalanceCommand;

    public UICommand getStatusRebalanceCommand() {
        return statusRebalanceCommand;
    }

    public void setStatusRebalanceCommand(UICommand statusRebalanceCommand) {
        this.statusRebalanceCommand = statusRebalanceCommand;
    }

    public UICommand getStartCommand() {
        return startCommand;
    }

    public void setStartCommand(UICommand startCommand) {
        this.startCommand = startCommand;
    }

    public UICommand getStopCommand() {
        return stopCommand;
    }

    public void setStopCommand(UICommand stopCommand) {
        this.stopCommand = stopCommand;
    }

    public UICommand getOptimizeForVirtStoreCommand() {
        return optimizeForVirtStoreCommand;
    }

    public void setOptimizeForVirtStoreCommand(UICommand optimizeForVirtStoreCommand) {
        this.optimizeForVirtStoreCommand = optimizeForVirtStoreCommand;
    }

    // Sub-tab detail models, injected once in the constructor and never replaced.
    private final VolumeBrickListModel brickListModel;

    public VolumeBrickListModel getBrickListModel() {
        return this.brickListModel;
    }

    private final VolumeGeoRepListModel geoRepListModel;

    public VolumeGeoRepListModel getGeoRepListModel() {
        return geoRepListModel;
    }

    private final GlusterVolumeSnapshotListModel snapshotListModel;

    public GlusterVolumeSnapshotListModel getSnapshotListModel() {
        return snapshotListModel;
    }

    public UICommand getStartVolumeProfilingCommand() {
        return startVolumeProfilingCommand;
    }

    public void setStartVolumeProfilingCommand(UICommand startVolumeProfilingCommand) {
        this.startVolumeProfilingCommand = startVolumeProfilingCommand;
    }

    public UICommand getShowVolumeProfileDetailsCommand() {
        return showVolumeProfileDetailsCommand;
    }

    public void setShowVolumeProfileDetailsCommand(UICommand showVolumeProfileDetailsCommand) {
        this.showVolumeProfileDetailsCommand = showVolumeProfileDetailsCommand;
    }

    public UICommand getStopVolumeProfilingCommand() {
        return stopVolumeProfilingCommand;
    }

    public void setStopVolumeProfilingCommand(UICommand stopVolumeProfilingCommand) {
        this.stopVolumeProfilingCommand = stopVolumeProfilingCommand;
    }

    public UICommand getConfigureClusterSnapshotOptionsCommand() {
        return this.configureClusterSnapshotOptionsCommand;
    }

    public void setConfigureClusterSnapshotOptionsCommand(UICommand command) {
        this.configureClusterSnapshotOptionsCommand = command;
    }

    public UICommand getConfigureVolumeSnapshotOptionsCommand() {
        return this.configureVolumeSnapshotOptionsCommand;
    }

    public void setConfigureVolumeSnapshotOptionsCommand(UICommand command) {
        this.configureVolumeSnapshotOptionsCommand = command;
    }

    public UICommand getCreateSnapshotCommand() {
        return this.createSnapshotCommand;
    }

    public void setCreateSnapshotCommand(UICommand command) {
        this.createSnapshotCommand = command;
    }

    public UICommand getEditSnapshotScheduleCommand() {
        return this.editSnapshotScheduleCommand;
    }

    public void setEditSnapshotScheduleCommand(UICommand command) {
        this.editSnapshotScheduleCommand = command;
    }

    /**
     * Wires the injected sub-tab detail models, registers every UI command,
     * and kicks off an async fetch of the gluster meta volume name.
     */
    @Inject
    public VolumeListModel(final VolumeBrickListModel volumeBrickListModel,
            final VolumeGeneralModel volumeGeneralModel,
            final 
VolumeParameterListModel volumeParameterListModel,
            final PermissionListModel<GlusterVolumeEntity> permissionListModel,
            final VolumeEventListModel volumeEventListModel,
            final VolumeGeoRepListModel geoRepListModel,
            final GlusterVolumeSnapshotListModel snapshotListModel) {
        this.brickListModel = volumeBrickListModel;
        this.geoRepListModel = geoRepListModel;
        this.snapshotListModel = snapshotListModel;
        setDetailList(volumeGeneralModel, volumeParameterListModel, permissionListModel, volumeEventListModel);

        setTitle(ConstantsManager.getInstance().getConstants().volumesTitle());
        setApplicationPlace(WebAdminApplicationPlaces.volumeMainTabPlace);

        setDefaultSearchString("Volumes:"); //$NON-NLS-1$
        setSearchString(getDefaultSearchString());
        setSearchObjects(new String[] { SearchObjects.GLUSTER_VOLUME_OBJ_NAME, SearchObjects.GLUSTER_VOLUME_PLU_OBJ_NAME });
        setAvailableInModes(ApplicationMode.GlusterOnly);

        // Register every command; these instances/names are matched in executeCommand().
        setNewVolumeCommand(new UICommand("Create Volume", this)); //$NON-NLS-1$
        setRemoveVolumeCommand(new UICommand("Remove", this)); //$NON-NLS-1$
        setStartCommand(new UICommand("Start", this)); //$NON-NLS-1$
        setStopCommand(new UICommand("Stop", this)); //$NON-NLS-1$
        setStartRebalanceCommand(new UICommand("StartRebalance", this)); //$NON-NLS-1$
        setStopRebalanceCommand(new UICommand("StopRebalace", this)); //$NON-NLS-1$
        setStatusRebalanceCommand(new UICommand("StatusRebalance", this)); //$NON-NLS-1$
        setStartVolumeProfilingCommand(new UICommand("startProfiling", this));//$NON-NLS-1$
        setShowVolumeProfileDetailsCommand(new UICommand("showProfileDetails", this));//$NON-NLS-1$
        setStopVolumeProfilingCommand(new UICommand("stopProfiling", this));//$NON-NLS-1$
        setOptimizeForVirtStoreCommand(new UICommand("OptimizeForVirtStore", this)); //$NON-NLS-1$
        setConfigureClusterSnapshotOptionsCommand(new UICommand("configureClusterSnapshotOptions", this)); //$NON-NLS-1$
        setConfigureVolumeSnapshotOptionsCommand(new UICommand("configureVolumeSnapshotOptions", this)); //$NON-NLS-1$
        setCreateSnapshotCommand(new UICommand("createSnapshot", this)); //$NON-NLS-1$
        setEditSnapshotScheduleCommand(new UICommand("editSnapshotSchedule", this)); //$NON-NLS-1$
        setNewGeoRepSessionCommand(new UICommand("createGeoRepSession", this));//$NON-NLS-1$

        // Selection-dependent commands start disabled until a selection exists.
        getRemoveVolumeCommand().setIsExecutionAllowed(false);
        getStartCommand().setIsExecutionAllowed(false);
        getStopCommand().setIsExecutionAllowed(false);
        getStartRebalanceCommand().setIsExecutionAllowed(false);
        getStopRebalanceCommand().setIsExecutionAllowed(false);
        getStartVolumeProfilingCommand().setIsExecutionAllowed(false);
        getStopVolumeProfilingCommand().setIsExecutionAllowed(false);
        getShowVolumeProfileDetailsCommand().setIsExecutionAllowed(true);

        getSearchNextPageCommand().setIsAvailable(true);
        getSearchPreviousPageCommand().setIsAvailable(true);

        // Get the meta volume name
        AsyncQuery aQuery = new AsyncQuery();
        aQuery.setModel(this);
        aQuery.asyncCallback = new INewAsyncCallback() {
            @Override
            public void onSuccess(Object model, Object returnValue) {
                glusterMetaVolumeName = (String) returnValue;
            }
        };
        AsyncDataProvider.getInstance()
                .getConfigFromCache(new GetConfigurationValueParameters(ConfigurationValues.GlusterMetaVolumeName,
                        AsyncDataProvider.getInstance().getDefaultConfigurationVersion()), aQuery);
    }

    // Registers the sub-tab detail models shown under the volumes grid.
    private void setDetailList(final VolumeGeneralModel volumeGeneralModel,
            final VolumeParameterListModel volumeParameterListModel,
            final PermissionListModel<GlusterVolumeEntity> permissionListModel,
            final VolumeEventListModel volumeEventListModel) {
        List<HasEntity<GlusterVolumeEntity>> list = new ArrayList<>();
        list.add(volumeGeneralModel);
        list.add(volumeParameterListModel);
        list.add(getBrickListModel());
        list.add(getGeoRepListModel());
        list.add(permissionListModel);
        list.add(volumeEventListModel);
        list.add(getSnapshotListModel());
        setDetailModels(list);
    }

    // Opens the "create volume" dialog (no-op when another dialog is open).
    private void newVolume() {
        if (getWindow() != null) {
            return;
        }

        VolumeModel volumeModel = new VolumeModel();
        volumeModel.setHelpTag(HelpTag.new_volume);
        
volumeModel.setHashName("new_volume"); //$NON-NLS-1$
        volumeModel.setTitle(ConstantsManager.getInstance().getConstants().newVolumeTitle());
        setWindow(volumeModel);

        AsyncQuery _asyncQuery = new AsyncQuery();
        _asyncQuery.setModel(this);
        // Populate the data center / cluster pickers once the DC list arrives;
        // in a system-tree context the choice is pinned to the selected item.
        _asyncQuery.asyncCallback = new INewAsyncCallback() {
            @Override
            public void onSuccess(Object model, Object result) {
                VolumeListModel volumeListModel = (VolumeListModel) model;
                VolumeModel innerVolumeModel = (VolumeModel) volumeListModel.getWindow();
                ArrayList<StoragePool> dataCenters = (ArrayList<StoragePool>) result;
                final UIConstants constants = ConstantsManager.getInstance().getConstants();
                if (volumeListModel.getSystemTreeSelectedItem() != null) {
                    switch (volumeListModel.getSystemTreeSelectedItem().getType()) {
                    case Volumes:
                    case Cluster:
                    case Cluster_Gluster:
                        // Tree selection is a cluster: lock both DC and cluster.
                        Cluster cluster = (Cluster) volumeListModel.getSystemTreeSelectedItem().getEntity();
                        for (StoragePool dc : dataCenters) {
                            if (dc.getId().equals(cluster.getStoragePoolId())) {
                                innerVolumeModel.getDataCenter()
                                        .setItems(new ArrayList<>(Arrays.asList(new StoragePool[]{dc})));
                                innerVolumeModel.getDataCenter().setSelectedItem(dc);
                                break;
                            }
                        }
                        innerVolumeModel.getDataCenter().setIsChangeable(false);
                        innerVolumeModel.getDataCenter().setChangeProhibitionReason(
                                constants.cannotChangeDCInTreeContext());
                        innerVolumeModel.getCluster().setItems(Arrays.asList(cluster));
                        innerVolumeModel.getCluster().setSelectedItem(cluster);
                        innerVolumeModel.getCluster().setIsChangeable(false);
                        innerVolumeModel.getCluster().setChangeProhibitionReason(
                                constants.cannotChangeClusterInTreeContext());
                        break;
                    case Clusters:
                    case DataCenter:
                        // Tree selection is a data center: lock the DC only.
                        StoragePool selectDataCenter = (StoragePool) volumeListModel.getSystemTreeSelectedItem().getEntity();
                        innerVolumeModel.getDataCenter()
                                .setItems(new ArrayList<>(Arrays.asList(new StoragePool[]{selectDataCenter})));
                        innerVolumeModel.getDataCenter().setSelectedItem(selectDataCenter);
                        innerVolumeModel.getDataCenter().setIsChangeable(false);
                        innerVolumeModel.getDataCenter().setChangeProhibitionReason(
                                constants.cannotChangeDCInTreeContext());
                        break;
                    default:
                        // No restricting tree context: offer all data centers.
                        innerVolumeModel.getDataCenter().setItems(dataCenters);
                        innerVolumeModel.getDataCenter().setSelectedItem(Linq.firstOrNull(dataCenters));
                        break;
                    }
                } else {
                    innerVolumeModel.getDataCenter().setItems(dataCenters);
                    innerVolumeModel.getDataCenter().setSelectedItem(Linq.firstOrNull(dataCenters));
                }

                UICommand command = UICommand.createDefaultOkUiCommand("onCreateVolume", volumeListModel); //$NON-NLS-1$
                innerVolumeModel.getCommands().add(command);
                innerVolumeModel.getCommands().add(UICommand.createCancelUiCommand("Cancel", volumeListModel)); //$NON-NLS-1$
            }
        };
        AsyncDataProvider.getInstance().getDataCenterByClusterServiceList(_asyncQuery, false, true);
    }

    // True when any of the given volumes is the configured gluster meta volume.
    private boolean isMetaVolumeInList(List<GlusterVolumeEntity> volumes) {
        for (GlusterVolumeEntity volume : volumes) {
            if (volume.getName().equals(glusterMetaVolumeName)) {
                return true;
            }
        }
        return false;
    }

    // Opens the remove-volumes confirmation dialog for the current selection.
    private void removeVolume() {
        if (getWindow() != null) {
            return;
        }

        ConfirmationModel model = new ConfirmationModel();
        setWindow(model);
        model.setTitle(ConstantsManager.getInstance().getConstants().removeVolumesTitle());
        model.setHelpTag(HelpTag.remove_volume);
        model.setHashName("remove_volume"); //$NON-NLS-1$
        // Show the stronger warning when the meta volume is part of the selection.
        if (isMetaVolumeInList(Linq.<GlusterVolumeEntity> cast(getSelectedItems()))) {
            model.setNote(ConstantsManager.getInstance().getConstants().removeMetaVolumeWarning());
        } else {
            model.setNote(ConstantsManager.getInstance().getConstants().removeVolumesWarning());
        }

        if (getSelectedItems() == null) {
            return;
        }

        ArrayList<String> list = new ArrayList<>();
        for (GlusterVolumeEntity item : Linq.<GlusterVolumeEntity> cast(getSelectedItems())) {
            list.add(item.getName());
        }
        model.setItems(list);

        UICommand tempVar = UICommand.createDefaultOkUiCommand("OnRemove", this); //$NON-NLS-1$
        model.getCommands().add(tempVar);
        UICommand tempVar2 = UICommand.createCancelUiCommand("Cancel", this); //$NON-NLS-1$
        
model.getCommands().add(tempVar2); } private void onRemoveVolume() { if (getWindow() == null) { return; } ConfirmationModel model = (ConfirmationModel) getWindow(); if (model.getProgress() != null) { return; } if (getSelectedItems() == null) { return; } ArrayList<VdcActionParametersBase> list = new ArrayList<>(); for (Object item : getSelectedItems()) { GlusterVolumeEntity volume = (GlusterVolumeEntity) item; list.add(new GlusterVolumeActionParameters(volume.getId(), false)); } model.startProgress(); Frontend.getInstance().runMultipleAction(VdcActionType.DeleteGlusterVolume, list, new IFrontendMultipleActionAsyncCallback() { @Override public void executed(FrontendMultipleActionAsyncResult result) { ConfirmationModel localModel = (ConfirmationModel) result.getState(); localModel.stopProgress(); cancel(); } }, model); } @Override protected void syncSearch() { SearchParameters tempVar = new SearchParameters(getSearchString(), SearchType.GlusterVolume, isCaseSensitiveSearch()); tempVar.setMaxCount(getSearchPageSize()); super.syncSearch(VdcQueryType.Search, tempVar); } @Override protected void onSelectedItemChanged() { super.onSelectedItemChanged(); updateActionAvailability(); GlusterVolumeEntity selectedVolume = provideDetailModelEntity(getSelectedItem()); getBrickListModel().setVolumeEntity(selectedVolume); getGeoRepListModel().setEntity(selectedVolume); getSnapshotListModel().setEntity(selectedVolume); } @Override protected void selectedItemsChanged() { super.onSelectedItemChanged(); updateActionAvailability(); } private void updateActionAvailability() { boolean allowStart = true; boolean allowStop = true; boolean allowRemove = true; boolean allowStartRebalance = true; boolean allowStopRebalance = true; boolean allowStatusRebalance = true; boolean allowOptimize = true; boolean allowStartProfiling = false; boolean allowStopProfiling = false; boolean allowProfileStatisticsDetails = false; boolean allowConfigureClusterSnapshotOptions = true; boolean 
allowConfigureVolumeSnapshotOptions = false; boolean allowCreateSnapshot = false; boolean allowEditSnapshotSchedule = false; boolean allowCreateGeoRepSession = false; if (getSelectedItems() == null || getSelectedItems().size() == 0) { allowStart = false; allowStop = false; allowRemove = false; allowStartRebalance = false; allowStopRebalance = false; allowStatusRebalance = false; allowOptimize = false; } else { List<GlusterVolumeEntity> list = Linq.<GlusterVolumeEntity> cast(getSelectedItems()); allowStartProfiling = isStartProfileAvailable(list); allowStopProfiling = isStopProfileAvailable(list); for (GlusterVolumeEntity volume : list) { if (volume.getStatus() == GlusterStatus.UP) { VolumeStatus status = GlusterVolumeUtils.getVolumeStatus(volume); allowStart = status == VolumeStatus.ALL_BRICKS_DOWN || status == VolumeStatus.SOME_BRICKS_DOWN; allowRemove = false; if (!volume.getVolumeType().isDistributedType()) { allowStartRebalance = false; } } else if (volume.getStatus() == GlusterStatus.DOWN) { allowStop = false; allowStartRebalance = false; } GlusterAsyncTask asyncTask = volume.getAsyncTask(); if (asyncTask != null) { allowStartRebalance = allowStartRebalance && ( asyncTask.getStatus() == null ? 
asyncTask.getJobStatus() != JobExecutionStatus.STARTED : asyncTask.getStatus() != JobExecutionStatus.STARTED); } } if (list.size() == 1) { GlusterVolumeEntity volumeEntity = list.get(0); GlusterAsyncTask asyncTask = volumeEntity.getAsyncTask(); allowStopRebalance = volumeEntity.getStatus() == GlusterStatus.UP && asyncTask != null && asyncTask.getType() == GlusterTaskType.REBALANCE && asyncTask.getStatus() == JobExecutionStatus.STARTED; allowConfigureVolumeSnapshotOptions = volumeEntity.getStatus() == GlusterStatus.UP; allowCreateGeoRepSession = volumeEntity.getStatus() == GlusterStatus.UP; allowCreateSnapshot = isCreateSnapshotAvailable(volumeEntity); } else { allowStopRebalance = false; } allowStatusRebalance = getRebalanceStatusAvailability(getSelectedItems()); allowProfileStatisticsDetails = getProfileStatisticsAvailability(list); allowEditSnapshotSchedule = isEditSnapshotScheduleAvailable(list); } getStartCommand().setIsExecutionAllowed(allowStart); getStopCommand().setIsExecutionAllowed(allowStop); getRemoveVolumeCommand().setIsExecutionAllowed(allowRemove); getStartRebalanceCommand().setIsExecutionAllowed(allowStartRebalance); getStopRebalanceCommand().setIsExecutionAllowed(allowStopRebalance); getStatusRebalanceCommand().setIsExecutionAllowed(allowStatusRebalance); getOptimizeForVirtStoreCommand().setIsExecutionAllowed(allowOptimize); getConfigureClusterSnapshotOptionsCommand().setIsExecutionAllowed(allowConfigureClusterSnapshotOptions); getConfigureVolumeSnapshotOptionsCommand().setIsExecutionAllowed(allowConfigureVolumeSnapshotOptions); getCreateSnapshotCommand().setIsExecutionAllowed(allowCreateSnapshot); getEditSnapshotScheduleCommand().setIsExecutionAllowed(allowEditSnapshotSchedule); // System tree dependent actions. 
boolean isAvailable = !(getSystemTreeSelectedItem() != null && getSystemTreeSelectedItem().getType() == SystemTreeItemType.Volume); getNewVolumeCommand().setIsAvailable(isAvailable); getRemoveVolumeCommand().setIsAvailable(isAvailable); getStartVolumeProfilingCommand().setIsExecutionAllowed(allowStartProfiling); getStopVolumeProfilingCommand().setIsExecutionAllowed(allowStopProfiling); getShowVolumeProfileDetailsCommand().setIsExecutionAllowed(allowProfileStatisticsDetails); getNewGeoRepSessionCommand().setIsExecutionAllowed(allowCreateGeoRepSession); } private boolean isCreateSnapshotAvailable(GlusterVolumeEntity volume) { if (volume.getStatus() == GlusterStatus.UP) { List<GlusterBrickEntity> bricks = volume.getBricks(); for (GlusterBrickEntity brick : bricks) { if (brick.getStatus() != GlusterStatus.UP) { return false; } } return true; } else { return false; } } private boolean isEditSnapshotScheduleAvailable(List<GlusterVolumeEntity> list) { return (list.size() == 1) && (list.get(0).getStatus() == GlusterStatus.UP) && list.get(0) .getSnapshotScheduled(); } private boolean isStopProfileAvailable(List<GlusterVolumeEntity> list) { if (getSelectedItems().size() == 0) { return false; } else { for (GlusterVolumeEntity volumeEntity : list) { if (volumeEntity.getStatus() == GlusterStatus.DOWN) { return false; } if ((volumeEntity.getOptionValue("diagnostics.latency-measurement") == null)|| !volumeEntity.getOptionValue("diagnostics.latency-measurement").equals("on")) {//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$ return false; } } return true; } } private boolean isStartProfileAvailable(List<GlusterVolumeEntity> list) { if (getSelectedItems().size() == 0) { return false; } else { for (GlusterVolumeEntity volumeEntity : list) { if (volumeEntity.getStatus() == GlusterStatus.DOWN) { return false; } if ((volumeEntity.getOptionValue("diagnostics.latency-measurement") != null) && volumeEntity.getOptionValue("diagnostics.latency-measurement").equals("on")) 
{//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$ return false; } } return true; } } private boolean getRebalanceStatusAvailability(List<GlusterVolumeEntity> selectedVolumes) { if (selectedVolumes.size() == 1) { GlusterVolumeEntity selectedVolume = selectedVolumes.get(0); if (selectedVolume.getStatus() == GlusterStatus.UP && selectedVolume.getVolumeType().isDistributedType() && selectedVolume.getBricks().size() > 1) { return true; } } return false; } private boolean getProfileStatisticsAvailability(List<GlusterVolumeEntity> selectedVolumes) { if(selectedVolumes.size() == 1) { GlusterVolumeEntity selectedVolume = selectedVolumes.get(0); if(selectedVolume.getStatus() == GlusterStatus.UP) { return true; } } return false; } private void cancel() { setWindow(null); } @Override public void executeCommand(UICommand command) { super.executeCommand(command); if (command.equals(getNewVolumeCommand())) { newVolume(); } else if (command.equals(getRemoveVolumeCommand())) { removeVolume(); } else if(command.getName().equals("closeConfirmationWindow")) {//$NON-NLS-1$ closeConfirmationWindow(); } else if (command.getName().equals("Cancel")) { //$NON-NLS-1$ cancel(); } else if (command.getName().equals("onCreateVolume")) { //$NON-NLS-1$ onCreateVolume(); } else if (command.equals(getStartCommand())) { start(); } else if (command.equals(getStopCommand())) { stop(); } else if (command.equals(getStartRebalanceCommand())) { startRebalance(); } else if (command.equals(getStopRebalanceCommand())) { stopRebalance(); } else if (command.equals(getNewGeoRepSessionCommand())) { getGeoRepListModel().getNewSessionCommand().execute(); } else if (command.getName().equals("onStopRebalance")) { //$NON-NLS-1$ onStopRebalance(); } else if (command.equals(getStatusRebalanceCommand())) { showRebalanceStatus(); } else if (command.getName().equals("CancelConfirmation")) { //$NON-NLS-1$ setConfirmWindow(null); } else if (command.getName().equals("CancelRebalanceStatus")) {//$NON-NLS-1$ cancelRebalanceStatus(); 
}else if (command.equals(getOptimizeForVirtStoreCommand())) {
            optimizeForVirtStore();
        } else if (command.getName().equals("onStop")) {//$NON-NLS-1$
            onStop();
        } else if (command.getName().equals("OnRemove")) { //$NON-NLS-1$
            onRemoveVolume();
        } else if(command.getName().equals("stop_rebalance_from_status")) {//$NON-NLS-1$
            stopRebalance();
        } else if(command.equals(getStartVolumeProfilingCommand()) || command.getName().equals("startProfiling")) {//$NON-NLS-1$
            startVolumeProfiling();
        } else if(command.equals(getStopVolumeProfilingCommand()) || command.getName().equals("stopProfiling")) {//$NON-NLS-1$
            stopVolumeProfiling();
        } else if(command.equals(getShowVolumeProfileDetailsCommand()) || command.getName().equals("showProfileDetails")) {//$NON-NLS-1$
            showVolumeProfiling();
        }else if(command.getName().equalsIgnoreCase("closeProfileStats")) {//$NON-NLS-1$
            setWindow(null);
        } else if(command.getName().equalsIgnoreCase("CancelOptimizeForVirtStore")) {//$NON-NLS-1$
            setConfirmWindow(null);
        } else if (command.getName().equalsIgnoreCase("ConfirmOptimiseForVirtStore")) {//$NON-NLS-1$
            List<GlusterVolumeEntity> selectedVolumes = new ArrayList<>();
            for(Object selectedVolume : getSelectedItems()) {
                selectedVolumes.add((GlusterVolumeEntity) selectedVolume);
            }
            optimizeVolumesForVirtStore(selectedVolumes);
        } else if (command.equals(getConfigureClusterSnapshotOptionsCommand())) {
            configureClusterSnapshotOptions();
        } else if (command.getName().equalsIgnoreCase("confirmConfigureClusterSnapshotOptions")) {//$NON-NLS-1$
            confirmConfigureClusterSnapshotOptions();
        } else if (command.getName().equalsIgnoreCase("onConfigureClusterSnapshotOptions")) {//$NON-NLS-1$
            onConfigureClusterSnapshotOptions();
        } else if (command.equals(getConfigureVolumeSnapshotOptionsCommand())) {
            configureVolumeSnapshotOptions();
        } else if (command.getName().equalsIgnoreCase("confirmConfigureVolumeSnapshotOptions")) {//$NON-NLS-1$
            confirmConfigureVolumeSnapshotOptions();
        } else if (command.getName().equalsIgnoreCase("onConfigureVolumeSnapshotOptions")) {//$NON-NLS-1$
            onConfigureVolumeSnapshotOptions();
        } else if (command.equals(getCreateSnapshotCommand())) {
            getSnapshotListModel().getCreateSnapshotCommand().execute();
        } else if (command.equals(getEditSnapshotScheduleCommand())) {
            getSnapshotListModel().getEditSnapshotScheduleCommand().execute();
        } else if (command.getName().equals("startVolumeWithForceOption")) {//$NON-NLS-1$
            prepareForStartVolume(false);
        }
    }

    // Fires StartGlusterVolumeProfile for every selected volume.
    private void startVolumeProfiling() {
        if (getSelectedItems() == null) {
            return;
        }
        List<GlusterVolumeEntity> selectedVolumesList = Linq.<GlusterVolumeEntity> cast(getSelectedItems());
        ArrayList<VdcActionParametersBase> parameters = new ArrayList<>();
        for (GlusterVolumeEntity currentSelectedVolume : selectedVolumesList) {
            GlusterVolumeParameters parameter = new GlusterVolumeParameters(currentSelectedVolume.getId());
            parameters.add(parameter);
        }
        Frontend.getInstance().runMultipleAction(VdcActionType.StartGlusterVolumeProfile, parameters);
    }

    // Fires StopGlusterVolumeProfile for every selected volume.
    private void stopVolumeProfiling() {
        if (getSelectedItems() == null) {
            return;
        }
        List<GlusterVolumeEntity> selectedVolumesList = Linq.<GlusterVolumeEntity> cast(getSelectedItems());
        ArrayList<VdcActionParametersBase> parameters = new ArrayList<>();
        for (GlusterVolumeEntity currentSelectedVolume : selectedVolumesList) {
            GlusterVolumeParameters parameter = new GlusterVolumeParameters(currentSelectedVolume.getId());
            parameters.add(parameter);
        }
        Frontend.getInstance().runMultipleAction(VdcActionType.StopGlusterVolumeProfile, parameters);
    }

    // Closes the confirmation popup, stopping any progress indicator first.
    private void closeConfirmationWindow() {
        if(getConfirmWindow() == null) {
            return;
        }
        getConfirmWindow().stopProgress();
        setConfirmWindow(null);
    }

    // Fires StartRebalanceGlusterVolume for every selected volume.
    private void startRebalance() {
        if (getSelectedItems() == null) {
            return;
        }
        ArrayList<VdcActionParametersBase> list = new ArrayList<>();
        for (Object item : getSelectedItems()) {
            GlusterVolumeEntity volume = (GlusterVolumeEntity) item;
            list.add(new 
GlusterVolumeRebalanceParameters(volume.getId(), false, false));
        }
        Frontend.getInstance().runMultipleAction(VdcActionType.StartRebalanceGlusterVolume, list, null, true, true);
    }

    // Asks for confirmation before stopping rebalance on the selected volume.
    private void stopRebalance() {
        if (getSelectedItem() == null) {
            return;
        }
        ConfirmationModel model = new ConfirmationModel();
        GlusterVolumeEntity volumeEntity = getSelectedItem();
        setConfirmWindow(model);
        model.setTitle(ConstantsManager.getInstance().getConstants().confirmStopVolumeRebalanceTitle());
        model.setHelpTag(HelpTag.volume_rebalance_stop);
        model.setHashName("volume_rebalance_stop"); //$NON-NLS-1$
        model.setMessage(ConstantsManager.getInstance().getMessages().confirmStopVolumeRebalance(
                volumeEntity.getName()));
        UICommand okCommand = UICommand.createDefaultOkUiCommand("onStopRebalance", this); //$NON-NLS-1$
        model.getCommands().add(okCommand);
        UICommand cancelCommand = UICommand.createCancelUiCommand("CancelConfirmation", this); //$NON-NLS-1$
        model.getCommands().add(cancelCommand);
    }

    // Stops the rebalance task; on success shows the final rebalance status.
    private void onStopRebalance() {
        ConfirmationModel model = (ConfirmationModel) getConfirmWindow();
        if (model.getProgress() != null) {
            return;
        }
        if (getSelectedItems() == null) {
            return;
        }
        model.startProgress();
        final GlusterVolumeEntity volumeEntity = getSelectedItem();
        GlusterVolumeRebalanceParameters param = new GlusterVolumeRebalanceParameters(volumeEntity.getId(), false, false);

        Frontend.getInstance().runAction(VdcActionType.StopRebalanceGlusterVolume, param,
                new IFrontendActionAsyncCallback() {
                    @Override
                    public void executed(FrontendActionAsyncResult result) {
                        ConfirmationModel localModel = (ConfirmationModel) getConfirmWindow();
                        localModel.stopProgress();
                        setConfirmWindow(null);
                        if (result.getReturnValue().getSucceeded()) {
                            showRebalanceStatus();
                        }
                    }
                });
    }

    // Queries and displays the rebalance status of the selected volume.
    // A transient confirmation popup shows progress while the query runs;
    // the result is rendered in a VolumeRebalanceStatusModel window.
    private void showRebalanceStatus() {
        if (getSelectedItem() == null) {
            return;
        }
        final ConfirmationModel cModel = new ConfirmationModel();
        final GlusterVolumeEntity volumeEntity = getSelectedItem();
        setConfirmWindow(cModel);
        cModel.setTitle(ConstantsManager.getInstance().getConstants().rebalanceStatusTitle());
        cModel.startProgress(ConstantsManager.getInstance().getConstants().fetchingDataMessage());//$NON-NLS-1$
        cModel.setHelpTag(HelpTag.volume_rebalance_status);
        cModel.setHashName("volume_rebalance_status"); //$NON-NLS-1$
        final UICommand rebalanceStatusOk = new UICommand("closeConfirmationWindow", VolumeListModel.this);//$NON-NLS-1$
        rebalanceStatusOk.setTitle(ConstantsManager.getInstance().getConstants().ok());
        rebalanceStatusOk.setIsCancel(true);
        cModel.getCommands().add(rebalanceStatusOk);
        AsyncDataProvider.getInstance().getGlusterRebalanceStatus(new AsyncQuery(this, new INewAsyncCallback() {
            @Override
            public void onSuccess(Object model, Object returnValue) {
                cModel.stopProgress();
                VdcQueryReturnValue vdcValue = (VdcQueryReturnValue) returnValue;
                GlusterVolumeTaskStatusEntity rebalanceStatusEntity = vdcValue.getReturnValue();
                if ((rebalanceStatusEntity == null) || !vdcValue.getSucceeded()) {
                    // Query failed: show the error in the confirmation popup.
                    cModel.setMessage(ConstantsManager.getInstance().getMessages().rebalanceStatusFailed(volumeEntity.getName()));
                } else {
                    setConfirmWindow(null);
                    if (getWindow() == null) {
                        // No status window yet - build one with stop/close commands.
                        VolumeRebalanceStatusModel rebalanceStatusModel = new VolumeRebalanceStatusModel(volumeEntity);
                        rebalanceStatusModel.setTitle(ConstantsManager.getInstance()
                                .getConstants()
                                .volumeRebalanceStatusTitle());
                        setWindow(rebalanceStatusModel);
                        rebalanceStatusModel.setHelpTag(HelpTag.volume_rebalance_status);
                        rebalanceStatusModel.setHashName("volume_rebalance_status"); //$NON-NLS-1$
                        rebalanceStatusModel.getVolume().setEntity(volumeEntity.getName());
                        rebalanceStatusModel.getCluster().setEntity(volumeEntity.getClusterName());
                        UICommand stopRebalanceFromStatus = new UICommand("stop_rebalance_from_status", VolumeListModel.this);//$NON-NLS-1$
                        stopRebalanceFromStatus.setTitle(ConstantsManager.getInstance().getConstants().stopRebalance());
                        rebalanceStatusModel.getCommands().add(stopRebalanceFromStatus);
                        rebalanceStatusModel.setStopReblanceFromStatus(stopRebalanceFromStatus);
                        UICommand cancelRebalance = new UICommand("CancelRebalanceStatus", VolumeListModel.this);//$NON-NLS-1$
                        cancelRebalance.setTitle(ConstantsManager.getInstance().getConstants().close());
                        cancelRebalance.setIsCancel(true);
                        rebalanceStatusModel.getCommands().add(cancelRebalance);
                        rebalanceStatusModel.showStatus(rebalanceStatusEntity);
                    }else {
                        // Status window already open - refresh it in place.
                        VolumeRebalanceStatusModel statusModel = (VolumeRebalanceStatusModel) getWindow();
                        statusModel.getCommands().get(0).setIsExecutionAllowed(false);
                        statusModel.showStatus(rebalanceStatusEntity);
                    }
                }
            }
        }), volumeEntity.getClusterId(), volumeEntity.getId());
    }

    // Opens the profile-statistics window for the selected volume.
    private void showVolumeProfiling() {
        if(getSelectedItem() == null || getWindow()!= null) {
            return;
        }
        GlusterVolumeEntity selectedVolume = getSelectedItem();
        VolumeProfileStatisticsModel profileStatsModel = new VolumeProfileStatisticsModel(selectedVolume.getClusterId(), selectedVolume.getId(), selectedVolume.getName());
        setWindow(profileStatsModel);
        setHelpTag(HelpTag.volume_profile_statistics);
        setHashName("volume_profile_statistics"); //$NON-NLS-1$
        profileStatsModel.startProgress(ConstantsManager.getInstance().getConstants().fetchingDataMessage());//$NON-NLS-1$
        UICommand closeProfilingStats = new UICommand("closeProfileStats", VolumeListModel.this);//$NON-NLS-1$
        closeProfilingStats.setTitle(ConstantsManager.getInstance().getConstants().close());
        closeProfilingStats.setIsCancel(true);
        profileStatsModel.getCommands().add(closeProfilingStats);
        // queryBackend is invoked once per flag value - presumably two
        // statistic sets; confirm semantics in VolumeProfileStatisticsModel.
        profileStatsModel.queryBackend(true);
        profileStatsModel.queryBackend(false);
    }

    // Stops the status auto-refresh before closing the rebalance status window.
    private void cancelRebalanceStatus() {
        if (getWindow() == null) {
            return;
        }
        ((VolumeRebalanceStatusModel)getWindow()).cancelRefresh();
        cancel();
    }

    // Begins the optimize-for-virt-store flow for the selected volumes.
    private void optimizeForVirtStore() {
        UIConstants constants = ConstantsManager.getInstance().getConstants();
        if (getSelectedItems() == null || getSelectedItems().size() == 0) {
            return;
        }
        ArrayList<GlusterVolumeEntity> volumesForOptimiseForVirtStore 
= new ArrayList<>(); Boolean isDiscouragedVolumePresent = false; StringBuilder discouragedConfigVolumeNamesBuilder = new StringBuilder(); discouragedConfigVolumeNamesBuilder.append(constants.optimiseForVirtStoreWarning()); for (Object item : getSelectedItems()) { GlusterVolumeEntity volume = (GlusterVolumeEntity) item; volumesForOptimiseForVirtStore.add(volume); if(volume.getReplicaCount() != 3) { discouragedConfigVolumeNamesBuilder.append(volume.getName() + "\n");//$NON-NLS-1$ isDiscouragedVolumePresent = true; } } discouragedConfigVolumeNamesBuilder.append(constants.optimiseForVirtStoreContinueMessage()); if(isDiscouragedVolumePresent) { ConfirmationModel cModel = new ConfirmationModel(); cModel.setMessage(discouragedConfigVolumeNamesBuilder.toString()); cModel.setTitle(ConstantsManager.getInstance().getConstants().optimiseForVirtStoreTitle()); UICommand cancelOptimiseVirtStoreCommand = new UICommand("CancelOptimizeForVirtStore", this);//$NON-NLS-1$ cancelOptimiseVirtStoreCommand.setTitle(constants.doNotOptimiseForVirtStore()); cancelOptimiseVirtStoreCommand.setIsCancel(true); cModel.getCommands().add(cancelOptimiseVirtStoreCommand); UICommand confirmOptimiseForVirtStoreCommand = new UICommand("ConfirmOptimiseForVirtStore", this);//$NON-NLS-1$ confirmOptimiseForVirtStoreCommand.setTitle(constants.continueOptimiseForVirtStore()); confirmOptimiseForVirtStoreCommand.setIsDefault(true); cModel.getCommands().add(confirmOptimiseForVirtStoreCommand); setConfirmWindow(cModel); } else { optimizeVolumesForVirtStore(volumesForOptimiseForVirtStore); } } private void optimizeVolumesForVirtStore(final List<GlusterVolumeEntity> volumeList) { if(getConfirmWindow() != null) { setConfirmWindow(null); } AsyncQuery aQuery = new AsyncQuery(); aQuery.setModel(this); aQuery.asyncCallback = new INewAsyncCallback() { @Override public void onSuccess(Object model, final Object result) { AsyncQuery aQueryInner = new AsyncQuery(); aQueryInner.setModel(this); aQueryInner.asyncCallback = new 
INewAsyncCallback() { @Override public void onSuccess(Object modelInner, final Object resultInner) { AsyncQuery aQueryInner1 = new AsyncQuery(); aQueryInner1.setModel(this); aQueryInner1.asyncCallback = new INewAsyncCallback() { @Override public void onSuccess(Object modelInner1, Object resultInner1) { String optionGroupVirt = (String) result; String optionOwnerUserVirt = (String) resultInner; String optionOwnerGroupVirt = (String) resultInner1; ArrayList<VdcActionParametersBase> list = new ArrayList<>(); for (GlusterVolumeEntity volume : volumeList) { Guid volumeId = volume.getId(); list.add(new GlusterVolumeOptionParameters(getOption(volumeId, "group", optionGroupVirt)));//$NON-NLS-1$ list.add(new GlusterVolumeOptionParameters(getOption(volumeId, "storage.owner-uid", optionOwnerUserVirt)));//$NON-NLS-1$ list.add(new GlusterVolumeOptionParameters(getOption(volumeId, "storage.owner-gid", optionOwnerGroupVirt)));//$NON-NLS-1$ list.add(new GlusterVolumeOptionParameters(getOption(volumeId, "server.allow-insecure", "on")));//$NON-NLS-1$ $NON-NLS-2$ final GlusterVolumeOptionEntity checkOption = getOption(volumeId, "network.ping-timeout", "10");//$NON-NLS-1$//$NON-NLS-2$ IPredicate<GlusterVolumeOptionEntity> predicaetFilter = new IPredicate<GlusterVolumeOptionEntity>() { @Override public boolean match(GlusterVolumeOptionEntity obj) { return obj.getKey().equalsIgnoreCase(checkOption.getKey()); } }; if(!isOptionEnabledOnVolume(volume, predicaetFilter)) { list.add(new GlusterVolumeOptionParameters(checkOption));//$NON-NLS-1$ } } Frontend.getInstance().runMultipleAction(VdcActionType.SetGlusterVolumeOption, list); } }; AsyncDataProvider.getInstance().getConfigFromCache(new GetConfigurationValueParameters(ConfigurationValues.GlusterVolumeOptionOwnerGroupVirtValue, AsyncDataProvider.getInstance().getDefaultConfigurationVersion()), aQueryInner1); } }; AsyncDataProvider.getInstance().getConfigFromCache(new 
GetConfigurationValueParameters(ConfigurationValues.GlusterVolumeOptionOwnerUserVirtValue, AsyncDataProvider.getInstance().getDefaultConfigurationVersion()), aQueryInner); } }; AsyncDataProvider.getInstance().getConfigFromCache(new GetConfigurationValueParameters(ConfigurationValues.GlusterVolumeOptionGroupVirtValue, AsyncDataProvider.getInstance().getDefaultConfigurationVersion()), aQuery); } private boolean isOptionEnabledOnVolume(GlusterVolumeEntity volume, IPredicate<GlusterVolumeOptionEntity> predicate) { return Linq.firstOrNull(volume.getOptions(), predicate) != null; } private GlusterVolumeOptionEntity getOption(Guid volumeId, String key, String value) { return new GlusterVolumeOptionEntity(volumeId, key, value); } private void stop() { if (getWindow() != null) { return; } ConfirmationModel model = new ConfirmationModel(); setWindow(model); model.setTitle(ConstantsManager.getInstance().getConstants().confirmStopVolume()); model.setHelpTag(HelpTag.volume_stop); model.setHashName("volume_stop"); //$NON-NLS-1$ model.setMessage(ConstantsManager.getInstance().getConstants().stopVolumeMessage()); if (isMetaVolumeInList(Linq.<GlusterVolumeEntity> cast(getSelectedItems()))) { model.setNote(ConstantsManager.getInstance().getConstants().stopMetaVolumeWarning()); } else { model.setNote(ConstantsManager.getInstance().getConstants().stopVolumeWarning()); } if (getSelectedItems() == null) { return; } ArrayList<String> list = new ArrayList<>(); for (GlusterVolumeEntity item : Linq.<GlusterVolumeEntity> cast(getSelectedItems())) { list.add(item.getName()); } model.setItems(list); UICommand tempVar = UICommand.createDefaultOkUiCommand("onStop", this); //$NON-NLS-1$ model.getCommands().add(tempVar); UICommand tempVar2 = UICommand.createCancelUiCommand("Cancel", this); //$NON-NLS-1$ model.getCommands().add(tempVar2); } public void onStop() { if (getWindow() == null) { return; } ConfirmationModel model = (ConfirmationModel) getWindow(); if (model.getProgress() != null) { 
return; } if (getSelectedItems() == null) { return; } ArrayList<VdcActionParametersBase> list = new ArrayList<>(); for (Object item : getSelectedItems()) { GlusterVolumeEntity volume = (GlusterVolumeEntity) item; list.add(new GlusterVolumeActionParameters(volume.getId(), false)); } model.startProgress(); Frontend.getInstance().runMultipleAction(VdcActionType.StopGlusterVolume, list, new IFrontendMultipleActionAsyncCallback() { @Override public void executed(FrontendMultipleActionAsyncResult result) { ConfirmationModel localModel = (ConfirmationModel) result.getState(); localModel.stopProgress(); cancel(); } }, model); } private void start() { if (getSelectedItems() == null) { return; } ArrayList<String> volumesForForceStartWarning = new ArrayList<>(); for (Object item : getSelectedItems()) { GlusterVolumeEntity volume = (GlusterVolumeEntity) item; VolumeStatus status = GlusterVolumeUtils.getVolumeStatus(volume); if (status == VolumeStatus.ALL_BRICKS_DOWN || status == VolumeStatus.SOME_BRICKS_DOWN) { volumesForForceStartWarning.add(volume.getName()); } } prepareForStartVolume(true); if (!volumesForForceStartWarning.isEmpty()) { final ConfirmationModel cModel = new ConfirmationModel(); cModel.setHelpTag(HelpTag.volume_start); cModel.setHashName("volume_start");//$NON-NLS-1$ setConfirmWindow(cModel); cModel.setMessage(ConstantsManager.getInstance().getConstants().startForceVolumeMessage()); cModel.setTitle(ConstantsManager.getInstance().getConstants().confirmStartVolume()); cModel.setForceLabel(ConstantsManager.getInstance().getConstants().startForceLabel()); cModel.setItems(volumesForForceStartWarning); cModel.getForce().setIsAvailable(true); cModel.getForce().setEntity(true); cModel.getForce().getEntityChangedEvent().addListener(new IEventListener<EventArgs>() { @Override public void eventRaised(Event<? 
extends EventArgs> ev, Object sender, EventArgs args) { if(cModel.getCommands() != null && cModel.getCommands().get(0) !=null) { cModel.getCommands().get(0).setIsExecutionAllowed(cModel.getForce().getEntity()); } } }); cModel.getCommands().add(UICommand.createDefaultOkUiCommand("startVolumeWithForceOption", this));//$NON-NLS-1$ cModel.getCommands().add(UICommand.createCancelUiCommand("closeConfirmationWindow", this));//$NON-NLS-1$ } } private void onStartVolume(ArrayList<VdcActionParametersBase> parameters) { Frontend.getInstance().runMultipleAction(VdcActionType.StartGlusterVolume, parameters, null, true, true); } private void prepareForStartVolume(boolean noForceStart) { boolean force = false; ConfirmationModel cModel; if (getConfirmWindow() != null) { cModel = (ConfirmationModel) getConfirmWindow(); closeConfirmationWindow(); force = cModel.getForce().getEntity(); } ArrayList<VdcActionParametersBase> list = new ArrayList<>(); for (Object item : getSelectedItems()) { GlusterVolumeEntity volume = (GlusterVolumeEntity) item; VolumeStatus status = GlusterVolumeUtils.getVolumeStatus(volume); if (!noForceStart && (status == VolumeStatus.ALL_BRICKS_DOWN || status == VolumeStatus.SOME_BRICKS_DOWN)) { list.add(new GlusterVolumeActionParameters(volume.getId(), force)); } else if (noForceStart && status == VolumeStatus.DOWN) { list.add(new GlusterVolumeActionParameters(volume.getId(), false)); } } onStartVolume(list); } private void onCreateVolume() { VolumeModel volumeModel = (VolumeModel) getWindow(); if (!volumeModel.validate()) { return; } Guid clusterId = volumeModel.getCluster().getSelectedItem().getId(); final GlusterVolumeEntity volume = new GlusterVolumeEntity(); volume.setClusterId(clusterId); volume.setName(volumeModel.getName().getEntity()); GlusterVolumeType type = volumeModel.getTypeList().getSelectedItem(); if (type.isStripedType()) { volume.setStripeCount(volumeModel.getStripeCountValue()); } if (type.isReplicatedType()) { 
volume.setReplicaCount(volumeModel.getReplicaCountValue()); } volume.setVolumeType(type); if (volumeModel.getTcpTransportType().getEntity()) { volume.getTransportTypes().add(TransportType.TCP); } if (volumeModel.getRdmaTransportType().getEntity()) { volume.getTransportTypes().add(TransportType.RDMA); } ArrayList<GlusterBrickEntity> brickList = new ArrayList<>(); for (Object model : volumeModel.getBricks().getItems()) { brickList.add((GlusterBrickEntity) ((EntityModel) model).getEntity()); } volume.setBricks(brickList); if (volumeModel.getNfs_accecssProtocol().getEntity()) { volume.enableNFS(); } else { volume.disableNFS(); } if (volumeModel.getCifs_accecssProtocol().getEntity()) { volume.enableCifs(); } else { volume.disableCifs(); } volume.setAccessControlList(volumeModel.getAllowAccess().getEntity()); volumeModel.startProgress(); CreateGlusterVolumeParameters parameter = new CreateGlusterVolumeParameters(volume, volumeModel.isForceAddBricks()); Frontend.getInstance().runAction(VdcActionType.CreateGlusterVolume, parameter, new IFrontendActionAsyncCallback() { @Override public void executed(FrontendActionAsyncResult result) { VolumeListModel localModel = (VolumeListModel) result.getState(); localModel.postOnCreateVolume(result.getReturnValue(), volume); } }, this); } public void postOnCreateVolume(VdcReturnValueBase returnValue, GlusterVolumeEntity volume) { VolumeModel model = (VolumeModel) getWindow(); model.stopProgress(); if (returnValue != null && returnValue.getSucceeded()) { cancel(); if (model.getOptimizeForVirtStore().getEntity()) { volume.setId((Guid) returnValue.getActionReturnValue()); optimizeVolumesForVirtStore(Arrays.asList(volume)); } } } /** * This action is handled here in VolumeLisModel only, because there is a use case where no volume would be selected * for setting the configuration. And in this scenario the GlusrerVolumeSnapshotListModel would not be initialized. 
*/ public void configureClusterSnapshotOptions() { if (getWindow() != null) { return; } final UIConstants constants = ConstantsManager.getInstance().getConstants(); final GlusterClusterSnapshotConfigModel clusterSnapshotConfigModel = new GlusterClusterSnapshotConfigModel(); clusterSnapshotConfigModel.setHelpTag(HelpTag.configure_volume_snapshot); clusterSnapshotConfigModel.setHashName("configure_volume_snapshot"); //$NON-NLS-1$ clusterSnapshotConfigModel.setTitle(ConstantsManager.getInstance() .getConstants() .configureClusterSnapshotOptionsTitle()); setWindow(clusterSnapshotConfigModel); AsyncDataProvider.getInstance().getClustersHavingHosts(new AsyncQuery(this, new INewAsyncCallback() { @Override public void onSuccess(Object model, final Object returnValue) { if (getSystemTreeSelectedItem() != null) { Cluster selectedCluster = (Cluster) getSystemTreeSelectedItem().getEntity(); clusterSnapshotConfigModel.getClusters().setItems((List<Cluster>) returnValue, selectedCluster); } else { if (getSelectedItems() != null) { GlusterVolumeEntity volumeEntity = getSelectedItems().get(0); if (volumeEntity != null) { AsyncDataProvider.getInstance().getClusterById(new AsyncQuery(this, new INewAsyncCallback() { @Override public void onSuccess(Object model, Object returnValue1) { clusterSnapshotConfigModel.getClusters() .setItems((List<Cluster>) returnValue, (Cluster) returnValue1); } }), volumeEntity.getClusterId()); } } else { clusterSnapshotConfigModel.getClusters().setItems((List<Cluster>) returnValue); } } } })); clusterSnapshotConfigModel.getClusterConfigOptions().setTitle(ConstantsManager.getInstance() .getConstants() .configureClusterSnapshotOptionsTitle()); UICommand updateCommand = new UICommand("confirmConfigureClusterSnapshotOptions", this); //$NON-NLS-1$ updateCommand.setTitle(constants.snapshotConfigUpdateButtonLabel()); updateCommand.setIsDefault(true); clusterSnapshotConfigModel.getCommands().add(updateCommand); UICommand cancelCommand = new UICommand("Cancel", 
this); //$NON-NLS-1$ cancelCommand.setTitle(constants.cancel()); cancelCommand.setIsCancel(true); clusterSnapshotConfigModel.getCommands().add(cancelCommand); } public void confirmConfigureClusterSnapshotOptions() { boolean cfgChanged = false; GlusterClusterSnapshotConfigModel snapshotConfigModel = (GlusterClusterSnapshotConfigModel) getWindow(); if (!snapshotConfigModel.validate()) { return; } for (EntityModel<GlusterVolumeSnapshotConfig> clusterCfg : snapshotConfigModel.getClusterConfigOptions() .getItems()) { if (!clusterCfg.getEntity().getParamValue().equals(snapshotConfigModel.getExistingClusterConfigValue(clusterCfg.getEntity() .getParamName()))) { cfgChanged = true; break; } } if (cfgChanged) { ConfirmationModel confirmModel = new ConfirmationModel(); setConfirmWindow(confirmModel); confirmModel.setTitle(ConstantsManager.getInstance() .getConstants() .updateSnapshotConfigurationConfirmationTitle()); confirmModel.setHelpTag(HelpTag.configure_volume_snapshot_confirmation); confirmModel.setHashName("configure_volume_snapshot_confirmation"); //$NON-NLS-1$ confirmModel.setMessage(ConstantsManager.getInstance() .getConstants() .youAreAboutChangeSnapshotConfigurationMsg()); UICommand tempVar = new UICommand("onConfigureClusterSnapshotOptions", this); //$NON-NLS-1$ tempVar.setTitle(ConstantsManager.getInstance().getConstants().ok()); tempVar.setIsDefault(true); getConfirmWindow().getCommands().add(tempVar); UICommand tempVar2 = new UICommand("CancelConfirmation", this); //$NON-NLS-1$ tempVar2.setTitle(ConstantsManager.getInstance().getConstants().cancel()); tempVar2.setIsCancel(true); getConfirmWindow().getCommands().add(tempVar2); } else { onConfigureClusterSnapshotOptions(); } } public void onConfigureClusterSnapshotOptions() { GlusterClusterSnapshotConfigModel clusterSnapshotConfigModel = (GlusterClusterSnapshotConfigModel) getWindow(); Guid clusterId = clusterSnapshotConfigModel.getClusters().getSelectedItem().getId(); List<GlusterVolumeSnapshotConfig> vdsParams 
= new ArrayList<>(); for (EntityModel<GlusterVolumeSnapshotConfig> clusterCfg : clusterSnapshotConfigModel.getClusterConfigOptions() .getItems()) { vdsParams.add(new GlusterVolumeSnapshotConfig(clusterId, null, clusterCfg.getEntity().getParamName(), clusterCfg.getEntity().getParamValue())); } Frontend.getInstance().runAction(VdcActionType.UpdateGlusterVolumeSnapshotConfig, new UpdateGlusterVolumeSnapshotConfigParameters(clusterId, null, vdsParams), new IFrontendActionAsyncCallback() { @Override public void executed(FrontendActionAsyncResult result) { if (result.getReturnValue() != null && result.getReturnValue().getSucceeded()) { cancel(); } if (getConfirmWindow() != null) { setConfirmWindow(null); } } }, this); } public void configureVolumeSnapshotOptions() { if (getWindow() != null) { return; } final UIConstants constants = ConstantsManager.getInstance().getConstants(); GlusterVolumeEntity volumeEntity = getSelectedItems().get(0); final GlusterVolumeSnapshotConfigModel volumeSnapshotConfigModel = new GlusterVolumeSnapshotConfigModel(volumeEntity); volumeSnapshotConfigModel.setHelpTag(HelpTag.configure_volume_snapshot); volumeSnapshotConfigModel.setHashName("configure_volume_snapshot"); //$NON-NLS-1$ volumeSnapshotConfigModel.setTitle(ConstantsManager.getInstance() .getConstants() .configureVolumeSnapshotOptionsTitle()); setWindow(volumeSnapshotConfigModel); AsyncDataProvider.getInstance().getClusterById(new AsyncQuery(this, new INewAsyncCallback() { @Override public void onSuccess(Object model, Object returnValue) { volumeSnapshotConfigModel.getClusterName().setEntity(((Cluster) returnValue).getName()); } }), volumeEntity.getClusterId()); volumeSnapshotConfigModel.getVolumeName().setEntity(volumeEntity.getName()); UICommand updateCommand = new UICommand("confirmConfigureVolumeSnapshotOptions", this); //$NON-NLS-1$ updateCommand.setTitle(constants.snapshotConfigUpdateButtonLabel()); updateCommand.setIsDefault(true); 
volumeSnapshotConfigModel.getCommands().add(updateCommand); UICommand cancelCommand = new UICommand("Cancel", this); //$NON-NLS-1$ cancelCommand.setTitle(constants.cancel()); cancelCommand.setIsCancel(true); volumeSnapshotConfigModel.getCommands().add(cancelCommand); } public void confirmConfigureVolumeSnapshotOptions() { boolean cfgChanged = false; GlusterVolumeSnapshotConfigModel snapshotConfigModel = (GlusterVolumeSnapshotConfigModel) getWindow(); if (!snapshotConfigModel.validate()) { return; } for (EntityModel<VolumeSnapshotOptionModel> volumeCfg : snapshotConfigModel.getConfigOptions().getItems()) { if (!volumeCfg.getEntity().getOptionValue().equals(snapshotConfigModel.getExistingVolumeConfigValue(volumeCfg.getEntity() .getOptionName()))) { cfgChanged = true; break; } } if (cfgChanged) { ConfirmationModel confirmModel = new ConfirmationModel(); setConfirmWindow(confirmModel); confirmModel.setTitle(ConstantsManager.getInstance() .getConstants() .updateSnapshotConfigurationConfirmationTitle()); confirmModel.setHelpTag(HelpTag.configure_volume_snapshot_confirmation); confirmModel.setHashName("configure_volume_snapshot_confirmation"); //$NON-NLS-1$ confirmModel.setMessage(ConstantsManager.getInstance() .getConstants() .youAreAboutChangeSnapshotConfigurationMsg()); UICommand tempVar = new UICommand("onConfigureVolumeSnapshotOptions", this); //$NON-NLS-1$ tempVar.setTitle(ConstantsManager.getInstance().getConstants().ok()); tempVar.setIsDefault(true); getConfirmWindow().getCommands().add(tempVar); UICommand tempVar2 = new UICommand("CancelConfirmation", this); //$NON-NLS-1$ tempVar2.setTitle(ConstantsManager.getInstance().getConstants().cancel()); tempVar2.setIsCancel(true); getConfirmWindow().getCommands().add(tempVar2); } else { onConfigureVolumeSnapshotOptions(); } } public void onConfigureVolumeSnapshotOptions() { GlusterVolumeSnapshotConfigModel volumeSnapshotConfigModel = (GlusterVolumeSnapshotConfigModel) getWindow(); GlusterVolumeEntity volumeEntity = 
volumeSnapshotConfigModel.getSelectedVolumeEntity(); List<GlusterVolumeSnapshotConfig> vdsParams = new ArrayList<>(); for (EntityModel<VolumeSnapshotOptionModel> volumeCfg : volumeSnapshotConfigModel.getConfigOptions() .getItems()) { vdsParams.add(new GlusterVolumeSnapshotConfig(volumeEntity.getClusterId(), volumeEntity.getId(), volumeCfg.getEntity().getOptionName(), volumeCfg.getEntity().getOptionValue())); } Frontend.getInstance().runAction(VdcActionType.UpdateGlusterVolumeSnapshotConfig, new UpdateGlusterVolumeSnapshotConfigParameters(volumeEntity.getClusterId(), volumeEntity.getId(), vdsParams), new IFrontendActionAsyncCallback() { @Override public void executed(FrontendActionAsyncResult result) { if (result.getReturnValue() != null && result.getReturnValue().getSucceeded()) { cancel(); } if (getConfirmWindow() != null) { setConfirmWindow(null); } } }, this); } @Override protected String getListName() { return "VolumeListModel"; //$NON-NLS-1$ } private SystemTreeItemModel systemTreeSelectedItem; @Override public SystemTreeItemModel getSystemTreeSelectedItem() { return systemTreeSelectedItem; } @Override public void setSystemTreeSelectedItem(SystemTreeItemModel value) { if (systemTreeSelectedItem != value) { systemTreeSelectedItem = value; onSystemTreeSelectedItemChanged(); } } private void onSystemTreeSelectedItemChanged() { updateActionAvailability(); } @Override public boolean isSearchStringMatch(String searchString) { return searchString.trim().toLowerCase().startsWith("volume"); //$NON-NLS-1$ } }
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.pzexoplayer.util; import junit.framework.TestCase; import java.nio.ByteBuffer; import java.util.Arrays; /** * Tests for {@link ParsableByteArray}. */ public class ParsableByteArrayTest extends TestCase { private static final byte[] TEST_DATA = new byte[] {0x0F, (byte) 0xFF, (byte) 0x42, (byte) 0x0F, 0x00, 0x00, 0x00, 0x00}; private static ParsableByteArray getTestDataArray() { ParsableByteArray testArray = new ParsableByteArray(TEST_DATA.length); System.arraycopy(TEST_DATA, 0, testArray.data, 0, TEST_DATA.length); return testArray; } public void testReadInt() { testReadInt(0); testReadInt(1); testReadInt(-1); testReadInt(Integer.MIN_VALUE); testReadInt(Integer.MAX_VALUE); } private static void testReadInt(int testValue) { ParsableByteArray testArray = new ParsableByteArray( ByteBuffer.allocate(4).putInt(testValue).array()); int readValue = testArray.readInt(); // Assert that the value we read was the value we wrote. assertEquals(testValue, readValue); // And that the position advanced as expected. assertEquals(4, testArray.getPosition()); // And that skipping back and reading gives the same results. 
testArray.skipBytes(-4); readValue = testArray.readInt(); assertEquals(testValue, readValue); assertEquals(4, testArray.getPosition()); } public void testReadUnsignedInt() { testReadUnsignedInt(0); testReadUnsignedInt(1); testReadUnsignedInt(Integer.MAX_VALUE); testReadUnsignedInt(Integer.MAX_VALUE + 1L); testReadUnsignedInt(0xFFFFFFFFL); } private static void testReadUnsignedInt(long testValue) { ParsableByteArray testArray = new ParsableByteArray( Arrays.copyOfRange(ByteBuffer.allocate(8).putLong(testValue).array(), 4, 8)); long readValue = testArray.readUnsignedInt(); // Assert that the value we read was the value we wrote. assertEquals(testValue, readValue); // And that the position advanced as expected. assertEquals(4, testArray.getPosition()); // And that skipping back and reading gives the same results. testArray.skipBytes(-4); readValue = testArray.readUnsignedInt(); assertEquals(testValue, readValue); assertEquals(4, testArray.getPosition()); } public void testReadUnsignedIntToInt() { testReadUnsignedIntToInt(0); testReadUnsignedIntToInt(1); testReadUnsignedIntToInt(Integer.MAX_VALUE); try { testReadUnsignedIntToInt(-1); fail(); } catch (IllegalStateException e) { // Expected. } try { testReadUnsignedIntToInt(Integer.MIN_VALUE); fail(); } catch (IllegalStateException e) { // Expected. } } private static void testReadUnsignedIntToInt(int testValue) { ParsableByteArray testArray = new ParsableByteArray( ByteBuffer.allocate(4).putInt(testValue).array()); int readValue = testArray.readUnsignedIntToInt(); // Assert that the value we read was the value we wrote. assertEquals(testValue, readValue); // And that the position advanced as expected. assertEquals(4, testArray.getPosition()); // And that skipping back and reading gives the same results. 
testArray.skipBytes(-4); readValue = testArray.readUnsignedIntToInt(); assertEquals(testValue, readValue); assertEquals(4, testArray.getPosition()); } public void testReadUnsignedLongToLong() { testReadUnsignedLongToLong(0); testReadUnsignedLongToLong(1); testReadUnsignedLongToLong(Long.MAX_VALUE); try { testReadUnsignedLongToLong(-1); fail(); } catch (IllegalStateException e) { // Expected. } try { testReadUnsignedLongToLong(Long.MIN_VALUE); fail(); } catch (IllegalStateException e) { // Expected. } } private static void testReadUnsignedLongToLong(long testValue) { ParsableByteArray testArray = new ParsableByteArray( ByteBuffer.allocate(8).putLong(testValue).array()); long readValue = testArray.readUnsignedLongToLong(); // Assert that the value we read was the value we wrote. assertEquals(testValue, readValue); // And that the position advanced as expected. assertEquals(8, testArray.getPosition()); // And that skipping back and reading gives the same results. testArray.skipBytes(-8); readValue = testArray.readUnsignedLongToLong(); assertEquals(testValue, readValue); assertEquals(8, testArray.getPosition()); } public void testReadLong() { testReadLong(0); testReadLong(1); testReadLong(-1); testReadLong(Long.MIN_VALUE); testReadLong(Long.MAX_VALUE); } private static void testReadLong(long testValue) { ParsableByteArray testArray = new ParsableByteArray( ByteBuffer.allocate(8).putLong(testValue).array()); long readValue = testArray.readLong(); // Assert that the value we read was the value we wrote. assertEquals(testValue, readValue); // And that the position advanced as expected. assertEquals(8, testArray.getPosition()); // And that skipping back and reading gives the same results. 
testArray.skipBytes(-8); readValue = testArray.readLong(); assertEquals(testValue, readValue); assertEquals(8, testArray.getPosition()); } public void testReadingMovesPosition() { ParsableByteArray parsableByteArray = getTestDataArray(); // Given an array at the start assertEquals(0, parsableByteArray.getPosition()); // When reading an integer, the position advances parsableByteArray.readUnsignedInt(); assertEquals(4, parsableByteArray.getPosition()); } public void testOutOfBoundsThrows() { ParsableByteArray parsableByteArray = getTestDataArray(); // Given an array at the end parsableByteArray.readUnsignedLongToLong(); assertEquals(TEST_DATA.length, parsableByteArray.getPosition()); // Then reading more data throws. try { parsableByteArray.readUnsignedInt(); fail(); } catch (Exception e) { // Expected. } } public void testModificationsAffectParsableArray() { ParsableByteArray parsableByteArray = getTestDataArray(); // When modifying the wrapped byte array byte[] data = parsableByteArray.data; long readValue = parsableByteArray.readUnsignedInt(); data[0] = (byte) (TEST_DATA[0] + 1); parsableByteArray.setPosition(0); // Then the parsed value changes. assertFalse(parsableByteArray.readUnsignedInt() == readValue); } public void testReadingUnsignedLongWithMsbSetThrows() { ParsableByteArray parsableByteArray = getTestDataArray(); // Given an array with the most-significant bit set on the top byte byte[] data = parsableByteArray.data; data[0] = (byte) 0x80; // Then reading an unsigned long throws. try { parsableByteArray.readUnsignedLongToLong(); fail(); } catch (Exception e) { // Expected. } } public void testReadUnsignedFixedPoint1616() { ParsableByteArray parsableByteArray = getTestDataArray(); // When reading the integer part of a 16.16 fixed point value int value = parsableByteArray.readUnsignedFixedPoint1616(); // Then the read value is equal to the array elements interpreted as a short. 
assertEquals((0xFF & TEST_DATA[0]) << 8 | (TEST_DATA[1] & 0xFF), value); assertEquals(4, parsableByteArray.getPosition()); } public void testReadingBytesReturnsCopy() { ParsableByteArray parsableByteArray = getTestDataArray(); // When reading all the bytes back int length = parsableByteArray.limit(); assertEquals(TEST_DATA.length, length); byte[] copy = new byte[length]; parsableByteArray.readBytes(copy, 0, length); // Then the array elements are the same. assertTrue(Arrays.equals(parsableByteArray.data, copy)); } }
/*
 * Copyright (c) 1996, 2006, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package java.lang;

/**
 * The {@code Short} class wraps a value of primitive type {@code
 * short} in an object.  An object of type {@code Short} contains a
 * single field whose type is {@code short}.
 *
 * <p>In addition, this class provides several methods for converting
 * a {@code short} to a {@code String} and a {@code String} to a
 * {@code short}, as well as other constants and methods useful when
 * dealing with a {@code short}.
 *
 * @author  Nakul Saraiya
 * @author  Joseph D. Darcy
 * @see     java.lang.Number
 * @since   JDK1.1
 */
// NOTE(review): canonical JDK platform class.  Do not alter code here —
// serialVersionUID, the boxing cache's identity semantics, and the
// java.lang package contract all depend on the exact implementation.
public final class Short extends Number implements Comparable<Short> {

    /**
     * A constant holding the minimum value a {@code short} can
     * have, -2<sup>15</sup>.
     */
    public static final short   MIN_VALUE = -32768;

    /**
     * A constant holding the maximum value a {@code short} can
     * have, 2<sup>15</sup>-1.
     */
    public static final short   MAX_VALUE = 32767;

    /**
     * The {@code Class} instance representing the primitive type
     * {@code short}.
     */
    // Unchecked cast is unavoidable here: Class.getPrimitiveClass is a
    // VM-backed, package-private factory that returns a raw Class.
    public static final Class<Short>    TYPE = (Class<Short>) Class.getPrimitiveClass("short");

    /**
     * Returns a new {@code String} object representing the
     * specified {@code short}. The radix is assumed to be 10.
     *
     * @param s the {@code short} to be converted
     * @return the string representation of the specified {@code short}
     * @see java.lang.Integer#toString(int)
     */
    public static String toString(short s) {
        // Delegates to Integer: every short value fits in an int.
        return Integer.toString((int)s, 10);
    }

    /**
     * Parses the string argument as a signed {@code short} in the
     * radix specified by the second argument. The characters in the
     * string must all be digits, of the specified radix (as
     * determined by whether {@link java.lang.Character#digit(char,
     * int)} returns a nonnegative value) except that the first
     * character may be an ASCII minus sign {@code '-'}
     * (<code>'&#92;u002D'</code>) to indicate a negative value.  The
     * resulting {@code short} value is returned.
     *
     * <p>An exception of type {@code NumberFormatException} is
     * thrown if any of the following situations occurs:
     * <ul>
     * <li> The first argument is {@code null} or is a string of
     * length zero.
     *
     * <li> The radix is either smaller than {@link
     * java.lang.Character#MIN_RADIX} or larger than {@link
     * java.lang.Character#MAX_RADIX}.
     *
     * <li> Any character of the string is not a digit of the
     * specified radix, except that the first character may be a minus
     * sign {@code '-'} (<code>'&#92;u002D'</code>) provided that the
     * string is longer than length 1.
     *
     * <li> The value represented by the string is not a value of type
     * {@code short}.
     * </ul>
     *
     * @param s         the {@code String} containing the
     *                  {@code short} representation to be parsed
     * @param radix     the radix to be used while parsing {@code s}
     * @return          the {@code short} represented by the string
     *                  argument in the specified radix.
     * @throws          NumberFormatException If the {@code String}
     *                  does not contain a parsable {@code short}.
     */
    public static short parseShort(String s, int radix)
        throws NumberFormatException {
        // Parse as an int first, then range-check; Integer.parseInt already
        // handles null/empty input and bad radix by throwing.
        int i = Integer.parseInt(s, radix);
        if (i < MIN_VALUE || i > MAX_VALUE)
            throw new NumberFormatException(
                "Value out of range. Value:\"" + s + "\" Radix:" + radix);
        return (short)i;
    }

    /**
     * Parses the string argument as a signed decimal {@code
     * short}. The characters in the string must all be decimal
     * digits, except that the first character may be an ASCII minus
     * sign {@code '-'} (<code>'&#92;u002D'</code>) to indicate a
     * negative value.  The resulting {@code short} value is returned,
     * exactly as if the argument and the radix 10 were given as
     * arguments to the {@link #parseShort(java.lang.String, int)}
     * method.
     *
     * @param s a {@code String} containing the {@code short}
     *          representation to be parsed
     * @return  the {@code short} value represented by the
     *          argument in decimal.
     * @throws  NumberFormatException If the string does not
     *          contain a parsable {@code short}.
     */
    public static short parseShort(String s) throws NumberFormatException {
        return parseShort(s, 10);
    }

    /**
     * Returns a {@code Short} object holding the value
     * extracted from the specified {@code String} when parsed
     * with the radix given by the second argument. The first argument
     * is interpreted as representing a signed {@code short} in
     * the radix specified by the second argument, exactly as if the
     * argument were given to the {@link #parseShort(java.lang.String,
     * int)} method. The result is a {@code Short} object that
     * represents the {@code short} value specified by the string.
     *
     * <p>In other words, this method returns a {@code Short} object
     * equal to the value of:
     *
     * <blockquote>
     *  {@code new Short(Short.parseShort(s, radix))}
     * </blockquote>
     *
     * @param s         the string to be parsed
     * @param radix     the radix to be used in interpreting {@code s}
     * @return          a {@code Short} object holding the value
     *                  represented by the string argument in the
     *                  specified radix.
     * @throws          NumberFormatException If the {@code String} does
     *                  not contain a parsable {@code short}.
     */
    public static Short valueOf(String s, int radix)
        throws NumberFormatException {
        return new Short(parseShort(s, radix));
    }

    /**
     * Returns a {@code Short} object holding the
     * value given by the specified {@code String}. The argument
     * is interpreted as representing a signed decimal
     * {@code short}, exactly as if the argument were given to
     * the {@link #parseShort(java.lang.String)} method. The result is
     * a {@code Short} object that represents the
     * {@code short} value specified by the string.
     *
     * <p>In other words, this method returns a {@code Short} object
     * equal to the value of:
     *
     * <blockquote>
     *  {@code new Short(Short.parseShort(s))}
     * </blockquote>
     *
     * @param s the string to be parsed
     * @return  a {@code Short} object holding the value
     *          represented by the string argument
     * @throws  NumberFormatException If the {@code String} does
     *          not contain a parsable {@code short}.
     */
    public static Short valueOf(String s) throws NumberFormatException {
        return valueOf(s, 10);
    }

    // Lazily-initialized cache of boxed values for -128..127, mirroring the
    // caches in Integer/Long.  valueOf(short) must return identical instances
    // within this range, which is why callers should prefer valueOf over the
    // constructor.
    private static class ShortCache {
        private ShortCache(){}

        static final Short cache[] = new Short[-(-128) + 127 + 1];

        static {
            for(int i = 0; i < cache.length; i++)
                cache[i] = new Short((short)(i - 128));
        }
    }

    /**
     * Returns a {@code Short} instance representing the specified
     * {@code short} value.
     * If a new {@code Short} instance is not required, this method
     * should generally be used in preference to the constructor
     * {@link #Short(short)}, as this method is likely to yield
     * significantly better space and time performance by caching
     * frequently requested values.
     *
     * @param  s a short value.
     * @return a {@code Short} instance representing {@code s}.
     * @since  1.5
     */
    public static Short valueOf(short s) {
        final int offset = 128;
        int sAsInt = s;
        if (sAsInt >= -128 && sAsInt <= 127) { // must cache
            return ShortCache.cache[sAsInt + offset];
        }
        return new Short(s);
    }

    /**
     * Decodes a {@code String} into a {@code Short}.
     * Accepts decimal, hexadecimal, and octal numbers given by
     * the following grammar:
     *
     * <blockquote>
     * <dl>
     * <dt><i>DecodableString:</i>
     * <dd><i>Sign<sub>opt</sub> DecimalNumeral</i>
     * <dd><i>Sign<sub>opt</sub></i> {@code 0x} <i>HexDigits</i>
     * <dd><i>Sign<sub>opt</sub></i> {@code 0X} <i>HexDigits</i>
     * <dd><i>Sign<sub>opt</sub></i> {@code #} <i>HexDigits</i>
     * <dd><i>Sign<sub>opt</sub></i> {@code 0} <i>OctalDigits</i>
     * <p>
     * <dt><i>Sign:</i>
     * <dd>{@code -}
     * </dl>
     * </blockquote>
     *
     * <i>DecimalNumeral</i>, <i>HexDigits</i>, and <i>OctalDigits</i>
     * are defined in <a href="http://java.sun.com/docs/books/jls/second_edition/html/lexical.doc.html#48282">&sect;3.10.1</a>
     * of the <a href="http://java.sun.com/docs/books/jls/html/">Java
     * Language Specification</a>.
     *
     * <p>The sequence of characters following an (optional) negative
     * sign and/or radix specifier ("{@code 0x}", "{@code 0X}",
     * "{@code #}", or leading zero) is parsed as by the {@code
     * Short.parseShort} method with the indicated radix (10, 16, or
     * 8).  This sequence of characters must represent a positive
     * value or a {@link NumberFormatException} will be thrown.  The
     * result is negated if first character of the specified {@code
     * String} is the minus sign.  No whitespace characters are
     * permitted in the {@code String}.
     *
     * @param     nm the {@code String} to decode.
     * @return    a {@code Short} object holding the {@code short}
     *            value represented by {@code nm}
     * @throws    NumberFormatException  if the {@code String} does not
     *            contain a parsable {@code short}.
     * @see java.lang.Short#parseShort(java.lang.String, int)
     */
    public static Short decode(String nm) throws NumberFormatException {
        // Integer.decode handles the sign/radix-prefix grammar; the int
        // result is then range-checked and autoboxed into a Short.
        int i = Integer.decode(nm);
        if (i < MIN_VALUE || i > MAX_VALUE)
            throw new NumberFormatException(
                    "Value " + i + " out of range from input " + nm);
        return (short)i;
    }

    /**
     * The value of the {@code Short}.
     *
     * @serial
     */
    private final short value;

    /**
     * Constructs a newly allocated {@code Short} object that
     * represents the specified {@code short} value.
     *
     * @param value     the value to be represented by the
     *                  {@code Short}.
     */
    public Short(short value) {
        this.value = value;
    }

    /**
     * Constructs a newly allocated {@code Short} object that
     * represents the {@code short} value indicated by the
     * {@code String} parameter. The string is converted to a
     * {@code short} value in exactly the manner used by the
     * {@code parseShort} method for radix 10.
     *
     * @param s the {@code String} to be converted to a
     *          {@code Short}
     * @throws  NumberFormatException If the {@code String}
     *          does not contain a parsable {@code short}.
     * @see     java.lang.Short#parseShort(java.lang.String, int)
     */
    public Short(String s) throws NumberFormatException {
        this.value = parseShort(s, 10);
    }

    /**
     * Returns the value of this {@code Short} as a
     * {@code byte}.
     */
    public byte byteValue() {
        // Narrowing conversion: keeps only the low 8 bits.
        return (byte)value;
    }

    /**
     * Returns the value of this {@code Short} as a
     * {@code short}.
     */
    public short shortValue() {
        return value;
    }

    /**
     * Returns the value of this {@code Short} as an
     * {@code int}.
     */
    public int intValue() {
        return (int)value;
    }

    /**
     * Returns the value of this {@code Short} as a
     * {@code long}.
     */
    public long longValue() {
        return (long)value;
    }

    /**
     * Returns the value of this {@code Short} as a
     * {@code float}.
     */
    public float floatValue() {
        return (float)value;
    }

    /**
     * Returns the value of this {@code Short} as a
     * {@code double}.
     */
    public double doubleValue() {
        return (double)value;
    }

    /**
     * Returns a {@code String} object representing this
     * {@code Short}'s value.  The value is converted to signed
     * decimal representation and returned as a string, exactly as if
     * the {@code short} value were given as an argument to the
     * {@link java.lang.Short#toString(short)} method.
     *
     * @return  a string representation of the value of this object in
     *          base&nbsp;10.
     */
    public String toString() {
        return String.valueOf((int)value);
    }

    /**
     * Returns a hash code for this {@code Short}.
     */
    public int hashCode() {
        // The hash of a Short is its numeric value, consistent with equals.
        return (int)value;
    }

    /**
     * Compares this object to the specified object.  The result is
     * {@code true} if and only if the argument is not
     * {@code null} and is a {@code Short} object that
     * contains the same {@code short} value as this object.
     *
     * @param obj       the object to compare with
     * @return          {@code true} if the objects are the same;
     *                  {@code false} otherwise.
     */
    public boolean equals(Object obj) {
        if (obj instanceof Short) {
            return value == ((Short)obj).shortValue();
        }
        return false;
    }

    /**
     * Compares two {@code Short} objects numerically.
     *
     * @param   anotherShort   the {@code Short} to be compared.
     * @return  the value {@code 0} if this {@code Short} is
     *          equal to the argument {@code Short}; a value less than
     *          {@code 0} if this {@code Short} is numerically less
     *          than the argument {@code Short}; and a value greater than
     *           {@code 0} if this {@code Short} is numerically
     *           greater than the argument {@code Short} (signed
     *           comparison).
     * @since   1.2
     */
    public int compareTo(Short anotherShort) {
        // Subtraction cannot overflow here: both operands are widened to int
        // and the short range fits comfortably within int.
        return this.value - anotherShort.value;
    }

    /**
     * The number of bits used to represent a {@code short} value in two's
     * complement binary form.
     * @since 1.5
     */
    public static final int SIZE = 16;

    /**
     * Returns the value obtained by reversing the order of the bytes in the
     * two's complement representation of the specified {@code short} value.
     *
     * @return the value obtained by reversing (or, equivalently, swapping)
     *     the bytes in the specified {@code short} value.
     * @since 1.5
     */
    public static short reverseBytes(short i) {
        // Swap high and low bytes; the masked shift avoids sign-extension
        // artifacts from the arithmetic right shift.
        return (short) (((i & 0xFF00) >> 8) | (i << 8));
    }

    /** use serialVersionUID from JDK 1.1. for interoperability */
    private static final long serialVersionUID = 7515723908773894738L;
}
/*
 * Copyright (c) 2013 - present Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */

package codetoanalyze.java.infer;

import android.content.ContentResolver;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.text.TextUtils;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;

import javax.annotation.Nullable;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashMap;

/**
 * Test fixture for the Infer static analyzer.
 *
 * <p>NOTE(review): every null dereference in this class is INTENTIONAL.
 * Methods whose names describe an NPE are expected to be flagged by the
 * analyzer; methods named "...ShouldNot..." are expected to be clean.
 * Do not "fix" these bugs — that would invalidate the analyzer tests.
 */
public class NullPointerExceptions {

  class A {
    int x;

    public void method() {
    }
  }

  // npe local with field
  public int nullPointerException() {
    A a = null;
    return a.x;
  }

  // May return null depending on the flag; used to exercise
  // interprocedural null tracking below.
  public A canReturnNullObject(boolean ok) {
    A a = new A();
    if (ok) return a;
    else return null;
  }

  public static void expectNotNullObjectParameter(A a) {
    a.method();
  }

  public static void expectNotNullArrayParameter(A[] array) {
    array.clone();
  }

  // npe with branching, interprocedural
  public int nullPointerExceptionInterProc() {
    A a = canReturnNullObject(false);
    return a.x;
  }

  // npe with exception handling
  public int nullPointerExceptionWithExceptionHandling(boolean ok) {
    A a = null;
    try {
      throw new Exception();
    } catch (Exception e) {
      // Deliberate NPE inside the handler.
      return a.x;
    }
  }

  class B {
    A a;

    void test() {
    }
  }

  public static int nullPointerExceptionWithArray() {
    A[] array = new A[]{null};
    A t = array[0];
    return t.x;
  }

  // npe with a chain of fields
  class C {
    B b;
  }

  public int nullPointerExceptionWithAChainOfFields(C c) {
    c.b = new B();
    // c.b.a was never assigned, so the final .x dereferences null.
    return c.b.a.x;
  }

  // npe with a null object parameter
  public static void nullPointerExceptionWithNullObjectParameter() {
    expectNotNullObjectParameter(null);
  }

  // npe with a null array parameter
  public static void nullPointerExceptionWithNullArrayParameter() {
    expectNotNullArrayParameter(null);
  }

  // If the constructor throws, fis stays null and the finally block NPEs.
  public static void nullPointerExceptionFromFaillingResourceConstructor() throws IOException {
    FileInputStream fis = null;
    try {
      fis = new FileInputStream(new File("whatever.txt"));
    } catch (IOException e) {
    } finally {
      fis.close();
    }
  }

  // Same pattern as above with FileOutputStream.
  public static void nullPointerExceptionFromFailingFileOutputStreamConstructor()
      throws IOException {
    FileOutputStream fos = null;
    try {
      fos = new FileOutputStream(new File("whatever.txt"));
    } catch (IOException e) {
    } finally {
      fos.close();
    }
  }

  int x;

  // The (this == null) check is vacuous; the analyzer should know
  // `this` is never null here.
  public void nullPointerExceptionFromNotKnowingThatThisIsNotNull() {
    if (this == null) {
    }
    this.x = 4;
  }

  public <T> T id_generics(T o) {
    o.toString();
    return o;
  }

  public A frame(A x) {
    return id_generics(x);
  }

  // s.length() is only reachable if frame() succeeded, which implies the
  // argument was non-null — but s itself is always null here.
  public void nullPointerExceptionUnlessFrameFails() {
    String s = null;
    Object a = frame(new A());
    if (a instanceof A) {
      s.length();
    }
  }

  class D {
    int x;
  }

  // checkState(d != null) should establish d's non-nullness for the return.
  public int preconditionCheckStateTest(D d) {
    Preconditions.checkState(d != null);
    return d.x;
  }

  public void genericMethodSomewhereCheckingForNull(String s) {
    if (s == null) {
    }
  }

  public void noNullPointerExceptionAfterSkipFunction() {
    String t = new String("Hello!");
    String s = t.toString();
    genericMethodSomewhereCheckingForNull(s);
    s.length();
  }

  // HashMap.get may return null; dereferencing it without a check is the bug.
  String hashmapNPE(HashMap h, Object o) {
    return (h.get(o).toString());
  }

  String NPEhashmapProtectedByContainsKey(HashMap h, Object o) {
    if (h.containsKey(o)) {
      return (h.get(o).toString());
    }
    return "aa";
  }

  // Auto-unboxing of a possibly-null Integer → NPE.
  int NPEvalueOfFromHashmapBad(HashMap<Integer,Integer> h, int position) {
    return h.get(position);
  }

  // Boxed return type: no unboxing, so null flows out safely.
  Integer NPEvalueOfFromHashmapGood(HashMap<Integer,Integer> h, int position) {
    return h.get(position);
  }

  // ImmutableList.of() never returns null, so the branch is dead.
  static void ReturnedValueOfImmutableListOf() {
    ImmutableList<Object> l = ImmutableList.of();
    if (l == null) {
      l.toString();
    }
  }

  void nullPointerExceptionInArrayLengthLoop(Object[] arr) {
    for (int i = 0; i < arr.length; i++) {
      Object x = null;
      x.toString();
    }
  }

  Context mContext;
  ContentResolver mContentResolver;

  // ContentResolver.query may return a null Cursor; close() then NPEs.
  public void cursorFromContentResolverNPE(String customClause) {
    String[] projection = {"COUNT(*)"};
    String selectionClause = selectionClause = customClause;
    Cursor cursor = mContext.getContentResolver().query(
        null,
        projection,
        selectionClause,
        null,
        null);
    cursor.close();
  }

  // SQLiteDatabase.query is modeled as never returning null.
  public int cursorQueryShouldNotReturnNull(SQLiteDatabase sqLiteDatabase) {
    Cursor cursor = sqLiteDatabase.query(
        "events", null, null, null, null, null, null);
    try {
      return cursor.getCount();
    } finally {
      cursor.close();
    }
  }

  Object[] arr = new Object[1];

  Object arrayReadShouldNotCauseSymexMemoryError(int i) {
    arr[i].toString();
    // Returning null on purpose; the caller dereferences it.
    return null;
  }

  void nullPointerExceptionCallArrayReadMethod() {
    arr[0] = new Object();
    arrayReadShouldNotCauseSymexMemoryError(0).toString();
  }

  // NeverNullSource/SomeLibrary/T are sibling fixtures defined elsewhere
  // in this test suite.
  public void sinkWithNeverNullSource() {
    NeverNullSource source = new NeverNullSource();
    T t = source.get();
    t.f();
  }

  public void otherSinkWithNeverNullSource() {
    SomeLibrary source = new SomeLibrary();
    T t = source.get();
    t.f();
  }

  private @Nullable Object mFld;

  void nullableFieldNPE() {
    mFld.toString();
  }

  void guardedNullableFieldDeref() {
    if (mFld != null) mFld.toString();
  }

  void allocNullableFieldDeref() {
    mFld = new Object();
    mFld.toString();
  }

  void nullableParamNPE(@Nullable Object param) {
    param.toString();
  }

  void guardedNullableParamDeref(@Nullable Object param) {
    if (param != null) param.toString();
  }

  void allocNullableParamDeref(@Nullable Object param) {
    param = new Object();
    param.toString();
  }

  native boolean test();

  Object getObj() {
    if (test()) {
      return new Object();
    } else {
      return null;
    }
  }

  Boolean getBool() {
    if (test()) {
      return new Boolean(true);
    } else {
      return null;
    }
  }

  // Repeated calls to the same side-effect-free getter after a null check
  // should be treated as consistent by the analyzer.
  void derefGetterAfterCheckShouldNotCauseNPE() {
    if (getObj() != null) {
      getObj().toString();
    }
  }

  void derefBoxedGetterAfterCheckShouldNotCauseNPE() {
    boolean b = getBool() != null && getBool();
  }

  static void derefNonThisGetterAfterCheckShouldNotCauseNPE() {
    NullPointerExceptions c = new NullPointerExceptions();
    if (c.getObj() != null) {
      c.getObj().toString();
    }
  }

  // Checking getBool() says nothing about getObj(): deliberate bug.
  void badCheckShouldCauseNPE() {
    if (getBool() != null) getObj().toString();
  }

  void nullPointerExceptionArrayLength() {
    Object[] arr = null;
    int i = arr.length;
  }

  // Exercises the analyzer on classes whose names contain '$'.
  class $$Class$Name$With$Dollars {
    void npeWithDollars() {
      String s = null;
      int n = s.length();
    }
  }

  // TextUtils.isEmpty(str) == false implies str != null.
  void nullableNonNullStringAfterTextUtilsIsEmptyCheckShouldNotCauseNPE(@Nullable String str) {
    if(!TextUtils.isEmpty(str)) {
      str.length();
    }
  }
}
package kosen.talha.viagogo.com.viagogo.pojos; import android.content.Context; import android.os.Parcel; import android.os.Parcelable; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.util.ArrayList; import java.util.List; import kosen.talha.viagogo.com.viagogo.R; /** * Created by talhacevher on 06/03/2015. */ // why parcable ? http://prolificinteractive.com/blog/2014/07/18/why-we-love-parcelable public class CountryModal implements Parcelable { private String name; private String localizedName; private String capital; private String relevance; private String region; private String subregion; private String demonym; private String alpha2Code; private String alpha3Code; private String nativeName; private String altSpellings; private String latlng; private String timezones; private String translations; private String borders; private String callingCodes; private String topLevelDomain; private String currencies; private String languages; private Integer population; private Integer area; private Double gini; public CountryModal(Context context, JSONObject country) throws JSONException { this.name = getJsonString(country, "name"); this.capital = getJsonString(country, "capital"); this.relevance = getJsonString(country, "relevance"); this.region = getJsonString(country, "region"); this.subregion = getJsonString(country, "subregion"); this.demonym = getJsonString(country, "demonym"); this.alpha2Code = getJsonString(country, "alpha2Code"); this.alpha3Code = getJsonString(country, "alpha3Code"); this.nativeName = getJsonString(country, "nativeName"); this.altSpellings = getJsonString(country, "altSpellings"); this.latlng = getJsonString(country, "latlng"); this.timezones = getJsonString(country, "timezones"); this.translations = getJsonString(country, "translations"); this.borders = getJsonString(country, "borders"); this.callingCodes = getJsonString(country, "callingCodes"); this.topLevelDomain = getJsonString(country, 
"topLevelDomain"); this.currencies = getJsonString(country, "currencies"); this.languages = getJsonString(country, "languages"); this.population = getJsonInt(country, "population"); this.area = getJsonInt(country, "area"); this.gini = getJsonDouble(country, "gini"); if(!context.getResources().getString(R.string.lang).equals("en")){ JSONObject jsonTranslations = new JSONObject(translations); this.localizedName = jsonTranslations.getString(context.getResources().getString(R.string.lang)); } else this.localizedName= this.name; } public CountryModal(Parcel in) { this.name = in.readString(); this.capital = in.readString(); this.relevance = in.readString(); this.region = in.readString(); this.subregion = in.readString(); this.demonym = in.readString(); this.alpha2Code = in.readString(); this.alpha3Code = in.readString(); this.nativeName = in.readString(); this.altSpellings = in.readString(); this.latlng = in.readString(); this.timezones = in.readString(); this.translations = in.readString(); this.borders = in.readString(); this.callingCodes = in.readString(); this.topLevelDomain = in.readString(); this.currencies = in.readString(); this.languages = in.readString(); this.localizedName = in.readString(); this.population = in.readInt(); this.area = in.readInt(); this.gini = in.readDouble(); } @Override public int describeContents() { return 0; } @Override public void writeToParcel(Parcel dest, int flags) { dest.writeString(name); dest.writeString(capital); dest.writeString(relevance); dest.writeString(region); dest.writeString(subregion); dest.writeString(demonym); dest.writeString(alpha2Code); dest.writeString(alpha3Code); dest.writeString(nativeName); dest.writeString(altSpellings); dest.writeString(latlng); dest.writeString(timezones); dest.writeString(translations); dest.writeString(borders); dest.writeString(callingCodes); dest.writeString(topLevelDomain); dest.writeString(currencies); dest.writeString(languages); dest.writeString(localizedName); 
dest.writeInt(population); dest.writeInt(area); dest.writeDouble(gini); } public static final Parcelable.Creator<CountryModal> CREATOR = new Parcelable.Creator<CountryModal>() { public CountryModal createFromParcel(final Parcel in) { return new CountryModal(in); } public CountryModal[] newArray(int size) { return new CountryModal[size]; } }; public String getJsonString(JSONObject country, String key) { try { return country.getString(key); } catch (JSONException e) { return ""; } } public int getJsonInt(JSONObject country, String key) { try { return country.getInt(key); } catch (JSONException e) { return 0; } } public Double getJsonDouble(JSONObject country, String key) { try { return country.getDouble(key); } catch (JSONException e) { return 0.0; } } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getCapital() { return capital; } public void setCapital(String capital) { this.capital = capital; } public String getRelevance() { return relevance; } public void setRelevance(String relevance) { this.relevance = relevance; } public String getRegion() { return region; } public void setRegion(String region) { this.region = region; } public String getSubregion() { return subregion; } public void setSubregion(String subregion) { this.subregion = subregion; } public String getDemonym() { return demonym; } public void setDemonym(String demonym) { this.demonym = demonym; } public String getAlpha2Code() { return alpha2Code; } public void setAlpha2Code(String alpha2Code) { this.alpha2Code = alpha2Code; } public String getAlpha3Code() { return alpha3Code; } public void setAlpha3Code(String alpha3Code) { this.alpha3Code = alpha3Code; } public String getNativeName() { return nativeName; } public void setNativeName(String nativeName) { this.nativeName = nativeName; } public String getAltSpellings() { return altSpellings; } public void setAltSpellings(String altSpellings) { this.altSpellings = altSpellings; } public String 
getLatlng() { return latlng; } public void setLatlng(String latlng) { this.latlng = latlng; } public String getTimezones() { return timezones; } public void setTimezones(String timezones) { this.timezones = timezones; } public String getBorders() { return borders; } public void setBorders(String borders) { this.borders = borders; } public String getCallingCodes() { return callingCodes; } public void setCallingCodes(String callingCodes) { this.callingCodes = callingCodes; } public String getTopLevelDomain() { return topLevelDomain; } public void setTopLevelDomain(String topLevelDomain) { this.topLevelDomain = topLevelDomain; } public String getCurrencies() { return currencies; } public void setCurrencies(String currencies) { this.currencies = currencies; } public String getLanguages() { return languages; } public void setLanguages(String languages) { this.languages = languages; } public Integer getPopulation() { return population; } public void setPopulation(Integer population) { this.population = population; } public Integer getArea() { return area; } public void setArea(Integer area) { this.area = area; } public Double getGini() { return gini; } public void setGini(Double gini) { this.gini = gini; } public String getTranslations() { return translations; } public void setTranslations(String translations) { this.translations = translations; } public String getLocalizedName() { return localizedName; } public void setLocalizedName(String localizedName) { this.localizedName = localizedName; } }
/* * @(#)BezierAnim.java 1.6 98/12/03 * * Copyright 1998 by Sun Microsystems, Inc., * 901 San Antonio Road, Palo Alto, California, 94303, U.S.A. * All rights reserved. * * This software is the confidential and proprietary information * of Sun Microsystems, Inc. ("Confidential Information"). You * shall not disclose such Confidential Information and shall use * it only in accordance with the terms of the license agreement * you entered into with Sun. */ package bezier; import javax.swing.*; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.awt.geom.GeneralPath; import java.awt.image.BufferedImage; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; /** * The BezierAnim class renders an animated Bezier Curve with the drawing * style and paint and the filling paint selected by the user. */ public class BezierAnim extends JApplet { //command line arguments static List<? extends String> arguments; Demo demo; // This map receives the "leaked" objects if the "Leak memory" check box is selected private Map<Long, GeneralPath> leakMap = new HashMap<Long, GeneralPath>(); @Override public void init() { getContentPane().add(demo = new Demo()); getContentPane().add("North", new DemoControls(demo)); } @Override public void start() { demo.start(); } @Override public void stop() { demo.stop(); } /** * The Demo class performs the animation and the painting. 
*/ public class Demo extends JPanel implements Runnable { private Thread thread; private BufferedImage bimg; private static final int NUMPTS = 6; // solid line stoke protected BasicStroke solid = new BasicStroke(10.0f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_ROUND); // dashed line stroke protected BasicStroke dashed = new BasicStroke(10.0f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_ROUND, 10, new float[]{5}, 0); private float animpts[] = new float[NUMPTS * 2]; private float deltas[] = new float[NUMPTS * 2]; protected Paint fillPaint, drawPaint; // indicates whether or not to fill shape protected boolean doFill = true; // indicates whether or not to intentionally leak memory protected boolean leakMemory = arguments.indexOf("leak") > -1; // indicates whether to simulate blocking on the EDT private boolean block = arguments.indexOf("block") > -1; private int blockCounter = 0; // indicates whether to simulate long-running tasks on the EDT private boolean longRunning = arguments.indexOf("longRunning") > -1; private int longRunningCounter = 0; // indicates whether or not to draw shape protected boolean doDraw = true; protected GradientPaint gradient; protected BasicStroke stroke; private final Object waitObject = new Object(); public Demo() { setBackground(Color.WHITE); gradient = new GradientPaint(0, 0, Color.RED, 200, 200, Color.YELLOW); fillPaint = gradient; drawPaint = Color.BLUE; stroke = solid; } // generates new points for the path public void animate(float[] pts, float[] deltas, int i, int limit) { float newpt = pts[i] + deltas[i]; if (newpt <= 0) { newpt = -newpt; deltas[i] = (float)(Math.random() * 4.0 + 2.0); } else if (newpt >= (float)limit) { newpt = 2.0f * limit - newpt; deltas[i] = -(float)(Math.random() * 4.0 + 2.0); } pts[i] = newpt; } /* * generates random points with the specified surface width * and height for the path */ public void reset(int w, int h) { for (int i = 0; i < animpts.length; i += 2) { animpts[i + 0] = (float)(Math.random() * w); animpts[i 
+ 1] = (float)(Math.random() * h); deltas[i + 0] = (float)(Math.random() * 6.0 + 4.0); deltas[i + 1] = (float)(Math.random() * 6.0 + 4.0); if (animpts[i + 0] > w / 2.0f) { deltas[i + 0] = -deltas[i + 0]; } if (animpts[i + 1] > h / 2.0f) { deltas[i + 1] = -deltas[i + 1]; } } gradient = new GradientPaint(0, 0, Color.RED, w * .7f, h * .7f, Color.YELLOW); } // calls animate for every point in animpts public void step(int w, int h) { for (int i = 0; i < animpts.length; i += 2) { animate(animpts, deltas, i + 0, w); animate(animpts, deltas, i + 1, h); } if (block && (++blockCounter % 100 == 0)) { // Enter synchronized method from EDT every 100th time and sleep block(true); blockCounter = 0; } } // sets the points of the path and draws and fills the path public void drawDemo(Graphics2D g2) { float[] ctrlpts = animpts; int len = ctrlpts.length; float prevx = ctrlpts[len - 2]; float prevy = ctrlpts[len - 1]; float curx = ctrlpts[0]; float cury = ctrlpts[1]; float midx = (curx + prevx) / 2.0f; float midy = (cury + prevy) / 2.0f; GeneralPath gp = new GeneralPath(GeneralPath.WIND_NON_ZERO); gp.moveTo(midx, midy); if (leakMemory) { leakMap.put(System.currentTimeMillis(), gp); } for (int i = 2; i <= ctrlpts.length; i += 2) { float x1 = (midx + curx) / 2.0f; float y1 = (midy + cury) / 2.0f; prevx = curx; prevy = cury; if (i < ctrlpts.length) { curx = ctrlpts[i + 0]; cury = ctrlpts[i + 1]; } else { curx = ctrlpts[0]; cury = ctrlpts[1]; } midx = (curx + prevx) / 2.0f; midy = (cury + prevy) / 2.0f; float x2 = (prevx + midx) / 2.0f; float y2 = (prevy + midy) / 2.0f; gp.curveTo(x1, y1, x2, y2, midx, midy); } gp.closePath(); if (doDraw) { g2.setPaint(drawPaint); g2.setStroke(stroke); g2.draw(gp); } if (doFill) { if (fillPaint instanceof GradientPaint) { fillPaint = gradient; } g2.setPaint(fillPaint); g2.fill(gp); } } public Graphics2D createGraphics2D(int w, int h) { Graphics2D g2; if (bimg == null || bimg.getWidth() != w || bimg.getHeight() != h) { bimg = (BufferedImage)createImage(w, 
h); reset(w, h); } g2 = bimg.createGraphics(); g2.setBackground(getBackground()); g2.clearRect(0, 0, w, h); g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); return g2; } @Override public void paint(Graphics g) { Dimension d = getSize(); step(d.width, d.height); Graphics2D g2 = createGraphics2D(d.width, d.height); drawDemo(g2); g2.dispose(); if (bimg != null) { g.drawImage(bimg, 0, 0, this); } } public void start() { thread = new Thread(this); thread.setPriority(Thread.MIN_PRIORITY); thread.start(); } public synchronized void stop() { thread = null; } @Override public void run() { Thread me = Thread.currentThread(); while (thread == me) { repaint(); if (block) { // Enter synchronized method from controlling thread but do not sleep block(false); } try { Thread.sleep(10); } catch (Exception e) { break; } EventQueue.invokeLater(new Runnable() { @Override public void run() { onEDTMethod(); } }); } thread = null; } public void onEDTMethod() { // do something on EDT thread int count = 10; if (longRunning && (++longRunningCounter % 250 == 0)) { longRunningCounter = 0; count = 1000000; } for (int i = 0; i < count ; i++) { Integer.parseInt("1234567"); } } private synchronized void block(boolean sleep) { if (!sleep) { return; } try { // does not give up lock on Demo that guards this method synchronized (waitObject) { // return after 200 ms waitObject.wait(200); } } catch (InterruptedException e) { e.printStackTrace(); } } } // End Demo class /** * The DemoControls class controls fills and strokes. 
 */
// Swing control panel for the Bezier animation: menu-bar choices for the
// outline ("draw") and fill paints, plus checkboxes that toggle the demo's
// leak / blocking / long-running-EDT-task behaviours.
static class DemoControls extends JPanel implements ActionListener {

    // Two 2x1-pixel textures built once in the static initializer:
    // tp1 = green/red, tp2 = blue/red.
    static TexturePaint tp1, tp2;
    static {
        BufferedImage bi = new BufferedImage(2, 1, BufferedImage.TYPE_INT_RGB);
        bi.setRGB(0, 0, 0xff00ff00);
        bi.setRGB(1, 0, 0xffff0000);
        tp1 = new TexturePaint(bi, new Rectangle(0, 0, 2, 1));
        bi = new BufferedImage(2, 1, BufferedImage.TYPE_INT_RGB);
        bi.setRGB(0, 0, 0xff0000ff);
        bi.setRGB(1, 0, 0xffff0000);
        tp2 = new TexturePaint(bi, new Rectangle(0, 0, 2, 1));
    }

    // The animated panel these controls drive.
    Demo demo;

    // Paint choices for the outline; index 0 (fully transparent) means
    // "no draw". Index 3 reuses plain blue on purpose: actionPerformed keys
    // the dashed stroke off the menu-item text ending in "Dash".
    static Paint drawPaints[] = {new Color(0, 0, 0, 0), Color.BLUE,
            new Color(0, 0, 255, 126), Color.BLUE, tp2};
    static String drawName[] = {"No Draw", "Blue", "Blue w/ Alpha",
            "Blue Dash", "Texture"};
    // Paint choices for the fill; index 0 means "no fill".
    static Paint fillPaints[] = {new Color(0, 0, 0, 0), Color.GREEN,
            new Color(0, 255, 0, 126), tp1,
            new GradientPaint(0, 0, Color.RED, 30, 30, Color.YELLOW)};
    // NOTE(review): not static, unlike drawName above — inconsistent but harmless.
    String fillName[] = {"No Fill", "Green", "Green w/ Alpha", "Texture",
            "Gradient"};

    JMenu fillMenu, drawMenu;
    JMenuItem fillMI[] = new JMenuItem[fillPaints.length];
    JMenuItem drawMI[] = new JMenuItem[drawPaints.length];
    JCheckBox leakCheckbox;
    JCheckBox blockCheckbox;
    JCheckBox longRunningCheckbox;
    PaintedIcon fillIcons[] = new PaintedIcon[fillPaints.length];
    PaintedIcon drawIcons[] = new PaintedIcon[drawPaints.length];
    // NOTE(review): never assigned or read in this class — appears unused.
    Thread thread;

    // Builds the two menus (one item per paint, with a painted swatch icon)
    // and the three behaviour checkboxes; registers this panel as listener
    // for everything.
    public DemoControls(Demo demo) {
        this.demo = demo;
        JMenuBar drawMenuBar = new JMenuBar();
        drawMenuBar.setBorder(null);
        JMenuBar fillMenuBar = new JMenuBar();
        fillMenuBar.setBorder(null);
        drawMenu = drawMenuBar.add(new JMenu("Draw Choice"));
        for (int i = 0; i < drawPaints.length; i++) {
            drawIcons[i] = new PaintedIcon(drawPaints[i]);
            drawMI[i] = drawMenu.add(new JMenuItem(drawName[i]));
            drawMI[i].setIcon(drawIcons[i]);
            drawMI[i].addActionListener(this);
        }
        // Default selections shown on the menu buttons: solid blue draw,
        // gradient fill (last fill entry).
        drawMenu.setIcon(drawIcons[1]);
        fillMenu = fillMenuBar.add(new JMenu("Fill Choice"));
        for (int i = 0; i < fillPaints.length; i++) {
            fillIcons[i] = new PaintedIcon(fillPaints[i]);
            fillMI[i] = fillMenu.add(new JMenuItem(fillName[i]));
            fillMI[i].setIcon(fillIcons[i]);
            fillMI[i].addActionListener(this);
        }
        fillMenu.setIcon(fillIcons[fillPaints.length - 1]);
        leakCheckbox = new JCheckBox("Leak memory", demo.leakMemory);
        leakCheckbox.addActionListener(this);
        blockCheckbox = new JCheckBox("Simulate blocking", demo.block);
        blockCheckbox.addActionListener(this);
        longRunningCheckbox = new JCheckBox("Long-running tasks on EDT",
                demo.longRunning);
        longRunningCheckbox.addActionListener(this);
        Box optionsBox = Box.createVerticalBox();
        optionsBox.add(leakCheckbox);
        optionsBox.add(blockCheckbox);
        optionsBox.add(longRunningCheckbox);
        add(drawMenuBar);
        add(fillMenuBar);
        add(optionsBox);
    }

    // Pushes the selected menu item / checkbox state into the Demo fields.
    // Runs on the EDT (Swing listener).
    @Override
    public void actionPerformed(ActionEvent e) {
        Object obj = e.getSource();
        for (int i = 0; i < fillPaints.length; i++) {
            if (obj.equals(fillMI[i])) {
                demo.doFill = true;
                demo.fillPaint = fillPaints[i];
                fillMenu.setIcon(fillIcons[i]);
                break;
            }
        }
        for (int i = 0; i < drawPaints.length; i++) {
            if (obj.equals(drawMI[i])) {
                demo.doDraw = true;
                demo.drawPaint = drawPaints[i];
                // "Blue Dash" is the only item whose text ends with "Dash";
                // it selects the dashed stroke instead of the solid one.
                if (((JMenuItem)obj).getText().endsWith("Dash")) {
                    demo.stroke = demo.dashed;
                } else {
                    demo.stroke = demo.solid;
                }
                drawMenu.setIcon(drawIcons[i]);
                break;
            }
        }
        // Index 0 of either menu means "off"; undo the flags set above.
        if (obj.equals(fillMI[0])) {
            demo.doFill = false;
        } else if (obj.equals(drawMI[0])) {
            demo.doDraw = false;
        }
        demo.leakMemory = leakCheckbox.isSelected();
        demo.block = blockCheckbox.isSelected();
        demo.longRunning = longRunningCheckbox.isSelected();
    }

    /**
     * The PaintedIcon class provides little filled icons
     * for the fill and stroke menu choices.
     */
    static class PaintedIcon implements Icon {
        // Paint used to fill the 12x12 swatch.
        Paint paint;

        public PaintedIcon(Paint p) {
            this.paint = p;
        }

        // Fills the swatch with the paint and frames it with a gray 3D rect.
        @Override
        public void paintIcon(Component c, Graphics g, int x, int y) {
            Graphics2D g2 = (Graphics2D)g;
            g2.setPaint(paint);
            g2.fillRect(x, y, getIconWidth(), getIconHeight());
            g2.setColor(Color.GRAY);
            g2.draw3DRect(x, y, getIconWidth() - 1, getIconHeight() - 1, true);
        }

        @Override
        public int getIconWidth() {
            return 12;
        }

        @Override
        public int getIconHeight() {
            return 12;
        }
    } // End PaintedIcon class
} // End DemoControls class

// Stand-alone entry point: system look-and-feel, a Frame hosting the demo,
// window listeners that pause/resume the animation on (de)iconify.
public static void main(String args[]) throws Exception {
    UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
    // 'arguments' is a field declared earlier in BezierAnim (outside this view).
    arguments = Arrays.asList(args);
    final BezierAnim demo = new BezierAnim();
    demo.init();
    Frame f = new Frame("Java 2D(TM) Demo - BezierAnim");
    f.addWindowListener(new WindowAdapter() {
        @Override
        public void windowClosing(WindowEvent e) {
            System.exit(0);
        }

        @Override
        public void windowDeiconified(WindowEvent e) {
            demo.start();
        }

        @Override
        public void windowIconified(WindowEvent e) {
            demo.stop();
        }
    });
    f.add(demo);
    f.pack();
    f.setSize(new Dimension(400, 350));
    f.setVisible(true);
    demo.start();
}
} // End BezierAnim class
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.projectView;

import com.intellij.JavaTestUtil;
import com.intellij.ide.structureView.newStructureView.StructureViewComponent;
import com.intellij.ide.util.InheritedMembersNodeProvider;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.JavaDirectoryService;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiField;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.testFramework.TestSourceBasedTestCase;
import com.intellij.util.IncorrectOperationException;

import javax.swing.*;

/**
 * Verifies that the structure-view tree tracks PSI/document changes:
 * document edits, PSI element deletion, and lazy expansion of nodes with
 * identical names. Each test disposes its {@link StructureViewComponent}
 * and closes the editor in {@code finally} so a failed assertion cannot
 * leak platform resources into subsequent tests.
 */
public class StructureViewUpdatingTest extends TestSourceBasedTestCase {
  @Override
  protected String getTestPath() {
    return "structureView";
  }

  /**
   * Inserts a field into the document (write command), commits, and checks
   * that the new field shows up in the structure view tree.
   */
  public void testJavaClassStructure() throws Exception {
    final PsiClass psiClass = JavaDirectoryService.getInstance().getClasses(getPackageDirectory("com/package1"))[0];
    final VirtualFile virtualFile = psiClass.getContainingFile().getVirtualFile();
    final FileEditorManager fileEditorManager = FileEditorManager.getInstance(myProject);
    FileEditor[] fileEditors = fileEditorManager.openFile(virtualFile, false);
    final FileEditor fileEditor = fileEditors[0];
    // Created inside try / disposed in finally: previously the component was
    // only disposed at the end of the try block, leaking it whenever an
    // assertion failed (testShowClassMembers already used the finally style).
    StructureViewComponent structureViewComponent = null;
    try {
      structureViewComponent =
        (StructureViewComponent)fileEditor.getStructureViewBuilder().createStructureView(fileEditor, myProject);
      final Document document = PsiDocumentManager.getInstance(myProject).getDocument(psiClass.getContainingFile());
      // Show inherited members so the java.lang.Object methods appear too.
      structureViewComponent.setActionActive(InheritedMembersNodeProvider.ID, true);
      PlatformTestUtil.assertTreeEqual(structureViewComponent.getTree(),
                                       "-Class1.java\n" +
                                       " -Class1\n" +
                                       " getValue(): int\n" +
                                       " getClass(): Class<?>\n" +
                                       " hashCode(): int\n" +
                                       " equals(Object): boolean\n" +
                                       " clone(): Object\n" +
                                       " toString(): String\n" +
                                       " notify(): void\n" +
                                       " notifyAll(): void\n" +
                                       " wait(long): void\n" +
                                       " wait(long, int): void\n" +
                                       " wait(): void\n" +
                                       " finalize(): void\n" +
                                       " myField1: boolean\n" +
                                       " myField2: boolean\n");
      new WriteCommandAction.Simple(getProject()) {
        @Override
        protected void run() throws Throwable {
          final int offset = document.getLineStartOffset(5);
          document.insertString(offset, " boolean myNewField = false;\n");
        }
      }.execute().throwException();
      PsiDocumentManager.getInstance(myProject).commitDocument(document);
      // The structure view refreshes via an alarm; wait for it to fire.
      PlatformTestUtil.waitForAlarm(600);
      //TreeUtil.expand(structureViewComponent.getTree(), 3);
      PlatformTestUtil.assertTreeEqual(structureViewComponent.getTree(),
                                       "-Class1.java\n" +
                                       " -Class1\n" +
                                       " getValue(): int\n" +
                                       " getClass(): Class<?>\n" +
                                       " hashCode(): int\n" +
                                       " equals(Object): boolean\n" +
                                       " clone(): Object\n" +
                                       " toString(): String\n" +
                                       " notify(): void\n" +
                                       " notifyAll(): void\n" +
                                       " wait(long): void\n" +
                                       " wait(long, int): void\n" +
                                       " wait(): void\n" +
                                       " finalize(): void\n" +
                                       " myField1: boolean\n" +
                                       " myField2: boolean\n" +
                                       " myNewField: boolean = false\n");
    }
    finally {
      if (structureViewComponent != null) {
        Disposer.dispose(structureViewComponent);
      }
      fileEditorManager.closeFile(virtualFile);
    }
  }

  /**
   * Selects a field inside an inner class (auto-expanding the tree to it),
   * then deletes that field via a write command and checks the node is gone.
   */
  public void testShowClassMembers() throws Exception {
    final PsiClass psiClass = JavaDirectoryService.getInstance().getClasses(getPackageDirectory("com/package1"))[0];
    final VirtualFile virtualFile = psiClass.getContainingFile().getVirtualFile();
    final FileEditorManager fileEditorManager = FileEditorManager.getInstance(myProject);
    FileEditor[] fileEditors = fileEditorManager.openFile(virtualFile, false);
    final FileEditor fileEditor = fileEditors[0];
    final StructureViewComponent structureViewComponent =
      (StructureViewComponent)fileEditor.getStructureViewBuilder().createStructureView(fileEditor, myProject);
    try {
      PlatformTestUtil.assertTreeEqual(structureViewComponent.getTree(),
                                       "-Class2.java\n" +
                                       " -Class2\n" +
                                       " +InnerClass1\n" +
                                       " +InnerClass2\n" +
                                       " getValue(): int\n" +
                                       " myField1: boolean\n" +
                                       " myField2: boolean\n" +
                                       " myField3: boolean\n" +
                                       " myField4: boolean\n");
      final PsiField innerClassField = psiClass.getInnerClasses()[0].getFields()[0];
      // select() expands the path down to the field.
      structureViewComponent.select(innerClassField, true);
      PlatformTestUtil.assertTreeEqual(structureViewComponent.getTree(),
                                       "-Class2.java\n" +
                                       " -Class2\n" +
                                       " -InnerClass1\n" +
                                       " +InnerClass12\n" +
                                       " myInnerClassField: int\n" +
                                       " +InnerClass2\n" +
                                       " getValue(): int\n" +
                                       " myField1: boolean\n" +
                                       " myField2: boolean\n" +
                                       " myField3: boolean\n" +
                                       " myField4: boolean\n");
      CommandProcessor.getInstance().executeCommand(myProject, () -> WriteCommandAction.runWriteCommandAction(null, () -> {
        try {
          innerClassField.delete();
        }
        catch (IncorrectOperationException e) {
          fail(e.getLocalizedMessage());
        }
      }), null, null);
      PlatformTestUtil.waitForAlarm(600);
      PlatformTestUtil.assertTreeEqual(structureViewComponent.getTree(),
                                       "-Class2.java\n" +
                                       " -Class2\n" +
                                       " -InnerClass1\n" +
                                       " +InnerClass12\n" +
                                       " +InnerClass2\n" +
                                       " getValue(): int\n" +
                                       " myField1: boolean\n" +
                                       " myField2: boolean\n" +
                                       " myField3: boolean\n" +
                                       " myField4: boolean\n");
    }
    finally {
      Disposer.dispose(structureViewComponent);
      fileEditorManager.closeFile(virtualFile);
    }
  }

  /**
   * Expands one of several identically-named XML nodes and checks only that
   * node gains children. (Method name keeps its historical "Exiting" typo —
   * renaming would break test discovery by name.)
   */
  public void testExpandElementWithExitingName() throws InterruptedException {
    final VirtualFile xmlVirtualFile = getContentRoot().findFileByRelativePath("test.xml");
    final FileEditorManager fileEditorManager = FileEditorManager.getInstance(myProject);
    FileEditor[] fileEditors = fileEditorManager.openFile(xmlVirtualFile, false);
    final FileEditor fileEditor = fileEditors[0];
    // Same dispose-in-finally fix as testJavaClassStructure.
    StructureViewComponent structureViewComponent = null;
    try {
      structureViewComponent =
        (StructureViewComponent)fileEditor.getStructureViewBuilder().createStructureView(fileEditor, myProject);
      final JTree tree = structureViewComponent.getTree();
      PlatformTestUtil.assertTreeEqual(tree,
                                       "-test.xml\n" +
                                       " -test\n" +
                                       " +level1\n" +
                                       " +level1\n" +
                                       " +level1\n" +
                                       " +level1\n");
      tree.expandPath(tree.getPathForRow(3));
      PlatformTestUtil.waitForAlarm(600);
      PlatformTestUtil.assertTreeEqual(tree,
                                       "-test.xml\n" +
                                       " -test\n" +
                                       " +level1\n" +
                                       " -level1\n" +
                                       " +level2\n" +
                                       " +level1\n" +
                                       " +level1\n");
    }
    finally {
      if (structureViewComponent != null) {
        Disposer.dispose(structureViewComponent);
      }
      fileEditorManager.closeFile(xmlVirtualFile);
    }
  }

  @Override
  protected String getTestDataPath() {
    return JavaTestUtil.getJavaTestDataPath();
  }
}
package jenkins.python.descriptor; import hudson.model.*; import hudson.model.UserPropertyDescriptor.*; import hudson.DescriptorExtensionList; import hudson.PluginWrapper; import hudson.RelativePath; import hudson.XmlFile; import hudson.BulkChange; import hudson.Util; import hudson.model.listeners.SaveableListener; import hudson.util.FormApply; import hudson.util.ReflectionUtils; import hudson.util.ReflectionUtils.Parameter; import hudson.views.ListViewColumn; import jenkins.model.Jenkins; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.kohsuke.stapler.*; import org.kohsuke.stapler.jelly.JellyCompatibleFacet; import org.kohsuke.stapler.lang.Klass; import org.springframework.util.StringUtils; import org.jvnet.tiger_types.Types; import org.apache.commons.io.IOUtils; import hudson.Functions.*; import hudson.util.QuotedStringTokenizer.*; import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND; import javax.servlet.ServletException; import javax.servlet.RequestDispatcher; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.HashMap; import java.util.Locale; import java.util.Arrays; import java.util.Collections; import java.util.concurrent.ConcurrentHashMap; import java.util.logging.Level; import java.util.logging.Logger; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.lang.reflect.Type; import java.lang.reflect.Field; import java.lang.reflect.ParameterizedType; import java.beans.Introspector; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import hudson.model.Descriptor.*; import jenkins.python.DataConvertor; import jenkins.python.PythonExecutor; /** * This class was automatically generated by the PWM tool on 2014/03/21. 
 * Generated bridge descriptor: each {@link UserPropertyDescriptor} method is
 * routed to a registered Python function when one exists, otherwise it falls
 * back to the Java superclass implementation. The {@code super*} methods
 * expose the superclass behaviour to the Python side, and the
 * {@code execPython*} helpers expose typed calls into the executor.
 *
 * @see hudson.model.UserPropertyDescriptor
 */
public abstract class UserPropertyDescriptorPW extends UserPropertyDescriptor {

    // Lazily created bridge to the Python implementation; transient so it is
    // rebuilt after deserialization.
    private transient PythonExecutor pexec;

    // Creates the executor on first use and registers the bridged functions.
    // The index of each entry in 'functions' below MUST match the
    // pexec.isImplemented(i) index used by the corresponding override.
    private void initPython() {
        if (pexec == null) {
            pexec = new PythonExecutor(this);
            // Abstract methods the Python side is required to implement.
            String[] jMethods = new String[2];
            jMethods[0] = "newInstance";
            jMethods[1] = "getDisplayName";
            String[] pFuncs = new String[2];
            pFuncs[0] = "new_instance";
            pFuncs[1] = "get_display_name";
            Class[][] argTypes = new Class[2][];
            argTypes[0] = new Class[1];
            argTypes[0][0] = User.class;
            argTypes[1] = new Class[0];
            pexec.checkAbstrMethods(jMethods, pFuncs, argTypes);
            // Optional overridable methods (duplicated names are overloads
            // distinguished by the arity in argsCount below).
            String[] functions = new String[28];
            functions[0] = "is_enabled"; functions[1] = "get_id"; functions[2] = "get_t";
            functions[3] = "get_descriptor_url"; functions[4] = "get_check_url";
            functions[5] = "calc_fill_settings"; functions[6] = "calc_auto_complete_settings";
            functions[7] = "get_property_type"; functions[8] = "get_property_type_or_die";
            functions[9] = "get_property_type"; functions[10] = "get_global_property_type";
            functions[11] = "new_instance"; functions[12] = "new_instance";
            functions[13] = "get_klass"; functions[14] = "get_help_file";
            functions[15] = "get_help_file"; functions[16] = "get_help_file";
            functions[17] = "add_help_file_redirect"; functions[18] = "configure";
            functions[19] = "configure"; functions[20] = "get_config_page";
            functions[21] = "get_global_config_page"; functions[22] = "get_possible_view_names";
            functions[23] = "save"; functions[24] = "load"; functions[25] = "get_config_file";
            functions[26] = "get_plugin"; functions[27] = "do_help";
            // Arity of each Python function above.
            int[] argsCount = new int[28];
            argsCount[0] = 0; argsCount[1] = 0; argsCount[2] = 0; argsCount[3] = 0;
            argsCount[4] = 1; argsCount[5] = 2; argsCount[6] = 2; argsCount[7] = 2;
            argsCount[8] = 2; argsCount[9] = 1; argsCount[10] = 1; argsCount[11] = 1;
            argsCount[12] = 2; argsCount[13] = 0; argsCount[14] = 0; argsCount[15] = 1;
            argsCount[16] = 2; argsCount[17] = 3; argsCount[18] = 1; argsCount[19] = 2;
            argsCount[20] = 0; argsCount[21] = 0; argsCount[22] = 1; argsCount[23] = 0;
            argsCount[24] = 0; argsCount[25] = 0; argsCount[26] = 0; argsCount[27] = 2;
            pexec.registerFunctions(functions, argsCount);
        }
    }

    public UserPropertyDescriptorPW(Class<? extends UserProperty> clazz) {
        super(clazz);
    }

    public UserPropertyDescriptorPW() {
        super();
    }

    // ---- Abstract methods: always delegated to Python. ----

    @Override
    public UserProperty newInstance(User user) {
        initPython();
        return (UserProperty) pexec.execPython("new_instance", user);
    }

    @Override
    public String getDisplayName() {
        initPython();
        return (String) pexec.execPython("get_display_name");
    }

    // ---- Optional overrides: Python when implemented, else superclass. ----

    @Override
    public boolean isEnabled() {
        initPython();
        if (pexec.isImplemented(0)) { return pexec.execPythonBool("is_enabled"); } else { return super.isEnabled(); }
    }

    @Override
    public String getId() {
        initPython();
        if (pexec.isImplemented(1)) { return (String) pexec.execPython("get_id"); } else { return super.getId(); }
    }

    @Override
    public Class<UserProperty> getT() {
        initPython();
        if (pexec.isImplemented(2)) { return (Class) pexec.execPython("get_t"); } else { return super.getT(); }
    }

    @Override
    public String getDescriptorUrl() {
        initPython();
        if (pexec.isImplemented(3)) { return (String) pexec.execPython("get_descriptor_url"); } else { return super.getDescriptorUrl(); }
    }

    @Override
    public String getCheckUrl(String fieldName) {
        initPython();
        if (pexec.isImplemented(4)) { return (String) pexec.execPython("get_check_url", fieldName); } else { return super.getCheckUrl(fieldName); }
    }

    @Override
    public void calcFillSettings(String field, Map<String, Object> attributes) {
        initPython();
        if (pexec.isImplemented(5)) { pexec.execPythonVoid("calc_fill_settings", field, attributes); } else { super.calcFillSettings(field, attributes); }
    }

    @Override
    public void calcAutoCompleteSettings(String field, Map<String, Object> attributes) {
        initPython();
        if (pexec.isImplemented(6)) { pexec.execPythonVoid("calc_auto_complete_settings", field, attributes); } else { super.calcAutoCompleteSettings(field, attributes); }
    }

    @Override
    public @CheckForNull PropertyType getPropertyType(@Nonnull Object instance, @Nonnull String field) {
        initPython();
        if (pexec.isImplemented(7)) { return (PropertyType) pexec.execPython("get_property_type", instance, field); } else { return super.getPropertyType(instance, field); }
    }

    @Override
    public @Nonnull PropertyType getPropertyTypeOrDie(@Nonnull Object instance, @Nonnull String field) {
        initPython();
        if (pexec.isImplemented(8)) { return (PropertyType) pexec.execPython("get_property_type_or_die", instance, field); } else { return super.getPropertyTypeOrDie(instance, field); }
    }

    @Override
    public PropertyType getPropertyType(String field) {
        initPython();
        if (pexec.isImplemented(9)) { return (PropertyType) pexec.execPython("get_property_type", field); } else { return super.getPropertyType(field); }
    }

    @Override
    public PropertyType getGlobalPropertyType(String field) {
        initPython();
        if (pexec.isImplemented(10)) { return (PropertyType) pexec.execPython("get_global_property_type", field); } else { return super.getGlobalPropertyType(field); }
    }

    @Override
    public UserProperty newInstance(StaplerRequest req) throws FormException {
        initPython();
        if (pexec.isImplemented(11)) { return (UserProperty) pexec.execPython("new_instance", req); } else { return super.newInstance(req); }
    }

    @Override
    public UserProperty newInstance(StaplerRequest req, JSONObject formData) throws FormException {
        initPython();
        if (pexec.isImplemented(12)) { return (UserProperty) pexec.execPython("new_instance", req, formData); } else { return super.newInstance(req, formData); }
    }

    @Override
    public Klass<?> getKlass() {
        initPython();
        if (pexec.isImplemented(13)) { return (Klass) pexec.execPython("get_klass"); } else { return super.getKlass(); }
    }

    @Override
    public String getHelpFile() {
        initPython();
        if (pexec.isImplemented(14)) { return (String) pexec.execPython("get_help_file"); } else { return super.getHelpFile(); }
    }

    @Override
    public String getHelpFile(final String fieldName) {
        initPython();
        if (pexec.isImplemented(15)) { return (String) pexec.execPython("get_help_file", fieldName); } else { return super.getHelpFile(fieldName); }
    }

    @Override
    public String getHelpFile(Klass<?> clazz, String fieldName) {
        initPython();
        if (pexec.isImplemented(16)) { return (String) pexec.execPython("get_help_file", clazz, fieldName); } else { return super.getHelpFile(clazz, fieldName); }
    }

    @Override
    public void addHelpFileRedirect(String fieldName, Class<? extends Describable> owner, String fieldNameToRedirectTo) {
        initPython();
        if (pexec.isImplemented(17)) { pexec.execPythonVoid("add_help_file_redirect", fieldName, owner, fieldNameToRedirectTo); } else { super.addHelpFileRedirect(fieldName, owner, fieldNameToRedirectTo); }
    }

    @Override
    public boolean configure(StaplerRequest req) throws FormException {
        initPython();
        if (pexec.isImplemented(18)) { return pexec.execPythonBool("configure", req); } else { return super.configure(req); }
    }

    @Override
    public boolean configure(StaplerRequest req, JSONObject json) throws FormException {
        initPython();
        if (pexec.isImplemented(19)) { return pexec.execPythonBool("configure", req, json); } else { return super.configure(req, json); }
    }

    @Override
    public String getConfigPage() {
        initPython();
        if (pexec.isImplemented(20)) { return (String) pexec.execPython("get_config_page"); } else { return super.getConfigPage(); }
    }

    @Override
    public String getGlobalConfigPage() {
        initPython();
        if (pexec.isImplemented(21)) { return (String) pexec.execPython("get_global_config_page"); } else { return super.getGlobalConfigPage(); }
    }

    @Override
    public List<String> getPossibleViewNames(String baseName) {
        initPython();
        if (pexec.isImplemented(22)) { return (List) pexec.execPython("get_possible_view_names", baseName); } else { return super.getPossibleViewNames(baseName); }
    }

    @Override
    public synchronized void save() {
        initPython();
        if (pexec.isImplemented(23)) { pexec.execPythonVoid("save"); } else { super.save(); }
    }

    @Override
    public synchronized void load() {
        initPython();
        if (pexec.isImplemented(24)) { pexec.execPythonVoid("load"); } else { super.load(); }
    }

    @Override
    public XmlFile getConfigFile() {
        initPython();
        if (pexec.isImplemented(25)) { return (XmlFile) pexec.execPython("get_config_file"); } else { return super.getConfigFile(); }
    }

    @Override
    public PluginWrapper getPlugin() {
        initPython();
        if (pexec.isImplemented(26)) { return (PluginWrapper) pexec.execPython("get_plugin"); } else { return super.getPlugin(); }
    }

    @Override
    public void doHelp(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
        initPython();
        if (pexec.isImplemented(27)) { pexec.execPythonVoid("do_help", req, rsp); } else { super.doHelp(req, rsp); }
    }

    // ---- Superclass accessors for the Python side. ----

    public boolean superIsEnabled() { return super.isEnabled(); }

    public String superGetId() { return super.getId(); }

    public Class<UserProperty> superGetT() { return super.getT(); }

    public String superGetDescriptorUrl() { return super.getDescriptorUrl(); }

    public String superGetCheckUrl(String fieldName) { return super.getCheckUrl(fieldName); }

    public void superCalcFillSettings(String field, Map<String, Object> attributes) { super.calcFillSettings(field, attributes); }

    public void superCalcAutoCompleteSettings(String field, Map<String, Object> attributes) { super.calcAutoCompleteSettings(field, attributes); }

    public PropertyType superGetPropertyType(@Nonnull Object instance, @Nonnull String field) { return super.getPropertyType(instance, field); }

    public PropertyType superGetPropertyTypeOrDie(@Nonnull Object instance, @Nonnull String field) { return super.getPropertyTypeOrDie(instance, field); }

    public PropertyType superGetPropertyType(String field) { return super.getPropertyType(field); }

    public PropertyType superGetGlobalPropertyType(String field) { return super.getGlobalPropertyType(field); }

    public UserProperty superNewInstance(StaplerRequest req) throws FormException { return super.newInstance(req); }

    public UserProperty superNewInstance(StaplerRequest req, JSONObject formData) throws FormException { return super.newInstance(req, formData); }

    public Klass<?> superGetKlass() { return super.getKlass(); }

    public String superGetHelpFile() { return super.getHelpFile(); }

    public String superGetHelpFile(final String fieldName) { return super.getHelpFile(fieldName); }

    public String superGetHelpFile(Klass<?> clazz, String fieldName) { return super.getHelpFile(clazz, fieldName); }

    public void superAddHelpFileRedirect(String fieldName, Class<? extends Describable> owner, String fieldNameToRedirectTo) { super.addHelpFileRedirect(fieldName, owner, fieldNameToRedirectTo); }

    public boolean superConfigure(StaplerRequest req) throws FormException { return super.configure(req); }

    public boolean superConfigure(StaplerRequest req, JSONObject json) throws FormException { return super.configure(req, json); }

    public String superGetConfigPage() { return super.getConfigPage(); }

    public String superGetGlobalConfigPage() { return super.getGlobalConfigPage(); }

    public List<String> superGetPossibleViewNames(String baseName) { return super.getPossibleViewNames(baseName); }

    public synchronized void superSave() { super.save(); }

    public synchronized void superLoad() { super.load(); }

    public XmlFile superGetConfigFile() { return super.getConfigFile(); }

    public PluginWrapper superGetPlugin() { return super.getPlugin(); }

    public void superDoHelp(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException { super.doHelp(req, rsp); }

    // ---- Typed entry points into the Python executor. ----

    public Object execPython(String function, Object... params) { initPython(); return pexec.execPython(function, params); }

    public byte execPythonByte(String function, Object... params) { initPython(); return pexec.execPythonByte(function, params); }

    public short execPythonShort(String function, Object... params) { initPython(); return pexec.execPythonShort(function, params); }

    public char execPythonChar(String function, Object... params) { initPython(); return pexec.execPythonChar(function, params); }

    public int execPythonInt(String function, Object... params) { initPython(); return pexec.execPythonInt(function, params); }

    public long execPythonLong(String function, Object... params) { initPython(); return pexec.execPythonLong(function, params); }

    public float execPythonFloat(String function, Object... params) { initPython(); return pexec.execPythonFloat(function, params); }

    public double execPythonDouble(String function, Object... params) { initPython(); return pexec.execPythonDouble(function, params); }

    public boolean execPythonBool(String function, Object... params) { initPython(); return pexec.execPythonBool(function, params); }

    public void execPythonVoid(String function, Object... params) { initPython(); pexec.execPythonVoid(function, params); }
}
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License
 */

package android.location;

import junit.framework.TestCase;

import android.test.suitebuilder.annotation.SmallTest;

import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Random;
import java.util.Set;

/**
 * Unit tests for {@link GpsStatus}.
 *
 * GpsStatus has no public mutators, so this test drives its hidden
 * constructor and package-private setters via reflection, populating it with
 * randomly generated satellite data and then reading it back through the
 * public API.
 */
@SmallTest
public class GpsStatusTest extends TestCase {

    // Upper bound for generated values; PRNs are drawn from [1, MAX_VALUE].
    private static final int MAX_VALUE = 250;

    private final Random mRandom = new Random();

    // Status object under test plus the expected data last pushed into it.
    private GpsStatus mStatus;
    private int mCount;
    private int[] mPrns;
    private float[] mSnrs;
    private float[] mElevations;
    private float[] mAzimuth;
    private int mEphemerisMask;
    private int mAlmanacMask;
    private int mUsedInFixMask;

    // Creates a fresh (empty) status and pre-generates random expected data;
    // the data is only applied to mStatus by tests that call setSatellites.
    public void setUp() throws Exception {
        super.setUp();
        mStatus = createGpsStatus();
        generateSatellitesData(generateInt());
    }

    public void testEmptyGpsStatus() throws Exception {
        verifyIsEmpty(mStatus);
    }

    // Checks the iterator contract: repeated hasNext() calls do not advance,
    // and exactly two satellites are produced.
    public void testGpsStatusIterator() throws Exception {
        generateSatellitesData(2);
        setSatellites(mStatus);
        Iterator<GpsSatellite> iterator = mStatus.getSatellites().iterator();
        assertTrue("hasNext(1)", iterator.hasNext());
        assertTrue("hasNext(1) does not overflow", iterator.hasNext());
        GpsSatellite satellite1 = iterator.next();
        assertNotNull("satellite", satellite1);
        assertTrue("hasNext(2)", iterator.hasNext());
        assertTrue("hasNext(2) does not overflow", iterator.hasNext());
        GpsSatellite satellite2 = iterator.next();
        assertNotNull("satellite", satellite2);
        assertFalse("hasNext() no elements", iterator.hasNext());
    }

    public void testTtff() throws Exception {
        int testTtff = generateInt();
        set(mStatus, testTtff);
        verifyTtff(mStatus, testTtff);
    }

    // TTFF must be carried over when one status is copied into another.
    public void testCopyTtff() throws Exception {
        int testTtff = generateInt();
        verifyTtff(mStatus, 0);
        GpsStatus otherStatus = createGpsStatus();
        set(otherStatus, testTtff);
        verifyTtff(otherStatus, testTtff);
        set(mStatus, otherStatus);
        verifyTtff(mStatus, testTtff);
    }

    public void testSetSatellites() throws Exception {
        setSatellites(mStatus);
        verifySatellites(mStatus);
    }

    public void testCopySatellites() throws Exception {
        verifyIsEmpty(mStatus);
        GpsStatus otherStatus = createGpsStatus();
        setSatellites(otherStatus);
        verifySatellites(otherStatus);
        set(mStatus, otherStatus);
        verifySatellites(mStatus);
    }

    // Copy over a status that already holds data for the same PRNs.
    public void testOverrideSatellites() throws Exception {
        setSatellites(mStatus);
        verifySatellites(mStatus);
        GpsStatus otherStatus = createGpsStatus();
        generateSatellitesData(mCount, true /* reusePrns */);
        setSatellites(otherStatus);
        verifySatellites(otherStatus);
        set(mStatus, otherStatus);
        verifySatellites(mStatus);
    }

    // Copy with a different (fresh) PRN set of the same size.
    public void testAddSatellites() throws Exception {
        int count = 10;
        generateSatellitesData(count);
        setSatellites(mStatus);
        verifySatellites(mStatus);
        GpsStatus otherStatus = createGpsStatus();
        generateSatellitesData(count);
        setSatellites(otherStatus);
        verifySatellites(otherStatus);
        set(mStatus, otherStatus);
        verifySatellites(mStatus);
    }

    // Copy from a status with twice as many satellites.
    public void testAddMoreSatellites() throws Exception {
        int count = 25;
        generateSatellitesData(count);
        setSatellites(mStatus);
        verifySatellites(mStatus);
        GpsStatus otherStatus = createGpsStatus();
        generateSatellitesData(count * 2);
        setSatellites(otherStatus);
        verifySatellites(otherStatus);
        set(mStatus, otherStatus);
        verifySatellites(mStatus);
    }

    // Copy from a status with half as many satellites.
    public void testAddLessSatellites() throws Exception {
        int count = 25;
        generateSatellitesData(count * 2);
        setSatellites(mStatus);
        verifySatellites(mStatus);
        GpsStatus otherStatus = createGpsStatus();
        generateSatellitesData(count);
        setSatellites(otherStatus);
        verifySatellites(otherStatus);
        set(mStatus, otherStatus);
        verifySatellites(mStatus);
    }

    private static void verifyIsEmpty(GpsStatus status) {
        verifySatelliteCount(status, 0);
        verifyTtff(status, 0);
    }

    // Counts satellites via the public iterable (the loop variable is
    // intentionally unused — only the iteration count matters).
    private static void verifySatelliteCount(GpsStatus status, int expectedCount) {
        int satellites = 0;
        for (GpsSatellite s : status.getSatellites()) {
            ++satellites;
        }
        assertEquals("GpsStatus::SatelliteCount", expectedCount, satellites);
    }

    private void verifySatellites(GpsStatus status) {
        verifySatelliteCount(status, mCount);
        verifySatellites(status, mCount, mPrns, mSnrs, mElevations, mAzimuth,
                mEphemerisMask, mAlmanacMask, mUsedInFixMask);
    }

    // Compares every expected satellite against what the status reports for
    // the same PRN, including the three per-PRN mask bits.
    private static void verifySatellites(
            GpsStatus status,
            int count,
            int[] prns,
            float[] snrs,
            float[] elevations,
            float[] azimuth,
            int ephemerisMask,
            int almanacMask,
            int usedInFixMask) {
        for (int i = 0; i < count; ++i) {
            int prn = prns[i];
            GpsSatellite satellite = getSatellite(status, prn);
            assertNotNull(getSatelliteAssertInfo(i, prn, "non-null"), satellite);
            assertEquals(getSatelliteAssertInfo(i, prn, "Snr"), snrs[i], satellite.getSnr());
            assertEquals(
                    getSatelliteAssertInfo(i, prn, "Elevation"),
                    elevations[i],
                    satellite.getElevation());
            assertEquals(
                    getSatelliteAssertInfo(i, prn, "Azimuth"),
                    azimuth[i],
                    satellite.getAzimuth());
            // NOTE(review): for prn > 32 this int shift wraps (Java uses the
            // low 5 bits of the shift count), so mask bits can alias between
            // PRNs even though PRNs are generated up to MAX_VALUE = 250 —
            // presumably mirrors GpsStatus's own 32-bit mask handling; confirm
            // against the GpsStatus implementation.
            int prnShift = 1 << (prn - 1);
            assertEquals(
                    getSatelliteAssertInfo(i, prn, "ephemeris"),
                    (ephemerisMask & prnShift) != 0,
                    satellite.hasEphemeris());
            assertEquals(
                    getSatelliteAssertInfo(i, prn, "almanac"),
                    (almanacMask & prnShift) != 0,
                    satellite.hasAlmanac());
            assertEquals(
                    getSatelliteAssertInfo(i, prn, "usedInFix"),
                    (usedInFixMask & prnShift) != 0,
                    satellite.usedInFix());
        }
    }

    private static void verifyTtff(GpsStatus status, int expectedTtff) {
        assertEquals("GpsStatus::TTFF", expectedTtff, status.getTimeToFirstFix());
    }

    // GpsStatus has no public constructor; instantiate via reflection.
    private static GpsStatus createGpsStatus() throws Exception {
        Constructor<GpsStatus> ctor = GpsStatus.class.getDeclaredConstructor();
        ctor.setAccessible(true);
        return ctor.newInstance();
    }

    // Calls the hidden setTimeToFirstFix(int) via reflection.
    private static void set(GpsStatus status, int ttff) throws Exception {
        Class<?> statusClass = status.getClass();
        Method setTtff = statusClass.getDeclaredMethod("setTimeToFirstFix", Integer.TYPE);
        setTtff.setAccessible(true);
        setTtff.invoke(status, ttff);
    }

    // Calls the hidden copy setter setStatus(GpsStatus) via reflection.
    private static void set(GpsStatus status, GpsStatus statusToSet) throws Exception {
        Class<?> statusClass = status.getClass();
        Method setStatus = statusClass.getDeclaredMethod("setStatus", statusClass);
        setStatus.setAccessible(true);
        setStatus.invoke(status, statusToSet);
    }

    // Pushes the fields generated by generateSatellitesData into 'status'.
    private void setSatellites(GpsStatus status) throws Exception {
        set(status, mCount, mPrns, mSnrs, mElevations, mAzimuth,
                mEphemerisMask, mAlmanacMask, mUsedInFixMask);
    }

    // Calls the hidden bulk setter
    // setStatus(int, int[], float[], float[], float[], int, int, int);
    // the array parameter classes are looked up by JVM descriptor ("[I", "[F").
    private static void set(
            GpsStatus status,
            int count,
            int[] prns,
            float[] snrs,
            float[] elevations,
            float[] azimuth,
            int ephemerisMask,
            int almanacMask,
            int usedInFixMask) throws Exception {
        Class<?> statusClass = status.getClass();
        Class<?> intClass = Integer.TYPE;
        Class<?> floatArrayClass = Class.forName("[F");
        Method setStatus = statusClass.getDeclaredMethod(
                "setStatus",
                intClass,
                Class.forName("[I"),
                floatArrayClass,
                floatArrayClass,
                floatArrayClass,
                intClass,
                intClass,
                intClass);
        setStatus.setAccessible(true);
        setStatus.invoke(
                status,
                count,
                prns,
                snrs,
                elevations,
                azimuth,
                ephemerisMask,
                almanacMask,
                usedInFixMask);
    }

    // Random int in [1, MAX_VALUE]; never 0 so it can double as a PRN.
    private int generateInt() {
        return mRandom.nextInt(MAX_VALUE) + 1;
    }

    // Random array of 'count' DISTINCT ints (used as PRNs).
    private int[] generateIntArray(int count) {
        Set<Integer> generatedPrns = new HashSet<>();
        int[] array = new int[count];
        for(int i = 0; i < count; ++i) {
            int generated;
            do {
                generated = generateInt();
            } while (generatedPrns.contains(generated));
            array[i] = generated;
            generatedPrns.add(generated);
        }
        return array;
    }

    // Random float array; values are whole numbers so float equality in the
    // assertions above is exact.
    private float[] generateFloatArray(int count) {
        float[] array = new float[count];
        for(int i = 0; i < count; ++i) {
            array[i] = generateInt();
        }
        return array;
    }

    // Builds a random bitmask selecting roughly half of the given PRNs
    // (same 1 << (prn - 1) encoding the verifier uses).
    private int generateMask(int[] prns) {
        int mask = 0;
        int prnsLength = prns.length;
        for (int i = 0; i < prnsLength; ++i) {
            if (mRandom.nextBoolean()) {
                mask |= 1 << (prns[i] - 1);
            }
        }
        return mask;
    }

    private void generateSatellitesData(int count) {
        generateSatellitesData(count, false /* reusePrns */);
    }

    // Regenerates the expected-data fields; with reusePrns the previous PRN
    // array is kept (callers pass the same count in that case) while all
    // other values are refreshed.
    private void generateSatellitesData(int count, boolean reusePrns) {
        mCount = count;
        if (!reusePrns) {
            mPrns = generateIntArray(count);
        }
        mSnrs = generateFloatArray(count);
        mElevations = generateFloatArray(count);
        mAzimuth = generateFloatArray(count);
        mEphemerisMask = generateMask(mPrns);
        mAlmanacMask = generateMask(mPrns);
        mUsedInFixMask = generateMask(mPrns);
    }

    // Linear search for the satellite reporting the given PRN, or null.
    private static GpsSatellite getSatellite(GpsStatus status, int prn) {
        for (GpsSatellite satellite : status.getSatellites()) {
            if (satellite.getPrn() == prn) {
                return satellite;
            }
        }
        return null;
    }

    private static String getSatelliteAssertInfo(int index, int prn, String param) {
        return String.format("Satellite::%s [i=%d, prn=%d]", param, index, prn);
    }
}
/**
 * Copyright 2010 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.waveprotocol.box.server.waveserver;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListenableFutureTask;
import com.google.gxp.compiler.io.RuntimeIOException;

import org.waveprotocol.box.common.DeltaSequence;
import org.waveprotocol.box.server.persistence.PersistenceException;
import org.waveprotocol.box.server.util.WaveletDataUtil;
import org.waveprotocol.wave.federation.Proto.ProtocolAppliedWaveletDelta;
import org.waveprotocol.wave.model.id.IdURIEncoderDecoder;
import org.waveprotocol.wave.model.id.WaveletName;
import org.waveprotocol.wave.model.operation.OperationException;
import org.waveprotocol.wave.model.operation.wave.TransformedWaveletDelta;
import org.waveprotocol.wave.model.version.HashedVersion;
import org.waveprotocol.wave.model.version.HashedVersionFactory;
import org.waveprotocol.wave.model.version.HashedVersionFactoryImpl;
import org.waveprotocol.wave.model.wave.data.ReadableWaveletData;
import org.waveprotocol.wave.model.wave.data.WaveletData;
import org.waveprotocol.wave.util.escapers.jvm.JavaUrlCodec;
import org.waveprotocol.wave.util.logging.Log;

import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Simplistic {@link DeltaStore}-backed wavelet state implementation
 * which goes to persistent storage for every history request.
 *
 * TODO(soren): rewire this class to be backed by {@link WaveletStore} and
 * read the snapshot from there instead of computing it in the
 * DeltaStoreBasedWaveletState constructor.
 *
 * @author soren@google.com (Soren Lassen)
 */
class DeltaStoreBasedWaveletState implements WaveletState {

  private static final Log LOG = Log.get(DeltaStoreBasedWaveletState.class);

  private static final IdURIEncoderDecoder URI_CODEC =
      new IdURIEncoderDecoder(new JavaUrlCodec());

  private static final HashedVersionFactory HASH_FACTORY =
      new HashedVersionFactoryImpl(URI_CODEC);

  /** Projects a delta record onto its transformed delta. */
  private static final Function<WaveletDeltaRecord, TransformedWaveletDelta> TRANSFORMED =
      new Function<WaveletDeltaRecord, TransformedWaveletDelta>() {
        @Override
        public TransformedWaveletDelta apply(WaveletDeltaRecord record) {
          return record.getTransformedDelta();
        }
      };

  /**
   * @return An entry keyed by a hashed version with the given version number,
   *         if any, otherwise null.
   */
  private static <T> Map.Entry<HashedVersion, T> lookupCached(
      NavigableMap<HashedVersion, T> map, long version) {
    // Smallest key with version number >= version. HashedVersion.unsigned
    // sorts before any signed hash at the same version number.
    HashedVersion key = HashedVersion.unsigned(version);
    Map.Entry<HashedVersion, T> entry = map.ceilingEntry(key);
    return (entry != null && entry.getKey().getVersion() == version) ? entry : null;
  }

  /**
   * Creates a new delta store based state.
   *
   * The executor must ensure that only one thread executes at any time for each
   * state instance.
   *
   * @param deltasAccess delta store accessor
   * @param persistExecutor executor for making persistence calls
   * @return a state initialized from the deltas
   * @throws PersistenceException if a failure occurs while reading or
   *         processing stored deltas
   */
  public static DeltaStoreBasedWaveletState create(DeltaStore.DeltasAccess deltasAccess,
      Executor persistExecutor) throws PersistenceException {
    if (deltasAccess.isEmpty()) {
      return new DeltaStoreBasedWaveletState(deltasAccess,
          ImmutableList.<WaveletDeltaRecord>of(), null, persistExecutor);
    } else {
      try {
        ImmutableList<WaveletDeltaRecord> deltas = readAll(deltasAccess);
        WaveletData snapshot = WaveletDataUtil.buildWaveletFromDeltas(
            deltasAccess.getWaveletName(),
            Iterators.transform(deltas.iterator(), TRANSFORMED));
        return new DeltaStoreBasedWaveletState(deltasAccess, deltas, snapshot, persistExecutor);
      } catch (IOException e) {
        throw new PersistenceException("Failed to read stored deltas", e);
      } catch (OperationException e) {
        throw new PersistenceException("Failed to compose stored deltas", e);
      }
    }
  }

  /**
   * Reads all deltas from persistent storage.
   */
  private static ImmutableList<WaveletDeltaRecord> readAll(WaveletDeltaRecordReader reader)
      throws IOException {
    long startVersion = 0;
    long endVersion = reader.getEndVersion().getVersion();
    return readDeltasInRange(reader, startVersion, endVersion);
  }

  /**
   * Reads the contiguous range of deltas [startVersion, endVersion) from storage.
   * Each delta's resulting version supplies the next read position.
   */
  private static ImmutableList<WaveletDeltaRecord> readDeltasInRange(
      final WaveletDeltaRecordReader reader, final long startVersion, final long endVersion)
      throws IOException {
    Preconditions.checkArgument(!reader.isEmpty());
    ImmutableList.Builder<WaveletDeltaRecord> result = ImmutableList.builder();
    long i = startVersion;
    while (i < endVersion) {
      WaveletDeltaRecord delta;
      delta = reader.getDelta(i);
      result.add(delta);
      i = delta.getResultingVersion().getVersion();
    }
    return result.build();
  }

  private final Executor persistExecutor;
  private final HashedVersion versionZero;
  private final DeltaStore.DeltasAccess deltasAccess;

  /** The lock that guards access to persistence related state. */
  private final Object persistLock = new Object();

  /**
   * Indicates the version of the latest appended delta that was already requested to be
   * persisted.
   */
  private HashedVersion latestVersionToPersist = null;

  /** The persist task that will be executed next. */
  private ListenableFutureTask<Void> nextPersistTask = null;

  /**
   * Processes the persist task and checks if there is another task to do when
   * one task is done. In such a case, it writes all waiting to be persisted
   * deltas to persistent storage in one operation.
   */
  private final Callable<Void> persisterTask = new Callable<Void>() {
    @Override
    public Void call() throws PersistenceException {
      HashedVersion last;
      HashedVersion version;
      synchronized (persistLock) {
        last = lastPersistedVersion.get();
        version = latestVersionToPersist;
      }
      if (last != null && version.getVersion() <= last.getVersion()) {
        LOG.info("Attempt to persist version " + version
            + " smaller than last persisted version " + last);
        // Done, version is already persisted.
        version = last;
      } else {
        // Collect the in-memory deltas from the last persisted version up to
        // 'version' and append them to storage in a single operation.
        ImmutableList.Builder<WaveletDeltaRecord> deltas = ImmutableList.builder();
        HashedVersion v = (last == null) ? versionZero : last;
        do {
          WaveletDeltaRecord d =
              new WaveletDeltaRecord(v, appliedDeltas.get(v), transformedDeltas.get(v));
          deltas.add(d);
          v = d.getResultingVersion();
        } while (v.getVersion() < version.getVersion());
        Preconditions.checkState(v.equals(version));
        deltasAccess.append(deltas.build());
      }
      synchronized (persistLock) {
        Preconditions.checkState(last == lastPersistedVersion.get(),
            "lastPersistedVersion changed while we were writing to storage");
        lastPersistedVersion.set(version);
        if (nextPersistTask != null) {
          // More deltas were requested while we were writing; chain the next task.
          persistExecutor.execute(nextPersistTask);
          nextPersistTask = null;
        } else {
          latestVersionToPersist = null;
        }
      }
      return null;
    }
  };

  /** Keyed by appliedAtVersion. */
  private final ConcurrentNavigableMap<HashedVersion,
      ByteStringMessage<ProtocolAppliedWaveletDelta>> appliedDeltas =
      new ConcurrentSkipListMap<HashedVersion, ByteStringMessage<ProtocolAppliedWaveletDelta>>();

  /** Keyed by appliedAtVersion. */
  private final ConcurrentNavigableMap<HashedVersion, TransformedWaveletDelta> transformedDeltas =
      new ConcurrentSkipListMap<HashedVersion, TransformedWaveletDelta>();

  /** Is null if the wavelet state is empty. */
  private WaveletData snapshot;

  /**
   * Last version persisted with a call to persist(), or null if never called.
   * It's an atomic reference so we can set in one thread (which
   * asynchronously writes deltas to storage) and read it in another,
   * simultaneously.
   */
  private final AtomicReference<HashedVersion> lastPersistedVersion;

  /**
   * Constructs a wavelet state with the given deltas and snapshot.
   * The deltas must be the contents of deltasAccess, and they
   * must be contiguous from version zero.
   * The snapshot must be the composition of the deltas, or null if there
   * are no deltas. The constructed object takes ownership of the
   * snapshot and will mutate it if appendDelta() is called.
   */
  @VisibleForTesting
  DeltaStoreBasedWaveletState(DeltaStore.DeltasAccess deltasAccess,
      List<WaveletDeltaRecord> deltas, WaveletData snapshot, Executor persistExecutor) {
    Preconditions.checkArgument(deltasAccess.isEmpty() == deltas.isEmpty());
    Preconditions.checkArgument(deltas.isEmpty() == (snapshot == null));
    this.persistExecutor = persistExecutor;
    this.versionZero = HASH_FACTORY.createVersionZero(deltasAccess.getWaveletName());
    this.deltasAccess = deltasAccess;
    this.snapshot = snapshot;
    this.lastPersistedVersion = new AtomicReference<HashedVersion>(deltasAccess.getEndVersion());
  }

  @Override
  public WaveletName getWaveletName() {
    return deltasAccess.getWaveletName();
  }

  @Override
  public ReadableWaveletData getSnapshot() {
    return snapshot;
  }

  @Override
  public HashedVersion getCurrentVersion() {
    return (snapshot == null) ? versionZero : snapshot.getHashedVersion();
  }

  @Override
  public HashedVersion getLastPersistedVersion() {
    HashedVersion version = lastPersistedVersion.get();
    return (version == null) ? versionZero : version;
  }

  @Override
  public HashedVersion getHashedVersion(long version) {
    // Consult the in-memory cache first; fall back to a storage lookup below.
    final Entry<HashedVersion, TransformedWaveletDelta> cachedEntry =
        lookupCached(transformedDeltas, version);
    if (version == 0) {
      return versionZero;
    } else if (snapshot == null) {
      return null;
    } else if (version == snapshot.getVersion()) {
      return snapshot.getHashedVersion();
    } else {
      WaveletDeltaRecord delta;
      try {
        delta = lookup(version);
      } catch (IOException e) {
        throw new RuntimeIOException(e);
      }
      if (delta == null && cachedEntry != null) {
        return cachedEntry.getKey();
      } else {
        return delta != null ? delta.getAppliedAtVersion() : null;
      }
    }
  }

  @Override
  public TransformedWaveletDelta getTransformedDelta(
      final HashedVersion beginVersion) {
    TransformedWaveletDelta delta = transformedDeltas.get(beginVersion);
    if (delta != null) {
      return delta;
    } else {
      WaveletDeltaRecord nowDelta;
      try {
        nowDelta = lookup(beginVersion.getVersion());
      } catch (IOException e) {
        throw new RuntimeIOException(e);
      }
      return nowDelta != null ? nowDelta.transformed : null;
    }
  }

  @Override
  public TransformedWaveletDelta getTransformedDeltaByEndVersion(final HashedVersion endVersion) {
    Preconditions.checkArgument(endVersion.getVersion() > 0,
        "end version %s is not positive", endVersion);
    // Candidate from the in-memory cache: the delta applied at the greatest
    // version strictly below endVersion.
    Entry<HashedVersion, TransformedWaveletDelta> transformedEntry =
        transformedDeltas.lowerEntry(endVersion);
    final TransformedWaveletDelta cachedDelta =
        transformedEntry != null ? transformedEntry.getValue() : null;
    if (snapshot == null) {
      return null;
    } else {
      WaveletDeltaRecord deltaRecord = getDeltaRecordByEndVersion(endVersion);
      TransformedWaveletDelta delta;
      if (deltaRecord == null && cachedDelta != null
          && cachedDelta.getResultingVersion().equals(endVersion)) {
        delta = cachedDelta;
      } else {
        delta = deltaRecord != null ? deltaRecord.getTransformedDelta() : null;
      }
      return delta;
    }
  }

  @Override
  public DeltaSequence getTransformedDeltaHistory(final HashedVersion startVersion,
      final HashedVersion endVersion) {
    Preconditions.checkArgument(startVersion.getVersion() < endVersion.getVersion(),
        "Start version %s should be smaller than end version %s", startVersion, endVersion);
    // The history deltas can be either in the memory - waiting to be persisted,
    // or already persisted. We take both and merge into one list.
    final NavigableMap<HashedVersion, TransformedWaveletDelta> cachedDeltas = Maps.newTreeMap();
    cachedDeltas.putAll(transformedDeltas.subMap(startVersion, true, endVersion, false));
    ImmutableList<WaveletDeltaRecord> persistedDeltas;
    try {
      persistedDeltas =
          readDeltasInRange(deltasAccess, startVersion.getVersion(), endVersion.getVersion());
    } catch (IOException e) {
      throw new RuntimeIOException(e);
    }
    NavigableMap<HashedVersion, TransformedWaveletDelta> allTransformedDeltasMap =
        Maps.newTreeMap();
    allTransformedDeltasMap.putAll(cachedDeltas);
    for (WaveletDeltaRecord d : persistedDeltas) {
      allTransformedDeltasMap.put(d.getAppliedAtVersion(), d.getTransformedDelta());
    }
    DeltaSequence nowDeltaSequence;
    // Only return a sequence if it is contiguous and exactly spans
    // [startVersion, endVersion]; otherwise report null.
    if (!allTransformedDeltasMap.isEmpty()
        && allTransformedDeltasMap.firstKey().equals(startVersion)
        && allTransformedDeltasMap.lastEntry().getValue().getResultingVersion()
            .equals(endVersion)) {
      List<TransformedWaveletDelta> cachedAndPersitentDeltasList =
          Lists.newArrayList(allTransformedDeltasMap.values());
      nowDeltaSequence = DeltaSequence.of(cachedAndPersitentDeltasList);
    } else {
      nowDeltaSequence = null;
    }
    return nowDeltaSequence;
  }

  @Override
  public ByteStringMessage<ProtocolAppliedWaveletDelta> getAppliedDelta(
      HashedVersion beginVersion) {
    ByteStringMessage<ProtocolAppliedWaveletDelta> delta = appliedDeltas.get(beginVersion);
    if (delta != null) {
      return delta;
    } else {
      WaveletDeltaRecord record = null;
      try {
        record = lookup(beginVersion.getVersion());
      } catch (IOException e) {
        // BUG FIX: the exception was previously constructed but never thrown
        // ("new RuntimeIOException(e);"), silently swallowing storage read
        // failures. Rethrow, matching getTransformedDelta and getHashedVersion.
        throw new RuntimeIOException(e);
      }
      return record != null ? record.applied : null;
    }
  }

  @Override
  public ByteStringMessage<ProtocolAppliedWaveletDelta> getAppliedDeltaByEndVersion(
      final HashedVersion endVersion) {
    Preconditions.checkArgument(endVersion.getVersion() > 0,
        "end version %s is not positive", endVersion);
    Entry<HashedVersion, ByteStringMessage<ProtocolAppliedWaveletDelta>> appliedEntry =
        appliedDeltas.lowerEntry(endVersion);
    final ByteStringMessage<ProtocolAppliedWaveletDelta> cachedDelta =
        appliedEntry != null ? appliedEntry.getValue() : null;
    WaveletDeltaRecord deltaRecord = getDeltaRecordByEndVersion(endVersion);
    ByteStringMessage<ProtocolAppliedWaveletDelta> appliedDelta;
    if (deltaRecord == null && isDeltaBoundary(endVersion)) {
      appliedDelta = cachedDelta;
    } else {
      appliedDelta = deltaRecord != null ? deltaRecord.getAppliedDelta() : null;
    }
    return appliedDelta;
  }

  @Override
  public Collection<ByteStringMessage<ProtocolAppliedWaveletDelta>> getAppliedDeltaHistory(
      HashedVersion startVersion, HashedVersion endVersion) {
    Preconditions.checkArgument(startVersion.getVersion() < endVersion.getVersion());
    // NOTE(review): only consults the in-memory map; deltas that were flushed
    // after persistence are not returned here (see getAppliedDeltaHistory1).
    return (isDeltaBoundary(startVersion) && isDeltaBoundary(endVersion))
        ? appliedDeltas.subMap(startVersion, endVersion).values()
        : null;
  }

  /**
   * Variant of {@link #getAppliedDeltaHistory} that merges the in-memory
   * applied deltas with those read back from persistent storage.
   */
  public Collection<ByteStringMessage<ProtocolAppliedWaveletDelta>> getAppliedDeltaHistory1(
      final HashedVersion startVersion, final HashedVersion endVersion) {
    Preconditions.checkArgument(startVersion.getVersion() < endVersion.getVersion());
    final Set<ByteStringMessage<ProtocolAppliedWaveletDelta>> allDeltas = Sets.newHashSet();
    allDeltas.addAll(appliedDeltas.subMap(startVersion, endVersion).values());
    ImmutableList<WaveletDeltaRecord> persistedDeltas;
    try {
      persistedDeltas =
          readDeltasInRange(deltasAccess, startVersion.getVersion(), endVersion.getVersion());
    } catch (IOException e) {
      throw new RuntimeIOException(e);
    }
    for (WaveletDeltaRecord d : persistedDeltas) {
      allDeltas.add(d.getAppliedDelta());
    }
    Collection<ByteStringMessage<ProtocolAppliedWaveletDelta>> deltaCollection =
        Lists.newArrayList();
    if (isDeltaBoundary(startVersion) && isDeltaBoundary(endVersion)) {
      for (ByteStringMessage<ProtocolAppliedWaveletDelta> appliedDelta : allDeltas) {
        deltaCollection.add(appliedDelta);
      }
    }
    return deltaCollection;
  }

  @Override
  public void appendDelta(HashedVersion appliedAtVersion,
      TransformedWaveletDelta transformedDelta,
      ByteStringMessage<ProtocolAppliedWaveletDelta> appliedDelta)
      throws OperationException {
    HashedVersion currentVersion = getCurrentVersion();
    Preconditions.checkArgument(currentVersion.equals(appliedAtVersion),
        "Applied version %s doesn't match current version %s", appliedAtVersion, currentVersion);

    if (appliedAtVersion.getVersion() == 0) {
      Preconditions.checkState(lastPersistedVersion.get() == null);
      snapshot = WaveletDataUtil.buildWaveletFromFirstDelta(getWaveletName(), transformedDelta);
    } else {
      WaveletDataUtil.applyWaveletDelta(transformedDelta, snapshot);
    }

    // Now that we built the snapshot without any exceptions, we record the delta.
    transformedDeltas.put(appliedAtVersion, transformedDelta);
    appliedDeltas.put(appliedAtVersion, appliedDelta);
  }

  @Override
  public ListenableFuture<Void> persist(final HashedVersion version) {
    Preconditions.checkArgument(version.getVersion() > 0,
        "Cannot persist non-positive version %s", version);
    Preconditions.checkArgument(isDeltaBoundary(version),
        "Version to persist %s matches no delta", version);
    synchronized (persistLock) {
      if (latestVersionToPersist != null) {
        // There's a persist task in flight.
        if (version.getVersion() <= latestVersionToPersist.getVersion()) {
          LOG.info("Attempt to persist version " + version
              + " smaller than last version requested " + latestVersionToPersist);
        } else {
          latestVersionToPersist = version;
        }
        if (nextPersistTask == null) {
          nextPersistTask = new ListenableFutureTask<Void>(persisterTask);
        }
        return nextPersistTask;
      } else {
        latestVersionToPersist = version;
        ListenableFutureTask<Void> resultTask = new ListenableFutureTask<Void>(persisterTask);
        persistExecutor.execute(resultTask);
        return resultTask;
      }
    }
  }

  @Override
  public void flush(HashedVersion version) {
    // Drops the single delta applied just below 'version' from both caches.
    // NOTE(review): lowerKey() returns null when no such delta exists, and
    // ConcurrentSkipListMap.remove(null) throws NPE — confirm callers only
    // flush versions with an in-memory predecessor.
    transformedDeltas.remove(transformedDeltas.lowerKey(version));
    appliedDeltas.remove(appliedDeltas.lowerKey(version));
    if (LOG.isFineLoggable()) {
      LOG.fine("Flushed deltas up to version " + version);
    }
  }

  @Override
  public void close() {
  }

  /**
   * @return the delta record applied at the given version number, read from
   *         persistent storage, or null if none.
   */
  private WaveletDeltaRecord lookup(long version) throws IOException {
    return deltasAccess.getDelta(version);
  }

  /** Reads the delta whose resulting version equals endVersion, or null. */
  private WaveletDeltaRecord getDeltaRecordByEndVersion(HashedVersion endVersion) {
    long version = endVersion.getVersion();
    try {
      return deltasAccess.getDeltaByEndVersion(version);
    } catch (IOException e) {
      throw new RuntimeIOException(e);
    }
  }

  /** @return true iff some delta starts or the wavelet ends exactly at this version. */
  private boolean isDeltaBoundary(HashedVersion version) {
    Preconditions.checkNotNull(version, "version is null");
    return version.equals(getCurrentVersion()) || transformedDeltas.containsKey(version);
  }
}
/*
       Licensed to the Apache Software Foundation (ASF) under one
       or more contributor license agreements.  See the NOTICE file
       distributed with this work for additional information
       regarding copyright ownership.  The ASF licenses this file
       to you under the Apache License, Version 2.0 (the
       "License"); you may not use this file except in compliance
       with the License.  You may obtain a copy of the License at

         http://www.apache.org/licenses/LICENSE-2.0

       Unless required by applicable law or agreed to in writing,
       software distributed under the License is distributed on an
       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
       KIND, either express or implied.  See the License for the
       specific language governing permissions and limitations
       under the License.
*/
package com.remobile.file;

import android.net.Uri;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;

import com.remobile.cordova.CordovaResourceApi;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

/**
 * Base class for a mounted filesystem: maps LocalFilesystemURLs to native
 * URIs, builds JSON Entry objects for the JS side, and implements copy/read
 * plumbing shared by all concrete filesystem types.
 */
public abstract class Filesystem {

    protected final Uri rootUri;
    protected final CordovaResourceApi resourceApi;
    public final String name;
    // Lazily-built JSON entry for the filesystem root; cached by getRootEntry().
    private JSONObject rootEntry;

    public Filesystem(Uri rootUri, String name, CordovaResourceApi resourceApi) {
        this.rootUri = rootUri;
        this.name = name;
        this.resourceApi = resourceApi;
    }

    /** Callback handed the opened stream by readFileAtURL(). */
    public interface ReadFileCallback {
        public void handleData(InputStream inputStream, String contentType) throws IOException;
    }

    /**
     * Builds the JSON "Entry" object the JS layer expects for the given URL.
     * Never returns null; wraps the (impossible) JSONException in a RuntimeException.
     */
    public static JSONObject makeEntryForURL(LocalFilesystemURL inputURL, Uri nativeURL) {
        try {
            String path = inputURL.path;
            // Strip a single trailing "/" (directories) before extracting the name.
            int end = path.endsWith("/") ? 1 : 0;
            String[] parts = path.substring(0, path.length() - end).split("/+");
            String fileName = parts[parts.length - 1];
            JSONObject entry = new JSONObject();
            entry.put("isFile", !inputURL.isDirectory);
            entry.put("isDirectory", inputURL.isDirectory);
            entry.put("name", fileName);
            entry.put("fullPath", path);
            // The file system can't be specified, as it would lead to an infinite loop,
            // but the filesystem name can be.
            entry.put("filesystemName", inputURL.fsName);
            // Backwards compatibility
            entry.put("filesystem", "temporary".equals(inputURL.fsName) ? 0 : 1);
            String nativeUrlStr = nativeURL.toString();
            if (inputURL.isDirectory && !nativeUrlStr.endsWith("/")) {
                nativeUrlStr += "/";
            }
            entry.put("nativeURL", nativeUrlStr);
            return entry;
        } catch (JSONException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }
    }

    public JSONObject makeEntryForURL(LocalFilesystemURL inputURL) {
        Uri nativeUri = toNativeUri(inputURL);
        return nativeUri == null ? null : makeEntryForURL(inputURL, nativeUri);
    }

    public JSONObject makeEntryForNativeUri(Uri nativeUri) {
        LocalFilesystemURL inputUrl = toLocalUri(nativeUri);
        return inputUrl == null ? null : makeEntryForURL(inputUrl, nativeUri);
    }

    public JSONObject getEntryForLocalURL(LocalFilesystemURL inputURL) throws IOException {
        return makeEntryForURL(inputURL);
    }

    public JSONObject makeEntryForFile(File file) {
        return makeEntryForNativeUri(Uri.fromFile(file));
    }

    abstract JSONObject getFileForLocalURL(LocalFilesystemURL inputURL, String path,
            JSONObject options, boolean directory)
            throws FileExistsException, IOException, TypeMismatchException, EncodingException,
            JSONException;

    abstract boolean removeFileAtLocalURL(LocalFilesystemURL inputURL)
            throws InvalidModificationException, NoModificationAllowedException;

    abstract boolean recursiveRemoveFileAtLocalURL(LocalFilesystemURL inputURL)
            throws FileExistsException, NoModificationAllowedException;

    abstract LocalFilesystemURL[] listChildren(LocalFilesystemURL inputURL)
            throws FileNotFoundException;

    /** Lists a directory as a JSON array of Entry objects (empty array if no children). */
    public final JSONArray readEntriesAtLocalURL(LocalFilesystemURL inputURL)
            throws FileNotFoundException {
        LocalFilesystemURL[] children = listChildren(inputURL);
        JSONArray entries = new JSONArray();
        if (children != null) {
            for (LocalFilesystemURL url : children) {
                entries.put(makeEntryForURL(url));
            }
        }
        return entries;
    }

    abstract JSONObject getFileMetadataForLocalURL(LocalFilesystemURL inputURL)
            throws FileNotFoundException;

    public Uri getRootUri() {
        return rootUri;
    }

    /** Existence test implemented via the metadata lookup. */
    public boolean exists(LocalFilesystemURL inputURL) {
        try {
            getFileMetadataForLocalURL(inputURL);
        } catch (FileNotFoundException e) {
            return false;
        }
        return true;
    }

    public Uri nativeUriForFullPath(String fullPath) {
        Uri ret = null;
        if (fullPath != null) {
            String encodedPath = Uri.fromFile(new File(fullPath)).getEncodedPath();
            if (encodedPath.startsWith("/")) {
                encodedPath = encodedPath.substring(1);
            }
            ret = rootUri.buildUpon().appendEncodedPath(encodedPath).build();
        }
        return ret;
    }

    public LocalFilesystemURL localUrlforFullPath(String fullPath) {
        Uri nativeUri = nativeUriForFullPath(fullPath);
        if (nativeUri != null) {
            return toLocalUri(nativeUri);
        }
        return null;
    }

    /**
     * Removes multiple repeated //s, and collapses processes ../s.
     */
    protected static String normalizePath(String rawPath) {
        // If this is an absolute path, trim the leading "/" and replace it later
        boolean isAbsolutePath = rawPath.startsWith("/");
        if (isAbsolutePath) {
            rawPath = rawPath.replaceFirst("/+", "");
        }
        ArrayList<String> components = new ArrayList<String>(Arrays.asList(rawPath.split("/+")));
        for (int index = 0; index < components.size(); ++index) {
            if (components.get(index).equals("..")) {
                components.remove(index);
                if (index > 0) {
                    components.remove(index - 1);
                    --index;
                }
            }
        }
        StringBuilder normalizedPath = new StringBuilder();
        for (String component : components) {
            normalizedPath.append("/");
            normalizedPath.append(component);
        }
        if (isAbsolutePath) {
            return normalizedPath.toString();
        } else {
            return normalizedPath.toString().substring(1);
        }
    }

    public abstract Uri toNativeUri(LocalFilesystemURL inputURL);

    public abstract LocalFilesystemURL toLocalUri(Uri inputURL);

    public JSONObject getRootEntry() {
        if (rootEntry == null) {
            rootEntry = makeEntryForNativeUri(rootUri);
        }
        return rootEntry;
    }

    public JSONObject getParentForLocalURL(LocalFilesystemURL inputURL) throws IOException {
        Uri parentUri = inputURL.uri;
        String parentPath = new File(inputURL.uri.getPath()).getParent();
        // BUG FIX: File.getParent() returns null for the root path; the previous
        // code then built a literal "null/" path. Treat null like "/": the root
        // is its own parent.
        if (parentPath != null && !"/".equals(parentPath)) {
            parentUri = inputURL.uri.buildUpon().path(parentPath + '/').build();
        }
        return getEntryForLocalURL(LocalFilesystemURL.parse(parentUri));
    }

    protected LocalFilesystemURL makeDestinationURL(String newName, LocalFilesystemURL srcURL,
            LocalFilesystemURL destURL, boolean isDirectory) {
        // I know this looks weird but it is to work around a JSON bug.
        if ("null".equals(newName) || "".equals(newName)) {
            newName = srcURL.uri.getLastPathSegment();
        }
        String newDest = destURL.uri.toString();
        if (newDest.endsWith("/")) {
            newDest = newDest + newName;
        } else {
            newDest = newDest + "/" + newName;
        }
        if (isDirectory) {
            newDest += '/';
        }
        return LocalFilesystemURL.parse(newDest);
    }

    /* Read a source URL (possibly from a different filesystem, srcFs,) and copy it to
     * the destination URL on this filesystem, optionally with a new filename.
     * If move is true, then this method should either perform an atomic move operation
     * or remove the source file when finished.
     */
    public JSONObject copyFileToURL(LocalFilesystemURL destURL, String newName,
            Filesystem srcFs, LocalFilesystemURL srcURL, boolean move)
            throws IOException, InvalidModificationException, JSONException,
            NoModificationAllowedException, FileExistsException {
        // First, check to see that we can do it
        if (move && !srcFs.canRemoveFileAtLocalURL(srcURL)) {
            throw new NoModificationAllowedException("Cannot move file at source URL");
        }
        final LocalFilesystemURL destination =
                makeDestinationURL(newName, srcURL, destURL, srcURL.isDirectory);

        Uri srcNativeUri = srcFs.toNativeUri(srcURL);

        CordovaResourceApi.OpenForReadResult ofrr = resourceApi.openForRead(srcNativeUri);
        OutputStream os = null;
        try {
            os = getOutputStreamForURL(destination);
        } catch (IOException e) {
            // Don't leak the input stream if the destination can't be opened.
            ofrr.inputStream.close();
            throw e;
        }
        // Closes streams.
        resourceApi.copyResource(ofrr, os);

        if (move) {
            srcFs.removeFileAtLocalURL(srcURL);
        }
        return getEntryForLocalURL(destination);
    }

    public OutputStream getOutputStreamForURL(LocalFilesystemURL inputURL) throws IOException {
        return resourceApi.openOutputStream(toNativeUri(inputURL));
    }

    /**
     * Opens the file and hands [start, end) of it to the callback.
     * end < 0 means "to the end of the file".
     */
    public void readFileAtURL(LocalFilesystemURL inputURL, long start, long end,
            ReadFileCallback readFileCallback) throws IOException {
        CordovaResourceApi.OpenForReadResult ofrr = resourceApi.openForRead(toNativeUri(inputURL));
        if (end < 0) {
            end = ofrr.length;
        }
        long numBytesToRead = end - start;
        try {
            if (start > 0) {
                // NOTE(review): skip() may skip fewer bytes than requested;
                // confirm the underlying stream honors the full skip.
                ofrr.inputStream.skip(start);
            }
            InputStream inputStream = ofrr.inputStream;
            if (end < ofrr.length) {
                inputStream = new LimitedInputStream(inputStream, numBytesToRead);
            }
            readFileCallback.handleData(inputStream, ofrr.mimeType);
        } finally {
            ofrr.inputStream.close();
        }
    }

    abstract long writeToFileAtURL(LocalFilesystemURL inputURL, String data, int offset,
            boolean isBinary) throws NoModificationAllowedException, IOException;

    abstract long truncateFileAtURL(LocalFilesystemURL inputURL, long size)
            throws IOException, NoModificationAllowedException;

    // This method should return null if filesystem urls cannot be mapped to paths
    abstract String filesystemPathForURL(LocalFilesystemURL url);

    abstract LocalFilesystemURL URLforFilesystemPath(String path);

    abstract boolean canRemoveFileAtLocalURL(LocalFilesystemURL inputURL);

    /** Wraps a stream and stops returning data after numBytesToRead bytes. */
    protected class LimitedInputStream extends FilterInputStream {
        long numBytesToRead;

        public LimitedInputStream(InputStream in, long numBytesToRead) {
            super(in);
            this.numBytesToRead = numBytesToRead;
        }

        @Override
        public int read() throws IOException {
            if (numBytesToRead <= 0) {
                return -1;
            }
            int b = in.read();
            // Only consume budget for a byte actually delivered (not on EOF).
            if (b >= 0) {
                numBytesToRead--;
            }
            return b;
        }

        @Override
        public int read(byte[] buffer, int byteOffset, int byteCount) throws IOException {
            if (numBytesToRead <= 0) {
                return -1;
            }
            int bytesToRead = byteCount;
            if (byteCount > numBytesToRead) {
                bytesToRead = (int) numBytesToRead; // Cast okay; long is less than int here.
            }
            int numBytesRead = in.read(buffer, byteOffset, bytesToRead);
            // BUG FIX: in.read() returns -1 at EOF; the previous code subtracted
            // it unconditionally, incrementing the remaining-byte counter.
            if (numBytesRead > 0) {
                numBytesToRead -= numBytesRead;
            }
            return numBytesRead;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.checkpoint; import org.apache.flink.annotation.Internal; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.api.common.JobID; import org.apache.flink.api.common.time.Time; import org.apache.flink.configuration.CoreOptions; import org.apache.flink.runtime.checkpoint.hooks.MasterHooks; import org.apache.flink.runtime.checkpoint.savepoint.SavepointLoader; import org.apache.flink.runtime.checkpoint.savepoint.SavepointStore; import org.apache.flink.runtime.concurrent.FutureUtils; import org.apache.flink.runtime.execution.ExecutionState; import org.apache.flink.runtime.executiongraph.Execution; import org.apache.flink.runtime.executiongraph.ExecutionAttemptID; import org.apache.flink.runtime.executiongraph.ExecutionJobVertex; import org.apache.flink.runtime.executiongraph.ExecutionVertex; import org.apache.flink.runtime.executiongraph.JobStatusListener; import org.apache.flink.runtime.jobgraph.JobStatus; import org.apache.flink.runtime.jobgraph.JobVertexID; import org.apache.flink.runtime.jobgraph.OperatorID; import org.apache.flink.runtime.jobgraph.tasks.ExternalizedCheckpointSettings; import 
org.apache.flink.runtime.messages.checkpoint.AcknowledgeCheckpoint; import org.apache.flink.runtime.messages.checkpoint.DeclineCheckpoint; import org.apache.flink.runtime.state.SharedStateRegistry; import org.apache.flink.runtime.state.SharedStateRegistryFactory; import org.apache.flink.runtime.taskmanager.DispatcherThreadFactory; import org.apache.flink.util.Preconditions; import org.apache.flink.util.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nullable; import java.util.ArrayDeque; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Executor; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import static org.apache.flink.util.Preconditions.checkArgument; import static org.apache.flink.util.Preconditions.checkNotNull; /** * The checkpoint coordinator coordinates the distributed snapshots of operators and state. * It triggers the checkpoint by sending the messages to the relevant tasks and collects the * checkpoint acknowledgements. It also collects and maintains the overview of the state handles * reported by the tasks that acknowledge the checkpoint. */ public class CheckpointCoordinator { private static final Logger LOG = LoggerFactory.getLogger(CheckpointCoordinator.class); /** The number of recent checkpoints whose IDs are remembered */ private static final int NUM_GHOST_CHECKPOINT_IDS = 16; // ------------------------------------------------------------------------ /** Coordinator-wide lock to safeguard the checkpoint updates */ private final Object lock = new Object(); /** Lock specially to make sure that trigger requests do not overtake each other. 
	 * This is not done with the coordinator-wide lock, because as part of triggering,
	 * blocking operations may happen (distributed atomic counters).
	 * Using a dedicated lock, we avoid blocking the processing of 'acknowledge/decline'
	 * messages during that phase. */
	private final Object triggerLock = new Object();

	/** The job whose checkpoint this coordinator coordinates */
	private final JobID job;

	/** Default checkpoint properties **/
	private final CheckpointProperties checkpointProperties;

	/** The executor used for asynchronous calls, like potentially blocking I/O */
	private final Executor executor;

	/** Tasks who need to be sent a message when a checkpoint is started */
	private final ExecutionVertex[] tasksToTrigger;

	/** Tasks who need to acknowledge a checkpoint before it succeeds */
	private final ExecutionVertex[] tasksToWaitFor;

	/** Tasks who need to be sent a message when a checkpoint is confirmed */
	private final ExecutionVertex[] tasksToCommitTo;

	/** Map from checkpoint ID to the pending checkpoint */
	private final Map<Long, PendingCheckpoint> pendingCheckpoints;

	/** Completed checkpoints. Implementations can be blocking. Make sure calls to methods
	 * accessing this don't block the job manager actor and run asynchronously. */
	private final CompletedCheckpointStore completedCheckpointStore;

	/** Default directory for persistent checkpoints; <code>null</code> if none configured.
	 * THIS WILL BE REPLACED BY PROPER STATE-BACKEND METADATA WRITING */
	@Nullable
	private final String checkpointDirectory;

	/** A list of recent checkpoint IDs, to identify late messages (vs invalid ones) */
	private final ArrayDeque<Long> recentPendingCheckpoints;

	/** Checkpoint ID counter to ensure ascending IDs. In case of job manager failures, these
	 * need to be ascending across job managers. */
	private final CheckpointIDCounter checkpointIdCounter;

	/** The base checkpoint interval. Actual trigger time may be affected by the
	 * max concurrent checkpoints and minimum-pause values */
	private final long baseInterval;

	/** The max time (in ms) that a checkpoint may take */
	private final long checkpointTimeout;

	/** The min time (in ms) to delay after a checkpoint could be triggered. Allows to
	 * enforce minimum processing time between checkpoint attempts */
	private final long minPauseBetweenCheckpointsNanos;

	/** The maximum number of checkpoints that may be in progress at the same time */
	private final int maxConcurrentCheckpointAttempts;

	/** The timer that handles the checkpoint timeouts and triggers periodic checkpoints */
	private final ScheduledThreadPoolExecutor timer;

	/** The master checkpoint hooks executed by this checkpoint coordinator */
	private final HashMap<String, MasterTriggerRestoreHook<?>> masterHooks;

	/** Actor that receives status updates from the execution graph this coordinator works for */
	private JobStatusListener jobStatusListener;

	/** The number of consecutive failed trigger attempts */
	private final AtomicInteger numUnsuccessfulCheckpointsTriggers = new AtomicInteger(0);

	/** A handle to the current periodic trigger, to cancel it when necessary */
	private ScheduledFuture<?> currentPeriodicTrigger;

	/** The timestamp (via {@link System#nanoTime()}) when the last checkpoint completed */
	private long lastCheckpointCompletionNanos;

	/** Flag whether a triggered checkpoint should immediately schedule the next checkpoint.
	 * Non-volatile, because only accessed in synchronized scope */
	private boolean periodicScheduling;

	/** Flag whether a trigger request could not be handled immediately. Non-volatile, because only
	 * accessed in synchronized scope */
	private boolean triggerRequestQueued;

	/** Flag marking the coordinator as shut down (not accepting any messages any more) */
	private volatile boolean shutdown;

	/** Optional tracker for checkpoint statistics.
*/ @Nullable private CheckpointStatsTracker statsTracker; /** A factory for SharedStateRegistry objects */ private final SharedStateRegistryFactory sharedStateRegistryFactory; /** Registry that tracks state which is shared across (incremental) checkpoints */ private SharedStateRegistry sharedStateRegistry; // -------------------------------------------------------------------------------------------- public CheckpointCoordinator( JobID job, long baseInterval, long checkpointTimeout, long minPauseBetweenCheckpoints, int maxConcurrentCheckpointAttempts, ExternalizedCheckpointSettings externalizeSettings, ExecutionVertex[] tasksToTrigger, ExecutionVertex[] tasksToWaitFor, ExecutionVertex[] tasksToCommitTo, CheckpointIDCounter checkpointIDCounter, CompletedCheckpointStore completedCheckpointStore, @Nullable String checkpointDirectory, Executor executor, SharedStateRegistryFactory sharedStateRegistryFactory) { // sanity checks checkArgument(baseInterval > 0, "Checkpoint timeout must be larger than zero"); checkArgument(checkpointTimeout >= 1, "Checkpoint timeout must be larger than zero"); checkArgument(minPauseBetweenCheckpoints >= 0, "minPauseBetweenCheckpoints must be >= 0"); checkArgument(maxConcurrentCheckpointAttempts >= 1, "maxConcurrentCheckpointAttempts must be >= 1"); if (externalizeSettings.externalizeCheckpoints() && checkpointDirectory == null) { throw new IllegalStateException("CheckpointConfig says to persist periodic " + "checkpoints, but no checkpoint directory has been configured. 
You can " + "configure configure one via key '" + CoreOptions.CHECKPOINTS_DIRECTORY.key() + "'."); } // max "in between duration" can be one year - this is to prevent numeric overflows if (minPauseBetweenCheckpoints > 365L * 24 * 60 * 60 * 1_000) { minPauseBetweenCheckpoints = 365L * 24 * 60 * 60 * 1_000; } // it does not make sense to schedule checkpoints more often then the desired // time between checkpoints if (baseInterval < minPauseBetweenCheckpoints) { baseInterval = minPauseBetweenCheckpoints; } this.job = checkNotNull(job); this.baseInterval = baseInterval; this.checkpointTimeout = checkpointTimeout; this.minPauseBetweenCheckpointsNanos = minPauseBetweenCheckpoints * 1_000_000; this.maxConcurrentCheckpointAttempts = maxConcurrentCheckpointAttempts; this.tasksToTrigger = checkNotNull(tasksToTrigger); this.tasksToWaitFor = checkNotNull(tasksToWaitFor); this.tasksToCommitTo = checkNotNull(tasksToCommitTo); this.pendingCheckpoints = new LinkedHashMap<>(); this.checkpointIdCounter = checkNotNull(checkpointIDCounter); this.completedCheckpointStore = checkNotNull(completedCheckpointStore); this.checkpointDirectory = checkpointDirectory; this.executor = checkNotNull(executor); this.sharedStateRegistryFactory = checkNotNull(sharedStateRegistryFactory); this.sharedStateRegistry = sharedStateRegistryFactory.create(executor); this.recentPendingCheckpoints = new ArrayDeque<>(NUM_GHOST_CHECKPOINT_IDS); this.masterHooks = new HashMap<>(); this.timer = new ScheduledThreadPoolExecutor(1, new DispatcherThreadFactory(Thread.currentThread().getThreadGroup(), "Checkpoint Timer")); // make sure the timer internally cleans up and does not hold onto stale scheduled tasks this.timer.setRemoveOnCancelPolicy(true); this.timer.setContinueExistingPeriodicTasksAfterShutdownPolicy(false); this.timer.setExecuteExistingDelayedTasksAfterShutdownPolicy(false); if (externalizeSettings.externalizeCheckpoints()) { LOG.info("Persisting periodic checkpoints externally at {}.", 
checkpointDirectory); checkpointProperties = CheckpointProperties.forExternalizedCheckpoint(externalizeSettings.deleteOnCancellation()); } else { checkpointProperties = CheckpointProperties.forStandardCheckpoint(); } try { // Make sure the checkpoint ID enumerator is running. Possibly // issues a blocking call to ZooKeeper. checkpointIDCounter.start(); } catch (Throwable t) { throw new RuntimeException("Failed to start checkpoint ID counter: " + t.getMessage(), t); } } // -------------------------------------------------------------------------------------------- // Configuration // -------------------------------------------------------------------------------------------- /** * Adds the given master hook to the checkpoint coordinator. This method does nothing, if * the checkpoint coordinator already contained a hook with the same ID (as defined via * {@link MasterTriggerRestoreHook#getIdentifier()}). * * @param hook The hook to add. * @return True, if the hook was added, false if the checkpoint coordinator already * contained a hook with the same ID. */ public boolean addMasterHook(MasterTriggerRestoreHook<?> hook) { checkNotNull(hook); final String id = hook.getIdentifier(); checkArgument(!StringUtils.isNullOrWhitespaceOnly(id), "The hook has a null or empty id"); synchronized (lock) { if (!masterHooks.containsKey(id)) { masterHooks.put(id, hook); return true; } else { return false; } } } /** * Gets the number of currently register master hooks. */ public int getNumberOfRegisteredMasterHooks() { synchronized (lock) { return masterHooks.size(); } } /** * Sets the checkpoint stats tracker. * * @param statsTracker The checkpoint stats tracker. 
*/ public void setCheckpointStatsTracker(@Nullable CheckpointStatsTracker statsTracker) { this.statsTracker = statsTracker; } // -------------------------------------------------------------------------------------------- // Clean shutdown // -------------------------------------------------------------------------------------------- /** * Shuts down the checkpoint coordinator. * * <p>After this method has been called, the coordinator does not accept * and further messages and cannot trigger any further checkpoints. */ public void shutdown(JobStatus jobStatus) throws Exception { synchronized (lock) { if (!shutdown) { shutdown = true; LOG.info("Stopping checkpoint coordinator for job " + job); periodicScheduling = false; triggerRequestQueued = false; // shut down the thread that handles the timeouts and pending triggers timer.shutdownNow(); // clear and discard all pending checkpoints for (PendingCheckpoint pending : pendingCheckpoints.values()) { pending.abortError(new Exception("Checkpoint Coordinator is shutting down")); } pendingCheckpoints.clear(); completedCheckpointStore.shutdown(jobStatus); checkpointIdCounter.shutdown(jobStatus); } } } public boolean isShutdown() { return shutdown; } // -------------------------------------------------------------------------------------------- // Handling checkpoints and messages // -------------------------------------------------------------------------------------------- /** * Triggers a savepoint with the given savepoint directory as a target. * * @param timestamp The timestamp for the savepoint. * @param targetDirectory Target directory for the savepoint. 
* @return A future to the completed checkpoint * @throws IllegalStateException If no savepoint directory has been * specified and no default savepoint directory has been * configured * @throws Exception Failures during triggering are forwarded */ public CompletableFuture<CompletedCheckpoint> triggerSavepoint(long timestamp, String targetDirectory) throws Exception { checkNotNull(targetDirectory, "Savepoint target directory"); CheckpointProperties props = CheckpointProperties.forStandardSavepoint(); // Create the unique savepoint directory final String savepointDirectory = SavepointStore .createSavepointDirectory(targetDirectory, job); CheckpointTriggerResult triggerResult = triggerCheckpoint( timestamp, props, savepointDirectory, false); CompletableFuture<CompletedCheckpoint> result; if (triggerResult.isSuccess()) { result = triggerResult.getPendingCheckpoint().getCompletionFuture(); } else { Throwable cause = new Exception("Failed to trigger savepoint: " + triggerResult.getFailureReason().message()); return FutureUtils.completedExceptionally(cause); } // Make sure to remove the created base directory on Exceptions result.whenCompleteAsync( (CompletedCheckpoint checkpoint, Throwable throwable) -> { if (throwable != null) { try { SavepointStore.deleteSavepointDirectory(savepointDirectory); } catch (Throwable t) { LOG.warn("Failed to delete savepoint directory " + savepointDirectory + " after failed savepoint.", t); } } }, executor); return result; } /** * Triggers a new standard checkpoint and uses the given timestamp as the checkpoint * timestamp. * * @param timestamp The timestamp for the checkpoint. * @param isPeriodic Flag indicating whether this triggered checkpoint is * periodic. If this flag is true, but the periodic scheduler is disabled, * the checkpoint will be declined. * @return <code>true</code> if triggering the checkpoint succeeded. 
	 */
	public boolean triggerCheckpoint(long timestamp, boolean isPeriodic) {
		return triggerCheckpoint(timestamp, checkpointProperties, checkpointDirectory, isPeriodic).isSuccess();
	}

	/**
	 * Test method to trigger a checkpoint/savepoint.
	 *
	 * @param timestamp The timestamp for the checkpoint.
	 * @param options The checkpoint options.
	 * @return A future to the completed checkpoint
	 */
	@VisibleForTesting
	@Internal
	public CompletableFuture<CompletedCheckpoint> triggerCheckpoint(long timestamp, CheckpointOptions options) throws Exception {
		switch (options.getCheckpointType()) {
			case SAVEPOINT:
				return triggerSavepoint(timestamp, options.getTargetLocation());

			case FULL_CHECKPOINT:
				CheckpointTriggerResult triggerResult =
					triggerCheckpoint(timestamp, checkpointProperties, checkpointDirectory, false);
				if (triggerResult.isSuccess()) {
					return triggerResult.getPendingCheckpoint().getCompletionFuture();
				}
				else {
					Throwable cause = new Exception("Failed to trigger checkpoint: " + triggerResult.getFailureReason().message());
					return FutureUtils.completedExceptionally(cause);
				}

			default:
				throw new IllegalArgumentException("Unknown checkpoint type: " + options.getCheckpointType());
		}
	}

	/**
	 * Performs the actual trigger work: eager pre-checks under the coordinator lock,
	 * checkpoint ID assignment under the dedicated trigger lock, registration of a
	 * {@link PendingCheckpoint} (with expiration canceller and master hooks), and
	 * finally sending the trigger messages to the tasks.
	 *
	 * @param timestamp The timestamp for the checkpoint.
	 * @param props Properties of the checkpoint/savepoint to trigger.
	 * @param targetDirectory Directory to persist to (required when externalizing).
	 * @param isPeriodic Whether this attempt comes from the periodic scheduler.
	 * @return The trigger result - either the pending checkpoint or a decline reason.
	 */
	@VisibleForTesting
	CheckpointTriggerResult triggerCheckpoint(
			long timestamp,
			CheckpointProperties props,
			String targetDirectory,
			boolean isPeriodic) {

		// Sanity check
		if (props.externalizeCheckpoint() && targetDirectory == null) {
			throw new IllegalStateException("No target directory specified to persist checkpoint to.");
		}

		// make some eager pre-checks
		synchronized (lock) {
			// abort if the coordinator has been shutdown in the meantime
			if (shutdown) {
				return new CheckpointTriggerResult(CheckpointDeclineReason.COORDINATOR_SHUTDOWN);
			}

			// Don't allow periodic checkpoint if scheduling has been disabled
			if (isPeriodic && !periodicScheduling) {
				return new CheckpointTriggerResult(CheckpointDeclineReason.PERIODIC_SCHEDULER_SHUTDOWN);
			}

			// validate whether the checkpoint can be triggered, with respect to the limit of
			// concurrent checkpoints, and the minimum time between checkpoints.
			// these checks are not relevant for savepoints
			if (!props.forceCheckpoint()) {
				// sanity check: there should never be more than one trigger request queued
				if (triggerRequestQueued) {
					LOG.warn("Trying to trigger another checkpoint while one was queued already");
					return new CheckpointTriggerResult(CheckpointDeclineReason.ALREADY_QUEUED);
				}

				// if too many checkpoints are currently in progress, we need to mark that a request is queued
				if (pendingCheckpoints.size() >= maxConcurrentCheckpointAttempts) {
					triggerRequestQueued = true;
					if (currentPeriodicTrigger != null) {
						currentPeriodicTrigger.cancel(false);
						currentPeriodicTrigger = null;
					}
					return new CheckpointTriggerResult(CheckpointDeclineReason.TOO_MANY_CONCURRENT_CHECKPOINTS);
				}

				// make sure the minimum interval between checkpoints has passed
				final long earliestNext = lastCheckpointCompletionNanos + minPauseBetweenCheckpointsNanos;
				final long durationTillNextMillis = (earliestNext - System.nanoTime()) / 1_000_000;

				if (durationTillNextMillis > 0) {
					if (currentPeriodicTrigger != null) {
						currentPeriodicTrigger.cancel(false);
						currentPeriodicTrigger = null;
					}

					// Reassign the new trigger to the currentPeriodicTrigger
					currentPeriodicTrigger = timer.scheduleAtFixedRate(
							new ScheduledTrigger(),
							durationTillNextMillis, baseInterval, TimeUnit.MILLISECONDS);

					return new CheckpointTriggerResult(CheckpointDeclineReason.MINIMUM_TIME_BETWEEN_CHECKPOINTS);
				}
			}
		}

		// check if all tasks that we need to trigger are running.
		// if not, abort the checkpoint
		Execution[] executions = new Execution[tasksToTrigger.length];
		for (int i = 0; i < tasksToTrigger.length; i++) {
			Execution ee = tasksToTrigger[i].getCurrentExecutionAttempt();
			if (ee != null && ee.getState() == ExecutionState.RUNNING) {
				executions[i] = ee;
			} else {
				LOG.info("Checkpoint triggering task {} is not being executed at the moment. Aborting checkpoint.", tasksToTrigger[i].getTaskNameWithSubtaskIndex());
				return new CheckpointTriggerResult(CheckpointDeclineReason.NOT_ALL_REQUIRED_TASKS_RUNNING);
			}
		}

		// next, check if all tasks that need to acknowledge the checkpoint are running.
		// if not, abort the checkpoint
		Map<ExecutionAttemptID, ExecutionVertex> ackTasks = new HashMap<>(tasksToWaitFor.length);

		for (ExecutionVertex ev : tasksToWaitFor) {
			Execution ee = ev.getCurrentExecutionAttempt();
			if (ee != null) {
				ackTasks.put(ee.getAttemptId(), ev);
			} else {
				LOG.info("Checkpoint acknowledging task {} is not being executed at the moment. Aborting checkpoint.", ev.getTaskNameWithSubtaskIndex());
				return new CheckpointTriggerResult(CheckpointDeclineReason.NOT_ALL_REQUIRED_TASKS_RUNNING);
			}
		}

		// we will actually trigger this checkpoint!

		// we lock with a special lock to make sure that trigger requests do not overtake each other.
		// this is not done with the coordinator-wide lock, because the 'checkpointIdCounter'
		// may issue blocking operations. Using a different lock than the coordinator-wide lock,
		// we avoid blocking the processing of 'acknowledge/decline' messages during that time.
		synchronized (triggerLock) {

			final long checkpointID;
			try {
				// this must happen outside the coordinator-wide lock, because it communicates
				// with external services (in HA mode) and may block for a while.
				checkpointID = checkpointIdCounter.getAndIncrement();
			}
			catch (Throwable t) {
				int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet();
				LOG.warn("Failed to trigger checkpoint (" + numUnsuccessful + " consecutive failed attempts so far)", t);
				return new CheckpointTriggerResult(CheckpointDeclineReason.EXCEPTION);
			}

			final PendingCheckpoint checkpoint = new PendingCheckpoint(
				job,
				checkpointID,
				timestamp,
				ackTasks,
				props,
				targetDirectory,
				executor);

			if (statsTracker != null) {
				PendingCheckpointStats callback = statsTracker.reportPendingCheckpoint(
					checkpointID,
					timestamp,
					props);

				checkpoint.setStatsCallback(callback);
			}

			// schedule the timer that will clean up the expired checkpoints
			final Runnable canceller = new Runnable() {
				@Override
				public void run() {
					synchronized (lock) {
						// only do the work if the checkpoint is not discarded anyways
						// note that checkpoint completion discards the pending checkpoint object
						if (!checkpoint.isDiscarded()) {
							LOG.info("Checkpoint " + checkpointID + " expired before completing.");

							checkpoint.abortExpired();
							pendingCheckpoints.remove(checkpointID);
							rememberRecentCheckpointId(checkpointID);

							triggerQueuedRequests();
						}
					}
				}
			};

			try {
				// re-acquire the coordinator-wide lock
				synchronized (lock) {
					// since we released the lock in the meantime, we need to re-check
					// that the conditions still hold.
					if (shutdown) {
						return new CheckpointTriggerResult(CheckpointDeclineReason.COORDINATOR_SHUTDOWN);
					}
					else if (!props.forceCheckpoint()) {
						if (triggerRequestQueued) {
							LOG.warn("Trying to trigger another checkpoint while one was queued already");
							return new CheckpointTriggerResult(CheckpointDeclineReason.ALREADY_QUEUED);
						}

						if (pendingCheckpoints.size() >= maxConcurrentCheckpointAttempts) {
							triggerRequestQueued = true;
							if (currentPeriodicTrigger != null) {
								currentPeriodicTrigger.cancel(false);
								currentPeriodicTrigger = null;
							}
							return new CheckpointTriggerResult(CheckpointDeclineReason.TOO_MANY_CONCURRENT_CHECKPOINTS);
						}

						// make sure the minimum interval between checkpoints has passed
						final long earliestNext = lastCheckpointCompletionNanos + minPauseBetweenCheckpointsNanos;
						final long durationTillNextMillis = (earliestNext - System.nanoTime()) / 1_000_000;

						if (durationTillNextMillis > 0) {
							if (currentPeriodicTrigger != null) {
								currentPeriodicTrigger.cancel(false);
								currentPeriodicTrigger = null;
							}

							// Reassign the new trigger to the currentPeriodicTrigger
							currentPeriodicTrigger = timer.scheduleAtFixedRate(
									new ScheduledTrigger(),
									durationTillNextMillis, baseInterval, TimeUnit.MILLISECONDS);

							return new CheckpointTriggerResult(CheckpointDeclineReason.MINIMUM_TIME_BETWEEN_CHECKPOINTS);
						}
					}

					LOG.info("Triggering checkpoint " + checkpointID + " @ " + timestamp);

					pendingCheckpoints.put(checkpointID, checkpoint);

					ScheduledFuture<?> cancellerHandle = timer.schedule(
							canceller,
							checkpointTimeout, TimeUnit.MILLISECONDS);

					if (!checkpoint.setCancellerHandle(cancellerHandle)) {
						// checkpoint is already disposed!
						cancellerHandle.cancel(false);
					}

					// trigger the master hooks for the checkpoint
					final List<MasterState> masterStates = MasterHooks.triggerMasterHooks(masterHooks.values(),
							checkpointID, timestamp, executor, Time.milliseconds(checkpointTimeout));
					for (MasterState s : masterStates) {
						checkpoint.addMasterState(s);
					}
				}
				// end of lock scope

				CheckpointOptions checkpointOptions;
				if (!props.isSavepoint()) {
					checkpointOptions = CheckpointOptions.forFullCheckpoint();
				} else {
					checkpointOptions = CheckpointOptions.forSavepoint(targetDirectory);
				}

				// send the messages to the tasks that trigger their checkpoint
				for (Execution execution: executions) {
					execution.triggerCheckpoint(checkpointID, timestamp, checkpointOptions);
				}

				numUnsuccessfulCheckpointsTriggers.set(0);
				return new CheckpointTriggerResult(checkpoint);
			}
			catch (Throwable t) {
				// guard the map against concurrent modifications
				synchronized (lock) {
					pendingCheckpoints.remove(checkpointID);
				}

				int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet();
				LOG.warn("Failed to trigger checkpoint {}. ({} consecutive failed attempts so far)", checkpointID, numUnsuccessful, t);

				if (!checkpoint.isDiscarded()) {
					checkpoint.abortError(new Exception("Failed to trigger checkpoint", t));
				}
				return new CheckpointTriggerResult(CheckpointDeclineReason.EXCEPTION);
			}

		} // end trigger lock
	}

	/**
	 * Receives a {@link DeclineCheckpoint} message for a pending checkpoint.
	 *
	 * @param message Checkpoint decline from the task manager
	 */
	public void receiveDeclineMessage(DeclineCheckpoint message) {
		if (shutdown || message == null) {
			return;
		}
		if (!job.equals(message.getJob())) {
			throw new IllegalArgumentException("Received DeclineCheckpoint message for job " +
				message.getJob() + " while this coordinator handles job " + job);
		}

		final long checkpointId = message.getCheckpointId();
		final String reason = (message.getReason() != null ? message.getReason().getMessage() : "");

		PendingCheckpoint checkpoint;

		synchronized (lock) {
			// we need to check inside the lock for being shutdown as well, otherwise we
			// get races and invalid error log messages
			if (shutdown) {
				return;
			}

			checkpoint = pendingCheckpoints.get(checkpointId);

			if (checkpoint != null && !checkpoint.isDiscarded()) {
				LOG.info("Discarding checkpoint {} because of checkpoint decline from task {} : {}", checkpointId, message.getTaskExecutionId(), reason);

				pendingCheckpoints.remove(checkpointId);
				checkpoint.abortDeclined();
				rememberRecentCheckpointId(checkpointId);

				// we don't have to schedule another "dissolving" checkpoint any more because the
				// cancellation barriers take care of breaking downstream alignments
				// we only need to make sure that suspended queued requests are resumed

				boolean haveMoreRecentPending = false;
				for (PendingCheckpoint p : pendingCheckpoints.values()) {
					if (!p.isDiscarded() && p.getCheckpointId() >= checkpoint.getCheckpointId()) {
						haveMoreRecentPending = true;
						break;
					}
				}

				if (!haveMoreRecentPending) {
					triggerQueuedRequests();
				}
			}
			else if (checkpoint != null) {
				// this should not happen
				throw new IllegalStateException(
					"Received message for discarded but non-removed checkpoint " + checkpointId);
			}
			else if (LOG.isDebugEnabled()) {
				if (recentPendingCheckpoints.contains(checkpointId)) {
					// message is for an unknown checkpoint, or comes too late (checkpoint disposed)
					LOG.debug("Received another decline message for now expired checkpoint attempt {} : {}", checkpointId, reason);
				} else {
					// message is for an unknown checkpoint. might be so old that we don't even remember it any more
					LOG.debug("Received decline message for unknown (too old?) checkpoint attempt {} : {}", checkpointId, reason);
				}
			}
		}
	}

	/**
	 * Receives an AcknowledgeCheckpoint message and returns whether the
	 * message was associated with a pending checkpoint.
* * @param message Checkpoint ack from the task manager * * @return Flag indicating whether the ack'd checkpoint was associated * with a pending checkpoint. * * @throws CheckpointException If the checkpoint cannot be added to the completed checkpoint store. */ public boolean receiveAcknowledgeMessage(AcknowledgeCheckpoint message) throws CheckpointException { if (shutdown || message == null) { return false; } if (!job.equals(message.getJob())) { LOG.error("Received wrong AcknowledgeCheckpoint message for job {}: {}", job, message); return false; } final long checkpointId = message.getCheckpointId(); synchronized (lock) { // we need to check inside the lock for being shutdown as well, otherwise we // get races and invalid error log messages if (shutdown) { return false; } final PendingCheckpoint checkpoint = pendingCheckpoints.get(checkpointId); if (checkpoint != null && !checkpoint.isDiscarded()) { switch (checkpoint.acknowledgeTask(message.getTaskExecutionId(), message.getSubtaskState(), message.getCheckpointMetrics())) { case SUCCESS: LOG.debug("Received acknowledge message for checkpoint {} from task {} of job {}.", checkpointId, message.getTaskExecutionId(), message.getJob()); if (checkpoint.isFullyAcknowledged()) { completePendingCheckpoint(checkpoint); } break; case DUPLICATE: LOG.debug("Received a duplicate acknowledge message for checkpoint {}, task {}, job {}.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob()); break; case UNKNOWN: LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {}, " + "because the task's execution attempt id was unknown. 
Discarding " + "the state handle to avoid lingering state.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob()); discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); break; case DISCARDED: LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {}, " + "because the pending checkpoint had been discarded. Discarding the " + "state handle tp avoid lingering state.", message.getCheckpointId(), message.getTaskExecutionId(), message.getJob()); discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); } return true; } else if (checkpoint != null) { // this should not happen throw new IllegalStateException( "Received message for discarded but non-removed checkpoint " + checkpointId); } else { boolean wasPendingCheckpoint; // message is for an unknown checkpoint, or comes too late (checkpoint disposed) if (recentPendingCheckpoints.contains(checkpointId)) { wasPendingCheckpoint = true; LOG.warn("Received late message for now expired checkpoint attempt {} from " + "{} of job {}.", checkpointId, message.getTaskExecutionId(), message.getJob()); } else { LOG.debug("Received message for an unknown checkpoint {} from {} of job {}.", checkpointId, message.getTaskExecutionId(), message.getJob()); wasPendingCheckpoint = false; } // try to discard the state so that we don't have lingering state lying around discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState()); return wasPendingCheckpoint; } } } /** * Try to complete the given pending checkpoint. * * Important: This method should only be called in the checkpoint lock scope. 
 *
 * @param pendingCheckpoint to complete
 * @throws CheckpointException if the completion failed
 */
private void completePendingCheckpoint(PendingCheckpoint pendingCheckpoint) throws CheckpointException {
	final long checkpointId = pendingCheckpoint.getCheckpointId();
	final CompletedCheckpoint completedCheckpoint;

	// As a first step to complete the checkpoint, we register its state with the registry
	Map<OperatorID, OperatorState> operatorStates = pendingCheckpoint.getOperatorStates();
	sharedStateRegistry.registerAll(operatorStates.values());

	try {
		try {
			// externalize the checkpoint if required
			if (pendingCheckpoint.getProps().externalizeCheckpoint()) {
				completedCheckpoint = pendingCheckpoint.finalizeCheckpointExternalized();
			} else {
				completedCheckpoint = pendingCheckpoint.finalizeCheckpointNonExternalized();
			}
		} catch (Exception e1) {
			// abort the current pending checkpoint if we fails to finalize the pending checkpoint.
			if (!pendingCheckpoint.isDiscarded()) {
				pendingCheckpoint.abortError(e1);
			}
			throw new CheckpointException("Could not finalize the pending checkpoint " + checkpointId + '.', e1);
		}

		// the pending checkpoint must be discarded after the finalization
		Preconditions.checkState(pendingCheckpoint.isDiscarded() && completedCheckpoint != null);

		// TODO: add savepoints to completed checkpoint store once FLINK-4815 has been completed
		if (!completedCheckpoint.getProperties().isSavepoint()) {
			try {
				completedCheckpointStore.addCheckpoint(completedCheckpoint);
			} catch (Exception exception) {
				// we failed to store the completed checkpoint. Let's clean up
				// asynchronously, so the coordinator thread is not blocked on state disposal.
				executor.execute(new Runnable() {
					@Override
					public void run() {
						try {
							completedCheckpoint.discardOnFailedStoring();
						} catch (Throwable t) {
							LOG.warn("Could not properly discard completed checkpoint {}.", completedCheckpoint.getCheckpointID(), t);
						}
					}
				});
				throw new CheckpointException("Could not complete the pending checkpoint " + checkpointId + '.', exception);
			}

			// drop those pending checkpoints that are at prior to the completed one
			dropSubsumedCheckpoints(checkpointId);
		}
	} finally {
		// Always unregister this checkpoint and release any queued trigger request,
		// whether finalization succeeded or not.
		pendingCheckpoints.remove(checkpointId);
		triggerQueuedRequests();
	}

	rememberRecentCheckpointId(checkpointId);

	// record the time when this was completed, to calculate
	// the 'min delay between checkpoints'
	lastCheckpointCompletionNanos = System.nanoTime();

	LOG.info("Completed checkpoint {} ({} bytes in {} ms).", checkpointId,
		completedCheckpoint.getStateSize(), completedCheckpoint.getDuration());

	if (LOG.isDebugEnabled()) {
		StringBuilder builder = new StringBuilder();
		builder.append("Checkpoint state: ");
		for (OperatorState state : completedCheckpoint.getOperatorStates().values()) {
			builder.append(state);
			builder.append(", ");
		}
		// Remove last two chars ", "
		builder.setLength(builder.length() - 2);

		LOG.debug(builder.toString());
	}

	// send the "notify complete" call to all vertices
	final long timestamp = completedCheckpoint.getTimestamp();

	for (ExecutionVertex ev : tasksToCommitTo) {
		Execution ee = ev.getCurrentExecutionAttempt();
		if (ee != null) {
			ee.notifyCheckpointComplete(checkpointId, timestamp);
		}
	}
}

/**
 * Remembers a checkpoint id as recently handled, so late acknowledgements for it can be
 * told apart from messages for completely unknown checkpoints. Keeps at most
 * NUM_GHOST_CHECKPOINT_IDS entries, evicting the oldest.
 */
private void rememberRecentCheckpointId(long id) {
	if (recentPendingCheckpoints.size() >= NUM_GHOST_CHECKPOINT_IDS) {
		recentPendingCheckpoints.removeFirst();
	}
	recentPendingCheckpoints.addLast(id);
}

/**
 * Aborts (as subsumed) and removes every pending checkpoint whose id is lower than the
 * given completed checkpoint id, provided it may be subsumed.
 */
private void dropSubsumedCheckpoints(long checkpointId) {
	Iterator<Map.Entry<Long, PendingCheckpoint>> entries = pendingCheckpoints.entrySet().iterator();

	while (entries.hasNext()) {
		PendingCheckpoint p = entries.next().getValue();
		// remove all pending checkpoints that are lesser than the current completed checkpoint
		if (p.getCheckpointId() < checkpointId && p.canBeSubsumed()) {
			rememberRecentCheckpointId(p.getCheckpointId());
			p.abortSubsumed();
			entries.remove();
		}
	}
}

/**
 * Triggers the queued request, if there is one.
 *
 * <p>NOTE: The caller of this method must hold the lock when invoking the method!
 */
private void triggerQueuedRequests() {
	if (triggerRequestQueued) {
		triggerRequestQueued = false;

		// trigger the checkpoint from the trigger timer, to finish the work of this thread before
		// starting with the next checkpoint
		if (periodicScheduling) {
			if (currentPeriodicTrigger != null) {
				currentPeriodicTrigger.cancel(false);
			}
			currentPeriodicTrigger = timer.scheduleAtFixedRate(
					new ScheduledTrigger(),
					0L, baseInterval, TimeUnit.MILLISECONDS);
		} else {
			timer.execute(new ScheduledTrigger());
		}
	}
}

// Number of tasks currently queued on the checkpoint timer (test-only visibility).
@VisibleForTesting
int getNumScheduledTasks() {
	return timer.getQueue().size();
}

// --------------------------------------------------------------------------------------------
//  Checkpoint State Restoring
// --------------------------------------------------------------------------------------------

/**
 * Restores the latest checkpointed state.
 *
 * @param tasks Map of job vertices to restore. State for these vertices is
 * restored via {@link Execution#setInitialState(TaskStateSnapshot)}.
 * @param errorIfNoCheckpoint Fail if no completed checkpoint is available to
 * restore from.
 * @param allowNonRestoredState Allow checkpoint state that cannot be mapped
 * to any job vertex in tasks.
 * @return <code>true</code> if state was restored, <code>false</code> otherwise.
 * @throws IllegalStateException If the CheckpointCoordinator is shut down.
 * @throws IllegalStateException If no completed checkpoint is available and
 * the <code>failIfNoCheckpoint</code> flag has been set.
 * @throws IllegalStateException If the checkpoint contains state that cannot be
 * mapped to any job vertex in <code>tasks</code> and the
 * <code>allowNonRestoredState</code> flag has not been set.
 * @throws IllegalStateException If the max parallelism changed for an operator
 * that restores state from this checkpoint.
 * @throws IllegalStateException If the parallelism changed for an operator
 * that restores <i>non-partitioned</i> state from this
 * checkpoint.
 */
public boolean restoreLatestCheckpointedState(
		Map<JobVertexID, ExecutionJobVertex> tasks,
		boolean errorIfNoCheckpoint,
		boolean allowNonRestoredState) throws Exception {

	synchronized (lock) {
		if (shutdown) {
			throw new IllegalStateException("CheckpointCoordinator is shut down");
		}

		// We create a new shared state registry object, so that all pending async disposal requests from previous
		// runs will go against the old object (were they can do no harm).
		// This must happen under the checkpoint lock.
		sharedStateRegistry.close();
		sharedStateRegistry = sharedStateRegistryFactory.create(executor);

		// Recover the checkpoints, TODO this could be done only when there is a new leader, not on each recovery
		completedCheckpointStore.recover();

		// Now, we re-register all (shared) states from the checkpoint store with the new registry
		for (CompletedCheckpoint completedCheckpoint : completedCheckpointStore.getAllCheckpoints()) {
			completedCheckpoint.registerSharedStatesAfterRestored(sharedStateRegistry);
		}

		LOG.debug("Status of the shared state registry after restore: {}.", sharedStateRegistry);

		// Restore from the latest checkpoint
		CompletedCheckpoint latest = completedCheckpointStore.getLatestCheckpoint();

		if (latest == null) {
			if (errorIfNoCheckpoint) {
				throw new IllegalStateException("No completed checkpoint available");
			} else {
				return false;
			}
		}

		LOG.info("Restoring from latest valid checkpoint: {}.", latest);

		// re-assign the task states
		final Map<OperatorID, OperatorState> operatorStates = latest.getOperatorStates();

		StateAssignmentOperation stateAssignmentOperation =
				new StateAssignmentOperation(tasks, operatorStates, allowNonRestoredState);

		stateAssignmentOperation.assignStates();

		// call master hooks for restore
		MasterHooks.restoreMasterHooks(
				masterHooks,
				latest.getMasterHookStates(),
				latest.getCheckpointID(),
				allowNonRestoredState,
				LOG);

		// update metrics
		if (statsTracker != null) {
			long restoreTimestamp = System.currentTimeMillis();
			RestoredCheckpointStats restored = new RestoredCheckpointStats(
				latest.getCheckpointID(),
				latest.getProperties(),
				restoreTimestamp,
				latest.getExternalPointer());

			statsTracker.reportRestoredCheckpoint(restored);
		}

		return true;
	}
}

/**
 * Restore the state with given savepoint
 *
 * @param savepointPath Location of the savepoint
 * @param allowNonRestored True if allowing checkpoint state that cannot be
 * mapped to any job vertex in tasks.
 * @param tasks Map of job vertices to restore. State for these
 * vertices is restored via
 * {@link Execution#setInitialState(TaskStateSnapshot)}.
 * @param userClassLoader The class loader to resolve serialized classes in
 * legacy savepoint versions.
 */
public boolean restoreSavepoint(
		String savepointPath,
		boolean allowNonRestored,
		Map<JobVertexID, ExecutionJobVertex> tasks,
		ClassLoader userClassLoader) throws Exception {

	Preconditions.checkNotNull(savepointPath, "The savepoint path cannot be null.");

	LOG.info("Starting job from savepoint {} ({})",
			savepointPath, (allowNonRestored ? "allowing non restored state" : ""));

	// Load the savepoint as a checkpoint into the system
	CompletedCheckpoint savepoint = SavepointLoader.loadAndValidateSavepoint(
			job, tasks, savepointPath, userClassLoader, allowNonRestored);

	completedCheckpointStore.addCheckpoint(savepoint);

	// Reset the checkpoint ID counter
	long nextCheckpointId = savepoint.getCheckpointID() + 1;
	checkpointIdCounter.setCount(nextCheckpointId);

	LOG.info("Reset the checkpoint ID to {}.", nextCheckpointId);

	return restoreLatestCheckpointedState(tasks, true, allowNonRestored);
}

// ------------------------------------------------------------------------
//  Accessors
// ------------------------------------------------------------------------

public int getNumberOfPendingCheckpoints() {
	return this.pendingCheckpoints.size();
}

public int getNumberOfRetainedSuccessfulCheckpoints() {
	synchronized (lock) {
		return completedCheckpointStore.getNumberOfRetainedCheckpoints();
	}
}

// Returns a snapshot copy, so callers cannot mutate the coordinator's internal map.
public Map<Long, PendingCheckpoint> getPendingCheckpoints() {
	synchronized (lock) {
		return new HashMap<>(this.pendingCheckpoints);
	}
}

public List<CompletedCheckpoint> getSuccessfulCheckpoints() throws Exception {
	synchronized (lock) {
		return completedCheckpointStore.getAllCheckpoints();
	}
}

public CompletedCheckpointStore getCheckpointStore() {
	return completedCheckpointStore;
}

public CheckpointIDCounter getCheckpointIdCounter() {
	return checkpointIdCounter;
}

public long getCheckpointTimeout() {
	return checkpointTimeout;
}

// --------------------------------------------------------------------------------------------
//  Periodic scheduling of checkpoints
// --------------------------------------------------------------------------------------------

public void startCheckpointScheduler() {
	synchronized (lock) {
		if (shutdown) {
			// NOTE(review): IllegalStateException would better describe a shut-down coordinator;
			// kept as IllegalArgumentException since callers may depend on the current type.
			throw new IllegalArgumentException("Checkpoint coordinator is shut down");
		}

		// make sure all prior timers are cancelled
		stopCheckpointScheduler();

		periodicScheduling = true;
		currentPeriodicTrigger = timer.scheduleAtFixedRate(
				new ScheduledTrigger(),
				baseInterval, baseInterval, TimeUnit.MILLISECONDS);
	}
}

public void stopCheckpointScheduler() {
	synchronized (lock) {
		triggerRequestQueued = false;
		periodicScheduling = false;

		if (currentPeriodicTrigger != null) {
			currentPeriodicTrigger.cancel(false);
			currentPeriodicTrigger = null;
		}

		// Abort every in-flight checkpoint; the coordinator is suspending.
		for (PendingCheckpoint p : pendingCheckpoints.values()) {
			p.abortError(new Exception("Checkpoint Coordinator is suspending."));
		}

		pendingCheckpoints.clear();
		numUnsuccessfulCheckpointsTriggers.set(0);
	}
}

// ------------------------------------------------------------------------
//  job status listener that schedules / cancels periodic checkpoints
// ------------------------------------------------------------------------

public JobStatusListener createActivatorDeactivator() {
	synchronized (lock) {
		if (shutdown) {
			// NOTE(review): same exception-type remark as in startCheckpointScheduler().
			throw new IllegalArgumentException("Checkpoint coordinator is shut down");
		}

		if (jobStatusListener == null) {
			jobStatusListener = new CheckpointCoordinatorDeActivator(this);
		}

		return jobStatusListener;
	}
}

// ------------------------------------------------------------------------

// Runnable executed by the checkpoint timer; triggers a periodic checkpoint
// and logs (rather than propagates) any failure.
private final class ScheduledTrigger implements Runnable {

	@Override
	public void run() {
		try {
			triggerCheckpoint(System.currentTimeMillis(), true);
		} catch (Exception e) {
			LOG.error("Exception while triggering checkpoint.", e);
		}
	}
}

/**
 * Discards the given state object asynchronously belonging to the given job, execution attempt
 * id and checkpoint id.
 *
 * @param jobId identifying the job to which the state object belongs
 * @param executionAttemptID identifying the task to which the state object belongs
 * @param checkpointId of the state object
 * @param subtaskState to discard asynchronously
 */
private void discardSubtaskState(
		final JobID jobId,
		final ExecutionAttemptID executionAttemptID,
		final long checkpointId,
		final TaskStateSnapshot subtaskState) {

	if (subtaskState != null) {
		executor.execute(new Runnable() {
			@Override
			public void run() {

				try {
					subtaskState.discardState();
				} catch (Throwable t2) {
					LOG.warn("Could not properly discard state object of checkpoint {} " +
						"belonging to task {} of job {}.", checkpointId, executionAttemptID, jobId, t2);
				}
			}
		});
	}
}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.store.parquet.columnreaders; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.drill.common.exceptions.DrillRuntimeException; import org.apache.drill.common.exceptions.ExecutionSetupException; import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.common.types.TypeProtos; import org.apache.drill.common.types.TypeProtos.DataMode; import org.apache.drill.common.types.TypeProtos.MajorType; import org.apache.drill.common.types.Types; import org.apache.drill.exec.exception.OutOfMemoryException; import org.apache.drill.exec.expr.TypeHelper; import org.apache.drill.exec.ops.FragmentContext; import org.apache.drill.exec.ops.OperatorContext; import org.apache.drill.exec.physical.impl.OutputMutator; import org.apache.drill.exec.record.MaterializedField; import org.apache.drill.exec.record.MaterializedField.Key; import org.apache.drill.exec.store.AbstractRecordReader; import org.apache.drill.exec.vector.AllocationHelper; import org.apache.drill.exec.vector.NullableIntVector; import 
org.apache.drill.exec.vector.complex.RepeatedValueVector;
import org.apache.drill.exec.vector.ValueVector;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.column.ColumnDescriptor;
import org.apache.parquet.format.FileMetaData;
import org.apache.parquet.format.SchemaElement;
import org.apache.parquet.format.converter.ParquetMetadataConverter;
import org.apache.parquet.hadoop.CodecFactory;
import org.apache.parquet.hadoop.ParquetFileWriter;
import org.apache.parquet.hadoop.metadata.BlockMetaData;
import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData;
import org.apache.parquet.hadoop.metadata.ParquetMetadata;
import org.apache.parquet.schema.PrimitiveType;

import com.google.common.collect.Lists;

/**
 * Record reader that materializes one Parquet row group into Drill value vectors.
 * Fixed-width columns are read directly; variable-length (BINARY) and repeated
 * columns go through {@link VarLenBinaryReader}. Projected columns that do not
 * exist in the file are returned as null-filled INT vectors.
 */
public class ParquetRecordReader extends AbstractRecordReader {
  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ParquetRecordReader.class);

  // this value has been inflated to read in multiple value vectors at once, and then break them up into smaller vectors
  private static final int NUMBER_OF_VECTORS = 1;
  private static final long DEFAULT_BATCH_LENGTH = 256 * 1024 * NUMBER_OF_VECTORS; // 256kb
  private static final long DEFAULT_BATCH_LENGTH_IN_BITS = DEFAULT_BATCH_LENGTH * 8; // 256kb
  // Record count per batch when not all fields are fixed width.
  // FIX: was declared as 'char' — a character type is the wrong vehicle for a record
  // count; 'int' keeps the identical value (32768) with the intended semantics.
  private static final int DEFAULT_RECORDS_TO_READ_IF_NOT_FIXED_WIDTH = 32*1024;

  // TODO - should probably find a smarter way to set this, currently 1 megabyte
  public static final int PARQUET_PAGE_MAX_SIZE = 1024 * 1024 * 1;

  // used for clearing the last n bits of a byte
  public static final byte[] endBitMasks = {-2, -4, -8, -16, -32, -64, -128};
  // used for clearing the first n bits of a byte
  public static final byte[] startBitMasks = {127, 63, 31, 15, 7, 3, 1};

  private int bitWidthAllFixedFields;
  private boolean allFieldsFixedLength;
  private int recordsPerBatch;
  private OperatorContext operatorContext;
//  private long totalRecords;
//  private long rowGroupOffset;

  private List<ColumnReader<?>> columnStatuses;
  private FileSystem fileSystem;
  private long batchSize;
  Path hadoopPath;
  private VarLenBinaryReader varLengthReader;
  private ParquetMetadata footer;
  // This is a parallel list to the columns list above, it is used to determine the subset of the project
  // pushdown columns that do not appear in this file
  private boolean[] columnsFound;
  // For columns not found in the file, we need to return a schema element with the correct number of values
  // at that position in the schema. Currently this requires a vector be present. Here is a list of all of these vectors
  // that need only have their value count set at the end of each call to next(), as the values default to null.
  private List<NullableIntVector> nullFilledVectors;
  // Keeps track of the number of records returned in the case where only columns outside of the file were selected.
  // No actual data needs to be read out of the file, we only need to return batches until we have 'read' the number of
  // records specified in the row group metadata
  long mockRecordsRead;

  private final CodecFactory codecFactory;
  int rowGroupIndex;
  long totalRecordsRead;
  private final FragmentContext fragmentContext;

  /** Convenience constructor using the default batch size (in bits). */
  public ParquetRecordReader(FragmentContext fragmentContext,
      String path,
      int rowGroupIndex,
      FileSystem fs,
      CodecFactory codecFactory,
      ParquetMetadata footer,
      List<SchemaPath> columns) throws ExecutionSetupException {
    this(fragmentContext, DEFAULT_BATCH_LENGTH_IN_BITS, path, rowGroupIndex, fs, codecFactory, footer, columns);
  }

  /**
   * @param batchSize target batch size in bits for fixed-width batches
   * @param rowGroupIndex index of the row group this reader is responsible for
   */
  public ParquetRecordReader(
      FragmentContext fragmentContext,
      long batchSize,
      String path,
      int rowGroupIndex,
      FileSystem fs,
      CodecFactory codecFactory,
      ParquetMetadata footer,
      List<SchemaPath> columns) throws ExecutionSetupException {
    this.hadoopPath = new Path(path);
    this.fileSystem = fs;
    this.codecFactory = codecFactory;
    this.rowGroupIndex = rowGroupIndex;
    this.batchSize = batchSize;
    this.footer = footer;
    this.fragmentContext = fragmentContext;
    setColumns(columns);
  }

  public CodecFactory getCodecFactory() {
    return codecFactory;
  }

  public Path getHadoopPath() {
    return hadoopPath;
  }

  public FileSystem getFileSystem() {
    return fileSystem;
  }

  public int getRowGroupIndex() {
    return rowGroupIndex;
  }

  public int getBitWidthAllFixedFields() {
    return bitWidthAllFixedFields;
  }

  public long getBatchSize() {
    return batchSize;
  }

  /**
   * @param type a fixed length type from the parquet library enum
   * @return the length in pageDataByteArray of the type
   */
  public static int getTypeLengthInBits(PrimitiveType.PrimitiveTypeName type) {
    switch (type) {
      case INT64:   return 64;
      case INT32:   return 32;
      case BOOLEAN: return 1;
      case FLOAT:   return 32;
      case DOUBLE:  return 64;
      case INT96:   return 96;
      // binary and fixed length byte array
      default:
        throw new IllegalStateException("Length cannot be determined for type " + type);
    }
  }

  /**
   * Returns true if the field is part of the projection (or this is a star query),
   * marking the corresponding slot in {@code columnsFound} as a side effect.
   */
  private boolean fieldSelected(MaterializedField field) {
    // TODO - not sure if this is how we want to represent this
    // for now it makes the existing tests pass, simply selecting
    // all available data if no columns are provided
    if (isStarQuery()) {
      return true;
    }

    int i = 0;
    for (SchemaPath expr : getColumns()) {
      if ( field.matches(expr)) {
        columnsFound[i] = true;
        return true;
      }
      i++;
    }
    return false;
  }

  public OperatorContext getOperatorContext() {
    return operatorContext;
  }

  /**
   * Builds the column readers for this row group: sums fixed-field widths to size
   * batches, creates fixed/variable-length readers per column, and registers
   * null-filled vectors for projected columns missing from the file.
   */
  @Override
  public void setup(OperatorContext operatorContext, OutputMutator output) throws ExecutionSetupException {
    this.operatorContext = operatorContext;
    if (!isStarQuery()) {
      columnsFound = new boolean[getColumns().size()];
      nullFilledVectors = new ArrayList<>();
    }
    columnStatuses = new ArrayList<>();
//    totalRecords = footer.getBlocks().get(rowGroupIndex).getRowCount();
    List<ColumnDescriptor> columns = footer.getFileMetaData().getSchema().getColumns();
    allFieldsFixedLength = true;
    ColumnDescriptor column;
    ColumnChunkMetaData columnChunkMetaData;
    int columnsToScan = 0;
    mockRecordsRead = 0;

    MaterializedField field;
//    ParquetMetadataConverter metaConverter = new ParquetMetadataConverter();
    FileMetaData fileMetaData;

    logger.debug("Reading row group({}) with {} records in file {}.", rowGroupIndex, footer.getBlocks().get(rowGroupIndex).getRowCount(),
        hadoopPath.toUri().getPath());
    totalRecordsRead = 0;

    // TODO - figure out how to deal with this better once we add nested reading, note also look where this map is used below
    // store a map from column name to converted types if they are non-null
    HashMap<String, SchemaElement> schemaElements = new HashMap<>();
    fileMetaData = new ParquetMetadataConverter().toParquetMetadata(ParquetFileWriter.CURRENT_VERSION, footer);
    for (SchemaElement se : fileMetaData.getSchema()) {
      schemaElements.put(se.getName(), se);
    }

    // loop to add up the length of the fixed width columns and build the schema
    for (int i = 0; i < columns.size(); ++i) {
      column = columns.get(i);
      logger.debug("name: " + fileMetaData.getSchema().get(i).name);
      SchemaElement se = schemaElements.get(column.getPath()[0]);
      MajorType mt = ParquetToDrillTypeConverter.toMajorType(column.getType(), se.getType_length(), getDataMode(column), se, fragmentContext.getOptions());
      field = MaterializedField.create(toFieldName(column.getPath()),mt);
      if ( ! fieldSelected(field)) {
        continue;
      }
      columnsToScan++;
      // sum the lengths of all of the fixed length fields
      if (column.getType() != PrimitiveType.PrimitiveTypeName.BINARY) {
        // repeated columns are handled by the variable-length path even if fixed width
        if (column.getMaxRepetitionLevel() > 0) {
          allFieldsFixedLength = false;
        }
        if (column.getType() == PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY) {
          bitWidthAllFixedFields += se.getType_length() * 8;
        } else {
          bitWidthAllFixedFields += getTypeLengthInBits(column.getType());
        }
      } else {
        allFieldsFixedLength = false;
      }
    }
//    rowGroupOffset = footer.getBlocks().get(rowGroupIndex).getColumns().get(0).getFirstDataPageOffset();

    if (columnsToScan != 0 && allFieldsFixedLength) {
      // NOTE(review): this uses getBlocks().get(0) while the rest of this method uses
      // getBlocks().get(rowGroupIndex) — looks like it should be the current row group;
      // confirm before changing, as it only affects the batch-size estimate.
      recordsPerBatch = (int) Math.min(Math.min(batchSize / bitWidthAllFixedFields,
          footer.getBlocks().get(0).getColumns().get(0).getValueCount()), 65535);
    } else {
      recordsPerBatch = DEFAULT_RECORDS_TO_READ_IF_NOT_FIXED_WIDTH;
    }

    try {
      ValueVector vector;
      SchemaElement schemaElement;
      final ArrayList<VarLengthColumn> varLengthColumns = new ArrayList<>();
      // initialize all of the column read status objects
      boolean fieldFixedLength;
      // the column chunk meta-data is not guaranteed to be in the same order as the columns in the schema
      // a map is constructed for fast access to the correct columnChunkMetadata to correspond
      // to an element in the schema
      // FIX: parameterized the previously raw 'new HashMap()' instantiation.
      Map<String, Integer> columnChunkMetadataPositionsInList = new HashMap<>();
      BlockMetaData rowGroupMetadata = footer.getBlocks().get(rowGroupIndex);

      int colChunkIndex = 0;
      for (ColumnChunkMetaData colChunk : rowGroupMetadata.getColumns()) {
        columnChunkMetadataPositionsInList.put(Arrays.toString(colChunk.getPath().toArray()), colChunkIndex);
        colChunkIndex++;
      }
      for (int i = 0; i < columns.size(); ++i) {
        column = columns.get(i);
        columnChunkMetaData = rowGroupMetadata.getColumns().get(columnChunkMetadataPositionsInList.get(Arrays.toString(column.getPath())));
        schemaElement = schemaElements.get(column.getPath()[0]);
        MajorType type = ParquetToDrillTypeConverter.toMajorType(column.getType(), schemaElement.getType_length(),
            getDataMode(column), schemaElement, fragmentContext.getOptions());
        field = MaterializedField.create(toFieldName(column.getPath()), type);
        // the field was not requested to be read
        if ( ! fieldSelected(field)) {
          continue;
        }

        fieldFixedLength = column.getType() != PrimitiveType.PrimitiveTypeName.BINARY;
        vector = output.addField(field, (Class<? extends ValueVector>) TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()));
        if (column.getType() != PrimitiveType.PrimitiveTypeName.BINARY) {
          if (column.getMaxRepetitionLevel() > 0) {
            final RepeatedValueVector repeatedVector = RepeatedValueVector.class.cast(vector);
            ColumnReader dataReader = ColumnReaderFactory.createFixedColumnReader(this, fieldFixedLength,
                column, columnChunkMetaData, recordsPerBatch,
                repeatedVector.getDataVector(), schemaElement);
            varLengthColumns.add(new FixedWidthRepeatedReader(this, dataReader,
                getTypeLengthInBits(column.getType()), -1, column, columnChunkMetaData, false, repeatedVector, schemaElement));
          }
          else {
            columnStatuses.add(ColumnReaderFactory.createFixedColumnReader(this, fieldFixedLength, column, columnChunkMetaData, recordsPerBatch, vector,
                schemaElement));
          }
        } else {
          // create a reader and add it to the appropriate list
          varLengthColumns.add(ColumnReaderFactory.getReader(this, -1, column, columnChunkMetaData, false, vector, schemaElement));
        }
      }
      varLengthReader = new VarLenBinaryReader(this, varLengthColumns);

      if (!isStarQuery()) {
        List<SchemaPath> projectedColumns = Lists.newArrayList(getColumns());
        SchemaPath col;
        for (int i = 0; i < columnsFound.length; i++) {
          col = projectedColumns.get(i);
          assert col!=null;
          if ( ! columnsFound[i] && !col.equals(STAR_COLUMN)) {
            nullFilledVectors.add((NullableIntVector)output.addField(MaterializedField.create(col,
                Types.optional(TypeProtos.MinorType.INT)),
                (Class<? extends ValueVector>) TypeHelper.getValueVectorClass(TypeProtos.MinorType.INT, DataMode.OPTIONAL)));
          }
        }
      }
    } catch (Exception e) {
      handleAndRaise("Failure in setting up reader", e);
    }
  }

  /** Wraps a failure with reader context (footer metadata) and rethrows. */
  protected void handleAndRaise(String s, Exception e) {
    String message = "Error in parquet record reader.\nMessage: " + s +
      "\nParquet Metadata: " + footer;
    throw new DrillRuntimeException(message, e);
  }

  @Override
  public void allocate(Map<Key, ValueVector> vectorMap) throws OutOfMemoryException {
    try {
      for (final ValueVector v : vectorMap.values()) {
        AllocationHelper.allocate(v, recordsPerBatch, 50, 10);
      }
    } catch (NullPointerException e) {
      // AllocationHelper signals allocation failure via NPE here; surface it as OOM
      throw new OutOfMemoryException();
    }
  }

  private SchemaPath toFieldName(String[] paths) {
    return SchemaPath.getCompoundPath(paths);
  }

  /** Maps parquet repetition/definition levels to a Drill data mode. */
  private TypeProtos.DataMode getDataMode(ColumnDescriptor column) {
    if (column.getMaxRepetitionLevel() > 0 ) {
      return DataMode.REPEATED;
    } else if (column.getMaxDefinitionLevel() == 0) {
      return TypeProtos.DataMode.REQUIRED;
    } else {
      return TypeProtos.DataMode.OPTIONAL;
    }
  }

  /** Resets per-pass read counters on every column reader before a new batch. */
  private void resetBatch() {
    for (final ColumnReader<?> column : columnStatuses) {
      column.valuesReadInCurrentPass = 0;
    }
    for (final VarLengthColumn<?> r : varLengthReader.columns) {
      r.valuesReadInCurrentPass = 0;
    }
  }

  public void readAllFixedFields(long recordsToRead) throws IOException {
    for (ColumnReader<?> crs : columnStatuses) {
      crs.processPages(recordsToRead);
    }
  }

  /**
   * Reads the next batch of records into the output vectors.
   *
   * @return number of records in the batch, 0 when the row group is exhausted
   */
  @Override
  public int next() {
    resetBatch();
    long recordsToRead = 0;
    try {
      ColumnReader<?> firstColumnStatus;
      if (columnStatuses.size() > 0) {
        firstColumnStatus = columnStatuses.iterator().next();
      }
      else {
        if (varLengthReader.columns.size() > 0) {
          firstColumnStatus = varLengthReader.columns.iterator().next();
        }
        else {
          firstColumnStatus = null;
        }
      }

      // No columns found in the file were selected, simply return a full batch of null records for each column requested
      if (firstColumnStatus == null) {
        if (mockRecordsRead == footer.getBlocks().get(rowGroupIndex).getRowCount()) {
          return 0;
        }
        recordsToRead = Math.min(DEFAULT_RECORDS_TO_READ_IF_NOT_FIXED_WIDTH, footer.getBlocks().get(rowGroupIndex).getRowCount() - mockRecordsRead);
        for (final ValueVector vv : nullFilledVectors ) {
          vv.getMutator().setValueCount( (int) recordsToRead);
        }
        mockRecordsRead += recordsToRead;
        totalRecordsRead += recordsToRead;
        return (int) recordsToRead;
      }

      if (allFieldsFixedLength) {
        recordsToRead = Math.min(recordsPerBatch, firstColumnStatus.columnChunkMetaData.getValueCount() - firstColumnStatus.totalValuesRead);
      } else {
        recordsToRead = DEFAULT_RECORDS_TO_READ_IF_NOT_FIXED_WIDTH;
      }

      if (allFieldsFixedLength) {
        readAllFixedFields(recordsToRead);
      } else { // variable length columns
        long fixedRecordsToRead = varLengthReader.readFields(recordsToRead, firstColumnStatus);
        readAllFixedFields(fixedRecordsToRead);
      }

      // if we have requested columns that were not found in the file fill their vectors with null
      // (by simply setting the value counts inside of them, as they start null filled)
      if (nullFilledVectors != null) {
        for (final ValueVector vv : nullFilledVectors ) {
          vv.getMutator().setValueCount(firstColumnStatus.getRecordsReadInCurrentPass());
        }
      }

//      logger.debug("So far read {} records out of row group({}) in file '{}'", totalRecordsRead, rowGroupIndex, hadoopPath.toUri().getPath());
      totalRecordsRead += firstColumnStatus.getRecordsReadInCurrentPass();
      return firstColumnStatus.getRecordsReadInCurrentPass();
    } catch (Exception e) {
      handleAndRaise("\nHadoop path: " + hadoopPath.toUri().getPath() +
        "\nTotal records read: " + totalRecordsRead +
        "\nMock records read: " + mockRecordsRead +
        "\nRecords to read: " + recordsToRead +
        "\nRow group index: " + rowGroupIndex +
        "\nRecords in row group: " + footer.getBlocks().get(rowGroupIndex).getRowCount(), e);
    }

    // this is never reached
    return 0;
  }

  @Override
  public void close() {
    logger.debug("Read {} records out of row group({}) in file '{}'", totalRecordsRead, rowGroupIndex,
        hadoopPath.toUri().getPath());
    // enable this for debugging when it is know that a whole file will be read
    // limit kills upstream operators once it has enough records, so this assert will fail
//    assert totalRecordsRead == footer.getBlocks().get(rowGroupIndex).getRowCount();
    if (columnStatuses != null) {
      for (final ColumnReader column : columnStatuses) {
        column.clear();
      }
      columnStatuses.clear();
      columnStatuses = null;
    }

    codecFactory.release();

    if (varLengthReader != null) {
      for (final VarLengthColumn r : varLengthReader.columns) {
        r.clear();
      }
      varLengthReader.columns.clear();
      varLengthReader = null;
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.webapp; import static org.apache.hadoop.yarn.server.resourcemanager.MockNodes.newResource; import static org.apache.hadoop.yarn.webapp.Params.TITLE; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.io.IOException; import java.util.List; import java.util.concurrent.ConcurrentMap; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeState; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.resourcemanager.MockNodes; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.RMContextImpl; import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager; import org.apache.hadoop.yarn.server.resourcemanager.applicationsmanager.MockAsm; import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.MemoryRMNodeLabelsManager; import 
org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.security.ClientToAMTokenSecretManagerInRM;
import org.apache.hadoop.yarn.server.resourcemanager.security.NMTokenSecretManagerInRM;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.StringHelper;
import org.apache.hadoop.yarn.webapp.WebApps;
import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.test.WebAppTests;
import org.junit.Test;

import com.google.common.collect.Maps;
import com.google.inject.Binder;
import com.google.inject.Injector;
import com.google.inject.Module;

/**
 * Tests that render the ResourceManager web UI pages (controller index, RM view,
 * nodes page) against mocked RM contexts, and the mock factory helpers they use.
 */
public class TestRMWebApp {
  static final int GiB = 1024; // MiB

  // Renders the controller index and checks the page title.
  @Test
  public void testControllerIndex() {
    Injector injector = WebAppTests.createMockInjector(TestRMWebApp.class,
        this, new Module() {
      @Override
      public void configure(Binder binder) {
        binder.bind(ApplicationACLsManager.class).toInstance(
            new ApplicationACLsManager(new Configuration()));
      }
    });
    RmController c = injector.getInstance(RmController.class);
    c.index();
    assertEquals("Applications", c.get(TITLE, "unknown"));
  }

  // Renders the RM view filtered by a single app state, then by a joined list of states.
  @Test
  public void testView() {
    Injector injector = WebAppTests.createMockInjector(RMContext.class,
        mockRMContext(15, 1, 2, 8*GiB),
        new Module() {
      @Override
      public void configure(Binder binder) {
        try {
          binder.bind(ResourceManager.class).toInstance(mockRm(3, 1, 2, 8*GiB));
        } catch (IOException e) {
          throw new IllegalStateException(e);
        }
      }
    });
    RmView rmViewInstance = injector.getInstance(RmView.class);
    rmViewInstance.set(YarnWebParams.APP_STATE, YarnApplicationState.RUNNING.toString());
    rmViewInstance.render();
    WebAppTests.flushOutput(injector);

    rmViewInstance.set(YarnWebParams.APP_STATE, StringHelper.cjoin(
        YarnApplicationState.ACCEPTED.toString(),
        YarnApplicationState.RUNNING.toString()));
    rmViewInstance.render();
    WebAppTests.flushOutput(injector);
  }

  // Renders the nodes page for all nodes, then filtered by UNHEALTHY and LOST states.
  @Test
  public void testNodesPage() {
    // mockRMContext(3, 2, 12, ...) builds 2 racks x 12 nodes per MockNodes
    // (the historical "10 nodes. Two of each type." comment no longer matched the args)
    final RMContext rmContext = mockRMContext(3, 2, 12, 8*GiB);
    Injector injector = WebAppTests.createMockInjector(RMContext.class, rmContext,
        new Module() {
      @Override
      public void configure(Binder binder) {
        try {
          binder.bind(ResourceManager.class).toInstance(mockRm(rmContext));
        } catch (IOException e) {
          throw new IllegalStateException(e);
        }
      }
    });

    // All nodes
    NodesPage instance = injector.getInstance(NodesPage.class);
    instance.render();
    WebAppTests.flushOutput(injector);

    // Unhealthy nodes
    instance.moreParams().put(YarnWebParams.NODE_STATE,
        NodeState.UNHEALTHY.toString());
    instance.render();
    WebAppTests.flushOutput(injector);

    // Lost nodes
    instance.moreParams().put(YarnWebParams.NODE_STATE,
        NodeState.LOST.toString());
    instance.render();
    WebAppTests.flushOutput(injector);
  }

  /**
   * Builds an RMContext backed by mock applications, active nodes and
   * deactivated nodes; the overridden getters expose the mock maps.
   */
  public static RMContext mockRMContext(int numApps, int racks, int numNodes,
      int mbsPerNode) {
    final List<RMApp> apps = MockAsm.newApplications(numApps);
    final ConcurrentMap<ApplicationId, RMApp> applicationsMaps = Maps
        .newConcurrentMap();
    for (RMApp app : apps) {
      applicationsMaps.put(app.getApplicationId(), app);
    }
    final List<RMNode> nodes = MockNodes.newNodes(racks, numNodes,
        newResource(mbsPerNode));
    final ConcurrentMap<NodeId, RMNode> nodesMap = Maps.newConcurrentMap();
    for (RMNode node : nodes) {
      nodesMap.put(node.getNodeID(), node);
    }
    final List<RMNode> deactivatedNodes =
        MockNodes.deactivatedNodes(racks, numNodes, newResource(mbsPerNode));
    final ConcurrentMap<String, RMNode> deactivatedNodesMap = Maps
        .newConcurrentMap();
    for (RMNode node : deactivatedNodes) {
      deactivatedNodesMap.put(node.getHostName(), node);
    }
    RMContextImpl rmContext = new RMContextImpl(null, null, null, null,
        null, null, null, null, null, null) {
      @Override
      public ConcurrentMap<ApplicationId, RMApp> getRMApps() {
        return applicationsMaps;
      }
      @Override
      public ConcurrentMap<String, RMNode> getInactiveRMNodes() {
        return deactivatedNodesMap;
      }
      @Override
      public ConcurrentMap<NodeId, RMNode> getRMNodes() {
        return nodesMap;
      }
    };
    rmContext.setNodeLabelManager(new MemoryRMNodeLabelsManager());
    return rmContext;
  }

  /** Convenience overload: builds a mock RM over a freshly mocked context. */
  public static ResourceManager mockRm(int apps, int racks, int nodes,
      int mbsPerNode) throws IOException {
    RMContext rmContext = mockRMContext(apps, racks, nodes, mbsPerNode);
    return mockRm(rmContext);
  }

  /** Mockito-based RM stub wired with a capacity scheduler and ACL manager. */
  public static ResourceManager mockRm(RMContext rmContext) throws IOException {
    ResourceManager rm = mock(ResourceManager.class);
    ResourceScheduler rs = mockCapacityScheduler();
    ApplicationACLsManager aclMgr = mockAppACLsManager();
    when(rm.getResourceScheduler()).thenReturn(rs);
    when(rm.getRMContext()).thenReturn(rmContext);
    when(rm.getApplicationACLsManager()).thenReturn(aclMgr);
    return rm;
  }

  /** Real CapacityScheduler initialized against a minimal mock RMContext. */
  public static CapacityScheduler mockCapacityScheduler() throws IOException {
    // stolen from TestCapacityScheduler
    CapacitySchedulerConfiguration conf = new CapacitySchedulerConfiguration();
    setupQueueConfiguration(conf);

    CapacityScheduler cs = new CapacityScheduler();
    cs.setConf(new YarnConfiguration());
    RMContext rmContext = new RMContextImpl(null, null, null, null, null,
        null, new RMContainerTokenSecretManager(conf),
        new NMTokenSecretManagerInRM(conf),
        new ClientToAMTokenSecretManagerInRM(), null);
    rmContext.setNodeLabelManager(new MemoryRMNodeLabelsManager());
    cs.setRMContext(rmContext);
    cs.init(conf);
    return cs;
  }

  public static ApplicationACLsManager mockAppACLsManager() {
    Configuration conf = new Configuration();
    return new ApplicationACLsManager(conf);
  }

  static void
setupQueueConfiguration(CapacitySchedulerConfiguration conf) { // Define top-level queues conf.setQueues(CapacitySchedulerConfiguration.ROOT, new String[] {"a", "b", "c"}); final String A = CapacitySchedulerConfiguration.ROOT + ".a"; conf.setCapacity(A, 10); final String B = CapacitySchedulerConfiguration.ROOT + ".b"; conf.setCapacity(B, 20); final String C = CapacitySchedulerConfiguration.ROOT + ".c"; conf.setCapacity(C, 70); // Define 2nd-level queues final String A1 = A + ".a1"; final String A2 = A + ".a2"; conf.setQueues(A, new String[] {"a1", "a2"}); conf.setCapacity(A1, 30); conf.setCapacity(A2, 70); final String B1 = B + ".b1"; final String B2 = B + ".b2"; final String B3 = B + ".b3"; conf.setQueues(B, new String[] {"b1", "b2", "b3"}); conf.setCapacity(B1, 50); conf.setCapacity(B2, 30); conf.setCapacity(B3, 20); final String C1 = C + ".c1"; final String C2 = C + ".c2"; final String C3 = C + ".c3"; final String C4 = C + ".c4"; conf.setQueues(C, new String[] {"c1", "c2", "c3", "c4"}); conf.setCapacity(C1, 50); conf.setCapacity(C2, 10); conf.setCapacity(C3, 35); conf.setCapacity(C4, 5); // Define 3rd-level queues final String C11 = C1 + ".c11"; final String C12 = C1 + ".c12"; final String C13 = C1 + ".c13"; conf.setQueues(C1, new String[] {"c11", "c12", "c13"}); conf.setCapacity(C11, 15); conf.setCapacity(C12, 45); conf.setCapacity(C13, 40); } public static ResourceManager mockFifoRm(int apps, int racks, int nodes, int mbsPerNode) throws Exception { ResourceManager rm = mock(ResourceManager.class); RMContext rmContext = mockRMContext(apps, racks, nodes, mbsPerNode); ResourceScheduler rs = mockFifoScheduler(rmContext); when(rm.getResourceScheduler()).thenReturn(rs); when(rm.getRMContext()).thenReturn(rmContext); return rm; } public static FifoScheduler mockFifoScheduler(RMContext rmContext) throws Exception { CapacitySchedulerConfiguration conf = new CapacitySchedulerConfiguration(); setupFifoQueueConfiguration(conf); FifoScheduler fs = new FifoScheduler(); 
fs.setConf(new YarnConfiguration()); fs.setRMContext(rmContext); fs.init(conf); return fs; } static void setupFifoQueueConfiguration(CapacitySchedulerConfiguration conf) { // Define default queue conf.setQueues("default", new String[] {"default"}); conf.setCapacity("default", 100); } public static void main(String[] args) throws Exception { // For manual testing WebApps.$for("yarn", new TestRMWebApp()).at(8888).inDevMode(). start(new RMWebApp(mockRm(2500, 8, 8, 8*GiB))).joinThread(); WebApps.$for("yarn", new TestRMWebApp()).at(8888).inDevMode(). start(new RMWebApp(mockFifoRm(10, 1, 4, 8*GiB))).joinThread(); } }
/*
 * Copyright 2013 McEvoy Software Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.bandstand.web;

import com.bandstand.domain.Musician;
import com.bandstand.domain.SessionManager;
import io.milton.annotations.AddressBooks;
import io.milton.annotations.ChildrenOf;
import io.milton.annotations.ContactData;
import io.milton.annotations.Get;
import io.milton.annotations.ModifiedDate;
import io.milton.annotations.PutChild;
import io.milton.annotations.ResourceController;
import io.milton.common.ModelAndView;
import io.milton.http.exceptions.BadRequestException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import net.sourceforge.cardme.engine.VCardEngine;
import net.sourceforge.cardme.io.VCardWriter;
import net.sourceforge.cardme.vcard.VCard;
import net.sourceforge.cardme.vcard.VCardImpl;
import net.sourceforge.cardme.vcard.features.EmailFeature;
import net.sourceforge.cardme.vcard.features.TelephoneFeature;
import net.sourceforge.cardme.vcard.types.BeginType;
import net.sourceforge.cardme.vcard.types.EmailType;
import net.sourceforge.cardme.vcard.types.EndType;
import net.sourceforge.cardme.vcard.types.FormattedNameType;
import net.sourceforge.cardme.vcard.types.NameType;
import net.sourceforge.cardme.vcard.types.TelephoneType;
import net.sourceforge.cardme.vcard.types.UIDType;
import org.apache.commons.lang.StringUtils;
import org.hibernate.Transaction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * For accessing other musicians as contacts via carddav
 *
 * <p>Exposes, per musician, an address-book home containing one default
 * address book whose entries are vCards generated from all musicians in the
 * database. PUT of vCard data creates or updates {@link Musician} rows.
 *
 * @author brad
 */
@ResourceController
public class ContactsController {

    private static final Logger log = LoggerFactory.getLogger(ContactsController.class);

    /** Each musician gets a single address-books home ("abooks"). */
    @ChildrenOf
    public MusicianAddressBooksHome getAddressBookHome(Musician m) {
        return new MusicianAddressBooksHome(m);
    }

    /** The home contains one address book, named "default". */
    @ChildrenOf
    @AddressBooks
    public MusicianAddressBook getAddressBook(MusicianAddressBooksHome m) {
        return new MusicianAddressBook(m.musician);
    }

    /**
     * Lists every musician in the current Hibernate session as a contact
     * of the address book.
     */
    @ChildrenOf
    public List<MusicianContact> getMusicianContacts(MusicianAddressBook addressBook) {
        List<MusicianContact> list = new ArrayList<MusicianContact>();
        for (Musician m : Musician.findAll(SessionManager.session())) {
            list.add(new MusicianContact(m));
        }
        return list;
    }

    /**
     * Serializes the contact's musician as a UTF-8 encoded vCard.
     *
     * <p>Falls back to the musician's name for the UID when no contact UID
     * is stored; phone and email are included only when non-blank.
     *
     * @return the vCard bytes
     */
    @ContactData
    @Get
    public byte[] getContactData(MusicianContact c) {
        Musician m = c.contact;
        try {
            VCardImpl vcard = new VCardImpl();
            vcard.setBegin(new BeginType());
            vcard.setID(m.getContactUid());
            String uid = m.getContactUid();
            if (uid == null || uid.length() == 0) {
                uid = m.getName();
            }
            vcard.setUID(new UIDType(uid));
            vcard.setFormattedName(new FormattedNameType(m.getGivenName() + " " + m.getSurName()));
            vcard.setName(new NameType(m.getSurName(), m.getGivenName()));
            if (!StringUtils.isBlank(m.getTelephonenumber())) {
                vcard.addTelephoneNumber(new TelephoneType(m.getTelephonenumber()));
            }
            if (!StringUtils.isBlank(m.getMail())) {
                vcard.addEmail(new EmailType(m.getMail()));
            }
            vcard.setEnd(new EndType());
            VCardWriter writer = new VCardWriter();
            writer.setVCard(vcard);
            return writer.buildVCardString().getBytes("UTF-8");
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        }
    }

    /**
     * Creates a new musician from uploaded vCard data.
     *
     * <p>A random UUID is assigned when the vCard carries no UID. Runs in
     * a Hibernate transaction; any failure is rolled back and reported as
     * a {@link BadRequestException}.
     *
     * @param abook the target address book (implicit parent)
     * @param newName the resource name of the new contact
     * @param vcardData raw vCard bytes as uploaded
     * @throws BadRequestException if the vCard cannot be parsed or saved
     */
    @PutChild
    public MusicianContact createMusicianContact(MusicianAddressBook abook, String newName, byte[] vcardData) throws BadRequestException {
        Transaction tx = SessionManager.session().beginTransaction();
        try {
            VCardEngine cardEngine = new VCardEngine();
            String vc = new String(vcardData);
            System.out.println("---- contact ----");
            System.out.println(vc);
            System.out.println("-------");
            VCard vcard = cardEngine.parse(vc);
            Musician m = new Musician();
            if (vcard.getUID() != null) {
                m.setContactUid(vcard.getUID().getUID());
            } else {
                m.setContactUid(UUID.randomUUID().toString());
            }
            m.setName(newName);
            m.setCreatedDate(new Date());
            // FIX: guard against vCards without an N (name) feature, matching
            // the null handling already done in updateMusicianContact; the
            // previous code threw NullPointerException here.
            if (vcard.getName() != null) {
                m.setGivenName(vcard.getName().getGivenName());
                m.setSurName(vcard.getName().getFamilyName());
            } else {
                log.warn("No name feature in supplied vcard: " + vc);
            }
            m.setModifiedDate(new Date());
            {
                // Last telephone number wins, as in the original code.
                Iterator<TelephoneFeature> it = vcard.getTelephoneNumbers();
                while (it.hasNext()) {
                    m.setTelephonenumber(it.next().getTelephone());
                }
            }
            {
                // Last email wins, as in the original code.
                Iterator<EmailFeature> itEmails = vcard.getEmails();
                while (itEmails.hasNext()) {
                    m.setMail(itEmails.next().getEmail());
                }
            }
            SessionManager.session().save(m);
            SessionManager.session().flush();
            tx.commit();
            return new MusicianContact(m);
        } catch (Exception e) {
            tx.rollback();
            log.error("exception uploading musician contact", e);
            throw new BadRequestException(e.getMessage());
        }
    }

    /**
     * Updates an existing musician from uploaded vCard data.
     *
     * <p>UID and name are only overwritten when present in the vCard. Runs
     * in a Hibernate transaction; failures roll back and surface as
     * {@link BadRequestException}.
     */
    @PutChild
    public MusicianContact updateMusicianContact(MusicianContact contact, byte[] vcardData) throws BadRequestException {
        log.info("updateMusicianContact");
        Transaction tx = SessionManager.session().beginTransaction();
        try {
            Musician m = contact.getMusician();
            VCardEngine cardEngine = new VCardEngine();
            String vc = new String(vcardData);
            System.out.println("---- contact ----");
            System.out.println(vc);
            System.out.println("-------");
            VCard vcard = cardEngine.parse(vc);
            if (vcard.getUID() != null) {
                m.setContactUid(vcard.getUID().getUID());
            }
            if (vcard.getName() != null) {
                m.setGivenName(vcard.getName().getGivenName());
                m.setSurName(vcard.getName().getFamilyName());
            } else {
                log.warn("No name feature in supplied vcard: " + vc);
            }
            m.setModifiedDate(new Date());
            {
                // Last telephone number wins, as in the original code.
                Iterator<TelephoneFeature> it = vcard.getTelephoneNumbers();
                while (it.hasNext()) {
                    m.setTelephonenumber(it.next().getTelephone());
                }
            }
            {
                // Last email wins, as in the original code.
                Iterator<EmailFeature> itEmails = vcard.getEmails();
                while (itEmails.hasNext()) {
                    m.setMail(itEmails.next().getEmail());
                }
            }
            m.setModifiedDate(new Date());
            SessionManager.session().save(m);
            SessionManager.session().flush();
            tx.commit();
            return contact;
        } catch (Exception e) {
            tx.rollback();
            log.error("exception uploading musician contact", e);
            throw new BadRequestException(e.getMessage());
        }
    }

    /** Modification date for ETag/conditional handling. */
    @ModifiedDate
    public Date getContactModDate(MusicianContact c) {
        return c.getMusician().getModifiedDate();
    }

    /** Renders the HTML view of an address book. */
    @Get
    public ModelAndView renderAddressBookPage(MusicianAddressBook addressBook) {
        return new ModelAndView("addressBook", addressBook, "abook");
    }

    /** A single address-book entry wrapping a Musician row. */
    public class MusicianContact {

        private final Musician contact;

        public MusicianContact(Musician contact) {
            this.contact = contact;
        }

        public String getId() {
            return contact.getId() + "";
        }

        public String getName() {
            return contact.getName();
        }

        public Musician getMusician() {
            return contact;
        }
    }

    /** Container resource ("abooks") holding a musician's address books. */
    public class MusicianAddressBooksHome {

        private final Musician musician;

        public MusicianAddressBooksHome(Musician musician) {
            this.musician = musician;
        }

        public String getName() {
            return "abooks";
        }
    }

    /** The single address book ("default") of a musician. */
    public class MusicianAddressBook {

        private final Musician musician;

        public MusicianAddressBook(Musician musician) {
            this.musician = musician;
        }

        public String getName() {
            return "default";
        }
    }
}
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.syntax;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Ordering;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.events.Location;
import com.google.devtools.build.lib.skylarkinterface.SkylarkInterfaceUtils;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModule;
import com.google.devtools.build.lib.skylarkinterface.SkylarkValue;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.Collection;
import java.util.List;
import java.util.Map;

/**
 * Utilities used by the evaluator.
 *
 * <p>Static helpers for type naming, truthiness, iteration/collection
 * coercion, index and slice arithmetic, and Skylark-value comparison.
 * Stateless; all members are static.
 */
public final class EvalUtils {

  // Utility class: not instantiable.
  private EvalUtils() {}

  /**
   * The exception that SKYLARK_COMPARATOR might throw. This is an unchecked exception
   * because Comparator doesn't let us declare exceptions. It should normally be caught
   * and wrapped in an EvalException.
   */
  public static class ComparisonException extends RuntimeException {
    public ComparisonException(String msg) {
      super(msg);
    }
  }

  /**
   * Compare two Skylark objects.
   *
   * <p>It may throw an unchecked exception ComparisonException that should be wrapped in an
   * EvalException.
   */
  public static final Ordering<Object> SKYLARK_COMPARATOR =
      new Ordering<Object>() {
        // Lexicographic comparison: element-wise first, then by length.
        private int compareLists(SkylarkList o1, SkylarkList o2) {
          for (int i = 0; i < Math.min(o1.size(), o2.size()); i++) {
            int cmp = compare(o1.get(i), o2.get(i));
            if (cmp != 0) {
              return cmp;
            }
          }
          return Integer.compare(o1.size(), o2.size());
        }

        @Override
        @SuppressWarnings("unchecked")
        public int compare(Object o1, Object o2) {
          // Normalize both operands to their Skylark representation first.
          o1 = SkylarkType.convertToSkylark(o1, /*env=*/ null);
          o2 = SkylarkType.convertToSkylark(o2, /*env=*/ null);

          // Lists compare with lists and tuples with tuples, but not across.
          if (o1 instanceof SkylarkList
              && o2 instanceof SkylarkList
              && ((SkylarkList) o1).isTuple() == ((SkylarkList) o2).isTuple()) {
            return compareLists((SkylarkList) o1, (SkylarkList) o2);
          }
          if (!o1.getClass().equals(o2.getClass())) {
            throw new ComparisonException(
                "Cannot compare " + getDataTypeName(o1) + " with " + getDataTypeName(o2));
          }
          if (o1 instanceof ClassObject) {
            throw new ComparisonException("Cannot compare structs");
          }
          if (o1 instanceof SkylarkNestedSet) {
            throw new ComparisonException("Cannot compare depsets");
          }
          try {
            return ((Comparable<Object>) o1).compareTo(o2);
          } catch (ClassCastException e) {
            // Same class but not Comparable: report as incomparable types.
            throw new ComparisonException(
                "Cannot compare " + getDataTypeName(o1) + " with " + getDataTypeName(o2));
          }
        }
      };

  /**
   * Checks that an Object is a valid key for a Skylark dict.
   * @param o an Object to validate
   * @throws EvalException if o is not a valid key
   */
  public static void checkValidDictKey(Object o) throws EvalException {
    // TODO(bazel-team): check that all recursive elements are both Immutable AND Comparable.
    if (isImmutable(o)) {
      return;
    }
    // Same error message as Python (that makes it a TypeError).
    throw new EvalException(null, Printer.format("unhashable type: '%r'", o.getClass()));
  }

  /**
   * Is this object known or assumed to be recursively immutable by Skylark?
   * @param o an Object
   * @return true if the object is known to be an immutable value.
   */
  // NB: This is used as the basis for accepting objects in SkylarkNestedSet-s,
  // as well as for accepting objects as keys for Skylark dict-s.
  public static boolean isImmutable(Object o) {
    if (o instanceof SkylarkValue) {
      // SkylarkValues self-report their immutability.
      return ((SkylarkValue) o).isImmutable();
    }
    return isImmutable(o.getClass());
  }

  /**
   * Is this class known to be *recursively* immutable by Skylark?
   * For instance, class Tuple is not it, because it can contain mutable values.
   * @param c a Class
   * @return true if the class is known to represent only recursively immutable values.
   */
  // NB: This is used as the basis for accepting objects in SkylarkNestedSet-s,
  // as well as for accepting objects as keys for Skylark dict-s.
  static boolean isImmutable(Class<?> c) {
    return c.isAnnotationPresent(Immutable.class) // TODO(bazel-team): beware of containers!
        || c.equals(String.class)
        || c.equals(Integer.class)
        || c.equals(Boolean.class);
  }

  /**
   * Returns true if the type is acceptable to be returned to the Skylark language.
   */
  public static boolean isSkylarkAcceptable(Class<?> c) {
    return SkylarkValue.class.isAssignableFrom(c) // implements SkylarkValue
        || c.equals(String.class) // basic values
        || c.equals(Integer.class)
        || c.equals(Boolean.class)
        // there is a registered Skylark ancestor class (useful e.g. when using AutoValue)
        || SkylarkInterfaceUtils.getSkylarkModule(c) != null
        || ImmutableMap.class.isAssignableFrom(c) // will be converted to SkylarkDict
        || NestedSet.class.isAssignableFrom(c) // will be converted to SkylarkNestedSet
        || c.equals(PathFragment.class); // other known class
  }

  // TODO(bazel-team): move the following few type-related functions to SkylarkType

  /**
   * Return the Skylark-type of {@code c}
   *
   * <p>The result will be a type that Skylark understands and is either equal to {@code c}
   * or is a supertype of it. For example, all instances of (all subclasses of) SkylarkList
   * are considered to be SkylarkLists.
   *
   * <p>Skylark's type validation isn't equipped to deal with inheritance so we must tell it which
   * of the superclasses or interfaces of {@code c} is the one that matters for type compatibility.
   *
   * @param c a class
   * @return a super-class of c to be used in validation-time type inference.
   */
  public static Class<?> getSkylarkType(Class<?> c) {
    // TODO(bazel-team): replace these with SkylarkValue-s
    if (String.class.equals(c)
        || Boolean.class.equals(c)
        || Integer.class.equals(c)
        || Iterable.class.equals(c)
        || Class.class.equals(c)) {
      return c;
    }
    // TODO(bazel-team): also unify all implementations of ClassObject,
    // that we used to all print the same as "struct"?
    Class<?> parent = SkylarkInterfaceUtils.getParentWithSkylarkModule(c);
    if (parent != null) {
      return parent;
    }
    Preconditions.checkArgument(
        SkylarkValue.class.isAssignableFrom(c), "%s is not allowed as a Skylark value", c);
    return c;
  }

  /**
   * Returns a pretty name for the datatype of object 'o' in the Build language.
   */
  public static String getDataTypeName(Object o) {
    return getDataTypeName(o, false);
  }

  /**
   * Returns a pretty name for the datatype of object {@code object} in Skylark
   * or the BUILD language, with full details if the {@code full} boolean is true.
   */
  public static String getDataTypeName(Object object, boolean fullDetails) {
    Preconditions.checkNotNull(object);
    if (fullDetails) {
      // With details, depsets report their content type and selects their
      // underlying datatype.
      if (object instanceof SkylarkNestedSet) {
        SkylarkNestedSet set = (SkylarkNestedSet) object;
        return "depset of " + set.getContentType() + "s";
      }
      if (object instanceof SelectorList) {
        SelectorList list = (SelectorList) object;
        return "select of " + getDataTypeNameFromClass(list.getType());
      }
    }
    return getDataTypeNameFromClass(object.getClass());
  }

  /**
   * Returns a pretty name for the datatype equivalent of class 'c' in the Build language.
   */
  public static String getDataTypeNameFromClass(Class<?> c) {
    return getDataTypeNameFromClass(c, true);
  }

  /**
   * Returns a pretty name for the datatype equivalent of class 'c' in the Build language.
   * @param highlightNameSpaces Determines whether the result should also contain a special comment
   * when the given class identifies a Skylark name space.
   */
  public static String getDataTypeNameFromClass(Class<?> c, boolean highlightNameSpaces) {
    SkylarkModule module = SkylarkInterfaceUtils.getSkylarkModule(c);
    if (module != null) {
      return module.name()
          + ((module.namespace() && highlightNameSpaces) ? " (a language module)" : "");
    } else if (c.equals(Object.class)) {
      return "unknown";
    } else if (c.equals(String.class)) {
      return "string";
    } else if (c.equals(Integer.class)) {
      return "int";
    } else if (c.equals(Boolean.class)) {
      return "bool";
    } else if (List.class.isAssignableFrom(c)) {
      // This is a Java List that isn't a SkylarkList
      return "List"; // This case shouldn't happen in normal code, but we keep it for debugging.
    } else if (Map.class.isAssignableFrom(c)) {
      // This is a Java Map that isn't a SkylarkDict
      return "Map"; // This case shouldn't happen in normal code, but we keep it for debugging.
    } else if (BaseFunction.class.isAssignableFrom(c)) {
      return "function";
    } else if (c.equals(SelectorValue.class)) {
      return "select";
    } else if (NestedSet.class.isAssignableFrom(c)) {
      // TODO(bazel-team): no one should be seeing naked NestedSet at all.
      return "set";
    } else {
      if (c.getSimpleName().isEmpty()) {
        return c.getName();
      } else {
        return c.getSimpleName();
      }
    }
  }

  /**
   * Throws an EvalException (located at {@code expr}) if {@code obj} is null;
   * otherwise returns it unchanged. Null here indicates an evaluator bug.
   */
  public static Object checkNotNull(Expression expr, Object obj) throws EvalException {
    if (obj == null) {
      throw new EvalException(
          expr.getLocation(),
          "unexpected null value, please send a bug report. "
              + "This was generated by expression '" + expr + "'");
    }
    return obj;
  }

  /**
   * @return the truth value of an object, according to Python rules.
   * http://docs.python.org/2/library/stdtypes.html#truth-value-testing
   */
  public static boolean toBoolean(Object o) {
    if (o == null || o == Runtime.NONE) {
      return false;
    } else if (o instanceof Boolean) {
      return (Boolean) o;
    } else if (o instanceof String) {
      return !((String) o).isEmpty();
    } else if (o instanceof Integer) {
      return (Integer) o != 0;
    } else if (o instanceof Collection<?>) {
      return !((Collection<?>) o).isEmpty();
    } else if (o instanceof Map<?, ?>) {
      return !((Map<?, ?>) o).isEmpty();
    } else if (o instanceof NestedSet<?>) {
      return !((NestedSet<?>) o).isEmpty();
    } else if (o instanceof SkylarkNestedSet) {
      return !((SkylarkNestedSet) o).isEmpty();
    } else if (o instanceof Iterable<?>) {
      return !(Iterables.isEmpty((Iterable<?>) o));
    } else {
      // Any other object is truthy, as in Python.
      return true;
    }
  }

  /**
   * Coerces {@code o} to a Collection: lists/tuples yield their elements,
   * dicts yield their keys (SkylarkDict order preserved, plain Maps sorted
   * for determinism), depsets are flattened.
   *
   * @throws EvalException if o is not collection-like or its keys are not
   *     comparable (for the plain-Map case)
   */
  public static Collection<?> toCollection(Object o, Location loc) throws EvalException {
    if (o instanceof Collection) {
      return (Collection<?>) o;
    } else if (o instanceof SkylarkList) {
      return ((SkylarkList) o).getImmutableList();
    } else if (o instanceof Map) {
      // For dictionaries we iterate through the keys only
      if (o instanceof SkylarkDict) {
        // SkylarkDicts handle ordering themselves
        SkylarkDict<?, ?> dict = (SkylarkDict) o;
        List<Object> list = Lists.newArrayListWithCapacity(dict.size());
        for (Map.Entry<?, ?> entries : dict.entrySet()) {
          list.add(entries.getKey());
        }
        return ImmutableList.copyOf(list);
      }
      // For determinism, we sort the keys.
      try {
        return SKYLARK_COMPARATOR.sortedCopy(((Map<?, ?>) o).keySet());
      } catch (ComparisonException e) {
        throw new EvalException(loc, e);
      }
    } else if (o instanceof SkylarkNestedSet) {
      return ((SkylarkNestedSet) o).toCollection();
    } else {
      throw new EvalException(loc,
          "type '" + getDataTypeName(o) + "' is not a collection");
    }
  }

  /**
   * Coerces {@code o} to an Iterable, Python-style: a string iterates over
   * its one-character substrings, maps iterate over keys, depsets are
   * flattened.
   *
   * @throws EvalException if o is not iterable
   */
  public static Iterable<?> toIterable(Object o, Location loc) throws EvalException {
    if (o instanceof String) {
      // This is not as efficient as special casing String in for and dict and list comprehension
      // statements. However this is a more unified way.
      return split((String) o);
    } else if (o instanceof SkylarkNestedSet) {
      // TODO(bazel-team): Add a deprecation warning: don't implicitly flatten depsets.
      return ((SkylarkNestedSet) o).toCollection();
    } else if (o instanceof Iterable) {
      return (Iterable<?>) o;
    } else if (o instanceof Map) {
      return toCollection(o, loc);
    } else {
      throw new EvalException(loc,
          "type '" + getDataTypeName(o) + "' is not iterable");
    }
  }

  /**
   * Given an {@link Iterable}, returns it as-is. Given a {@link SkylarkNestedSet}, returns its
   * contents as an iterable. Throws {@link EvalException} for any other value.
   *
   * <p>This is a kludge for the change that made {@code SkylarkNestedSet} not implement {@code
   * Iterable}. It is different from {@link #toIterable} in its behavior for strings and other types
   * that are not strictly Java-iterable.
   *
   * @throws EvalException if {@code o} is not an iterable or set
   * @deprecated avoid writing APIs that implicitly treat depsets as iterables. It encourages
   *     unnecessary flattening of depsets.
   *
   * <p>TODO(bazel-team): Remove this if/when implicit iteration over {@code SkylarkNestedSet} is no
   * longer supported.
   */
  @Deprecated
  public static Iterable<?> toIterableStrict(Object o, Location loc) throws EvalException {
    if (o instanceof Iterable) {
      return (Iterable<?>) o;
    } else if (o instanceof SkylarkNestedSet) {
      // TODO(bazel-team): Add a deprecation warning: don't implicitly flatten depsets.
      return ((SkylarkNestedSet) o).toCollection();
    } else {
      throw new EvalException(loc,
          "expected Iterable or depset, but got '" + getDataTypeName(o) + "' (strings and maps "
          + "are not allowed here)");
    }
  }

  /** Locks {@code object} at {@code loc} if it is a SkylarkMutable; no-op otherwise. */
  public static void lock(Object object, Location loc) {
    if (object instanceof SkylarkMutable) {
      ((SkylarkMutable) object).lock(loc);
    }
  }

  /** Unlocks {@code object} at {@code loc} if it is a SkylarkMutable; no-op otherwise. */
  public static void unlock(Object object, Location loc) {
    if (object instanceof SkylarkMutable) {
      ((SkylarkMutable) object).unlock(loc);
    }
  }

  // Splits a string into a list of its one-character substrings, mirroring
  // Python's iteration over strings.
  private static ImmutableList<String> split(String value) {
    ImmutableList.Builder<String> builder = new ImmutableList.Builder<>();
    for (char c : value.toCharArray()) {
      builder.add(String.valueOf(c));
    }
    return builder.build();
  }

  /**
   * @return the size of the Skylark object or -1 in case the object doesn't have a size.
   */
  public static int size(Object arg) {
    if (arg instanceof String) {
      return ((String) arg).length();
    } else if (arg instanceof Map) {
      return ((Map<?, ?>) arg).size();
    } else if (arg instanceof SkylarkList) {
      return ((SkylarkList<?>) arg).size();
    } else if (arg instanceof SkylarkNestedSet) {
      // TODO(bazel-team): Add a deprecation warning: don't implicitly flatten depsets.
      return ((SkylarkNestedSet) arg).toCollection().size();
    } else if (arg instanceof Iterable) {
      // Iterables.size() checks if arg is a Collection so it's efficient in that sense.
      return Iterables.size((Iterable<?>) arg);
    }
    return -1;
  }

  // The following functions for indexing and slicing match the behavior of Python.

  /**
   * Resolves a positive or negative index to an index in the range [0, length), or throws
   * EvalException if it is out-of-range. If the index is negative, it counts backward from
   * length.
   */
  public static int getSequenceIndex(int index, int length, Location loc)
      throws EvalException {
    int actualIndex = index;
    if (actualIndex < 0) {
      actualIndex += length;
    }
    if (actualIndex < 0 || actualIndex >= length) {
      throw new EvalException(
          loc,
          "index out of range (index is " + index + ", but sequence has " + length
              + " elements)");
    }
    return actualIndex;
  }

  /**
   * Performs index resolution after verifying that the given object has index type.
   */
  public static int getSequenceIndex(Object index, int length, Location loc)
      throws EvalException {
    if (!(index instanceof Integer)) {
      throw new EvalException(
          loc, "indices must be integers, not " + EvalUtils.getDataTypeName(index));
    }
    return getSequenceIndex(((Integer) index).intValue(), length, loc);
  }

  /**
   * Resolves a positive or negative index to an integer that can denote the left or right boundary
   * of a slice. If reverse is false, the slice has positive stride (i.e., its elements are in their
   * normal order) and the result is guaranteed to be in range [0, length + 1). If reverse is true,
   * the slice has negative stride and the result is in range [-1, length). In either case, if the
   * index is negative, it counts backward from length. Note that an input index of -1 represents
   * the last element's position, while an output integer of -1 represents the imaginary position
   * to the left of the first element.
   */
  public static int clampRangeEndpoint(int index, int length, boolean reverse) {
    if (index < 0) {
      index += length;
    }
    if (!reverse) {
      return Math.max(Math.min(index, length), 0);
    } else {
      return Math.max(Math.min(index, length - 1), -1);
    }
  }

  /**
   * Resolves a positive or negative index to an integer that can denote the boundary for a
   * slice with positive stride.
   */
  public static int clampRangeEndpoint(int index, int length) {
    return clampRangeEndpoint(index, length, false);
  }

  /**
   * Calculates the indices of the elements that should be included in the slice [start:end:step]
   * of a sequence with the given length. Each of start, end, and step must be supplied, and step
   * may not be 0.
   */
  public static List<Integer> getSliceIndices(int start, int end, int step, int length) {
    if (step == 0) {
      throw new IllegalArgumentException("Slice step cannot be zero");
    }
    start = clampRangeEndpoint(start, length, step < 0);
    end = clampRangeEndpoint(end, length, step < 0);
    ImmutableList.Builder<Integer> indices = ImmutableList.builder();
    for (int current = start; step > 0 ? current < end : current > end; current += step) {
      indices.add(current);
    }
    return indices.build();
  }

  /**
   * Calculates the indices of the elements in a slice, after validating the arguments and replacing
   * Runtime.NONE with default values. Throws an EvalException if a bad argument is given.
   */
  public static List<Integer> getSliceIndices(
      Object startObj, Object endObj, Object stepObj, int length, Location loc)
      throws EvalException {
    int start;
    int end;
    int step;

    if (stepObj == Runtime.NONE) {
      // This case is excluded by the parser, but let's handle it for completeness.
      step = 1;
    } else if (stepObj instanceof Integer) {
      step = ((Integer) stepObj).intValue();
    } else {
      throw new EvalException(
          loc, String.format("slice step must be an integer, not '%s'", stepObj));
    }
    if (step == 0) {
      throw new EvalException(loc, "slice step cannot be zero");
    }

    if (startObj == Runtime.NONE) {
      // Default start depends on the stride direction, as in Python.
      start = (step > 0) ? 0 : length - 1;
    } else if (startObj instanceof Integer) {
      start = ((Integer) startObj).intValue();
    } else {
      throw new EvalException(
          loc, String.format("slice start must be an integer, not '%s'", startObj));
    }
    if (endObj == Runtime.NONE) {
      // If step is negative, can't use -1 for end since that would be converted
      // to the rightmost element's position.
      end = (step > 0) ? length : -length - 1;
    } else if (endObj instanceof Integer) {
      end = ((Integer) endObj).intValue();
    } else {
      throw new EvalException(loc,
          String.format("slice end must be an integer, not '%s'", endObj));
    }
    return getSliceIndices(start, end, step, length);
  }

  /** @return true if x is Java null or Skylark None */
  public static boolean isNullOrNone(Object x) {
    return x == null || x == Runtime.NONE;
  }

  /**
   * Build a SkylarkDict of kwarg arguments from a list, removing null-s or None-s.
   *
   * @param env the Environment in which this map can be mutated.
   * @param init a series of key, value pairs (as consecutive arguments)
   *   as in {@code optionMap(k1, v1, k2, v2, k3, v3)}
   *   where each key is a String, each value is an arbitrary Object.
   * @return a {@code Map<String, Object>} that has all the specified entries,
   *   where key, value pairs appearing earlier have precedence,
   *   i.e. {@code k1, v1} may override {@code k3, v3}.
   *
   * Ignore any entry where the value is null or None.
   * Keys cannot be null.
   */
  @SuppressWarnings("unchecked")
  public static <K, V> SkylarkDict<K, V> optionMap(Environment env, Object... init) {
    ImmutableMap.Builder<K, V> b = new ImmutableMap.Builder<>();
    Preconditions.checkState(init.length % 2 == 0);
    // Iterate backwards so that earlier pairs, inserted last, win in the
    // ImmutableMap builder's precedence.
    for (int i = init.length - 2; i >= 0; i -= 2) {
      K key = (K) Preconditions.checkNotNull(init[i]);
      V value = (V) init[i + 1];
      if (!isNullOrNone(value)) {
        b.put(key, value);
      }
    }
    return SkylarkDict.<K, V>copyOf(env, b.build());
  }
}
package com.twitter.elephantbird.mapreduce.io; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.Arrays; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.WritableComparable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A Hadoop Writable wrapper around a serialized messages like Protocol buffers. */ public abstract class BinaryWritable<M> implements WritableComparable<BinaryWritable<M>> { private static final Logger LOG = LoggerFactory.getLogger(BinaryWritable.class); // NOTE: only one of message and messageBytes is non-null at any time so that // message and messageBytes don't go out of sync (user could modify message). private M message; private byte[] messageBytes; private BinaryConverter<M> converter; protected BinaryWritable(M message, BinaryConverter<M> converter) { this.message = message; this.converter = converter; } /** throws an exception if the converter is not set */ private void checkConverter() { if (converter == null) { throw new IllegalStateException("Runtime parameterized Protobuf/Thrift class is unkonwn. " + "This object was probably created with default constructor. " + "Please use setConverter(Class)."); } } protected abstract BinaryConverter<M> getConverterFor(Class<M> clazz); /** * Sets the handler for serialization and deserialization based on the class. * This converter is often set in constructor. But some times it might be * impossible to know the the actual class during construction. <br> <br> * * E.g. when this writable is used as output value for a Mapper, * MR creates writable on the Reducer using the default constructor, * and there is no way for us to know the parameterized class. * In this case, user invokes setConverter() before * calling get() to supply parameterized class. <br> * * The class name could be written as part of writable serialization, but we * don't yet see a need to do that as it has many other disadvantages. 
*/ public void setConverter(Class<M> clazz) { converter = getConverterFor(clazz); } /** * sets converter. useful for reusing existing converter. */ public void setConverter(BinaryConverter<M> converter) { this.converter = converter; } /** * Returns the current object. Subsequent calls to get() may not return the * same object, but in stead might return a new object deserialized from same * set of bytes. As a result, multiple calls to get() should be avoided, and * modifications to an object returned by get() may not * reflect even if this writable is serialized later. <br> * Please use set() to be certain of what object is serialized.<br><br> * * The deserialization of the actual Protobuf/Thrift object is often delayed * till the first call to this method. <br> * In some cases the the parameterized proto class may not be known yet * ( in case of default construction. see {@link #setConverter(Class)} ), * and this will throw an {@link IllegalStateException}. */ public M get() { // may be we should rename this method. the contract would be less // confusing with a different name. if (message == null && messageBytes != null) { checkConverter(); return converter.fromBytes(messageBytes); } return message; } public void clear() { message = null; messageBytes = null; } public void set(M message) { this.message = message; this.messageBytes = null; // should we serialize the object to messageBytes instead? // that is the only way we can guarantee any subsequent modifications to // message by the user don't affect serialization. Unlike Protobuf objects // Thrift objects are mutable. For now we will delay deserialization until // it is required. } @Override public void write(DataOutput out) throws IOException { byte[] bytes = serialize(); if (bytes != null) { out.writeInt(bytes.length); out.write(bytes, 0, bytes.length); } else { out.writeInt(0); } } /** * Converts the message to raw bytes, and caches the converted value. 
* @return converted value, which may be null in case of null message or error. */ private byte[] serialize() { if (messageBytes == null && message != null) { checkConverter(); messageBytes = converter.toBytes(message); if (messageBytes == null) { // should we throw an IOException instead? LOG.warn("Could not serialize " + message.getClass()); } else { message = null; // so that message and messageBytes don't go out of // sync. } } return messageBytes; } @Override public void readFields(DataInput in) throws IOException { message = null; messageBytes = null; int size = in.readInt(); if (size > 0) { byte[] buf = new byte[size]; in.readFully(buf, 0, size); messageBytes = buf; // messageBytes is deserialized in get() } } @Override public int compareTo(BinaryWritable<M> other) { byte[] thisBytes = serialize(); byte[] otherBytes = other.serialize(); int thisLen = thisBytes == null ? 0 : thisBytes.length; int otherLen = otherBytes == null ? 0 : otherBytes.length; return BytesWritable.Comparator.compareBytes(thisBytes, 0, thisLen, otherBytes, 0, otherLen); } @SuppressWarnings("unchecked") @Override public boolean equals(Object obj) { if (obj == null) { return false; } BinaryWritable<M> other; try { other = (BinaryWritable<M>)obj; } catch (ClassCastException e) { return false; } return compareTo(other) == 0; } /** * <p>Returns a hashCode that is based on the serialized bytes. * This makes the hash stable across multiple instances of JVMs. * (<code>hashCode()</code> is not required to return the same value in * different instances of the same applications in Java, just in a * single instance of the application; Hadoop imposes a more strict requirement.) * <br> * In addition, it may not be feasible to create a deserialized object from * the serialized bytes (see {@link #setConverter(Class)}) */ @Override public int hashCode() { byte[] bytes = serialize(); return (bytes == null) ? 
31 : Arrays.hashCode(bytes); } @Override public String toString() { M msgObj = null; try { msgObj = get(); } catch (IllegalStateException e) { // It is ok. might not be able to avoid this case in some situations. return super.toString() + "{could not be deserialized}"; } if (msgObj == null) { return super.toString(); } return msgObj.toString(); } }
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.tests.product.hive;

import com.google.common.collect.ImmutableList;
import io.airlift.log.Logger;
import io.trino.tempto.Requirement;
import io.trino.tempto.Requirements;
import io.trino.tempto.RequirementsProvider;
import io.trino.tempto.configuration.Configuration;
import io.trino.tempto.fulfillment.table.MutableTableRequirement;
import io.trino.tempto.fulfillment.table.hive.HiveTableDefinition;
import io.trino.testng.services.Flaky;
import io.trino.tests.product.hive.util.TemporaryHiveTable;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import java.util.List;
import java.util.Optional;

import static com.google.common.collect.Lists.cartesianProduct;
import static io.trino.tempto.assertions.QueryAssert.Row.row;
import static io.trino.tempto.assertions.QueryAssert.anyOf;
import static io.trino.tempto.assertions.QueryAssert.assertQueryFailure;
import static io.trino.tempto.assertions.QueryAssert.assertThat;
import static io.trino.tempto.fulfillment.table.MutableTableRequirement.State.CREATED;
import static io.trino.tempto.fulfillment.table.MutableTableRequirement.State.PREPARED;
import static io.trino.tempto.fulfillment.table.MutableTablesState.mutableTablesState;
import static io.trino.tempto.fulfillment.table.TableRequirements.immutableTable;
import static io.trino.tempto.fulfillment.table.hive.tpch.TpchTableDefinitions.NATION;
import static io.trino.tempto.query.QueryExecutor.param;
import static io.trino.tempto.query.QueryExecutor.query;
import static io.trino.tests.product.TestGroups.BIG_QUERY;
import static io.trino.tests.product.TpchTableResults.PRESTO_NATION_RESULT;
import static io.trino.tests.product.hive.BucketingType.BUCKETED_DEFAULT;
import static io.trino.tests.product.hive.BucketingType.BUCKETED_V1;
import static io.trino.tests.product.hive.BucketingType.BUCKETED_V2;
import static io.trino.tests.product.hive.util.TemporaryHiveTable.randomTableSuffix;
import static io.trino.tests.product.hive.util.TemporaryHiveTable.temporaryHiveTable;
import static io.trino.tests.product.utils.QueryExecutors.onHive;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
import static io.trino.tests.product.utils.TableDefinitionUtils.mutableTableInstanceOf;
import static java.lang.String.format;
import static java.lang.String.join;
import static java.sql.JDBCType.VARCHAR;
import static java.util.stream.Collectors.joining;

/**
 * Product tests for Trino reads and writes against Hive bucketed tables:
 * bucketed/sorted/partitioned variants, bucketing version handling (v1/v2),
 * and rejection of bucketing on data types Trino cannot hash.
 *
 * NOTE(review): the ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE / _MATCH constants
 * used by the {@code @Flaky} annotations are not declared or imported here --
 * presumably inherited from {@link HiveProductTest}; confirm in the superclass.
 */
public class TestHiveBucketedTables
        extends HiveProductTest
        implements RequirementsProvider
{
    private static final Logger log = Logger.get(TestHiveBucketedTables.class);

    public static final HiveTableDefinition BUCKETED_NATION = bucketTableDefinition("bucket_nation", false, false);

    // Placeholder definition: the DDL template is intentionally not valid SQL,
    // so this table can only be materialized through CTAS in the tests below.
    public static final HiveTableDefinition BUCKETED_NATION_PREPARED = HiveTableDefinition.builder("bucket_nation_prepared")
            .setCreateTableDDLTemplate("Table %NAME% should be only used with CTAS queries")
            .setNoData()
            .build();

    public static final HiveTableDefinition BUCKETED_SORTED_NATION = bucketTableDefinition("bucketed_sorted_nation", true, false);

    public static final HiveTableDefinition BUCKETED_PARTITIONED_NATION = bucketTableDefinition("bucketed_partitioned_nation", false, true);

    // Builds a nation-shaped Hive table bucketed by n_regionkey into 2 buckets,
    // optionally sorted and/or partitioned, pinned to bucketing_version 1.
    private static HiveTableDefinition bucketTableDefinition(String tableName, boolean sorted, boolean partitioned)
    {
        return HiveTableDefinition.builder(tableName)
                .setCreateTableDDLTemplate("CREATE TABLE %NAME%(" +
                        "n_nationkey BIGINT," +
                        "n_name STRING," +
                        "n_regionkey BIGINT," +
                        "n_comment STRING) " +
                        (partitioned ? "PARTITIONED BY (part_key STRING) " : " ") +
                        "CLUSTERED BY (n_regionkey) " +
                        (sorted ? "SORTED BY (n_regionkey) " : " ") +
                        "INTO 2 BUCKETS " +
                        "ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' " +
                        "TBLPROPERTIES ('bucketing_version'='1')")
                .setNoData()
                .build();
    }

    @Override
    public Requirement getRequirements(Configuration configuration)
    {
        return Requirements.compose(
                MutableTableRequirement.builder(BUCKETED_PARTITIONED_NATION).withState(CREATED).build(),
                MutableTableRequirement.builder(BUCKETED_NATION).withState(CREATED).build(),
                MutableTableRequirement.builder(BUCKETED_NATION_PREPARED).withState(PREPARED).build(),
                MutableTableRequirement.builder(BUCKETED_SORTED_NATION).withState(CREATED).build(),
                immutableTable(NATION));
    }

    @Test
    @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
    public void testSelectStar()
    {
        String tableName = mutableTableInstanceOf(BUCKETED_NATION).getNameInDatabase();
        populateHiveTable(tableName, NATION.getName());
        assertThat(query("SELECT * FROM " + tableName)).matches(PRESTO_NATION_RESULT);
    }

    @Test(groups = BIG_QUERY)
    @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
    public void testIgnorePartitionBucketingIfNotBucketed()
    {
        String tableName = mutableTablesState().get(BUCKETED_PARTITIONED_NATION).getNameInDatabase();
        populateHivePartitionedTable(tableName, NATION.getName(), "part_key = 'insert_1'");
        populateHivePartitionedTable(tableName, NATION.getName(), "part_key = 'insert_2'");

        // Drop the bucketing metadata; existing bucketed files must still be readable.
        onHive().executeQuery(format("ALTER TABLE %s NOT CLUSTERED", tableName));

        assertThat(query(format("SELECT count(DISTINCT n_nationkey), count(*) FROM %s", tableName)))
                .hasRowsCount(1)
                .contains(row(25, 50));

        assertThat(query(format("SELECT count(*) FROM %s WHERE n_nationkey = 1", tableName)))
                .containsExactlyInOrder(row(2));
    }

    @Test(groups = BIG_QUERY)
    @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
    public void testAllowMultipleFilesPerBucket()
    {
        String tableName = mutableTablesState().get(BUCKETED_PARTITIONED_NATION).getNameInDatabase();
        // Each insert produces its own set of bucket files within the partition.
        for (int i = 0; i < 3; i++) {
            populateHivePartitionedTable(tableName, NATION.getName(), "part_key = 'insert'");
        }

        assertThat(query(format("SELECT count(DISTINCT n_nationkey), count(*) FROM %s", tableName)))
                .hasRowsCount(1)
                .contains(row(25, 75));

        assertThat(query(format("SELECT count(*) FROM %s WHERE n_nationkey = 1", tableName)))
                .containsExactlyInOrder(row(3));
    }

    @Test
    @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
    public void testSelectAfterMultipleInserts()
    {
        String tableName = mutableTableInstanceOf(BUCKETED_NATION).getNameInDatabase();
        populateHiveTable(tableName, NATION.getName());
        populateHiveTable(tableName, NATION.getName());

        assertThat(query(format("SELECT count(*) FROM %s WHERE n_nationkey = 1", tableName)))
                .containsExactlyInOrder(row(2));
        assertThat(query(format("SELECT count(*) FROM %s WHERE n_regionkey = 1", tableName)))
                .containsExactlyInOrder(row(10));
        assertThat(query(format("SELECT n_regionkey, count(*) FROM %s GROUP BY n_regionkey", tableName)))
                .containsOnly(row(0, 10), row(1, 10), row(2, 10), row(3, 10), row(4, 10));
        assertThat(query(format("SELECT count(*) FROM %s n JOIN %s n1 ON n.n_regionkey = n1.n_regionkey", tableName, tableName)))
                .containsExactlyInOrder(row(500));
    }

    @Test
    @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
    public void testSelectAfterMultipleInsertsForSortedTable()
    {
        String tableName = mutableTableInstanceOf(BUCKETED_SORTED_NATION).getNameInDatabase();
        populateHiveTable(tableName, NATION.getName());
        populateHiveTable(tableName, NATION.getName());

        assertThat(query(format("SELECT count(*) FROM %s WHERE n_nationkey = 1", tableName)))
                .containsExactlyInOrder(row(2));
        assertThat(query(format("SELECT count(*) FROM %s WHERE n_regionkey = 1", tableName)))
                .containsExactlyInOrder(row(10));
        assertThat(query(format("SELECT n_regionkey, count(*) FROM %s GROUP BY n_regionkey", tableName)))
                .containsOnly(row(0, 10), row(1, 10), row(2, 10), row(3, 10), row(4, 10));
        assertThat(query(format("SELECT count(*) FROM %s n JOIN %s n1 ON n.n_regionkey = n1.n_regionkey", tableName, tableName)))
                .containsExactlyInOrder(row(500));
    }

    @Test
    @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
    public void testSelectAfterMultipleInsertsForPartitionedTable()
    {
        String tableName = mutableTableInstanceOf(BUCKETED_PARTITIONED_NATION).getNameInDatabase();
        populateHivePartitionedTable(tableName, NATION.getName(), "part_key = 'insert_1'");
        populateHivePartitionedTable(tableName, NATION.getName(), "part_key = 'insert_2'");
        populateHivePartitionedTable(tableName, NATION.getName(), "part_key = 'insert_1'");
        populateHivePartitionedTable(tableName, NATION.getName(), "part_key = 'insert_2'");

        assertThat(query(format("SELECT count(*) FROM %s WHERE n_nationkey = 1", tableName)))
                .containsExactlyInOrder(row(4));
        assertThat(query(format("SELECT count(*) FROM %s WHERE n_regionkey = 1", tableName)))
                .containsExactlyInOrder(row(20));
        assertThat(query(format("SELECT count(*) FROM %s WHERE n_regionkey = 1 AND part_key = 'insert_1'", tableName)))
                .hasRowsCount(1)
                .containsExactlyInOrder(row(10));
        assertThat(query(format("SELECT n_regionkey, count(*) FROM %s WHERE part_key = 'insert_2' GROUP BY n_regionkey", tableName)))
                .containsOnly(row(0, 10), row(1, 10), row(2, 10), row(3, 10), row(4, 10));
        assertThat(query(format("SELECT count(*) FROM %s n JOIN %s n1 ON n.n_regionkey = n1.n_regionkey", tableName, tableName)))
                .containsExactlyInOrder(row(2000));
        assertThat(query(format("SELECT count(*) FROM %s n JOIN %s n1 ON n.n_regionkey = n1.n_regionkey WHERE n.part_key = 'insert_1'", tableName, tableName)))
                .containsExactlyInOrder(row(1000));
    }

    @Test
    @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
    public void testSelectFromEmptyBucketedTableEmptyTablesAllowed()
    {
        String tableName = mutableTableInstanceOf(BUCKETED_NATION).getNameInDatabase();
        assertThat(query(format("SELECT count(*) FROM %s", tableName)))
                .containsExactlyInOrder(row(0));
    }

    @Test
    @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
    public void testSelectFromIncompleteBucketedTableEmptyTablesAllowed()
    {
        String tableName = mutableTableInstanceOf(BUCKETED_NATION).getNameInDatabase();
        populateRowToHiveTable(tableName, ImmutableList.of("2", "'name'", "2", "'comment'"), Optional.empty());
        // insert one row into nation
        assertThat(query(format("SELECT count(*) from %s", tableName)))
                .containsExactlyInOrder(row(1));
        assertThat(query(format("select n_nationkey from %s where n_regionkey = 2", tableName)))
                .containsExactlyInOrder(row(2));
    }

    @Test
    @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
    public void testInsertPartitionedBucketed()
    {
        String tableName = mutableTablesState().get(BUCKETED_NATION_PREPARED).getNameInDatabase();

        String ctasQuery = "CREATE TABLE %s WITH (bucket_count = 4, bucketed_by = ARRAY['n_regionkey'], partitioned_by = ARRAY['part_key']) " +
                "AS SELECT n_nationkey, n_name, n_regionkey, n_comment, n_name as part_key FROM %s";
        query(format(ctasQuery, tableName, NATION.getName()));

        assertThat(query(format("SELECT count(*) FROM %s", tableName))).containsExactlyInOrder(row(25));
        assertThat(query(format("SELECT count(*) FROM %s WHERE n_regionkey=0", tableName))).containsExactlyInOrder(row(5));
        assertThat(query(format("SELECT count(*) FROM %s WHERE part_key='ALGERIA'", tableName))).containsExactlyInOrder(row(1));
        assertThat(query(format("SELECT count(*) FROM %s WHERE n_regionkey=0 AND part_key='ALGERIA'", tableName))).containsExactlyInOrder(row(1));
    }

    @Test
    @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
    public void testCreatePartitionedBucketedTableAsSelect()
    {
        String tableName = mutableTablesState().get(BUCKETED_PARTITIONED_NATION).getNameInDatabase();

        query(format("INSERT INTO %s SELECT n_nationkey, n_name, n_regionkey, n_comment, n_name FROM %s", tableName, NATION.getName()));

        assertThat(query(format("SELECT count(*) FROM %s", tableName))).containsExactlyInOrder(row(25));
        assertThat(query(format("SELECT count(*) FROM %s WHERE n_regionkey=0", tableName))).containsExactlyInOrder(row(5));
        assertThat(query(format("SELECT count(*) FROM %s WHERE part_key='ALGERIA'", tableName))).containsExactlyInOrder(row(1));
        assertThat(query(format("SELECT count(*) FROM %s WHERE n_regionkey=0 AND part_key='ALGERIA'", tableName))).containsExactlyInOrder(row(1));
    }

    @Test
    @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
    public void testInsertIntoBucketedTables()
    {
        String tableName = mutableTablesState().get(BUCKETED_NATION).getNameInDatabase();

        query(format("INSERT INTO %s SELECT * FROM %s", tableName, NATION.getName()));
        // make sure that insert will not overwrite existing data
        query(format("INSERT INTO %s SELECT * FROM %s", tableName, NATION.getName()));

        assertThat(query(format("SELECT count(*) FROM %s", tableName))).containsExactlyInOrder(row(50));
        assertThat(query(format("SELECT count(*) FROM %s WHERE n_regionkey=0", tableName))).containsExactlyInOrder(row(10));
    }

    @Test
    @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
    public void testCreateBucketedTableAsSelect()
    {
        String tableName = mutableTablesState().get(BUCKETED_NATION_PREPARED).getNameInDatabase();

        // nations has 25 rows and NDV=5 for n_regionkey, setting bucket_count=10 will surely create empty buckets
        query(format("CREATE TABLE %s WITH (bucket_count = 10, bucketed_by = ARRAY['n_regionkey']) AS SELECT * FROM %s", tableName, NATION.getName()));

        assertThat(query(format("SELECT * FROM %s", tableName))).matches(PRESTO_NATION_RESULT);
        assertThat(query(format("SELECT count(*) FROM %s WHERE n_regionkey=0", tableName))).containsExactlyInOrder(row(5));
    }

    @Test
    @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
    public void testBucketingVersion()
    {
        String value = "Trino rocks";
        // Hive bucketing v1 and v2 hash the same value into different buckets,
        // which shows up as different bucket file names.
        String bucketV1 = "000002_0";
        String bucketV2Standard = "000001_0";
        String bucketV2DirectInsert = "bucket_00001";

        List<String> bucketV1NameOptions = ImmutableList.of(bucketV1);
        List<String> bucketV2NameOptions = ImmutableList.of(bucketV2Standard, bucketV2DirectInsert);

        testBucketingVersion(BUCKETED_DEFAULT, value, false, (getHiveVersionMajor() < 3) ? bucketV1NameOptions : bucketV2NameOptions);
        testBucketingVersion(BUCKETED_DEFAULT, value, true, (getHiveVersionMajor() < 3) ? bucketV1NameOptions : bucketV2NameOptions);
        testBucketingVersion(BUCKETED_V1, value, false, bucketV1NameOptions);
        testBucketingVersion(BUCKETED_V1, value, true, bucketV1NameOptions);
        if (getHiveVersionMajor() >= 3) {
            testBucketingVersion(BUCKETED_V2, value, false, bucketV2NameOptions);
            testBucketingVersion(BUCKETED_V2, value, true, bucketV2NameOptions);
        }
    }

    @Test(dataProvider = "testBucketingWithUnsupportedDataTypesDataProvider")
    @Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
    public void testBucketingWithUnsupportedDataTypes(BucketingType bucketingType, String columnToBeBucketed)
    {
        try (TemporaryHiveTable table = temporaryHiveTable("table_with_unsupported_bucketing_types_" + randomTableSuffix())) {
            String tableName = table.getName();
            onHive().executeQuery(format("CREATE TABLE %s (" +
                            "n_integer       INT," +
                            "n_decimal       DECIMAL(9, 2)," +
                            "n_timestamp     TIMESTAMP," +
                            "n_char          CHAR(10)," +
                            "n_binary        BINARY," +
                            "n_union         UNIONTYPE<INT,STRING>," +
                            "n_struct        STRUCT<field1:INT,field2:STRING>) " +
                            "CLUSTERED BY (%s) INTO 2 BUCKETS " +
                            "STORED AS ORC " +
                            "%s",
                    tableName,
                    columnToBeBucketed,
                    hiveTableProperties(bucketingType)));

            assertThat(onTrino().executeQuery("SHOW CREATE TABLE " + tableName))
                    .containsOnly(row(format(
                            "CREATE TABLE hive.default.%s (\n" +
                                    "   n_integer integer,\n" +
                                    "   n_decimal decimal(9, 2),\n" +
                                    "   n_timestamp timestamp(3),\n" +
                                    "   n_char char(10),\n" +
                                    "   n_binary varbinary,\n" +
                                    "   n_union ROW(tag tinyint, field0 integer, field1 varchar),\n" +
                                    "   n_struct ROW(field1 integer, field2 varchar)\n" +
                                    ")\n" +
                                    "WITH (\n" +
                                    "   bucket_count = 2,\n" +
                                    "   bucketed_by = ARRAY['%s'],\n" +
                                    "   bucketing_version = %s,\n" +
                                    "   format = 'ORC',\n" +
                                    "   sorted_by = ARRAY[]\n" +
                                    ")",
                            tableName,
                            columnToBeBucketed,
                            getExpectedBucketVersion(bucketingType))));

            populateRowToHiveTable(
                    tableName,
                    ImmutableList.<String>builder()
                            .add("1")
                            .add("CAST(1 AS DECIMAL(9, 2))")
                            .add("CAST('2015-01-01T00:01:00.15' AS TIMESTAMP)")
                            .add("'char value'")
                            .add("unhex('00010203')")
                            .add("create_union(0, 1, 'union value')")
                            .add("named_struct('field1', 1, 'field2', 'Field2')")
                            .build(),
                    Optional.empty());

            // Reading is allowed; only writes and $bucket access are rejected.
            assertThat(onTrino().executeQuery(format("SELECT * FROM %s", tableName)))
                    .hasRowsCount(1);

            assertQueryFailure(() -> onTrino().executeQuery("SELECT \"$bucket\" FROM " + tableName))
                    .hasMessageMatching("Query failed \\(#\\w+\\):\\Q line 1:8: Column '$bucket' cannot be resolved");

            assertQueryFailure(() -> onTrino().executeQuery(format("INSERT INTO %s(n_integer) VALUES (1)", tableName)))
                    .hasMessageMatching("Query failed \\(#\\w+\\): Cannot write to a table bucketed on an unsupported type");

            String newTableName = "new_" + tableName;

            // TODO Trino should reject if the user specifies to bucket on unsupported column type (https://github.com/trinodb/trino/issues/9094)
            onTrino().executeQuery(format("CREATE TABLE %s (LIKE %s INCLUDING PROPERTIES)", newTableName, tableName));

            assertQueryFailure(() -> onTrino().executeQuery("SELECT \"$bucket\" FROM " + newTableName))
                    .hasMessageMatching("Query failed \\(#\\w+\\):\\Q line 1:8: Column '$bucket' cannot be resolved");

            assertQueryFailure(() -> onTrino().executeQuery(format("INSERT INTO %s(n_integer) VALUES (1)", newTableName)))
                    .hasMessageMatching("Query failed \\(#\\w+\\): Cannot write to a table bucketed on an unsupported type");

            onTrino().executeQuery("DROP TABLE " + newTableName);

            assertQueryFailure(() -> onTrino()
                    .executeQuery(format(
                            "CREATE TABLE %s WITH (%s) AS SELECT * FROM %s",
                            newTableName,
                            bucketingType.getTrinoTableProperties(columnToBeBucketed, 2).stream().collect(joining(",")),
                            tableName)))
                    .hasMessageMatching("Query failed \\(#\\w+\\): Cannot write to a table bucketed on an unsupported type");
        }
    }

    @DataProvider
    public static Object[][] testBucketingWithUnsupportedDataTypesDataProvider()
    {
        // Cross product of every bucketing version with every unsupported column.
        return cartesianProduct(
                ImmutableList.of(BUCKETED_DEFAULT, BUCKETED_V1, BUCKETED_V2),
                ImmutableList.<String>builder()
                        .add("n_decimal")
                        .add("n_timestamp")
                        .add("n_char")
                        .add("n_binary")
                        .add("n_union")
                        .add("n_struct")
                        .build()).stream()
                .map(List::toArray)
                .toArray(Object[][]::new);
    }

    // Inserts a single value via Hive or Trino and verifies the bucket file it
    // lands in has one of the expected names for the bucketing version in use.
    private void testBucketingVersion(BucketingType bucketingType, String value, boolean insertWithTrino, List<String> expectedFileNameOptions)
    {
        log.info("Testing with bucketingType=%s, value='%s', insertWithTrino=%s, expectedFileNamePossibilites=%s",
                bucketingType, value, insertWithTrino, expectedFileNameOptions);

        onHive().executeQuery("DROP TABLE IF EXISTS test_bucketing_version");
        onHive().executeQuery("" +
                "CREATE TABLE test_bucketing_version(a string) " +
                bucketingType.getHiveClustering("a", 4) + " " +
                "STORED AS ORC " +
                hiveTableProperties(bucketingType));

        if (insertWithTrino) {
            onTrino().executeQuery("INSERT INTO test_bucketing_version(a) VALUES (?)", param(VARCHAR, value));
        }
        else {
            onHive().executeQuery("SET hive.enforce.bucketing = true");
            onHive().executeQuery("INSERT INTO test_bucketing_version(a) VALUES ('" + value + "')");
        }

        assertThat(onTrino().executeQuery("SELECT a, regexp_extract(\"$path\", '^.*/([^_/]+_[^_/]+)(_[^/]+)?$', 1) FROM test_bucketing_version"))
                .containsOnly(row(value, anyOf(expectedFileNameOptions.toArray())));
    }

    // Renders TBLPROPERTIES for the given bucketing type; transactional tables
    // are explicitly disabled for these tests.
    private String hiveTableProperties(BucketingType bucketingType)
    {
        ImmutableList.Builder<String> tableProperties = ImmutableList.builder();
        tableProperties.add("'transactional'='false'");
        tableProperties.addAll(bucketingType.getHiveTableProperties());
        return "TBLPROPERTIES(" + join(",", tableProperties.build()) + ")";
    }

    private String getExpectedBucketVersion(BucketingType bucketingType)
    {
        switch (bucketingType) {
            case BUCKETED_DEFAULT:
                // Hive 3+ defaults to bucketing version 2.
                return getHiveVersionMajor() < 3 ? "1" : "2";
            case BUCKETED_V1:
                return "1";
            case BUCKETED_V2:
                return "2";
            default:
                throw new UnsupportedOperationException("Not supported for " + bucketingType);
        }
    }

    // Inserts a single literal row through Hive, optionally into a partition.
    private static void populateRowToHiveTable(String destination, List<String> values, Optional<String> partition)
    {
        String queryStatement = "INSERT INTO TABLE " + destination +
                (partition.isPresent() ? format(" PARTITION (%s) ", partition.get()) : " ") +
                "SELECT " + join(",", values) + " FROM (SELECT 'foo') x";

        onHive().executeQuery("set hive.enforce.bucketing = true");
        onHive().executeQuery("set hive.enforce.sorting = true");
        onHive().executeQuery(queryStatement);
    }

    // Copies all rows of source into one partition of destination through Hive.
    private static void populateHivePartitionedTable(String destination, String source, String partition)
    {
        String queryStatement = format("INSERT INTO TABLE %s PARTITION (%s) SELECT * FROM %s", destination, partition, source);

        onHive().executeQuery("set hive.enforce.bucketing = true");
        onHive().executeQuery("set hive.enforce.sorting = true");
        onHive().executeQuery(queryStatement);
    }

    // Copies all rows of source into destination through Hive with bucketing enforced.
    private static void populateHiveTable(String destination, String source)
    {
        onHive().executeQuery("set hive.enforce.bucketing = true");
        onHive().executeQuery("set hive.enforce.sorting = true");
        onHive().executeQuery(format("INSERT INTO TABLE %s SELECT * FROM %s", destination, source));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.blob;

import org.apache.flink.api.common.JobID;
import org.apache.flink.configuration.BlobServerOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.Path;
import org.apache.flink.core.testutils.CheckedThread;
import org.apache.flink.runtime.concurrent.FutureUtils;
import org.apache.flink.util.FlinkException;
import org.apache.flink.util.OperatingSystem;
import org.apache.flink.util.Preconditions;
import org.apache.flink.util.TestLogger;

import org.apache.commons.io.FileUtils;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;

import javax.annotation.Nullable;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.nio.file.AccessDeniedException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import static org.apache.flink.runtime.blob.BlobClientTest.validateGetAndClose;
import static org.apache.flink.runtime.blob.BlobKey.BlobType.PERMANENT_BLOB;
import static org.apache.flink.runtime.blob.BlobKey.BlobType.TRANSIENT_BLOB;
import static org.apache.flink.runtime.blob.BlobKeyTest.verifyKeyDifferentHashEquals;
import static org.apache.flink.runtime.blob.BlobServerGetTest.get;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

/**
 * Tests for successful and failing PUT operations against the BLOB server,
 * and successful GET operations.
 */
public class BlobServerPutTest extends TestLogger {

	private final Random rnd = new Random();

	@Rule
	public TemporaryFolder temporaryFolder = new TemporaryFolder();

	@Rule
	public final ExpectedException exception = ExpectedException.none();

	// --- concurrency tests for utility methods which could fail during the put operation ---

	/**
	 * Checked thread that calls {@link BlobServer#getStorageLocation(JobID, BlobKey)}.
	 */
	public static class ContentAddressableGetStorageLocation extends CheckedThread {
		private final BlobServer server;
		private final JobID jobId;
		private final BlobKey key;

		ContentAddressableGetStorageLocation(
				BlobServer server, @Nullable JobID jobId, BlobKey key) {
			this.server = server;
			this.jobId = jobId;
			this.key = key;
		}

		@Override
		public void go() throws Exception {
			server.getStorageLocation(jobId, key);
		}
	}

	/**
	 * Tests concurrent calls to {@link BlobServer#getStorageLocation(JobID, BlobKey)}.
	 */
	@Test
	public void testServerContentAddressableGetStorageLocationConcurrentNoJob() throws Exception {
		testServerContentAddressableGetStorageLocationConcurrent(null);
	}

	/**
	 * Tests concurrent calls to {@link BlobServer#getStorageLocation(JobID, BlobKey)}.
	 */
	@Test
	public void testServerContentAddressableGetStorageLocationConcurrentForJob() throws Exception {
		testServerContentAddressableGetStorageLocationConcurrent(new JobID());
	}

	// Races six threads (three per key) against getStorageLocation for the
	// given job (null = job-less transient storage namespace).
	private void testServerContentAddressableGetStorageLocationConcurrent(
			@Nullable final JobID jobId) throws Exception {
		final Configuration config = new Configuration();
		config.setString(BlobServerOptions.STORAGE_DIRECTORY,
			temporaryFolder.newFolder().getAbsolutePath());

		try (BlobServer server = new BlobServer(config, new VoidBlobStore())) {
			server.start();

			BlobKey key1 = new TransientBlobKey();
			BlobKey key2 = new PermanentBlobKey();
			CheckedThread[] threads = new CheckedThread[] {
				new ContentAddressableGetStorageLocation(server, jobId, key1),
				new ContentAddressableGetStorageLocation(server, jobId, key1),
				new ContentAddressableGetStorageLocation(server, jobId, key1),
				new ContentAddressableGetStorageLocation(server, jobId, key2),
				new ContentAddressableGetStorageLocation(server, jobId, key2),
				new ContentAddressableGetStorageLocation(server, jobId, key2)
			};
			checkedThreadSimpleTest(threads);
		}
	}

	/**
	 * Helper method to first start all threads and then wait for their completion.
	 *
	 * @param threads threads to use
	 * @throws Exception exceptions that are thrown from the threads
	 */
	private void checkedThreadSimpleTest(CheckedThread[] threads)
		throws Exception {

		// start all threads
		for (CheckedThread t: threads) {
			t.start();
		}

		// wait for thread completion and check exceptions
		for (CheckedThread t: threads) {
			t.sync();
		}
	}

	// --------------------------------------------------------------------------------------------

	@Test
	public void testPutBufferSuccessfulGet1() throws IOException {
		testPutBufferSuccessfulGet(null, null, TRANSIENT_BLOB);
	}

	@Test
	public void testPutBufferSuccessfulGet2() throws IOException {
		testPutBufferSuccessfulGet(null, new JobID(), TRANSIENT_BLOB);
	}

	@Test
	public void testPutBufferSuccessfulGet3() throws IOException {
		testPutBufferSuccessfulGet(new JobID(), new JobID(), TRANSIENT_BLOB);
	}

	@Test
	public void testPutBufferSuccessfulGet4() throws IOException {
		testPutBufferSuccessfulGet(new JobID(), null, TRANSIENT_BLOB);
	}

	@Test
	public void testPutBufferSuccessfulGetHa() throws IOException {
		testPutBufferSuccessfulGet(new JobID(), new JobID(), PERMANENT_BLOB);
	}

	/**
	 * Uploads two byte arrays for different jobs into the server via the {@link BlobServer}. File
	 * transfers should be successful.
	 *
	 * @param jobId1
	 * 		first job id
	 * @param jobId2
	 * 		second job id
	 * @param blobType
	 * 		whether the BLOB should become permanent or transient
	 */
	private void testPutBufferSuccessfulGet(
			@Nullable JobID jobId1, @Nullable JobID jobId2, BlobKey.BlobType blobType)
			throws IOException {

		final Configuration config = new Configuration();
		config.setString(BlobServerOptions.STORAGE_DIRECTORY,
			temporaryFolder.newFolder().getAbsolutePath());

		try (BlobServer server = new BlobServer(config, new VoidBlobStore())) {
			server.start();

			byte[] data = new byte[2000000];
			rnd.nextBytes(data);
			byte[] data2 = Arrays.copyOfRange(data, 10, 54);

			// put data for jobId1 and verify
			// NOTE(review): put() and verifyContents() appear to be helpers
			// defined elsewhere in this file (beyond this excerpt) -- confirm.
			BlobKey key1a = put(server, jobId1, data, blobType);
			assertNotNull(key1a);
			// second upload of same data should yield a different BlobKey
			BlobKey key1a2 = put(server, jobId1, data, blobType);
			assertNotNull(key1a2);
			verifyKeyDifferentHashEquals(key1a, key1a2);

			BlobKey key1b = put(server, jobId1, data2, blobType);
			assertNotNull(key1b);

			verifyContents(server, jobId1, key1a, data);
			verifyContents(server, jobId1, key1a2, data);
			verifyContents(server, jobId1, key1b, data2);

			// now put data for jobId2 and verify that both are ok
			BlobKey key2a = put(server, jobId2, data, blobType);
			assertNotNull(key2a);
			verifyKeyDifferentHashEquals(key1a, key2a);

			BlobKey key2b = put(server, jobId2, data2, blobType);
			assertNotNull(key2b);
			verifyKeyDifferentHashEquals(key1b, key2b);

			// verify the accessibility and the BLOB contents
			verifyContents(server, jobId2, key2a, data);
			verifyContents(server, jobId2, key2b, data2);

			// verify the accessibility and the BLOB contents one more time (transient BLOBs should
			// not be deleted here)
			verifyContents(server, jobId1, key1a, data);
			verifyContents(server, jobId1, key1a2, data);
			verifyContents(server, jobId1, key1b, data2);
			verifyContents(server, jobId2, key2a, data);
			verifyContents(server, jobId2, key2b, data2);
		}
	}

	// 
-------------------------------------------------------------------------------------------- @Test public void testPutStreamSuccessfulGet1() throws IOException { testPutStreamSuccessfulGet(null, null, TRANSIENT_BLOB); } @Test public void testPutStreamSuccessfulGet2() throws IOException { testPutStreamSuccessfulGet(null, new JobID(), TRANSIENT_BLOB); } @Test public void testPutStreamSuccessfulGet3() throws IOException { testPutStreamSuccessfulGet(new JobID(), new JobID(), TRANSIENT_BLOB); } @Test public void testPutStreamSuccessfulGet4() throws IOException { testPutStreamSuccessfulGet(new JobID(), null, TRANSIENT_BLOB); } @Test public void testPutStreamSuccessfulGetHa() throws IOException { testPutStreamSuccessfulGet(new JobID(), new JobID(), PERMANENT_BLOB); } /** * Uploads two file streams for different jobs into the server via the {@link BlobServer}. File * transfers should be successful. * * @param jobId1 * first job id * @param jobId2 * second job id * @param blobType * whether the BLOB should become permanent or transient */ private void testPutStreamSuccessfulGet( @Nullable JobID jobId1, @Nullable JobID jobId2, BlobKey.BlobType blobType) throws IOException { final Configuration config = new Configuration(); config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath()); try (BlobServer server = new BlobServer(config, new VoidBlobStore())) { server.start(); byte[] data = new byte[2000000]; rnd.nextBytes(data); byte[] data2 = Arrays.copyOfRange(data, 10, 54); // put data for jobId1 and verify BlobKey key1a = put(server, jobId1, new ByteArrayInputStream(data), blobType); assertNotNull(key1a); // second upload of same data should yield a different BlobKey BlobKey key1a2 = put(server, jobId1, new ByteArrayInputStream(data), blobType); assertNotNull(key1a2); verifyKeyDifferentHashEquals(key1a, key1a2); BlobKey key1b = put(server, jobId1, new ByteArrayInputStream(data2), blobType); assertNotNull(key1b); verifyContents(server, 
jobId1, key1a, data); verifyContents(server, jobId1, key1a2, data); verifyContents(server, jobId1, key1b, data2); // now put data for jobId2 and verify that both are ok BlobKey key2a = put(server, jobId2, new ByteArrayInputStream(data), blobType); assertNotNull(key2a); verifyKeyDifferentHashEquals(key1a, key2a); BlobKey key2b = put(server, jobId2, new ByteArrayInputStream(data2), blobType); assertNotNull(key2b); verifyKeyDifferentHashEquals(key1b, key2b); // verify the accessibility and the BLOB contents verifyContents(server, jobId2, key2a, data); verifyContents(server, jobId2, key2b, data2); // verify the accessibility and the BLOB contents one more time (transient BLOBs should // not be deleted here) verifyContents(server, jobId1, key1a, data); verifyContents(server, jobId1, key1a2, data); verifyContents(server, jobId1, key1b, data2); verifyContents(server, jobId2, key2a, data); verifyContents(server, jobId2, key2b, data2); } } // -------------------------------------------------------------------------------------------- @Test public void testPutChunkedStreamSuccessfulGet1() throws IOException { testPutChunkedStreamSuccessfulGet(null, null, TRANSIENT_BLOB); } @Test public void testPutChunkedStreamSuccessfulGet2() throws IOException { testPutChunkedStreamSuccessfulGet(null, new JobID(), TRANSIENT_BLOB); } @Test public void testPutChunkedStreamSuccessfulGet3() throws IOException { testPutChunkedStreamSuccessfulGet(new JobID(), new JobID(), TRANSIENT_BLOB); } @Test public void testPutChunkedStreamSuccessfulGet4() throws IOException { testPutChunkedStreamSuccessfulGet(new JobID(), null, TRANSIENT_BLOB); } @Test public void testPutChunkedStreamSuccessfulGetHa() throws IOException { testPutChunkedStreamSuccessfulGet(new JobID(), new JobID(), PERMANENT_BLOB); } /** * Uploads two chunked file streams for different jobs into the server via the {@link * BlobServer}. File transfers should be successful. 
* * @param jobId1 * first job id * @param jobId2 * second job id * @param blobType * whether the BLOB should become permanent or transient */ private void testPutChunkedStreamSuccessfulGet( @Nullable JobID jobId1, @Nullable JobID jobId2, BlobKey.BlobType blobType) throws IOException { final Configuration config = new Configuration(); config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath()); try (BlobServer server = new BlobServer(config, new VoidBlobStore())) { server.start(); byte[] data = new byte[2000000]; rnd.nextBytes(data); byte[] data2 = Arrays.copyOfRange(data, 10, 54); // put data for jobId1 and verify BlobKey key1a = put(server, jobId1, new ChunkedInputStream(data, 19), blobType); assertNotNull(key1a); // second upload of same data should yield a different BlobKey BlobKey key1a2 = put(server, jobId1, new ChunkedInputStream(data, 19), blobType); assertNotNull(key1a2); verifyKeyDifferentHashEquals(key1a, key1a2); BlobKey key1b = put(server, jobId1, new ChunkedInputStream(data2, 19), blobType); assertNotNull(key1b); verifyContents(server, jobId1, key1a, data); verifyContents(server, jobId1, key1a2, data); verifyContents(server, jobId1, key1b, data2); // now put data for jobId2 and verify that both are ok BlobKey key2a = put(server, jobId2, new ChunkedInputStream(data, 19), blobType); assertNotNull(key2a); verifyKeyDifferentHashEquals(key1a, key2a); BlobKey key2b = put(server, jobId2, new ChunkedInputStream(data2, 19), blobType); assertNotNull(key2b); verifyKeyDifferentHashEquals(key1b, key2b); // verify the accessibility and the BLOB contents verifyContents(server, jobId2, key2a, data); verifyContents(server, jobId2, key2b, data2); // verify the accessibility and the BLOB contents one more time (transient BLOBs should // not be deleted here) verifyContents(server, jobId1, key1a, data); verifyContents(server, jobId1, key1a2, data); verifyContents(server, jobId1, key1b, data2); verifyContents(server, jobId2, key2a, 
data); verifyContents(server, jobId2, key2b, data2); } } // -------------------------------------------------------------------------------------------- @Test public void testPutBufferFailsNoJob() throws IOException { testPutBufferFails(null, TRANSIENT_BLOB); } @Test public void testPutBufferFailsForJob() throws IOException { testPutBufferFails(new JobID(), TRANSIENT_BLOB); } @Test public void testPutBufferFailsForJobHa() throws IOException { testPutBufferFails(new JobID(), PERMANENT_BLOB); } /** * Uploads a byte array to a server which cannot create any files via the {@link BlobServer}. * File transfers should fail. * * @param jobId * job id * @param blobType * whether the BLOB should become permanent or transient */ private void testPutBufferFails(@Nullable final JobID jobId, BlobKey.BlobType blobType) throws IOException { assumeTrue(!OperatingSystem.isWindows()); //setWritable doesn't work on Windows. final Configuration config = new Configuration(); config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath()); File tempFileDir = null; try (BlobServer server = new BlobServer(config, new VoidBlobStore())) { server.start(); // make sure the blob server cannot create any files in its storage dir tempFileDir = server.createTemporaryFilename().getParentFile().getParentFile(); assertTrue(tempFileDir.setExecutable(true, false)); assertTrue(tempFileDir.setReadable(true, false)); assertTrue(tempFileDir.setWritable(false, false)); byte[] data = new byte[2000000]; rnd.nextBytes(data); // upload the file to the server directly exception.expect(AccessDeniedException.class); put(server, jobId, data, blobType); } finally { // set writable again to make sure we can remove the directory if (tempFileDir != null) { //noinspection ResultOfMethodCallIgnored tempFileDir.setWritable(true, false); } } } @Test public void testPutBufferFailsIncomingNoJob() throws IOException { testPutBufferFailsIncoming(null, TRANSIENT_BLOB); } @Test public void 
testPutBufferFailsIncomingForJob() throws IOException { testPutBufferFailsIncoming(new JobID(), TRANSIENT_BLOB); } @Test public void testPutBufferFailsIncomingForJobHa() throws IOException { testPutBufferFailsIncoming(new JobID(), PERMANENT_BLOB); } /** * Uploads a byte array to a server which cannot create incoming files via the {@link * BlobServer}. File transfers should fail. * * @param jobId * job id * @param blobType * whether the BLOB should become permanent or transient */ private void testPutBufferFailsIncoming(@Nullable final JobID jobId, BlobKey.BlobType blobType) throws IOException { assumeTrue(!OperatingSystem.isWindows()); //setWritable doesn't work on Windows. final Configuration config = new Configuration(); config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath()); File tempFileDir = null; try (BlobServer server = new BlobServer(config, new VoidBlobStore())) { server.start(); // make sure the blob server cannot create any files in its storage dir tempFileDir = server.createTemporaryFilename().getParentFile(); assertTrue(tempFileDir.setExecutable(true, false)); assertTrue(tempFileDir.setReadable(true, false)); assertTrue(tempFileDir.setWritable(false, false)); byte[] data = new byte[2000000]; rnd.nextBytes(data); // upload the file to the server directly exception.expect(IOException.class); exception.expectMessage(" (Permission denied)"); try { put(server, jobId, data, blobType); } finally { File storageDir = tempFileDir.getParentFile(); // only the incoming directory should exist (no job directory!) 
assertArrayEquals(new String[] {"incoming"}, storageDir.list()); } } finally { // set writable again to make sure we can remove the directory if (tempFileDir != null) { //noinspection ResultOfMethodCallIgnored tempFileDir.setWritable(true, false); } } } @Test public void testPutBufferFailsStoreNoJob() throws IOException { testPutBufferFailsStore(null, TRANSIENT_BLOB); } @Test public void testPutBufferFailsStoreForJob() throws IOException { testPutBufferFailsStore(new JobID(), TRANSIENT_BLOB); } @Test public void testPutBufferFailsStoreForJobHa() throws IOException { testPutBufferFailsStore(new JobID(), PERMANENT_BLOB); } /** * Uploads a byte array to a server which cannot move incoming files to the final blob store via * the {@link BlobServer}. File transfers should fail. * * @param jobId * job id * @param blobType * whether the BLOB should become permanent or transient */ private void testPutBufferFailsStore(@Nullable final JobID jobId, BlobKey.BlobType blobType) throws IOException { assumeTrue(!OperatingSystem.isWindows()); //setWritable doesn't work on Windows. 
final Configuration config = new Configuration(); config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath()); File jobStoreDir = null; try (BlobServer server = new BlobServer(config, new VoidBlobStore())) { server.start(); // make sure the blob server cannot create any files in its storage dir jobStoreDir = server.getStorageLocation(jobId, BlobKey.createKey(blobType)).getParentFile(); assertTrue(jobStoreDir.setExecutable(true, false)); assertTrue(jobStoreDir.setReadable(true, false)); assertTrue(jobStoreDir.setWritable(false, false)); byte[] data = new byte[2000000]; rnd.nextBytes(data); // upload the file to the server directly exception.expect(AccessDeniedException.class); try { put(server, jobId, data, blobType); } finally { // there should be no remaining incoming files File incomingFileDir = new File(jobStoreDir.getParent(), "incoming"); assertArrayEquals(new String[] {}, incomingFileDir.list()); // there should be no files in the job directory assertArrayEquals(new String[] {}, jobStoreDir.list()); } } finally { // set writable again to make sure we can remove the directory if (jobStoreDir != null) { //noinspection ResultOfMethodCallIgnored jobStoreDir.setWritable(true, false); } } } @Test public void testConcurrentPutOperationsNoJob() throws IOException, ExecutionException, InterruptedException { testConcurrentPutOperations(null, TRANSIENT_BLOB); } @Test public void testConcurrentPutOperationsForJob() throws IOException, ExecutionException, InterruptedException { testConcurrentPutOperations(new JobID(), TRANSIENT_BLOB); } @Test public void testConcurrentPutOperationsForJobHa() throws IOException, ExecutionException, InterruptedException { testConcurrentPutOperations(new JobID(), PERMANENT_BLOB); } /** * [FLINK-6020] * Tests that concurrent put operations will only upload the file once to the {@link BlobStore} * and that the files are not corrupt at any time. 
* * @param jobId * job ID to use (or <tt>null</tt> if job-unrelated) * @param blobType * whether the BLOB should become permanent or transient */ private void testConcurrentPutOperations( @Nullable final JobID jobId, final BlobKey.BlobType blobType) throws IOException, InterruptedException, ExecutionException { final Configuration config = new Configuration(); config.setString(BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath()); BlobStore blobStore = mock(BlobStore.class); int concurrentPutOperations = 2; int dataSize = 1024; final CountDownLatch countDownLatch = new CountDownLatch(concurrentPutOperations); final byte[] data = new byte[dataSize]; ArrayList<CompletableFuture<BlobKey>> allFutures = new ArrayList<>(concurrentPutOperations); ExecutorService executor = Executors.newFixedThreadPool(concurrentPutOperations); try (final BlobServer server = new BlobServer(config, blobStore)) { server.start(); for (int i = 0; i < concurrentPutOperations; i++) { CompletableFuture<BlobKey> putFuture = CompletableFuture .supplyAsync( () -> { try { BlockingInputStream inputStream = new BlockingInputStream(countDownLatch, data); BlobKey uploadedKey = put(server, jobId, inputStream, blobType); // check the uploaded file's contents (concurrently) verifyContents(server, jobId, uploadedKey, data); return uploadedKey; } catch (IOException e) { throw new CompletionException(new FlinkException( "Could not upload blob.", e)); } }, executor); allFutures.add(putFuture); } FutureUtils.ConjunctFuture<Collection<BlobKey>> conjunctFuture = FutureUtils.combineAll(allFutures); // wait until all operations have completed and check that no exception was thrown Collection<BlobKey> blobKeys = conjunctFuture.get(); Iterator<BlobKey> blobKeyIterator = blobKeys.iterator(); assertTrue(blobKeyIterator.hasNext()); BlobKey blobKey = blobKeyIterator.next(); // make sure that all blob keys are the same while (blobKeyIterator.hasNext()) { verifyKeyDifferentHashEquals(blobKey, 
blobKeyIterator.next()); } // check the uploaded file's contents verifyContents(server, jobId, blobKey, data); // check that we only uploaded the file once to the blob store if (blobType == PERMANENT_BLOB) { verify(blobStore, times(1)).put(any(File.class), eq(jobId), eq(blobKey)); } else { // can't really verify much in the other cases other than that the put operations should // work and not corrupt files verify(blobStore, times(0)).put(any(File.class), eq(jobId), eq(blobKey)); } } finally { executor.shutdownNow(); } } // -------------------------------------------------------------------------------------------- /** * Helper to choose the right {@link BlobServer#putTransient} method. * * @param blobType * whether the BLOB should become permanent or transient * * @return blob key for the uploaded data */ static BlobKey put(BlobService service, @Nullable JobID jobId, InputStream data, BlobKey.BlobType blobType) throws IOException { if (blobType == PERMANENT_BLOB) { if (service instanceof BlobServer) { return ((BlobServer) service).putPermanent(jobId, data); } else { throw new UnsupportedOperationException("uploading streams is only possible at the BlobServer"); } } else if (jobId == null) { return service.getTransientBlobService().putTransient(data); } else { return service.getTransientBlobService().putTransient(jobId, data); } } /** * Helper to choose the right {@link BlobServer#putTransient} method. 
* * @param blobType * whether the BLOB should become permanent or transient * * @return blob key for the uploaded data */ static BlobKey put(BlobService service, @Nullable JobID jobId, byte[] data, BlobKey.BlobType blobType) throws IOException { if (blobType == PERMANENT_BLOB) { if (service instanceof BlobServer) { return ((BlobServer) service).putPermanent(jobId, data); } else { // implement via JAR file upload instead: File tmpFile = Files.createTempFile("blob", ".jar").toFile(); try { FileUtils.writeByteArrayToFile(tmpFile, data); InetSocketAddress serverAddress = new InetSocketAddress("localhost", service.getPort()); // uploading HA BLOBs works on BlobServer only (and, for now, via the BlobClient) Configuration clientConfig = new Configuration(); List<Path> jars = Collections.singletonList(new Path(tmpFile.getAbsolutePath())); List<PermanentBlobKey> keys = BlobClient.uploadFiles(serverAddress, clientConfig, jobId, jars); assertEquals(1, keys.size()); return keys.get(0); } finally { //noinspection ResultOfMethodCallIgnored tmpFile.delete(); } } } else if (jobId == null) { return service.getTransientBlobService().putTransient(data); } else { return service.getTransientBlobService().putTransient(jobId, data); } } /** * GET the data stored at the two keys and check that it is equal to <tt>data</tt>. * * @param blobService * BlobServer to use * @param jobId * job ID or <tt>null</tt> if job-unrelated * @param key * blob key * @param data * expected data */ static void verifyContents( BlobService blobService, @Nullable JobID jobId, BlobKey key, byte[] data) throws IOException { File file = get(blobService, jobId, key); validateGetAndClose(new FileInputStream(file), data); } /** * GET the data stored at the two keys and check that it is equal to <tt>data</tt>. 
* * @param blobService * BlobServer to use * @param jobId * job ID or <tt>null</tt> if job-unrelated * @param key * blob key * @param data * expected data */ static void verifyContents( BlobService blobService, @Nullable JobID jobId, BlobKey key, InputStream data) throws IOException { File file = get(blobService, jobId, key); validateGetAndClose(new FileInputStream(file), data); } // -------------------------------------------------------------------------------------------- static final class BlockingInputStream extends InputStream { private final CountDownLatch countDownLatch; private final byte[] data; private int index = 0; BlockingInputStream(CountDownLatch countDownLatch, byte[] data) { this.countDownLatch = Preconditions.checkNotNull(countDownLatch); this.data = Preconditions.checkNotNull(data); } @Override public int read() throws IOException { countDownLatch.countDown(); try { countDownLatch.await(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new IOException("Blocking operation was interrupted.", e); } if (index >= data.length) { return -1; } else { return data[index++]; } } } // -------------------------------------------------------------------------------------------- static final class ChunkedInputStream extends InputStream { private final byte[][] data; private int x = 0, y = 0; ChunkedInputStream(byte[] data, int numChunks) { this.data = new byte[numChunks][]; int bytesPerChunk = data.length / numChunks; int bytesTaken = 0; for (int i = 0; i < numChunks - 1; i++, bytesTaken += bytesPerChunk) { this.data[i] = new byte[bytesPerChunk]; System.arraycopy(data, bytesTaken, this.data[i], 0, bytesPerChunk); } this.data[numChunks - 1] = new byte[data.length - bytesTaken]; System.arraycopy(data, bytesTaken, this.data[numChunks - 1], 0, this.data[numChunks - 1].length); } @Override public int read() { if (x < data.length) { byte[] curr = data[x]; if (y < curr.length) { byte next = curr[y]; y++; return next; } else { y = 0; x++; 
return read(); } } else { return -1; } } @Override public int read(byte[] b, int off, int len) throws IOException { if (len == 0) { return 0; } if (x < data.length) { byte[] curr = data[x]; if (y < curr.length) { int toCopy = Math.min(len, curr.length - y); System.arraycopy(curr, y, b, off, toCopy); y += toCopy; return toCopy; } else { y = 0; x++; return read(b, off, len); } } else { return -1; } } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.core; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Iterables; import java.util.List; import java.util.UUID; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.coders.CannotProvideCoderException; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.state.TimeDomain; import org.apache.beam.sdk.state.ValueState; import org.apache.beam.sdk.state.WatermarkHoldState; import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.GroupByKey; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.SerializableFunction; import org.apache.beam.sdk.transforms.WithKeys; import org.apache.beam.sdk.transforms.reflect.DoFnInvoker; import org.apache.beam.sdk.transforms.reflect.DoFnInvokers; import org.apache.beam.sdk.transforms.reflect.DoFnSignature; import 
org.apache.beam.sdk.transforms.reflect.DoFnSignatures; import org.apache.beam.sdk.transforms.splittabledofn.RestrictionTracker; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.transforms.windowing.GlobalWindows; import org.apache.beam.sdk.transforms.windowing.TimestampCombiner; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionTuple; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.TupleTagList; import org.apache.beam.sdk.values.WindowingStrategy; import org.joda.time.Instant; /** * A utility transform that executes a <a * href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn} by expanding it into a * network of simpler transforms: * * <ol> * <li>Pair each element with an initial restriction * <li>Split each restriction into sub-restrictions * <li>Assign a unique key to each element/restriction pair * <li>Group by key (so that work is partitioned by key and we can access state/timers) * <li>Process each keyed element/restriction pair with the splittable {@link DoFn}'s {@link * DoFn.ProcessElement} method, using state and timers API. * </ol> * * <p>This transform is intended as a helper for internal use by runners when implementing {@code * ParDo.of(splittable DoFn)}, but not for direct use by pipeline writers. */ @Experimental(Experimental.Kind.SPLITTABLE_DO_FN) public class SplittableParDo<InputT, OutputT, RestrictionT> extends PTransform<PCollection<InputT>, PCollectionTuple> { private final ParDo.MultiOutput<InputT, OutputT> parDo; /** * Creates the transform for the given original multi-output {@link ParDo}. * * @param parDo The splittable {@link ParDo} transform. 
*/
  public SplittableParDo(ParDo.MultiOutput<InputT, OutputT> parDo) {
    checkNotNull(parDo, "parDo must not be null");
    this.parDo = parDo;
    checkArgument(
        DoFnSignatures.getSignature(parDo.getFn().getClass()).processElement().isSplittable(),
        "fn must be a splittable DoFn");
  }

  @Override
  public PCollectionTuple expand(PCollection<InputT> input) {
    return applyTyped(input);
  }

  private PCollectionTuple applyTyped(PCollection<InputT> input) {
    DoFn<InputT, OutputT> fn = parDo.getFn();
    Coder<RestrictionT> restrictionCoder =
        DoFnInvokers.invokerFor(fn)
            .invokeGetRestrictionCoder(input.getPipeline().getCoderRegistry());
    PCollection<KeyedWorkItem<String, ElementAndRestriction<InputT, RestrictionT>>> keyedWorkItems =
        applySplitIntoKeyedWorkItems(input, fn, restrictionCoder);
    return keyedWorkItems.apply(
        "Process",
        new ProcessElements<>(
            fn,
            input.getCoder(),
            restrictionCoder,
            (WindowingStrategy<InputT, ?>) input.getWindowingStrategy(),
            parDo.getSideInputs(),
            parDo.getMainOutputTag(),
            parDo.getAdditionalOutputTags()));
  }

  private static <InputT, OutputT, RestrictionT>
      PCollection<KeyedWorkItem<String, ElementAndRestriction<InputT, RestrictionT>>>
          applySplitIntoKeyedWorkItems(
              PCollection<InputT> input,
              DoFn<InputT, OutputT> fn,
              Coder<RestrictionT> restrictionCoder) {
    Coder<ElementAndRestriction<InputT, RestrictionT>> splitCoder =
        ElementAndRestrictionCoder.of(input.getCoder(), restrictionCoder);

    PCollection<KeyedWorkItem<String, ElementAndRestriction<InputT, RestrictionT>>> keyedWorkItems =
        input
            .apply(
                "Pair with initial restriction",
                ParDo.of(new PairWithRestrictionFn<InputT, OutputT, RestrictionT>(fn)))
            .setCoder(splitCoder)
            .apply("Split restriction", ParDo.of(new SplitRestrictionFn<InputT, RestrictionT>(fn)))
            .setCoder(splitCoder)
            // ProcessFn requires all input elements to be in a single window and have a single
            // element per work item. This must precede the unique keying so each key has a single
            // associated element.
            .apply(
                "Explode windows",
                ParDo.of(new ExplodeWindowsFn<ElementAndRestriction<InputT, RestrictionT>>()))
            .apply(
                "Assign unique key",
                WithKeys.of(new RandomUniqueKeyFn<ElementAndRestriction<InputT, RestrictionT>>()))
            .apply(
                "Group by key",
                new GBKIntoKeyedWorkItems<String, ElementAndRestriction<InputT, RestrictionT>>())
            .setCoder(
                KeyedWorkItemCoder.of(
                    StringUtf8Coder.of(),
                    splitCoder,
                    input.getWindowingStrategy().getWindowFn().windowCoder()));
    checkArgument(
        keyedWorkItems.getWindowingStrategy().getWindowFn() instanceof GlobalWindows,
        "GBKIntoKeyedWorkItems must produce a globally windowed collection, "
            + "but windowing strategy was: %s",
        keyedWorkItems.getWindowingStrategy());
    return keyedWorkItems;
  }

  /**
   * A {@link DoFn} that forces each of its outputs to be in a single window, by indicating to the
   * runner that it observes the window of its input element, so the runner is forced to apply it to
   * each input in a single window and thus its output is also in a single window.
   */
  private static class ExplodeWindowsFn<InputT> extends DoFn<InputT, InputT> {
    @ProcessElement
    public void process(ProcessContext c, BoundedWindow window) {
      // the "window" parameter is deliberately unused: declaring it is what forces
      // per-window invocation by the runner
      c.output(c.element());
    }
  }

  /**
   * Runner-specific primitive {@link GroupByKey GroupByKey-like} {@link PTransform} that produces
   * {@link KeyedWorkItem KeyedWorkItems} so that downstream transforms can access state and timers.
   *
   * <p>Unlike a real {@link GroupByKey}, ignores the input's windowing and triggering strategy and
   * emits output immediately.
   */
  public static class GBKIntoKeyedWorkItems<KeyT, InputT>
      extends PTransform<PCollection<KV<KeyT, InputT>>, PCollection<KeyedWorkItem<KeyT, InputT>>> {
    @Override
    public PCollection<KeyedWorkItem<KeyT, InputT>> expand(PCollection<KV<KeyT, InputT>> input) {
      return PCollection.createPrimitiveOutputInternal(
          input.getPipeline(), WindowingStrategy.globalDefault(), input.isBounded());
    }
  }

  /**
   * Runner-specific primitive {@link PTransform} that invokes the {@link DoFn.ProcessElement}
   * method for a splittable {@link DoFn}.
   */
  public static class ProcessElements<
          InputT, OutputT, RestrictionT, TrackerT extends RestrictionTracker<RestrictionT>>
      extends PTransform<
          PCollection<? extends KeyedWorkItem<String, ElementAndRestriction<InputT, RestrictionT>>>,
          PCollectionTuple> {
    private final DoFn<InputT, OutputT> fn;
    private final Coder<InputT> elementCoder;
    private final Coder<RestrictionT> restrictionCoder;
    private final WindowingStrategy<InputT, ?> windowingStrategy;
    private final List<PCollectionView<?>> sideInputs;
    private final TupleTag<OutputT> mainOutputTag;
    private final TupleTagList additionalOutputTags;

    /**
     * @param fn the splittable {@link DoFn}.
     * @param windowingStrategy the {@link WindowingStrategy} of the input collection.
     * @param sideInputs list of side inputs that should be available to the {@link DoFn}.
     * @param mainOutputTag {@link TupleTag Tag} of the {@link DoFn DoFn's} main output.
     * @param additionalOutputTags {@link TupleTagList Tags} of the {@link DoFn DoFn's} additional
     *     outputs.
     */
    public ProcessElements(
        DoFn<InputT, OutputT> fn,
        Coder<InputT> elementCoder,
        Coder<RestrictionT> restrictionCoder,
        WindowingStrategy<InputT, ?> windowingStrategy,
        List<PCollectionView<?>> sideInputs,
        TupleTag<OutputT> mainOutputTag,
        TupleTagList additionalOutputTags) {
      this.fn = fn;
      this.elementCoder = elementCoder;
      this.restrictionCoder = restrictionCoder;
      this.windowingStrategy = windowingStrategy;
      this.sideInputs = sideInputs;
      this.mainOutputTag = mainOutputTag;
      this.additionalOutputTags = additionalOutputTags;
    }

    public DoFn<InputT, OutputT> getFn() {
      return fn;
    }

    public List<PCollectionView<?>> getSideInputs() {
      return sideInputs;
    }

    public TupleTag<OutputT> getMainOutputTag() {
      return mainOutputTag;
    }

    public TupleTagList getAdditionalOutputTags() {
      return additionalOutputTags;
    }

    public ProcessFn<InputT, OutputT, RestrictionT, TrackerT> newProcessFn(
        DoFn<InputT, OutputT> fn) {
      return new SplittableParDo.ProcessFn<>(
          fn, elementCoder, restrictionCoder, windowingStrategy);
    }

    @Override
    public PCollectionTuple expand(
        PCollection<? extends KeyedWorkItem<String, ElementAndRestriction<InputT, RestrictionT>>>
            input) {
      DoFnSignature signature = DoFnSignatures.getSignature(fn.getClass());
      PCollectionTuple outputs =
          PCollectionTuple.ofPrimitiveOutputsInternal(
              input.getPipeline(),
              TupleTagList.of(mainOutputTag).and(additionalOutputTags.getAll()),
              windowingStrategy,
              input.isBounded().and(signature.isBoundedPerElement()));

      // Set output type descriptor similarly to how ParDo.MultiOutput does it.
      outputs.get(mainOutputTag).setTypeDescriptor(fn.getOutputTypeDescriptor());

      return outputs;
    }

    @Override
    public <T> Coder<T> getDefaultOutputCoder(
        PCollection<? extends KeyedWorkItem<String, ElementAndRestriction<InputT, RestrictionT>>>
            input,
        PCollection<T> output)
        throws CannotProvideCoderException {
      // Similar logic to ParDo.MultiOutput.getOutputCoder.
      @SuppressWarnings("unchecked")
      KeyedWorkItemCoder<String, ElementAndRestriction<InputT, RestrictionT>> kwiCoder =
          (KeyedWorkItemCoder) input.getCoder();
      Coder<InputT> inputCoder =
          ((ElementAndRestrictionCoder<InputT, RestrictionT>) kwiCoder.getElementCoder())
              .getElementCoder();
      return input
          .getPipeline()
          .getCoderRegistry()
          .getCoder(output.getTypeDescriptor(), fn.getInputTypeDescriptor(), inputCoder);
    }
  }

  /**
   * Assigns a random unique key to each element of the input collection, so that the output
   * collection is effectively the same elements as input, but the per-key state and timers are now
   * effectively per-element.
   */
  private static class RandomUniqueKeyFn<T> implements SerializableFunction<T, String> {
    @Override
    public String apply(T input) {
      // the key is independent of the input, so re-applying yields a different key
      return UUID.randomUUID().toString();
    }
  }

  /**
   * Pairs each input element with its initial restriction using the given splittable {@link DoFn}.
   */
  private static class PairWithRestrictionFn<InputT, OutputT, RestrictionT>
      extends DoFn<InputT, ElementAndRestriction<InputT, RestrictionT>> {
    private DoFn<InputT, OutputT> fn;
    // transient: re-created in setup() after the DoFn has been deserialized
    private transient DoFnInvoker<InputT, OutputT> invoker;

    PairWithRestrictionFn(DoFn<InputT, OutputT> fn) {
      this.fn = fn;
    }

    @Setup
    public void setup() {
      invoker = DoFnInvokers.invokerFor(fn);
    }

    @ProcessElement
    public void processElement(ProcessContext context) {
      context.output(
          ElementAndRestriction.of(
              context.element(),
              invoker.<RestrictionT>invokeGetInitialRestriction(context.element())));
    }
  }

  /**
   * The heart of splittable {@link DoFn} execution: processes a single (element, restriction) pair
   * by creating a tracker for the restriction and checkpointing/resuming processing later if
   * necessary.
   *
   * <p>Takes {@link KeyedWorkItem} and assumes that the KeyedWorkItem contains a single element
   * (or a single timer set by {@link ProcessFn itself}, in a single window.
This is necessary * because {@link ProcessFn} sets timers, and timers are namespaced to a single window and it * should be the window of the input element. * * <p>See also: https://issues.apache.org/jira/browse/BEAM-1983 */ @VisibleForTesting public static class ProcessFn< InputT, OutputT, RestrictionT, TrackerT extends RestrictionTracker<RestrictionT>> extends DoFn<KeyedWorkItem<String, ElementAndRestriction<InputT, RestrictionT>>, OutputT> { /** * The state cell containing a watermark hold for the output of this {@link DoFn}. The hold is * acquired during the first {@link DoFn.ProcessElement} call for each element and restriction, * and is released when the {@link DoFn.ProcessElement} call returns and there is no residual * restriction captured by the {@link SplittableProcessElementInvoker}. * * <p>A hold is needed to avoid letting the output watermark immediately progress together with * the input watermark when the first {@link DoFn.ProcessElement} call for this element * completes. */ private static final StateTag<WatermarkHoldState> watermarkHoldTag = StateTags.makeSystemTagInternal( StateTags.<GlobalWindow>watermarkStateInternal( "hold", TimestampCombiner.LATEST)); /** * The state cell containing a copy of the element. Written during the first {@link * DoFn.ProcessElement} call and read during subsequent calls in response to timer firings, when * the original element is no longer available. */ private final StateTag<ValueState<WindowedValue<InputT>>> elementTag; /** * The state cell containing a restriction representing the unprocessed part of work for this * element. 
*/ private StateTag<ValueState<RestrictionT>> restrictionTag; private final DoFn<InputT, OutputT> fn; private final Coder<InputT> elementCoder; private final Coder<RestrictionT> restrictionCoder; private final WindowingStrategy<InputT, ?> inputWindowingStrategy; private transient StateInternalsFactory<String> stateInternalsFactory; private transient TimerInternalsFactory<String> timerInternalsFactory; private transient SplittableProcessElementInvoker<InputT, OutputT, RestrictionT, TrackerT> processElementInvoker; private transient DoFnInvoker<InputT, OutputT> invoker; public ProcessFn( DoFn<InputT, OutputT> fn, Coder<InputT> elementCoder, Coder<RestrictionT> restrictionCoder, WindowingStrategy<InputT, ?> inputWindowingStrategy) { this.fn = fn; this.elementCoder = elementCoder; this.restrictionCoder = restrictionCoder; this.inputWindowingStrategy = inputWindowingStrategy; this.elementTag = StateTags.value( "element", WindowedValue.getFullCoder( elementCoder, inputWindowingStrategy.getWindowFn().windowCoder())); this.restrictionTag = StateTags.value("restriction", restrictionCoder); } public void setStateInternalsFactory(StateInternalsFactory<String> stateInternalsFactory) { this.stateInternalsFactory = stateInternalsFactory; } public void setTimerInternalsFactory(TimerInternalsFactory<String> timerInternalsFactory) { this.timerInternalsFactory = timerInternalsFactory; } public void setProcessElementInvoker( SplittableProcessElementInvoker<InputT, OutputT, RestrictionT, TrackerT> invoker) { this.processElementInvoker = invoker; } public DoFn<InputT, OutputT> getFn() { return fn; } public Coder<InputT> getElementCoder() { return elementCoder; } public Coder<RestrictionT> getRestrictionCoder() { return restrictionCoder; } public WindowingStrategy<InputT, ?> getInputWindowingStrategy() { return inputWindowingStrategy; } @Setup public void setup() throws Exception { invoker = DoFnInvokers.invokerFor(fn); invoker.invokeSetup(); } @Teardown public void tearDown() throws 
Exception { invoker.invokeTeardown(); } @StartBundle public void startBundle(StartBundleContext c) throws Exception { invoker.invokeStartBundle(wrapContextAsStartBundle(c)); } @FinishBundle public void finishBundle(FinishBundleContext c) throws Exception { invoker.invokeFinishBundle(wrapContextAsFinishBundle(c)); } @ProcessElement public void processElement(final ProcessContext c) { String key = c.element().key(); StateInternals stateInternals = stateInternalsFactory.stateInternalsForKey(key); TimerInternals timerInternals = timerInternalsFactory.timerInternalsForKey(key); // Initialize state (element and restriction) depending on whether this is the seed call. // The seed call is the first call for this element, which actually has the element. // Subsequent calls are timer firings and the element has to be retrieved from the state. TimerInternals.TimerData timer = Iterables.getOnlyElement(c.element().timersIterable(), null); boolean isSeedCall = (timer == null); StateNamespace stateNamespace; if (isSeedCall) { WindowedValue<ElementAndRestriction<InputT, RestrictionT>> windowedValue = Iterables.getOnlyElement(c.element().elementsIterable()); BoundedWindow window = Iterables.getOnlyElement(windowedValue.getWindows()); stateNamespace = StateNamespaces.window( (Coder<BoundedWindow>) inputWindowingStrategy.getWindowFn().windowCoder(), window); } else { stateNamespace = timer.getNamespace(); } ValueState<WindowedValue<InputT>> elementState = stateInternals.state(stateNamespace, elementTag); ValueState<RestrictionT> restrictionState = stateInternals.state(stateNamespace, restrictionTag); WatermarkHoldState holdState = stateInternals.state(stateNamespace, watermarkHoldTag); ElementAndRestriction<WindowedValue<InputT>, RestrictionT> elementAndRestriction; if (isSeedCall) { WindowedValue<ElementAndRestriction<InputT, RestrictionT>> windowedValue = Iterables.getOnlyElement(c.element().elementsIterable()); WindowedValue<InputT> element = 
windowedValue.withValue(windowedValue.getValue().element()); elementState.write(element); elementAndRestriction = ElementAndRestriction.of(element, windowedValue.getValue().restriction()); } else { // This is not the first ProcessElement call for this element/restriction - rather, // this is a timer firing, so we need to fetch the element and restriction from state. elementState.readLater(); restrictionState.readLater(); elementAndRestriction = ElementAndRestriction.of(elementState.read(), restrictionState.read()); } final TrackerT tracker = invoker.invokeNewTracker(elementAndRestriction.restriction()); SplittableProcessElementInvoker<InputT, OutputT, RestrictionT, TrackerT>.Result result = processElementInvoker.invokeProcessElement( invoker, elementAndRestriction.element(), tracker); // Save state for resuming. if (result.getResidualRestriction() == null) { // All work for this element/restriction is completed. Clear state and release hold. elementState.clear(); restrictionState.clear(); holdState.clear(); return; } restrictionState.write(result.getResidualRestriction()); Instant futureOutputWatermark = result.getFutureOutputWatermark(); if (futureOutputWatermark == null) { futureOutputWatermark = elementAndRestriction.element().getTimestamp(); } holdState.add(futureOutputWatermark); // Set a timer to continue processing this element. 
timerInternals.setTimer( TimerInternals.TimerData.of( stateNamespace, timerInternals.currentProcessingTime(), TimeDomain.PROCESSING_TIME)); } private DoFn<InputT, OutputT>.StartBundleContext wrapContextAsStartBundle( final StartBundleContext baseContext) { return fn.new StartBundleContext() { @Override public PipelineOptions getPipelineOptions() { return baseContext.getPipelineOptions(); } private void throwUnsupportedOutput() { throw new UnsupportedOperationException( String.format( "Splittable DoFn can only output from @%s", ProcessElement.class.getSimpleName())); } }; } private DoFn<InputT, OutputT>.FinishBundleContext wrapContextAsFinishBundle( final FinishBundleContext baseContext) { return fn.new FinishBundleContext() { @Override public void output(OutputT output, Instant timestamp, BoundedWindow window) { throwUnsupportedOutput(); } @Override public <T> void output(TupleTag<T> tag, T output, Instant timestamp, BoundedWindow window) { throwUnsupportedOutput(); } @Override public PipelineOptions getPipelineOptions() { return baseContext.getPipelineOptions(); } private void throwUnsupportedOutput() { throw new UnsupportedOperationException( String.format( "Splittable DoFn can only output from @%s", ProcessElement.class.getSimpleName())); } }; } } /** Splits the restriction using the given {@link DoFn.SplitRestriction} method. 
*/ private static class SplitRestrictionFn<InputT, RestrictionT> extends DoFn< ElementAndRestriction<InputT, RestrictionT>, ElementAndRestriction<InputT, RestrictionT>> { private final DoFn<InputT, ?> splittableFn; private transient DoFnInvoker<InputT, ?> invoker; SplitRestrictionFn(DoFn<InputT, ?> splittableFn) { this.splittableFn = splittableFn; } @Setup public void setup() { invoker = DoFnInvokers.invokerFor(splittableFn); } @ProcessElement public void processElement(final ProcessContext c) { final InputT element = c.element().element(); invoker.invokeSplitRestriction( element, c.element().restriction(), new OutputReceiver<RestrictionT>() { @Override public void output(RestrictionT part) { c.output(ElementAndRestriction.of(element, part)); } }); } } }
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.completion;

import com.intellij.codeInsight.completion.util.MethodParenthesesHandler;
import com.intellij.codeInsight.lookup.*;
import com.intellij.codeInsight.lookup.impl.JavaElementLookupRenderer;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.util.ClassConditionKey;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.TypeConversionUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * Lookup (completion) item for a method call. Handles insertion of parentheses, optional
 * static import / qualification of the call, and explicit type arguments when inference
 * cannot be relied upon.
 *
 * @author peter
 */
public class JavaMethodCallElement extends LookupItem<PsiMethod> implements TypedLookupItem, StaticallyImportable {
  public static final ClassConditionKey<JavaMethodCallElement> CLASS_CONDITION_KEY =
      ClassConditionKey.create(JavaMethodCallElement.class);
  // Null when the method has no containing class (e.g. invalid/synthetic PSI).
  @Nullable private final PsiClass myContainingClass;
  private final PsiMethod myMethod;
  // Non-null only for the static-import-capable constructor; also used as the
  // "can this element be statically imported" marker (see canBeImported()).
  private final MemberLookupHelper myHelper;
  private PsiSubstitutor myQualifierSubstitutor = PsiSubstitutor.EMPTY;
  private PsiSubstitutor myInferenceSubstitutor = PsiSubstitutor.EMPTY;
  private boolean myMayNeedExplicitTypeParameters;

  public JavaMethodCallElement(@NotNull PsiMethod method) {
    this(method, method.getName());
  }

  public JavaMethodCallElement(@NotNull PsiMethod method, String methodName) {
    super(method, methodName);
    myMethod = method;
    myHelper = null;
    myContainingClass = method.getContainingClass();
  }

  public JavaMethodCallElement(PsiMethod method, boolean shouldImportStatic, boolean mergedOverloads) {
    super(method, method.getName());
    myMethod = method;
    myContainingClass = method.getContainingClass();
    myHelper = new MemberLookupHelper(method, myContainingClass, shouldImportStatic, mergedOverloads);
    if (!shouldImportStatic) {
      if (myContainingClass != null) {
        String className = myContainingClass.getName();
        if (className != null) {
          // Allow matching the item by typing "ClassName.methodName".
          addLookupStrings(className + "." + myMethod.getName());
        }
      }
    }
  }

  @Override
  public PsiType getType() {
    // Apply inference first, then the qualifier substitution, to the raw return type.
    return getSubstitutor().substitute(getInferenceSubstitutor().substitute(getObject().getReturnType()));
  }

  public void setInferenceSubstitutor(@NotNull final PsiSubstitutor substitutor, PsiElement place) {
    myInferenceSubstitutor = substitutor;
    myMayNeedExplicitTypeParameters = mayNeedTypeParameters(place);
  }

  public JavaMethodCallElement setQualifierSubstitutor(@NotNull PsiSubstitutor qualifierSubstitutor) {
    myQualifierSubstitutor = qualifierSubstitutor;
    return this;
  }

  @NotNull
  public PsiSubstitutor getSubstitutor() {
    return myQualifierSubstitutor;
  }

  @NotNull
  public PsiSubstitutor getInferenceSubstitutor() {
    return myInferenceSubstitutor;
  }

  @Override
  public void setShouldBeImported(boolean shouldImportStatic) {
    // Precondition: canBeImported() — myHelper is null for the plain constructors.
    myHelper.setShouldBeImported(shouldImportStatic);
  }

  @Override
  public boolean canBeImported() {
    return myHelper != null;
  }

  @Override
  public boolean willBeImported() {
    return canBeImported() && myHelper.willBeImported();
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof JavaMethodCallElement)) return false;
    if (!super.equals(o)) return false;

    // Two items for the same method differ if they carry different inferred type arguments.
    return myInferenceSubstitutor.equals(((JavaMethodCallElement)o).myInferenceSubstitutor);
  }

  @Override
  public int hashCode() {
    int result = super.hashCode();
    result = 31 * result + myInferenceSubstitutor.hashCode();
    return result;
  }

  @Override
  public void handleInsert(InsertionContext context) {
    final Document document = context.getDocument();
    final PsiFile file = context.getFile();
    final PsiMethod method = getObject();

    final LookupElement[] allItems = context.getElements();
    final boolean overloadsMatter = allItems.length == 1 && getUserData(JavaCompletionUtil.FORCE_SHOW_SIGNATURE_ATTR) == null;
    final boolean hasParams = MethodParenthesesHandler.hasParams(this, allItems, overloadsMatter, method);
    JavaCompletionUtil.insertParentheses(context, this, overloadsMatter, hasParams);

    final int startOffset = context.getStartOffset();
    // Track the reference start so later insertions keep pointing at the call.
    final OffsetKey refStart = context.trackOffset(startOffset, true);
    if (shouldInsertTypeParameters() && mayNeedTypeParameters(context.getFile().findElementAt(context.getStartOffset()))) {
      qualifyMethodCall(file, startOffset, document);
      insertExplicitTypeParameters(context, refStart);
    }
    else if (myHelper != null) {
      context.commitDocument();
      if (willBeImported()) {
        final PsiReferenceExpression ref = PsiTreeUtil.findElementOfClassAtOffset(file, startOffset, PsiReferenceExpression.class, false);
        if (ref != null && myContainingClass != null && !ref.isReferenceTo(method)) {
          ref.bindToElementViaStaticImport(myContainingClass);
        }
        return;
      }

      qualifyMethodCall(file, startOffset, document);
    }

    final PsiType type = method.getReturnType();
    // Completing with '!' on a boolean-returning method negates the whole call.
    if (context.getCompletionChar() == '!' && type != null && PsiType.BOOLEAN.isAssignableFrom(type)) {
      context.setAddCompletionChar(false);
      context.commitDocument();
      final int offset = context.getOffset(refStart);
      final PsiMethodCallExpression methodCall = PsiTreeUtil.findElementOfClassAtOffset(file, offset, PsiMethodCallExpression.class, false);
      if (methodCall != null) {
        FeatureUsageTracker.getInstance().triggerFeatureUsed(CodeCompletionFeatures.EXCLAMATION_FINISH);
        document.insertString(methodCall.getTextRange().getStartOffset(), "!");
      }
    }
  }

  private boolean shouldInsertTypeParameters() {
    // Explicit type args only make sense when something was inferred and there are no
    // value arguments the compiler could infer from.
    return myMayNeedExplicitTypeParameters && !getInferenceSubstitutor().equals(PsiSubstitutor.EMPTY) && myMethod.getParameterList().getParametersCount() == 0;
  }

  public static boolean mayNeedTypeParameters(@Nullable final PsiElement leaf) {
    // Only inside an argument list or a conditional expression can the expected type be
    // insufficient for inference. (Both checks return false for a null leaf.)
    if (PsiTreeUtil.getParentOfType(leaf, PsiExpressionList.class, true, PsiCodeBlock.class, PsiModifierListOwner.class) == null) {
      if (PsiTreeUtil.getParentOfType(leaf, PsiConditionalExpression.class, true, PsiCodeBlock.class, PsiModifierListOwner.class) == null) {
        return false;
      }
    }

    // Java 8+ target-typing handles these cases; no explicit type args needed.
    if (PsiUtil.getLanguageLevel(leaf).isAtLeast(LanguageLevel.JDK_1_8)) return false;

    final PsiElement parent = leaf.getParent();
    if (parent instanceof PsiReferenceExpression && ((PsiReferenceExpression)parent).getTypeParameters().length > 0) {
      // Explicit type arguments are already present.
      return false;
    }
    return true;
  }

  private void insertExplicitTypeParameters(InsertionContext context, OffsetKey refStart) {
    context.commitDocument();

    final String typeParams = getTypeParamsText(false);
    if (typeParams != null) {
      context.getDocument().insertString(context.getOffset(refStart), typeParams);
      JavaCompletionUtil.shortenReference(context.getFile(), context.getOffset(refStart));
    }
  }

  private void qualifyMethodCall(PsiFile file, final int startOffset, final Document document) {
    final PsiReference reference = file.findReferenceAt(startOffset);
    if (reference instanceof PsiReferenceExpression && ((PsiReferenceExpression)reference).isQualified()) {
      // Already qualified; nothing to do.
      return;
    }

    final PsiMethod method = getObject();
    if (!method.hasModifierProperty(PsiModifier.STATIC)) {
      document.insertString(startOffset, "this.");
      return;
    }

    if (myContainingClass == null) return;

    // Insert "." first, then the class reference before it, yielding "ClassName.method".
    document.insertString(startOffset, ".");
    JavaCompletionUtil.insertClassReference(myContainingClass, file, startOffset);
  }

  /**
   * Builds the explicit type-argument list text ("&lt;T1, T2&gt;") from the inference
   * substitutor, or returns null when any argument cannot be spelled out (unresolved,
   * captured wildcard, erased, or containing '?').
   *
   * @param presentable true for UI text, false for insertable canonical text.
   */
  @Nullable
  private String getTypeParamsText(boolean presentable) {
    final PsiMethod method = getObject();
    final PsiSubstitutor substitutor = getInferenceSubstitutor();
    final PsiTypeParameter[] parameters = method.getTypeParameters();
    assert parameters.length > 0;
    final StringBuilder builder = new StringBuilder("<");
    boolean first = true;
    for (final PsiTypeParameter parameter : parameters) {
      if (!first) builder.append(", ");
      first = false;
      PsiType type = substitutor.substitute(parameter);
      if (type instanceof PsiWildcardType) {
        type = ((PsiWildcardType)type).getExtendsBound();
      }

      if (type == null || type instanceof PsiCapturedWildcardType) return null;
      if (type.equals(TypeConversionUtil.typeParameterErasure(parameter))) return null;

      final String text = presentable ? type.getPresentableText() : type.getCanonicalText();
      if (text.indexOf('?') >= 0) return null;

      builder.append(text);
    }
    return builder.append(">").toString();
  }

  @Override
  public boolean isValid() {
    return super.isValid() && myInferenceSubstitutor.isValid() && getSubstitutor().isValid();
  }

  @Override
  public void renderElement(LookupElementPresentation presentation) {
    presentation.setIcon(DefaultLookupItemRenderer.getRawIcon(this, presentation.isReal()));

    presentation.setStrikeout(JavaElementLookupRenderer.isToStrikeout(this));

    MemberLookupHelper helper = myHelper != null ? myHelper : new MemberLookupHelper(myMethod, myContainingClass, false, false);
    helper.renderElement(presentation, myHelper != null, myHelper != null && !myHelper.willBeImported(), getSubstitutor());

    if (shouldInsertTypeParameters()) {
      String typeParamsText = getTypeParamsText(true);
      if (typeParamsText != null) {
        // Truncate overly long type-argument lists in the popup.
        if (typeParamsText.length() > 10) {
          typeParamsText = typeParamsText.substring(0, 10) + "...>";
        }
        String itemText = presentation.getItemText();
        assert itemText != null;
        // Splice the type arguments after the "ClassName." qualifier, if any.
        int i = itemText.indexOf('.');
        if (i > 0) {
          presentation.setItemText(itemText.substring(0, i + 1) + typeParamsText + itemText.substring(i + 1));
        }
      }
    }
  }
}
/*
 * Copyright (C) 2011 htbest2000@gmail.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.htbest2000.v2ex.util;

import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import android.content.Context;
import android.database.Cursor;
import android.util.Log;

import com.htbest2000.v2ex.api.Topics;
import com.htbest2000.v2ex.provider.Database;

/** Assorted database / formatting helpers for the V2EX client. */
public class Misc {
	// Matches the first run of decimal digits that does not start with '0'.
	// Compiled once, reused by extractNumber().
	private static Pattern sPatNumber = Pattern.compile("([1-9][0-9]*)");

	/**
	 * Extracts the first positive decimal number from {@code s}.
	 *
	 * @return the parsed number, or -1 when {@code s} contains none.
	 */
	static public long extractNumber( String s ) {
		Matcher m = sPatNumber.matcher(s);
		// matcher() never returns null, so only find() needs checking.
		if (m.find()) {
			// FIX: parse as long — this method returns long, and Integer.parseInt
			// threw NumberFormatException for values >= 2^31.
			return Long.parseLong( m.group(1) );
		}
		return -1;
	}

	/** @return all topics, newest modification first. Caller closes the cursor. */
	public static Cursor getTopicsCursor(Context context) {
		return Database.getInstance(context).query(
				Database.TABLE_TOPICS, null, null, null, null, null,
				"last_modified DESC");
	}

	/** @return all nodes in table order. Caller closes the cursor. */
	public static Cursor getAllNodesCursor(Context context) {
		return Database.getInstance(context).query(
				Database.TABLE_NODES, null, null, null, null, null, null);
	}

	/**
	 * Loads a single topic by database record id.
	 *
	 * @param id the {@code _id} of the row in the topics table
	 * @return the populated topic, or null when no row matches.
	 */
	public static Topics.Topic getTopic( Context context, long id ) {
		Cursor cursor = null;
		try {
			cursor = Database.getInstance(context).query(
					Database.TABLE_TOPICS,
					null,
					Database.Columns.Topics._ID + "=" + id,
					null, null, null, null);
			if (null != cursor && 0 != cursor.getCount()) {
				cursor.moveToFirst();
				Topics.Topic topic = new Topics.Topic();
				topic.id = cursor.getLong(Database.Columns.Topics.ID_TOPTIC_ID);
				topic.title = cursor.getString(Database.Columns.Topics.ID_TITLE);
				topic.url = cursor.getString(Database.Columns.Topics.ID_URL);
				topic.content = cursor.getString(Database.Columns.Topics.ID_CONTENT);
				topic.content_rendered = cursor.getString(Database.Columns.Topics.ID_CONTENT_RENDERED);
				topic.replies = cursor.getShort(Database.Columns.Topics.ID_REPLIES);
				topic.member_id = cursor.getLong(Database.Columns.Topics.ID_MEMBER_ID);
				topic.member_username = cursor.getString(Database.Columns.Topics.ID_MEMBER_NAME);
				topic.node_id = cursor.getLong(Database.Columns.Topics.ID_NODE_ID);
				topic.node_name = cursor.getString(Database.Columns.Topics.ID_NODE_NAME);
				topic.node_title = cursor.getString(Database.Columns.Topics.ID_NODE_TITLE);
				topic.node_title_alternative = cursor.getString(Database.Columns.Topics.ID_NODE_TITLE_ALTERNATIVE);
				topic.node_topics = cursor.getInt(Database.Columns.Topics.ID_NODE_TOPICS);
				topic.node_url = cursor.getString(Database.Columns.Topics.ID_NODE_URL);
				return topic;
			}
			else {
				return null;
			}
		}
		finally {
			// Consistency: use the shared helper instead of an inline null-check.
			closeCursor(cursor);
		}
	}

	/**
	 * Looks up the remote topic id for a database record id.
	 *
	 * @return the topic id, or -1 when no row matches.
	 */
	static long getTopicId( Context context, long id ) {
		Cursor cursor = null;
		long ret = -1;
		try {
			cursor = Database.getInstance(context).query(
					Database.TABLE_TOPICS,
					null,
					Database.Columns.Topics._ID + "=" + id,
					null, null, null, null);
			if (null != cursor && 0 != cursor.getCount()) {
				cursor.moveToFirst();
				ret = cursor.getLong(Database.Columns.Topics.ID_TOPTIC_ID);
			}
		}
		finally {
			closeCursor(cursor);
		}
		return ret;
	}

	/**
	 * Get the count of rows matching the specified topic id.
	 *
	 * @param id topic id matched against NAME_TOPTIC_ID
	 *           (NOTE(review): the old javadoc said "db record id", but the query
	 *           filters on the topic-id column, not _ID — verify against callers)
	 * @return the row count, or -1 when the query fails.
	 */
	public static long getTopicCount(Context context, long id) {
		Cursor cursor = null;
		long ret = -1;
		try {
			cursor = Database.getInstance(context).query(
					Database.TABLE_TOPICS,
					null,
					Database.Columns.Topics.NAME_TOPTIC_ID + "=" + id,
					null, null, null, null);
			if (null != cursor) {
				ret = cursor.getCount();
			}
		}
		finally {
			closeCursor(cursor);
		}
		return ret;
	}

	/**
	 * Get node count by node_id.
	 *
	 * @param node_id <b>NOTE</b> node_id is NOT _id
	 * @return the row count, or -1 when the query fails.
	 */
	public static long getNodeCount(Context context, long node_id) {
		Cursor cursor = null;
		long cnt = -1;
		try {
			cursor = Database.getInstance(context).query(
					Database.TABLE_NODES,
					null,
					Database.Columns.Nodes.NAME_NODE_ID + "=" + node_id,
					null, null, null, null);
			if (null != cursor) {
				cnt = cursor.getCount();
			}
		}
		finally {
			closeCursor(cursor);
		}
		return cnt;
	}

	/** Logs the stream line by line under the "=ht=" tag. Debug helper. */
	public static void dump(String tag, BufferedInputStream in) {
		try {
			// FIX: one BufferedReader suffices; the original stacked two
			// BufferedReaders on top of the InputStreamReader for no benefit.
			// (UnsupportedEncodingException is an IOException subclass, so the
			// single catch below covers the old two-catch behavior.)
			BufferedReader reader = new BufferedReader(new InputStreamReader(in));
			while (true) {
				String line = reader.readLine();
				if (null == line)
					break;
				Log.i("=ht=", tag + ": " + line);
			}
		}
		catch (IOException e) {
			e.printStackTrace();
		}
	}

	/** Null-safe cursor close. */
	public static void closeCursor( Cursor cur ) {
		if (null != cur)
			cur.close();
	}

	// Interval thresholds in seconds for formatRelativeTime().
	final static long YEAR = 60 * 60 * 24 * 365;
	final static long MONTH = 60 * 60 * 24 * 30;
	// FIX: a week is 7 days; the original used 6, which reported "1 week ago"
	// for intervals between 6 and 7 days.
	final static long WEEK = 60 * 60 * 24 * 7;
	final static long DAY = 60 * 60 * 24;
	final static long HOUR = 60 * 60;
	final static long MIN = 60;

	/**
	 * Formats a duration in seconds as a coarse "N unit ago" string.
	 * Returns "" for intervals under one minute.
	 */
	public static String formatRelativeTime(long interval) {
		String ret = "";
		if (interval >= YEAR) {
			ret = "" + interval / YEAR + " year ago";
		}
		else if (interval >= MONTH){
			ret = "" + interval / MONTH + " month ago";
		}
		else if (interval >= WEEK) {
			ret = "" + interval / WEEK + " week ago";
		}
		else if (interval >= DAY) {
			ret = "" + interval / DAY + " day ago";
		}
		else if (interval >= HOUR) {
			ret = "" + interval / HOUR + " hour ago";
		}
		else if (interval >= MIN) {
			ret = "" + interval / MIN + " min ago";
		}
		return ret;
	}
}
/* * Copyright 2003-2006 Dave Griffith, Bas Leijdekkers * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.siyeh.ig.psiutils; import com.intellij.psi.*; import com.intellij.psi.tree.IElementType; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.List; public class EquivalenceChecker{ private EquivalenceChecker(){} public static boolean modifierListsAreEquivalent( @Nullable PsiModifierList list1, @Nullable PsiModifierList list2) { if (list1 == null) { return list2 == null; } else if (list2 == null) { return false; } final PsiAnnotation[] annotations = list1.getAnnotations(); for (PsiAnnotation annotation : annotations) { final String qualifiedName = annotation.getQualifiedName(); if (qualifiedName == null) { return false; } if (list2.findAnnotation(qualifiedName) == null) { return false; } } if (list1.hasModifierProperty(PsiModifier.ABSTRACT) && !list2.hasModifierProperty(PsiModifier.ABSTRACT)) { return false; } if (list1.hasModifierProperty(PsiModifier.FINAL) && !list2.hasModifierProperty(PsiModifier.FINAL)) { return false; } if (list1.hasModifierProperty(PsiModifier.NATIVE) && !list2.hasModifierProperty(PsiModifier.NATIVE)) { return false; } if (list1.hasModifierProperty(PsiModifier.PACKAGE_LOCAL) && !list2.hasModifierProperty(PsiModifier.PACKAGE_LOCAL)) { return false; } if (list1.hasModifierProperty(PsiModifier.PRIVATE) && !list2.hasModifierProperty(PsiModifier.PRIVATE)) { return false; 
// Tail of the modifier-list equivalence check (the method header is above this chunk):
// list2 must carry every modifier keyword that list1 carries, tested one keyword at a time.
} if (list1.hasModifierProperty(PsiModifier.PROTECTED) && !list2.hasModifierProperty(PsiModifier.PROTECTED)) { return false; } if (list1.hasModifierProperty(PsiModifier.PUBLIC) && !list2.hasModifierProperty(PsiModifier.PUBLIC)) { return false; } if (list1.hasModifierProperty(PsiModifier.STATIC) && !list2.hasModifierProperty(PsiModifier.STATIC)) { return false; } if (list1.hasModifierProperty(PsiModifier.STRICTFP) && !list2.hasModifierProperty(PsiModifier.STRICTFP)) { return false; } if (list1.hasModifierProperty(PsiModifier.SYNCHRONIZED) && !list2.hasModifierProperty(PsiModifier.SYNCHRONIZED)) { return false; } if (list1.hasModifierProperty(PsiModifier.TRANSIENT) && !list2.hasModifierProperty(PsiModifier.TRANSIENT)) { return false; } return !(list1.hasModifierProperty(PsiModifier.VOLATILE) && !list2.hasModifierProperty(PsiModifier.VOLATILE)); }
/**
 * Returns true when the two statements are structurally equivalent.
 * Null-safe: two nulls are equivalent, exactly one null is not. Requires the
 * same concrete PsiStatement class on both sides, then dispatches to a
 * type-specific helper; statement kinds with no helper fall back to a raw
 * text comparison at the end of the chain.
 */
public static boolean statementsAreEquivalent( @Nullable PsiStatement statement1, @Nullable PsiStatement statement2) { if(statement1 == null && statement2 == null){ return true; } if(statement1 == null || statement2 == null){ return false; } if(statement1.getClass() != statement2.getClass()){ return false; } if(statement1 instanceof PsiAssertStatement){ final PsiAssertStatement assertStatement1 = (PsiAssertStatement)statement1; final PsiAssertStatement assertStatement2 = (PsiAssertStatement)statement2; return assertStatementsAreEquivalent(assertStatement1, assertStatement2); } if(statement1 instanceof PsiBlockStatement){ final PsiBlockStatement blockStatement1 = (PsiBlockStatement)statement1; final PsiBlockStatement blockStatement2 = (PsiBlockStatement)statement2; return blockStatementsAreEquivalent(blockStatement1, blockStatement2); } if(statement1 instanceof PsiBreakStatement){ final PsiBreakStatement breakStatement1 = (PsiBreakStatement)statement1; final PsiBreakStatement breakStatement2 = (PsiBreakStatement)statement2; return breakStatementsAreEquivalent(breakStatement1, breakStatement2); } if(statement1 instanceof
PsiContinueStatement){ final PsiContinueStatement continueStatement1 = (PsiContinueStatement)statement1; final PsiContinueStatement continueStatement2 = (PsiContinueStatement)statement2; return continueStatementsAreEquivalent(continueStatement1, continueStatement2); } if(statement1 instanceof PsiDeclarationStatement){ final PsiDeclarationStatement declarationStatement1 = (PsiDeclarationStatement)statement1; final PsiDeclarationStatement declarationStatement2 = (PsiDeclarationStatement)statement2; return declarationStatementsAreEquivalent(declarationStatement1, declarationStatement2); } if(statement1 instanceof PsiDoWhileStatement){ final PsiDoWhileStatement doWhileStatement1 = (PsiDoWhileStatement)statement1; final PsiDoWhileStatement doWhileStatement2 = (PsiDoWhileStatement)statement2; return doWhileStatementsAreEquivalent( doWhileStatement1, doWhileStatement2); } if(statement1 instanceof PsiEmptyStatement){ return true; } if(statement1 instanceof PsiExpressionListStatement){ final PsiExpressionListStatement expressionListStatement1 = (PsiExpressionListStatement)statement1; final PsiExpressionListStatement expressionListStatement2 = (PsiExpressionListStatement)statement2; return expressionListStatementsAreEquivalent( expressionListStatement1, expressionListStatement2); } if(statement1 instanceof PsiExpressionStatement){ final PsiExpressionStatement expressionStatement1 = (PsiExpressionStatement)statement1; final PsiExpressionStatement expressionStatement2 = (PsiExpressionStatement)statement2; return expressionStatementsAreEquivalent( expressionStatement1, expressionStatement2); } if(statement1 instanceof PsiForStatement){ final PsiForStatement forStatement1 = (PsiForStatement)statement1; final PsiForStatement forStatement2 = (PsiForStatement)statement2; return forStatementsAreEquivalent(forStatement1, forStatement2); } if(statement1 instanceof PsiForeachStatement){ final PsiForeachStatement forEachStatement1 = (PsiForeachStatement)statement1; final
PsiForeachStatement forEachStatement2 = (PsiForeachStatement)statement2; return forEachStatementsAreEquivalent(forEachStatement1, forEachStatement2); } if(statement1 instanceof PsiIfStatement){ return ifStatementsAreEquivalent( (PsiIfStatement) statement1, (PsiIfStatement) statement2); } if(statement1 instanceof PsiLabeledStatement){ final PsiLabeledStatement labeledStatement1 = (PsiLabeledStatement)statement1; final PsiLabeledStatement labeledStatement2 = (PsiLabeledStatement)statement2; return labeledStatementsAreEquivalent(labeledStatement1, labeledStatement2); } if(statement1 instanceof PsiReturnStatement){ final PsiReturnStatement returnStatement1 = (PsiReturnStatement)statement1; final PsiReturnStatement returnStatement2 = (PsiReturnStatement)statement2; return returnStatementsAreEquivalent(returnStatement1, returnStatement2); } if(statement1 instanceof PsiSwitchStatement){ final PsiSwitchStatement switchStatement1 = (PsiSwitchStatement)statement1; final PsiSwitchStatement switchStatement2 = (PsiSwitchStatement)statement2; return switchStatementsAreEquivalent(switchStatement1, switchStatement2); } if(statement1 instanceof PsiSwitchLabelStatement){ final PsiSwitchLabelStatement switchLabelStatement1 = (PsiSwitchLabelStatement)statement1; final PsiSwitchLabelStatement switchLabelStatement2 = (PsiSwitchLabelStatement)statement2; return switchLabelStatementsAreEquivalent(switchLabelStatement1, switchLabelStatement2); } if(statement1 instanceof PsiSynchronizedStatement){ final PsiSynchronizedStatement synchronizedStatement1 = (PsiSynchronizedStatement)statement1; final PsiSynchronizedStatement synchronizedStatement2 = (PsiSynchronizedStatement)statement2; return synchronizedStatementsAreEquivalent( synchronizedStatement1, synchronizedStatement2); } if(statement1 instanceof PsiThrowStatement){ final PsiThrowStatement throwStatement1 = (PsiThrowStatement)statement1; final PsiThrowStatement throwStatement2 = (PsiThrowStatement)statement2; return
throwStatementsAreEquivalent(throwStatement1, throwStatement2); } if(statement1 instanceof PsiTryStatement){ final PsiTryStatement tryStatement1 = (PsiTryStatement)statement1; final PsiTryStatement tryStatement2 = (PsiTryStatement)statement2; return tryStatementsAreEquivalent(tryStatement1, tryStatement2); } if(statement1 instanceof PsiWhileStatement){ final PsiWhileStatement whileStatement1 = (PsiWhileStatement)statement1; final PsiWhileStatement whileStatement2 = (PsiWhileStatement)statement2; return whileStatementsAreEquivalent(whileStatement1, whileStatement2); } final String text1 = statement1.getText(); final String text2 = statement2.getText(); return text1.equals(text2); }
// Two declaration statements are equivalent when they declare the same number of
// local variables and the variables match pairwise (type, name, initializer), in order.
// Declared elements that are not local variables (e.g. local classes) are ignored.
private static boolean declarationStatementsAreEquivalent( @NotNull PsiDeclarationStatement statement1, @NotNull PsiDeclarationStatement statement2){ final PsiElement[] elements1 = statement1.getDeclaredElements(); final List<PsiLocalVariable> vars1 = new ArrayList<PsiLocalVariable>(elements1.length); for(PsiElement anElement : elements1){ if(anElement instanceof PsiLocalVariable){ vars1.add((PsiLocalVariable) anElement); } } final PsiElement[] elements2 = statement2.getDeclaredElements(); final List<PsiLocalVariable> vars2 = new ArrayList<PsiLocalVariable>(elements2.length); for(PsiElement anElement : elements2){ if(anElement instanceof PsiLocalVariable){ vars2.add((PsiLocalVariable) anElement); } } final int size = vars1.size(); if(size != vars2.size()){ return false; } for(int i = 0; i < size; i++){ final PsiLocalVariable var1 = vars1.get(i); final PsiLocalVariable var2 = vars2.get(i); if(!localVariableAreEquivalent(var1, var2)){ return false; } } return true; }
// Local variables match when type, name and initializer expression all match.
private static boolean localVariableAreEquivalent( @NotNull PsiLocalVariable var1, @NotNull PsiLocalVariable var2) { final PsiType type1 = var1.getType(); final PsiType type2 = var2.getType(); if(!typesAreEquivalent(type1, type2)){ return false; } final String name1 = var1.getName(); final String name2 = var2.getName(); if
(!name1.equals(name2)) { return false; } final PsiExpression initializer1 = var1.getInitializer(); final PsiExpression initializer2 = var2.getInitializer(); return expressionsAreEquivalent(initializer1, initializer2); }
// try statements: try block, finally block, each catch block and each catch
// parameter must match pairwise (argument order to the symmetric helpers varies
// but does not affect the result).
private static boolean tryStatementsAreEquivalent( @NotNull PsiTryStatement statement1, @NotNull PsiTryStatement statement2) { final PsiCodeBlock tryBlock1 = statement1.getTryBlock(); final PsiCodeBlock tryBlock2 = statement2.getTryBlock(); if(!codeBlocksAreEquivalent(tryBlock1, tryBlock2)){ return false; } final PsiCodeBlock finallyBlock1 = statement1.getFinallyBlock(); final PsiCodeBlock finallyBlock2 = statement2.getFinallyBlock(); if(!codeBlocksAreEquivalent(finallyBlock1, finallyBlock2)){ return false; } final PsiCodeBlock[] catchBlocks1 = statement1.getCatchBlocks(); final PsiCodeBlock[] catchBlocks2 = statement2.getCatchBlocks(); if(catchBlocks1.length != catchBlocks2.length){ return false; } for(int i = 0; i < catchBlocks2.length; i++){ if(!codeBlocksAreEquivalent(catchBlocks1[i], catchBlocks2[i])){ return false; } } final PsiParameter[] catchParameters1 = statement1.getCatchBlockParameters(); final PsiParameter[] catchParameters2 = statement2.getCatchBlockParameters(); if(catchParameters1.length != catchParameters2.length){ return false; } for(int i = 0; i < catchParameters2.length; i++){ if(!parametersAreEquivalent(catchParameters2[i], catchParameters1[i])){ return false; } } return true; }
// Parameters match on type and name.
private static boolean parametersAreEquivalent( @NotNull PsiParameter parameter1, @NotNull PsiParameter parameter2) { final PsiType type1 = parameter1.getType(); final PsiType type2 = parameter2.getType(); if(!typesAreEquivalent(type1, type2)){ return false; } final String name1 = parameter1.getName(); final String name2 = parameter2.getName(); return name1.equals(name2); }
// Null-safe type comparison by canonical text (body continues on the next chunk line).
public static boolean typesAreEquivalent( @Nullable PsiType type1, @Nullable PsiType type2){ if(type1 == null){ return type2 == null; } if(type2 == null){ return false; } final
String type1Text = type1.getCanonicalText(); final String type2Text = type2.getCanonicalText(); return type1Text.equals(type2Text); } private static boolean whileStatementsAreEquivalent( @NotNull PsiWhileStatement statement1, @NotNull PsiWhileStatement statement2){ final PsiExpression condition1 = statement1.getCondition(); final PsiExpression condition2 = statement2.getCondition(); final PsiStatement body1 = statement1.getBody(); final PsiStatement body2 = statement2.getBody(); return expressionsAreEquivalent(condition1, condition2) && statementsAreEquivalent(body1, body2); } private static boolean forStatementsAreEquivalent( @NotNull PsiForStatement statement1, @NotNull PsiForStatement statement2) { final PsiExpression condition1 = statement1.getCondition(); final PsiExpression condition2 = statement2.getCondition(); if(!expressionsAreEquivalent(condition1, condition2)){ return false; } final PsiStatement initialization1 = statement1.getInitialization(); final PsiStatement initialization2 = statement2.getInitialization(); if(!statementsAreEquivalent(initialization1, initialization2)){ return false; } final PsiStatement update1 = statement1.getUpdate(); final PsiStatement update2 = statement2.getUpdate(); if(!statementsAreEquivalent(update1, update2)){ return false; } final PsiStatement body1 = statement1.getBody(); final PsiStatement body2 = statement2.getBody(); return statementsAreEquivalent(body1, body2); } private static boolean forEachStatementsAreEquivalent( @NotNull PsiForeachStatement statement1, @NotNull PsiForeachStatement statement2) { final PsiExpression value1 = statement1.getIteratedValue(); final PsiExpression value2 = statement2.getIteratedValue(); if(!expressionsAreEquivalent(value1, value2)){ return false; } final PsiParameter parameter1 = statement1.getIterationParameter(); final PsiParameter parameter2 = statement1.getIterationParameter(); final String name1 = parameter1.getName(); final String name2 = parameter2.getName(); if 
(!name1.equals(name2)){ return false; } final PsiType type1 = parameter1.getType(); if(!type1.equals(parameter2.getType())){ return false; } final PsiStatement body1 = statement1.getBody(); final PsiStatement body2 = statement2.getBody(); return statementsAreEquivalent(body1, body2); } private static boolean switchStatementsAreEquivalent( @NotNull PsiSwitchStatement statement1, @NotNull PsiSwitchStatement statement2) { final PsiExpression switchExpression1 = statement1.getExpression(); final PsiExpression swithcExpression2 = statement2.getExpression(); final PsiCodeBlock body1 = statement1.getBody(); final PsiCodeBlock body2 = statement2.getBody(); return expressionsAreEquivalent(switchExpression1, swithcExpression2) && codeBlocksAreEquivalent(body1, body2); } private static boolean doWhileStatementsAreEquivalent( @NotNull PsiDoWhileStatement statement1, @NotNull PsiDoWhileStatement statement2) { final PsiExpression condition1 = statement1.getCondition(); final PsiExpression condition2 = statement2.getCondition(); final PsiStatement body1 = statement1.getBody(); final PsiStatement body2 = statement2.getBody(); return expressionsAreEquivalent(condition1, condition2) && statementsAreEquivalent(body1, body2); } private static boolean assertStatementsAreEquivalent( @NotNull PsiAssertStatement statement1, @NotNull PsiAssertStatement statement2) { final PsiExpression condition1 = statement1.getAssertCondition(); final PsiExpression condition2 = statement2.getAssertCondition(); final PsiExpression description1 = statement1.getAssertDescription(); final PsiExpression description2 = statement2.getAssertDescription(); return expressionsAreEquivalent(condition1, condition2) && expressionsAreEquivalent(description1, description2); } private static boolean synchronizedStatementsAreEquivalent( @NotNull PsiSynchronizedStatement statement1, @NotNull PsiSynchronizedStatement statement2) { final PsiExpression lock1 = statement1.getLockExpression(); final PsiExpression lock2 = 
statement2.getLockExpression(); final PsiCodeBlock body1 = statement1.getBody(); final PsiCodeBlock body2 = statement2.getBody(); return expressionsAreEquivalent(lock1, lock2) && codeBlocksAreEquivalent(body1, body2); }
// Block statements delegate to the code-block comparison.
private static boolean blockStatementsAreEquivalent( @NotNull PsiBlockStatement statement1, @NotNull PsiBlockStatement statement2){ final PsiCodeBlock block1 = statement1.getCodeBlock(); final PsiCodeBlock block2 = statement2.getCodeBlock(); return codeBlocksAreEquivalent(block1, block2); }
// break statements match when both are unlabeled or both carry the same label text.
private static boolean breakStatementsAreEquivalent( @NotNull PsiBreakStatement statement1, @NotNull PsiBreakStatement statement2) { final PsiIdentifier identifier1 = statement1.getLabelIdentifier(); final PsiIdentifier identifier2 = statement2.getLabelIdentifier(); if(identifier1 == null){ return identifier2 == null; } if(identifier2 == null){ return false; } final String text1 = identifier1.getText(); final String text2 = identifier2.getText(); return text1.equals(text2); }
// continue statements: same rule as break - labels both absent or textually equal.
private static boolean continueStatementsAreEquivalent( @NotNull PsiContinueStatement statement1, @NotNull PsiContinueStatement statement2) { final PsiIdentifier identifier1 = statement1.getLabelIdentifier(); final PsiIdentifier identifier2 = statement2.getLabelIdentifier(); if(identifier1 == null){ return identifier2 == null; } if(identifier2 == null){ return false; } final String text1 = identifier1.getText(); final String text2 = identifier2.getText(); return text1.equals(text2); }
// switch labels: a default case only matches another default case; otherwise
// the case value expressions are compared.
private static boolean switchLabelStatementsAreEquivalent( @NotNull PsiSwitchLabelStatement statement1, @NotNull PsiSwitchLabelStatement statement2) { if (statement1.isDefaultCase()){ return statement2.isDefaultCase(); } if(statement2.isDefaultCase()){ return false; } final PsiExpression caseExpression1 = statement1.getCaseValue(); final PsiExpression caseExpression2 = statement2.getCaseValue(); return expressionsAreEquivalent(caseExpression1, caseExpression2); }
// NOTE(review): only the label text is compared here; the statement under the
// label is not inspected by this helper - confirm that is intentional.
private static boolean
labeledStatementsAreEquivalent( @NotNull PsiLabeledStatement statement1, @NotNull PsiLabeledStatement statement2) { final PsiIdentifier identifier1 = statement1.getLabelIdentifier(); final PsiIdentifier identifier2 = statement2.getLabelIdentifier(); final String text1 = identifier1.getText(); final String text2 = identifier2.getText(); return text1.equals(text2); }
// Null-safe: two code blocks are equivalent when they hold the same number of
// statements and the statements match pairwise, in order.
public static boolean codeBlocksAreEquivalent( @Nullable PsiCodeBlock block1, @Nullable PsiCodeBlock block2) { if (block1 == null && block2 == null){ return true; } if(block1 == null || block2 == null){ return false; } final PsiStatement[] statements1 = block1.getStatements(); final PsiStatement[] statements2 = block2.getStatements(); if(statements2.length != statements1.length){ return false; } for(int i = 0; i < statements2.length; i++){ if(!statementsAreEquivalent(statements2[i], statements1[i])){ return false; } } return true; }
// if statements: condition, then-branch and else-branch must all match (null branches allowed).
private static boolean ifStatementsAreEquivalent( @NotNull PsiIfStatement statement1, @NotNull PsiIfStatement statement2) { final PsiExpression condition1 = statement1.getCondition(); final PsiExpression condition2 = statement2.getCondition(); final PsiStatement thenBranch1 = statement1.getThenBranch(); final PsiStatement thenBranch2 = statement2.getThenBranch(); final PsiStatement elseBranch1 = statement1.getElseBranch(); final PsiStatement elseBranch2 = statement2.getElseBranch(); return expressionsAreEquivalent(condition1, condition2) && statementsAreEquivalent(thenBranch1, thenBranch2) && statementsAreEquivalent(elseBranch1, elseBranch2); }
// Expression statements delegate to the expression comparison.
private static boolean expressionStatementsAreEquivalent( @NotNull PsiExpressionStatement statement1, @NotNull PsiExpressionStatement statement2) { final PsiExpression expression1 = statement1.getExpression(); final PsiExpression expression2 = statement2.getExpression(); return expressionsAreEquivalent(expression1, expression2); }
// return statements compare their (possibly null) return values (continues on next chunk line).
private static boolean returnStatementsAreEquivalent( @NotNull PsiReturnStatement statement1, @NotNull
PsiReturnStatement statement2) { final PsiExpression returnValue1 = statement1.getReturnValue(); final PsiExpression returnValue2 = statement2.getReturnValue(); return expressionsAreEquivalent(returnValue1, returnValue2); }
// throw statements compare their thrown expressions.
private static boolean throwStatementsAreEquivalent( @NotNull PsiThrowStatement statement1, @NotNull PsiThrowStatement statement2) { final PsiExpression exception1 = statement1.getException(); final PsiExpression exception2 = statement2.getException(); return expressionsAreEquivalent(exception1, exception2); }
// Expression-list statements compare their expression arrays pairwise.
private static boolean expressionListStatementsAreEquivalent( @NotNull PsiExpressionListStatement statement1, @NotNull PsiExpressionListStatement statement2) { final PsiExpressionList expressionList1 = statement1.getExpressionList(); final PsiExpression[] expressions1 = expressionList1.getExpressions(); final PsiExpressionList expressionList2 = statement2.getExpressionList(); final PsiExpression[] expressions2 = expressionList2.getExpressions(); return expressionListsAreEquivalent(expressions1, expressions2); }
/**
 * Returns true when the two expressions are structurally equivalent.
 * Null-safe; strips surrounding parentheses before comparing, requires the
 * same concrete expression class, then dispatches to a type-specific helper.
 * this/super always match (same class implies same occurrence kind); literals
 * and class-object accesses compare by text; unknown kinds return false.
 */
public static boolean expressionsAreEquivalent( @Nullable PsiExpression exp1, @Nullable PsiExpression exp2) { if (exp1 == null && exp2 == null){ return true; } if(exp1 == null || exp2 == null){ return false; } final PsiExpression expToCompare1 = ParenthesesUtils.stripParentheses(exp1); final PsiExpression expToCompare2 = ParenthesesUtils.stripParentheses(exp2); if (expToCompare1 == null && expToCompare2 == null){ return true; } if(expToCompare1 == null || expToCompare2 == null){ return false; } if (expToCompare1.getClass() != expToCompare2.getClass()) { return false; } if (expToCompare1 instanceof PsiThisExpression) { return true; } else if (expToCompare1 instanceof PsiSuperExpression) { return true; } else if (expToCompare1 instanceof PsiLiteralExpression) { final String text1 = expToCompare1.getText(); final String text2 = expToCompare2.getText(); return text1.equals(text2); } else if (expToCompare1 instanceof
PsiClassObjectAccessExpression) { final String text1 = expToCompare1.getText(); final String text2 = expToCompare2.getText(); return text1.equals(text2); } else if (expToCompare1 instanceof PsiReferenceExpression) { return referenceExpressionsAreEquivalent( (PsiReferenceExpression) expToCompare1, (PsiReferenceExpression) expToCompare2); } else if (expToCompare1 instanceof PsiMethodCallExpression) { return methodCallExpressionsAreEquivalent( (PsiMethodCallExpression) expToCompare1, (PsiMethodCallExpression) expToCompare2); } else if (expToCompare1 instanceof PsiNewExpression) { return newExpressionsAreEquivalent( (PsiNewExpression) expToCompare1, (PsiNewExpression) expToCompare2); } else if (expToCompare1 instanceof PsiArrayInitializerExpression) { return arrayInitializerExpressionsAreEquivalent( (PsiArrayInitializerExpression) expToCompare1, (PsiArrayInitializerExpression) expToCompare2); } else if (expToCompare1 instanceof PsiTypeCastExpression) { return typecastExpressionsAreEquivalent( (PsiTypeCastExpression) expToCompare1, (PsiTypeCastExpression) expToCompare2); } else if (expToCompare1 instanceof PsiArrayAccessExpression) { return arrayAccessExpressionsAreEquivalent( (PsiArrayAccessExpression) expToCompare2, (PsiArrayAccessExpression) expToCompare1); } else if (expToCompare1 instanceof PsiPrefixExpression) { return prefixExpressionsAreEquivalent( (PsiPrefixExpression) expToCompare1, (PsiPrefixExpression) expToCompare2); } else if (expToCompare1 instanceof PsiPostfixExpression) { return postfixExpressionsAreEquivalent( (PsiPostfixExpression) expToCompare1, (PsiPostfixExpression) expToCompare2); } else if (expToCompare1 instanceof PsiBinaryExpression) { return binaryExpressionsAreEquivalent( (PsiBinaryExpression) expToCompare1, (PsiBinaryExpression) expToCompare2); } else if (expToCompare1 instanceof PsiAssignmentExpression) { return assignmentExpressionsAreEquivalent( (PsiAssignmentExpression) expToCompare1, (PsiAssignmentExpression) expToCompare2); } else if
(expToCompare1 instanceof PsiConditionalExpression) { return conditionalExpressionsAreEquivalent( (PsiConditionalExpression) expToCompare1, (PsiConditionalExpression) expToCompare2); } else if (expToCompare1 instanceof PsiInstanceOfExpression) { return instanceofExpressionsAreEquivalent( (PsiInstanceOfExpression) expToCompare1, (PsiInstanceOfExpression) expToCompare2); } return false; }
// Reference expressions must resolve to the same element (or both fail to
// resolve). For non-static instance members the qualifiers are compared too
// (an explicit this/super qualifier is treated like no qualifier); finally
// the reference text itself must match.
private static boolean referenceExpressionsAreEquivalent( PsiReferenceExpression referenceExpression1, PsiReferenceExpression referenceExpression2) { final PsiElement element1 = referenceExpression1.resolve(); final PsiElement element2 = referenceExpression2.resolve(); if (element1 != null) { if (!element1.equals(element2)) { return false; } } else { return element2 == null; } if (element1 instanceof PsiMember ) { final PsiMember member1 = (PsiMember)element1; if (member1.hasModifierProperty(PsiModifier.STATIC)) { return true; } else if (member1 instanceof PsiClass) { return true; } } else { return true; } final PsiExpression qualifier1 = referenceExpression1.getQualifierExpression(); final PsiExpression qualifier2 = referenceExpression2.getQualifierExpression(); if (qualifier1 != null && !(qualifier1 instanceof PsiThisExpression || qualifier1 instanceof PsiSuperExpression)) { if (qualifier2 == null) { return false; } else if (!expressionsAreEquivalent(qualifier1, qualifier2)) { return false; } } else { if (qualifier2 != null && !(qualifier2 instanceof PsiThisExpression || qualifier2 instanceof PsiSuperExpression)) { return false; } } final String text1 = referenceExpression1.getText(); final String text2 = referenceExpression2.getText(); return text1.equals(text2); }
// instanceof expressions compare operand and checked type (continues on next chunk line).
private static boolean instanceofExpressionsAreEquivalent( PsiInstanceOfExpression instanceOfExpression1, PsiInstanceOfExpression instanceOfExpression2) { final PsiExpression operand1 = instanceOfExpression1.getOperand(); final PsiExpression operand2 = instanceOfExpression2.getOperand(); if
(!expressionsAreEquivalent(operand1, operand2)) { return false; } final PsiTypeElement typeElement1 = instanceOfExpression1.getCheckType(); final PsiTypeElement typeElement2 = instanceOfExpression2.getCheckType(); if (typeElement1 == null) { return typeElement2 == null; } else if (typeElement2 == null) { return false; } final PsiType type1 = typeElement1.getType(); final PsiType type2 = typeElement2.getType(); return typesAreEquivalent(type1, type2); }
// Method calls match when the method reference expressions and the argument lists match.
private static boolean methodCallExpressionsAreEquivalent( @NotNull PsiMethodCallExpression methodExp1, @NotNull PsiMethodCallExpression methodExp2){ final PsiReferenceExpression methodExpression1 = methodExp1.getMethodExpression(); final PsiReferenceExpression methodExpression2 = methodExp2.getMethodExpression(); if(!expressionsAreEquivalent(methodExpression1, methodExpression2)){ return false; } final PsiExpressionList argumentList1 = methodExp1.getArgumentList(); final PsiExpression[] args1 = argumentList1.getExpressions(); final PsiExpressionList argumentList2 = methodExp2.getArgumentList(); final PsiExpression[] args2 = argumentList2.getExpressions(); return expressionListsAreEquivalent(args1, args2); }
// new expressions: class reference text, array dimensions, array initializer,
// qualifier and constructor arguments must all match. A missing class
// reference on either side makes the expressions non-equivalent.
private static boolean newExpressionsAreEquivalent( @NotNull PsiNewExpression newExp1, @NotNull PsiNewExpression newExp2) { final PsiJavaCodeReferenceElement classRef1 = newExp1.getClassReference(); final PsiJavaCodeReferenceElement classRef2 = newExp2.getClassReference(); if (classRef1 == null || classRef2 == null) { return false; } final String text = classRef1.getText(); if (!text.equals(classRef2.getText())) { return false; } final PsiExpression[] arrayDimensions1 = newExp1.getArrayDimensions(); final PsiExpression[] arrayDimensions2 = newExp2.getArrayDimensions(); if (!expressionListsAreEquivalent(arrayDimensions1, arrayDimensions2)) { return false; } final PsiArrayInitializerExpression arrayInitializer1 = newExp1.getArrayInitializer(); final PsiArrayInitializerExpression arrayInitializer2 =
newExp2.getArrayInitializer(); if (!expressionsAreEquivalent(arrayInitializer1, arrayInitializer2)) { return false; } final PsiExpression qualifier1 = newExp1.getQualifier(); final PsiExpression qualifier2 = newExp2.getQualifier(); if (!expressionsAreEquivalent(qualifier1, qualifier2)) { return false; } final PsiExpressionList argumentList1 = newExp1.getArgumentList(); final PsiExpression[] args1; if (argumentList1 == null) { args1 = null; } else { args1 = argumentList1.getExpressions(); } final PsiExpressionList argumentList2 = newExp2.getArgumentList(); final PsiExpression[] args2; if (argumentList2 == null) { args2 = null; } else { args2 = argumentList2.getExpressions(); } return expressionListsAreEquivalent(args1, args2); }
// Array initializers compare their element expressions pairwise.
private static boolean arrayInitializerExpressionsAreEquivalent( @NotNull PsiArrayInitializerExpression arrInitExp1, @NotNull PsiArrayInitializerExpression arrInitExp2){ final PsiExpression[] initializers1 = arrInitExp1.getInitializers(); final PsiExpression[] initializers2 = arrInitExp2.getInitializers(); return expressionListsAreEquivalent(initializers1, initializers2); }
// Type casts match when the (possibly null) cast types and the operands match.
private static boolean typecastExpressionsAreEquivalent( @NotNull PsiTypeCastExpression typecastExp1, @NotNull PsiTypeCastExpression typecastExp2) { final PsiTypeElement typeElement1 = typecastExp1.getCastType(); final PsiTypeElement typeElement2 = typecastExp2.getCastType(); if (typeElement1 == null && typeElement2 == null) { return true; } if (typeElement1 == null || typeElement2 == null) { return false; } final PsiType type1 = typeElement1.getType(); final PsiType type2 = typeElement2.getType(); if(!typesAreEquivalent(type1, type2)) { return false; } final PsiExpression operand1 = typecastExp1.getOperand(); final PsiExpression operand2 = typecastExp2.getOperand(); return expressionsAreEquivalent(operand1, operand2); }
// Array accesses match on array expression and index expression. The "2"/"1"
// parameter naming mirrors the swapped argument order used by the dispatcher;
// the comparison is symmetric so the result is unaffected.
private static boolean arrayAccessExpressionsAreEquivalent( @NotNull PsiArrayAccessExpression arrAccessExp2, @NotNull PsiArrayAccessExpression
arrAccessExp1){ final PsiExpression arrayExpression2 = arrAccessExp2.getArrayExpression(); final PsiExpression arrayExpression1 = arrAccessExp1.getArrayExpression(); final PsiExpression indexExpression2 = arrAccessExp2.getIndexExpression(); final PsiExpression indexExpression1 = arrAccessExp1.getIndexExpression(); return expressionsAreEquivalent(arrayExpression2, arrayExpression1) && expressionsAreEquivalent(indexExpression2, indexExpression1); }
// Prefix expressions match on operator token type and operand.
private static boolean prefixExpressionsAreEquivalent( @NotNull PsiPrefixExpression prefixExp1, @NotNull PsiPrefixExpression prefixExp2){ final PsiJavaToken sign1 = prefixExp1.getOperationSign(); final PsiJavaToken sign2 = prefixExp2.getOperationSign(); final IElementType tokenType1 = sign1.getTokenType(); if(!tokenType1.equals(sign2.getTokenType())){ return false; } final PsiExpression operand1 = prefixExp1.getOperand(); final PsiExpression operand2 = prefixExp2.getOperand(); return expressionsAreEquivalent(operand1, operand2); }
// Postfix expressions match on operator token type and operand.
private static boolean postfixExpressionsAreEquivalent( @NotNull PsiPostfixExpression postfixExp1, @NotNull PsiPostfixExpression postfixExp2){ final PsiJavaToken sign1 = postfixExp1.getOperationSign(); final PsiJavaToken sign2 = postfixExp2.getOperationSign(); final IElementType tokenType1 = sign1.getTokenType(); if(!tokenType1.equals(sign2.getTokenType())){ return false; } final PsiExpression operand1 = postfixExp1.getOperand(); final PsiExpression operand2 = postfixExp2.getOperand(); return expressionsAreEquivalent(operand1, operand2); }
// Binary expressions match on operator token type and both operands (continues on next chunk line).
private static boolean binaryExpressionsAreEquivalent( @NotNull PsiBinaryExpression binaryExp1, @NotNull PsiBinaryExpression binaryExp2){ final PsiJavaToken sign1 = binaryExp1.getOperationSign(); final PsiJavaToken sign2 = binaryExp2.getOperationSign(); final IElementType tokenType1 = sign1.getTokenType(); if(!tokenType1.equals(sign2.getTokenType())){ return false; } final PsiExpression lhs1 = binaryExp1.getLOperand(); final PsiExpression lhs2 =
binaryExp2.getLOperand(); final PsiExpression rhs1 = binaryExp1.getROperand(); final PsiExpression rhs2 = binaryExp2.getROperand(); return expressionsAreEquivalent(lhs1, lhs2) && expressionsAreEquivalent(rhs1, rhs2); }
// Assignments match on operator token type (=, +=, ...) plus both sides.
private static boolean assignmentExpressionsAreEquivalent( @NotNull PsiAssignmentExpression assignExp1, @NotNull PsiAssignmentExpression assignExp2){ final PsiJavaToken sign1 = assignExp1.getOperationSign(); final PsiJavaToken sign2 = assignExp2.getOperationSign(); final IElementType tokenType1 = sign1.getTokenType(); if(!tokenType1.equals(sign2.getTokenType())){ return false; } final PsiExpression lhs1 = assignExp1.getLExpression(); final PsiExpression lhs2 = assignExp2.getLExpression(); final PsiExpression rhs1 = assignExp1.getRExpression(); final PsiExpression rhs2 = assignExp2.getRExpression(); return expressionsAreEquivalent(lhs1, lhs2) && expressionsAreEquivalent(rhs1, rhs2); }
// Conditional (?:) expressions match on condition, then- and else-expressions.
private static boolean conditionalExpressionsAreEquivalent( @NotNull PsiConditionalExpression condExp1, @NotNull PsiConditionalExpression condExp2){ final PsiExpression condition1 = condExp1.getCondition(); final PsiExpression condition2 = condExp2.getCondition(); final PsiExpression thenExpression1 = condExp1.getThenExpression(); final PsiExpression thenExpression2 = condExp2.getThenExpression(); final PsiExpression elseExpression1 = condExp1.getElseExpression(); final PsiExpression elseExpression2 = condExp2.getElseExpression(); return expressionsAreEquivalent(condition1, condition2) && expressionsAreEquivalent(thenExpression1, thenExpression2) && expressionsAreEquivalent(elseExpression1, elseExpression2); }
// Null-safe pairwise comparison of two expression arrays (both null counts as equal).
private static boolean expressionListsAreEquivalent( @Nullable PsiExpression[] expressions1, @Nullable PsiExpression[] expressions2){ if(expressions1 == null && expressions2 == null){ return true; } if(expressions1 == null || expressions2 == null){ return false; } if(expressions1.length != expressions2.length){ return false; } for(int i = 0; i <
expressions1.length; i++){ if(!expressionsAreEquivalent(expressions1[i], expressions2[i])){ return false; } } return true; } }
package schaugenau.state.survey;

import java.sql.SQLException;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;

import com.jme3.math.Vector3f;
import com.jme3.scene.Node;

import schaugenau.app.App;
import schaugenau.app.App.Language;
import schaugenau.core.FadableState;
import schaugenau.core.SimpleWorldBackground;
import schaugenau.database.ScoreOperations;
import schaugenau.database.SurveyOperations;
import schaugenau.font.TextBox;
import schaugenau.gui.IconButton;

/**
 * Distributed under the MIT License. (See accompanying file LICENSE or copy at
 * https://github.com/raphaelmenges/schaugenau/blob/master/src/LICENSE)
 *
 * Survey state. Shows a welcome screen first; after the welcome button is
 * confirmed, survey items slide in from the right one at a time. Each answer
 * is persisted to the database together with the current game style, score and
 * the newest score primary key. When all items of the randomly chosen session
 * are answered (or none exist), control returns to the idle state.
 *
 * @author Raphael Menges
 *
 */
public class SurveyState extends FadableState {

    /** defines **/

    /* welcome screen layout */
    protected float welcomeTextBoxScale = 0.8f;
    protected Vector3f welcomeTextBoxPosition = new Vector3f(0, 2.5f, 0);
    protected float welcomeButtonScale = 2.5f;
    protected Vector3f welcomeButtonPosition = new Vector3f(0, -2.25f, 0);

    /* items */
    // off-screen position to the right; items animate from here to the center
    protected Vector3f restPosition = new Vector3f(25, 0, 0);
    // duration of the slide-in/slide-out animation in seconds
    protected float itemAnimationDuration = 1.0f;

    /** fields **/

    /* welcome screen */
    protected Node welcomeNode;
    protected TextBox welcomeTextBox;
    protected IconButton welcomeButton;
    // true while the welcome screen is attached to the GUI
    protected boolean welcomeAttached;

    /* items */
    protected List<SurveyItem> items;
    // index of the NEXT item to show (not the current one)
    protected int itemIndex;
    protected SurveyItem currentItem;
    // item sliding out to the left while currentItem slides in; may be null
    protected SurveyItem previousItem;
    // counts DOWN from itemAnimationDuration to 0 during the slide animation
    protected float itemAnimationTime;

    /* other */
    protected SurveyOperations surveyOperations;
    protected ScoreOperations scoreOperations;
    protected Random rand;
    protected SimpleWorldBackground background;

    /** methods **/

    /**
     * Constructor. Builds the welcome screen scene graph, the database access
     * objects and the background. Items themselves are fetched lazily in
     * {@link #attach()} so each visit gets a fresh session.
     */
    public SurveyState(App app, String name, boolean debugging) {
        super(app, name, debugging);

        /* welcome screen */
        welcomeNode = new Node("WelcomeNode");
        welcomeTextBox = new TextBox(this.app.getPrimaryGuiFont(),
                schaugenau.font.TextBox.Alignment.CENTER);
        welcomeTextBox.setLocalScale(welcomeTextBoxScale);
        welcomeTextBox.setLocalTranslation(welcomeTextBoxPosition);
        welcomeTextBox.attachTo(welcomeNode);
        welcomeButton = new IconButton(this.app, this.guiAdapter, welcomeButtonPosition,
                welcomeButtonScale, "Icon-Ok", "WelcomeButton");
        welcomeButton.attachTo(welcomeNode);

        /* database stuff */
        surveyOperations = new SurveyOperations();
        scoreOperations = new ScoreOperations();

        /* other */
        background = new SimpleWorldBackground(this.app, false);
        background.attachTo(rootNode);
        rand = new Random();
    }

    /**
     * Per-frame update. Drives the welcome screen, the slide animation of the
     * current/previous item pair and the persistence of answered items.
     *
     * @return whether the state has detached itself (from the superclass)
     */
    @Override
    public boolean update(float tpf, boolean buzzerPressed) {
        boolean hasDetachedItself = super.update(tpf, buzzerPressed);

        /* no items ? */
        // NOTE(review): this triggers loadIdleState() on every frame while
        // items is empty — presumably loadIdleState is idempotent; verify.
        if (items.isEmpty()) {
            this.app.loadIdleState();
        }

        if (!paused && !items.isEmpty()) {

            this.background.update(tpf);

            /* say hello */
            if (welcomeAttached) {
                if (welcomeButton.update(tpf, true)) {
                    guiAdapter.detachChild(welcomeNode);
                    welcomeAttached = false;

                    /* attach first item */
                    this.nextItem();
                }
            }
            /* show items */
            else {

                /* animation: t runs from 0 (start) to 1 (rest) */
                itemAnimationTime -= tpf;
                itemAnimationTime = Math.max(itemAnimationTime, 0);
                float t = 1.0f - (itemAnimationTime / itemAnimationDuration);

                /* animate current item: slides from restPosition to center */
                currentItem.setLocalTranslation(new Vector3f().interpolate(restPosition,
                        new Vector3f(), t));

                /* animate previous item: slides from center off to the left */
                if (previousItem != null) {
                    previousItem.setLocalTranslation(
                            new Vector3f().interpolate(new Vector3f(),
                                    restPosition.clone().negateLocal(), t));
                    previousItem.update(tpf);
                }

                /* wait for feedback from current; -1 means "no answer yet" */
                int result = currentItem.update(tpf);
                if (result != -1) {

                    /* save results */
                    int newestScore = scoreOperations.getMaxPkey();
                    surveyOperations.saveResult(app.getCurrentGameStyleString(),
                            app.getCurrentScore(), currentItem.getId(), result, newestScore);

                    /* next items, if possible */
                    if (!nextItem()) {

                        /* no further items in this session */
                        this.app.setDoSurvey(false);
                        this.app.loadIdleState();
                    }
                }
            }
        }
        return hasDetachedItself;
    }

    /* fade in, returns if finished */
    @Override
    protected boolean fadeIn(float tpf) {
        boolean fadingDone = super.fadeIn(tpf);

        /* nothing to do */

        return fadingDone;
    }

    /* running */
    @Override
    protected void running(float tpf, boolean buzzerPressed) {
        super.running(tpf, buzzerPressed);

        /* nothing to do */
    }

    /* fade out, returns if finished */
    @Override
    protected boolean fadeOut(float tpf) {
        boolean fadingDone = super.fadeOut(tpf);

        /* nothing to do */

        return fadingDone;
    }

    /**
     * Attach. Fetches a fresh session of survey items in the app's current
     * language and shows the welcome screen.
     */
    @Override
    protected void attach() {
        super.attach();

        /* fetch items from database */
        items = this.fetchSurveyItems(this.app.getLanguage() == Language.ENGLISH);

        /* say hello */
        guiAdapter.attachChild(welcomeNode);
        welcomeAttached = true;
        welcomeTextBox.setContent(this.app.getMessages().getString("survey.welcome"));

        /* other */
        currentItem = null;
        itemIndex = 0;
    }

    /* detach */
    @Override
    protected void detach() {
        super.detach();

        /* detach survey items */
        for (SurveyItem item : items) {
            item.detach();
        }
    }

    /* stop */
    @Override
    public void stop() {
        super.stop();

        /* nothing to do */
    }

    /**
     * Fetch items from database: picks one random session and returns its
     * items, already attached to the GUI at the off-screen rest position.
     * Returns an empty list on database errors.
     *
     * @param english whether to load the English (true) or German (false) texts
     */
    protected List<SurveyItem> fetchSurveyItems(boolean english) {

        /* create empty list */
        List<SurveyItem> items = new LinkedList<>();

        /* set language */
        if (english) {
            SurveyOperations.setLanguage("english");
        } else {
            SurveyOperations.setLanguage("german");
        }

        /* try to get items */
        try {
            int count = surveyOperations.getSessionCount();
            // NOTE(review): nextInt(count) throws if count == 0 — assumes at
            // least one session exists in the database; confirm.
            int session = this.rand.nextInt(count) + 1;
            items = surveyOperations.getSession(session, this.app, this.guiAdapter);
        } catch (ClassNotFoundException | SQLException e) {
            e.printStackTrace();
        }

        /* attach all items and place them on the right side */
        for (SurveyItem item : items) {
            item.attachTo(guiAdapter.getNode());
            item.setLocalTranslation(restPosition.clone());
        }

        /* return items */
        return items;
    }

    /**
     * Attach next item if possible. Promotes the current item to previous
     * (moving an older previous item fully off-screen) and restarts the slide
     * animation.
     *
     * @return true if a further item was available, false otherwise
     */
    protected boolean nextItem() {

        /* check, whether there are further items */
        if (itemIndex < items.size()) {
            if (currentItem != null) {
                if (previousItem != null) {

                    /* hide previous */
                    previousItem.setLocalTranslation(restPosition.clone().negateLocal());
                }
                previousItem = currentItem;
            }
            currentItem = items.get(itemIndex);
            itemIndex++;
            itemAnimationTime = itemAnimationDuration;
            return true;
        } else {
            return false;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.registration; import org.apache.flink.runtime.rpc.FencedRpcGateway; import org.apache.flink.runtime.rpc.RpcGateway; import org.apache.flink.runtime.rpc.RpcService; import org.apache.flink.util.ExceptionUtils; import org.apache.flink.util.Preconditions; import org.slf4j.Logger; import javax.annotation.Nullable; import java.io.Serializable; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import static org.apache.flink.util.Preconditions.checkNotNull; /** * This utility class implements the basis of registering one component at another component, for * example registering the TaskExecutor at the ResourceManager. This {@code RetryingRegistration} * implements both the initial address resolution and the retries-with-backoff strategy. * * <p>The registration gives access to a future that is completed upon successful registration. The * registration can be canceled, for example when the target where it tries to register at looses * leader status. * * @param <F> The type of the fencing token * @param <G> The type of the gateway to connect to. 
* @param <S> The type of the successful registration responses. * @param <R> The type of the registration rejection responses. */ public abstract class RetryingRegistration< F extends Serializable, G extends RpcGateway, S extends RegistrationResponse.Success, R extends RegistrationResponse.Rejection> { // ------------------------------------------------------------------------ // Fields // ------------------------------------------------------------------------ private final Logger log; private final RpcService rpcService; private final String targetName; private final Class<G> targetType; private final String targetAddress; private final F fencingToken; private final CompletableFuture<RetryingRegistrationResult<G, S, R>> completionFuture; private final RetryingRegistrationConfiguration retryingRegistrationConfiguration; private volatile boolean canceled; // ------------------------------------------------------------------------ public RetryingRegistration( Logger log, RpcService rpcService, String targetName, Class<G> targetType, String targetAddress, F fencingToken, RetryingRegistrationConfiguration retryingRegistrationConfiguration) { this.log = checkNotNull(log); this.rpcService = checkNotNull(rpcService); this.targetName = checkNotNull(targetName); this.targetType = checkNotNull(targetType); this.targetAddress = checkNotNull(targetAddress); this.fencingToken = checkNotNull(fencingToken); this.retryingRegistrationConfiguration = checkNotNull(retryingRegistrationConfiguration); this.completionFuture = new CompletableFuture<>(); } // ------------------------------------------------------------------------ // completion and cancellation // ------------------------------------------------------------------------ public CompletableFuture<RetryingRegistrationResult<G, S, R>> getFuture() { return completionFuture; } /** Cancels the registration procedure. 
*/ public void cancel() { canceled = true; completionFuture.cancel(false); } /** * Checks if the registration was canceled. * * @return True if the registration was canceled, false otherwise. */ public boolean isCanceled() { return canceled; } // ------------------------------------------------------------------------ // registration // ------------------------------------------------------------------------ protected abstract CompletableFuture<RegistrationResponse> invokeRegistration( G gateway, F fencingToken, long timeoutMillis) throws Exception; /** * This method resolves the target address to a callable gateway and starts the registration * after that. */ @SuppressWarnings("unchecked") public void startRegistration() { if (canceled) { // we already got canceled return; } try { // trigger resolution of the target address to a callable gateway final CompletableFuture<G> rpcGatewayFuture; if (FencedRpcGateway.class.isAssignableFrom(targetType)) { rpcGatewayFuture = (CompletableFuture<G>) rpcService.connect( targetAddress, fencingToken, targetType.asSubclass(FencedRpcGateway.class)); } else { rpcGatewayFuture = rpcService.connect(targetAddress, targetType); } // upon success, start the registration attempts CompletableFuture<Void> rpcGatewayAcceptFuture = rpcGatewayFuture.thenAcceptAsync( (G rpcGateway) -> { log.info("Resolved {} address, beginning registration", targetName); register( rpcGateway, 1, retryingRegistrationConfiguration .getInitialRegistrationTimeoutMillis()); }, rpcService.getExecutor()); // upon failure, retry, unless this is cancelled rpcGatewayAcceptFuture.whenCompleteAsync( (Void v, Throwable failure) -> { if (failure != null && !canceled) { final Throwable strippedFailure = ExceptionUtils.stripCompletionException(failure); if (log.isDebugEnabled()) { log.debug( "Could not resolve {} address {}, retrying in {} ms.", targetName, targetAddress, retryingRegistrationConfiguration.getErrorDelayMillis(), strippedFailure); } else { log.info( "Could not 
resolve {} address {}, retrying in {} ms: {}", targetName, targetAddress, retryingRegistrationConfiguration.getErrorDelayMillis(), strippedFailure.getMessage()); } startRegistrationLater( retryingRegistrationConfiguration.getErrorDelayMillis()); } }, rpcService.getExecutor()); } catch (Throwable t) { completionFuture.completeExceptionally(t); cancel(); } } /** * This method performs a registration attempt and triggers either a success notification or a * retry, depending on the result. */ @SuppressWarnings("unchecked") private void register(final G gateway, final int attempt, final long timeoutMillis) { // eager check for canceling to avoid some unnecessary work if (canceled) { return; } try { log.debug( "Registration at {} attempt {} (timeout={}ms)", targetName, attempt, timeoutMillis); CompletableFuture<RegistrationResponse> registrationFuture = invokeRegistration(gateway, fencingToken, timeoutMillis); // if the registration was successful, let the TaskExecutor know CompletableFuture<Void> registrationAcceptFuture = registrationFuture.thenAcceptAsync( (RegistrationResponse result) -> { if (!isCanceled()) { if (result instanceof RegistrationResponse.Success) { log.debug( "Registration with {} at {} was successful.", targetName, targetAddress); S success = (S) result; completionFuture.complete( RetryingRegistrationResult.success( gateway, success)); } else if (result instanceof RegistrationResponse.Rejection) { log.debug( "Registration with {} at {} was rejected.", targetName, targetAddress); R rejection = (R) result; completionFuture.complete( RetryingRegistrationResult.rejection(rejection)); } else { // registration failure if (result instanceof RegistrationResponse.Failure) { RegistrationResponse.Failure failure = (RegistrationResponse.Failure) result; log.info( "Registration failure at {} occurred.", targetName, failure.getReason()); } else { log.error( "Received unknown response to registration attempt: {}", result); } log.info( "Pausing and re-attempting 
registration in {} ms", retryingRegistrationConfiguration .getRefusedDelayMillis()); registerLater( gateway, 1, retryingRegistrationConfiguration .getInitialRegistrationTimeoutMillis(), retryingRegistrationConfiguration .getRefusedDelayMillis()); } } }, rpcService.getExecutor()); // upon failure, retry registrationAcceptFuture.whenCompleteAsync( (Void v, Throwable failure) -> { if (failure != null && !isCanceled()) { if (ExceptionUtils.stripCompletionException(failure) instanceof TimeoutException) { // we simply have not received a response in time. maybe the timeout // was // very low (initial fast registration attempts), maybe the target // endpoint is // currently down. if (log.isDebugEnabled()) { log.debug( "Registration at {} ({}) attempt {} timed out after {} ms", targetName, targetAddress, attempt, timeoutMillis); } long newTimeoutMillis = Math.min( 2 * timeoutMillis, retryingRegistrationConfiguration .getMaxRegistrationTimeoutMillis()); register(gateway, attempt + 1, newTimeoutMillis); } else { // a serious failure occurred. 
we still should not give up, but keep // trying log.error( "Registration at {} failed due to an error", targetName, failure); log.info( "Pausing and re-attempting registration in {} ms", retryingRegistrationConfiguration.getErrorDelayMillis()); registerLater( gateway, 1, retryingRegistrationConfiguration .getInitialRegistrationTimeoutMillis(), retryingRegistrationConfiguration.getErrorDelayMillis()); } } }, rpcService.getExecutor()); } catch (Throwable t) { completionFuture.completeExceptionally(t); cancel(); } } private void registerLater( final G gateway, final int attempt, final long timeoutMillis, long delay) { rpcService.scheduleRunnable( new Runnable() { @Override public void run() { register(gateway, attempt, timeoutMillis); } }, delay, TimeUnit.MILLISECONDS); } private void startRegistrationLater(final long delay) { rpcService.scheduleRunnable(this::startRegistration, delay, TimeUnit.MILLISECONDS); } static final class RetryingRegistrationResult<G, S, R> { @Nullable private final G gateway; @Nullable private final S success; @Nullable private final R rejection; private RetryingRegistrationResult( @Nullable G gateway, @Nullable S success, @Nullable R rejection) { this.gateway = gateway; this.success = success; this.rejection = rejection; } boolean isSuccess() { return success != null && gateway != null; } boolean isRejection() { return rejection != null; } public G getGateway() { Preconditions.checkState(isSuccess()); return gateway; } public R getRejection() { Preconditions.checkState(isRejection()); return rejection; } public S getSuccess() { Preconditions.checkState(isSuccess()); return success; } static < G extends RpcGateway, S extends RegistrationResponse.Success, R extends RegistrationResponse.Rejection> RetryingRegistrationResult<G, S, R> success(G gateway, S success) { return new RetryingRegistrationResult<>(gateway, success, null); } static < G extends RpcGateway, S extends RegistrationResponse.Success, R extends RegistrationResponse.Rejection> 
RetryingRegistrationResult<G, S, R> rejection(R rejection) { return new RetryingRegistrationResult<>(null, null, rejection); } } }
package com.kinetiqa.glacier.dialogs; import android.app.Dialog; import android.content.Context; import android.media.Image; import android.media.MediaPlayer; import android.media.MediaRecorder; import android.os.Bundle; import android.os.Handler; import android.view.View; import android.view.Window; import android.view.WindowManager; import android.widget.Button; import android.widget.ImageButton; import android.widget.SeekBar; import android.widget.TextView; import com.kinetiqa.glacier.R; import com.kinetiqa.glacier.core.Config; import com.kinetiqa.glacier.ui.FragmentComposeMessage; import com.kinetiqa.glacier.utils.TimeConversion; import java.io.File; import java.io.IOException; public class DialogAudioRecorder extends Dialog { private FragmentComposeMessage h; private File audioFile; private String audioFileLocation; private MediaPlayer audioPlayer; private MediaRecorder audioRecorder; private Handler recorderTimeHandler = new Handler(); private Handler seekHandler = new Handler(); private long audioRecorderStartTime; private SeekBar playbackSeekBar; private TextView playbackTimerTextView; private TextView recordTimerTextView; private Button recordButton; private Button stopRecordingButton; private Button playButton; private Button pauseButton; private Button finishButton; public DialogAudioRecorder(Context context, FragmentComposeMessage h) { super(context, android.R.style.Theme_Holo_NoActionBar); getWindow().setBackgroundDrawableResource(R.color.transparent); getWindow().getAttributes().windowAnimations = R.style.DialogFadeAnimation; requestWindowFeature(Window.FEATURE_NO_TITLE); this.h = h; } @Override public void onCreate(Bundle savedInstanceState) { setCancelable(true); setContentView(R.layout.dialog_message_audio_recorder); init(); } private void init() { recordTimerTextView = (TextView) findViewById(R.id.ar_timer); playbackTimerTextView = (TextView) findViewById(R.id.ar_playback_timer); playbackSeekBar = (SeekBar) findViewById(R.id.ar_seek); 
playbackSeekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean b) { audioPlayer.seekTo(progress); } @Override public void onStartTrackingTouch(SeekBar seekBar) { } @Override public void onStopTrackingTouch(SeekBar seekBar) { } }); recordButton = (Button) findViewById(R.id.ar_record); recordButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { recordButton.setVisibility(View.GONE); stopRecordingButton.setVisibility(View.VISIBLE); playButton.setVisibility(View.GONE); pauseButton.setVisibility(View.GONE); finishButton.setVisibility(View.GONE); playbackSeekBar.setVisibility(View.GONE); recordTimerTextView.setVisibility(View.VISIBLE); playbackTimerTextView.setVisibility(View.GONE); audioPlayer = null; try { startRecording(); } catch (IOException e) { e.printStackTrace(); } } }); stopRecordingButton = (Button) findViewById(R.id.ar_stop); stopRecordingButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { recordButton.setVisibility(View.VISIBLE); stopRecordingButton.setVisibility(View.GONE); playButton.setVisibility(View.VISIBLE); pauseButton.setVisibility(View.GONE); finishButton.setVisibility(View.VISIBLE); playbackSeekBar.setVisibility(View.VISIBLE); recordTimerTextView.setVisibility(View.GONE); playbackTimerTextView.setVisibility(View.VISIBLE); stopRecording(); } }); playButton = (Button) findViewById(R.id.ar_play); playButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { playButton.setVisibility(View.GONE); pauseButton.setVisibility(View.VISIBLE); if (audioPlayer == null) { initAudioPlayback(); } audioPlayer.start(); } }); pauseButton = (Button) findViewById(R.id.ar_pause); pauseButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { playButton.setVisibility(View.VISIBLE); 
pauseButton.setVisibility(View.GONE); audioPlayer.pause(); } }); finishButton = (Button) findViewById(R.id.ar_finish); finishButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { if (audioPlayer != null) { // audioPlayer.stop(); // audioPlayer.release(); } h.attachAudioCallback(audioFileLocation); dismiss(); } }); ImageButton closeButton = (ImageButton) findViewById(R.id.dialog_close); closeButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { dismiss(); } }); } private void initAudioPlayback() { audioPlayer = new MediaPlayer(); try { audioPlayer.setDataSource(audioFileLocation); audioPlayer.prepare(); playbackSeekBar.setMax(audioPlayer.getDuration()); playbackSeekBar.setProgress(audioPlayer.getCurrentPosition()); seekHandler.postDelayed(seekBarRunnable, 100); } catch (IllegalArgumentException e) { e.printStackTrace(); } catch (IllegalStateException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } private void startRecording() throws IOException { File dir = new File(Config.MESSAGE_MEDIA_PATH_PREFIX); dir.mkdirs(); File noMedia = new File(dir, ".nomedia"); try { noMedia.createNewFile(); } catch (IOException e) { e.printStackTrace(); } try { audioFile = File.createTempFile("sound", ".mp4", dir); } catch (IOException e) { System.out.println(e.toString()); return; } audioFileLocation = audioFile.getPath(); audioRecorder = new MediaRecorder(); audioRecorder.setAudioSource(MediaRecorder.AudioSource.MIC); audioRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4); audioRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC); audioRecorder.setOutputFile(audioFile.getAbsolutePath()); audioRecorder.prepare(); audioRecorder.start(); audioRecorderStartTime = System.currentTimeMillis(); recorderTimeHandler.postDelayed(recorderTimerRunnable, 100); } private void stopRecording() { audioRecorder.stop(); audioRecorder.release(); 
recorderTimeHandler.removeCallbacks(recorderTimerRunnable); } Runnable seekBarRunnable = new Runnable() { @Override public void run() { playbackSeekBar.setProgress(audioPlayer.getCurrentPosition()); playbackTimerTextView.setText(TimeConversion.convertMillisecondsToTime(audioPlayer.getCurrentPosition()) + "/" + TimeConversion.convertMillisecondsToTime(audioPlayer.getDuration())); seekHandler.postDelayed(seekBarRunnable, 100); } }; Runnable recorderTimerRunnable = new Runnable() { @Override public void run() { long elapsedTime = System.currentTimeMillis() - audioRecorderStartTime; recordTimerTextView.setText(TimeConversion.convertMillisecondsToTime(elapsedTime)); recorderTimeHandler.postDelayed(recorderTimerRunnable, 100); } }; }
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keycloak.testsuite.federation.sync; import org.jboss.logging.Logger; import org.junit.Assert; import org.junit.ClassRule; import org.junit.FixMethodOrder; import org.junit.Test; import org.junit.runners.MethodSorters; import org.keycloak.common.util.Time; import org.keycloak.models.KeycloakSession; import org.keycloak.models.KeycloakSessionFactory; import org.keycloak.models.RealmModel; import org.keycloak.services.managers.RealmManager; import org.keycloak.services.managers.UserStorageSyncManager; import org.keycloak.storage.UserStorageProvider; import org.keycloak.storage.UserStorageProviderModel; import org.keycloak.storage.user.SynchronizationResult; import org.keycloak.testsuite.federation.DummyUserFederationProviderFactory; import org.keycloak.testsuite.rule.KeycloakRule; import org.keycloak.timer.TimerProvider; import java.util.concurrent.TimeUnit; /** * Test with Dummy providers * * @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a> */ @FixMethodOrder(MethodSorters.NAME_ASCENDING) public class SyncFederationTest { private static final Logger log = Logger.getLogger(SyncFederationTest.class); private static UserStorageProviderModel dummyModel = null; @ClassRule public static KeycloakRule keycloakRule = new KeycloakRule(new KeycloakRule.KeycloakSetup() { @Override public void 
config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) { // Other tests may left Time offset uncleared, which could cause issues Time.setOffset(0); } }); /** * Test that period sync is triggered when creating a synchronized User Storage Provider * */ @Test public void test01PeriodicSyncOnCreate() { KeycloakSession session = keycloakRule.startSession(); KeycloakSessionFactory sessionFactory = session.getKeycloakSessionFactory(); DummyUserFederationProviderFactory dummyFedFactory = (DummyUserFederationProviderFactory) sessionFactory.getProviderFactory(UserStorageProvider.class, DummyUserFederationProviderFactory.PROVIDER_NAME); int full = dummyFedFactory.getFullSyncCounter(); int changed = dummyFedFactory.getChangedSyncCounter(); keycloakRule.stopSession(session, false); // Enable timer for SyncDummyUserFederationProvider keycloakRule.update(new KeycloakRule.KeycloakSetup() { @Override public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) { UserStorageProviderModel model = new UserStorageProviderModel(); model.setProviderId(DummyUserFederationProviderFactory.PROVIDER_NAME); model.setPriority(1); model.setName("test-sync-dummy"); model.setFullSyncPeriod(-1); model.setChangedSyncPeriod(1); model.setLastSync(0); dummyModel = new UserStorageProviderModel(appRealm.addComponentModel(model)); } }); session = keycloakRule.startSession(); try { // Assert that after some period was DummyUserFederationProvider triggered UserStorageSyncManager usersSyncManager = new UserStorageSyncManager(); sleep(1800); // Cancel timer RealmModel appRealm = session.realms().getRealmByName("test"); usersSyncManager.notifyToRefreshPeriodicSync(session, appRealm, dummyModel, true); log.infof("Notified sync manager about cancel periodic sync"); // This sync is here just to ensure that we have lock (doublecheck that periodic sync, which was possibly triggered before canceling timer is finished too) while (true) { SynchronizationResult 
result = usersSyncManager.syncChangedUsers(session.getKeycloakSessionFactory(), appRealm.getId(), dummyModel); if (result.isIgnored()) { log.infof("Still waiting for lock before periodic sync is finished", result.toString()); sleep(1000); } else { break; } } // Assert that DummyUserFederationProviderFactory.syncChangedUsers was invoked at least 2 times (once periodically and once for us) int newChanged = dummyFedFactory.getChangedSyncCounter(); Assert.assertEquals(full, dummyFedFactory.getFullSyncCounter()); Assert.assertTrue("Assertion failed. newChanged=" + newChanged + ", changed=" + changed, newChanged > (changed + 1)); // Assert that dummy provider won't be invoked anymore sleep(1800); Assert.assertEquals(full, dummyFedFactory.getFullSyncCounter()); int newestChanged = dummyFedFactory.getChangedSyncCounter(); Assert.assertEquals("Assertion failed. newChanged=" + newChanged + ", newestChanged=" + newestChanged, newChanged, newestChanged); } finally { keycloakRule.stopSession(session, true); } // remove dummyProvider keycloakRule.update(new KeycloakRule.KeycloakSetup() { @Override public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) { appRealm.removeComponent(dummyModel); } }); } /** * Test that period sync is triggered when updating a synchronized User Storage Provider to have a non-negative sync period * */ @Test public void test02PeriodicSyncOnUpdate() { KeycloakSession session = keycloakRule.startSession(); KeycloakSessionFactory sessionFactory = session.getKeycloakSessionFactory(); DummyUserFederationProviderFactory dummyFedFactory = (DummyUserFederationProviderFactory) sessionFactory.getProviderFactory(UserStorageProvider.class, DummyUserFederationProviderFactory.PROVIDER_NAME); int full = dummyFedFactory.getFullSyncCounter(); int changed = dummyFedFactory.getChangedSyncCounter(); keycloakRule.stopSession(session, false); // Enable timer for SyncDummyUserFederationProvider keycloakRule.update(new 
KeycloakRule.KeycloakSetup() { @Override public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) { UserStorageProviderModel model = new UserStorageProviderModel(); model.setProviderId(DummyUserFederationProviderFactory.PROVIDER_NAME); model.setPriority(1); model.setName("test-sync-dummy"); model.setFullSyncPeriod(-1); model.setChangedSyncPeriod(-1); model.setLastSync(0); dummyModel = new UserStorageProviderModel(appRealm.addComponentModel(model)); } }); session = keycloakRule.startSession(); try { // Assert that after some period was DummyUserFederationProvider triggered UserStorageSyncManager usersSyncManager = new UserStorageSyncManager(); // Assert that dummy provider won't be invoked anymore sleep(1800); Assert.assertEquals(full, dummyFedFactory.getFullSyncCounter()); int newestChanged = dummyFedFactory.getChangedSyncCounter(); Assert.assertEquals("Assertion failed. newChanged=" + changed + ", newestChanged=" + newestChanged, changed, newestChanged); } finally { keycloakRule.stopSession(session, true); } keycloakRule.update(new KeycloakRule.KeycloakSetup() { @Override public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) { dummyModel.setChangedSyncPeriod(1); appRealm.updateComponent(dummyModel); } }); session = keycloakRule.startSession(); try { // Assert that after some period was DummyUserFederationProvider triggered UserStorageSyncManager usersSyncManager = new UserStorageSyncManager(); sleep(1800); // Cancel timer RealmModel appRealm = session.realms().getRealmByName("test"); usersSyncManager.notifyToRefreshPeriodicSync(session, appRealm, dummyModel, true); log.infof("Notified sync manager about cancel periodic sync"); // This sync is here just to ensure that we have lock (doublecheck that periodic sync, which was possibly triggered before canceling timer is finished too) while (true) { SynchronizationResult result = 
usersSyncManager.syncChangedUsers(session.getKeycloakSessionFactory(), appRealm.getId(), dummyModel); if (result.isIgnored()) { log.infof("Still waiting for lock before periodic sync is finished", result.toString()); sleep(1000); } else { break; } } // Assert that DummyUserFederationProviderFactory.syncChangedUsers was invoked at least 2 times (once periodically and once for us) int newChanged = dummyFedFactory.getChangedSyncCounter(); Assert.assertEquals(full, dummyFedFactory.getFullSyncCounter()); log.info("Asserting. newChanged=" + newChanged + " > changed=" + changed); Assert.assertTrue("Assertion failed. newChanged=" + newChanged + ", changed=" + changed, newChanged > (changed + 1)); // Assert that dummy provider won't be invoked anymore sleep(1800); Assert.assertEquals(full, dummyFedFactory.getFullSyncCounter()); int newestChanged = dummyFedFactory.getChangedSyncCounter(); Assert.assertEquals("Assertion failed. newChanged=" + newChanged + ", newestChanged=" + newestChanged, newChanged, newestChanged); } finally { keycloakRule.stopSession(session, true); } // remove dummyProvider keycloakRule.update(new KeycloakRule.KeycloakSetup() { @Override public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) { appRealm.removeComponent(dummyModel); } }); } @Test public void test03ConcurrentSync() throws Exception { SyncDummyUserFederationProviderFactory.restartLatches(); // Enable timer for SyncDummyUserFederationProvider keycloakRule.update(new KeycloakRule.KeycloakSetup() { @Override public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) { UserStorageProviderModel model = new UserStorageProviderModel(); model.setProviderId(SyncDummyUserFederationProviderFactory.SYNC_PROVIDER_ID); model.setPriority(1); model.setName("test-sync-dummy"); model.setFullSyncPeriod(-1); model.setChangedSyncPeriod(1); model.setLastSync(0); model.getConfig().putSingle(SyncDummyUserFederationProviderFactory.WAIT_TIME, 
"2000"); dummyModel = new UserStorageProviderModel(appRealm.addComponentModel(model)); } }); KeycloakSession session = keycloakRule.startSession(); try { KeycloakSessionFactory sessionFactory = session.getKeycloakSessionFactory(); // bootstrap periodic sync UserStorageSyncManager usersSyncManager = new UserStorageSyncManager(); usersSyncManager.bootstrapPeriodic(sessionFactory, session.getProvider(TimerProvider.class)); // Wait and then trigger sync manually. Assert it will be ignored sleep(1800); RealmModel realm = session.realms().getRealm("test"); SynchronizationResult syncResult = usersSyncManager.syncChangedUsers(sessionFactory, realm.getId(), dummyModel); Assert.assertTrue(syncResult.isIgnored()); // Cancel timer usersSyncManager.notifyToRefreshPeriodicSync(session, realm, dummyModel, true); // Signal to factory to finish waiting SyncDummyUserFederationProviderFactory.latch1.countDown(); } finally { keycloakRule.stopSession(session, true); } SyncDummyUserFederationProviderFactory.latch2.await(20000, TimeUnit.MILLISECONDS); // remove provider keycloakRule.update(new KeycloakRule.KeycloakSetup() { @Override public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) { appRealm.removeComponent(dummyModel); } }); } private void sleep(int time) { try { Thread.sleep(time); } catch (InterruptedException ie) { throw new RuntimeException(ie); } } }
/* Copyright (C) 2005-2011 Fabio Riccardi */

package com.lightcrafts.platform;

import java.awt.*;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;
import java.lang.reflect.InvocationTargetException;
import javax.swing.*;

import com.lightcrafts.utils.ProgressListener;
import com.lightcrafts.utils.ProgressIndicator;
import com.lightcrafts.utils.Version;
import com.lightcrafts.utils.swing.NoIcon;
import com.lightcrafts.utils.thread.CancelableThread;
import com.lightcrafts.utils.thread.CancelableThreadMonitor;
import com.lightcrafts.utils.thread.ProgressThread;
import com.lightcrafts.ui.LightZoneSkin;

import org.jvnet.substance.SubstanceLookAndFeel;

/**
 * A <code>DefaultProgressDialog</code> implements {@link ProgressDialog} for
 * the default platform.  It displays a modal dialog containing a short
 * message, a {@link JProgressBar}, and optionally a Cancel button while a
 * {@link CancelableThread} runs in the background.
 * <p>
 * All touches of the progress bar are marshalled onto the AWT event dispatch
 * thread via {@link EventQueue#invokeLater(Runnable)}, so the mutator methods
 * of this class may be called from any thread.
 *
 * @author Paul J. Lucas [paul@lightcrafts.com]
 */
public final class DefaultProgressDialog implements ProgressDialog {

    ////////// public /////////////////////////////////////////////////////////

    /**
     * Construct a <code>DefaultProgressDialog</code> and the progress bar it
     * displays, skinned with the LightZone orange Substance theme.
     */
    public DefaultProgressDialog() {
        m_progressBar = new JProgressBar();
        m_progressBar.putClientProperty(
            SubstanceLookAndFeel.THEME_PROPERTY, LightZoneSkin.orangeTheme
        );
        m_progressBar.setBorder(null);
    }

    /**
     * {@inheritDoc}
     */
    public Throwable getThrown() {
        return m_threadMonitor.getThrown();
    }

    /**
     * {@inheritDoc}
     */
    public void incrementBy( final int delta ) {
        EventQueue.invokeLater(
            new Runnable() {
                public void run() {
                    // The bar may have been nulled out by dispose(); see
                    // JProgressDialog.dispose() below.
                    if (m_progressBar != null)
                        m_progressBar.setValue(
                            m_progressBar.getValue() + delta
                        );
                }
            }
        );
    }

    /**
     * {@inheritDoc}
     */
    public void setIndeterminate( final boolean indeterminate ) {
        EventQueue.invokeLater(
            new Runnable() {
                public void run() {
                    if (m_progressBar != null) {
                        //
                        // There's a bug in Java: if you make a determinate
                        // progress bar indeterminate, the "barber pole" is
                        // partially "frozen" from where the old value was to
                        // the right.  To fix it, first set the value to the
                        // maximum value.
                        //
                        m_progressBar.setValue( m_progressBar.getMaximum() );
                        m_progressBar.setIndeterminate( indeterminate );
                    }
                }
            }
        );
    }

    /**
     * {@inheritDoc}
     */
    public void setMaximum( final int maximum ) {
        EventQueue.invokeLater(
            new Runnable() {
                public void run() {
                    if (m_progressBar != null)
                        m_progressBar.setMaximum( maximum );
                }
            }
        );
    }

    /**
     * {@inheritDoc}
     */
    public void setMinimum( final int minimum ) {
        EventQueue.invokeLater(
            new Runnable() {
                public void run() {
                    if (m_progressBar != null)
                        m_progressBar.setMinimum( minimum );
                }
            }
        );
    }

    /**
     * {@inheritDoc}
     */
    public void showProgress( Frame parent, CancelableThread thread,
                              String message, int minValue, int maxValue,
                              boolean hasCancelButton ) {
        final ProgressListenerImpl pli = new ProgressListenerImpl(
            parent, thread, message, false, minValue, maxValue,
            hasCancelButton
        );
        pli.showAndWait();
    }

    /**
     * {@inheritDoc}
     */
    public void showProgress( Frame parent, CancelableThread thread,
                              String message, boolean hasCancelButton ) {
        final ProgressListenerImpl pli = new ProgressListenerImpl(
            parent, thread, message, true, 0, 0, hasCancelButton
        );
        pli.showAndWait();
    }

    ////////// private ////////////////////////////////////////////////////////

    /**
     * A <code>JProgressDialog</code> is-a {@link JDialog} that displays a
     * short message, the progress bar, and a Cancel button.
     */
    private final class JProgressDialog extends JDialog {

        ////////// public /////////////////////////////////////////////////////

        /**
         * Construct a <code>JProgressDialog</code>.
         * <p>
         * This <b>must</b> be called on the event dispatch thread because it
         * touches the progress bar.
         *
         * @param parent The parent window.
         * @param message The message to display in the progress dialog.
         * @param hasCancelButton If <code>true</code>, the dialog will contain
         * an enabled Cancel button the user can click to terminate the
         * {@link CancelableThread} prematurely.
         * @param pl The {@link ProgressListener} to notify if the user clicks
         * Cancel.
         */
        JProgressDialog( Frame parent, String message,
                         boolean hasCancelButton,
                         final ProgressListener pl ) {
            super( parent, Version.getApplicationName(), true );
            final JLabel messageLabel = new JLabel( message );

            // Don't allow users to dismiss the dialog box.
            setDefaultCloseOperation(DO_NOTHING_ON_CLOSE);

            //
            // Trying to use something simple like Box for doing the layout of
            // the progress bar doesn't work, i.e., it generates a funny
            // looking layout; so do the layout ourselves.
            //
            final JPanel layoutPanel = new JPanel( null ) {
                public void doLayout() {
                    final Dimension size = getSize();
                    messageLabel.setLocation( 0, 0 );
                    messageLabel.setSize( messageLabel.getPreferredSize() );
                    final Dimension mSize = messageLabel.getPreferredSize();
                    final Dimension pSize = m_progressBar.getPreferredSize();
                    m_progressBar.setLocation(
                        0, mSize.height + msgBarPadding
                    );
                    m_progressBar.setSize( size.width, pSize.height );
                }
                public Dimension getPreferredSize() {
                    final Dimension mSize = messageLabel.getPreferredSize();
                    final Dimension pSize = m_progressBar.getPreferredSize();
                    return new Dimension(
                        Math.max( mSize.width, pSize.width ),
                        mSize.height + pSize.height + msgBarPadding
                    );
                }
            };
            layoutPanel.add( messageLabel );
            layoutPanel.add( m_progressBar );

            // Fixed: the button label previously contained a stray trailing
            // space ("Cancel ").
            final JButton cancelButton = new JButton( "Cancel" );
            final JOptionPane panel = new JOptionPane(
                layoutPanel, JOptionPane.INFORMATION_MESSAGE, 0,
                NoIcon.INSTANCE, new Object[]{ cancelButton }
            );
            if ( hasCancelButton ) {
                cancelButton.addActionListener(
                    new ActionListener() {
                        public void actionPerformed( ActionEvent event ) {
                            messageLabel.setText( "Cancelling..." );
                            pl.progressCancelled();
                        }
                    }
                );
            } else
                cancelButton.setEnabled( false );

            getContentPane().setLayout( new BorderLayout() );
            getContentPane().add( panel );
            pack();
            setLocationRelativeTo( parent );
            setResizable( false );
        }

        /**
         * {@inheritDoc}
         */
        public void dispose() {
            // apple.laf.AquaProgressBarUI leaks animation Timers,
            // which hold references to the progress bar member:
            if ( m_progressBar != null ) {
                final Container barParent = m_progressBar.getParent();
                if ( barParent != null ) {
                    final Component progressBarCopy = m_progressBar;
                    EventQueue.invokeLater(
                        new Runnable() {
                            public void run() {
                                barParent.remove( progressBarCopy );
                            }
                        }
                    );
                }
                m_progressBar = null;
            }
            super.dispose();
        }

        ////////// private ////////////////////////////////////////////////////

        /**
         * There needs to be some padding between the bottom of the message and
         * the top of the progress bar.  This is it.
         */
        private static final int msgBarPadding = 5;
    }

    /**
     * A <code>ProgressListenerImpl</code> implements
     * {@link ProgressListener}.  The reason for having this class as a nested,
     * private class rather than having {@link DefaultProgressDialog} implement
     * {@link ProgressListener} directly is not to expose the
     * {@link ProgressListener} API to the user of
     * {@link DefaultProgressDialog}.
     */
    private final class ProgressListenerImpl
        implements CancelableThreadMonitor.Listener, ProgressListener {

        ////////// public /////////////////////////////////////////////////////

        /**
         * Construct a <code>ProgressListenerImpl</code>.
         *
         * @param parent The parent window.
         * @param thread The {@link CancelableThread} to run while showing the
         * progress dialog.
         * @param message The message to display in the progress dialog.
         * @param indeterminate If <code>true</code>, makes this indicator an
         * indeterminate progress indicator.
         * @param minValue The minimum value of the progress indicator.
         * @param maxValue The maximum value of the progress indicator.
         * @param hasCancelButton If <code>true</code>, the dialog will contain
         * an enabled Cancel button the user can click to terminate the
         * {@link CancelableThread} prematurely.
         */
        ProgressListenerImpl( Frame parent, CancelableThread thread,
                              String message, boolean indeterminate,
                              int minValue, int maxValue,
                              boolean hasCancelButton ) {
            init(
                parent, message, indeterminate, minValue, maxValue,
                hasCancelButton
            );
            m_threadMonitor = new CancelableThreadMonitor( thread, this );
        }

        /**
         * {@inheritDoc}
         */
        public void progressCancelled() {
            //
            // Cancel the thread: this will subsequently cause
            // threadTerminated() below to be called that will dispose of the
            // dialog and unblock the show() in showAndWait() below.
            //
            m_threadMonitor.requestCancel();
        }

        /**
         * Start the thread, show the dialog, and wait until the dialog is
         * dismissed either because the thread terminated naturally or the user
         * clicks Cancel.
         */
        public void showAndWait() {
            m_threadMonitor.start();
            m_jProgressDialog.setVisible(true); // blocks until dialog goes away
        }

        /**
         * {@inheritDoc}
         */
        public void threadTerminated( CancelableThread t ) {
            if ( t != m_threadMonitor.getMonitoredThread() )
                throw new IllegalStateException();
            EventQueue.invokeLater(
                new Runnable() {
                    public void run() {
                        m_jProgressDialog.dispose();
                    }
                }
            );
        }

        ////////// private ////////////////////////////////////////////////////

        /**
         * Initialize a <code>ProgressListenerImpl</code>.  The reason this
         * method is needed is because this initialization must occur on the
         * event dispatch thread synchronously.  The obvious thing to use is
         * {@link EventQueue#invokeAndWait(Runnable)}.  However, that
         * (stupidly) dies if it's called from the event dispatch thread.
         * (Why can't it just call the <code>run()</code> method directly
         * instead?)  So we have to write the code that Sun should have written
         * in the first place, i.e., check whether we're already running on the
         * event dispatch thread: if so, just initialize; if not, call
         * {@link EventQueue#invokeAndWait(Runnable)}.
         *
         * @param parent The parent window.
         * @param message The message to display in the progress dialog.
         * @param indeterminate If <code>true</code>, makes this indicator an
         * indeterminate progress indicator.
         * @param minValue The minimum value of the progress indicator.
         * @param maxValue The maximum value of the progress indicator.
         * @param hasCancelButton If <code>true</code>, the dialog will contain
         * an enabled Cancel button the user can click to terminate the
         * {@link CancelableThread} prematurely.
         */
        private void init( final Frame parent, final String message,
                           final boolean indeterminate,
                           final int minValue, final int maxValue,
                           final boolean hasCancelButton ) {
            if ( !EventQueue.isDispatchThread() ) {
                try {
                    EventQueue.invokeAndWait(
                        new Runnable() {
                            public void run() {
                                init(
                                    parent, message, indeterminate,
                                    minValue, maxValue, hasCancelButton
                                );
                            }
                        }
                    );
                }
                catch ( InterruptedException e ) {
                    // ignore (?)
                }
                catch ( InvocationTargetException e ) {
                    // Have you got a better idea?
                    throw new RuntimeException( e.getCause() );
                }
                return;
            }
            if ( indeterminate ) {
                m_progressBar.setIndeterminate( true );
            } else {
                m_progressBar.setMaximum( maxValue );
                m_progressBar.setMinimum( minValue );
            }
            m_jProgressDialog = new JProgressDialog(
                parent, message, hasCancelButton, this
            );
        }

        private JProgressDialog m_jProgressDialog;
    }

    /**
     * The progress bar that's displayed in the dialog.
     */
    private JProgressBar m_progressBar;

    private CancelableThreadMonitor m_threadMonitor;

    ///////////////////////////////////////////////////////////////////////////

    /**
     * A simple thread that counts to 20 for exercising the dialog from
     * {@link #main(String[])}.
     */
    private static final class TestThread extends ProgressThread {
        TestThread( ProgressIndicator indicator ) {
            super( indicator );
        }
        public void run() {
            for ( int i = 0; i < 20; ++i ) {
                if ( isCanceled() )
                    break;
                System.out.println( i );
                getProgressIndicator().incrementBy( 1 );
                try {
                    Thread.sleep( 250 );
                }
                catch ( InterruptedException e ) {
                    // ignore
                }
            }
        }
    }

    public static void main( String[] args ) {
        final JFrame frame = new JFrame( "TestProgress" );
        frame.setBounds( 100, 100, 500, 300 );
        frame.setVisible( true );

        final DefaultProgressDialog dialog = new DefaultProgressDialog();
        final ProgressThread t = new TestThread( dialog );
        dialog.showProgress( frame, t, "Working...", 0, 20, true );
        System.exit( 0 );
    }
}
/* vim:set et sw=4 ts=4: */
/*
 * Copyright 2020-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.k8snode.impl;

import com.google.common.collect.ImmutableSet;
import org.onlab.packet.IpAddress;
import org.onlab.util.KryoNamespace;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreService;
import org.onosproject.k8snode.api.DefaultK8sHost;
import org.onosproject.k8snode.api.K8sBridge;
import org.onosproject.k8snode.api.K8sHost;
import org.onosproject.k8snode.api.K8sHostEvent;
import org.onosproject.k8snode.api.K8sHostState;
import org.onosproject.k8snode.api.K8sHostStore;
import org.onosproject.k8snode.api.K8sHostStoreDelegate;
import org.onosproject.k8snode.api.K8sRouterBridge;
import org.onosproject.k8snode.api.K8sTunnelBridge;
import org.onosproject.store.AbstractStore;
import org.onosproject.store.serializers.KryoNamespaces;
import org.onosproject.store.service.ConsistentMap;
import org.onosproject.store.service.MapEvent;
import org.onosproject.store.service.MapEventListener;
import org.onosproject.store.service.Serializer;
import org.onosproject.store.service.StorageService;
import org.onosproject.store.service.Versioned;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferenceCardinality;
import org.slf4j.Logger;

import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ExecutorService;

import static com.google.common.base.Preconditions.checkArgument;
import static java.util.concurrent.Executors.newSingleThreadExecutor;
import static org.onlab.util.Tools.groupedThreads;
import static org.onosproject.k8snode.api.K8sHostEvent.Type.K8S_HOST_COMPLETE;
import static org.onosproject.k8snode.api.K8sHostEvent.Type.K8S_HOST_CREATED;
import static org.onosproject.k8snode.api.K8sHostEvent.Type.K8S_HOST_INCOMPLETE;
import static org.onosproject.k8snode.api.K8sHostEvent.Type.K8S_HOST_REMOVED;
import static org.onosproject.k8snode.api.K8sHostEvent.Type.K8S_HOST_UPDATED;
import static org.onosproject.k8snode.api.K8sHostEvent.Type.K8S_NODES_ADDED;
import static org.onosproject.k8snode.api.K8sHostEvent.Type.K8S_NODES_REMOVED;
import static org.onosproject.k8snode.api.K8sHostState.COMPLETE;
import static org.onosproject.k8snode.api.K8sHostState.INCOMPLETE;
import static org.slf4j.LoggerFactory.getLogger;

/**
 * Implementation of kubernetes host store using consistent map.
 * <p>
 * Hosts are keyed by the string form of their IP address.  Map events are
 * translated into {@link K8sHostEvent}s and dispatched to the store delegate
 * on a dedicated single-threaded executor.
 */
@Component(immediate = true, service = K8sHostStore.class)
public class DistributedK8sHostStore
        extends AbstractStore<K8sHostEvent, K8sHostStoreDelegate>
        implements K8sHostStore {

    private final Logger log = getLogger(getClass());

    private static final String ERR_NOT_FOUND = " does not exist";
    private static final String ERR_DUPLICATE = " already exists";

    private static final String APP_ID = "org.onosproject.k8snode";

    // Kryo serializer namespace covering every type stored in the map.
    private static final KryoNamespace
            SERIALIZER_K8S_HOST = KryoNamespace.newBuilder()
            .register(KryoNamespaces.API)
            .register(K8sHost.class)
            .register(DefaultK8sHost.class)
            .register(K8sHostState.class)
            .register(K8sBridge.class)
            .register(K8sTunnelBridge.class)
            .register(K8sRouterBridge.class)
            .register(Collection.class)
            .build();

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected CoreService coreService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected StorageService storageService;

    // Single-threaded so delegate notifications preserve event order.
    private final ExecutorService eventExecutor = newSingleThreadExecutor(
            groupedThreads(this.getClass().getSimpleName(), "event-handler", log));

    private final MapEventListener<String, K8sHost> hostMapListener =
            new K8sHostMapListener();

    private ConsistentMap<String, K8sHost> hostStore;

    @Activate
    protected void activate() {
        ApplicationId appId = coreService.registerApplication(APP_ID);

        hostStore = storageService.<String, K8sHost>consistentMapBuilder()
                .withSerializer(Serializer.using(SERIALIZER_K8S_HOST))
                .withName("k8s-hoststore")
                .withApplicationId(appId)
                .build();
        hostStore.addListener(hostMapListener);
        log.info("Started");
    }

    @Deactivate
    protected void deactivate() {
        hostStore.removeListener(hostMapListener);
        eventExecutor.shutdown();
        log.info("Stopped");
    }

    @Override
    public void createHost(K8sHost host) {
        // compute() makes the existence check and insert atomic.
        hostStore.compute(host.hostIp().toString(), (hostIp, existing) -> {
            final String error = host.hostIp().toString() + ERR_DUPLICATE;
            checkArgument(existing == null, error);
            return host;
        });
    }

    @Override
    public void updateHost(K8sHost host) {
        // compute() makes the existence check and replace atomic.
        hostStore.compute(host.hostIp().toString(), (hostIp, existing) -> {
            final String error = host.hostIp().toString() + ERR_NOT_FOUND;
            checkArgument(existing != null, error);
            return host;
        });
    }

    @Override
    public K8sHost removeHost(IpAddress hostIp) {
        Versioned<K8sHost> host = hostStore.remove(hostIp.toString());
        if (host == null) {
            final String error = hostIp.toString() + ERR_NOT_FOUND;
            throw new IllegalArgumentException(error);
        }
        return host.value();
    }

    @Override
    public Set<K8sHost> hosts() {
        return ImmutableSet.copyOf(hostStore.asJavaMap().values());
    }

    @Override
    public K8sHost host(IpAddress hostIp) {
        return hostStore.asJavaMap().get(hostIp.toString());
    }

    /**
     * Translates backing-map events into {@link K8sHostEvent}s for the
     * delegate.  On UPDATE, also derives state-transition events
     * (COMPLETE / INCOMPLETE) and node-membership diffs
     * (K8S_NODES_ADDED / K8S_NODES_REMOVED).
     */
    private class K8sHostMapListener
            implements MapEventListener<String, K8sHost> {

        @Override
        public void event(MapEvent<String, K8sHost> event) {
            switch (event.type()) {
                case INSERT:
                    log.debug("Kubernetes host created {}", event.newValue());
                    eventExecutor.execute(() ->
                            notifyDelegate(new K8sHostEvent(
                                    K8S_HOST_CREATED, event.newValue().value()
                            )));
                    break;
                case UPDATE:
                    log.debug("Kubernetes host updated {}", event.newValue());
                    eventExecutor.execute(() -> {
                        notifyDelegate(new K8sHostEvent(
                                K8S_HOST_UPDATED, event.newValue().value()
                        ));

                        if (event.newValue().value().state() == COMPLETE) {
                            notifyDelegate(new K8sHostEvent(
                                    K8S_HOST_COMPLETE,
                                    event.newValue().value()
                            ));
                        } else if (event.newValue().value().state() == INCOMPLETE) {
                            notifyDelegate(new K8sHostEvent(
                                    K8S_HOST_INCOMPLETE,
                                    event.newValue().value()
                            ));
                        }

                        K8sHost origHost = event.newValue().value();

                        // Diff node-name sets between old and new value to
                        // report which nodes joined or left this host.
                        Set<String> oldNodes = event.oldValue().value().nodeNames();
                        Set<String> newNodes = event.newValue().value().nodeNames();

                        Set<String> addedNodes = new HashSet<>(newNodes);
                        Set<String> removedNodes = new HashSet<>(oldNodes);

                        addedNodes.removeAll(oldNodes);
                        removedNodes.removeAll(newNodes);

                        if (!addedNodes.isEmpty()) {
                            K8sHost addedHost = DefaultK8sHost.builder()
                                    .hostIp(origHost.hostIp())
                                    .state(origHost.state())
                                    .nodeNames(addedNodes)
                                    .build();
                            notifyDelegate(new K8sHostEvent(K8S_NODES_ADDED, addedHost));
                        }

                        if (!removedNodes.isEmpty()) {
                            K8sHost removedHost = DefaultK8sHost.builder()
                                    .hostIp(origHost.hostIp())
                                    .state(origHost.state())
                                    .nodeNames(removedNodes)
                                    .build();
                            notifyDelegate(new K8sHostEvent(K8S_NODES_REMOVED, removedHost));
                        }
                    });
                    break;
                case REMOVE:
                    log.debug("Kubernetes host removed {}", event.oldValue());
                    eventExecutor.execute(() ->
                            notifyDelegate(new K8sHostEvent(
                                    K8S_HOST_REMOVED, event.oldValue().value()
                            )));
                    break;
                default:
                    // do nothing
                    break;
            }
        }
    }
}
/*
 * Copyright 2018 Kantega AS
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kantega.reststop.maven;

import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import org.eclipse.aether.RepositorySystem;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.collection.CollectRequest;
import org.eclipse.aether.graph.Dependency;
import org.eclipse.aether.graph.DependencyFilter;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.resolution.ArtifactResult;
import org.eclipse.aether.resolution.DependencyRequest;
import org.eclipse.aether.resolution.DependencyResolutionException;
import org.eclipse.aether.resolution.DependencyResult;
import org.eclipse.aether.util.artifact.JavaScopes;
import org.eclipse.aether.util.filter.DependencyFilterUtils;
import org.eclipse.jetty.maven.plugin.JettyWebAppContext;
import org.eclipse.jetty.server.Server;
import org.kantega.reststop.classloaderutils.CircularDependencyException;
import org.kantega.reststop.classloaderutils.PluginInfo;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.*;
import java.util.jar.JarFile;
import java.util.zip.ZipEntry;

import static java.util.Arrays.asList;

/**
 * Base mojo for Reststop plugins.  Resolves the configured Reststop plugins
 * and their class paths, validates the plugin graph, and renders the result
 * as a {@code plugins.xml} document consumed by the Reststop bootstrap.
 */
public abstract class AbstractReststopMojo extends AbstractMojo {

    @Component
    protected RepositorySystem repoSystem;

    @Parameter(defaultValue ="${repositorySystemSession}" ,readonly = true)
    protected RepositorySystemSession repoSession;

    @Parameter(defaultValue = "${project.remoteProjectRepositories}")
    protected List<RemoteRepository> remoteRepos;

    @Parameter(defaultValue = "org.kantega.reststop:reststop-webapp:war:${plugin.version}")
    protected String warCoords;

    @Parameter(defaultValue = "org.kantega.reststop:reststop-bootstrap:jar:${plugin.version}")
    protected String bootstrapCoords;

    @Parameter(defaultValue = "${project.build.directory}/${project.build.finalName}.${project.packaging}")
    private File pluginJar;

    @Parameter(defaultValue = "${project}")
    protected MavenProject mavenProject;

    @Parameter
    protected List<Plugin> basePlugins;

    @Parameter
    protected List<Plugin> plugins;

    @Parameter(defaultValue = "${plugin.version}")
    protected String pluginVersion;

    @Parameter
    protected List<org.apache.maven.model.Dependency> containerDependencies;

    /**
     * Hook for subclasses to customize the Jetty context before start.
     */
    protected void customizeContext(JettyWebAppContext context) {

    }

    /**
     * Hook for subclasses to run after the embedded server has started.
     */
    protected void afterServerStart(Server server, int port) throws MojoFailureException {

    }

    /**
     * Resolves the configured plugins, validates the dependency graph, and
     * builds the plugins.xml document.
     *
     * @param prod if {@code true}, emit only production (runtime) info and
     *             omit local file paths, source directories and dev config
     */
    protected Document createPluginXmlDocument(boolean prod) throws MojoFailureException, MojoExecutionException {
        List<PluginInfo> pluginInfos = getPluginInfos();
        validateCircularDependencies(pluginInfos);
        return buildPluginsDocument(prod, pluginInfos);
    }

    /**
     * Renders the resolved plugin infos as a DOM document:
     * &lt;plugins&gt;/&lt;plugin&gt; with depends-on, config, and per-scope
     * class-path artifact elements.
     */
    private Document buildPluginsDocument(boolean prod, List<PluginInfo> pluginInfos) throws MojoExecutionException {
        try {
            Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();

            Element pluginsElem = doc.createElement("plugins");
            doc.appendChild(pluginsElem);

            for (PluginInfo plugin : pluginInfos) {

                Element pluginElem = doc.createElement("plugin");
                pluginsElem.appendChild(pluginElem);

                for (PluginInfo parent : plugin.getParents(pluginInfos)) {
                    Element dependsElem = doc.createElement("depends-on");
                    pluginElem.appendChild(dependsElem);
                    dependsElem.setAttribute("groupId", parent.getGroupId());
                    dependsElem.setAttribute("artifactId", parent.getArtifactId());
                    dependsElem.setAttribute("version", parent.getVersion());
                }

                // Dev-mode config properties are never shipped to production.
                if (!prod) {
                    if (!plugin.getConfig().isEmpty()) {
                        Element configElem = doc.createElement("config");
                        for (String name : plugin.getConfig().stringPropertyNames()) {
                            Element propElem = doc.createElement("prop");
                            propElem.setAttribute("name", name);
                            propElem.setAttribute("value", plugin.getConfig().getProperty(name));
                            configElem.appendChild(propElem);
                        }
                        pluginElem.appendChild(configElem);
                    }
                }

                pluginElem.setAttribute("groupId", plugin.getGroupId());
                pluginElem.setAttribute("artifactId", plugin.getArtifactId());
                pluginElem.setAttribute("version", plugin.getVersion());

                // Local file paths only make sense for dev-mode hot reload.
                if (!prod) {
                    if (plugin.getSourceDirectory() != null) {
                        pluginElem.setAttribute("sourceDirectory", plugin.getSourceDirectory().getAbsolutePath());
                    }
                    pluginElem.setAttribute("pluginFile", plugin.getFile().getAbsolutePath());
                }

                List<String> scopes = prod
                        ? Collections.singletonList(JavaScopes.RUNTIME)
                        : asList(JavaScopes.TEST, JavaScopes.RUNTIME, JavaScopes.COMPILE);

                for (String scope : scopes) {
                    Element scopeElem = doc.createElement(scope);
                    pluginElem.appendChild(scopeElem);

                    for (org.kantega.reststop.classloaderutils.Artifact artifact : plugin.getClassPath(scope)) {
                        Element artifactElement = doc.createElement("artifact");
                        artifactElement.setAttribute("groupId", artifact.getGroupId());
                        artifactElement.setAttribute("artifactId", artifact.getArtifactId());
                        artifactElement.setAttribute("version", artifact.getVersion());
                        if (!prod) {
                            artifactElement.setAttribute("file", artifact.getFile().getAbsolutePath());
                        }
                        scopeElem.appendChild(artifactElement);
                    }
                }
            }

            return doc;
        } catch (ParserConfigurationException e) {
            throw new MojoExecutionException(e.getMessage(), e);
        }
    }

    private List<PluginInfo> getPluginInfos() throws MojoFailureException, MojoExecutionException {
        List<PluginInfo> pluginInfos = new Resolver(repoSystem, repoSession, remoteRepos, getLog()).resolve(getPlugins());
        validateTransitivePluginsMissing(pluginInfos);
        validateNoPluginArtifactsOnRuntimeClasspath(pluginInfos);
        return pluginInfos;
    }

    private void validateCircularDependencies(List<PluginInfo> pluginInfos) throws MojoFailureException {
        try {
            PluginInfo.resolveClassloaderOrder(pluginInfos);
        } catch (CircularDependencyException e) {
            throw new MojoFailureException(e.getMessage(), e);
        }
    }

    /**
     * Fails the build if a plugin has a runtime-scope Maven dependency on
     * another plugin artifact; such artifacts must be scope provided and
     * declared as a &lt;plugin&gt;.
     */
    private void validateNoPluginArtifactsOnRuntimeClasspath(List<PluginInfo> pluginInfos) throws MojoExecutionException, MojoFailureException {
        for (PluginInfo pluginInfo : pluginInfos) {
            Map<String, org.kantega.reststop.classloaderutils.Artifact> shouldBeProvided = new TreeMap<>();
            for (org.kantega.reststop.classloaderutils.Artifact dep : pluginInfo.getClassPath("runtime")) {
                try {
                    // try-with-resources: the jar handle must be released
                    // even if getEntry() throws (previously leaked on error).
                    boolean isPlugin;
                    try (JarFile jar = new JarFile(dep.getFile())) {
                        isPlugin = jar.getEntry("META-INF/services/ReststopPlugin/") != null;
                    }
                    if (isPlugin) {
                        shouldBeProvided.put(dep.getGroupIdAndArtifactId(), dep);
                        getLog().error("Plugin " + pluginInfo.getPluginId() +" depends on plugin artifact " + dep.getPluginId() +" which must be in <scope>provided</scope> and declared as a <plugin>!");
                        String decl = String.format("\t<plugin>\n\t\t<groupId>%s</groupId>\n\t\t<artifactId>%s</artifactId>\n\t\t<version>%s</version>\n\t</plugin>", dep.getGroupId(), dep.getArtifactId(), dep.getVersion());
                        getLog().error("Please add the following to your <plugins> section:\n" + decl);
                    }
                } catch (IOException e) {
                    throw new MojoExecutionException(e.getMessage(), e);
                }
            }
            if (!shouldBeProvided.isEmpty()) {
                throw new MojoFailureException("Plugin " +pluginInfo.getPluginId() +" has a Maven <dependency> on " +
                        "one or more plugin artifacts which should be made <scope>provided</scope> and directly declared as a <plugin>: " + shouldBeProvided.values());
            }
        }
    }

    /**
     * Fails the build if a plugin's compile class path contains a plugin
     * artifact that is not itself declared as a &lt;plugin&gt;.
     */
    private void validateTransitivePluginsMissing(List<PluginInfo> pluginInfos) throws MojoExecutionException, MojoFailureException {
        for (PluginInfo pluginInfo : pluginInfos) {
            Map<String, org.kantega.reststop.classloaderutils.Artifact> missing = new TreeMap<>();
            for (org.kantega.reststop.classloaderutils.Artifact dep : pluginInfo.getClassPath("compile")) {
                try {
                    // try-with-resources: the jar handle must be released
                    // even if getEntry() throws (previously leaked on error).
                    boolean isPlugin;
                    try (JarFile jar = new JarFile(dep.getFile())) {
                        isPlugin = jar.getEntry("META-INF/services/ReststopPlugin/") != null;
                    }
                    if (isPlugin && !isDeclaredPlugin(dep, pluginInfos)) {
                        missing.put(dep.getGroupIdAndArtifactId(), dep);
                        File pomFile = new File(mavenProject.getBasedir(), "pom.xml");
                        getLog().error("Plugin " + pluginInfo.getPluginId() +" depends on the plugin " + dep.getPluginId() +" which is not declared as a <plugin> in " + pomFile);
                        String decl = String.format("\t<plugin>\n\t\t<groupId>%s</groupId>\n\t\t<artifactId>%s</artifactId>\n\t\t<version>%s</version>\n\t</plugin>", dep.getGroupId(), dep.getArtifactId(), dep.getVersion());
                        getLog().error("Please add the following to maven-reststop-plugin's <plugins> section in " +pomFile + ":\n" + decl);
                    }
                } catch (IOException e) {
                    throw new MojoExecutionException(e.getMessage(), e);
                }
            }
            if (!missing.isEmpty()) {
                throw new MojoFailureException("Plugin " +pluginInfo.getPluginId() +" has a Maven <dependency> on " +
                        "one or more plugin artifacts which should be directly declared as a <plugin>: " + missing.values());
            }
        }
    }

    private boolean isDeclaredPlugin(org.kantega.reststop.classloaderutils.Artifact dep, List<PluginInfo> pluginInfos) {
        for (PluginInfo declared : pluginInfos) {
            if (declared.getGroupIdAndArtifactId().equals(dep.getGroupIdAndArtifactId())) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns the combined list of explicitly configured plugins and base
     * plugins; never {@code null}.
     */
    protected List<Plugin> getPlugins() {
        List<Plugin> plugins = new ArrayList<>();
        if (this.plugins != null) {
            plugins.addAll(this.plugins);
        }
        if (this.basePlugins != null) {
            plugins.addAll(this.basePlugins);
        }
        return plugins;
    }

    protected File resolveArtifactFile(String coords) throws MojoFailureException, MojoExecutionException {
        return resolveArtifact(coords).getFile();
    }

    protected Artifact resolveArtifact(String coords) throws MojoFailureException, MojoExecutionException {
        return new Resolver(repoSystem, repoSession, remoteRepos, getLog()).resolveArtifact(coords);
    }

    /**
     * Looks up the "sourceDir" marker artifact in the local repository; its
     * first line, if present, is the absolute path of the plugin's source
     * directory.  Returns {@code null} when no marker exists.
     */
    protected File getSourceDirectory(Plugin plugin) {
        String path = repoSession.getLocalRepositoryManager().getPathForLocalArtifact(new DefaultArtifact(plugin.getGroupId(), plugin.getArtifactId(), "sourceDir", plugin.getVersion()));
        File file = new File(repoSession.getLocalRepository().getBasedir(), path);
        try {
            // StandardCharsets.UTF_8 instead of Charset.forName("utf-8").
            return file.exists() ? new File(Files.readAllLines(file.toPath(), StandardCharsets.UTF_8).get(0)) : null;
        } catch (IOException e) {
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    /**
     * Adds the development console and development plugin, then attaches
     * source directories to every plugin for hot reload.
     */
    protected void addDevelopmentPlugins(List<Plugin> plugins) {
        {
            Plugin devConsolePlugin = new Plugin("org.kantega.reststop", "reststop-development-console", pluginVersion);
            plugins.add(devConsolePlugin);
        }

        {
            Plugin developmentPlugin = new Plugin("org.kantega.reststop", "reststop-development-plugin", pluginVersion);
            plugins.add(developmentPlugin);
        }

        for (Plugin plugin : plugins) {
            plugin.setSourceDirectory(getSourceDirectory(plugin));
        }
    }

    /**
     * Resolves the container dependencies (and their non-optional runtime
     * transitives) to concrete artifacts.
     */
    protected List<Artifact> resolveContainerArtifacts(List<org.apache.maven.model.Dependency> containerDependencies) throws MojoFailureException, MojoExecutionException {
        List<Artifact> containerArtifacts = new ArrayList<>();

        List<org.eclipse.aether.graph.Dependency> containerDeps = new ArrayList<>();

        for (org.apache.maven.model.Dependency dependency : containerDependencies) {
            Artifact dependencyArtifact = resolveArtifact(
                    String.format("%s:%s:%s", dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion()));
            containerDeps.add(new Dependency(dependencyArtifact, JavaScopes.RUNTIME));
        }

        try {
            CollectRequest collectRequest = new CollectRequest(containerDeps, null, remoteRepos);

            // Runtime class path, excluding optional dependencies.
            final DependencyFilter filter = DependencyFilterUtils.andFilter(
                    DependencyFilterUtils.classpathFilter(JavaScopes.RUNTIME),
                    (dependencyNode, list) -> dependencyNode.getDependency() == null || !dependencyNode.getDependency().isOptional());

            DependencyRequest dependencyRequest = new DependencyRequest(collectRequest, filter);

            DependencyResult dependencyResult = repoSystem.resolveDependencies(repoSession, dependencyRequest);

            if (!dependencyResult.getCollectExceptions().isEmpty()) {
                throw new MojoFailureException("Failed resolving plugin dependencies", dependencyResult.getCollectExceptions().get(0));
            }

            for (ArtifactResult result : dependencyResult.getArtifactResults()) {
                Artifact artifact = result.getArtifact();
                containerArtifacts.add(artifact);
            }
            return containerArtifacts;
        } catch (DependencyResolutionException e) {
            throw new MojoFailureException("Failed resolving plugin dependencies", e);
        }
    }
}
/* ========================================================================
 * PlantUML : a free UML diagram generator
 * ========================================================================
 *
 * (C) Copyright 2009-2020, Arnaud Roques
 *
 * Project Info:  https://plantuml.com
 *
 * If you like this project or if you find it useful, you can support us at:
 *
 * https://plantuml.com/patreon (only 1$ per month!)
 * https://plantuml.com/paypal
 *
 * This file is part of PlantUML.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *
 * Original Author:  Arnaud Roques
 * Contribution   :  Hisashi Miyashita
 */
package net.sourceforge.plantuml.cucadiagram;

import java.util.Objects;

import net.sourceforge.plantuml.ugraphic.UStroke;

/**
 * Immutable description of how a link between two diagram entities is rendered:
 * a decoration and optional "hat" at each end, a line style (normal, dashed,
 * bold, ...) and an optional decoration in the middle of the line.
 * <p>
 * All "with"/"go" methods return a new {@code LinkType}; instances are never
 * mutated. Note that {@link #equals(Object)} and {@link #hashCode()}
 * intentionally compare only the two end decorations and the line style
 * (hats and the middle decoration are ignored).
 */
public class LinkType {

	private final LinkHat hat1;
	private final LinkDecor decor1;
	private final LinkStyle linkStyle;
	private final LinkDecor decor2;
	private final LinkHat hat2;
	private final LinkMiddleDecor middleDecor;

	/** @return true when both ends carry a decoration. */
	public boolean isDoubleDecorated() {
		return decor1 != LinkDecor.NONE && decor2 != LinkDecor.NONE;
	}

	/** @return true when only the second end is decorated (SVG renders it reverted). */
	public boolean looksLikeRevertedForSvg() {
		return this.decor1 == LinkDecor.NONE && this.decor2 != LinkDecor.NONE;
	}

	/** @return true when the ends are symmetric (both decorated or both bare). */
	public boolean looksLikeNoDecorAtAllSvg() {
		if (this.decor1 == LinkDecor.NONE && this.decor2 == LinkDecor.NONE) {
			return true;
		}
		if (this.decor1 != LinkDecor.NONE && this.decor2 != LinkDecor.NONE) {
			return true;
		}
		return false;
	}

	/** Convenience constructor: no hats, normal style, no middle decoration. */
	public LinkType(LinkDecor decor1, LinkDecor decor2) {
		this(LinkHat.NONE, decor1, decor2, LinkHat.NONE);
	}

	/** Convenience constructor: normal style, no middle decoration. */
	public LinkType(LinkHat hat1, LinkDecor decor1, LinkDecor decor2, LinkHat hat2) {
		this(hat1, decor1, LinkStyle.NORMAL(), LinkMiddleDecor.NONE, decor2, hat2);
	}

	/** @return a copy with the first end's decoration removed. */
	public LinkType withoutDecors1() {
		return new LinkType(hat1, LinkDecor.NONE, linkStyle, middleDecor, decor2, hat2);
	}

	/** @return a copy with the second end's decoration removed. */
	public LinkType withoutDecors2() {
		return new LinkType(hat1, decor1, linkStyle, middleDecor, LinkDecor.NONE, hat2);
	}

	@Override
	public String toString() {
		return decor1 + "-" + linkStyle + "-" + decor2;
	}

	@Override
	public int hashCode() {
		// Derived from toString() so it stays consistent with equals():
		// both consider only decor1, linkStyle and decor2.
		return toString().hashCode();
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		// Guard required by the Object.equals contract: the previous
		// implementation cast blindly and threw on null or foreign types.
		if (!(obj instanceof LinkType)) {
			return false;
		}
		final LinkType other = (LinkType) obj;
		return this.decor1 == other.decor1 && this.decor2 == other.decor2 && this.linkStyle == other.linkStyle;
	}

	private LinkType(LinkHat hat1, LinkDecor decor1, LinkStyle style, LinkMiddleDecor middleDecor, LinkDecor decor2,
			LinkHat hat2) {
		this.decor1 = decor1;
		this.linkStyle = Objects.requireNonNull(style);
		this.decor2 = decor2;
		this.middleDecor = middleDecor;
		this.hat1 = hat1;
		this.hat2 = hat2;
	}

	public boolean isInvisible() {
		return linkStyle.isInvisible();
	}

	public LinkType goDashed() {
		return new LinkType(hat1, decor1, LinkStyle.DASHED(), middleDecor, decor2, hat2);
	}

	public LinkType goDotted() {
		return new LinkType(hat1, decor1, LinkStyle.DOTTED(), middleDecor, decor2, hat2);
	}

	public LinkType goThickness(double thickness) {
		return new LinkType(hat1, decor1, linkStyle.goThickness(thickness), middleDecor, decor2, hat2);
	}

	public LinkType goBold() {
		return new LinkType(hat1, decor1, LinkStyle.BOLD(), middleDecor, decor2, hat2);
	}

	/** @return a copy with the two ends swapped (including hats and middle decoration). */
	public LinkType getInversed() {
		return new LinkType(hat2, decor2, linkStyle, middleDecor.getInversed(), decor1, hat1);
	}

	public LinkType withMiddleCircle() {
		return new LinkType(hat1, decor1, linkStyle, LinkMiddleDecor.CIRCLE, decor2, hat2);
	}

	public LinkType withMiddleCircleCircled() {
		return new LinkType(hat1, decor1, linkStyle, LinkMiddleDecor.CIRCLE_CIRCLED, decor2, hat2);
	}

	public LinkType withMiddleCircleCircled1() {
		return new LinkType(hat1, decor1, linkStyle, LinkMiddleDecor.CIRCLE_CIRCLED1, decor2, hat2);
	}

	public LinkType withMiddleCircleCircled2() {
		return new LinkType(hat1, decor1, linkStyle, LinkMiddleDecor.CIRCLE_CIRCLED2, decor2, hat2);
	}

	public LinkType getInvisible() {
		return new LinkType(hat1, decor1, LinkStyle.INVISIBLE(), middleDecor, decor2, hat2);
	}

	/**
	 * Builds the dot/graphviz attribute fragment (arrowtail/arrowhead/dir/arrowsize)
	 * describing both ends of this link, for the svek backend.
	 */
	public String getSpecificDecorationSvek() {
		final StringBuilder sb = new StringBuilder();
		final boolean isEmpty1 = decor1 == LinkDecor.NONE && hat1 == LinkHat.NONE;
		final boolean isEmpty2 = decor2 == LinkDecor.NONE && hat2 == LinkHat.NONE;

		if (isEmpty1 && isEmpty2) {
			sb.append("arrowtail=none");
			sb.append(",arrowhead=none");
		} else if (!isEmpty1 && !isEmpty2) {
			sb.append("dir=both,");
			sb.append("arrowtail=empty");
			sb.append(",arrowhead=empty");
		} else if (isEmpty1 && !isEmpty2) {
			sb.append("arrowtail=empty");
			sb.append(",arrowhead=none");
			sb.append(",dir=back");
		}
		// NOTE: the (!isEmpty1 && isEmpty2) case intentionally emits nothing
		// extra here; graphviz defaults apply.
		final double arrowsize = Math.max(decor1.getArrowSize(), decor2.getArrowSize());
		if (arrowsize > 0) {
			if (sb.length() > 0) {
				sb.append(",");
			}
			sb.append("arrowsize=" + arrowsize);
		}
		return sb.toString();
	}

	public final LinkDecor getDecor1() {
		return decor1;
	}

	public final LinkStyle getStyle() {
		return linkStyle;
	}

	public final LinkDecor getDecor2() {
		return decor2;
	}

	private boolean isExtendsOrAggregationOrCompositionOrPlus() {
		return isExtends() || isAggregationOrComposition() || isPlus() || isOf(LinkDecor.DEFINEDBY)
				|| isOf(LinkDecor.REDEFINES);
	}

	private boolean isOf(LinkDecor ld) {
		return decor1 == ld || decor2 == ld;
	}

	private boolean isExtends() {
		return decor1 == LinkDecor.EXTENDS || decor2 == LinkDecor.EXTENDS;
	}

	private boolean isPlus() {
		return decor1 == LinkDecor.PLUS || decor2 == LinkDecor.PLUS;
	}

	private boolean isAggregationOrComposition() {
		return decor1 == LinkDecor.AGREGATION || decor2 == LinkDecor.AGREGATION || decor1 == LinkDecor.COMPOSITION
				|| decor2 == LinkDecor.COMPOSITION;
	}

	/** @return a copy keeping only the first end's hat/decoration. */
	public LinkType getPart1() {
		return new LinkType(hat1, decor1, linkStyle, middleDecor, LinkDecor.NONE, LinkHat.NONE);
	}

	/** @return a copy keeping only the second end's hat/decoration. */
	public LinkType getPart2() {
		return new LinkType(LinkHat.NONE, LinkDecor.NONE, linkStyle, middleDecor, decor2, hat2);
	}

	/**
	 * Resolves the stroke to draw this link with: the style's own thickness wins
	 * when explicitly overridden, otherwise {@code defaultThickness} is applied.
	 */
	public UStroke getStroke3(UStroke defaultThickness) {
		if (linkStyle.isThicknessOverrided()) {
			return linkStyle.getStroke3();
		}
		if (defaultThickness == null) {
			return linkStyle.getStroke3();
		}
		return linkStyle.goThickness(defaultThickness.getThickness()).getStroke3();
	}

	public LinkMiddleDecor getMiddleDecor() {
		return middleDecor;
	}

	public LinkHat getHat1() {
		return hat1;
	}

	public LinkHat getHat2() {
		return hat2;
	}

	public LinkType withLollipopInterfaceEye2() {
		return new LinkType(hat1, LinkDecor.NONE, linkStyle, middleDecor, decor2, hat2);
	}

	public LinkType withLollipopInterfaceEye1() {
		return new LinkType(hat1, decor1, linkStyle, middleDecor, LinkDecor.NONE, hat2);
	}

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.distributed.internal.locks;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Set;

import org.apache.logging.log4j.Logger;

import org.apache.geode.CancelCriterion;
import org.apache.geode.DataSerializer;
import org.apache.geode.annotations.Immutable;
import org.apache.geode.distributed.internal.ClusterDistributionManager;
import org.apache.geode.distributed.internal.DistributionManager;
import org.apache.geode.distributed.internal.DistributionMessage;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.distributed.internal.MessageWithReply;
import org.apache.geode.distributed.internal.PooledDistributionMessage;
import org.apache.geode.distributed.internal.ReplyException;
import org.apache.geode.distributed.internal.ReplyMessage;
import org.apache.geode.distributed.internal.ReplyProcessor21;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.Assert;
import org.apache.geode.internal.logging.log4j.LogMarker;
import org.apache.geode.internal.serialization.DeserializationContext;
import org.apache.geode.internal.serialization.SerializationContext;
import org.apache.geode.internal.util.concurrent.StoppableCondition;
import org.apache.geode.internal.util.concurrent.StoppableReentrantLock;
import org.apache.geode.logging.internal.log4j.api.LogService;

/**
 * A processor for sending a message to the elder asking it for the grantor of a dlock service.
 *
 * @since GemFire 4.0
 */
public class GrantorRequestProcessor extends ReplyProcessor21 {
  private static final Logger logger = LogService.getLogger();

  // Reply payload captured by process(); read by basicOp() after the wait completes.
  private GrantorInfo result;

  ////////// Public static entry point /////////

  /**
   * The number of milliseconds to sleep for elder change if current elder is departing (and already
   * sent shutdown msg) but is still in the View.
   */
  public static final long ELDER_CHANGE_SLEEP =
      Long.getLong("GrantorRequestProcessor.ELDER_CHANGE_SLEEP", 100);

  // Wire-level operation codes carried by GrantorRequestMessage.
  private static final byte GET_OP = 0;
  private static final byte BECOME_OP = 1;
  private static final byte CLEAR_OP = 2;
  private static final byte PEEK_OP = 3;
  private static final byte CLEAR_WITH_LOCKS_OP = 4;

  // Sentinel result used for the CLEAR ops, which have no grantor to report.
  @Immutable
  private static final GrantorInfo CLEAR_COMPLETE = new GrantorInfo(null, 0, 0, false);

  /**
   * Encapsulates the context necessary for processing a given grantor request for a given
   * InternalDistributedSystem
   *
   */
  public static class GrantorRequestContext {
    /**
     * Locks access to elders
     */
    final StoppableReentrantLock elderLock;

    /**
     * Subservient condition to {@link #elderLock}
     */
    final StoppableCondition elderLockCondition;

    /**
     * Our notion of the current elder
     *
     * guarded.By {@link #elderLock}
     */
    InternalDistributedMember currentElder = null;

    /**
     * Count of the elder calls in-flight
     *
     * guarded.By {@link #elderLock}
     */
    int elderCallsInProgress = 0;

    /**
     * If true, we're cooling our heels waiting for the elders to pass the baton
     *
     * guarded.By {@link #elderLock}
     */
    boolean waitingToChangeElder = false;

    public GrantorRequestContext(CancelCriterion cancelCriterion) {
      elderLock = new StoppableReentrantLock(cancelCriterion);
      elderLockCondition = elderLock.newCondition();
    }
  }

  /**
   * Registers an in-flight elder call, waiting first for any calls targeting a previous elder to
   * drain.
   *
   * @return true if the call was started against {@code elder}; false if the caller must retry
   *         because a different elder was in use
   */
  private static boolean basicStartElderCall(InternalDistributedSystem sys, ElderState es,
      InternalDistributedMember elder, DLockService dls) {
    GrantorRequestContext grc = sys.getGrantorRequestContext();
    grc.elderLock.lock();
    try {
      if (es != null) {
        // elder is in our vm
        if (grc.elderCallsInProgress > 0) {
          // wait until all the calls in progress to an old rmt elder complete.
          // We know it is some other elder because we don't count the
          // calls in progress to a local elder.
          elderSyncWait(sys, elder, dls);
        }
      } else {
        // elder is in remote vm
        if (grc.elderCallsInProgress > 0) {
          if (elder == grc.currentElder) {
            grc.elderCallsInProgress += 1;
          } else if (elder != null && elder.equals(grc.currentElder)) {
            grc.elderCallsInProgress += 1;
          } else {
            elderSyncWait(sys, elder, dls);
            return false;
          }
        } else {
          grc.currentElder = elder;
          grc.elderCallsInProgress = 1;
        }
      }
      return true;
    } finally {
      grc.elderLock.unlock();
    }
  }

  /**
   * Waits until elder recovery can proceed safely. Currently this is done by waiting until any in
   * progress calls to an old elder are complete
   *
   * @param elderId the member id of the new elder; null if new elder is local
   */
  static void readyForElderRecovery(InternalDistributedSystem sys,
      InternalDistributedMember elderId, DLockService dls) {
    GrantorRequestContext grc = sys.getGrantorRequestContext();
    if (elderId != null) {
      grc.elderLock.lock();
      try {
        if (grc.elderCallsInProgress > 0) {
          // make sure they are not going to the new elder
          if (elderId != grc.currentElder && !elderId.equals(grc.currentElder)) {
            elderSyncWait(sys, elderId, dls);
          }
        }
      } finally {
        grc.elderLock.unlock();
      }
    } else {
      grc.elderLock.lock();
      try {
        if (grc.elderCallsInProgress > 0) {
          // wait until all the calls in progress to an old rmt elder complete.
          // We know it is some other elder because we don't count the
          // calls in progress to a local elder.
          elderSyncWait(sys, /* elderId */ null, dls);
        }
      } finally {
        grc.elderLock.unlock();
      }
    }
  }

  /**
   * Blocks (on {@code elderLockCondition}) until {@code waitingToChangeElder} is cleared by
   * {@link #finishElderCall}. Caller must hold the elder lock. The interrupt status is preserved
   * and re-asserted on exit.
   */
  private static void elderSyncWait(InternalDistributedSystem sys,
      InternalDistributedMember newElder, DLockService dls) {
    GrantorRequestContext grc = sys.getGrantorRequestContext();
    grc.waitingToChangeElder = true;
    final String message = String.format(
        "GrantorRequestProcessor.elderSyncWait: The current Elder %s is waiting for the new Elder %s.",
        grc.currentElder, newElder);
    while (grc.waitingToChangeElder) {
      logger.info(LogMarker.DLS_MARKER, message);
      boolean interrupted = Thread.interrupted();
      try {
        grc.elderLockCondition.await(sys.getConfig().getMemberTimeout());
      } catch (InterruptedException e) {
        interrupted = true;
        sys.getCancelCriterion().checkCancelInProgress(e);
      } finally {
        if (interrupted) {
          Thread.currentThread().interrupt();
        }
      }
    }
  }

  /**
   * Sets currentElder to the memberId of the current elder if elder is remote; null if elder is in
   * our vm.
   */
  private static ElderState startElderCall(InternalDistributedSystem sys, DLockService dls)
      throws InterruptedException {
    InternalDistributedMember elder;
    ElderState es = null;

    final DistributionManager dm = sys.getDistributionManager();
    boolean elderCallStarted = false;
    while (!elderCallStarted) {
      dm.throwIfDistributionStopped();
      elder = dm.getElderId(); // call this before getElderState
      Assert.assertTrue(elder != null, "starting an elder call with no valid elder");
      if (dm.getId().equals(elder)) {
        try {
          es = dm.getElderState(false);
        } catch (IllegalStateException e) {
          // loop back around to reacquire Collaboration and try elder lock again
          continue;
        }
      } else {
        es = null;
      }
      elderCallStarted = basicStartElderCall(sys, es, elder, dls);
    }

    return es;
  }

  /**
   * Unregisters an in-flight elder call started by {@link #startElderCall}; wakes any threads
   * parked in {@link #elderSyncWait} once the last remote-elder call completes.
   */
  private static void finishElderCall(GrantorRequestContext grc, ElderState es) {
    if (es == null) {
      grc.elderLock.lock();
      try {
        Assert.assertTrue(grc.elderCallsInProgress > 0);
        grc.elderCallsInProgress -= 1;
        if (grc.elderCallsInProgress == 0) {
          grc.currentElder = null;
          if (grc.waitingToChangeElder) {
            grc.waitingToChangeElder = false;
            grc.elderLockCondition.signalAll();
          }
        }
      } finally {
        grc.elderLock.unlock();
      }
    }
  }

  /**
   * Asks the elder who the grantor is for the specified service. If no grantor exists then makes us
   * the grantor.
   *
   * @param service the service we want to know the grantor of.
   * @param sys the distributed system
   * @return information describing the current grantor of this service and if it needs recovery.
   */
  public static GrantorInfo getGrantor(DLockService service, int dlsSerialNumber,
      InternalDistributedSystem sys) {
    return basicOp(-1, service, dlsSerialNumber, sys, null, GET_OP);
  }

  /**
   * Asks the elder who the grantor is for the specified service.
   *
   * @param service the service we want to know the grantor of.
   * @param sys th distributed system
   * @return information describing the current grantor of this service and if recovery is needed
   */
  static GrantorInfo peekGrantor(DLockService service, InternalDistributedSystem sys) {
    return basicOp(-1, service, -1, sys, null, PEEK_OP);
  }

  static GrantorInfo peekGrantor(String serviceName, InternalDistributedSystem sys) {
    return basicOp(-1, serviceName, null, -1, sys, null, PEEK_OP);
  }

  /**
   * Tells the elder we want to become the grantor
   *
   * @param service the service we want to be the grantor of.
   * @param oldTurk if non-null then only become grantor if it is currently oldTurk.
   * @param sys the distributed system
   * @return information describing the previous grantor, if any, and if we need to do a grantor
   *         recovery
   */
  static GrantorInfo becomeGrantor(DLockService service, int dlsSerialNumber,
      InternalDistributedMember oldTurk, InternalDistributedSystem sys) {
    return basicOp(-1, service, dlsSerialNumber, sys, oldTurk, BECOME_OP);
  }

  /**
   * Tells the elder we are doing a clean destroy of our grantor
   *
   * @param service the service we are no longer the grantor of.
   * @param sys the distributed system
   */
  static void clearGrantor(long grantorVersion, DLockService service, int dlsSerialNumber,
      InternalDistributedSystem sys, boolean withLocks) {
    basicOp(grantorVersion, service, dlsSerialNumber, sys, null,
        withLocks ? CLEAR_WITH_LOCKS_OP : CLEAR_OP);
  }

  /**
   * @param opCode encodes what operation we are doing
   */
  private static GrantorInfo basicOp(long grantorVersion, DLockService service, int dlsSerialNumber,
      InternalDistributedSystem sys, InternalDistributedMember oldTurk, byte opCode) {
    return basicOp(grantorVersion, service.getName(), service, dlsSerialNumber, sys, oldTurk,
        opCode);
  }

  /**
   * Core driver for every elder operation: starts an elder call, performs the op locally when this
   * vm is the elder or by messaging otherwise, and retries against a new elder when the targeted
   * one departs mid-call (except for CLEAR ops, which a new elder has nothing to clear).
   */
  private static GrantorInfo basicOp(long grantorVersion, String serviceName, DLockService service,
      int dlsSerialNumber, InternalDistributedSystem system, InternalDistributedMember oldTurk,
      byte opCode) {
    GrantorInfo result = null;
    DistributionManager dm = system.getDistributionManager();
    GrantorRequestContext grc = system.getGrantorRequestContext();
    boolean tryNewElder;
    boolean interrupted = false;
    try {
      do {
        tryNewElder = false;
        ElderState es = null;
        try {
          es = startElderCall(system, service);
        } catch (InterruptedException e) {
          interrupted = true;
        }
        dm.throwIfDistributionStopped();
        try {
          if (es != null) {
            // local elder so do it without messaging
            switch (opCode) {
              case GET_OP:
                result = es.getGrantor(serviceName, dm.getId(), dlsSerialNumber);
                break;
              case PEEK_OP:
                result = es.peekGrantor(serviceName);
                break;
              case BECOME_OP:
                result = es.becomeGrantor(serviceName, dm.getId(), dlsSerialNumber, oldTurk);
                break;
              case CLEAR_OP:
                es.clearGrantor(grantorVersion, serviceName, dlsSerialNumber, dm.getId(), false);
                result = CLEAR_COMPLETE;
                break;
              case CLEAR_WITH_LOCKS_OP:
                es.clearGrantor(grantorVersion, serviceName, dlsSerialNumber, dm.getId(), true);
                result = CLEAR_COMPLETE;
                break;
              default:
                throw new IllegalStateException("Unknown opCode " + opCode);
            }
          } else {
            // remote elder so send message
            GrantorRequestProcessor processor = new GrantorRequestProcessor(system,
                grc.currentElder);
            boolean sent = GrantorRequestMessage.send(grantorVersion, dlsSerialNumber, serviceName,
                grc.currentElder, dm, processor, oldTurk, opCode);
            if (!sent) {
              if (logger.isTraceEnabled(LogMarker.DLS_VERBOSE)) {
                logger.trace(LogMarker.DLS_VERBOSE, "Unable to communicate with elder {}",
                    grc.currentElder);
              }
            }
            try {
              processor.waitForRepliesUninterruptibly();
            } catch (ReplyException e) {
              e.handleCause();
            }
            if (processor.result != null) {
              result = processor.result;
            } else {
              // no result and no longer waiting...
              // sleep if targeted elder still in view but not activeMembers
              if (!dm.getDistributionManagerIds().contains(grc.currentElder)
                  && dm.getViewMembers().contains(grc.currentElder)) {
                // if true then elder no longer in DM activeMembers
                // but elder is still in the View
                // elder probably sent shutdown msg but may not yet left View
                try {
                  Thread.sleep(ELDER_CHANGE_SLEEP);
                } catch (InterruptedException e) {
                  interrupted = true;
                  dm.getCancelCriterion().checkCancelInProgress(e);
                }
              }
              // targetted elder either died or already sent us a shutdown msg
              if (opCode != CLEAR_OP && opCode != CLEAR_WITH_LOCKS_OP) {
                // Note we do not try a new elder if doing a clear because
                // the new elder will not have anything for us to clear.
                // It will have done an ElderInit.
                tryNewElder = true;
              }
            }
          }
        } finally {
          finishElderCall(grc, es);
        }
      } while (tryNewElder);
    } finally {
      if (interrupted) {
        Thread.currentThread().interrupt();
      }
    }
    return result;
  }

  //////////// Instance methods //////////////

  /**
   * Creates a new instance of GrantorRequestProcessor
   */
  private GrantorRequestProcessor(InternalDistributedSystem system,
      InternalDistributedMember elder) {
    super(system, elder);
  }

  @Override
  public void process(DistributionMessage msg) {
    if (msg instanceof GrantorInfoReplyMessage) {
      GrantorInfoReplyMessage giMsg = (GrantorInfoReplyMessage) msg;
      result = giMsg.getGrantorInfo();
    } else if (msg instanceof ReplyMessage) {
      if (((ReplyMessage) msg).getException() == null) {
        // must be a reply sent back from a CLEAR_OP
        result = CLEAR_COMPLETE;
      }
    } else {
      // Fixed diagnostic: the message previously named a non-existent "CReplyMessage" type.
      Assert.assertTrue(false,
          "Expected instance of GrantorInfoReplyMessage or ReplyMessage but got "
              + msg.getClass());
    }
    super.process(msg);
  }

  /////////////// Inner message classes //////////////////

  public static class GrantorRequestMessage extends PooledDistributionMessage
      implements MessageWithReply {
    private long grantorVersion;
    private int dlsSerialNumber;
    private String serviceName;
    private int processorId;
    private byte opCode;
    private InternalDistributedMember oldTurk;

    /**
     * @return true if the message was sent
     */
    protected static boolean send(long grantorVersion, int dlsSerialNumber, String serviceName,
        InternalDistributedMember elder, DistributionManager dm, ReplyProcessor21 proc,
        InternalDistributedMember oldTurk, byte opCode) {
      GrantorRequestMessage msg = new GrantorRequestMessage();
      msg.grantorVersion = grantorVersion;
      msg.dlsSerialNumber = dlsSerialNumber;
      msg.serviceName = serviceName;
      msg.oldTurk = oldTurk;
      msg.opCode = opCode;
      msg.processorId = proc.getProcessorId();
      msg.setRecipient(elder);
      if (logger.isTraceEnabled(LogMarker.DLS_VERBOSE)) {
        logger.trace(LogMarker.DLS_VERBOSE, "GrantorRequestMessage sending {} to {}", msg, elder);
      }
      Set failures = dm.putOutgoing(msg);
      return failures == null || failures.size() == 0;
    }

    @Override
    public int getProcessorId() {
      return processorId;
    }

    private void replyGrantorInfo(DistributionManager dm, GrantorInfo gi) {
      GrantorInfoReplyMessage.send(this, dm, gi);
    }

    private void replyClear(DistributionManager dm) {
      ReplyMessage.send(getSender(), getProcessorId(), null, dm);
    }

    @Override
    protected void process(ClusterDistributionManager dm) {
      basicProcess(dm);
    }

    /**
     * Executes the requested elder operation in this (elder) vm and replies to the sender.
     */
    protected void basicProcess(final DistributionManager dm) {
      // we should be in the elder
      final ElderState es;
      try {
        es = dm.getElderState(true);
      } catch (InterruptedException e) {
        // Restore the interrupt status before bailing out; the previous code
        // swallowed the interrupt, hiding it from the thread pool.
        Thread.currentThread().interrupt();
        logger.info("Interrupted while processing {}", this);
        return;
      }
      switch (opCode) {
        case GET_OP:
          replyGrantorInfo(dm, es.getGrantor(serviceName, getSender(), dlsSerialNumber));
          break;
        case PEEK_OP:
          replyGrantorInfo(dm, es.peekGrantor(serviceName));
          break;
        case BECOME_OP:
          replyGrantorInfo(dm, es.becomeGrantor(serviceName, getSender(), dlsSerialNumber, oldTurk));
          break;
        case CLEAR_OP:
          es.clearGrantor(grantorVersion, serviceName, dlsSerialNumber, getSender(), false);
          replyClear(dm);
          break;
        case CLEAR_WITH_LOCKS_OP:
          es.clearGrantor(grantorVersion, serviceName, dlsSerialNumber, getSender(), true);
          replyClear(dm);
          break;
        default:
          throw new IllegalStateException("Unknown opCode " + opCode);
      }
    }

    @Override
    public int getDSFID() {
      return GRANTOR_REQUEST_MESSAGE;
    }

    @Override
    public void fromData(DataInput in,
        DeserializationContext context) throws IOException, ClassNotFoundException {
      super.fromData(in, context);
      grantorVersion = in.readLong();
      dlsSerialNumber = in.readInt();
      serviceName = DataSerializer.readString(in);
      processorId = in.readInt();
      opCode = in.readByte();
      if (opCode == BECOME_OP) {
        oldTurk = DataSerializer.readObject(in);
      }
    }

    @Override
    public void toData(DataOutput out,
        SerializationContext context) throws IOException {
      super.toData(out, context);
      out.writeLong(grantorVersion);
      out.writeInt(dlsSerialNumber);
      DataSerializer.writeString(serviceName, out);
      out.writeInt(processorId);
      out.writeByte(opCode);
      if (opCode == BECOME_OP) {
        DataSerializer.writeObject(oldTurk, out);
      }
    }

    public static String opCodeToString(int opCode) {
      String string = null;
      switch (opCode) {
        case GET_OP:
          string = "GET_OP";
          break;
        case BECOME_OP:
          string = "BECOME_OP";
          break;
        case CLEAR_OP:
          string = "CLEAR_OP";
          break;
        case PEEK_OP:
          string = "PEEK_OP";
          break;
        case CLEAR_WITH_LOCKS_OP:
          string = "CLEAR_WITH_LOCKS_OP";
          break;
        default:
          string = "UNKNOWN:" + opCode;
          break;
      }
      return string;
    }

    @Override
    public String toString() {
      String opCodeString = opCodeToString(opCode);
      return "GrantorRequestMessage (service='" + serviceName + "'; grantorVersion="
          + grantorVersion + "'; dlsSerialNumber=" + dlsSerialNumber + "'; processorId="
          + processorId + "'; opCode=" + opCodeString + "'; oldT=" + oldTurk + ")";
    }
  }

  public static class GrantorInfoReplyMessage extends ReplyMessage {
    private InternalDistributedMember grantor;
    private long elderVersionId;
    private int grantorSerialNumber;
    private boolean needsRecovery;

    public static void send(MessageWithReply reqMsg, DistributionManager dm, GrantorInfo gi) {
      GrantorInfoReplyMessage m = new GrantorInfoReplyMessage();
      m.grantor = gi.getId();
      m.needsRecovery = gi.needsRecovery();
      m.elderVersionId = gi.getVersionId();
      m.grantorSerialNumber = gi.getSerialNumber();
      m.processorId = reqMsg.getProcessorId();
      m.setRecipient(reqMsg.getSender());
      dm.putOutgoing(m);
    }

    public GrantorInfo getGrantorInfo() {
      return new GrantorInfo(grantor, elderVersionId, grantorSerialNumber, needsRecovery);
    }

    @Override
    public int getDSFID() {
      return GRANTOR_INFO_REPLY_MESSAGE;
    }

    @Override
    public void fromData(DataInput in,
        DeserializationContext context) throws IOException, ClassNotFoundException {
      super.fromData(in, context);
      grantor = DataSerializer.readObject(in);
      elderVersionId = in.readLong();
      grantorSerialNumber = in.readInt();
      needsRecovery = in.readBoolean();
    }

    @Override
    public void toData(DataOutput out,
        SerializationContext context) throws IOException {
      super.toData(out, context);
      DataSerializer.writeObject(grantor, out);
      out.writeLong(elderVersionId);
      out.writeInt(grantorSerialNumber);
      out.writeBoolean(needsRecovery);
    }

    @Override
    public String toString() {
      return "GrantorInfoReplyMessage" + "; sender=" + getSender() + "; processorId="
          + super.processorId + "; grantor=" + grantor + "; elderVersionId=" + elderVersionId
          + "; grantorSerialNumber=" + grantorSerialNumber + "; needsRecovery=" + needsRecovery
          + ")";
    }
  }
}
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.keymap.impl; import com.intellij.openapi.actionSystem.KeyboardShortcut; import com.intellij.openapi.actionSystem.MouseShortcut; import com.intellij.openapi.keymap.Keymap; import com.intellij.openapi.keymap.ex.KeymapManagerEx; import com.intellij.testFramework.LightPlatformTestCase; import javax.swing.*; import java.awt.event.InputEvent; import java.awt.event.MouseEvent; import static org.assertj.core.api.Assertions.assertThat; public class KeymapTest extends LightPlatformTestCase { private static final String ACTION_1 = "ACTION_1"; private static final String ACTION_2 = "ACTION_2"; private static final String ACTION_NON_EXISTENT = "NON_EXISTENT"; KeyboardShortcut shortcut1 = new KeyboardShortcut(KeyStroke.getKeyStroke('1'), null); KeyboardShortcut shortcut2 = new KeyboardShortcut(KeyStroke.getKeyStroke('2'), null); KeyboardShortcut shortcutA = new KeyboardShortcut(KeyStroke.getKeyStroke('a'), null); KeyboardShortcut shortcutB = new KeyboardShortcut(KeyStroke.getKeyStroke('b'), null); private KeymapImpl myParent; private KeymapImpl myChild; @Override public void setUp() throws Exception { super.setUp(); myParent = new KeymapImpl(); myParent.setName("Parent"); myParent.setCanModify(false); myParent.addShortcut(ACTION_1, shortcut1); myParent.addShortcut(ACTION_2, shortcut2); myChild = myParent.deriveKeymap("Child"); myChild.setCanModify(false); 
  // Tail of a method whose start lies above this chunk — reproduced unchanged.
  assertThat(myParent).isSameAs(myChild.getParent());
  myChild.addShortcut(ACTION_1, shortcutA);
  }

  // Shortcuts added to the parent are visible through the child; the child tracks
  // "own" ids only for actions it overrides (here ACTION_1 via shortcutA).
  public void testParentAndChildShortcuts() {
    assertTrue(myParent.hasOwnActionId(ACTION_1));
    assertTrue(myParent.hasOwnActionId(ACTION_2));
    assertFalse(myParent.hasOwnActionId(ACTION_NON_EXISTENT));
    assertSameElements(myParent.getShortcuts(ACTION_1), shortcut1);
    assertSameElements(myParent.getShortcuts(ACTION_2), shortcut2);
    assertSameElements(myParent.getShortcuts(ACTION_NON_EXISTENT));
    assertSameElements(myParent.getActionIds(shortcut1), ACTION_1);
    assertSameElements(myParent.getActionIds(shortcut2), ACTION_2);
    assertSameElements(myParent.getActionIds(shortcutA));
    assertSameElements(myParent.getActionIds(shortcutB));

    assertTrue(myChild.hasOwnActionId(ACTION_1));
    assertFalse(myChild.hasOwnActionId(ACTION_2));
    assertFalse(myChild.hasOwnActionId(ACTION_NON_EXISTENT));
    assertSameElements(myChild.getShortcuts(ACTION_1), shortcut1, shortcutA);
    assertSameElements(myChild.getShortcuts(ACTION_2), shortcut2);
    assertSameElements(myChild.getShortcuts(ACTION_NON_EXISTENT));
    assertSameElements(myChild.getActionIds(shortcut1), ACTION_1);
    assertSameElements(myChild.getActionIds(shortcut2), ACTION_2);
    assertSameElements(myChild.getActionIds(shortcutA), ACTION_1);
    assertSameElements(myChild.getActionIds(shortcutB));
  }

  // Removing from the parent does not strip the shortcut from a child that
  // overrides the action; removing from the child only affects the child copy.
  public void testRemovingShortcutsFromParentAndChild() {
    myParent.removeShortcut(ACTION_1, shortcut1);
    assertFalse(myParent.hasOwnActionId(ACTION_1));
    assertTrue(myParent.hasOwnActionId(ACTION_2));
    assertFalse(myParent.hasOwnActionId(ACTION_NON_EXISTENT));
    assertSameElements(myParent.getShortcuts(ACTION_1));
    assertSameElements(myParent.getShortcuts(ACTION_2), shortcut2);
    assertSameElements(myParent.getActionIds(shortcut1));
    assertSameElements(myParent.getActionIds(shortcut2), ACTION_2);
    // child keymap still lists inherited shortcut
    assertSameElements(myChild.getShortcuts(ACTION_1), shortcut1, shortcutA);

    myChild.removeShortcut(ACTION_1, shortcut1);
    assertSameElements(myChild.getShortcuts(ACTION_1), shortcutA);
    assertSameElements(myChild.getActionIds(shortcut1));
    assertSameElements(myChild.getActionIds(shortcutA), ACTION_1);
    assertTrue(myChild.hasOwnActionId(ACTION_1));

    myChild.removeShortcut(ACTION_1, shortcutA);
    assertSameElements(myChild.getShortcuts(ACTION_1));
    assertSameElements(myChild.getActionIds(shortcutA));
    assertFalse(myChild.hasOwnActionId(ACTION_1)); // since equal to parent list

    myChild.removeShortcut(ACTION_2, shortcut2);
    assertSameElements(myChild.getShortcuts(ACTION_2));
    assertSameElements(myChild.getActionIds(shortcut2));
    assertTrue(myChild.hasOwnActionId(ACTION_2)); // since different from parent list
  }

  public void testRemovingShortcutFromChildWhenInheritedDontChangeTheListIfShortcutIsAbsent() {
    myParent.clearOwnActionsIds();
    myChild.clearOwnActionsIds();

    myParent.addShortcut(ACTION_1, shortcut1);
    assertThat(myParent.hasOwnActionId(ACTION_1)).isTrue();
    assertThat(myChild.hasOwnActionId(ACTION_1)).isFalse();
    assertThat(myChild.getShortcuts(ACTION_1)).containsExactly(shortcut1);

    // should not have any effect
    myChild.removeShortcut(ACTION_1, shortcutA);
    assertThat(myParent.hasOwnActionId(ACTION_1)).isTrue();
    assertThat(myChild.hasOwnActionId(ACTION_1)).isFalse();
    assertThat(myChild.getShortcuts(ACTION_1)).containsExactly(shortcut1);

    myParent.addShortcut(ACTION_2, shortcut2);
    myParent.addShortcut(ACTION_2, shortcutA);
    myParent.addShortcut(ACTION_2, shortcutB);
    myChild.removeShortcut(ACTION_2, shortcutA);
    assertThat(myChild.getShortcuts(ACTION_2)).containsExactly(shortcut2, shortcutB);
  }

  // Removing the first inherited shortcut keeps the order of the remaining ones.
  public void testRemovingShortcutFirst() {
    myParent.clearOwnActionsIds();
    myChild.clearOwnActionsIds();

    myParent.addShortcut(ACTION_2, shortcut2);
    myParent.addShortcut(ACTION_2, shortcutA);
    myParent.addShortcut(ACTION_2, shortcutB);
    myChild.removeShortcut(ACTION_2, shortcut2);
    assertThat(myChild.getShortcuts(ACTION_2)).containsExactly(shortcutA, shortcutB);
  }

  public void testRemoveMouseShortcut() {
    myParent.clearOwnActionsIds();
    myChild.clearOwnActionsIds();

    MouseShortcut mouseShortcut = new MouseShortcut(1, InputEvent.BUTTON2_MASK, 1);
    myParent.addShortcut(ACTION_2, mouseShortcut);
    assertThat(myChild.getActionIds(mouseShortcut)).containsExactly(ACTION_2);
    myChild.removeShortcut(ACTION_2, mouseShortcut);
    assertThat(myChild.getActionIds(mouseShortcut)).isEmpty();
  }

  // decided to not change order and keep old behavior
  //public void testChangeMouseShortcut() throws Exception {
  //  myParent.clearOwnActionsIds();
  //  myChild.clearOwnActionsIds();
  //
  //  ActionManager actionManager = ActionManager.getInstance();
  //  actionManager.registerAction(ACTION_2, new EmptyAction());
  //  actionManager.registerAction(ACTION_1, new EmptyAction());
  //  try {
  //    MouseShortcut mouseShortcut = new MouseShortcut(1, InputEvent.BUTTON2_MASK, 1);
  //    myParent.addShortcut(ACTION_2, mouseShortcut);
  //    assertThat(myChild.getActionIds(mouseShortcut)).containsExactly(ACTION_2);
  //
  //    Keymap grandChild = myChild.deriveKeymap("GrandChild");
  //    myChild.addShortcut(ACTION_2, mouseShortcut);
  //
  //    grandChild.addShortcut(ACTION_1, mouseShortcut);
  //    assertThat(grandChild.getActionIds(mouseShortcut)).containsExactly(ACTION_1, ACTION_2);
  //  }
  //  finally {
  //    actionManager.unregisterAction(ACTION_2);
  //    actionManager.unregisterAction(ACTION_1);
  //  }
  //}

  // A grandchild that removes the inherited mapping and rebinds the shortcut
  // must report only its own action for that shortcut.
  public void testChangingMouseShortcutInGrandChild() {
    MouseShortcut mouseShortcut = new MouseShortcut(MouseEvent.BUTTON1, 0, 1);
    myParent.addShortcut(ACTION_2, mouseShortcut);
    Keymap grandChild = myChild.deriveKeymap("GrandChild");
    grandChild.removeShortcut(ACTION_2, mouseShortcut);
    grandChild.addShortcut(ACTION_1, mouseShortcut);
    assertThat(grandChild.getActionIds(mouseShortcut)).containsExactly(ACTION_1);
  }

  // Removing the last inherited shortcut keeps the order of the remaining ones.
  public void testRemovingShortcutLast() {
    myParent.clearOwnActionsIds();
    myChild.clearOwnActionsIds();

    myParent.addShortcut(ACTION_2, shortcut2);
    myParent.addShortcut(ACTION_2, shortcutA);
    myParent.addShortcut(ACTION_2, shortcutB);
    myChild.removeShortcut(ACTION_2, shortcutB);
    assertThat(myChild.getShortcuts(ACTION_2)).containsExactly(shortcut2, shortcutA);
  }

  // Removing an inherited shortcut in the child materializes an own (child-local)
  // mapping containing the remaining shortcuts; the parent is untouched.
  public void testRemovingShortcutFromChildWhenInherited() {
    myParent.clearOwnActionsIds();
    myChild.clearOwnActionsIds();

    myParent.addShortcut(ACTION_1, shortcut1);
    myParent.addShortcut(ACTION_1, shortcut2);
    assertTrue(myParent.hasOwnActionId(ACTION_1));
    assertSameElements(myParent.getShortcuts(ACTION_1), shortcut1, shortcut2);
    assertFalse(myChild.hasOwnActionId(ACTION_1));
    assertSameElements(myChild.getShortcuts(ACTION_1), shortcut1, shortcut2);

    myChild.removeShortcut(ACTION_1, shortcut1);
    assertTrue(myParent.hasOwnActionId(ACTION_1));
    assertSameElements(myParent.getShortcuts(ACTION_1), shortcut1, shortcut2);
    assertTrue(myChild.hasOwnActionId(ACTION_1));
    assertSameElements(myChild.getShortcuts(ACTION_1), shortcut2);
  }

  // DEPENDENT is bound to BASE: it inherits BASE's shortcuts until it gets an
  // own mapping; removing from DEPENDENT must not alter BASE.
  public void testRemovingShortcutFromChildWhenInheritedAndBound() {
    myParent.clearOwnActionsIds();
    myChild.clearOwnActionsIds();

    String BASE = "BASE_ACTION";
    String DEPENDENT = "DEPENDENT_ACTION";
    KeymapManagerEx.getInstanceEx().bindShortcuts(BASE, DEPENDENT);
    try {
      myParent.addShortcut(BASE, shortcut1);
      myParent.addShortcut(BASE, shortcut2);
      assertTrue(myParent.hasOwnActionId(BASE));
      assertSameElements(myParent.getShortcuts(BASE), shortcut1, shortcut2);
      assertFalse(myChild.hasOwnActionId(BASE));
      assertSameElements(myChild.getShortcuts(BASE), shortcut1, shortcut2);
      assertFalse(myChild.hasOwnActionId(DEPENDENT));
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcut2);

      // child::BASE don't have it's own mapping
      myChild.removeShortcut(DEPENDENT, shortcut1);
      assertFalse(myChild.hasOwnActionId(BASE));
      assertSameElements(myChild.getShortcuts(BASE), shortcut1, shortcut2);
      assertTrue(myChild.hasOwnActionId(DEPENDENT));
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut2);

      myChild.clearOwnActionsIds();

      // child::BASE has it's own mapping
      myChild.addShortcut(BASE, shortcutA);
      assertTrue(myChild.hasOwnActionId(BASE));
      assertSameElements(myChild.getShortcuts(BASE), shortcut1, shortcut2, shortcutA);
      assertFalse(myChild.hasOwnActionId(DEPENDENT));
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcut2, shortcutA);

      myChild.removeShortcut(DEPENDENT, shortcut1);
      assertTrue(myChild.hasOwnActionId(BASE));
      assertSameElements(myChild.getShortcuts(BASE), shortcut1, shortcut2, shortcutA);
      assertTrue(myChild.hasOwnActionId(DEPENDENT));
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut2, shortcutA);
    }
    finally {
      KeymapManagerEx.getInstanceEx().unbindShortcuts(DEPENDENT);
    }
  }

  // A standalone keymap (no parent): removing from an action with an empty
  // mapping is a no-op; removing from a bound action materializes an own
  // (empty) mapping for it.
  public void testRemovingShortcutNotInheritedBoundAndNotBound() {
    KeymapImpl standalone = new KeymapImpl();
    standalone.setName("standalone");

    String BASE1 = "BASE_ACTION1";
    String DEPENDENT1 = "DEPENDENT_ACTION1";
    String BASE2 = "BASE_ACTION2";
    String DEPENDENT2 = "DEPENDENT_ACTION2";
    KeymapManagerEx.getInstanceEx().bindShortcuts(BASE1, DEPENDENT1);
    KeymapManagerEx.getInstanceEx().bindShortcuts(BASE2, DEPENDENT2);
    try {
      standalone.addShortcut(ACTION_1, shortcut1);
      standalone.addShortcut(BASE1, shortcut1);
      assertTrue(standalone.hasOwnActionId(ACTION_1));
      assertFalse(standalone.hasOwnActionId(ACTION_2));
      assertTrue(standalone.hasOwnActionId(BASE1));
      assertFalse(standalone.hasOwnActionId(DEPENDENT1));
      assertFalse(standalone.hasOwnActionId(BASE2));
      assertFalse(standalone.hasOwnActionId(DEPENDENT2));

      standalone.removeShortcut(ACTION_1, shortcut1);
      standalone.removeShortcut(ACTION_2, shortcut1); // empty mapping -> should not have any effect
      standalone.removeShortcut(DEPENDENT1, shortcut1);
      standalone.removeShortcut(DEPENDENT2, shortcut1); // empty mapping -> should not have any effect

      assertFalse(standalone.hasOwnActionId(ACTION_1));
      assertFalse(standalone.hasOwnActionId(ACTION_2));
      assertTrue(standalone.hasOwnActionId(BASE1));
      assertTrue(standalone.hasOwnActionId(DEPENDENT1));
      assertSameElements(standalone.getShortcuts(BASE1), shortcut1);
      assertSameElements(standalone.getShortcuts(DEPENDENT1));
      assertFalse(standalone.hasOwnActionId(BASE2));
      assertFalse(standalone.hasOwnActionId(DEPENDENT2));
    }
    finally {
      KeymapManagerEx.getInstanceEx().unbindShortcuts(DEPENDENT1);
      KeymapManagerEx.getInstanceEx().unbindShortcuts(DEPENDENT2);
    }
  }

  // clearOwnActionsId drops the child's override, restoring the inherited view.
  public void testResettingMappingInChild() {
    assertSameElements(myChild.getShortcuts(ACTION_1), shortcut1, shortcutA);
    assertSameElements(myChild.getActionIds(shortcut1), ACTION_1);
    assertSameElements(myChild.getActionIds(shortcutA), ACTION_1);
    assertTrue(myChild.hasOwnActionId(ACTION_1));

    myChild.clearOwnActionsId(ACTION_1);
    assertSameElements(myChild.getShortcuts(ACTION_1), shortcut1);
    assertSameElements(myChild.getActionIds(shortcut1), ACTION_1);
    assertSameElements(myChild.getActionIds(shortcutA));
    assertFalse(myChild.hasOwnActionId(ACTION_1));

    myChild.removeShortcut(ACTION_2, shortcut2);
    assertSameElements(myChild.getShortcuts(ACTION_2));
    assertSameElements(myChild.getActionIds(shortcut2));
    assertTrue(myChild.hasOwnActionId(ACTION_2));

    myChild.clearOwnActionsId(ACTION_2);
    assertSameElements(myChild.getShortcuts(ACTION_2), shortcut2);
    assertSameElements(myChild.getActionIds(shortcut2), ACTION_2);
    assertFalse(myChild.hasOwnActionId(ACTION_2));
  }

  public void testChangingAndResettingBoundShortcutsInParentKeymap() {
    myParent.clearOwnActionsIds();
    myChild.clearOwnActionsIds();

    String BASE = "BASE_ACTION";
    String DEPENDENT = "DEPENDENT_ACTION";
    KeymapManagerEx.getInstanceEx().bindShortcuts(BASE, DEPENDENT);
    try {
      assertSameElements(myParent.getShortcuts(BASE));
      assertSameElements(myParent.getShortcuts(DEPENDENT));
      assertSameElements(myParent.getActionIds(shortcut1));
      assertSameElements(myChild.getShortcuts(BASE));
      assertSameElements(myChild.getShortcuts(DEPENDENT));
      assertSameElements(myChild.getActionIds(shortcut1));

      myParent.addShortcut(BASE, shortcut1);
      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   -
      assertSameElements(myParent.getShortcuts(BASE), shortcut1);
      assertSameElements(myParent.getShortcuts(DEPENDENT), shortcut1);
      assertSameElements(myParent.getActionIds(shortcut1), BASE, DEPENDENT);
      assertTrue(myParent.hasOwnActionId(BASE));
      assertFalse(myParent.hasOwnActionId(DEPENDENT));
      assertSameElements(myChild.getShortcuts(BASE), shortcut1);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1);
      assertSameElements(myChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertFalse(myChild.hasOwnActionId(BASE));
      assertFalse(myChild.hasOwnActionId(DEPENDENT));

      // override BASE action in child
      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   BASE -> shortcut1, shortcut2
      //   DEPENDENT -> child:BASE
      myChild.addShortcut(BASE, shortcut2);
      assertSameElements(myChild.getShortcuts(BASE), shortcut1, shortcut2);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcut2);
      assertSameElements(myChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcut2), BASE, DEPENDENT);
      assertTrue(myChild.hasOwnActionId(BASE));
      assertFalse(myChild.hasOwnActionId(DEPENDENT));

      // extend BASE action, overridden in child
      // parent:
      //   BASE -> shortcut1, shortcutA
      //   DEPENDENT -> BASE
      // child:
      //   BASE -> shortcut1, shortcut2
      //   DEPENDENT -> child:BASE
      myParent.addShortcut(BASE, shortcutA);
      // parent
      assertSameElements(myParent.getShortcuts(BASE), shortcut1, shortcutA);
      assertSameElements(myParent.getShortcuts(DEPENDENT), shortcut1, shortcutA);
      assertSameElements(myParent.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myParent.getActionIds(shortcutA), BASE, DEPENDENT);
      assertTrue(myParent.hasOwnActionId(BASE));
      assertFalse(myParent.hasOwnActionId(DEPENDENT));
      // child is not affected since the action is overridden
      assertSameElements(myChild.getShortcuts(BASE), shortcut1, shortcut2);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcut2);
      assertSameElements(myChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcut2), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcutA));
      assertTrue(myChild.hasOwnActionId(BASE));
      assertFalse(myChild.hasOwnActionId(DEPENDENT));

      // extend DEPENDENT action, not-overridden in child
      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> shortcut1, shortcutB
      // child:
      //   BASE -> shortcut1, shortcut2
      //   DEPENDENT -> child:BASE
      myParent.removeShortcut(BASE, shortcutA);
      myParent.addShortcut(DEPENDENT, shortcutB);
      // parent
      assertSameElements(myParent.getShortcuts(BASE), shortcut1);
      assertSameElements(myParent.getShortcuts(DEPENDENT), shortcut1, shortcutB);
      assertSameElements(myParent.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myParent.getActionIds(shortcut2));
      assertSameElements(myParent.getActionIds(shortcutA));
      assertSameElements(myParent.getActionIds(shortcutB), DEPENDENT);
      assertTrue(myParent.hasOwnActionId(BASE));
      assertTrue(myParent.hasOwnActionId(DEPENDENT));
      // child
      assertSameElements(myChild.getShortcuts(BASE), shortcut1, shortcut2);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcut2);
      assertSameElements(myChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcut2), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcutA));
      assertSameElements(myChild.getActionIds(shortcutB));
      assertTrue(myChild.hasOwnActionId(BASE));
      assertFalse(myChild.hasOwnActionId(DEPENDENT));

      // override DEPENDENT action in child
      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> shortcut1, shortcutB
      // child:
      //   BASE -> shortcut1, shortcut2
      //   DEPENDENT -> shortcut1, shortcut2, shortcutA
      myChild.addShortcut(DEPENDENT, shortcutA);
      assertSameElements(myChild.getShortcuts(BASE), shortcut1, shortcut2);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcut2, shortcutA);
      assertSameElements(myChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcut2), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcutA), DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcutB));
      assertTrue(myChild.hasOwnActionId(BASE));
      assertTrue(myChild.hasOwnActionId(DEPENDENT));
    }
    finally {
      KeymapManagerEx.getInstanceEx().unbindShortcuts(DEPENDENT);
    }
  }

  public void testChangingAndResettingBoundShortcutsInChildKeymap() {
    myParent.clearOwnActionsIds();
    myChild.clearOwnActionsIds();

    String BASE = "BASE_ACTION";
    String DEPENDENT = "DEPENDENT_ACTION";
    KeymapManagerEx.getInstanceEx().bindShortcuts(BASE, DEPENDENT);
    try {
      assertSameElements(myParent.getShortcuts(BASE));
      assertSameElements(myParent.getShortcuts(DEPENDENT));
      assertSameElements(myParent.getActionIds(shortcut1));
      assertSameElements(myChild.getShortcuts(BASE));
      assertSameElements(myChild.getShortcuts(DEPENDENT));
      assertSameElements(myChild.getActionIds(shortcut1));

      myParent.addShortcut(BASE, shortcut1);
      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   -
      assertSameElements(myParent.getShortcuts(BASE), shortcut1);
      assertSameElements(myParent.getShortcuts(DEPENDENT), shortcut1);
      assertSameElements(myParent.getActionIds(shortcut1), BASE, DEPENDENT);
      assertTrue(myParent.hasOwnActionId(BASE));
      assertFalse(myParent.hasOwnActionId(DEPENDENT));
      assertSameElements(myChild.getShortcuts(BASE), shortcut1);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1);
      assertSameElements(myChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertFalse(myChild.hasOwnActionId(BASE));
      assertFalse(myChild.hasOwnActionId(DEPENDENT));

      // overriding BASE in child
      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   BASE -> shortcut1, shortcut2
      //   DEPENDENT -> child:BASE
      myChild.addShortcut(BASE, shortcut2);
      assertSameElements(myChild.getShortcuts(BASE), shortcut1, shortcut2);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcut2);
      assertSameElements(myChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcut2), BASE, DEPENDENT);
      assertTrue(myChild.hasOwnActionId(BASE));
      assertFalse(myChild.hasOwnActionId(DEPENDENT));

      // overriding DEPENDENT in child
      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   BASE -> shortcut1, shortcut2
      //   DEPENDENT -> shortcut1, shortcut2, shortcutA
      myChild.addShortcut(DEPENDENT, shortcutA);
      assertSameElements(myChild.getShortcuts(BASE), shortcut1, shortcut2);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcut2, shortcutA);
      assertSameElements(myChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcut2), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcutA), DEPENDENT);
      assertTrue(myChild.hasOwnActionId(BASE));
      assertTrue(myChild.hasOwnActionId(DEPENDENT));

      // removing one of BASE binding
      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   BASE -> shortcut2
      //   DEPENDENT -> shortcut1, shortcut2, shortcutA
      myChild.removeShortcut(BASE, shortcut1);
      assertSameElements(myChild.getShortcuts(BASE), shortcut2);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcut2, shortcutA);
      assertSameElements(myChild.getActionIds(shortcut1), DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcut2), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcutA), DEPENDENT);
      assertTrue(myChild.hasOwnActionId(BASE));
      assertTrue(myChild.hasOwnActionId(DEPENDENT));

      // removing last of BASE binding
      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   BASE -> -
      //   DEPENDENT -> shortcut1, shortcut2, shortcutA
      myChild.removeShortcut(BASE, shortcut2);
      assertSameElements(myChild.getShortcuts(BASE));
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcut2, shortcutA);
      assertSameElements(myChild.getActionIds(shortcut1), DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcut2), DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcutA), DEPENDENT);
      assertTrue(myChild.hasOwnActionId(BASE));
      assertTrue(myChild.hasOwnActionId(DEPENDENT));

      // clearing BASE binding
      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   BASE -> parent:BASE
      //   DEPENDENT -> shortcut1, shortcut2, shortcutA
      myChild.clearOwnActionsId(BASE);
      assertSameElements(myChild.getShortcuts(BASE), shortcut1);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcut2, shortcutA);
      assertSameElements(myChild.getActionIds(shortcut1), DEPENDENT, BASE);
      assertSameElements(myChild.getActionIds(shortcut2), DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcutA), DEPENDENT);
      assertFalse(myChild.hasOwnActionId(BASE));
      assertTrue(myChild.hasOwnActionId(DEPENDENT));

      // clearing DEPENDENT binding
      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   BASE -> parent:BASE
      //   DEPENDENT -> child:BASE
      myChild.clearOwnActionsId(DEPENDENT);
      assertSameElements(myChild.getShortcuts(BASE), shortcut1);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1);
      assertSameElements(myChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcut2));
      assertSameElements(myChild.getActionIds(shortcutA));
      assertFalse(myChild.hasOwnActionId(BASE));
      assertFalse(myChild.hasOwnActionId(DEPENDENT));
    }
    finally {
      KeymapManagerEx.getInstanceEx().unbindShortcuts(DEPENDENT);
    }
  }

  public void testRemovingChildMappingIsTheSameAsResetting() {
    myParent.clearOwnActionsIds();
    myChild.clearOwnActionsIds();

    String BASE = "BASE_ACTION";
    String DEPENDENT = "DEPENDENT_ACTION";
    KeymapManagerEx.getInstanceEx().bindShortcuts(BASE, DEPENDENT);
    try {
      assertSameElements(myParent.getShortcuts(BASE));
      assertSameElements(myParent.getShortcuts(DEPENDENT));
      assertSameElements(myParent.getActionIds(shortcut1));
      assertSameElements(myChild.getShortcuts(BASE));
      assertSameElements(myChild.getShortcuts(DEPENDENT));
      assertSameElements(myChild.getActionIds(shortcut1));

      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   -
      myParent.addShortcut(BASE, shortcut1);

      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   BASE -> shortcut1, shortcutA
      //   DEPENDENT -> shortcut1, shortcutA, shortcutB
      myChild.addShortcut(BASE, shortcutA);
      myChild.addShortcut(DEPENDENT, shortcutB);
      assertSameElements(myChild.getShortcuts(BASE), shortcut1, shortcutA);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcutA, shortcutB);
      assertSameElements(myChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcut2));
      assertSameElements(myChild.getActionIds(shortcutA), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcutB), DEPENDENT);
      assertTrue(myChild.hasOwnActionId(BASE));
      assertTrue(myChild.hasOwnActionId(DEPENDENT));

      // remove from child:BASE first
      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   BASE -> shortcut1
      //   DEPENDENT -> shortcut1, shortcutA, shortcutB
      myChild.removeShortcut(BASE, shortcutA);
      assertSameElements(myChild.getShortcuts(BASE), shortcut1);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcutA, shortcutB);
      assertSameElements(myChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcut2));
      assertSameElements(myChild.getActionIds(shortcutA), DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcutB), DEPENDENT);
      assertFalse(myChild.hasOwnActionId(BASE));
      assertTrue(myChild.hasOwnActionId(DEPENDENT));

      // remove dependent child:BASE first
      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   BASE -> shortcut1 == parent:BASE
      //   DEPENDENT -> shortcut1 == child:BASE
      myChild.removeShortcut(DEPENDENT, shortcutA);
      myChild.removeShortcut(DEPENDENT, shortcutB);
      assertSameElements(myChild.getShortcuts(BASE), shortcut1);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1);
      assertSameElements(myChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcut2));
      assertSameElements(myChild.getActionIds(shortcutA));
      assertSameElements(myChild.getActionIds(shortcutB));
      assertFalse(myChild.hasOwnActionId(BASE));
      assertFalse(myChild.hasOwnActionId(DEPENDENT));
    }
    finally {
      KeymapManagerEx.getInstanceEx().unbindShortcuts(DEPENDENT);
    }
  }

  public void testLookingForShortcutsInParentFirstAndOnlyThenConsiderBoundActions() {
    myParent.clearOwnActionsIds();
    myChild.clearOwnActionsIds();
    KeymapImpl myGrandChild = myChild.deriveKeymap("GrandChild");
    assertSame(myChild, myGrandChild.getParent());

    String BASE = "BASE_ACTION";
    String DEPENDENT = "DEPENDENT_ACTION";
    KeymapManagerEx.getInstanceEx().bindShortcuts(BASE, DEPENDENT);
    try {
      // parent:
      //   BASE -> shortcut1  <-- change is here
      //   DEPENDENT -> BASE
      // child:
      //   -
      // grand-child:
      //   -
      myParent.addShortcut(BASE, shortcut1);
      assertSameElements(myParent.getShortcuts(BASE), shortcut1);
      assertSameElements(myChild.getShortcuts(BASE), shortcut1);
      assertSameElements(myGrandChild.getShortcuts(BASE), shortcut1);
      assertSameElements(myParent.getShortcuts(DEPENDENT), shortcut1);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1);
      assertSameElements(myGrandChild.getShortcuts(DEPENDENT), shortcut1);
      assertTrue(myParent.hasOwnActionId(BASE));
      assertFalse(myParent.hasOwnActionId(DEPENDENT));
      assertFalse(myChild.hasOwnActionId(BASE));
      assertFalse(myChild.hasOwnActionId(DEPENDENT));
      assertFalse(myGrandChild.hasOwnActionId(BASE));
      assertFalse(myGrandChild.hasOwnActionId(DEPENDENT));

      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   BASE -> parent:BASE
      //   DEPENDENT -> shortcut1, +shortcut2  <-- change is here
      // grand-child:
      //   BASE -> parent:BASE
      //   DEPENDENT -> child:DEPENDENT
      myChild.addShortcut(DEPENDENT, shortcut2);
      assertSameElements(myParent.getShortcuts(BASE), shortcut1);
      assertSameElements(myChild.getShortcuts(BASE), shortcut1);
      assertSameElements(myGrandChild.getShortcuts(BASE), shortcut1);
      assertSameElements(myParent.getShortcuts(DEPENDENT), shortcut1);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcut2);
      assertSameElements(myGrandChild.getShortcuts(DEPENDENT), shortcut1, shortcut2);
      assertSameElements(myParent.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myGrandChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myParent.getActionIds(shortcut2));
      assertSameElements(myChild.getActionIds(shortcut2), DEPENDENT);
      assertSameElements(myGrandChild.getActionIds(shortcut2), DEPENDENT);
      assertTrue(myParent.hasOwnActionId(BASE));
      assertFalse(myParent.hasOwnActionId(DEPENDENT));
      assertFalse(myChild.hasOwnActionId(BASE));
      assertTrue(myChild.hasOwnActionId(DEPENDENT));
      assertFalse(myGrandChild.hasOwnActionId(BASE));
      assertFalse(myGrandChild.hasOwnActionId(DEPENDENT));

      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   BASE -> parent:BASE
      //   DEPENDENT -> shortcut1, +shortcut2
      // grand-child:
      //   BASE -> parent:BASE
      //   DEPENDENT -> shortcut1, shortcut2, + shortcutA  <-- change is here
      myGrandChild.addShortcut(DEPENDENT, shortcutA);
      assertSameElements(myParent.getShortcuts(BASE), shortcut1);
      assertSameElements(myChild.getShortcuts(BASE), shortcut1);
      assertSameElements(myGrandChild.getShortcuts(BASE), shortcut1);
      assertSameElements(myParent.getShortcuts(DEPENDENT), shortcut1);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcut2);
      assertSameElements(myGrandChild.getShortcuts(DEPENDENT), shortcut1, shortcut2, shortcutA);
      assertSameElements(myParent.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myGrandChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myParent.getActionIds(shortcut2));
      assertSameElements(myChild.getActionIds(shortcut2), DEPENDENT);
      assertSameElements(myGrandChild.getActionIds(shortcut2), DEPENDENT);
      assertSameElements(myParent.getActionIds(shortcutA));
      assertSameElements(myChild.getActionIds(shortcutA));
      assertSameElements(myGrandChild.getActionIds(shortcutA), DEPENDENT);
      assertTrue(myParent.hasOwnActionId(BASE));
      assertFalse(myParent.hasOwnActionId(DEPENDENT));
      assertFalse(myChild.hasOwnActionId(BASE));
      assertTrue(myChild.hasOwnActionId(DEPENDENT));
      assertFalse(myGrandChild.hasOwnActionId(BASE));
      assertTrue(myGrandChild.hasOwnActionId(DEPENDENT));

      // Now let's try the other way round - redefine base shortcut in children and check that DEPENDENT action uses the correct one
      myChild.clearOwnActionsIds();
      myGrandChild.clearOwnActionsIds();

      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   BASE -> shortcut1, +shortcut2  <-- change is here
      //   DEPENDENT -> child:BASE
      // grand-child:
      //   BASE -> child:BASE
      //   DEPENDENT -> child:BASE
      myChild.addShortcut(BASE, shortcut2);
      assertSameElements(myParent.getShortcuts(BASE), shortcut1);
      assertSameElements(myChild.getShortcuts(BASE), shortcut1, shortcut2);
      assertSameElements(myGrandChild.getShortcuts(BASE), shortcut1, shortcut2);
      assertSameElements(myParent.getShortcuts(DEPENDENT), shortcut1);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcut2);
      assertSameElements(myGrandChild.getShortcuts(DEPENDENT), shortcut1, shortcut2);
      assertSameElements(myParent.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myGrandChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myParent.getActionIds(shortcut2));
      assertSameElements(myChild.getActionIds(shortcut2), BASE, DEPENDENT);
      assertSameElements(myGrandChild.getActionIds(shortcut2), BASE, DEPENDENT);
      assertTrue(myParent.hasOwnActionId(BASE));
      assertFalse(myParent.hasOwnActionId(DEPENDENT));
      assertTrue(myChild.hasOwnActionId(BASE));
      assertFalse(myChild.hasOwnActionId(DEPENDENT));
      assertFalse(myGrandChild.hasOwnActionId(BASE));
      assertFalse(myGrandChild.hasOwnActionId(DEPENDENT));

      // parent:
      //   BASE -> shortcut1
      //   DEPENDENT -> BASE
      // child:
      //   BASE -> shortcut1, shortcut2
      //   DEPENDENT -> child:BASE
      // grand-child:
      //   BASE -> shortcut1, shortcut2, +shortcutA  <-- change is here
      //   DEPENDENT -> grand-child:BASE
      myGrandChild.addShortcut(BASE, shortcutA);
      assertSameElements(myParent.getShortcuts(BASE), shortcut1);
      assertSameElements(myChild.getShortcuts(BASE), shortcut1, shortcut2);
      assertSameElements(myGrandChild.getShortcuts(BASE), shortcut1, shortcut2, shortcutA);
      assertSameElements(myParent.getShortcuts(DEPENDENT), shortcut1);
      assertSameElements(myChild.getShortcuts(DEPENDENT), shortcut1, shortcut2);
      assertSameElements(myGrandChild.getShortcuts(DEPENDENT), shortcut1, shortcut2, shortcutA);
      assertSameElements(myParent.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myGrandChild.getActionIds(shortcut1), BASE, DEPENDENT);
      assertSameElements(myParent.getActionIds(shortcut2));
      assertSameElements(myChild.getActionIds(shortcut2), BASE, DEPENDENT);
      assertSameElements(myGrandChild.getActionIds(shortcut2), BASE, DEPENDENT);
      assertSameElements(myParent.getActionIds(shortcutA));
      assertSameElements(myChild.getActionIds(shortcutA));
      assertSameElements(myGrandChild.getActionIds(shortcutA), BASE, DEPENDENT);
      assertTrue(myParent.hasOwnActionId(BASE));
      assertFalse(myParent.hasOwnActionId(DEPENDENT));
      assertTrue(myChild.hasOwnActionId(BASE));
      assertFalse(myChild.hasOwnActionId(DEPENDENT));
      assertTrue(myGrandChild.hasOwnActionId(BASE));
      assertFalse(myGrandChild.hasOwnActionId(DEPENDENT));
    }
    finally {
      KeymapManagerEx.getInstanceEx().unbindShortcuts(DEPENDENT);
    }
  }
}
/* * Copyright (c) 2005-2012, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.server.util; import java.io.*; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * taken from the commons file utils class. we didnt want to use the library. hence copied the source */ public class FileUtils { private static final int DEFAULT_BUFFER_SIZE = 1024 * 4; /** * Copies a whole directory to a new location preserving the file dates. * <p/> * This method copies the specified directory and all its child * directories and files to the specified destination. * The destination is the new location and name of the directory. * <p/> * The destination directory is created if it does not exist. * If the destination directory did exist, then this method merges * the source with the destination, with the source taking precedence. * * @param srcDir an existing directory to copy, must not be null * @param destDir the new directory, must not be null * @throws NullPointerException if source or destination is null * @throws java.io.IOException if source or destination is invalid * @since Commons IO 1.1 */ public static void copyDirectory(File srcDir, File destDir) throws IOException { copyDirectory(srcDir, destDir, false); } /** * Copies a whole directory to a new location. 
* <p/> * This method copies the contents of the specified source directory * to within the specified destination directory. * <p/> * The destination directory is created if it does not exist. * If the destination directory did exist, then this method merges * the source with the destination, with the source taking precedence. * * @param srcDir an existing directory to copy, must not be null * @param destDir the new directory, must not be null * @param preserveFileDate true if the file date of the copy * should be the same as the original * @throws NullPointerException if source or destination is null * @throws IOException if source or destination is invalid * @since Commons IO 1.1 */ public static void copyDirectory(File srcDir, File destDir, boolean preserveFileDate) throws IOException { if (srcDir == null) { throw new NullPointerException("Source must not be null"); } if (destDir == null) { throw new NullPointerException("Destination must not be null"); } if (!srcDir.exists()) { throw new FileNotFoundException("Source '" + srcDir + "' does not exist"); } if (!srcDir.isDirectory()) { throw new IOException("Source '" + srcDir + "' exists but is not a directory"); } if (srcDir.getCanonicalPath().equals(destDir.getCanonicalPath())) { throw new IOException("Source '" + srcDir + "' and destination '" + destDir + "' are the same"); } doCopyDirectory(srcDir, destDir, preserveFileDate); } /** * Internal copy directory method. 
* * @param srcDir the validated source directory, not null * @param destDir the validated destination directory, not null * @param preserveFileDate whether to preserve the file date * @throws IOException if an error occurs * @since Commons IO 1.1 */ private static void doCopyDirectory(File srcDir, File destDir, boolean preserveFileDate) throws IOException { if (destDir.exists()) { if (!destDir.isDirectory()) { throw new IOException("Destination '" + destDir + "' exists but is not a directory"); } } else { if (!destDir.mkdirs()) { throw new IOException("Destination '" + destDir + "' directory cannot be created"); } if (preserveFileDate) { boolean status = destDir.setLastModified(srcDir.lastModified()); if (!status) { throw new IOException("Failed to set the lastModified property: " + srcDir.getAbsolutePath()); } } } if (!destDir.canWrite()) { throw new IOException("Destination '" + destDir + "' cannot be written to"); } // recurse File[] files = srcDir.listFiles(); if (files == null) { // null if security restricted throw new IOException("Failed to list contents of " + srcDir); } for (File file : files) { File copiedFile = new File(destDir, file.getName()); if (file.isDirectory()) { doCopyDirectory(file, copiedFile, preserveFileDate); } else { copyFile(file, copiedFile, preserveFileDate); } } } /** * Copy file method. 
* * @param srcFile the validated source file, not null * @param destFile the validated destination file, not null * @param preserveFileDate whether to preserve the file date * @throws IOException if an error occurs */ public static void copyFile(File srcFile, File destFile, boolean preserveFileDate) throws IOException { if (destFile.exists() && destFile.isDirectory()) { throw new IOException("Destination '" + destFile + "' exists but is a directory"); } FileInputStream input = new FileInputStream(srcFile); try { FileOutputStream output = new FileOutputStream(destFile); try { copy(input, output); } finally { closeQuietly(output); } } finally { closeQuietly(input); } if (srcFile.length() != destFile.length()) { throw new IOException("Failed to copy full contents from '" + srcFile + "' to '" + destFile + "'"); } if (preserveFileDate) { boolean status = destFile.setLastModified(srcFile.lastModified()); if (!status) { throw new IOException("Failed to set the lastModified property: " + srcFile.getAbsolutePath()); } } } /** * Unconditionally close an <code>OutputStream</code>. * <p/> * Equivalent to {@link OutputStream#close()}, except any exceptions will be ignored. * This is typically used in finally blocks. * * @param output the OutputStream to close, may be null or already closed */ public static void closeQuietly(OutputStream output) { try { if (output != null) { output.close(); } } catch (IOException ioe) { // ignore } } /** * Unconditionally close an <code>InputStream</code>. * <p/> * Equivalent to {@link InputStream#close()}, except any exceptions will be ignored. * This is typically used in finally blocks. * * @param input the InputStream to close, may be null or already closed */ public static void closeQuietly(InputStream input) { try { if (input != null) { input.close(); } } catch (IOException ioe) { // ignore } } /** * Copy bytes from an <code>InputStream</code> to an * <code>OutputStream</code>. 
* <p/> * This method buffers the input internally, so there is no need to use a * <code>BufferedInputStream</code>. * * @param input the <code>InputStream</code> to read from * @param output the <code>OutputStream</code> to write to * @return the number of bytes copied * @throws NullPointerException if the input or output is null * @throws IOException if an I/O error occurs * @since Commons IO 1.1 */ public static int copy(InputStream input, OutputStream output) throws IOException { byte[] buffer = new byte[DEFAULT_BUFFER_SIZE]; int count = 0; int n; while (-1 != (n = input.read(buffer))) { output.write(buffer, 0, n); count += n; } return count; } /** * Deletes all files and subdirectories under dir. * Returns true if all deletions were successful. * If a deletion fails, the method stops attempting to delete and returns false. * * @param dir The directory to be deleted * @return true if the directory and its descendents were deleted */ public static boolean deleteDir(File dir) { if (dir.isDirectory()) { String[] children = dir.list(); if (children != null) { for (String child : children) { boolean success = deleteDir(new File(dir, child)); if (!success) { return false; } } } } // The directory is now empty so delete it return dir.delete(); } public static File getFile(File parentFile,final String fileName) { File[] files = parentFile.listFiles(); for ( File file : files ) { if (file.getName().equals(fileName)) { return file; } } return null; } public static List<String> readLinesToList(BufferedReader bufferedReader) throws IOException { if (bufferedReader == null) { return null; } else { List<String> list = new ArrayList<String>(); String line = null; while ((line = bufferedReader.readLine()) != null) { list.add(line); } return list; } } public static Map<String, String> readJarsWithMD5(BufferedReader bufferedReader) throws IOException { if (bufferedReader == null) { return null; } else { Map<String, String> jarmd5map = new HashMap<String, String>(); String line = 
null; while ((line = bufferedReader.readLine()) != null) { int index = line.indexOf(":"); jarmd5map.put(line.substring(0, index), line.substring(index + 1)); } return jarmd5map; } } }
/* $Id: FactoryCreateRule.java 299475 2004-06-26 17:41:32Z remm $
 *
 * Copyright 2001-2004 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.tomcat.util.digester;

import org.xml.sax.Attributes;

/**
 * <p>Rule implementation that uses an {@link ObjectCreationFactory} to create
 * a new object which it pushes onto the object stack.  When the element is
 * complete, the object will be popped.</p>
 *
 * <p>This rule is intended in situations where the element's attributes are
 * needed before the object can be created.  A common scenario is for the
 * ObjectCreationFactory implementation to use the attributes as parameters
 * in a call to either a factory method or to a non-empty constructor.
 */
public class FactoryCreateRule extends Rule {

    // ----------------------------------------------------------- Fields

    /** Should exceptions thrown by the factory be ignored? */
    private boolean ignoreCreateExceptions;

    /**
     * Stack tracking, per nested matched element, whether object creation
     * failed (Boolean.TRUE) or succeeded and pushed an object (Boolean.FALSE).
     * Only allocated lazily when ignoreCreateExceptions is enabled; end()
     * consults it to decide whether there is anything to pop.
     */
    private ArrayStack exceptionIgnoredStack;

    // ----------------------------------------------------------- Constructors

    /**
     * Construct a factory create rule that will use the specified
     * class name to create an {@link ObjectCreationFactory} which will
     * then be used to create an object and push it on the stack.
     *
     * @param digester The associated Digester
     * @param className Java class name of the object creation factory class
     *
     * @deprecated The digester instance is now set in the {@link Digester#addRule} method.
     * Use {@link #FactoryCreateRule(String className)} instead.
     */
    public FactoryCreateRule(Digester digester, String className) {
        this(className);
    }

    /**
     * Construct a factory create rule that will use the specified
     * class to create an {@link ObjectCreationFactory} which will
     * then be used to create an object and push it on the stack.
     *
     * @param digester The associated Digester
     * @param clazz Java class name of the object creation factory class
     *
     * @deprecated The digester instance is now set in the {@link Digester#addRule} method.
     * Use {@link #FactoryCreateRule(Class clazz)} instead.
     */
    public FactoryCreateRule(Digester digester, Class clazz) {
        this(clazz);
    }

    /**
     * Construct a factory create rule that will use the specified
     * class name (possibly overridden by the specified attribute if present)
     * to create an {@link ObjectCreationFactory}, which will then be used
     * to instantiate an object instance and push it onto the stack.
     *
     * @param digester The associated Digester
     * @param className Default Java class name of the factory class
     * @param attributeName Attribute name which, if present, contains an
     * override of the class name of the object creation factory to create.
     *
     * @deprecated The digester instance is now set in the {@link Digester#addRule} method.
     * Use {@link #FactoryCreateRule(String className, String attributeName)} instead.
     */
    public FactoryCreateRule(Digester digester, String className, String attributeName) {
        this(className, attributeName);
    }

    /**
     * Construct a factory create rule that will use the specified
     * class (possibly overridden by the specified attribute if present)
     * to create an {@link ObjectCreationFactory}, which will then be used
     * to instantiate an object instance and push it onto the stack.
     *
     * @param digester The associated Digester
     * @param clazz Default Java class name of the factory class
     * @param attributeName Attribute name which, if present, contains an
     * override of the class name of the object creation factory to create.
     *
     * @deprecated The digester instance is now set in the {@link Digester#addRule} method.
     * Use {@link #FactoryCreateRule(Class clazz, String attributeName)} instead.
     */
    public FactoryCreateRule(Digester digester, Class clazz, String attributeName) {
        this(clazz, attributeName);
    }

    /**
     * Construct a factory create rule using the given, already instantiated,
     * {@link ObjectCreationFactory}.
     *
     * @param digester The associated Digester
     * @param creationFactory called on to create the object.
     *
     * @deprecated The digester instance is now set in the {@link Digester#addRule} method.
     * Use {@link #FactoryCreateRule(ObjectCreationFactory creationFactory)} instead.
     */
    public FactoryCreateRule(Digester digester, ObjectCreationFactory creationFactory) {
        this(creationFactory);
    }

    /**
     * <p>Construct a factory create rule that will use the specified
     * class name to create an {@link ObjectCreationFactory} which will
     * then be used to create an object and push it on the stack.</p>
     *
     * <p>Exceptions thrown during the object creation process will be propagated.</p>
     *
     * @param className Java class name of the object creation factory class
     */
    public FactoryCreateRule(String className) {
        this(className, false);
    }

    /**
     * <p>Construct a factory create rule that will use the specified
     * class to create an {@link ObjectCreationFactory} which will
     * then be used to create an object and push it on the stack.</p>
     *
     * <p>Exceptions thrown during the object creation process will be propagated.</p>
     *
     * @param clazz Java class name of the object creation factory class
     */
    public FactoryCreateRule(Class clazz) {
        this(clazz, false);
    }

    /**
     * <p>Construct a factory create rule that will use the specified
     * class name (possibly overridden by the specified attribute if present)
     * to create an {@link ObjectCreationFactory}, which will then be used
     * to instantiate an object instance and push it onto the stack.</p>
     *
     * <p>Exceptions thrown during the object creation process will be propagated.</p>
     *
     * @param className Default Java class name of the factory class
     * @param attributeName Attribute name which, if present, contains an
     * override of the class name of the object creation factory to create.
     */
    public FactoryCreateRule(String className, String attributeName) {
        this(className, attributeName, false);
    }

    /**
     * <p>Construct a factory create rule that will use the specified
     * class (possibly overridden by the specified attribute if present)
     * to create an {@link ObjectCreationFactory}, which will then be used
     * to instantiate an object instance and push it onto the stack.</p>
     *
     * <p>Exceptions thrown during the object creation process will be propagated.</p>
     *
     * @param clazz Default Java class name of the factory class
     * @param attributeName Attribute name which, if present, contains an
     * override of the class name of the object creation factory to create.
     */
    public FactoryCreateRule(Class clazz, String attributeName) {
        this(clazz, attributeName, false);
    }

    /**
     * <p>Construct a factory create rule using the given, already instantiated,
     * {@link ObjectCreationFactory}.</p>
     *
     * <p>Exceptions thrown during the object creation process will be propagated.</p>
     *
     * @param creationFactory called on to create the object.
     */
    public FactoryCreateRule(ObjectCreationFactory creationFactory) {
        this(creationFactory, false);
    }

    /**
     * Construct a factory create rule that will use the specified
     * class name to create an {@link ObjectCreationFactory} which will
     * then be used to create an object and push it on the stack.
     *
     * @param className Java class name of the object creation factory class
     * @param ignoreCreateExceptions if true, exceptions thrown by the object
     * creation factory
     * will be ignored.
     */
    public FactoryCreateRule(String className, boolean ignoreCreateExceptions) {
        this(className, null, ignoreCreateExceptions);
    }

    /**
     * Construct a factory create rule that will use the specified
     * class to create an {@link ObjectCreationFactory} which will
     * then be used to create an object and push it on the stack.
     *
     * @param clazz Java class name of the object creation factory class
     * @param ignoreCreateExceptions if true, exceptions thrown by the
     * object creation factory
     * will be ignored.
     */
    public FactoryCreateRule(Class clazz, boolean ignoreCreateExceptions) {
        this(clazz, null, ignoreCreateExceptions);
    }

    /**
     * Construct a factory create rule that will use the specified
     * class name (possibly overridden by the specified attribute if present)
     * to create an {@link ObjectCreationFactory}, which will then be used
     * to instantiate an object instance and push it onto the stack.
     *
     * @param className Default Java class name of the factory class
     * @param attributeName Attribute name which, if present, contains an
     * override of the class name of the object creation factory to create.
     * @param ignoreCreateExceptions if true, exceptions thrown by the object
     * creation factory will be ignored.
     */
    public FactoryCreateRule(
                                String className,
                                String attributeName,
                                boolean ignoreCreateExceptions) {

        // This is the terminal constructor for the class-name-based chain;
        // all other String/Class constructors ultimately delegate here.
        this.className = className;
        this.attributeName = attributeName;
        this.ignoreCreateExceptions = ignoreCreateExceptions;
    }

    /**
     * Construct a factory create rule that will use the specified
     * class (possibly overridden by the specified attribute if present)
     * to create an {@link ObjectCreationFactory}, which will then be used
     * to instantiate an object instance and push it onto the stack.
     *
     * @param clazz Default Java class name of the factory class
     * @param attributeName Attribute name which, if present, contains an
     * override of the class name of the object creation factory to create.
     * @param ignoreCreateExceptions if true, exceptions thrown by the object
     * creation factory will be ignored.
     */
    public FactoryCreateRule(
                                Class clazz,
                                String attributeName,
                                boolean ignoreCreateExceptions) {

        this(clazz.getName(), attributeName, ignoreCreateExceptions);
    }

    /**
     * Construct a factory create rule using the given, already instantiated,
     * {@link ObjectCreationFactory}.
     *
     * @param creationFactory called on to create the object.
     * @param ignoreCreateExceptions if true, exceptions thrown by the object
     * creation factory will be ignored.
     */
    public FactoryCreateRule(
                            ObjectCreationFactory creationFactory,
                            boolean ignoreCreateExceptions) {

        // Terminal constructor for the pre-built-factory chain: the factory is
        // fixed up front, so getFactory() never performs reflective creation.
        this.creationFactory = creationFactory;
        this.ignoreCreateExceptions = ignoreCreateExceptions;
    }

    // ----------------------------------------------------- Instance Variables

    /**
     * The attribute containing an override class name if it is present.
     */
    protected String attributeName = null;

    /**
     * The Java class name of the ObjectCreationFactory to be created.
     * This class must have a no-arguments constructor.
     */
    protected String className = null;

    /**
     * The object creation factory we will use to instantiate objects
     * as required based on the attributes specified in the matched XML
     * element.
     */
    protected ObjectCreationFactory creationFactory = null;

    // --------------------------------------------------------- Public Methods

    /**
     * Process the beginning of this element.
     *
     * @param attributes The attribute list of this element
     */
    public void begin(String namespace, String name, Attributes attributes) throws Exception {

        if (ignoreCreateExceptions) {

            if (exceptionIgnoredStack == null) {
                // lazily allocated: only needed in ignore-exceptions mode
                exceptionIgnoredStack = new ArrayStack();
            }

            try {
                Object instance = getFactory(attributes).createObject(attributes);

                if (digester.log.isDebugEnabled()) {
                    digester.log.debug("[FactoryCreateRule]{" + digester.match +
                            "} New " + instance.getClass().getName());
                }
                digester.push(instance);
                // FALSE = an object was pushed; end() must pop it
                exceptionIgnoredStack.push(Boolean.FALSE);

            } catch (Exception e) {
                // log message and error
                if (digester.log.isInfoEnabled()) {
                    digester.log.info("[FactoryCreateRule] Create exception ignored: " +
                        ((e.getMessage() == null) ? e.getClass().getName() : e.getMessage()));
                    if (digester.log.isDebugEnabled()) {
                        digester.log.debug("[FactoryCreateRule] Ignored exception:", e);
                    }
                }
                // TRUE = creation failed, nothing pushed; end() must not pop
                exceptionIgnoredStack.push(Boolean.TRUE);
            }

        } else {
            // exceptions propagate to the caller in this mode
            Object instance = getFactory(attributes).createObject(attributes);

            if (digester.log.isDebugEnabled()) {
                digester.log.debug("[FactoryCreateRule]{" + digester.match +
                        "} New " + instance.getClass().getName());
            }
            digester.push(instance);
        }
    }

    /**
     * Process the end of this element.
     */
    public void end(String namespace, String name) throws Exception {

        // check if object was created
        // this only happens if an exception was thrown and we're ignoring them
        if (
                ignoreCreateExceptions &&
                exceptionIgnoredStack != null &&
                !(exceptionIgnoredStack.empty())) {

            if (((Boolean) exceptionIgnoredStack.pop()).booleanValue()) {
                // creation exception was ignored
                // nothing was put onto the stack
                if (digester.log.isTraceEnabled()) {
                    digester.log.trace("[FactoryCreateRule] No creation so no push so no pop");
                }
                return;
            }
        }

        Object top = digester.pop();
        if (digester.log.isDebugEnabled()) {
            digester.log.debug("[FactoryCreateRule]{" + digester.match +
                    "} Pop " + top.getClass().getName());
        }
    }

    /**
     * Clean up after parsing is complete.
     */
    public void finish() throws Exception {

        // Only discard the factory when it was resolved per-parse via an
        // attribute override; an explicitly supplied factory is kept.
        if (attributeName != null) {
            creationFactory = null;
        }
    }

    /**
     * Render a printable version of this Rule.
     */
    public String toString() {

        StringBuffer sb = new StringBuffer("FactoryCreateRule[");
        sb.append("className=");
        sb.append(className);
        sb.append(", attributeName=");
        sb.append(attributeName);
        if (creationFactory != null) {
            sb.append(", creationFactory=");
            sb.append(creationFactory);
        }
        sb.append("]");
        return (sb.toString());
    }

    // ------------------------------------------------------ Protected Methods

    /**
     * Return an instance of our associated object creation factory,
     * creating one if necessary.
     *
     * @param attributes Attributes passed to our factory creation element
     *
     * @exception Exception if any error occurs
     */
    protected ObjectCreationFactory getFactory(Attributes attributes)
            throws Exception {

        if (creationFactory == null) {
            // Resolve the factory class name, preferring the per-element
            // attribute override when one is configured and present.
            String realClassName = className;
            if (attributeName != null) {
                String value = attributes.getValue(attributeName);
                if (value != null) {
                    realClassName = value;
                }
            }
            if (digester.log.isDebugEnabled()) {
                digester.log.debug("[FactoryCreateRule]{" + digester.match +
                        "} New factory " + realClassName);
            }
            // Load through the digester's class loader so webapp-local
            // factory classes are visible.
            Class clazz = digester.getClassLoader().loadClass(realClassName);
            creationFactory = (ObjectCreationFactory)
                    clazz.newInstance();
            creationFactory.setDigester(digester);
        }
        return (creationFactory);
    }
}
/*
 * Copyright (c) 2009 - 2020 CaspersBox Web Services
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.cws.esolutions.web.controllers;
/*
 * Project: eSolutions_java_source
 * Package: com.cws.esolutions.web.controllers
 * File: CommonController.java
 *
 * History
 *
 * Author               Date                            Comments
 * ----------------------------------------------------------------------------
 * cws-khuntly          11/23/2008 22:39:20             Created.
 */
import org.slf4j.Logger;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Enumeration;
import org.slf4j.LoggerFactory;
import org.springframework.ui.Model;
import javax.mail.MessagingException;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpServletRequest;
import org.springframework.stereotype.Controller;
import org.apache.commons.lang.RandomStringUtils;
import org.springframework.mail.SimpleMailMessage;
import org.springframework.validation.BindingResult;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.view.RedirectView;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;

import com.cws.esolutions.web.Constants;
import com.cws.esolutions.core.utils.EmailUtils;
import com.cws.esolutions.security.dto.UserAccount;
import com.cws.esolutions.web.ApplicationServiceBean;
import com.cws.esolutions.core.utils.dto.EmailMessage;
import com.cws.esolutions.web.validators.EmailMessageValidator;
import com.cws.esolutions.core.config.xml.CoreConfigurationData;
/**
 * Handlers for pages shared across the application: the default landing
 * page, the unauthorized page, and the "contact the administrators" form.
 *
 * @author cws-khuntly
 * @version 1.0
 * @see org.springframework.stereotype.Controller
 */
@Controller
@RequestMapping("/common")
public class CommonController
{
    // View name for the authenticated landing page; injected via Spring config.
    private String homePage = null;
    // Core application/mail configuration; injected via Spring config.
    private CoreConfigurationData coreConfig = null;
    // Web-tier configuration (view names, validators, messages); injected.
    private ApplicationServiceBean appConfig = null;
    // Template for the auto-response sent after a contact-form submission.
    private SimpleMailMessage contactResponseEmail = null;

    private static final String CNAME = CommonController.class.getName();

    private static final Logger DEBUGGER = LoggerFactory.getLogger(Constants.DEBUGGER);
    private static final boolean DEBUG = DEBUGGER.isDebugEnabled();
    private static final Logger ERROR_RECORDER = LoggerFactory.getLogger(Constants.ERROR_LOGGER + CNAME);

    public final void setAppConfig(final ApplicationServiceBean value)
    {
        final String methodName = CommonController.CNAME + "#setAppConfig(final ApplicationServiceBean value)";

        if (DEBUG)
        {
            DEBUGGER.debug(methodName);
            DEBUGGER.debug("Value: {}", value);
        }

        this.appConfig = value;
    }

    public final void setCoreConfig(final CoreConfigurationData value)
    {
        final String methodName = CommonController.CNAME + "#setCoreConfig(final CoreConfigurationData value)";

        if (DEBUG)
        {
            DEBUGGER.debug(methodName);
            DEBUGGER.debug("Value: {}", value);
        }

        this.coreConfig = value;
    }

    public final void setHomePage(final String value)
    {
        final String methodName = CommonController.CNAME + "#setHomePage(final String value)";

        if (DEBUG)
        {
            DEBUGGER.debug(methodName);
            DEBUGGER.debug("Value: {}", value);
        }

        this.homePage = value;
    }

    public final void setContactResponseEmail(final SimpleMailMessage value)
    {
        final String methodName = CommonController.CNAME + "#setContactResponseEmail(final SimpleMailMessage value)";

        if (DEBUG)
        {
            DEBUGGER.debug(methodName);
            DEBUGGER.debug("Value: {}", value);
        }

        this.contactResponseEmail = value;
    }

    /**
     * Dumps session attributes, request attributes and request parameters to
     * the debug logger. Extracted from the four handler methods below, which
     * previously repeated this block verbatim; call only when DEBUG is true.
     *
     * NOTE(review): the per-handler labels previously varied slightly
     * ("Session Attribute"/"Request Attribute" in showDefaultPage vs plain
     * "Attribute" elsewhere); they are unified here as debug-only output.
     *
     * @param hRequest the current servlet request
     * @param hSession the current HTTP session
     */
    private static void dumpRequestDebugInfo(final HttpServletRequest hRequest, final HttpSession hSession)
    {
        DEBUGGER.debug("Dumping session content:");
        Enumeration<?> sessionEnumeration = hSession.getAttributeNames();

        while (sessionEnumeration.hasMoreElements())
        {
            String element = (String) sessionEnumeration.nextElement();

            DEBUGGER.debug("Attribute: {}; Value: {}", element, hSession.getAttribute(element));
        }

        DEBUGGER.debug("Dumping request content:");
        Enumeration<?> requestEnumeration = hRequest.getAttributeNames();

        while (requestEnumeration.hasMoreElements())
        {
            String element = (String) requestEnumeration.nextElement();

            DEBUGGER.debug("Attribute: {}; Value: {}", element, hRequest.getAttribute(element));
        }

        DEBUGGER.debug("Dumping request parameters:");
        Enumeration<?> paramsEnumeration = hRequest.getParameterNames();

        while (paramsEnumeration.hasMoreElements())
        {
            String element = (String) paramsEnumeration.nextElement();

            DEBUGGER.debug("Parameter: {}; Value: {}", element, hRequest.getParameter(element));
        }
    }

    /**
     * Shows the default landing page, or redirects to the logon flow when no
     * user account is present in the session.
     *
     * @param model the Spring MVC model
     * @return the home-page view name, or the logon redirect
     */
    @RequestMapping(value = "/default", method = RequestMethod.GET)
    public final String showDefaultPage(final Model model)
    {
        final String methodName = CommonController.CNAME + "#showDefaultPage(final Model model)";

        if (DEBUG)
        {
            DEBUGGER.debug(methodName);
        }

        final ServletRequestAttributes requestAttributes = (ServletRequestAttributes) RequestContextHolder.currentRequestAttributes();
        final HttpServletRequest hRequest = requestAttributes.getRequest();
        final HttpSession hSession = hRequest.getSession();
        final UserAccount userAccount = (UserAccount) hSession.getAttribute(Constants.USER_ACCOUNT);

        if (DEBUG)
        {
            DEBUGGER.debug("ServletRequestAttributes: {}", requestAttributes);
            DEBUGGER.debug("HttpServletRequest: {}", hRequest);
            DEBUGGER.debug("HttpSession: {}", hSession);
            DEBUGGER.debug("Session ID: {}", hSession.getId());
            DEBUGGER.debug("UserAccount: {}", userAccount);

            dumpRequestDebugInfo(hRequest, hSession);
        }

        // no authenticated account in the session: send the user to logon
        if (hSession.getAttribute(Constants.USER_ACCOUNT) == null)
        {
            return this.appConfig.getLogonRedirect(); // try it ?
        }

        if (DEBUG)
        {
            DEBUGGER.debug("ModelAndView: {}", model);
        }

        // in here, we're going to get all the messages to display and such
        return this.homePage;
    }

    /**
     * Shows the unauthorized-access page.
     *
     * @param model the Spring MVC model
     * @return the unauthorized-page view name
     */
    @RequestMapping(value = "/unauthorized", method = RequestMethod.GET)
    public final String showUnauthorizedPage(final Model model)
    {
        final String methodName = CommonController.CNAME + "#showUnauthorizedPage(final Model model)";

        if (DEBUG)
        {
            DEBUGGER.debug(methodName);
            DEBUGGER.debug("Model: {}", model);
        }

        final ServletRequestAttributes requestAttributes = (ServletRequestAttributes) RequestContextHolder.currentRequestAttributes();
        final HttpServletRequest hRequest = requestAttributes.getRequest();
        final HttpSession hSession = hRequest.getSession();
        final UserAccount userAccount = (UserAccount) hSession.getAttribute(Constants.USER_ACCOUNT);

        if (DEBUG)
        {
            DEBUGGER.debug("ServletRequestAttributes: {}", requestAttributes);
            DEBUGGER.debug("HttpServletRequest: {}", hRequest);
            DEBUGGER.debug("HttpSession: {}", hSession);
            DEBUGGER.debug("Session ID: {}", hSession.getId());
            DEBUGGER.debug("UserAccount: {}", userAccount);

            dumpRequestDebugInfo(hRequest, hSession);
        }

        return this.appConfig.getUnauthorizedPage();
    }

    /**
     * Shows the contact-administrators form, pre-populated with the service
     * email alias and an empty {@link EmailMessage} command object.
     *
     * @return the contact-page model and view
     */
    @RequestMapping(value = "/contact", method = RequestMethod.GET)
    public final ModelAndView showContactPage()
    {
        final String methodName = CommonController.CNAME + "#showContactPage()";

        if (DEBUG)
        {
            DEBUGGER.debug(methodName);
        }

        ModelAndView mView = new ModelAndView();

        final ServletRequestAttributes requestAttributes = (ServletRequestAttributes) RequestContextHolder.currentRequestAttributes();
        final HttpServletRequest hRequest = requestAttributes.getRequest();
        final HttpSession hSession = hRequest.getSession();
        final UserAccount userAccount = (UserAccount) hSession.getAttribute(Constants.USER_ACCOUNT);

        if (DEBUG)
        {
            DEBUGGER.debug("ServletRequestAttributes: {}", requestAttributes);
            DEBUGGER.debug("HttpServletRequest: {}", hRequest);
            DEBUGGER.debug("HttpSession: {}", hSession);
            DEBUGGER.debug("Session ID: {}", hSession.getId());
            DEBUGGER.debug("UserAccount: {}", userAccount);

            dumpRequestDebugInfo(hRequest, hSession);
        }

        mView.addObject("serviceEmail", this.coreConfig.getAppConfig().getEmailAliasId());
        mView.addObject(Constants.COMMAND, new EmailMessage());
        mView.setViewName(this.appConfig.getContactAdminsPage());

        return mView;
    }

    /**
     * Handles a contact-form submission: validates the message, sends it to
     * the administrators, and sends an auto-response back to the submitter.
     *
     * @param message the submitted email message
     * @param bindResult binding/validation results for the message
     * @return redirect with a success message, or the form with errors
     */
    @RequestMapping(value = "/contact", method = RequestMethod.POST)
    public final ModelAndView doSubmitMessage(@ModelAttribute("message") final EmailMessage message, final BindingResult bindResult)
    {
        final String methodName = CommonController.CNAME + "#doSubmitMessage(@ModelAttribute(\"message\") final EmailMessage message, final BindingResult bindResult)";

        if (DEBUG)
        {
            DEBUGGER.debug(methodName);
            DEBUGGER.debug("EmailMessage: {}", message);
            DEBUGGER.debug("BindingResult: {}", bindResult);
        }

        ModelAndView mView = new ModelAndView();

        final ServletRequestAttributes requestAttributes = (ServletRequestAttributes) RequestContextHolder.currentRequestAttributes();
        final HttpServletRequest hRequest = requestAttributes.getRequest();
        final HttpSession hSession = hRequest.getSession();
        final EmailMessageValidator validator = this.appConfig.getMessageValidator();
        // correlation id for debug tracing only; not persisted anywhere visible here
        final String emailId = RandomStringUtils.randomAlphanumeric(16);

        if (DEBUG)
        {
            DEBUGGER.debug("ServletRequestAttributes: {}", requestAttributes);
            DEBUGGER.debug("HttpServletRequest: {}", hRequest);
            DEBUGGER.debug("HttpSession: {}", hSession);
            DEBUGGER.debug("EmailMessageValidator: {}", validator);
            DEBUGGER.debug("emailId: {}", emailId);

            dumpRequestDebugInfo(hRequest, hSession);
        }

        validator.validate(message, bindResult);

        if (bindResult.hasErrors())
        {
            // validation failed
            ERROR_RECORDER.error("Errors: {}", bindResult.getAllErrors());

            mView.addObject(Constants.ERROR_MESSAGE, this.appConfig.getMessageValidationFailed());
            mView.addObject(Constants.BIND_RESULT, bindResult.getAllErrors());
            mView.addObject(Constants.COMMAND, message);
            mView.setViewName(this.appConfig.getContactAdminsPage());

            return mView;
        }

        try
        {
            // forward the submitted message to the administrators
            EmailUtils.sendEmailMessage(this.coreConfig.getMailConfig(), message, true);

            // build the auto-response from the configured template; the
            // template's "to" and body are format strings taking the
            // submitter's address / message body as arguments
            EmailMessage autoResponse = new EmailMessage();
            autoResponse.setIsAlert(false);
            autoResponse.setMessageSubject(this.contactResponseEmail.getSubject());
            autoResponse.setMessageTo(new ArrayList<String>(Arrays.asList(String.format(this.contactResponseEmail.getTo()[0], message.getEmailAddr().get(0)))));
            autoResponse.setEmailAddr(new ArrayList<String>(Arrays.asList(String.format(this.contactResponseEmail.getFrom()))));
            autoResponse.setMessageBody(String.format(this.contactResponseEmail.getText(), message.getEmailAddr(), message.getMessageBody()));

            if (DEBUG)
            {
                DEBUGGER.debug("EmailMessage: {}", autoResponse);
            }

            EmailUtils.sendEmailMessage(this.coreConfig.getMailConfig(), autoResponse, true);

            mView = new ModelAndView(new RedirectView());
            mView.setViewName(this.appConfig.getContactAdminsRedirect());
            mView.addObject(Constants.RESPONSE_MESSAGE, this.appConfig.getMessageEmailSentSuccess());
        }
        catch (MessagingException mx)
        {
            ERROR_RECORDER.error(mx.getMessage(), mx);

            mView = new ModelAndView(new RedirectView());
            mView.setViewName(this.appConfig.getContactAdminsPage());
            mView.addObject(Constants.ERROR_MESSAGE, this.appConfig.getMessageRequestProcessingFailure());
        }

        if (DEBUG)
        {
            DEBUGGER.debug("ModelAndView: {}", mView);
        }

        return mView;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package groovy.lang;

import junit.framework.TestCase;

import java.math.BigDecimal;
import java.util.Iterator;
import java.util.List;

/**
 * Tests for {@link Range} semantics (size, element access, containment,
 * sublists, equality and iteration) over both integer and BigDecimal ranges.
 *
 * @author James Strachan
 */
public class RangeTest extends TestCase {

    /** A range of n..m contains m - n + 1 elements (inclusive bounds). */
    public void testSize() {
        Range r = createRange(0, 10);
        assertEquals("Size of " + r, 11, r.size());
        r = createRange(0, 1);
        assertEquals("Size of " + r, 2, r.size());
        r = createRange(0, 0);
        assertEquals("Size of " + r, 1, r.size());

        // 2.1, 3.1, ..., 9.1 -- the upper bound 10.0 is not reached by whole steps
        r = createRange(new BigDecimal("2.1"), new BigDecimal("10.0"));
        assertEquals("Size of " + r, 8, r.size());
    }

    public void testProperties() {
        Range r = createRange(0, 10);
        assertEquals("from", 0, r.getFrom());
        assertEquals("to", 10, r.getTo());
    }

    /** get(i) returns from + i for integers, from + i (step 1) for decimals. */
    public void testGet() {
        Range r = createRange(10, 20);
        for (int i = 0; i < 10; i++) {
            Integer value = (Integer) r.get(i);
            assertEquals("Item at index: " + i, i + 10, value.intValue());
        }

        r = createRange(new BigDecimal("3.2"), new BigDecimal("9.9"));
        for (int i = 0; i < r.size(); i++) {
            BigDecimal value = (BigDecimal) r.get(i);
            assertEquals("Item at index: " + i,
                    new BigDecimal("3.2").add(new BigDecimal("" + i)), value);
        }
    }

    /**
     * Neither bound of an ObjectRange may be null.
     * Checks both the null-from and null-to cases, as the name promises.
     */
    public void testNullForFromOrToIsIllegal() {
        Comparable dontcare = Integer.valueOf(0);
        try {
            new ObjectRange((Comparable) null, dontcare);
            fail("Should have thrown IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            // worked
        }
        try {
            new ObjectRange(dontcare, (Comparable) null);
            fail("Should have thrown IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            // worked
        }
    }

    /** Indexes below 0 or at/after size() must raise IndexOutOfBoundsException. */
    public void testGetOutOfRange() {
        Range r = createRange(10, 20);
        try {
            r.get(-1);
            fail("Should have thrown IndexOutOfBoundsException");
        } catch (IndexOutOfBoundsException e) {
            // worked
        }
        try {
            r.get(11);
            fail("Should have thrown IndexOutOfBoundsException");
        } catch (IndexOutOfBoundsException e) {
            // worked
        }

        r = createRange(new BigDecimal("-4.3"), new BigDecimal("1.4"));
        try {
            r.get(-1);
            fail("Should have thrown IndexOutOfBoundsException");
        } catch (IndexOutOfBoundsException e) {
            // worked
        }
        try {
            r.get(7);
            fail("Should have thrown IndexOutOfBoundsException");
        } catch (IndexOutOfBoundsException e) {
            // worked
        }
    }

    /**
     * contains() tests membership in the stepped sequence; containsWithinBounds()
     * only tests the interval from..to, so values between steps still qualify.
     */
    public void testContains() {
        Range r = createRange(10, 20);
        assertTrue("contains 11", r.contains(Integer.valueOf(11)));
        assertTrue("contains 10", r.contains(Integer.valueOf(10)));
        assertTrue("contains 19", r.contains(Integer.valueOf(19)));
        assertFalse("contains 9", r.contains(Integer.valueOf(9)));
        assertFalse("contains 21", r.contains(Integer.valueOf(21)));
        assertFalse("contains 100", r.contains(Integer.valueOf(100)));
        assertFalse("contains -1", r.contains(Integer.valueOf(-1)));

        r = createRange(new BigDecimal("2.1"), new BigDecimal("10.0"));
        assertTrue("contains 9.1", r.contains(new BigDecimal("9.1")));
        assertFalse("contains 10.1", r.contains(new BigDecimal("10.1")));
        // 8.0 is inside the interval but not on a whole step from 2.1
        assertFalse("contains 8.0", r.contains(new BigDecimal("8.0")));
        assertTrue("containsWithinBounds 8.0", r.containsWithinBounds(new BigDecimal("8.0")));
        assertTrue("containsWithinBounds 9.9999", r.containsWithinBounds(new BigDecimal("9.9999")));
        assertTrue("containsWithinBounds 10.0", r.containsWithinBounds(new BigDecimal("10.0")));
        assertFalse("containsWithinBounds 10.0001", r.containsWithinBounds(new BigDecimal("10.0001")));
    }

    /** Membership checks must work across compatible numeric wrapper types. */
    public void testContainsWithLikeNumbers() {
        Range r = new ObjectRange(Integer.valueOf(1), Short.valueOf((short) 3));
        assertTrue("contains 2", r.contains(Integer.valueOf(2)));
        r = new ObjectRange(Float.valueOf(1.0f), Double.valueOf(3.0));
        assertTrue("contains 2.0d", r.contains(Double.valueOf(2.0)));
        // BigDecimal(double) is intentional here: 2.0 is exactly representable
        assertTrue("contains 2.0g", r.contains(new BigDecimal(2.0)));
        r = new ObjectRange(new BigDecimal(1.0), new BigDecimal(3.0));
        assertTrue("contains 2.0d", r.contains(Double.valueOf(2.0)));
        assertTrue("contains 2.0f", r.contains(Float.valueOf(2.0f)));
    }

    public void testContainsWithIncompatibleType() {
        Range r = new ObjectRange(Integer.valueOf(1), Short.valueOf((short) 3));
        assertFalse("shouldn't contain string", r.contains("String"));
    }

    /** subList(from, to) yields a Range with half-open JDK List semantics. */
    public void testSubList() {
        Range r = createRange(10, 20);
        List s = r.subList(2, 4);
        Range sr = (Range) s;
        assertEquals("from", 12, sr.getFrom());
        assertEquals("to", 13, sr.getTo());
        assertEquals("size", 2, sr.size());

        r = createRange(new BigDecimal("0.5"), new BigDecimal("8.5"));
        assertEquals("size", 9, r.size());
        s = r.subList(2, 5);
        sr = (Range) s;
        assertEquals("from", new BigDecimal("2.5"), sr.getFrom());
        assertEquals("to", new BigDecimal("4.5"), sr.getTo());
        assertTrue("contains 4.5", sr.contains(new BigDecimal("4.5")));
        assertFalse("contains 5.5", sr.contains(new BigDecimal("5.5")));
        assertEquals("size", 3, sr.size());
    }

    /** Equal ranges must agree on equals() and hashCode(); unequal ones should not. */
    public void testHashCodeAndEquals() {
        Range a = createRange(1, 11);
        Range b = createRange(1, 11);
        Range c = createRange(2, 11);

        assertEquals("hashcode", a.hashCode(), b.hashCode());
        assertTrue("hashcode", a.hashCode() != c.hashCode());

        assertEquals("a and b", a, b);
        assertFalse("a != c", a.equals(c));
    }

    /** Iteration visits every element in order, for int and BigDecimal ranges. */
    public void testIterator() {
        Range r = createRange(5, 11);
        int i = 5;
        for (Iterator it = r.iterator(); it.hasNext();) {
            assertEquals("equals to " + i, Integer.valueOf(i), (Integer) (it.next()));
            i++;
        }

        r = createRange(new BigDecimal("5.0"), new BigDecimal("11.0"));
        BigDecimal one = new BigDecimal("1.0");
        BigDecimal val = new BigDecimal("5.0");
        for (Iterator it = r.iterator(); it.hasNext();) {
            assertEquals("equals to " + val, val, (BigDecimal) (it.next()));
            val = val.add(one);
        }
    }

    /** Builds an inclusive integer range; subclasses may override the factory. */
    protected Range createRange(int from, int to) {
        return new ObjectRange(Integer.valueOf(from), Integer.valueOf(to));
    }

    /** Builds an inclusive BigDecimal range; subclasses may override the factory. */
    protected Range createRange(BigDecimal from, BigDecimal to) {
        return new ObjectRange(from, to);
    }

    /** Convenience overload so int expectations compare against boxed values. */
    protected void assertEquals(String msg, int expected, Object value) {
        assertEquals(msg, Integer.valueOf(expected), value);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.calcite.avatica;

import org.apache.calcite.avatica.remote.TypedValue;

import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Implementation of {@link java.sql.Statement}
 * for the Avatica engine.
 *
 * <p>A statement holds a server-side handle ({@link Meta.StatementHandle});
 * execution methods call through the connection's {@link Meta} and, on
 * {@link NoSuchStatementException}, recreate the handle and retry up to
 * {@code connection.maxRetriesPerExecute} times.</p>
 */
public abstract class AvaticaStatement implements Statement {
  /** The default value for {@link Statement#getFetchSize()}. */
  public static final int DEFAULT_FETCH_SIZE = 100;

  public final AvaticaConnection connection;

  /** Statement id; unique within connection. */
  public Meta.StatementHandle handle;

  protected boolean closed;

  /** Support for {@link #cancel()} method. */
  protected final AtomicBoolean cancelFlag;

  /**
   * Support for {@link #closeOnCompletion()} method.
   */
  protected boolean closeOnCompletion;

  /**
   * Current result set, or null if the statement is not executing anything.
   * Any method which modifies this member must synchronize
   * on the AvaticaStatement.
   */
  protected AvaticaResultSet openResultSet;

  /** Current update count. Same lifecycle as {@link #openResultSet}. */
  protected long updateCount;

  private int queryTimeoutMillis;
  final int resultSetType;
  final int resultSetConcurrency;
  final int resultSetHoldability;
  private int fetchSize = DEFAULT_FETCH_SIZE;
  private int fetchDirection;
  // 0 means "no limit" at the JDBC level; see executeInternal for the mapping.
  protected long maxRowCount = 0;

  private Meta.Signature signature;

  // SQL strings accumulated via addBatch, sent in one RPC by executeBatchInternal.
  private final List<String> batchedSql;

  protected void setSignature(Meta.Signature signature) {
    this.signature = signature;
  }

  protected Meta.Signature getSignature() {
    return signature;
  }

  public Meta.StatementType getStatementType() {
    return signature.statementType;
  }

  /**
   * Creates an AvaticaStatement.
   *
   * @param connection Connection
   * @param h Statement handle
   * @param resultSetType Result set type
   * @param resultSetConcurrency Result set concurrency
   * @param resultSetHoldability Result set holdability
   */
  protected AvaticaStatement(AvaticaConnection connection,
      Meta.StatementHandle h, int resultSetType, int resultSetConcurrency,
      int resultSetHoldability) {
    this(connection, h, resultSetType, resultSetConcurrency,
        resultSetHoldability, null);
  }

  protected AvaticaStatement(AvaticaConnection connection,
      Meta.StatementHandle h, int resultSetType, int resultSetConcurrency,
      int resultSetHoldability, Meta.Signature signature) {
    this.connection = Objects.requireNonNull(connection);
    this.resultSetType = resultSetType;
    this.resultSetConcurrency = resultSetConcurrency;
    this.resultSetHoldability = resultSetHoldability;
    this.signature = signature;
    this.closed = false;
    if (h == null) {
      // No handle supplied: ask the server for a fresh one.
      final Meta.ConnectionHandle ch = connection.handle;
      h = connection.meta.createStatement(ch);
    }
    // Register in the connection so the handle id resolves back to this object.
    connection.statementMap.put(h.id, this);
    this.handle = h;
    this.batchedSql = new ArrayList<>();
    try {
      this.cancelFlag = connection.getCancelFlag(h);
    } catch (NoSuchStatementException e) {
      // The handle was just created/registered above, so this should not occur.
      throw new AssertionError("no statement", e);
    }
  }

  /** Returns the identifier of the statement, unique within its connection. */
  public int getId() {
    return handle.id;
  }

  // Guard: execute(String)-style methods are illegal on prepared/callable
  // statements per the JDBC contract.
  private void checkNotPreparedOrCallable(String s) throws SQLException {
    if (this instanceof PreparedStatement
        || this instanceof CallableStatement) {
      throw connection.helper.createException("Cannot call " + s
          + " on prepared or callable statement");
    }
  }

  /**
   * Prepares and executes {@code sql}, retrying (with a fresh server-side
   * handle) if the server reports the statement no longer exists.
   * Results become visible via {@link #openResultSet} / {@link #updateCount}.
   */
  protected void executeInternal(String sql) throws SQLException {
    // reset previous state before moving forward.
    this.updateCount = -1;
    try {
      // In JDBC, maxRowCount = 0 means no limit; in prepare it means LIMIT 0
      final long maxRowCount1 = maxRowCount <= 0 ? -1 : maxRowCount;
      for (int i = 0; i < connection.maxRetriesPerExecute; i++) {
        try {
          @SuppressWarnings("unused")
          Meta.ExecuteResult x =
              connection.prepareAndExecuteInternal(this, sql, maxRowCount1);
          return;
        } catch (NoSuchStatementException e) {
          // Server lost our handle; get a new one and try again.
          resetStatement();
        }
      }
    } catch (RuntimeException e) {
      throw connection.helper.createException("Error while executing SQL \""
          + sql + "\": " + e.getMessage(), e);
    }

    throw new RuntimeException("Failed to successfully execute query after "
        + connection.maxRetriesPerExecute + " attempts.");
  }

  /**
   * Executes a collection of updates in a single batch RPC.
   *
   * @return an array of long mapping to the update count per SQL command.
   */
  protected long[] executeBatchInternal() throws SQLException {
    for (int i = 0; i < connection.maxRetriesPerExecute; i++) {
      try {
        return connection.prepareAndUpdateBatch(this, batchedSql).updateCounts;
      } catch (NoSuchStatementException e) {
        resetStatement();
      }
    }
    throw new RuntimeException("Failed to successfully execute batch update after "
        +  connection.maxRetriesPerExecute + " attempts");
  }

  /**
   * Discards the current server-side handle and registers a new one, keeping
   * this client-side object. Used when the server reports
   * {@link NoSuchStatementException} mid-execute.
   */
  protected void resetStatement() {
    // Invalidate the old statement
    connection.statementMap.remove(handle.id);
    connection.flagMap.remove(handle.id);
    // Get a new one
    final Meta.ConnectionHandle ch = new Meta.ConnectionHandle(connection.id);
    Meta.StatementHandle h = connection.meta.createStatement(ch);
    // Cache it in the connection
    connection.statementMap.put(h.id, this);
    // Update the local state and try again
    this.handle = h;
  }

  /**
   * Re-initialize the ResultSet on the server with the given state.
   *
   * @param state The ResultSet's state.
   * @param offset Offset into the desired ResultSet
   * @return True if the ResultSet has more results, false if there are no more results.
   */
  protected boolean syncResults(QueryState state, long offset)
      throws NoSuchStatementException {
    return connection.meta.syncResults(handle, state, offset);
  }

  // implement Statement

  public boolean execute(String sql) throws SQLException {
    checkNotPreparedOrCallable("execute(String)");
    executeInternal(sql);
    // Result set is null for DML or DDL.
    // Result set is closed if user cancelled the query.
    return openResultSet != null && !openResultSet.isClosed();
  }

  public ResultSet executeQuery(String sql) throws SQLException {
    checkNotPreparedOrCallable("executeQuery(String)");
    try {
      executeInternal(sql);
      if (openResultSet == null) {
        throw connection.helper.createException(
            "Statement did not return a result set");
      }
      return openResultSet;
    } catch (RuntimeException e) {
      throw connection.helper.createException("Error while executing SQL \""
          + sql + "\": " + e.getMessage(), e);
    }
  }

  public final int executeUpdate(String sql) throws SQLException {
    return AvaticaUtils.toSaturatedInt(executeLargeUpdate(sql));
  }

  public long executeLargeUpdate(String sql) throws SQLException {
    checkNotPreparedOrCallable("executeUpdate(String)");
    executeInternal(sql);
    return updateCount;
  }

  public synchronized void close() throws SQLException {
    try {
      close_();
    } catch (RuntimeException e) {
      throw connection.helper.createException("While closing statement", e);
    }
  }

  /**
   * Idempotent close: releases the client result set, tells the server to
   * close the statement, and always unregisters from the connection maps.
   */
  protected void close_() {
    if (!closed) {
      closed = true;
      if (openResultSet != null) {
        AvaticaResultSet c = openResultSet;
        openResultSet = null;
        c.close();
      }
      try {
        // inform the server to close the resource
        connection.meta.closeStatement(handle);
      } finally {
        // make sure we don't leak on our side
        connection.statementMap.remove(handle.id);
        connection.flagMap.remove(handle.id);
      }
      // If onStatementClose throws, this method will throw an exception (later
      // converted to SQLException), but this statement still gets closed.
      connection.driver.handler.onStatementClose(this);
    }
  }

  public int getMaxFieldSize() throws SQLException {
    throw connection.helper.unsupported();
  }

  public void setMaxFieldSize(int max) throws SQLException {
    throw connection.helper.unsupported();
  }

  public final int getMaxRows() {
    return AvaticaUtils.toSaturatedInt(getLargeMaxRows());
  }

  public long getLargeMaxRows() {
    return maxRowCount;
  }

  public final void setMaxRows(int maxRowCount) throws SQLException {
    setLargeMaxRows(maxRowCount);
  }

  public void setLargeMaxRows(long maxRowCount) throws SQLException {
    if (maxRowCount < 0) {
      throw connection.helper.createException(
          "illegal maxRows value: " + maxRowCount);
    }
    this.maxRowCount = maxRowCount;
  }

  public void setEscapeProcessing(boolean enable) throws SQLException {
    throw connection.helper.unsupported();
  }

  public int getQueryTimeout() throws SQLException {
    long timeoutSeconds = getQueryTimeoutMillis() / 1000;
    if (timeoutSeconds > Integer.MAX_VALUE) {
      return Integer.MAX_VALUE;
    }
    if (timeoutSeconds == 0 && getQueryTimeoutMillis() > 0) {
      // Don't return timeout=0 if e.g. timeoutMillis=500. 0 is special.
      return 1;
    }
    return (int) timeoutSeconds;
  }

  // Package-private millisecond accessor backing the JDBC seconds-based API.
  int getQueryTimeoutMillis() {
    return queryTimeoutMillis;
  }

  public void setQueryTimeout(int seconds) throws SQLException {
    if (seconds < 0) {
      throw connection.helper.createException(
          "illegal timeout value " + seconds);
    }
    setQueryTimeoutMillis(seconds * 1000);
  }

  void setQueryTimeoutMillis(int millis) {
    this.queryTimeoutMillis = millis;
  }

  public synchronized void cancel() throws SQLException {
    if (openResultSet != null) {
      openResultSet.cancel();
    }
    // If there is an open result set, it probably just set the same flag.
    cancelFlag.compareAndSet(false, true);
  }

  public SQLWarning getWarnings() throws SQLException {
    return null; // no warnings, since warnings are not supported
  }

  public void clearWarnings() throws SQLException {
    // no-op since warnings are not supported
  }

  public void setCursorName(String name) throws SQLException {
    throw connection.helper.unsupported();
  }

  public ResultSet getResultSet() throws SQLException {
    // NOTE: result set becomes visible in this member while
    // executeQueryInternal is still in progress, and before it has
    // finished executing. Its internal state may not be ready for API
    // calls. JDBC never claims to be thread-safe! (Except for calls to the
    // cancel method.) It is not possible to synchronize, because it would
    // block 'cancel'.
    return openResultSet;
  }

  public int getUpdateCount() throws SQLException {
    return AvaticaUtils.toSaturatedInt(updateCount);
  }

  public long getLargeUpdateCount() throws SQLException {
    return updateCount;
  }

  public boolean getMoreResults() throws SQLException {
    throw connection.helper.unsupported();
  }

  public void setFetchDirection(int direction) throws SQLException {
    this.fetchDirection = direction;
  }

  public int getFetchDirection() {
    return fetchDirection;
  }

  public void setFetchSize(int rows) throws SQLException {
    this.fetchSize = rows;
  }

  public int getFetchSize() {
    return fetchSize;
  }

  public int getResultSetConcurrency() throws SQLException {
    throw connection.helper.unsupported();
  }

  public int getResultSetType() throws SQLException {
    throw connection.helper.unsupported();
  }

  public void addBatch(String sql) throws SQLException {
    this.batchedSql.add(Objects.requireNonNull(sql));
  }

  public void clearBatch() throws SQLException {
    this.batchedSql.clear();
  }

  public int[] executeBatch() throws SQLException {
    return AvaticaUtils.toSaturatedInts(executeLargeBatch());
  }

  public long[] executeLargeBatch() throws SQLException {
    try {
      return executeBatchInternal();
    } finally {
      // If we failed to send this batch, that's a problem for the user to handle, not us.
      // Make sure we always clear the statements we collected to submit in one RPC.
      clearBatch();
    }
  }

  public AvaticaConnection getConnection() {
    return connection;
  }

  public boolean getMoreResults(int current) throws SQLException {
    throw connection.helper.unsupported();
  }

  public ResultSet getGeneratedKeys() throws SQLException {
    throw connection.helper.unsupported();
  }

  public int executeUpdate(String sql, int autoGeneratedKeys)
      throws SQLException {
    throw connection.helper.unsupported();
  }

  public int executeUpdate(String sql, int[] columnIndexes)
      throws SQLException {
    throw connection.helper.unsupported();
  }

  public int executeUpdate(String sql, String[] columnNames)
      throws SQLException {
    throw connection.helper.unsupported();
  }

  public boolean execute(String sql, int autoGeneratedKeys)
      throws SQLException {
    throw connection.helper.unsupported();
  }

  public boolean execute(String sql, int[] columnIndexes)
      throws SQLException {
    throw connection.helper.unsupported();
  }

  public boolean execute(String sql, String[] columnNames)
      throws SQLException {
    throw connection.helper.unsupported();
  }

  public int getResultSetHoldability() throws SQLException {
    throw connection.helper.unsupported();
  }

  public boolean isClosed() throws SQLException {
    return closed;
  }

  public void setPoolable(boolean poolable) throws SQLException {
    throw connection.helper.unsupported();
  }

  public boolean isPoolable() throws SQLException {
    throw connection.helper.unsupported();
  }

  // implements java.sql.Statement.closeOnCompletion (added in JDK 1.7)
  public void closeOnCompletion() throws SQLException {
    closeOnCompletion = true;
  }

  // implements java.sql.Statement.isCloseOnCompletion (added in JDK 1.7)
  public boolean isCloseOnCompletion() throws SQLException {
    return closeOnCompletion;
  }

  // implement Wrapper

  public <T> T unwrap(Class<T> iface) throws SQLException {
    if (iface.isInstance(this)) {
      return iface.cast(this);
    }
    throw connection.helper.createException(
        "does not implement '" + iface + "'");
  }

  public boolean isWrapperFor(Class<?> iface) throws SQLException {
    return iface.isInstance(this);
  }

  /**
   * Executes a prepared statement.
   *
   * @param signature Parsed statement
   * @param isUpdate if the execute is for an update
   *
   * @return as specified by {@link java.sql.Statement#execute(String)}
   * @throws java.sql.SQLException if a database error occurs
   */
  protected boolean executeInternal(Meta.Signature signature, boolean isUpdate)
      throws SQLException {
    ResultSet resultSet = executeQueryInternal(signature, isUpdate);
    // user may have cancelled the query
    if (resultSet.isClosed()) {
      return false;
    }
    return true;
  }

  /**
   * Executes a prepared query, closing any previously open result set.
   *
   * @param signature Parsed query
   * @param isUpdate If the execute is for an update
   * @return Result set
   * @throws java.sql.SQLException if a database error occurs
   */
  protected ResultSet executeQueryInternal(Meta.Signature signature,
      boolean isUpdate) throws SQLException {
    return connection.executeQueryInternal(this, signature, null, null, isUpdate);
  }

  /**
   * Called by each child result set when it is closed.
   *
   * @param resultSet Result set or cell set
   */
  void onResultSetClose(ResultSet resultSet) {
    if (closeOnCompletion) {
      close_();
    }
  }

  /** Returns the list of values of this statement's parameters.
   *
   * <p>Called at execute time. Not a public API.</p>
   *
   * <p>The default implementation returns the empty list, because non-prepared
   * statements have no parameters.</p>
   *
   * @see org.apache.calcite.avatica.AvaticaConnection.Trojan#getParameterValues(AvaticaStatement)
   */
  protected List<TypedValue> getParameterValues() {
    return Collections.emptyList();
  }

  /** Returns a list of bound parameter values.
   *
   * <p>If any of the parameters have not been bound, throws.
   * If parameters have been bound to null, the value in the list is null.
   */
  protected List<TypedValue> getBoundParameterValues() throws SQLException {
    final List<TypedValue> parameterValues = getParameterValues();
    for (Object parameterValue : parameterValues) {
      if (parameterValue == null) {
        throw new SQLException("unbound parameter");
      }
    }
    return parameterValues;
  }
}

// End AvaticaStatement.java
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.geo.RandomGeoGenerator; import org.hamcrest.CoreMatchers; import java.io.IOException; import java.util.Collection; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_Z_VALUE; import static 
org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { @Override protected Collection<Class<? extends Plugin>> getPlugins() { return pluginList(InternalSettingsPlugin.class); } public void testGeoHashValue() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("point", stringEncode(1.3, 1.2)) .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); } public void testLatLonValuesStored() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject() .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), 
notNullValue()); } public void testArrayLatLonValues() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("doc_values", false); String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .startArray("point") .startObject().field("lat", 1.2).field("lon", 1.3).endObject() .startObject().field("lat", 1.4).field("lon", 1.5).endObject() .endArray() .endObject()), XContentType.JSON)); // doc values are enabled by default, but in this test we disable them; we should only have 2 points assertThat(doc.rootDoc().getFields("point"), notNullValue()); assertThat(doc.rootDoc().getFields("point").length, equalTo(4)); } public void testLatLonInOneValue() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() .field("point", "1.2,1.3") .endObject()), XContentType.JSON)); assertThat(doc.rootDoc().getField("point"), notNullValue()); } public void testLatLonStringWithZValue() throws Exception { XContentBuilder xContentBuilder = 
// NOTE(review): this chunk begins mid-method — the builder chain below continues a
// test whose opening lines are outside this view (presumably a lat/lon-with-z test
// with ignore_z_value enabled; confirm against the preceding chunk).
XContentFactory.jsonBuilder().startObject().startObject("type")
    .startObject("properties").startObject("point").field("type", "geo_point")
    .field(IGNORE_Z_VALUE.getPreferredName(), true);
String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
    new CompressedXContent(mapping));
ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
    .bytes(XContentFactory.jsonBuilder()
        .startObject()
        .field("point", "1.2,1.3,10.0")
        .endObject()),
    XContentType.JSON));
assertThat(doc.rootDoc().getField("point"), notNullValue());
}

/**
 * A "lat,lon,z" string with ignore_z_value=false must be rejected with a
 * MapperParsingException naming the [ignore_z_value] parameter.
 */
public void testLatLonStringWithZValueException() throws Exception {
    XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("point").field("type", "geo_point")
        .field(IGNORE_Z_VALUE.getPreferredName(), false);
    String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endObject());
    DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
        new CompressedXContent(mapping));
    SourceToParse source = SourceToParse.source("test", "type", "1", BytesReference
        .bytes(XContentFactory.jsonBuilder()
            .startObject()
            .field("point", "1.2,1.3,10.0")
            .endObject()),
        XContentType.JSON);
    Exception e = expectThrows(MapperParsingException.class, () -> defaultMapper.parse(source));
    assertThat(e.getCause().getMessage(), containsString("but [ignore_z_value] parameter is [false]"));
}

/** A "lat,lon" string parses into an indexed field when store=true. */
public void testLatLonInOneValueStored() throws Exception {
    XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("point").field("type", "geo_point");
    String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject());
    DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
        new CompressedXContent(mapping));
    ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
        .bytes(XContentFactory.jsonBuilder()
            .startObject()
            .field("point", "1.2,1.3")
            .endObject()),
        XContentType.JSON));
    assertThat(doc.rootDoc().getField("point"), notNullValue());
}

/** An array of "lat,lon" strings with doc_values disabled yields 4 fields (2 per point here). */
public void testLatLonInOneValueArray() throws Exception {
    XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("point").field("type", "geo_point").field("doc_values", false);
    String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject());
    DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
        new CompressedXContent(mapping));
    ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
        .bytes(XContentFactory.jsonBuilder()
            .startObject()
            .startArray("point")
            .value("1.2,1.3")
            .value("1.4,1.5")
            .endArray()
            .endObject()),
        XContentType.JSON));
    // doc values are enabled by default, but in this test we disable them; we should only have 2 points
    assertThat(doc.rootDoc().getFields("point"), notNullValue());
    assertThat(doc.rootDoc().getFields("point").length, equalTo(4));
}

/** A [lon, lat] number array parses into a geo_point field. */
public void testLonLatArray() throws Exception {
    XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("point").field("type", "geo_point");
    String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endObject());
    DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
        new CompressedXContent(mapping));
    ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
        .bytes(XContentFactory.jsonBuilder()
            .startObject()
            .startArray("point").value(1.3).value(1.2).endArray()
            .endObject()),
        XContentType.JSON));
    assertThat(doc.rootDoc().getField("point"), notNullValue());
}

/** A [lon, lat] array is mapped via a dynamic template matching "point*". */
public void testLonLatArrayDynamic() throws Exception {
    XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startArray("dynamic_templates").startObject().startObject("point").field("match", "point*")
        .startObject("mapping").field("type", "geo_point");
    String mapping = Strings.toString(xContentBuilder.endObject().endObject().endObject().endArray().endObject().endObject());
    DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
        new CompressedXContent(mapping));
    ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
        .bytes(XContentFactory.jsonBuilder()
            .startObject()
            .startArray("point").value(1.3).value(1.2).endArray()
            .endObject()),
        XContentType.JSON));
    assertThat(doc.rootDoc().getField("point"), notNullValue());
}

/** A stored [lon, lat] array yields 3 fields for the single point. */
public void testLonLatArrayStored() throws Exception {
    XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("point").field("type", "geo_point");
    String mapping = Strings.toString(xContentBuilder.field("store", true).endObject().endObject().endObject().endObject());
    DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
        new CompressedXContent(mapping));
    ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
        .bytes(XContentFactory.jsonBuilder()
            .startObject()
            .startArray("point").value(1.3).value(1.2).endArray()
            .endObject()),
        XContentType.JSON));
    assertThat(doc.rootDoc().getField("point"), notNullValue());
    assertThat(doc.rootDoc().getFields("point").length, equalTo(3));
}

/** A nested array of [lon, lat] arrays with store=true and doc_values=false yields 4 fields. */
public void testLonLatArrayArrayStored() throws Exception {
    XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("point").field("type", "geo_point");
    String mapping = Strings.toString(xContentBuilder.field("store", true).field("doc_values", false).endObject().endObject()
        .endObject().endObject());
    DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type",
        new CompressedXContent(mapping));
    ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
        .bytes(XContentFactory.jsonBuilder()
            .startObject()
            .startArray("point")
            .startArray().value(1.3).value(1.2).endArray()
            .startArray().value(1.5).value(1.4).endArray()
            .endArray()
            .endObject()),
        XContentType.JSON));
    assertThat(doc.rootDoc().getFields("point"), notNullValue());
    assertThat(doc.rootDoc().getFields("point").length, CoreMatchers.equalTo(4));
}

/**
 * Test that the ignore_z_value ("accept z value") parameter correctly parses,
 * for both the explicit "true" and explicit "false" string forms.
 */
public void testIgnoreZValue() throws IOException {
    String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
        .startObject("properties").startObject("location")
        .field("type", "geo_point")
        .field(IGNORE_Z_VALUE.getPreferredName(), "true")
        .endObject().endObject()
        .endObject().endObject());
    DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
        .parse("type1", new CompressedXContent(mapping));
    FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location");
    assertThat(fieldMapper, instanceOf(GeoPointFieldMapper.class));
    boolean ignoreZValue = ((GeoPointFieldMapper)fieldMapper).ignoreZValue().value();
    assertThat(ignoreZValue, equalTo(true));
    // explicit false accept_z_value test
    mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
        .startObject("properties").startObject("location")
        .field("type", "geo_point")
        .field(IGNORE_Z_VALUE.getPreferredName(), "false")
        .endObject().endObject()
        .endObject().endObject());
    defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1",
        new CompressedXContent(mapping));
    fieldMapper = defaultMapper.mappers().getMapper("location");
    assertThat(fieldMapper, instanceOf(GeoPointFieldMapper.class));
    ignoreZValue = ((GeoPointFieldMapper)fieldMapper).ignoreZValue().value();
    assertThat(ignoreZValue, equalTo(false));
}

/**
 * Indexes random points into a geo_point field carrying keyword sub-fields and
 * verifies both sub-fields are queryable via stored-field requests.
 */
public void testMultiField() throws Exception {
    int numDocs = randomIntBetween(10, 100);
    String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("pin").startObject("properties").startObject("location")
        .field("type", "geo_point")
        .startObject("fields")
        .startObject("geohash").field("type", "keyword").endObject() // test geohash as keyword
        .startObject("latlon").field("type", "keyword").endObject() // test geohash as string
        .endObject()
        .endObject().endObject().endObject().endObject());
    CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test")
        .addMapping("pin", mapping, XContentType.JSON);
    mappingRequest.execute().actionGet();

    // create index and add random test points
    client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
    for (int i=0; i<numDocs; ++i) {
        final GeoPoint pt = RandomGeoGenerator.randomPoint(random());
        client().prepareIndex("test", "pin").setSource(jsonBuilder().startObject().startObject("location").field("lat", pt.lat())
            .field("lon", pt.lon()).endObject().endObject()).setRefreshPolicy(IMMEDIATE).get();
    }

    // TODO: these tests are bogus and need to be fixed (they only check hit
    // counts of match_all, not the sub-field contents)
    // query by geohash subfield
    SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash").setQuery(matchAllQuery()).execute().actionGet();
    assertEquals(numDocs, searchResponse.getHits().getTotalHits());
    // query by latlon subfield
    searchResponse = client().prepareSearch().addStoredField("location.latlon").setQuery(matchAllQuery()).execute().actionGet();
    assertEquals(numDocs, searchResponse.getHits().getTotalHits());
}

/** An empty field name in the mapping must be rejected. */
public void testEmptyName() throws Exception {
    // after 5.x
    String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties").startObject("").field("type", "geo_point").endObject().endObject()
        .endObject().endObject());
    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
        () -> parser.parse("type", new CompressedXContent(mapping))
    );
    assertThat(e.getMessage(), containsString("name cannot be empty string"));
}
}
/*
 * Copyright (C) 2010-2013 Sebastiano Vigna
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/* NOTE: this class is expanded from a fastutil type-specific template; the
 * template's section-marker comments have been condensed into this note. */
package it.unimi.dsi.fastutil.longs;

import it.unimi.dsi.fastutil.Arrays;

import java.util.NoSuchElementException;

/** A type-specific array-based FIFO queue, supporting also deque operations.
 *
 * <P>Instances of this class represent a FIFO queue using a backing
 * array in a circular way. The array is enlarged and shrunk as needed. You can use the {@link #trim()} method
 * to reduce its memory usage, if necessary.
 *
 * <P>This class provides additional methods that implement a <em>deque</em> (double-ended queue).
 */
public class LongArrayFIFOQueue extends AbstractLongPriorityQueue {

	/** The standard initial capacity of a queue. */
	public final static int INITIAL_CAPACITY = 4;

	/** The backing array, used as a circular buffer. */
	protected long[] array = LongArrays.EMPTY_ARRAY;

	/** The current (cached) length of {@link #array}. */
	protected int length;

	/** The start position in {@link #array}. It is always strictly smaller than {@link #length}. */
	protected int start;

	/** The end position in {@link #array}. It is always strictly smaller than {@link #length}.
	 * Might be actually smaller than {@link #start} because {@link #array} is used cyclically. */
	protected int end;

	/** Creates a new empty queue with given capacity.
	 *
	 * @param capacity the initial capacity of this queue.
	 * @throws IllegalArgumentException if {@code capacity} is negative.
	 */
	public LongArrayFIFOQueue( final int capacity ) {
		if ( capacity < 0 ) throw new IllegalArgumentException( "Initial capacity (" + capacity + ") is negative" );
		// BUG FIX: allocate at least one slot. With a zero-length backing array the
		// first enqueue()/enqueueFirst() would write past the array before expand()
		// ever ran (ArrayIndexOutOfBoundsException), because the circular-buffer
		// invariant requires start/end to be strictly smaller than length.
		array = new long[ Math.max( 1, capacity ) ];
		length = array.length;
	}

	/** Creates a new empty queue with standard {@linkplain #INITIAL_CAPACITY initial capacity}. */
	public LongArrayFIFOQueue() {
		this( INITIAL_CAPACITY );
	}

	/** Returns <code>null</code> (FIFO queues have no comparator).
	 * @return <code>null</code>.
	 */
	@Override
	public LongComparator comparator() {
		return null;
	}

	@Override
	public long dequeueLong() {
		if ( start == end ) throw new NoSuchElementException();
		final long t = array[ start ];
		if ( ++start == length ) start = 0; // wrap around
		reduce();
		return t;
	}

	/** Dequeues the {@linkplain #last() last} element from the queue.
	 *
	 * @return the dequeued element.
	 * @throws NoSuchElementException if the queue is empty.
	 */
	public long dequeueLastLong() {
		if ( start == end ) throw new NoSuchElementException();
		if ( end == 0 ) end = length; // wrap around backwards
		final long t = array[ --end ];
		reduce();
		return t;
	}

	/** Copies the current {@code size} elements into a new array of length {@code newLength},
	 * un-wrapping the circular content so that it starts at index 0. */
	private final void resize( final int size, final int newLength ) {
		final long[] newArray = new long[ newLength ];
		if ( start >= end ) {
			if ( size != 0 ) {
				// Content wraps: copy the tail [start, length) first, then the head [0, end).
				System.arraycopy( array, start, newArray, 0, length - start );
				System.arraycopy( array, 0, newArray, length - start, end );
			}
		}
		else System.arraycopy( array, start, newArray, 0, end - start );
		start = 0;
		end = size;
		array = newArray;
		length = newLength;
	}

	/** Doubles the backing array, capped at {@link Arrays#MAX_ARRAY_SIZE}. */
	private final void expand() {
		resize( length, (int)Math.min( Arrays.MAX_ARRAY_SIZE, 2L * length ) );
	}

	/** Halves the backing array when the queue is at most a quarter full. */
	private final void reduce() {
		final int size = size();
		if ( length > INITIAL_CAPACITY && size <= length / 4 ) resize( size, (int)(( length + 1L ) / 2) ); // This turns Integer.MAX_VALUE into 2 << 30.
	}

	@Override
	public void enqueue( long x ) {
		array[ end++ ] = x;
		if ( end == length ) end = 0;
		if ( end == start ) expand(); // buffer full: grow before the positions collide
	}

	/** Enqueues a new element as the {@linkplain #first() first} element (in dequeuing order) of the queue.
	 * @param x the element to enqueue.
	 */
	public void enqueueFirst( long x ) {
		if ( start == 0 ) start = length;
		array[ --start ] = x;
		if ( end == start ) expand();
	}

	/** Returns the first element of the queue.
	 * @return the first element of the queue.
	 * @throws NoSuchElementException if the queue is empty.
	 */
	public long firstLong() {
		if ( start == end ) throw new NoSuchElementException();
		return array[ start ];
	}

	/** Returns the last element of the queue.
	 * @return the last element of the queue.
	 * @throws NoSuchElementException if the queue is empty.
	 */
	public long lastLong() {
		if ( start == end ) throw new NoSuchElementException();
		return array[ ( end == 0 ? length : end ) - 1 ];
	}

	@Override
	public void clear() {
		start = end = 0;
	}

	/** Trims the queue to the smallest possible size (current size plus one slot,
	 * since the circular representation needs one empty cell to tell full from empty). */
	public void trim() {
		final int size = size();
		final long[] newArray = new long[ size + 1 ];
		if ( start <= end ) System.arraycopy( array, start, newArray, 0, end - start );
		else {
			System.arraycopy( array, start, newArray, 0, length - start );
			System.arraycopy( array, 0, newArray, length - start, end );
		}
		start = 0;
		length = ( end = size ) + 1;
		array = newArray;
	}

	@Override
	public int size() {
		final int apparentLength = end - start;
		// Negative apparent length means the content wraps around the array end.
		return apparentLength >= 0 ? apparentLength : length + apparentLength;
	}
}
/*
 * Copyright @ 2015 Atlassian Pty Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jitsi.impl.neomedia.recording;

import org.jitsi.service.neomedia.recording.*;
import org.jitsi.utils.*;
import org.jitsi.utils.logging.*;
import org.json.simple.*;

import java.io.*;
import java.util.*;

/**
 * Implements a <tt>RecorderEventHandler</tt> which handles
 * <tt>RecorderEvents</tt> by writing them to a file in JSON format.
 *
 * @author Boris Grozev
 */
public class RecorderEventHandlerJSONImpl
    implements RecorderEventHandler
{
    /**
     * The <tt>Logger</tt> used by the <tt>RecorderEventHandlerJSONImpl</tt>
     * class and its instances for logging output.
     */
    private static final Logger logger
        = Logger.getLogger(RecorderEventHandlerJSONImpl.class);

    /**
     * Compares <tt>RecorderEvent</tt>s by their instant (e.g. timestamp).
     */
    private static final Comparator<RecorderEvent> eventComparator
        = new Comparator<RecorderEvent>()
        {
            @Override
            public int compare(RecorderEvent a, RecorderEvent b)
            {
                return Long.compare(a.getInstant(), b.getInstant());
            }
        };

    /** The file to which the accumulated events are (re)written. */
    File file;

    /** Set once {@link #close()} runs; subsequent events are rejected. */
    private boolean closed = false;

    /** Audio events received so far. */
    private final List<RecorderEvent> audioEvents
        = new LinkedList<RecorderEvent>();

    /** Video events received so far. */
    private final List<RecorderEvent> videoEvents
        = new LinkedList<RecorderEvent>();

    /**
     * Initializes a new instance which will write events to a newly created
     * file named <tt>filename</tt>.
     *
     * @param filename the name of the file to write events to.
     * @throws IOException if the file already exists, cannot be created, or
     * cannot be written to.
     */
    public RecorderEventHandlerJSONImpl(String filename)
        throws IOException
    {
        file = new File(filename);
        if (!file.createNewFile())
            throw new IOException("File exists or cannot be created: " + file);

        if (!file.canWrite())
            throw new IOException("Cannot write to file: " + file);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized boolean handleEvent(RecorderEvent ev)
    {
        if (closed)
            return false;

        MediaType mediaType = ev.getMediaType();
        RecorderEvent.Type type = ev.getType();
        long duration = ev.getDuration();
        long ssrc = ev.getSsrc();

        /*
         * For a RECORDING_ENDED event without a valid instant, find its
         * associated (i.e. with the same SSRC) RECORDING_STARTED event and
         * compute the RECORDING_ENDED instant based on its duration.
         */
        if (RecorderEvent.Type.RECORDING_ENDED.equals(type)
                && ev.getInstant() == -1
                && duration != -1)
        {
            List<RecorderEvent> events
                = MediaType.AUDIO.equals(mediaType) ? audioEvents : videoEvents;
            RecorderEvent start = null;

            for (RecorderEvent e : events)
            {
                if (RecorderEvent.Type.RECORDING_STARTED.equals(e.getType())
                        && e.getSsrc() == ssrc)
                {
                    start = e;
                    break;
                }
            }

            if (start != null)
                ev.setInstant(start.getInstant() + duration);
        }

        if (MediaType.AUDIO.equals(mediaType))
            audioEvents.add(ev);
        else if (MediaType.VIDEO.equals(mediaType))
            videoEvents.add(ev);

        try
        {
            writeAllEvents();
        }
        catch (IOException ioe)
        {
            logger.warn("Failed to write recorder events to file: ", ioe);
            return false;
        }

        return true;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized void close()
    {
        //XXX do we want to write everything again?
        try
        {
            writeAllEvents();
        }
        catch (IOException ioe)
        {
            // Pass the exception itself (instead of concatenating it into the
            // message) so that the stack trace is logged, consistently with
            // handleEvent().
            logger.warn("Failed to write recorder events to file: ", ioe);
        }
        finally
        {
            closed = true;
        }
    }

    /**
     * Rewrites {@link #file} with all currently accumulated audio and video
     * events, each list sorted by instant.
     *
     * @throws IOException if writing to the file fails.
     */
    private void writeAllEvents()
        throws IOException
    {
        Collections.sort(audioEvents, eventComparator);
        Collections.sort(videoEvents, eventComparator);
        int nbAudio = audioEvents.size();
        int nbVideo = videoEvents.size();

        if (nbAudio + nbVideo > 0)
        {
            // try-with-resources: the previous implementation leaked the
            // FileWriter if any of the write() calls threw.
            try (FileWriter writer = new FileWriter(file, false))
            {
                writer.write("{\n");

                if (nbAudio > 0)
                {
                    writer.write(" \"audio\" : [\n");
                    writeEvents(audioEvents, writer);
                    if (nbVideo > 0)
                        writer.write(" ],\n\n");
                    else
                        writer.write(" ]\n\n");
                }

                if (nbVideo > 0)
                {
                    writer.write(" \"video\" : [\n");
                    writeEvents(videoEvents, writer);
                    writer.write(" ]\n");
                }

                writer.write("}\n");
            }
        }
    }

    /**
     * Writes <tt>events</tt> to <tt>writer</tt> as the body of a JSON array:
     * one event per line, comma-separated except after the last entry.
     *
     * @param events the events to write.
     * @param writer the writer to write to.
     * @throws IOException if writing fails.
     */
    private void writeEvents(List<RecorderEvent> events, FileWriter writer)
            throws IOException
    {
        int idx = 0;
        int size = events.size();

        for (RecorderEvent ev : events)
        {
            if (++idx == size)
                writer.write(" " + getJSON(ev) + "\n");
            else
                writer.write(" " + getJSON(ev) + ",\n");
        }
    }

    /**
     * Encodes a single <tt>RecorderEvent</tt> as a JSON object string,
     * omitting optional fields which carry their "unset" values.
     *
     * @param ev the event to encode.
     * @return the JSON representation of <tt>ev</tt>.
     */
    @SuppressWarnings("unchecked")
    private String getJSON(RecorderEvent ev)
    {
        JSONObject json = new JSONObject();

        json.put("instant", ev.getInstant());
        json.put("type", ev.getType().toString());

        MediaType mediaType = ev.getMediaType();
        if (mediaType != null)
            json.put("mediaType", mediaType.toString());

        json.put("ssrc", ev.getSsrc());

        long audioSsrc = ev.getAudioSsrc();
        if (audioSsrc != -1)
            json.put("audioSsrc", audioSsrc);

        RecorderEvent.AspectRatio aspectRatio = ev.getAspectRatio();
        if (aspectRatio != RecorderEvent.AspectRatio.ASPECT_RATIO_UNKNOWN)
            json.put("aspectRatio", aspectRatio.toString());

        long rtpTimestamp = ev.getRtpTimestamp();
        if (rtpTimestamp != -1)
            json.put("rtpTimestamp", rtpTimestamp);

        String endpointId = ev.getEndpointId();
        if (endpointId != null)
            json.put("endpointId", endpointId);

        String filename = ev.getFilename();
        if (filename != null)
        {
            // Store only the base name, stripping any directory components.
            String bareFilename = filename;
            int idx = filename.lastIndexOf('/');
            int len = filename.length();

            if (idx != -1 && idx != len - 1)
                bareFilename = filename.substring(1 + idx, len);

            json.put("filename", bareFilename);
        }

        return json.toJSONString();
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.llap.tezplugins; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.hive.llap.registry.ServiceInstance; import org.apache.hadoop.io.Writable; import org.apache.hadoop.hive.llap.protocol.LlapTaskUmbilicalProtocol.TezAttemptArray; import java.io.IOException; import java.nio.ByteBuffer; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.collect.BiMap; import com.google.common.collect.HashBiMap; import com.google.protobuf.ByteString; import com.google.protobuf.ServiceException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.llap.LlapNodeId; import org.apache.hadoop.hive.llap.LlapUtil; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo; import 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto; import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary; import org.apache.hadoop.hive.llap.protocol.LlapTaskUmbilicalProtocol; import org.apache.hadoop.hive.llap.registry.impl.LlapRegistryService; import org.apache.hadoop.hive.llap.security.LlapTokenIdentifier; import org.apache.hadoop.hive.llap.tez.Converters; import org.apache.hadoop.hive.llap.tez.LlapProtocolClientProxy; import org.apache.hadoop.hive.llap.tezplugins.helpers.SourceStateTracker; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.Text; import org.apache.hadoop.ipc.ProtocolSignature; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.RetriableException; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.LocalResource; import 
org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.webapp.util.WebAppUtils; import org.apache.tez.common.TezTaskUmbilicalProtocol; import org.apache.tez.common.TezUtils; import org.apache.tez.common.security.JobTokenSecretManager; import org.apache.tez.dag.api.TezConfiguration; import org.apache.tez.dag.api.TezException; import org.apache.tez.dag.api.TezUncheckedException; import org.apache.tez.dag.api.UserPayload; import org.apache.tez.dag.api.event.VertexStateUpdate; import org.apache.tez.dag.app.TezTaskCommunicatorImpl; import org.apache.tez.dag.records.TezTaskAttemptID; import org.apache.tez.runtime.api.TaskFailureType; import org.apache.tez.runtime.api.impl.TaskSpec; import org.apache.tez.runtime.api.impl.TezHeartbeatRequest; import org.apache.tez.runtime.api.impl.TezHeartbeatResponse; import org.apache.tez.serviceplugins.api.ContainerEndReason; import org.apache.tez.serviceplugins.api.ServicePluginErrorDefaults; import org.apache.tez.serviceplugins.api.TaskAttemptEndReason; import org.apache.tez.serviceplugins.api.TaskCommunicatorContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class LlapTaskCommunicator extends TezTaskCommunicatorImpl { private static final Logger LOG = LoggerFactory.getLogger(LlapTaskCommunicator.class); private static final boolean isInfoEnabled = LOG.isInfoEnabled(); private static final String RESOURCE_URI_STR = "/ws/v1/applicationhistory"; private static final Joiner JOINER = Joiner.on(""); private static final Joiner PATH_JOINER = Joiner.on("/"); private final ConcurrentMap<QueryIdentifierProto, ByteBuffer> credentialMap; // Tracks containerIds and taskAttemptIds, so can be kept independent of the running DAG. // When DAG specific cleanup happens, it'll be better to link this to a DAG though. 
// Tracks container/task-attempt/node associations independently of the running DAG.
private final EntityTracker entityTracker = new EntityTracker();
private final SourceStateTracker sourceStateTracker;
// All LLAP nodes that have been handed tasks for the current query.
private final Set<LlapNodeId> nodesForQuery = new HashSet<>();

private LlapProtocolClientProxy communicator;
// Delay (seconds) before cleaning up DAG files on the daemons; read in initialize().
private long deleteDelayOnDagComplete;
private final LlapTaskUmbilicalProtocol umbilical;
private final Token<LlapTokenIdentifier> token;
private final String user;
private String amHost;
private String timelineServerUri;

// These two structures track the list of known nodes, and the list of nodes which are
// sending in keep-alive heartbeats. Primarily for debugging purposes a.t.m, since
// there's some unexplained TASK_TIMEOUTS which are currently being observed.
private final ConcurrentMap<LlapNodeId, Long> knownNodeMap = new ConcurrentHashMap<>();
private final ConcurrentMap<LlapNodeId, PingingNodeInfo> pingedNodeMap = new ConcurrentHashMap<>();

private final LlapRegistryService serviceRegistry;

// Identity of the query/DAG currently being executed; swapped when a new DAG arrives.
private volatile QueryIdentifierProto currentQueryIdentifierProto;
private volatile String currentHiveQueryId;

/**
 * Creates the communicator, extracting the LLAP token from the AM credentials.
 * In a secure cluster a token must be present; in an insecure one it must not
 * (enforced by the checkState below).
 */
public LlapTaskCommunicator(
    TaskCommunicatorContext taskCommunicatorContext) {
  super(taskCommunicatorContext);
  Credentials credentials = taskCommunicatorContext.getAMCredentials();
  if (credentials != null) {
    @SuppressWarnings("unchecked")
    Token<LlapTokenIdentifier> llapToken =
        (Token<LlapTokenIdentifier>)credentials.getToken(LlapTokenIdentifier.KIND_NAME);
    this.token = llapToken;
  } else {
    this.token = null;
  }
  if (LOG.isInfoEnabled()) {
    LOG.info("Task communicator with a token " + token);
  }
  Preconditions.checkState((token != null) == UserGroupInformation.isSecurityEnabled());

  // Not closing this at the moment at shutdown, since this could be a shared instance.
  serviceRegistry = LlapRegistryService.getClient(conf);

  umbilical = new LlapTaskUmbilicalProtocolImpl(getUmbilical());

  // TODO Avoid reading this from the environment
  user = System.getenv(ApplicationConstants.Environment.USER.name());

  credentialMap = new ConcurrentHashMap<>();
  sourceStateTracker = new SourceStateTracker(getContext(), this);
}

private static final String LLAP_TOKEN_NAME = LlapTokenIdentifier.KIND_NAME.toString();

/**
 * Inspects a send failure: retriable errors are ignored; an invalid-LLAP-token
 * error (directly, or wrapped in a RemoteException) is reported as a fatal
 * plugin error to the context.
 */
private void processSendError(Throwable t) {
  Throwable cause = t;
  while (cause != null) {
    if (cause instanceof RetriableException) return;
    if (((cause instanceof InvalidToken && cause.getMessage() != null)
        || (cause instanceof RemoteException && cause.getCause() == null
            && cause.getMessage() != null && cause.getMessage().contains("InvalidToken")))
        && cause.getMessage().contains(LLAP_TOKEN_NAME)) {
      break;
    }
    cause = cause.getCause();
  }
  if (cause == null) return;
  LOG.error("Reporting fatal error - LLAP token appears to be invalid.", t);
  getContext().reportError(ServicePluginErrorDefaults.OTHER_FATAL, cause.getMessage(), null);
}

/** Reads config, creates the protocol client proxy and resolves the timeline server URI. */
@Override
public void initialize() throws Exception {
  super.initialize();
  Configuration conf = getConf();
  int numThreads = HiveConf.getIntVar(conf, ConfVars.LLAP_DAEMON_COMMUNICATOR_NUM_THREADS);
  this.communicator = createLlapProtocolClientProxy(numThreads, conf);
  this.deleteDelayOnDagComplete = HiveConf.getTimeVar(
      conf, ConfVars.LLAP_FILE_CLEANUP_DELAY_SECONDS, TimeUnit.SECONDS);
  LOG.info("Running LlapTaskCommunicator with "
      + "fileCleanupDelay=" + deleteDelayOnDagComplete
      + ", numCommunicatorThreads=" + numThreads);
  this.communicator.init(conf);
  String scheme = WebAppUtils.getHttpSchemePrefix(conf);
  String ahsUrl = WebAppUtils.getAHSWebAppURLWithoutScheme(conf);
  this.timelineServerUri = WebAppUtils.getURLWithScheme(scheme, ahsUrl);
}

@Override
public void start() {
  super.start();
  this.communicator.start();
}

@Override
public void shutdown() {
  super.shutdown();
  if (this.communicator != null) {
    this.communicator.stop();
  }
}

/**
 * Starts the umbilical RPC server on an ephemeral port (0) bound to all
 * interfaces, secured with the per-job token secret manager.
 */
@Override
protected void startRpcServer() {
  Configuration conf = getConf();
  try {
    JobTokenSecretManager jobTokenSecretManager = new JobTokenSecretManager();
    jobTokenSecretManager.addTokenForJob(tokenIdentifier, sessionToken);

    int numHandlers =
        HiveConf.getIntVar(conf, ConfVars.LLAP_TASK_COMMUNICATOR_LISTENER_THREAD_COUNT);
    server = new RPC.Builder(conf)
        .setProtocol(LlapTaskUmbilicalProtocol.class)
        .setBindAddress("0.0.0.0")
        .setPort(0)
        .setInstance(umbilical)
        .setNumHandlers(numHandlers)
        .setSecretManager(jobTokenSecretManager).build();

    if (conf.getBoolean(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, false)) {
      server.refreshServiceAcl(conf, new LlapUmbilicalPolicyProvider());
    }

    server.start();
    this.address = NetUtils.getConnectAddress(server);
    this.amHost = LlapUtil.getAmHostNameFromAddress(address, conf);
    LOG.info("Started LlapUmbilical: " + umbilical.getClass().getName() + " at address: "
        + address + " with numHandlers=" + numHandlers + " using the host name " + amHost);
  } catch (IOException e) {
    throw new TezUncheckedException(e);
  }
}

@VisibleForTesting
protected LlapProtocolClientProxy createLlapProtocolClientProxy(int numThreads, Configuration conf) {
  return new LlapProtocolClientProxy(numThreads, conf, token);
}

@Override
public void registerRunningContainer(ContainerId containerId, String hostname, int port) {
  super.registerRunningContainer(containerId, hostname, port);
  entityTracker.registerContainer(containerId, hostname, port);
}

/**
 * On internal preemption, asks the daemon to terminate the fragment running in
 * the container before unregistering it from the tracker.
 */
@Override
public void registerContainerEnd(ContainerId containerId, ContainerEndReason endReason, String diagnostics) {
  super.registerContainerEnd(containerId, endReason, diagnostics);
  if (endReason == ContainerEndReason.INTERNAL_PREEMPTION) {
    LOG.info("Processing containerEnd for container {} caused by internal preemption", containerId);
    TezTaskAttemptID taskAttemptId = entityTracker.getTaskAttemptIdForContainer(containerId);
    if (taskAttemptId != null) {
      sendTaskTerminated(taskAttemptId, true);
    }
  }
  entityTracker.unregisterContainer(containerId);
}

/**
 * Registers a task attempt, resolves its target LLAP node from the container
 * info, builds the SubmitWork request and submits it asynchronously; the
 * callback maps daemon responses/errors onto taskKilled/taskFailed.
 */
@Override
public void registerRunningTaskAttempt(final ContainerId containerId, final TaskSpec taskSpec,
    Map<String, LocalResource> additionalResources, Credentials credentials,
    boolean credentialsChanged, int priority) {
  super.registerRunningTaskAttempt(containerId, taskSpec, additionalResources, credentials,
      credentialsChanged, priority);
  int dagId = taskSpec.getTaskAttemptID().getTaskID().getVertexID().getDAGId().getId();
  if (currentQueryIdentifierProto == null || (dagId != currentQueryIdentifierProto.getDagIndex())) {
    // TODO HiveQueryId extraction by parsing the Processor payload is ugly. This can be improved
    // once TEZ-2672 is fixed.
    String hiveQueryId;
    try {
      hiveQueryId = extractQueryId(taskSpec);
    } catch (IOException e) {
      throw new RuntimeException("Failed to extract query id from task spec: " + taskSpec, e);
    }
    Preconditions.checkNotNull(hiveQueryId, "Unexpected null query id");

    resetCurrentDag(dagId, hiveQueryId);
  }

  ContainerInfo containerInfo = getContainerInfo(containerId);
  String host;
  int port;
  if (containerInfo != null) {
    synchronized (containerInfo) {
      host = containerInfo.host;
      port = containerInfo.port;
    }
  } else {
    // TODO Handle this properly
    throw new RuntimeException("ContainerInfo not found for container: " + containerId
        + ", while trying to launch task: " + taskSpec.getTaskAttemptID());
  }

  LlapNodeId nodeId = LlapNodeId.getInstance(host, port);
  registerKnownNode(nodeId);
  entityTracker.registerTaskAttempt(containerId, taskSpec.getTaskAttemptID(), host, port);
  nodesForQuery.add(nodeId);

  sourceStateTracker.registerTaskForStateUpdates(host, port, taskSpec.getInputs());
  FragmentRuntimeInfo fragmentRuntimeInfo;
  try {
    fragmentRuntimeInfo = sourceStateTracker.getFragmentRuntimeInfo(
        taskSpec.getVertexName(),
        taskSpec.getTaskAttemptID().getTaskID().getId(), priority);
  } catch (Exception e) {
    LOG.error(
        "Error while trying to get runtimeFragmentInfo for fragmentId={}, containerId={}, currentQI={}, currentQueryId={}",
        taskSpec.getTaskAttemptID(), containerId, currentQueryIdentifierProto,
        currentHiveQueryId, e);
    if (e instanceof RuntimeException) {
      throw (RuntimeException) e;
    } else {
      throw new RuntimeException(e);
    }
  }
  SubmitWorkRequestProto requestProto;
  try {
    requestProto = constructSubmitWorkRequest(containerId, taskSpec, fragmentRuntimeInfo, currentHiveQueryId);
  } catch (IOException e) {
    throw new RuntimeException("Failed to construct request", e);
  }

  // Have to register this up front right now. Otherwise, it's possible for the task to start
  // sending out status/DONE/KILLED/FAILED messages before TAImpl knows how to handle them.
  getContext().taskStartedRemotely(taskSpec.getTaskAttemptID(), containerId);
  communicator.sendSubmitWork(requestProto, host, port,
      new LlapProtocolClientProxy.ExecuteRequestCallback<SubmitWorkResponseProto>() {
        @Override
        public void setResponse(SubmitWorkResponseProto response) {
          if (response.hasSubmissionState()) {
            LlapDaemonProtocolProtos.SubmissionStateProto ss = response.getSubmissionState();
            if (ss.equals(LlapDaemonProtocolProtos.SubmissionStateProto.REJECTED)) {
              // Daemon has no capacity: treat as a kill so the attempt can be rescheduled.
              LOG.info(
                  "Unable to run task: " + taskSpec.getTaskAttemptID() + " on containerId: "
                      + containerId + ", Service Busy");
              getContext().taskKilled(taskSpec.getTaskAttemptID(),
                  TaskAttemptEndReason.EXECUTOR_BUSY, "Service Busy");
              return;
            }
          } else {
            // TODO: Provide support for reporting errors
            // This should never happen as server always returns a valid status on success
            throw new RuntimeException("SubmissionState in response is expected!");
          }
          if (response.hasUniqueNodeId()) {
            entityTracker.registerTaskSubmittedToNode(
                taskSpec.getTaskAttemptID(), response.getUniqueNodeId());
          }
          LOG.info("Successfully launched task: " + taskSpec.getTaskAttemptID());
        }

        @Override
        public void indicateError(Throwable t) {
          Throwable originalError = t;
          if (t instanceof ServiceException) {
            ServiceException se = (ServiceException) t;
            t = se.getCause();
          }
          if (t instanceof RemoteException) {
            // All others from the remote service cause the task to FAIL.
            LOG.info(
                "Failed to run task: " + taskSpec.getTaskAttemptID() + " on containerId: "
                    + containerId, t);
            processSendError(originalError);
            getContext()
                .taskFailed(taskSpec.getTaskAttemptID(), TaskFailureType.NON_FATAL, TaskAttemptEndReason.OTHER,
                    t.toString());
          } else {
            // Exception from the RPC layer - communication failure, consider as KILLED / service down.
            if (t instanceof IOException) {
              LOG.info(
                  "Unable to run task: " + taskSpec.getTaskAttemptID() + " on containerId: "
                      + containerId + ", Communication Error");
              processSendError(originalError);
              getContext().taskKilled(taskSpec.getTaskAttemptID(),
                  TaskAttemptEndReason.COMMUNICATION_ERROR, "Communication Error");
            } else {
              // Anything else is a FAIL.
              LOG.info(
                  "Failed to run task: " + taskSpec.getTaskAttemptID() + " on containerId: "
                      + containerId, t);
              processSendError(originalError);
              getContext()
                  .taskFailed(taskSpec.getTaskAttemptID(), TaskFailureType.NON_FATAL,
                      TaskAttemptEndReason.OTHER, t.getMessage());
            }
          }
        }
      });
}

// NOTE(review): this method's closing brace lies on the next (truncated) line of
// this chunk; its body is complete here.
@Override
public void unregisterRunningTaskAttempt(final TezTaskAttemptID taskAttemptId,
    TaskAttemptEndReason endReason, String diagnostics) {
  super.unregisterRunningTaskAttempt(taskAttemptId, endReason, diagnostics);
  if (endReason == TaskAttemptEndReason.INTERNAL_PREEMPTION) {
    LOG.info("Processing taskEnd for task {} caused by internal preemption", taskAttemptId);
    sendTaskTerminated(taskAttemptId, false);
  }
  entityTracker.unregisterTaskAttempt(taskAttemptId);
  // This will also be invoked for tasks which have been KILLED / rejected by the daemon.
  // Informing the daemon becomes necessary once the LlapScheduler supports preemption
  // and/or starts attempting to kill tasks which may be running on a node.
} private void sendTaskTerminated(final TezTaskAttemptID taskAttemptId, boolean invokedByContainerEnd) { LOG.info( "Attempting to send terminateRequest for fragment {} due to internal preemption invoked by {}", taskAttemptId.toString(), invokedByContainerEnd ? "containerEnd" : "taskEnd"); LlapNodeId nodeId = entityTracker.getNodeIdForTaskAttempt(taskAttemptId); // NodeId can be null if the task gets unregistered due to failure / being killed by the daemon itself if (nodeId != null) { TerminateFragmentRequestProto request = TerminateFragmentRequestProto.newBuilder().setQueryIdentifier( constructQueryIdentifierProto( taskAttemptId.getTaskID().getVertexID().getDAGId().getId())) .setFragmentIdentifierString(taskAttemptId.toString()).build(); communicator.sendTerminateFragment(request, nodeId.getHostname(), nodeId.getPort(), new LlapProtocolClientProxy.ExecuteRequestCallback<TerminateFragmentResponseProto>() { @Override public void setResponse(TerminateFragmentResponseProto response) { } @Override public void indicateError(Throwable t) { LOG.warn("Failed to send terminate fragment request for {}", taskAttemptId.toString()); processSendError(t); } }); } else { LOG.info( "Not sending terminate request for fragment {} since it's node is not known. 
Already unregistered", taskAttemptId.toString()); } } @Override public void dagComplete(final int dagIdentifier) { QueryIdentifierProto queryIdentifierProto = constructQueryIdentifierProto(dagIdentifier); QueryCompleteRequestProto request = QueryCompleteRequestProto.newBuilder() .setQueryIdentifier(queryIdentifierProto).setDeleteDelay(deleteDelayOnDagComplete).build(); for (final LlapNodeId llapNodeId : nodesForQuery) { LOG.info("Sending dagComplete message for {}, to {}", dagIdentifier, llapNodeId); communicator.sendQueryComplete(request, llapNodeId.getHostname(), llapNodeId.getPort(), new LlapProtocolClientProxy.ExecuteRequestCallback<LlapDaemonProtocolProtos.QueryCompleteResponseProto>() { @Override public void setResponse(LlapDaemonProtocolProtos.QueryCompleteResponseProto response) { } @Override public void indicateError(Throwable t) { LOG.warn("Failed to indicate dag complete dagId={} to node {}", dagIdentifier, llapNodeId); processSendError(t); } }); } nodesForQuery.clear(); // TODO Ideally move some of the other cleanup code from resetCurrentDag over here } @Override public void onVertexStateUpdated(VertexStateUpdate vertexStateUpdate) { // Delegate updates over to the source state tracker. sourceStateTracker .sourceStateUpdated(vertexStateUpdate.getVertexName(), vertexStateUpdate.getVertexState()); } // TODO HIVE-15163. Handle cases where nodes go down and come back on the same port. Historic information // can prevent updates from being sent out to the new node. public void sendStateUpdate(final LlapNodeId nodeId, final SourceStateUpdatedRequestProto request) { communicator.sendSourceStateUpdate(request, nodeId, new LlapProtocolClientProxy.ExecuteRequestCallback<SourceStateUpdatedResponseProto>() { @Override public void setResponse(SourceStateUpdatedResponseProto response) { } @Override public void indicateError(Throwable t) { // Re-attempts are left upto the RPC layer. 
If there's a failure reported after this, // mark all attempts running on this node as KILLED. The node itself cannot be killed from // here, that's only possible via the scheduler. // The assumption is that if there's a failure to communicate with the node - it will // eventually timeout - and no more tasks will be allocated on it. LOG.error("Failed to send state update to node: {}, Killing all attempts running on " + "node. Attempted StateUpdate={}", nodeId, request, t); processSendError(t); BiMap<ContainerId, TezTaskAttemptID> biMap = entityTracker.getContainerAttemptMapForNode(nodeId); if (biMap != null) { synchronized (biMap) { for (Map.Entry<ContainerId, TezTaskAttemptID> entry : biMap.entrySet()) { LOG.info( "Sending a kill for attempt {}, due to a communication failure while sending a finishable state update", entry.getValue()); getContext().taskKilled(entry.getValue(), TaskAttemptEndReason.NODE_FAILED, "Failed to send finishable state update to node " + nodeId); } } } } }); } @Override public String getInProgressLogsUrl(TezTaskAttemptID attemptID, NodeId containerNodeId) { return constructLogUrl(attemptID, containerNodeId, false); } @Override public String getCompletedLogsUrl(TezTaskAttemptID attemptID, NodeId containerNodeId) { return constructLogUrl(attemptID, containerNodeId, true); } private String constructLogUrl(final TezTaskAttemptID attemptID, final NodeId containerNodeId, final boolean isDone) { if (timelineServerUri == null || containerNodeId == null) { return null; } Set<ServiceInstance> instanceSet; try { instanceSet = serviceRegistry.getInstances().getByHost(containerNodeId.getHost()); } catch (IOException e) { // Not failing the job due to a failure constructing the log url LOG.warn( "Unable to find instance for yarnNodeId={} to construct the log url. Exception message={}", containerNodeId, e.getMessage()); return null; } // Once NodeId includes fragmentId - this becomes a lot more reliable. 
if (instanceSet != null) { ServiceInstance matchedInstance = null; for (ServiceInstance instance : instanceSet) { if (instance.getRpcPort() == containerNodeId.getPort()) { matchedInstance = instance; break; } } if (matchedInstance != null) { String containerIdString = matchedInstance.getProperties() .get(HiveConf.ConfVars.LLAP_DAEMON_CONTAINER_ID.varname); String nmNodeAddress = matchedInstance.getProperties().get(ConfVars.LLAP_DAEMON_NM_ADDRESS.varname); if (!StringUtils.isBlank(containerIdString) && !StringUtils.isBlank(nmNodeAddress)) { return constructLlapLogUrl(attemptID, containerIdString, isDone, nmNodeAddress); } } } return null; } private String constructLlapLogUrl(final TezTaskAttemptID attemptID, final String containerIdString, final boolean isDone, final String nmAddress) { String dagId = attemptID.getTaskID().getVertexID().getDAGId().toString(); String filename = JOINER.join(currentHiveQueryId, "-", dagId, ".log", (isDone ? ".done" : ""), "?nm.id=", nmAddress); String url = PATH_JOINER.join(timelineServerUri, "ws", "v1", "applicationhistory", "containers", containerIdString, "logs", filename); return url; } private static class PingingNodeInfo { final AtomicLong logTimestamp; final AtomicInteger pingCount; PingingNodeInfo(long currentTs) { logTimestamp = new AtomicLong(currentTs); pingCount = new AtomicInteger(1); } } public void registerKnownNode(LlapNodeId nodeId) { Long old = knownNodeMap.putIfAbsent(nodeId, TimeUnit.MILLISECONDS.convert(System.nanoTime(), TimeUnit.NANOSECONDS)); if (old == null) { if (isInfoEnabled) { LOG.info("Added new known node: {}", nodeId); } } } public void registerPingingNode(LlapNodeId nodeId) { long currentTs = TimeUnit.MILLISECONDS.convert(System.nanoTime(), TimeUnit.NANOSECONDS); PingingNodeInfo ni = new PingingNodeInfo(currentTs); PingingNodeInfo old = pingedNodeMap.put(nodeId, ni); if (old == null) { if (isInfoEnabled) { LOG.info("Added new pinging node: [{}]", nodeId); } } else { old.pingCount.incrementAndGet(); } // 
The node should always be known by this point. Log occasionally if it is not known. if (!knownNodeMap.containsKey(nodeId)) { if (old == null) { // First time this is seen. Log it. LOG.warn("Received ping from unknownNode: [{}], count={}", nodeId, ni.pingCount.get()); } else { // Pinged before. Log only occasionally. if (currentTs > old.logTimestamp.get() + 5000l) { // 5 seconds elapsed. Log again. LOG.warn("Received ping from unknownNode: [{}], count={}", nodeId, old.pingCount.get()); old.logTimestamp.set(currentTs); } } } } private final AtomicLong nodeNotFoundLogTime = new AtomicLong(0); void nodePinged(String hostname, String uniqueId, int port, TezAttemptArray tasks) { // TODO: do we ever need the port? we could just do away with nodeId altogether. LlapNodeId nodeId = LlapNodeId.getInstance(hostname, port); registerPingingNode(nodeId); BiMap<ContainerId, TezTaskAttemptID> biMap = entityTracker.getContainerAttemptMapForNode(nodeId); if (biMap != null) { HashSet<TezTaskAttemptID> attempts = new HashSet<>(); for (Writable w : tasks.get()) { attempts.add((TezTaskAttemptID)w); } String error = ""; synchronized (biMap) { for (Map.Entry<ContainerId, TezTaskAttemptID> entry : biMap.entrySet()) { // TODO: this is a stopgap fix. We really need to change all mappings by unique node ID, // or at least (in this case) track the latest unique ID for LlapNode and retry all // older-node tasks proactively. For now let the heartbeats fail them. TezTaskAttemptID attemptId = entry.getValue(); String taskNodeId = entityTracker.getUniqueNodeId(attemptId); // Unique ID is registered based on Submit response. Theoretically, we could get a ping // when the task is valid but we haven't stored the unique ID yet, so taskNodeId is null. // However, the next heartbeat(s) should get the value eventually and mark task as alive. // Also, we prefer a missed heartbeat over a stuck query in case of discrepancy in ET. 
if (taskNodeId != null && taskNodeId.equals(uniqueId)) { if (attempts.contains(attemptId)) { getContext().taskAlive(entry.getValue()); } else { error += (attemptId + ", "); } getContext().containerAlive(entry.getKey()); } } } if (!error.isEmpty()) { LOG.info("The tasks we expected to be on the node are not there: " + error); } } else { long currentTs = TimeUnit.MILLISECONDS.convert(System.nanoTime(), TimeUnit.NANOSECONDS); if (currentTs > nodeNotFoundLogTime.get() + 5000l) { LOG.warn("Received ping from node without any registered tasks or containers: " + hostname + ":" + port + ". Could be caused by pre-emption by the AM," + " or a mismatched hostname. Enable debug logging for mismatched host names"); nodeNotFoundLogTime.set(currentTs); } } } private void resetCurrentDag(int newDagId, String hiveQueryId) { // Working on the assumption that a single DAG runs at a time per AM. currentQueryIdentifierProto = constructQueryIdentifierProto(newDagId); currentHiveQueryId = hiveQueryId; sourceStateTracker.resetState(currentQueryIdentifierProto); nodesForQuery.clear(); LOG.info("CurrentDagId set to: " + newDagId + ", name=" + getContext().getCurrentDagInfo().getName() + ", queryId=" + hiveQueryId); // TODO Is it possible for heartbeats to come in from lost tasks - those should be told to die, which // is likely already happening. 
} private String extractQueryId(TaskSpec taskSpec) throws IOException { UserPayload processorPayload = taskSpec.getProcessorDescriptor().getUserPayload(); Configuration conf = TezUtils.createConfFromUserPayload(processorPayload); return HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID); } private SubmitWorkRequestProto constructSubmitWorkRequest(ContainerId containerId, TaskSpec taskSpec, FragmentRuntimeInfo fragmentRuntimeInfo, String hiveQueryId) throws IOException { SubmitWorkRequestProto.Builder builder = SubmitWorkRequestProto.newBuilder(); builder.setFragmentNumber(taskSpec.getTaskAttemptID().getTaskID().getId()); builder.setAttemptNumber(taskSpec.getTaskAttemptID().getId()); builder.setContainerIdString(containerId.toString()); builder.setAmHost(getAmHostString()); builder.setAmPort(getAddress().getPort()); Preconditions.checkState(currentQueryIdentifierProto.getDagIndex() == taskSpec.getTaskAttemptID().getTaskID().getVertexID().getDAGId().getId()); ByteBuffer credentialsBinary = credentialMap.get(currentQueryIdentifierProto); if (credentialsBinary == null) { credentialsBinary = serializeCredentials(getContext().getCurrentDagInfo().getCredentials()); credentialMap.putIfAbsent(currentQueryIdentifierProto, credentialsBinary.duplicate()); } else { credentialsBinary = credentialsBinary.duplicate(); } builder.setCredentialsBinary(ByteString.copyFrom(credentialsBinary)); builder.setWorkSpec(VertexOrBinary.newBuilder().setVertex(Converters.constructSignableVertexSpec( taskSpec, currentQueryIdentifierProto, getTokenIdentifier(), user, hiveQueryId)).build()); // Don't call builder.setWorkSpecSignature() - Tez doesn't sign fragments builder.setFragmentRuntimeInfo(fragmentRuntimeInfo); return builder.build(); } private ByteBuffer serializeCredentials(Credentials credentials) throws IOException { Credentials containerCredentials = new Credentials(); containerCredentials.addAll(credentials); DataOutputBuffer containerTokens_dob = new DataOutputBuffer(); 
containerCredentials.writeTokenStorageToStream(containerTokens_dob); return ByteBuffer.wrap(containerTokens_dob.getData(), 0, containerTokens_dob.getLength()); } protected class LlapTaskUmbilicalProtocolImpl implements LlapTaskUmbilicalProtocol { private final TezTaskUmbilicalProtocol tezUmbilical; public LlapTaskUmbilicalProtocolImpl(TezTaskUmbilicalProtocol tezUmbilical) { this.tezUmbilical = tezUmbilical; } @Override public boolean canCommit(TezTaskAttemptID taskid) throws IOException { return tezUmbilical.canCommit(taskid); } @Override public TezHeartbeatResponse heartbeat(TezHeartbeatRequest request) throws IOException, TezException { return tezUmbilical.heartbeat(request); } @Override public void nodeHeartbeat( Text hostname, Text uniqueId, int port, TezAttemptArray aw) throws IOException { if (LOG.isDebugEnabled()) { LOG.debug("Received heartbeat from [" + hostname + ":" + port +" (" + uniqueId +")]"); } nodePinged(hostname.toString(), uniqueId.toString(), port, aw); } @Override public void taskKilled(TezTaskAttemptID taskAttemptId) throws IOException { // TODO Unregister the task for state updates, which could in turn unregister the node. getContext().taskKilled(taskAttemptId, TaskAttemptEndReason.EXTERNAL_PREEMPTION, "Attempt preempted"); entityTracker.unregisterTaskAttempt(taskAttemptId); } @Override public long getProtocolVersion(String protocol, long clientVersion) throws IOException { return versionID; } @Override public ProtocolSignature getProtocolSignature(String protocol, long clientVersion, int clientMethodsHash) throws IOException { return ProtocolSignature.getProtocolSignature(this, protocol, clientVersion, clientMethodsHash); } } /** * Track the association between known containers and taskAttempts, along with the nodes they are assigned to. */ @VisibleForTesting static final class EntityTracker { // TODO: need the description of how these maps are kept consistent. 
@VisibleForTesting final ConcurrentMap<TezTaskAttemptID, LlapNodeId> attemptToNodeMap = new ConcurrentHashMap<>(); @VisibleForTesting final ConcurrentMap<ContainerId, LlapNodeId> containerToNodeMap = new ConcurrentHashMap<>(); @VisibleForTesting final ConcurrentMap<LlapNodeId, BiMap<ContainerId, TezTaskAttemptID>> nodeMap = new ConcurrentHashMap<>(); // TODO: we currently put task info everywhere before we submit it and know the "real" node id. // Therefore, we are going to store this separately. Ideally, we should roll uniqueness // into LlapNodeId. We get node info from registry; that should (or can) include it. private final ConcurrentMap<TezTaskAttemptID, String> uniqueNodeMap = new ConcurrentHashMap<>(); void registerTaskAttempt(ContainerId containerId, TezTaskAttemptID taskAttemptId, String host, int port) { if (LOG.isDebugEnabled()) { LOG.debug("Registering " + containerId + ", " + taskAttemptId + " for node: " + host + ":" + port); } LlapNodeId llapNodeId = LlapNodeId.getInstance(host, port); attemptToNodeMap.putIfAbsent(taskAttemptId, llapNodeId); registerContainer(containerId, host, port); // nodeMap registration. BiMap<ContainerId, TezTaskAttemptID> tmpMap = HashBiMap.create(); BiMap<ContainerId, TezTaskAttemptID> old = nodeMap.putIfAbsent(llapNodeId, tmpMap); BiMap<ContainerId, TezTaskAttemptID> usedInstance; usedInstance = old == null ? tmpMap : old; synchronized(usedInstance) { usedInstance.put(containerId, taskAttemptId); } // Make sure to put the instance back again, in case it was removed as part of a // containerEnd/taskEnd invocation. nodeMap.putIfAbsent(llapNodeId, usedInstance); } public String getUniqueNodeId(TezTaskAttemptID attemptId) { return uniqueNodeMap.get(attemptId); } public void registerTaskSubmittedToNode( TezTaskAttemptID taskAttemptID, String uniqueNodeId) { synchronized (attemptToNodeMap) { if (attemptToNodeMap.containsKey(taskAttemptID)) { // Register only if the attempt is known. 
In case an unregister call // came in before the register call. String prev = uniqueNodeMap.putIfAbsent(taskAttemptID, uniqueNodeId); if (prev != null) { LOG.warn("Replaced the unique node mapping for task from " + prev + " to " + uniqueNodeId); } } } } void unregisterTaskAttempt(TezTaskAttemptID attemptId) { uniqueNodeMap.remove(attemptId); LlapNodeId llapNodeId; synchronized (attemptToNodeMap) { llapNodeId = attemptToNodeMap.remove(attemptId); if (llapNodeId == null) { // Possible since either container / task can be unregistered. return; } } BiMap<ContainerId, TezTaskAttemptID> bMap = nodeMap.get(llapNodeId); ContainerId matched = null; if (bMap != null) { synchronized(bMap) { matched = bMap.inverse().remove(attemptId); } if (bMap.isEmpty()) { nodeMap.remove(llapNodeId); } } // Remove the container mapping if (matched != null) { containerToNodeMap.remove(matched); } } void registerContainer(ContainerId containerId, String hostname, int port) { if (LOG.isDebugEnabled()) { LOG.debug("Registering " + containerId + " for node: " + hostname + ":" + port); } containerToNodeMap.putIfAbsent(containerId, LlapNodeId.getInstance(hostname, port)); // nodeMap registration is not required, since there's no taskId association. 
} LlapNodeId getNodeIdForContainer(ContainerId containerId) { return containerToNodeMap.get(containerId); } LlapNodeId getNodeIdForTaskAttempt(TezTaskAttemptID taskAttemptId) { return attemptToNodeMap.get(taskAttemptId); } ContainerId getContainerIdForAttempt(TezTaskAttemptID taskAttemptId) { LlapNodeId llapNodeId = getNodeIdForTaskAttempt(taskAttemptId); if (llapNodeId != null) { BiMap<TezTaskAttemptID, ContainerId> bMap = nodeMap.get(llapNodeId).inverse(); if (bMap != null) { synchronized (bMap) { return bMap.get(taskAttemptId); } } else { return null; } } else { return null; } } TezTaskAttemptID getTaskAttemptIdForContainer(ContainerId containerId) { LlapNodeId llapNodeId = getNodeIdForContainer(containerId); if (llapNodeId != null) { BiMap<ContainerId, TezTaskAttemptID> bMap = nodeMap.get(llapNodeId); if (bMap != null) { synchronized (bMap) { return bMap.get(containerId); } } else { return null; } } else { return null; } } void unregisterContainer(ContainerId containerId) { LlapNodeId llapNodeId = containerToNodeMap.remove(containerId); if (llapNodeId == null) { // Possible since either container / task can be unregistered. return; } BiMap<ContainerId, TezTaskAttemptID> bMap = nodeMap.get(llapNodeId); TezTaskAttemptID matched = null; if (bMap != null) { synchronized(bMap) { matched = bMap.remove(containerId); } if (bMap.isEmpty()) { nodeMap.remove(llapNodeId); } } // Remove the container mapping if (matched != null) { attemptToNodeMap.remove(matched); uniqueNodeMap.remove(matched); } } /** * Return a {@link BiMap} containing container->taskAttemptId mapping for the host specified. * </p> * <p/> * This method return the internal structure used by the EntityTracker. Users must synchronize * on the structure to ensure correct usage. 
* * @param llapNodeId * @return */ BiMap<ContainerId, TezTaskAttemptID> getContainerAttemptMapForNode(LlapNodeId llapNodeId) { return nodeMap.get(llapNodeId); } } private QueryIdentifierProto constructQueryIdentifierProto(int dagIdentifier) { return QueryIdentifierProto.newBuilder() .setApplicationIdString(getContext().getCurrentAppIdentifier()).setDagIndex(dagIdentifier) .setAppAttemptNumber(getContext().getApplicationAttemptId().getAttemptId()) .build(); } public String getAmHostString() { return amHost; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.tinkerpop.gremlin.process.traversal;

import org.apache.tinkerpop.gremlin.process.traversal.strategy.TraversalStrategyProxy;
import org.apache.tinkerpop.gremlin.structure.Edge;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.apache.tinkerpop.gremlin.structure.VertexProperty;
import org.apache.tinkerpop.gremlin.util.function.Lambda;

import java.sql.Timestamp;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.function.BiFunction;

/**
 * A Translator will translate {@link Bytecode} into another representation. That representation may be a
 * Java instance via {@link StepTranslator} or a String script in some language via {@link ScriptTranslator}.
 * The parameterization of Translator is S (traversal source) and T (full translation).
 *
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 * @author Stark Arya (sandszhou.zj@alibaba-inc.com)
 * @author Stephen Mallette (http://stephen.genoprime.com)
 */
public interface Translator<S, T> {

    /**
     * Get the {@link TraversalSource} representation rooting this translator.
     * For string-based translators ({@link ScriptTranslator}), this is typically a "g".
     * For java-based translators ({@link StepTranslator}), this is typically the {@link TraversalSource} instance
     * which the {@link Traversal} will be built from.
     *
     * @return the traversal source representation
     */
    public S getTraversalSource();

    /**
     * Translate {@link Bytecode} into a new representation. Typically, for language translations, the translation is
     * to a string representing the traversal in the respective scripting language.
     *
     * @param bytecode the bytecode representing traversal source and traversal manipulations.
     * @return the translated object
     */
    public T translate(final Bytecode bytecode);

    /**
     * Translates a {@link Traversal} into the specified form.
     *
     * @param t the traversal to translate
     * @return the translated object
     */
    public default T translate(final Traversal<?, ?> t) {
        return translate(t.asAdmin().getBytecode());
    }

    /**
     * Get the language that the translator is converting the traversal byte code to.
     *
     * @return the language of the translation
     */
    public String getTargetLanguage();

    ///

    /**
     * Translates bytecode to a Script representation.
     */
    public interface ScriptTranslator extends Translator<String, Script> {

        /**
         * Provides a way for the {@link ScriptTranslator} to convert various data types to their string
         * representations in their target language.
         */
        public interface TypeTranslator extends BiFunction<String, Object, Script> {
        }

        public abstract class AbstractTypeTranslator implements ScriptTranslator.TypeTranslator {
            protected static final String ANONYMOUS_TRAVERSAL_PREFIX = "__";
            // when true, values are bound as parameters rather than inlined as literals
            protected final boolean withParameters;
            // accumulates the translated output; re-initialized on each apply()
            protected final Script script;

            protected AbstractTypeTranslator(final boolean withParameters) {
                this.withParameters = withParameters;
                this.script = new Script();
            }

            @Override
            public Script apply(final String traversalSource, final Object o) {
                this.script.init();
                if (o instanceof Bytecode) {
                    return produceScript(traversalSource, (Bytecode) o);
                } else {
                    return convertToScript(o);
                }
            }

            /**
             * Gets the syntax for the spawn of an anonymous traversal which is traditionally the double underscore.
             */
            protected String getAnonymousTraversalPrefix() {
                return ANONYMOUS_TRAVERSAL_PREFIX;
            }

            /**
             * Gets the syntax for a {@code null} value as a string representation.
             */
            protected abstract String getNullSyntax();

            /**
             * Take the string argument and convert it to a string representation in the target language (i.e. escape,
             * enclose in appropriate quotes, etc.)
             */
            protected abstract String getSyntax(final String o);

            /**
             * Take the boolean argument and convert it to a string representation in the target language.
             */
            protected abstract String getSyntax(final Boolean o);

            /**
             * Take the {@code Date} argument and convert it to a string representation in the target language.
             */
            protected abstract String getSyntax(final Date o);

            /**
             * Take the {@code Timestamp} argument and convert it to a string representation in the target language.
             */
            protected abstract String getSyntax(final Timestamp o);

            /**
             * Take the {@code UUID} argument and convert it to a string representation in the target language.
             */
            protected abstract String getSyntax(final UUID o);

            /**
             * Take the {@link Lambda} argument and convert it to a string representation in the target language.
             */
            protected abstract String getSyntax(final Lambda o);

            /**
             * Take the {@link SackFunctions.Barrier} argument and convert it to a string representation in the target language.
             */
            protected abstract String getSyntax(final SackFunctions.Barrier o);

            /**
             * Take the {@link VertexProperty.Cardinality} argument and convert it to a string representation in the target language.
             */
            protected abstract String getSyntax(final VertexProperty.Cardinality o);

            /**
             * Take the {@link Pick} argument and convert it to a string representation in the target language.
             */
            protected abstract String getSyntax(final Pick o);

            /**
             * Take the numeric argument and convert it to a string representation in the target language. Languages
             * that can discern differences in types of numbers will wish to further check the type of the
             * {@code Number} instance itself (i.e. {@code Double}, {@code Integer}, etc.)
             */
            protected abstract String getSyntax(final Number o);

            /**
             * Take the {@code Set} and write the syntax directly to the member {@link #script} variable.
             */
            protected abstract Script produceScript(final Set<?> o);

            /**
             * Take the {@code List} and write the syntax directly to the member {@link #script} variable.
             */
            protected abstract Script produceScript(final List<?> o);

            /**
             * Take the {@code Map} and write the syntax directly to the member {@link #script} variable.
             */
            protected abstract Script produceScript(final Map<?, ?> o);

            /**
             * Take the {@code Class} and write the syntax directly to the member {@link #script} variable.
             */
            protected abstract Script produceScript(final Class<?> o);

            /**
             * Take the {@code Enum} and write the syntax directly to the member {@link #script} variable.
             */
            protected abstract Script produceScript(final Enum<?> o);

            /**
             * Take the {@link Vertex} and write the syntax directly to the member {@link #script} variable.
             */
            protected abstract Script produceScript(final Vertex o);

            /**
             * Take the {@link Edge} and write the syntax directly to the member {@link #script} variable.
             */
            protected abstract Script produceScript(final Edge o);

            /**
             * Take the {@link VertexProperty} and write the syntax directly to the member {@link #script} variable.
             */
            protected abstract Script produceScript(final VertexProperty<?> o);

            /**
             * Take the {@link TraversalStrategyProxy} and write the syntax directly to the member {@link #script} variable.
             */
            protected abstract Script produceScript(final TraversalStrategyProxy<?> o);

            /**
             * Take the {@link Bytecode} and write the syntax directly to the member {@link #script} variable.
             */
            protected abstract Script produceScript(final String traversalSource, final Bytecode o);

            /**
             * Take the {@link P} and write the syntax directly to the member {@link #script} variable. This
             * implementation should also consider {@link TextP}.
             */
            protected abstract Script produceScript(final P<?> p);

            /**
             * For each operator argument, if withParameters is set true, try parameterization as follows:
             *
             * -----------------------------------------------
             * if unpack, why?                ObjectType
             * -----------------------------------------------
             * (Yes)                          Bytecode.Binding
             * (Recursion, No)                Bytecode
             * (Recursion, No)                Traversal
             * (Yes)                          String
             * (Recursion, No)                Set
             * (Recursion, No)                List
             * (Recursion, No)                Map
             * (Yes)                          Long
             * (Yes)                          Double
             * (Yes)                          Float
             * (Yes)                          Integer
             * (Yes)                          Timestamp
             * (Yes)                          Date
             * (Yes)                          Uuid
             * (Recursion, No)                P
             * (Enumeration, No)              SackFunctions.Barrier
             * (Enumeration, No)              VertexProperty.Cardinality
             * (Enumeration, No)              TraversalOptionParent.Pick
             * (Enumeration, No)              Enum
             * (Recursion, No)                Vertex
             * (Recursion, No)                Edge
             * (Recursion, No)                VertexProperty
             * (Yes)                          Lambda
             * (Recursion, No)                TraversalStrategyProxy
             * (Enumeration, No)              TraversalStrategy
             * (Yes)                          Other
             * -------------------------------------------------
             *
             * @param object the object to convert
             * @return the {@link Script} representation of the object
             */
            protected Script convertToScript(final Object object) {
                // NOTE: the order of these checks matters — e.g. Timestamp must be tested
                // before Date (its supertype), and Bytecode.Binding before plain unwrapping.
                if (object instanceof Bytecode.Binding) {
                    return script.getBoundKeyOrAssign(withParameters, ((Bytecode.Binding) object).variable());
                } else if (object instanceof Bytecode) {
                    return produceScript(getAnonymousTraversalPrefix(), (Bytecode) object);
                } else if (object instanceof Traversal) {
                    return convertToScript(((Traversal<?, ?>) object).asAdmin().getBytecode());
                } else if (object instanceof String) {
                    final Object objectOrWrapper = withParameters ? object : getSyntax((String) object);
                    return script.getBoundKeyOrAssign(withParameters, objectOrWrapper);
                } else if (object instanceof Boolean) {
                    final Object objectOrWrapper = withParameters ? object : getSyntax((Boolean) object);
                    return script.getBoundKeyOrAssign(withParameters, objectOrWrapper);
                } else if (object instanceof Set) {
                    return produceScript((Set<?>) object);
                } else if (object instanceof List) {
                    return produceScript((List<?>) object);
                } else if (object instanceof Map) {
                    return produceScript((Map<?, ?>) object);
                } else if (object instanceof Number) {
                    final Object objectOrWrapper = withParameters ? object : getSyntax((Number) object);
                    return script.getBoundKeyOrAssign(withParameters, objectOrWrapper);
                } else if (object instanceof Class) {
                    return produceScript((Class<?>) object);
                } else if (object instanceof Timestamp) {
                    final Object objectOrWrapper = withParameters ? object : getSyntax((Timestamp) object);
                    return script.getBoundKeyOrAssign(withParameters, objectOrWrapper);
                } else if (object instanceof Date) {
                    final Object objectOrWrapper = withParameters ? object : getSyntax((Date) object);
                    return script.getBoundKeyOrAssign(withParameters, objectOrWrapper);
                } else if (object instanceof UUID) {
                    final Object objectOrWrapper = withParameters ? object : getSyntax((UUID) object);
                    return script.getBoundKeyOrAssign(withParameters, objectOrWrapper);
                } else if (object instanceof P) {
                    return produceScript((P<?>) object);
                } else if (object instanceof SackFunctions.Barrier) {
                    return script.append(getSyntax((SackFunctions.Barrier) object));
                } else if (object instanceof VertexProperty.Cardinality) {
                    return script.append(getSyntax((VertexProperty.Cardinality) object));
                } else if (object instanceof Pick) {
                    return script.append(getSyntax((Pick) object));
                } else if (object instanceof Enum) {
                    return produceScript((Enum<?>) object);
                } else if (object instanceof Vertex) {
                    return produceScript((Vertex) object);
                } else if (object instanceof Edge) {
                    return produceScript((Edge) object);
                } else if (object instanceof VertexProperty) {
                    return produceScript((VertexProperty<?>) object);
                } else if (object instanceof Lambda) {
                    final Object objectOrWrapper = withParameters ? object : getSyntax((Lambda) object);
                    return script.getBoundKeyOrAssign(withParameters, objectOrWrapper);
                } else if (object instanceof TraversalStrategyProxy) {
                    return produceScript((TraversalStrategyProxy<?>) object);
                } else if (object instanceof TraversalStrategy) {
                    // wrap the bare strategy so it is rendered through the proxy path
                    return convertToScript(new TraversalStrategyProxy(((TraversalStrategy) object)));
                } else {
                    return null == object ?
                            script.append(getNullSyntax()) :
                            script.getBoundKeyOrAssign(withParameters, object);
                }
            }
        }
    }

    /**
     * Translates bytecode to actual steps.
     */
    public interface StepTranslator<S extends TraversalSource, T extends Traversal.Admin<?, ?>> extends Translator<S, T> {
    }
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/videointelligence/v1p1beta1/video_intelligence.proto

package com.google.cloud.videointelligence.v1p1beta1;

/**
 *
 *
 * <pre>
 * Video frame level annotation results for label detection.
 * </pre>
 *
 * Protobuf type {@code google.cloud.videointelligence.v1p1beta1.LabelFrame}
 *
 * <p>NOTE(review): generated code — do not hand-edit logic; regenerate from the
 * .proto instead. Immutable message with two fields: time_offset (field 1,
 * message) and confidence (field 2, float).
 */
public final class LabelFrame extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1p1beta1.LabelFrame)
    LabelFrameOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use LabelFrame.newBuilder() to construct.
  private LabelFrame(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private LabelFrame() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new LabelFrame();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: consumes tag/value pairs until end of
  // stream (readTag() == 0); unrecognized tags are preserved in unknownFields.
  private LabelFrame(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              // tag 10 = field 1 (time_offset), wire type 2 (length-delimited);
              // merge into any previously-read value per proto3 semantics.
              com.google.protobuf.Duration.Builder subBuilder = null;
              if (timeOffset_ != null) {
                subBuilder = timeOffset_.toBuilder();
              }
              timeOffset_ =
                  input.readMessage(com.google.protobuf.Duration.parser(), extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(timeOffset_);
                timeOffset_ = subBuilder.buildPartial();
              }

              break;
            }
          case 21:
            {
              // tag 21 = field 2 (confidence), wire type 5 (fixed 32-bit float).
              confidence_ = input.readFloat();
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Always freeze unknown fields/extensions, even on parse failure.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.videointelligence.v1p1beta1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1p1beta1_LabelFrame_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.videointelligence.v1p1beta1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1p1beta1_LabelFrame_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.videointelligence.v1p1beta1.LabelFrame.class,
            com.google.cloud.videointelligence.v1p1beta1.LabelFrame.Builder.class);
  }

  public static final int TIME_OFFSET_FIELD_NUMBER = 1;
  private com.google.protobuf.Duration timeOffset_;
  /**
   *
   *
   * <pre>
   * Time-offset, relative to the beginning of the video, corresponding to the
   * video frame for this location.
   * </pre>
   *
   * <code>.google.protobuf.Duration time_offset = 1;</code>
   *
   * @return Whether the timeOffset field is set.
   */
  @java.lang.Override
  public boolean hasTimeOffset() {
    return timeOffset_ != null;
  }
  /**
   *
   *
   * <pre>
   * Time-offset, relative to the beginning of the video, corresponding to the
   * video frame for this location.
   * </pre>
   *
   * <code>.google.protobuf.Duration time_offset = 1;</code>
   *
   * @return The timeOffset. Never null: defaults to Duration.getDefaultInstance().
   */
  @java.lang.Override
  public com.google.protobuf.Duration getTimeOffset() {
    return timeOffset_ == null ? com.google.protobuf.Duration.getDefaultInstance() : timeOffset_;
  }
  /**
   *
   *
   * <pre>
   * Time-offset, relative to the beginning of the video, corresponding to the
   * video frame for this location.
   * </pre>
   *
   * <code>.google.protobuf.Duration time_offset = 1;</code>
   */
  @java.lang.Override
  public com.google.protobuf.DurationOrBuilder getTimeOffsetOrBuilder() {
    return getTimeOffset();
  }

  public static final int CONFIDENCE_FIELD_NUMBER = 2;
  private float confidence_;
  /**
   *
   *
   * <pre>
   * Confidence that the label is accurate. Range: [0, 1].
   * </pre>
   *
   * <code>float confidence = 2;</code>
   *
   * @return The confidence.
   */
  @java.lang.Override
  public float getConfidence() {
    return confidence_;
  }

  // Memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // proto3: default-valued fields (null message, 0F float) are not serialized.
    if (timeOffset_ != null) {
      output.writeMessage(1, getTimeOffset());
    }
    if (confidence_ != 0F) {
      output.writeFloat(2, confidence_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (timeOffset_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getTimeOffset());
    }
    if (confidence_ != 0F) {
      size += com.google.protobuf.CodedOutputStream.computeFloatSize(2, confidence_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.videointelligence.v1p1beta1.LabelFrame)) {
      return super.equals(obj);
    }
    com.google.cloud.videointelligence.v1p1beta1.LabelFrame other =
        (com.google.cloud.videointelligence.v1p1beta1.LabelFrame) obj;

    if (hasTimeOffset() != other.hasTimeOffset()) return false;
    if (hasTimeOffset()) {
      if (!getTimeOffset().equals(other.getTimeOffset())) return false;
    }
    // Bit-level float comparison so NaN/-0.0 follow protobuf equality semantics.
    if (java.lang.Float.floatToIntBits(getConfidence())
        != java.lang.Float.floatToIntBits(other.getConfidence())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasTimeOffset()) {
      hash = (37 * hash) + TIME_OFFSET_FIELD_NUMBER;
      hash = (53 * hash) + getTimeOffset().hashCode();
    }
    hash = (37 * hash) + CONFIDENCE_FIELD_NUMBER;
    hash = (53 * hash) + java.lang.Float.floatToIntBits(getConfidence());
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.videointelligence.v1p1beta1.LabelFrame parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.LabelFrame parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.LabelFrame parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.LabelFrame parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.LabelFrame parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.LabelFrame parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.LabelFrame parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.LabelFrame parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.LabelFrame parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.LabelFrame parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.LabelFrame parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1p1beta1.LabelFrame parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.videointelligence.v1p1beta1.LabelFrame prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless copy when this is the shared default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Video frame level annotation results for label detection.
   * </pre>
   *
   * Protobuf type {@code google.cloud.videointelligence.v1p1beta1.LabelFrame}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1p1beta1.LabelFrame)
      com.google.cloud.videointelligence.v1p1beta1.LabelFrameOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.videointelligence.v1p1beta1.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1p1beta1_LabelFrame_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.videointelligence.v1p1beta1.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1p1beta1_LabelFrame_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.videointelligence.v1p1beta1.LabelFrame.class,
              com.google.cloud.videointelligence.v1p1beta1.LabelFrame.Builder.class);
    }

    // Construct using com.google.cloud.videointelligence.v1p1beta1.LabelFrame.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      if (timeOffsetBuilder_ == null) {
        timeOffset_ = null;
      } else {
        timeOffset_ = null;
        timeOffsetBuilder_ = null;
      }
      confidence_ = 0F;

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.videointelligence.v1p1beta1.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1p1beta1_LabelFrame_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.videointelligence.v1p1beta1.LabelFrame getDefaultInstanceForType() {
      return com.google.cloud.videointelligence.v1p1beta1.LabelFrame.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.videointelligence.v1p1beta1.LabelFrame build() {
      com.google.cloud.videointelligence.v1p1beta1.LabelFrame result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.videointelligence.v1p1beta1.LabelFrame buildPartial() {
      com.google.cloud.videointelligence.v1p1beta1.LabelFrame result =
          new com.google.cloud.videointelligence.v1p1beta1.LabelFrame(this);
      if (timeOffsetBuilder_ == null) {
        result.timeOffset_ = timeOffset_;
      } else {
        result.timeOffset_ = timeOffsetBuilder_.build();
      }
      result.confidence_ = confidence_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.videointelligence.v1p1beta1.LabelFrame) {
        return mergeFrom((com.google.cloud.videointelligence.v1p1beta1.LabelFrame) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.videointelligence.v1p1beta1.LabelFrame other) {
      if (other == com.google.cloud.videointelligence.v1p1beta1.LabelFrame.getDefaultInstance())
        return this;
      // Only overwrite with non-default values, per protobuf merge semantics.
      if (other.hasTimeOffset()) {
        mergeTimeOffset(other.getTimeOffset());
      }
      if (other.getConfidence() != 0F) {
        setConfidence(other.getConfidence());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.videointelligence.v1p1beta1.LabelFrame parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.videointelligence.v1p1beta1.LabelFrame) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was successfully parsed, even on failure.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private com.google.protobuf.Duration timeOffset_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        timeOffsetBuilder_;
    /**
     *
     *
     * <pre>
     * Time-offset, relative to the beginning of the video, corresponding to the
     * video frame for this location.
     * </pre>
     *
     * <code>.google.protobuf.Duration time_offset = 1;</code>
     *
     * @return Whether the timeOffset field is set.
     */
    public boolean hasTimeOffset() {
      return timeOffsetBuilder_ != null || timeOffset_ != null;
    }
    /**
     *
     *
     * <pre>
     * Time-offset, relative to the beginning of the video, corresponding to the
     * video frame for this location.
     * </pre>
     *
     * <code>.google.protobuf.Duration time_offset = 1;</code>
     *
     * @return The timeOffset.
     */
    public com.google.protobuf.Duration getTimeOffset() {
      if (timeOffsetBuilder_ == null) {
        return timeOffset_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : timeOffset_;
      } else {
        return timeOffsetBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Time-offset, relative to the beginning of the video, corresponding to the
     * video frame for this location.
     * </pre>
     *
     * <code>.google.protobuf.Duration time_offset = 1;</code>
     */
    public Builder setTimeOffset(com.google.protobuf.Duration value) {
      if (timeOffsetBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        timeOffset_ = value;
        onChanged();
      } else {
        timeOffsetBuilder_.setMessage(value);
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * Time-offset, relative to the beginning of the video, corresponding to the
     * video frame for this location.
     * </pre>
     *
     * <code>.google.protobuf.Duration time_offset = 1;</code>
     */
    public Builder setTimeOffset(com.google.protobuf.Duration.Builder builderForValue) {
      if (timeOffsetBuilder_ == null) {
        timeOffset_ = builderForValue.build();
        onChanged();
      } else {
        timeOffsetBuilder_.setMessage(builderForValue.build());
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * Time-offset, relative to the beginning of the video, corresponding to the
     * video frame for this location.
     * </pre>
     *
     * <code>.google.protobuf.Duration time_offset = 1;</code>
     */
    public Builder mergeTimeOffset(com.google.protobuf.Duration value) {
      if (timeOffsetBuilder_ == null) {
        if (timeOffset_ != null) {
          timeOffset_ =
              com.google.protobuf.Duration.newBuilder(timeOffset_).mergeFrom(value).buildPartial();
        } else {
          timeOffset_ = value;
        }
        onChanged();
      } else {
        timeOffsetBuilder_.mergeFrom(value);
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * Time-offset, relative to the beginning of the video, corresponding to the
     * video frame for this location.
     * </pre>
     *
     * <code>.google.protobuf.Duration time_offset = 1;</code>
     */
    public Builder clearTimeOffset() {
      if (timeOffsetBuilder_ == null) {
        timeOffset_ = null;
        onChanged();
      } else {
        timeOffset_ = null;
        timeOffsetBuilder_ = null;
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * Time-offset, relative to the beginning of the video, corresponding to the
     * video frame for this location.
     * </pre>
     *
     * <code>.google.protobuf.Duration time_offset = 1;</code>
     */
    public com.google.protobuf.Duration.Builder getTimeOffsetBuilder() {
      // Getting a builder marks the field dirty.
      onChanged();
      return getTimeOffsetFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Time-offset, relative to the beginning of the video, corresponding to the
     * video frame for this location.
     * </pre>
     *
     * <code>.google.protobuf.Duration time_offset = 1;</code>
     */
    public com.google.protobuf.DurationOrBuilder getTimeOffsetOrBuilder() {
      if (timeOffsetBuilder_ != null) {
        return timeOffsetBuilder_.getMessageOrBuilder();
      } else {
        return timeOffset_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : timeOffset_;
      }
    }
    /**
     *
     *
     * <pre>
     * Time-offset, relative to the beginning of the video, corresponding to the
     * video frame for this location.
     * </pre>
     *
     * <code>.google.protobuf.Duration time_offset = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        getTimeOffsetFieldBuilder() {
      if (timeOffsetBuilder_ == null) {
        timeOffsetBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Duration,
                com.google.protobuf.Duration.Builder,
                com.google.protobuf.DurationOrBuilder>(
                getTimeOffset(), getParentForChildren(), isClean());
        // Once a field builder exists it owns the value; drop the plain field.
        timeOffset_ = null;
      }
      return timeOffsetBuilder_;
    }

    private float confidence_;
    /**
     *
     *
     * <pre>
     * Confidence that the label is accurate. Range: [0, 1].
     * </pre>
     *
     * <code>float confidence = 2;</code>
     *
     * @return The confidence.
     */
    @java.lang.Override
    public float getConfidence() {
      return confidence_;
    }
    /**
     *
     *
     * <pre>
     * Confidence that the label is accurate. Range: [0, 1].
     * </pre>
     *
     * <code>float confidence = 2;</code>
     *
     * @param value The confidence to set.
     * @return This builder for chaining.
     */
    public Builder setConfidence(float value) {

      confidence_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Confidence that the label is accurate. Range: [0, 1].
     * </pre>
     *
     * <code>float confidence = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearConfidence() {

      confidence_ = 0F;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1p1beta1.LabelFrame)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p1beta1.LabelFrame)
  private static final com.google.cloud.videointelligence.v1p1beta1.LabelFrame DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1p1beta1.LabelFrame();
  }

  public static com.google.cloud.videointelligence.v1p1beta1.LabelFrame getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<LabelFrame> PARSER =
      new com.google.protobuf.AbstractParser<LabelFrame>() {
        @java.lang.Override
        public LabelFrame parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new LabelFrame(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<LabelFrame> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<LabelFrame> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.videointelligence.v1p1beta1.LabelFrame getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/** * Copyright (C) 2006 Google Inc. * Copyright (C) 2014 John Leacox * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.leacox.dagger.servlet; import com.google.common.annotations.VisibleForTesting; import org.slf4j.LoggerFactory; import javax.inject.Inject; import javax.inject.Singleton; import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.lang.ref.WeakReference; /** * <p/> * Apply this filter in web.xml above all other filters (typically), to all requests where you plan * to use servlet scopes. This is also needed in order to dispatch requests to injectable filters * and servlets: * <pre> * &lt;filter&gt; * &lt;filter-name&gt;daggerFilter&lt;/filter-name&gt; * &lt;filter-class&gt;<b>com.leacox.dagger.servlet.DaggerFilter</b>&lt;/filter-class&gt; * &lt;/filter&gt; * * &lt;filter-mapping&gt; * &lt;filter-name&gt;daggerFilter&lt;/filter-name&gt; * &lt;url-pattern&gt;/*&lt;/url-pattern&gt; * &lt;/filter-mapping&gt; * </pre> * * This filter must appear before every filter that makes use of Dagger injection or servlet * scopes functionality. 
Typically, you will only register this filter in web.xml and register * any other filters (and servlets) using a {@link DaggerServletContextListener}. * * @author crazybob@google.com (Bob Lee) * @author dhanji@gmail.com (Dhanji R. Prasanna) * @author John Leacox */ @Singleton public class DaggerFilter implements Filter { static ThreadLocal<Context> localContext = new ThreadLocal<Context>(); static volatile FilterPipeline pipeline = new DefaultFilterPipeline(); private static volatile WeakReference<ServletContext> servletContext = new WeakReference<ServletContext>(null); private static final String MULTIPLE_INJECTORS_WARNING = "Multiple Servlet object graphs detected. This is a warning " + "indicating that you have more than one " + DaggerFilter.class.getSimpleName() + " running " + "in your web application. If this is deliberate, you may safely " + "ignore this message. If this is NOT deliberate however, " + "your application may not work as expected."; // Default constructor needed for container managed construction public DaggerFilter() { } // We allow both the static and dynamic versions of the pipeline to exist. // TODO: I don't think this ever gets used. Injection happens via the constructor instead. @Inject FilterPipeline injectedPipeline; @Inject DaggerFilter(FilterPipeline pipeline) { // This can happen if you create many injectors and they all have their own // servlet module. This is legal, caveat a small warning. if (DaggerFilter.pipeline instanceof ManagedFilterPipeline) { LoggerFactory.getLogger(DaggerFilter.class).warn(MULTIPLE_INJECTORS_WARNING); } // We overwrite the default pipeline DaggerFilter.pipeline = pipeline; } @VisibleForTesting static void reset() { pipeline = new DefaultFilterPipeline(); localContext.remove(); } private FilterPipeline getPipeline() { return injectedPipeline != null ? 
injectedPipeline : pipeline; } @Override public void doFilter(final ServletRequest servletRequest, final ServletResponse servletResponse, final FilterChain filterChain) throws IOException, ServletException { Context previous = localContext.get(); // Prefer the injected pipeline, but fall back on the static one for web.xml users. final FilterPipeline filterPipeline = getPipeline(); try { localContext.set(new Context((HttpServletRequest) servletRequest, (HttpServletResponse) servletResponse)); //dispatch across the servlet pipeline, ensuring web.xml's filterchain is honored filterPipeline.dispatch(servletRequest, servletResponse, filterChain); } finally { localContext.set(previous); } } // TODO: When ScopingObjectGraph doesn't need to be in the dagger package anymore these methods can become package private /** * This method should not be used directly. * <p/> * This method is used by {@code ScopingObjectGraph} for providing scoped injections. Since * {@code ScopingObjectGraph} must be in the {@code dagger} package to work with Dagger, this method must be * publicly accessible. If {@code ScopingObjectGraph} can be moved to the {@code com.leacox.dagger.servlet} package * this method can become package private. */ @VisibleForTesting public static HttpServletRequest getRequest() { Context context = getContext(); if (context == null) { return null; } return context.getRequest(); } /** * This method should not be used directly. * <p/> * This method is used by {@code ScopingObjectGraph} for providing scoped injections. Since * {@code ScopingObjectGraph} must be in the {@code dagger} package to work with Dagger, this method must be * publicly accessible. If {@code ScopingObjectGraph} can be moved to the {@code com.leacox.dagger.servlet} package * this method can become package private. 
*/ @VisibleForTesting public static HttpServletResponse getResponse() { Context context = getContext(); if (context == null) { return null; } return context.getResponse(); } /** * This method should not be used directly. * <p/> * This method is used by {@code ScopingObjectGraph} for providing scoped injections. Since * {@code ScopingObjectGraph} must be in the {@code dagger} package to work with Dagger, this method must be * publicly accessible. If {@code ScopingObjectGraph} can be moved to the {@code com.leacox.dagger.servlet} package * this method can become package private. */ @VisibleForTesting public static ServletContext getServletContext() { return servletContext.get(); } private static Context getContext() { return localContext.get(); } @Override public void init(FilterConfig filterConfig) throws ServletException { ServletContext servletContext = filterConfig.getServletContext(); // Store servlet context in a weakreference, for injection DaggerFilter.servletContext = new WeakReference<ServletContext>(servletContext); // In the default pipeline, this is a noop. However, if replaced // by a managed pipeline, a lazy init will be triggered the first time // dispatch occurs. FilterPipeline filterPipeline = getPipeline(); filterPipeline.initPipeline(servletContext); } @Override public void destroy() { try { // Destroy all registered filters & servlets in that order FilterPipeline filterPipeline = getPipeline(); filterPipeline.destroyPipeline(); } finally { reset(); servletContext.clear(); } } static class Context { final HttpServletRequest request; final HttpServletResponse response; Context(HttpServletRequest request, HttpServletResponse response) { this.request = request; this.response = response; } HttpServletRequest getRequest() { return request; } HttpServletResponse getResponse() { return response; } } }
package com.sensorberg.di; import android.app.AlarmManager; import android.app.Application; import android.app.NotificationManager; import android.content.Context; import android.content.SharedPreferences; import android.location.LocationManager; import android.support.annotation.Nullable; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.sensorberg.sdk.internal.AndroidBluetoothPlatform; import com.sensorberg.sdk.internal.AndroidClock; import com.sensorberg.sdk.internal.AndroidFileManager; import com.sensorberg.sdk.internal.AndroidHandlerManager; import com.sensorberg.sdk.internal.AndroidPlatform; import com.sensorberg.sdk.internal.AndroidPlatformIdentifier; import com.sensorberg.sdk.internal.AndroidServiceScheduler; import com.sensorberg.sdk.internal.PermissionChecker; import com.sensorberg.sdk.internal.PersistentIntegerCounter; import com.sensorberg.sdk.internal.interfaces.BluetoothPlatform; import com.sensorberg.sdk.internal.interfaces.Clock; import com.sensorberg.sdk.internal.interfaces.FileManager; import com.sensorberg.sdk.internal.interfaces.HandlerManager; import com.sensorberg.sdk.internal.interfaces.Platform; import com.sensorberg.sdk.internal.interfaces.PlatformIdentifier; import com.sensorberg.sdk.internal.interfaces.ServiceScheduler; import com.sensorberg.sdk.internal.transport.RetrofitApiServiceImpl; import com.sensorberg.sdk.internal.transport.RetrofitApiTransport; import com.sensorberg.sdk.internal.transport.interfaces.Transport; import com.sensorberg.sdk.location.GeofenceManager; import com.sensorberg.sdk.location.LocationHelper; import com.sensorberg.sdk.location.PlayServiceManager; import com.sensorberg.sdk.model.ISO8601TypeAdapter; import com.sensorberg.sdk.scanner.BeaconActionHistoryPublisher; import com.sensorberg.sdk.settings.DefaultSettings; import com.sensorberg.sdk.settings.SettingsManager; import com.sensorberg.utils.PlayServicesUtils; import java.util.Date; import javax.inject.Named; import 
javax.inject.Singleton; import dagger.Module; import dagger.Provides; @Module public class ProvidersModule { private static final String SENSORBERG_PREFERENCE_IDENTIFIER = "com.sensorberg.preferences"; private final Application application; public ProvidersModule(Application app) { application = app; } @Provides @Singleton public Context provideApplicationContext() { return application; } @Provides @Singleton public SharedPreferences provideSettingsSharedPrefs(Context context) { return context.getSharedPreferences(SENSORBERG_PREFERENCE_IDENTIFIER, Context.MODE_PRIVATE); } @Provides @Singleton public NotificationManager provideNotificationManager(Context context) { return (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE); } @Provides @Singleton public LocationManager provideLocationManager(Context context) { return (LocationManager) context.getSystemService(Context.LOCATION_SERVICE); } @Provides @Singleton public LocationHelper provideLocationHelper(LocationManager locationManager, @Named("realSettingsManager") SettingsManager settingsManager) { return new LocationHelper(locationManager, settingsManager); } @Provides @Singleton @Nullable public PlayServiceManager providePlayServiceManager(Context context, LocationHelper location, PermissionChecker checker) { if (PlayServicesUtils.isGooglePlayServicesAvailable(context)) { return new PlayServiceManager(context, location, checker); } else { return null; } } @Provides @Singleton public Clock provideRealClock() { return new AndroidClock(); } @Provides @Singleton public FileManager provideFileManager(Context context) { return new AndroidFileManager(context); } @Provides @Singleton public PermissionChecker providePermissionChecker(Context context) { return new PermissionChecker(context); } @Provides @Singleton public PersistentIntegerCounter providePersistentIntegerCounter(SharedPreferences sharedPreferences) { return new PersistentIntegerCounter(sharedPreferences); } @Provides @Singleton public 
AlarmManager provideAlarmManager(Context context) { return (AlarmManager) context.getSystemService(Context.ALARM_SERVICE); } @Provides @Singleton public ServiceScheduler provideIntentScheduler(Context context, AlarmManager alarmManager, Clock clock, PersistentIntegerCounter persistentIntegerCounter) { return new AndroidServiceScheduler(context, alarmManager, clock, persistentIntegerCounter, DefaultSettings.DEFAULT_MESSAGE_DELAY_WINDOW_LENGTH); } @Provides @Named("realHandlerManager") @Singleton public HandlerManager provideAndroidHandlerManager() { return new AndroidHandlerManager(); } @Provides @Named("androidPlatformIdentifier") @Singleton public PlatformIdentifier provideAndroidPlatformIdentifier(Context ctx, SharedPreferences settingsSharedPrefs) { return new AndroidPlatformIdentifier(ctx, settingsSharedPrefs); } @Provides @Named("androidBluetoothPlatform") @Singleton public BluetoothPlatform provideAndroidBluetoothPlatform(Context context) { return new AndroidBluetoothPlatform(context); } @Provides @Named("realTransport") @Singleton public Transport provideRealTransport(@Named("realRetrofitApiService") RetrofitApiServiceImpl retrofitApiService, Clock clock, SharedPreferences sharedPreferences, Gson gson) { return new RetrofitApiTransport(retrofitApiService, clock, sharedPreferences, gson); } @Provides @Singleton public Gson provideGson() { return new GsonBuilder() .excludeFieldsWithoutExposeAnnotation() .registerTypeAdapter(Date.class, ISO8601TypeAdapter.DATE_ADAPTER) .setLenient() .create(); } @Provides @Named("realBeaconActionHistoryPublisher") @Singleton public BeaconActionHistoryPublisher provideBeaconActionHistoryPublisher( @Named("realTransport") Transport transport, Clock clock, @Named("realHandlerManager") HandlerManager handlerManager, SharedPreferences sharedPreferences, Gson gson) { return new BeaconActionHistoryPublisher(transport, clock, handlerManager, sharedPreferences, gson); } @Provides @Named("realSettingsManager") @Singleton public 
SettingsManager provideSettingsManager(@Named("realTransport") Transport transport, SharedPreferences sharedPreferences) { return new SettingsManager(transport, sharedPreferences); } @Provides @Named("realRetrofitApiService") @Singleton public RetrofitApiServiceImpl provideRealRetrofitApiService(Context context, Gson gson, @Named("androidPlatformIdentifier") PlatformIdentifier platformIdentifier) { return new RetrofitApiServiceImpl(context.getCacheDir(), gson, platformIdentifier, RetrofitApiTransport.RESOLVER_BASE_URL); } @Provides @Named("androidPlatform") @Singleton public Platform provideAndroidPlatform(Context context) { return new AndroidPlatform(context); } @Provides @Singleton @Nullable public GeofenceManager provideGeofenceManager(Context context, @Named("realSettingsManager") SettingsManager settings, SharedPreferences preferences, Gson gson, @Nullable PlayServiceManager play) { if (play != null) { return new GeofenceManager(context, settings, preferences, gson, play); } else { return null; } } }
/* The contents of this file are subject to the license and copyright terms * detailed in the license directory at the root of the source tree (also * available online at http://fedora-commons.org/license/). */ package org.fcrepo.utilities; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.PrintStream; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; /** * Utility class for executing commands and sending the command's output to a * given OutputStream. * * @author Edwin Shin */ public class ExecUtility { public static Process exec(String cmd) { return exec(cmd, null, System.out, null); } public static Process exec(String[] cmd) { return exec(cmd, null, System.out, null); } public static Process exec(String cmd, OutputStream out) { return exec(cmd, null, out, null); } public static Process exec(String[] cmd, OutputStream out) { return exec(cmd, null, out, null); } public static Process exec(String cmd, File dir) { return exec(cmd, dir, System.out, null); } public static Process exec(String[] cmd, File dir) { return exec(cmd, dir, System.out, null); } public static Process exec(String cmd, File dir, OutputStream out, OutputStream err) { return exec(new String[] {cmd}, dir, out, err); } public static Process exec(String[] cmd, File dir, OutputStream out, OutputStream err) { Process cp = null; try { if (dir == null) { cp = Runtime.getRuntime().exec(cmd, getEnv()); } else { cp = Runtime.getRuntime().exec(cmd, getEnv(), dir); } // Print stdio of cmd if (out != null) { PrintStream pout = new PrintStream(out); PrintStream perr = null; if (err != null) { if (out == err) { perr = pout; } else { perr = new PrintStream(err); } } else { perr = System.err; } String err_line; String in_line; BufferedReader input = new BufferedReader(new InputStreamReader(cp .getInputStream())); BufferedReader error = new BufferedReader(new InputStreamReader(cp 
.getErrorStream())); while (true) { try { cp.exitValue(); break; // process exited, } catch (IllegalThreadStateException e) { // process has not terminated check for output if (error.ready()) { err_line = error.readLine(); perr.println(err_line); } else if (input.ready()) { in_line = input.readLine(); pout.println(in_line); } else { try { Thread.sleep(10); } catch (InterruptedException ie) { // don't worry, be happy } } } } // Read any remaining buffered output from the process // after it terminates while (error.ready()) { err_line = error.readLine(); perr.println(err_line); } while (input.ready()) { in_line = input.readLine(); pout.println(in_line); } input.close(); error.close(); } } catch (IOException e) { e.printStackTrace(); } return cp; } public static Process execCommandLineUtility(String cmd) { return execCommandLineUtility(cmd, System.out, null); } public static Process execCommandLineUtility(String[] cmd) { return execCommandLineUtility(cmd, System.out, null); } public static Process execCommandLineUtility(String cmd, OutputStream out, OutputStream err) { return execCommandLineUtility(new String[] {cmd}, out, err); } public static Process execCommandLineUtility(String[] cmd, OutputStream out, OutputStream err) { String osName = System.getProperty("os.name"); if (osName.startsWith("Windows")) { String[] temp = new String[cmd.length + 2]; System.arraycopy(cmd, 0, temp, 2, cmd.length); temp[0] = "cmd.exe"; temp[1] = "/C"; cmd = temp; } return exec(cmd, null, out, err); } public static Process altExec(String cmd) { return altExec(new String[] {cmd}); } public static Process altExec(String[] cmd) { int result; // prepare buffers for process output and error streams StringBuffer err = new StringBuffer(); StringBuffer out = new StringBuffer(); try { Process proc = Runtime.getRuntime().exec(cmd, getEnv()); //create thread for reading inputStream (process' stdout) StreamReaderThread outThread = new StreamReaderThread(proc.getInputStream(), out); //create thread 
for reading errorStream (process' stderr) StreamReaderThread errThread = new StreamReaderThread(proc.getErrorStream(), err); //start both threads outThread.start(); errThread.start(); //wait for process to end result = proc.waitFor(); //finish reading whatever's left in the buffers outThread.join(); errThread.join(); if (result != 0) { System.out.println("Process " + cmd + " returned non-zero value:" + result); System.out.println("Process output:\n" + out.toString()); System.out.println("Process error:\n" + err.toString()); } else { System.out.println("Process " + cmd + " executed successfully"); System.out.println("Process output:\n" + out.toString()); System.out.println("Process error:\n" + err.toString()); } } catch (Exception e) { System.out.println("Error executing " + cmd); e.printStackTrace(); //throw e; } return null; } private static String[] getEnv() { ArrayList<String> fixedEnv = new ArrayList<String>(); Map<String, String> env = new HashMap<String, String>(System.getenv()); if (!env.containsKey("FEDORA_HOME") && System.getProperty("fedora.home") != null) { env.put("FEDORA_HOME", System.getProperty("fedora.home")); } if (!env.containsKey("CATALINA_HOME") && System.getProperty("fedora.home") != null) { env.put("CATALINA_HOME", System.getProperty("fedora.home") + File.separator + "tomcat"); } if (System.getProperty("online") != null) { env.put("ONLINE", System.getProperty("online")); } if (System.getProperty("offline") != null) { env.put("OFFLINE", System.getProperty("offline")); } for (String envName : env.keySet()) { fixedEnv.add(String.format("%s=%s", envName, env.get(envName))); } return fixedEnv.toArray(new String[0]); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pig.test;

import static org.junit.Assert.assertEquals;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.text.DecimalFormat;
import java.util.Iterator;
import java.util.List;
import java.util.Random;

import org.apache.pig.PigServer;
import org.apache.pig.data.Tuple;
import org.apache.pig.test.utils.TestHelper;
import org.junit.Test;

/**
 * Tests nested (inner) plans inside FOREACH in local mode: inner ORDER BY,
 * inner LIMIT, and nested CROSS over two and three cogrouped relations.
 */
public class TestForEachNestedPlanLocal {

    // Shared Pig server running in local execution mode for all tests.
    private PigServer pig;

    public TestForEachNestedPlanLocal() throws Throwable {
        pig = new PigServer(Util.getLocalTestMode());
    }

    // Each test variant is run once without and once with injected nulls.
    Boolean[] nullFlags = new Boolean[]{ false, true };

    @Test
    public void testInnerOrderBy() throws Exception {
        for (int i = 0; i < nullFlags.length; i++) {
            System.err.println("Running testInnerOrderBy with nullFlags set to :"
                    + nullFlags[i]);
            File tmpFile = genDataSetFile1(nullFlags[i]);
            pig.registerQuery("a = load '"
                    + Util.generateURI(tmpFile.toString(), pig.getPigContext()) + "'; ");
            pig.registerQuery("b = group a by $0; ");
            pig.registerQuery("c = foreach b { "
                    + " c1 = order $1 by *; "
                    + " generate flatten(c1); "
                    + "};");
            Iterator<Tuple> it = pig.openIterator("c");
            Tuple t = null;
            int count = 0;
            while (it.hasNext()) {
                t = it.next();
                System.out.println(count + ":" + t);
                count++;
            }
            // flatten of the ordered inner bag must return all 30 input rows
            assertEquals(count, 30);
        }
    }

    @Test
    public void testInnerLimit() throws Exception {
        File tmpFile = genDataSetFileOneGroup();
        pig.registerQuery("a = load '"
                + Util.generateURI(tmpFile.toString(), pig.getPigContext()) + "'; ");
        pig.registerQuery("b = group a by $0; ");
        pig.registerQuery("c = foreach b { "
                + " c1 = limit $1 5; "
                + " generate COUNT(c1); "
                + "};");
        Iterator<Tuple> it = pig.openIterator("c");
        // groups have 10, 5 and 3 members; LIMIT 5 caps the counts at 5
        List<Tuple> expected = Util.getTuplesFromConstantTupleStrings(
                new String[] {"(5L)", "(5L)", "(3L)" });
        Util.checkQueryOutputsAfterSort(it, expected);
    }

    @Test
    public void testNestedCrossTwoRelations() throws Exception {
        File[] tmpFiles = generateDataSetFilesForNestedCross();
        List<Tuple> expectedResults = Util.getTuplesFromConstantTupleStringAsByteArray(new String[] {
                "({('user1','usa','user1','usa','10'),('user1','usa','user1','usa','30'),('user1','usa','user1','china','20')})",
                "({('user2','usa','user2','usa','20'),('user2','usa','user2','usa','20')})",
                "({('user3','singapore','user3','usa','10'),('user3','singapore','user3','singapore','20')})",
                "({})" });
        pig.registerQuery("user = load '"
                + Util.encodeEscape(Util.generateURI(tmpFiles[0].toString(), pig.getPigContext()))
                + "' as (uid, region);");
        pig.registerQuery("session = load '"
                + Util.encodeEscape(Util.generateURI(tmpFiles[1].toString(), pig.getPigContext()))
                + "' as (uid, region, duration);");
        pig.registerQuery("C = cogroup user by uid, session by uid;");
        pig.registerQuery("D = foreach C {"
                + "crossed = cross user, session;"
                + "generate crossed;"
                + "}");
        Iterator<Tuple> expectedItr = expectedResults.iterator();
        Iterator<Tuple> actualItr = pig.openIterator("D");
        while (expectedItr.hasNext() && actualItr.hasNext()) {
            Tuple expectedTuple = expectedItr.next();
            Tuple actualTuple = actualItr.next();
            assertEquals(expectedTuple, actualTuple);
        }
        // both iterators must be exhausted together (same result count)
        assertEquals(expectedItr.hasNext(), actualItr.hasNext());
    }

    @Test
    public void testNestedCrossTwoRelationsComplex() throws Exception {
        File[] tmpFiles = generateDataSetFilesForNestedCross();
        List<Tuple> expectedResults = Util.getTuplesFromConstantTupleStringAsByteArray(new String[] {
                "({('user1','usa','user1','usa','10'),('user1','usa','user1','usa','30')})",
                "({('user2','usa','user2','usa','20')})",
                "({('user3','singapore','user3','singapore','20')})",
                "({})" });
        pig.registerQuery("user = load '"
                + Util.encodeEscape(Util.generateURI(tmpFiles[0].toString(), pig.getPigContext()))
                + "' as (uid, region);");
        pig.registerQuery("session = load '"
                + Util.encodeEscape(Util.generateURI(tmpFiles[1].toString(), pig.getPigContext()))
                + "' as (uid, region, duration);");
        pig.registerQuery("C = cogroup user by uid, session by uid;");
        // nested DISTINCT + CROSS + FILTER inside a single FOREACH
        pig.registerQuery("D = foreach C {"
                + "distinct_session = distinct session;"
                + "crossed = cross user, distinct_session;"
                + "filtered = filter crossed by user::region == distinct_session::region;"
                + "generate filtered;"
                + "}");
        Iterator<Tuple> expectedItr = expectedResults.iterator();
        Iterator<Tuple> actualItr = pig.openIterator("D");
        while (expectedItr.hasNext() && actualItr.hasNext()) {
            Tuple expectedTuple = expectedItr.next();
            Tuple actualTuple = actualItr.next();
            assertEquals(expectedTuple, actualTuple);
        }
        assertEquals(expectedItr.hasNext(), actualItr.hasNext());
    }

    @Test
    public void testNestedCrossThreeRelations() throws Exception {
        File[] tmpFiles = generateDataSetFilesForNestedCross();
        List<Tuple> expectedResults = Util.getTuplesFromConstantTupleStringAsByteArray(new String[] {
                "({('user1','usa','user1','usa','10','user1','admin','male'),('user1','usa','user1','usa','30','user1','admin','male'),('user1','usa','user1','china','20','user1','admin','male')})",
                "({('user2','usa','user2','usa','20','user2','guest','male'),('user2','usa','user2','usa','20','user2','guest','male')})",
                "({('user3','singapore','user3','usa','10','user3','user','female'),('user3','singapore','user3','singapore','20','user3','user','female')})",
                "({})" });
        pig.registerQuery("user = load '"
                + Util.encodeEscape(Util.generateURI(tmpFiles[0].toString(), pig.getPigContext()))
                + "' as (uid, region);");
        pig.registerQuery("session = load '"
                + Util.encodeEscape(Util.generateURI(tmpFiles[1].toString(), pig.getPigContext()))
                + "' as (uid, region, duration);");
        pig.registerQuery("profile = load '"
                + Util.encodeEscape(Util.generateURI(tmpFiles[2].toString(), pig.getPigContext()))
                + "' as (uid, role, gender);");
        pig.registerQuery("C = cogroup user by uid, session by uid, profile by uid;");
        pig.registerQuery("D = foreach C {"
                + "crossed = cross user, session, profile;"
                + "generate crossed;"
                + "}");
        Iterator<Tuple> expectedItr = expectedResults.iterator();
        Iterator<Tuple> actualItr = pig.openIterator("D");
        while (expectedItr.hasNext() && actualItr.hasNext()) {
            Tuple expectedTuple = expectedItr.next();
            Tuple actualTuple = actualItr.next();
            assertEquals(expectedTuple, actualTuple);
        }
        assertEquals(expectedItr.hasNext(), actualItr.hasNext());
    }

    /*
    @Test
    public void testInnerDistinct() throws Exception {
        File tmpFile = genDataSetFile1() ;
        pig.registerQuery("a = load 'file:" + tmpFile + "'; ") ;
        pig.registerQuery("b = group a by $0; ");
        pig.registerQuery("c = foreach b { "
                        + " c1 = distinct $1 ; "
                        +  " generate flatten(c1); "
                        + "};") ;
        Iterator<Tuple> it = pig.openIterator("c");
        Tuple t = null ;
        int count = 0 ;
        while(it.hasNext()) {
            t = it.next() ;
            System.out.println(count + ":" + t) ;
            count++ ;
        }
        assertEquals(count, 15);
    }
    */

    /***
     * For generating a sample dataset
     */
    // 30 rows of (key, value); keys cycle over 10 formatted values and,
    // when withNulls is set, roughly 30% of keys are replaced by "".
    private File genDataSetFile1(boolean withNulls) throws IOException {
        int dataLength = 30;
        String[][] data = new String[dataLength][];
        DecimalFormat formatter = new DecimalFormat("0000000");
        Random r = new Random();
        for (int i = 0; i < dataLength; i++) {
            data[i] = new String[2];
            // inject nulls randomly (~30% of rows when withNulls is true)
            if (withNulls && r.nextInt(dataLength) < 0.3 * dataLength) {
                data[i][0] = "";
            } else {
                data[i][0] = formatter.format(i % 10);
            }
            data[i][1] = formatter.format((dataLength - i) / 2);
        }
        return TestHelper.createTempFile(data);
    }

    // Tab-separated (group, member) rows: groups of size 10, 5 and 3,
    // matching the expected counts in testInnerLimit.
    private File genDataSetFileOneGroup() throws IOException {
        File fp1 = File.createTempFile("test", "txt");
        PrintStream ps = new PrintStream(new FileOutputStream(fp1));
        ps.println("lost\tjack");
        ps.println("lost\tkate");
        ps.println("lost\tsawyer");
        ps.println("lost\tdesmond");
        ps.println("lost\thurley");
        ps.println("lost\tlocke");
        ps.println("lost\tsun");
        ps.println("lost\tcharlie");
        ps.println("lost\tjin");
        ps.println("lost\tben");
        ps.println("lotr\tfrodo");
        ps.println("lotr\tsam");
        ps.println("lotr\tmerry");
        ps.println("lotr\tpippen");
        ps.println("lotr\tbilbo");
        ps.println("3stooges\tlarry");
        ps.println("3stooges\tmoe");
        ps.println("3stooges\tcurly");
        ps.close();
        return fp1;
    }

    // user/session/profile fixture files for the nested-cross tests; user4 has
    // no sessions, producing the empty "({})" group in the expected results.
    private File[] generateDataSetFilesForNestedCross() throws IOException {
        File userFile = File.createTempFile("user", "txt");
        PrintStream userPS = new PrintStream(new FileOutputStream(userFile));
        userPS.println("user1\tusa");
        userPS.println("user2\tusa");
        userPS.println("user3\tsingapore");
        userPS.println("user4\tchina");
        userPS.close();

        File sessionFile = File.createTempFile("session", "txt");
        PrintStream sessionPS = new PrintStream(new FileOutputStream(sessionFile));
        sessionPS.println("user3\tusa\t10");
        sessionPS.println("user3\tsingapore\t20");
        sessionPS.println("user2\tusa\t20");
        sessionPS.println("user2\tusa\t20");
        sessionPS.println("user1\tusa\t10");
        sessionPS.println("user1\tusa\t30");
        sessionPS.println("user1\tchina\t20");
        sessionPS.close();

        File profileFile = File.createTempFile("profile", "txt");
        PrintStream profilePS = new PrintStream(new FileOutputStream(profileFile));
        profilePS.println("user1\tadmin\tmale");
        profilePS.println("user2\tguest\tmale");
        profilePS.println("user3\tuser\tfemale");
        profilePS.println("user4\tuser\tfemale");
        profilePS.close();

        return new File[] { userFile, sessionFile, profileFile };
    }
}
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.coprocessor;

import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
import static org.apache.hadoop.hbase.HBaseTestingUtility.fam3;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.regionserver.ScanType;
import org.apache.hadoop.hbase.regionserver.SplitTransaction;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.util.PairOfSameType;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;

/**
 * Exercises the region-observer coprocessor lifecycle (start/stop, open/close,
 * flush, compact, split), per-class shared data, and scanner wrapping via
 * postScannerOpen.
 */
@Category(SmallTests.class)
public class TestCoprocessorInterface {
  @Rule public TestName name = new TestName();
  static final Log LOG = LogFactory.getLog(TestCoprocessorInterface.class);
  private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU();
  static final Path DIR = TEST_UTIL.getDataTestDir();

  /**
   * Pass-through RegionScanner wrapper returned by CoprocessorImpl's
   * postScannerOpen; tests assert the wrapping happened (HBASE-4197).
   */
  private static class CustomScanner implements RegionScanner {

    private RegionScanner delegate;

    public CustomScanner(RegionScanner delegate) {
      this.delegate = delegate;
    }

    @Override
    public boolean next(List<Cell> results) throws IOException {
      return delegate.next(results);
    }

    @Override
    public boolean next(List<Cell> result, int limit) throws IOException {
      return delegate.next(result, limit);
    }

    @Override
    public boolean nextRaw(List<Cell> result) throws IOException {
      return delegate.nextRaw(result);
    }

    @Override
    public boolean nextRaw(List<Cell> result, int limit) throws IOException {
      return delegate.nextRaw(result, limit);
    }

    @Override
    public void close() throws IOException {
      delegate.close();
    }

    @Override
    public HRegionInfo getRegionInfo() {
      return delegate.getRegionInfo();
    }

    @Override
    public boolean isFilterDone() throws IOException {
      return delegate.isFilterDone();
    }

    // NOTE: does not delegate; this wrapper never supports reseek.
    @Override
    public boolean reseek(byte[] row) throws IOException {
      return false;
    }

    @Override
    public long getMaxResultSize() {
      return delegate.getMaxResultSize();
    }

    @Override
    public long getMvccReadPoint() {
      return delegate.getMvccReadPoint();
    }
  }

  /**
   * Observer that records every lifecycle callback in a boolean flag so tests
   * can verify each hook fired, and publishes an entry in the per-class
   * shared-data map.
   */
  public static class CoprocessorImpl extends BaseRegionObserver {

    private boolean startCalled;
    private boolean stopCalled;
    private boolean preOpenCalled;
    private boolean postOpenCalled;
    private boolean preCloseCalled;
    private boolean postCloseCalled;
    private boolean preCompactCalled;
    private boolean postCompactCalled;
    private boolean preFlushCalled;
    private boolean postFlushCalled;
    private boolean preSplitCalled;
    private boolean postSplitCalled;
    private boolean preSplitWithSplitRowCalled;
    private ConcurrentMap<String, Object> sharedData;

    @Override
    public void start(CoprocessorEnvironment e) {
      sharedData = ((RegionCoprocessorEnvironment)e).getSharedData();
      // a distinct Object per invocation, so tests can tell by reference
      // whether the shared map survived or was recreated
      sharedData.putIfAbsent("test1", new Object());
      startCalled = true;
    }

    @Override
    public void stop(CoprocessorEnvironment e) {
      sharedData = null;
      stopCalled = true;
    }

    @Override
    public void preOpen(ObserverContext<RegionCoprocessorEnvironment> e) {
      preOpenCalled = true;
    }

    @Override
    public void postOpen(ObserverContext<RegionCoprocessorEnvironment> e) {
      postOpenCalled = true;
    }

    @Override
    public void preClose(ObserverContext<RegionCoprocessorEnvironment> e, boolean abortRequested) {
      preCloseCalled = true;
    }

    @Override
    public void postClose(ObserverContext<RegionCoprocessorEnvironment> e, boolean abortRequested) {
      postCloseCalled = true;
    }

    @Override
    public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> e,
        Store store, InternalScanner scanner, ScanType scanType) {
      preCompactCalled = true;
      return scanner;
    }

    @Override
    public void postCompact(ObserverContext<RegionCoprocessorEnvironment> e,
        Store store, StoreFile resultFile) {
      postCompactCalled = true;
    }

    @Override
    public void preFlush(ObserverContext<RegionCoprocessorEnvironment> e) {
      preFlushCalled = true;
    }

    @Override
    public void postFlush(ObserverContext<RegionCoprocessorEnvironment> e) {
      postFlushCalled = true;
    }

    @Override
    public void preSplit(ObserverContext<RegionCoprocessorEnvironment> e) {
      preSplitCalled = true;
    }

    @Override
    public void preSplit(ObserverContext<RegionCoprocessorEnvironment> c,
        byte[] splitRow) throws IOException {
      preSplitWithSplitRowCalled = true;
    }

    @Override
    public void postSplit(ObserverContext<RegionCoprocessorEnvironment> e,
        HRegion l, HRegion r) {
      postSplitCalled = true;
    }

    // Wraps every opened scanner; asserted via `scanner instanceof CustomScanner`.
    @Override
    public RegionScanner postScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> e,
        final Scan scan, final RegionScanner s) throws IOException {
      return new CustomScanner(s);
    }

    boolean wasStarted() {
      return startCalled;
    }

    boolean wasStopped() {
      return stopCalled;
    }

    boolean wasOpened() {
      return (preOpenCalled && postOpenCalled);
    }

    boolean wasClosed() {
      return (preCloseCalled && postCloseCalled);
    }

    boolean wasFlushed() {
      return (preFlushCalled && postFlushCalled);
    }

    boolean wasCompacted() {
      return (preCompactCalled && postCompactCalled);
    }

    boolean wasSplit() {
      return (preSplitCalled && postSplitCalled && preSplitWithSplitRowCalled);
    }

    Map<String, Object> getSharedData() {
      return sharedData;
    }
  }

  /**
   * Second observer class with its own shared-data entry; its preGetOp
   * deliberately throws (divide by zero) so tests can make this coprocessor's
   * environment fail and be removed from the host.
   */
  public static class CoprocessorII extends BaseRegionObserver {

    private ConcurrentMap<String, Object> sharedData;

    @Override
    public void start(CoprocessorEnvironment e) {
      sharedData = ((RegionCoprocessorEnvironment)e).getSharedData();
      sharedData.putIfAbsent("test2", new Object());
    }

    @Override
    public void stop(CoprocessorEnvironment e) {
      sharedData = null;
    }

    // Intentional failure injection: 1/0 raises ArithmeticException on every
    // Get, which the host converts to DoNotRetryIOException and then removes
    // this coprocessor (see testSharedData's "have all Environments fail" loop).
    @Override
    public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> e,
        final Get get, final List<Cell> results) throws IOException {
      if (1/0 == 1) {
        e.complete();
      }
    }

    Map<String, Object> getSharedData() {
      return sharedData;
    }
  }

  @Test
  public void testSharedData() throws IOException {
    TableName tableName = TableName.valueOf(name.getMethodName());
    byte [][] families = { fam1, fam2, fam3 };

    Configuration hc = initSplit();
    HRegion region = initHRegion(tableName, name.getMethodName(), hc,
      new Class<?>[]{}, families);

    for (int i = 0; i < 3; i++) {
      HBaseTestCase.addContent(region, fam3);
      region.flushcache();
    }

    region.compactStores();

    byte [] splitRow = region.checkSplit();

    assertNotNull(splitRow);
    HRegion [] regions = split(region, splitRow);
    for (int i = 0; i < regions.length; i++) {
      regions[i] = reopenRegion(regions[i], CoprocessorImpl.class, CoprocessorII.class);
    }
    Coprocessor c = regions[0].getCoprocessorHost().
        findCoprocessor(CoprocessorImpl.class.getName());
    Coprocessor c2 = regions[0].getCoprocessorHost().
        findCoprocessor(CoprocessorII.class.getName());
    Object o = ((CoprocessorImpl)c).getSharedData().get("test1");
    Object o2 = ((CoprocessorII)c2).getSharedData().get("test2");
    assertNotNull(o);
    assertNotNull(o2);
    // the two coprocessor classes get different sharedData maps
    assertFalse(((CoprocessorImpl)c).getSharedData() == ((CoprocessorII)c2).getSharedData());
    for (int i = 1; i < regions.length; i++) {
      c = regions[i].getCoprocessorHost().
          findCoprocessor(CoprocessorImpl.class.getName());
      c2 = regions[i].getCoprocessorHost().
          findCoprocessor(CoprocessorII.class.getName());
      // make sure that all coprocessor of a class have identical sharedDatas
      assertTrue(((CoprocessorImpl)c).getSharedData().get("test1") == o);
      assertTrue(((CoprocessorII)c2).getSharedData().get("test2") == o2);
    }
    // now have all Environments fail
    // (CoprocessorII.preGetOp throws, so each Get aborts and the host removes it)
    for (int i = 0; i < regions.length; i++) {
      try {
        byte [] r = regions[i].getStartKey();
        if (r == null || r.length <= 0) {
          // Its the start row.  Can't ask for null.  Ask for minimal key instead.
          r = new byte [] {0};
        }
        Get g = new Get(r);
        regions[i].get(g);
        fail();
      } catch (org.apache.hadoop.hbase.DoNotRetryIOException xc) {
        // expected: thrown by the failing CoprocessorII environment
      }
      assertNull(regions[i].getCoprocessorHost().
          findCoprocessor(CoprocessorII.class.getName()));
    }
    c = regions[0].getCoprocessorHost().
        findCoprocessor(CoprocessorImpl.class.getName());
    assertTrue(((CoprocessorImpl)c).getSharedData().get("test1") == o);
    c = c2 = null;
    // perform a GC
    System.gc();
    // reopen the region
    region = reopenRegion(regions[0], CoprocessorImpl.class, CoprocessorII.class);
    c = region.getCoprocessorHost().
        findCoprocessor(CoprocessorImpl.class.getName());
    // CPimpl is unaffected, still the same reference
    assertTrue(((CoprocessorImpl)c).getSharedData().get("test1") == o);
    c2 = region.getCoprocessorHost().
        findCoprocessor(CoprocessorII.class.getName());
    // new map and object created, hence the reference is different
    // hence the old entry was indeed removed by the GC and new one has been created
    Object o3 = ((CoprocessorII)c2).getSharedData().get("test2");
    assertFalse(o3 == o2);
  }

  @Test
  public void testCoprocessorInterface() throws IOException {
    TableName tableName = TableName.valueOf(name.getMethodName());
    byte [][] families = { fam1, fam2, fam3 };

    Configuration hc = initSplit();
    HRegion region = initHRegion(tableName, name.getMethodName(), hc,
      new Class<?>[]{CoprocessorImpl.class}, families);
    for (int i = 0; i < 3; i++) {
      HBaseTestCase.addContent(region, fam3);
      region.flushcache();
    }

    region.compactStores();

    byte [] splitRow = region.checkSplit();

    assertNotNull(splitRow);
    HRegion [] regions = split(region, splitRow);
    for (int i = 0; i < regions.length; i++) {
      regions[i] = reopenRegion(regions[i], CoprocessorImpl.class);
    }
    HRegion.closeHRegion(region);
    Coprocessor c = region.getCoprocessorHost().
      findCoprocessor(CoprocessorImpl.class.getName());

    // HBASE-4197
    Scan s = new Scan();
    RegionScanner scanner = regions[0].getCoprocessorHost().postScannerOpen(s,
      regions[0].getScanner(s));
    assertTrue(scanner instanceof CustomScanner);
    // this would throw an exception before HBASE-4197
    scanner.next(new ArrayList<Cell>());

    assertTrue("Coprocessor not started", ((CoprocessorImpl)c).wasStarted());
    assertTrue("Coprocessor not stopped", ((CoprocessorImpl)c).wasStopped());
    assertTrue(((CoprocessorImpl)c).wasOpened());
    assertTrue(((CoprocessorImpl)c).wasClosed());
    assertTrue(((CoprocessorImpl)c).wasFlushed());
    assertTrue(((CoprocessorImpl)c).wasCompacted());
    assertTrue(((CoprocessorImpl)c).wasSplit());

    for (int i = 0; i < regions.length; i++) {
      HRegion.closeHRegion(regions[i]);
      // NOTE(review): looks like this re-reads the parent `region`'s host,
      // not regions[i]'s — confirm against upstream whether intentional
      c = region.getCoprocessorHost()
            .findCoprocessor(CoprocessorImpl.class.getName());
      assertTrue("Coprocessor not started", ((CoprocessorImpl)c).wasStarted());
      assertTrue("Coprocessor not stopped", ((CoprocessorImpl)c).wasStopped());
      assertTrue(((CoprocessorImpl)c).wasOpened());
      assertTrue(((CoprocessorImpl)c).wasClosed());
      assertTrue(((CoprocessorImpl)c).wasCompacted());
    }
  }

  // Reopens a closed region and manually attaches a coprocessor host loaded
  // with the given observer classes (no real region server is running here).
  HRegion reopenRegion(final HRegion closedRegion, Class<?> ... implClasses)
      throws IOException {
    //HRegionInfo info = new HRegionInfo(tableName, null, null, false);
    HRegion r = HRegion.openHRegion(closedRegion, null);

    // this following piece is a hack. currently a coprocessorHost
    // is secretly loaded at OpenRegionHandler. we don't really
    // start a region server here, so just manually create cphost
    // and set it to region.
    Configuration conf = TEST_UTIL.getConfiguration();
    RegionCoprocessorHost host = new RegionCoprocessorHost(r, null, conf);
    r.setCoprocessorHost(host);

    for (Class<?> implClass : implClasses) {
      host.load(implClass, Coprocessor.PRIORITY_USER, conf);
    }
    // we need to manually call pre- and postOpen here since the
    // above load() is not the real case for CP loading.
A CP is // expected to be loaded by default from 1) configuration; or 2) // HTableDescriptor. If it's loaded after HRegion initialized, // the pre- and postOpen() won't be triggered automatically. // Here we have to call pre and postOpen explicitly. host.preOpen(); host.postOpen(); return r; } HRegion initHRegion (TableName tableName, String callingMethod, Configuration conf, Class<?> [] implClasses, byte [][] families) throws IOException { HTableDescriptor htd = new HTableDescriptor(tableName); for(byte [] family : families) { htd.addFamily(new HColumnDescriptor(family)); } HRegionInfo info = new HRegionInfo(tableName, null, null, false); Path path = new Path(DIR + callingMethod); HRegion r = HRegion.createHRegion(info, path, conf, htd); // this following piece is a hack. RegionCoprocessorHost host = new RegionCoprocessorHost(r, null, conf); r.setCoprocessorHost(host); for (Class<?> implClass : implClasses) { host.load(implClass, Coprocessor.PRIORITY_USER, conf); Coprocessor c = host.findCoprocessor(implClass.getName()); assertNotNull(c); } // Here we have to call pre and postOpen explicitly. host.preOpen(); host.postOpen(); return r; } Configuration initSplit() { // Always compact if there is more than one store file. TEST_UTIL.getConfiguration().setInt("hbase.hstore.compactionThreshold", 2); // Make lease timeout longer, lease checks less frequent TEST_UTIL.getConfiguration().setInt( "hbase.master.lease.thread.wakefrequency", 5 * 1000); TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, 10 * 1000); // Increase the amount of time between client retries TEST_UTIL.getConfiguration().setLong("hbase.client.pause", 15 * 1000); // This size should make it so we always split using the addContent // below. 
After adding all data, the first region is 1.3M TEST_UTIL.getConfiguration().setLong(HConstants.HREGION_MAX_FILESIZE, 1024 * 128); TEST_UTIL.getConfiguration().setBoolean("hbase.testing.nocluster", true); TEST_UTIL.getConfiguration().setBoolean(CoprocessorHost.ABORT_ON_ERROR_KEY, false); return TEST_UTIL.getConfiguration(); } private HRegion [] split(final HRegion r, final byte [] splitRow) throws IOException { HRegion[] regions = new HRegion[2]; SplitTransaction st = new SplitTransaction(r, splitRow); int i = 0; if (!st.prepare()) { // test fails. assertTrue(false); } try { Server mockServer = Mockito.mock(Server.class); when(mockServer.getConfiguration()).thenReturn( TEST_UTIL.getConfiguration()); PairOfSameType<HRegion> daughters = st.execute(mockServer, null); for (HRegion each_daughter: daughters) { regions[i] = each_daughter; i++; } } catch (IOException ioe) { LOG.info("Split transaction of " + r.getRegionNameAsString() + " failed:" + ioe.getMessage()); assertTrue(false); } catch (RuntimeException e) { LOG.info("Failed rollback of failed split of " + r.getRegionNameAsString() + e.getMessage()); } assertTrue(i == 2); return regions; } }
package net.sabamiso.p5_sumaho_player.net; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.InetAddress; import java.net.ServerSocket; import java.net.Socket; import java.nio.ByteBuffer; import java.nio.ByteOrder; import p5_sumaho.Payload; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import com.google.protobuf.InvalidProtocolBufferException; class SumahoServerThread extends Thread { TCPServer parent; ServerSocket server_socket; Socket peer_socket; boolean break_flag = false; public SumahoServerThread(TCPServer parent, ServerSocket server_socket) { this.parent = parent; this.server_socket = server_socket; } public String getPeerName() { if (peer_socket == null) return null; try { InetAddress addr = peer_socket.getInetAddress(); return addr.getHostAddress(); } catch(Exception e) { return null; } } @Override public void run() { while (!break_flag) { try { peer_socket = server_socket.accept(); readLoop(peer_socket); } catch (IOException e) { peer_socket = null; } } } public void finish() { if (server_socket != null) { break_flag = true; try { server_socket.close(); } catch (IOException e) { } try { this.join(); } catch (InterruptedException e) { } server_socket = null; } } void readLoop(Socket socket) { byte[] header_buf = new byte[4]; byte[] payload_size_buf = new byte[4]; int payload_size; byte[] payload; // setup stream InputStream is = null; OutputStream os = null; try { is = socket.getInputStream(); os = socket.getOutputStream(); } catch (IOException e1) { e1.printStackTrace(); } // read loop while (!break_flag) { try { // read header (4bytes) is.read(header_buf); if (header_buf[0] != 'S' || header_buf[1] != 'M' || header_buf[2] != 'H' || header_buf[3] != '1') { break; } // read payload size (4bytes) is.read(payload_size_buf); ByteBuffer bb = ByteBuffer.wrap(payload_size_buf).order( ByteOrder.LITTLE_ENDIAN); payload_size = bb.getInt(); // read payload payload = new byte[payload_size]; 
int read_size = 0; while (true) { int s = is.read(payload, read_size, payload.length - read_size); if (s <= 0) break; read_size += s; if (read_size >= payload.length) break; } // decode boolean rv = decode(payload, read_size); if (rv == false) break; // close connection if failed decoding } catch (Exception e) { break; } try { sleep(parent.getReadIntervalTime()); } catch (InterruptedException e) { } } try { is.close(); os.close(); socket.close(); } catch (Exception e) { } } private boolean decode(byte[] payload, int size) { // deserialize Payload.Image img; try { img = Payload.Image.parseFrom(payload); } catch (InvalidProtocolBufferException e) { System.err .println("InvalidProtocolBufferException : payload.length=" + payload.length); e.printStackTrace(); return false; } try { // decode to Bitmap byte[] jpeg_data = img.getJpeg().toByteArray(); Bitmap bmp = BitmapFactory.decodeByteArray(jpeg_data, 0, jpeg_data.length); parent.setBitmap(bmp); // set update time parent.setLastUpdateTime(System.currentTimeMillis()); } catch (Exception e) { } return true; } } public class TCPServer { int listen_port; SumahoServerThread thread; UpdateBitmapListener listener; Bitmap bitmap; long last_update_time; int timeout = 1000; int read_interval_time = 1; public TCPServer(int listen_port) { this.listen_port = listen_port; } public void setUpdateBitmapListener(UpdateBitmapListener listener) { this.listener = listener; } public void setTimeout(int ms) { this.timeout = ms; } public void setReadIntervalTime(int ms) { this.read_interval_time = ms; } public int getReadIntervalTime() { return this.read_interval_time; } public boolean isActive() { long t = getLastUpdateTime(); if (t == 0) return false; long diff = System.currentTimeMillis() - t; if (diff > timeout) return false; return true; } public String getPeerName() { if (isActive() == false) { return null; } return thread.getPeerName(); } protected void setLastUpdateTime(Long time) { last_update_time = time; } public long 
getLastUpdateTime() { return last_update_time; } public int getLietenPort() { return listen_port; } public boolean start() { ServerSocket s; try { s = new ServerSocket(listen_port, 5); } catch (IOException e) { e.printStackTrace(); return false; } thread = new SumahoServerThread(this, s); thread.start(); return true; } public void stop() { if (thread != null) { thread.finish(); thread = null; } } public Bitmap getBitmap() { return bitmap; } protected void setBitmap(Bitmap bitmap) { this.bitmap = bitmap; if (listener != null) { listener.updateBitmap(bitmap); } } }
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

// switchview because switch is a keyword
package com.facebook.react.views.switchview;

import android.content.Context;
import android.view.View;
import android.widget.CompoundButton;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.uimanager.LayoutShadowNode;
import com.facebook.react.uimanager.PixelUtil;
import com.facebook.react.uimanager.SimpleViewManager;
import com.facebook.react.uimanager.ThemedReactContext;
import com.facebook.react.uimanager.UIManagerModule;
import com.facebook.react.uimanager.ViewManagerDelegate;
import com.facebook.react.uimanager.ViewProps;
import com.facebook.react.uimanager.annotations.ReactProp;
import com.facebook.react.viewmanagers.AndroidSwitchManagerDelegate;
import com.facebook.react.viewmanagers.AndroidSwitchManagerInterface;
import com.facebook.yoga.YogaMeasureFunction;
import com.facebook.yoga.YogaMeasureMode;
import com.facebook.yoga.YogaMeasureOutput;
import com.facebook.yoga.YogaNode;

/**
 * View manager for {@link ReactSwitch} components. Maps the JS "AndroidSwitch"
 * component's props and commands onto the native switch view and reports
 * checked-state changes back to JS.
 */
public class ReactSwitchManager extends SimpleViewManager<ReactSwitch>
    implements AndroidSwitchManagerInterface<ReactSwitch> {

  public static final String REACT_CLASS = "AndroidSwitch";

  /**
   * Shadow node that measures a throwaway ReactSwitch once and caches the
   * result — every switch measures identically for a given
   * device/theme/locale combination.
   */
  static class ReactSwitchShadowNode extends LayoutShadowNode implements YogaMeasureFunction {

    private int mWidth;
    private int mHeight;
    // true once mWidth/mHeight hold a real measurement
    private boolean mMeasured;

    private ReactSwitchShadowNode() {
      initMeasureFunction();
    }

    private void initMeasureFunction() {
      setMeasureFunction(this);
    }

    @Override
    public long measure(
        YogaNode node,
        float width,
        YogaMeasureMode widthMode,
        float height,
        YogaMeasureMode heightMode) {
      if (!mMeasured) {
        // Create a switch with the default config and measure it; since we don't (currently)
        // support setting custom switch text, this is fine, as all switches will measure the same
        // on a specific device/theme/locale combination.
        ReactSwitch reactSwitch = new ReactSwitch(getThemedContext());
        reactSwitch.setShowText(false);
        final int spec = View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED);
        reactSwitch.measure(spec, spec);
        mWidth = reactSwitch.getMeasuredWidth();
        mHeight = reactSwitch.getMeasuredHeight();
        mMeasured = true;
      }

      return YogaMeasureOutput.make(mWidth, mHeight);
    }
  }

  // Forwards native checked-state changes to JS as a ReactSwitchEvent.
  private static final CompoundButton.OnCheckedChangeListener ON_CHECKED_CHANGE_LISTENER =
      new CompoundButton.OnCheckedChangeListener() {
        @Override
        public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
          ReactContext reactContext = (ReactContext) buttonView.getContext();
          reactContext
              .getNativeModule(UIManagerModule.class)
              .getEventDispatcher()
              .dispatchEvent(new ReactSwitchEvent(buttonView.getId(), isChecked));
        }
      };

  // Generated delegate that routes codegen'd prop updates to this manager.
  private final ViewManagerDelegate<ReactSwitch> mDelegate;

  public ReactSwitchManager() {
    mDelegate = new AndroidSwitchManagerDelegate<>(this);
  }

  @Override
  public String getName() {
    return REACT_CLASS;
  }

  @Override
  public LayoutShadowNode createShadowNodeInstance() {
    return new ReactSwitchShadowNode();
  }

  @Override
  public Class getShadowNodeClass() {
    return ReactSwitchShadowNode.class;
  }

  @Override
  protected ReactSwitch createViewInstance(ThemedReactContext context) {
    ReactSwitch view = new ReactSwitch(context);
    view.setShowText(false);
    return view;
  }

  @Override
  @ReactProp(name = "disabled", defaultBoolean = false)
  public void setDisabled(ReactSwitch view, boolean disabled) {
    view.setEnabled(!disabled);
  }

  @Override
  @ReactProp(name = ViewProps.ENABLED, defaultBoolean = true)
  public void setEnabled(ReactSwitch view, boolean enabled) {
    view.setEnabled(enabled);
  }

  @Override
  @ReactProp(name = ViewProps.ON)
  public void setOn(ReactSwitch view, boolean on) {
    setValueInternal(view, on);
  }

  // "value" and "on" are aliases for the same checked state.
  @Override
  @ReactProp(name = "value")
  public void setValue(ReactSwitch view, boolean value) {
    setValueInternal(view, value);
  }

  @Override
  @ReactProp(name = "thumbTintColor", customType = "Color")
  public void setThumbTintColor(ReactSwitch view, @Nullable Integer color) {
    this.setThumbColor(view, color);
  }

  @Override
  @ReactProp(name = "thumbColor", customType = "Color")
  public void setThumbColor(ReactSwitch view, @Nullable Integer color) {
    view.setThumbColor(color);
  }

  @Override
  @ReactProp(name = "trackColorForFalse", customType = "Color")
  public void setTrackColorForFalse(ReactSwitch view, @Nullable Integer color) {
    view.setTrackColorForFalse(color);
  }

  @Override
  @ReactProp(name = "trackColorForTrue", customType = "Color")
  public void setTrackColorForTrue(ReactSwitch view, @Nullable Integer color) {
    view.setTrackColorForTrue(color);
  }

  @Override
  @ReactProp(name = "trackTintColor", customType = "Color")
  public void setTrackTintColor(ReactSwitch view, @Nullable Integer color) {
    view.setTrackColor(color);
  }

  @Override
  public void setNativeValue(ReactSwitch view, boolean value) {
    // TODO(T52835863): Implement when view commands start using delegates generated by JS.
  }

  @Override
  public void receiveCommand(
      @NonNull ReactSwitch view, String commandId, @Nullable ReadableArray args) {
    switch (commandId) {
      case "setNativeValue":
        setValueInternal(view, args != null && args.getBoolean(0));
        break;
    }
  }

  @Override
  protected void addEventEmitters(final ThemedReactContext reactContext, final ReactSwitch view) {
    view.setOnCheckedChangeListener(ON_CHECKED_CHANGE_LISTENER);
  }

  @Override
  protected ViewManagerDelegate<ReactSwitch> getDelegate() {
    return mDelegate;
  }

  // Fabric measure path: measures a default-configured switch and reports the
  // size in DIPs.
  @Override
  public long measure(
      Context context,
      ReadableMap localData,
      ReadableMap props,
      ReadableMap state,
      float width,
      YogaMeasureMode widthMode,
      float height,
      YogaMeasureMode heightMode,
      @Nullable int[] attachmentsPositions) {
    ReactSwitch view = new ReactSwitch(context);
    view.setShowText(false);
    int measureSpec = View.MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED);
    view.measure(measureSpec, measureSpec);
    return YogaMeasureOutput.make(
        PixelUtil.toDIPFromPixel(view.getMeasuredWidth()),
        PixelUtil.toDIPFromPixel(view.getMeasuredHeight()));
  }

  private static void setValueInternal(ReactSwitch view, boolean value) {
    // we set the checked change listener to null and then restore it so that we don't fire an
    // onChange event to JS when JS itself is updating the value of the switch
    view.setOnCheckedChangeListener(null);
    view.setOn(value);
    view.setOnCheckedChangeListener(ON_CHECKED_CHANGE_LISTENER);
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.routing.allocation; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ESAllocationTestCase; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.hamcrest.Matchers; import java.util.List; import static java.util.Collections.singletonMap; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.hamcrest.Matchers.equalTo; /** */ public class FilterRoutingTests extends ESAllocationTestCase { private final Logger logger = Loggers.getLogger(FilterRoutingTests.class); public void testClusterFilters() { AllocationService strategy = 
createAllocationService(Settings.builder() .put("cluster.routing.allocation.include.tag1", "value1,value2") .put("cluster.routing.allocation.exclude.tag1", "value3,value4") .build()); logger.info("Building initial routing table"); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(2).numberOfReplicas(1)) .build(); RoutingTable initialRoutingTable = RoutingTable.builder() .addAsNew(metaData.index("test")) .build(); ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(initialRoutingTable).build(); logger.info("--> adding four nodes and performing rerouting"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() .add(newNode("node1", singletonMap("tag1", "value1"))) .add(newNode("node2", singletonMap("tag1", "value2"))) .add(newNode("node3", singletonMap("tag1", "value3"))) .add(newNode("node4", singletonMap("tag1", "value4"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(2)); logger.info("--> start the shards (primaries)"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); logger.info("--> start the shards (replicas)"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); logger.info("--> make sure shards are only allocated on tag1 with value1 and value2"); List<ShardRouting> startedShards = clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED); assertThat(startedShards.size(), equalTo(4)); for (ShardRouting startedShard : startedShards) { assertThat(startedShard.currentNodeId(), Matchers.anyOf(equalTo("node1"), equalTo("node2"))); } } public void testIndexFilters() { AllocationService 
strategy = createAllocationService(Settings.builder() .build()); logger.info("Building initial routing table"); MetaData initialMetaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT) .put("index.number_of_shards", 2) .put("index.number_of_replicas", 1) .put("index.routing.allocation.include.tag1", "value1,value2") .put("index.routing.allocation.exclude.tag1", "value3,value4") .build())) .build(); RoutingTable initialRoutingTable = RoutingTable.builder() .addAsNew(initialMetaData.index("test")) .build(); ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(initialMetaData).routingTable(initialRoutingTable).build(); logger.info("--> adding two nodes and performing rerouting"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() .add(newNode("node1", singletonMap("tag1", "value1"))) .add(newNode("node2", singletonMap("tag1", "value2"))) .add(newNode("node3", singletonMap("tag1", "value3"))) .add(newNode("node4", singletonMap("tag1", "value4"))) ).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(2)); logger.info("--> start the shards (primaries)"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); logger.info("--> start the shards (replicas)"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); logger.info("--> make sure shards are only allocated on tag1 with value1 and value2"); List<ShardRouting> startedShards = clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED); assertThat(startedShards.size(), equalTo(4)); for (ShardRouting startedShard : startedShards) { assertThat(startedShard.currentNodeId(), Matchers.anyOf(equalTo("node1"), 
equalTo("node2"))); } logger.info("--> switch between value2 and value4, shards should be relocating"); IndexMetaData existingMetaData = clusterState.metaData().index("test"); MetaData updatedMetaData = MetaData.builder() .put(IndexMetaData.builder(existingMetaData).settings(Settings.builder().put(existingMetaData.getSettings()) .put("index.routing.allocation.include.tag1", "value1,value4") .put("index.routing.allocation.exclude.tag1", "value2,value3") .build())) .build(); clusterState = ClusterState.builder(clusterState).metaData(updatedMetaData).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(2)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.RELOCATING).size(), equalTo(2)); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(2)); logger.info("--> finish relocation"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); startedShards = clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED); assertThat(startedShards.size(), equalTo(4)); for (ShardRouting startedShard : startedShards) { assertThat(startedShard.currentNodeId(), Matchers.anyOf(equalTo("node1"), equalTo("node4"))); } } public void testConcurrentRecoveriesAfterShardsCannotRemainOnNode() { AllocationService strategy = createAllocationService(Settings.builder().build()); logger.info("Building initial routing table"); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT)).numberOfShards(2).numberOfReplicas(0)) .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT)).numberOfShards(2).numberOfReplicas(0)) .build(); RoutingTable initialRoutingTable = RoutingTable.builder() .addAsNew(metaData.index("test1")) .addAsNew(metaData.index("test2")) .build(); 
ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(initialRoutingTable).build(); logger.info("--> adding two nodes and performing rerouting"); DiscoveryNode node1 = newNode("node1", singletonMap("tag1", "value1")); DiscoveryNode node2 = newNode("node2", singletonMap("tag1", "value2")); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().add(node1).add(node2)).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().node(node1.getId()).numberOfShardsWithState(INITIALIZING), equalTo(2)); assertThat(clusterState.getRoutingNodes().node(node2.getId()).numberOfShardsWithState(INITIALIZING), equalTo(2)); logger.info("--> start the shards (only primaries)"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); logger.info("--> make sure all shards are started"); assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(4)); logger.info("--> disable allocation for node1 and reroute"); strategy = createAllocationService(Settings.builder() .put("cluster.routing.allocation.node_concurrent_recoveries", "1") .put("cluster.routing.allocation.exclude.tag1", "value1") .build()); logger.info("--> move shards from node1 to node2"); clusterState = strategy.reroute(clusterState, "reroute"); logger.info("--> check that concurrent recoveries only allows 1 shard to move"); assertThat(clusterState.getRoutingNodes().node(node1.getId()).numberOfShardsWithState(STARTED), equalTo(1)); assertThat(clusterState.getRoutingNodes().node(node2.getId()).numberOfShardsWithState(INITIALIZING), equalTo(1)); assertThat(clusterState.getRoutingNodes().node(node2.getId()).numberOfShardsWithState(STARTED), equalTo(2)); logger.info("--> start the shards (only primaries)"); clusterState = 
strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); logger.info("--> move second shard from node1 to node2"); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().node(node2.getId()).numberOfShardsWithState(INITIALIZING), equalTo(1)); assertThat(clusterState.getRoutingNodes().node(node2.getId()).numberOfShardsWithState(STARTED), equalTo(3)); logger.info("--> start the shards (only primaries)"); clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().node(node2.getId()).numberOfShardsWithState(STARTED), equalTo(4)); } }
/* * Copyright 2011-2019 B2i Healthcare Pte Ltd, http://b2i.sg * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.b2international.snowowl.snomed.cis.client; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Arrays; import java.util.Calendar; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.apache.http.HttpStatus; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.client.utils.URIBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.b2international.commons.CompareUtils; import com.b2international.commons.exceptions.BadRequestException; import com.b2international.snowowl.core.IDisposableService; import com.b2international.snowowl.core.api.SnowowlRuntimeException; import com.b2international.snowowl.core.date.DateFormats; import com.b2international.snowowl.core.date.Dates; import com.b2international.snowowl.core.terminology.ComponentCategory; import com.b2international.snowowl.snomed.cis.AbstractSnomedIdentifierService; import 
com.b2international.snowowl.snomed.cis.SnomedIdentifierConfiguration; import com.b2international.snowowl.snomed.cis.SnomedIdentifiers; import com.b2international.snowowl.snomed.cis.domain.IdentifierStatus; import com.b2international.snowowl.snomed.cis.domain.SctId; import com.b2international.snowowl.snomed.cis.model.*; import com.b2international.snowowl.snomed.cis.model.Record; import com.b2international.snowowl.snomed.cis.reservations.ISnomedIdentifierReservationService; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.base.Joiner; import com.google.common.base.Predicates; import com.google.common.base.Strings; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.Sets; /** * CIS (IHTSDO) based implementation of the identifier service. 
* * @since 4.5 */ public class CisSnomedIdentifierService extends AbstractSnomedIdentifierService implements IDisposableService { private static final Logger LOGGER = LoggerFactory.getLogger(CisSnomedIdentifierService.class); private final long numberOfPollTries; private final long numberOfReauthTries; private final long timeBetweenPollTries; private final int requestBulkLimit; private final String clientKey; private final ObjectMapper mapper; private CisClient client; private boolean disposed; public CisSnomedIdentifierService(final SnomedIdentifierConfiguration conf, final ISnomedIdentifierReservationService reservationService, final ObjectMapper mapper) { super(reservationService, conf); this.clientKey = conf.getCisClientSoftwareKey(); this.numberOfPollTries = conf.getCisNumberOfPollTries(); this.timeBetweenPollTries = conf.getCisTimeBetweenPollTries(); this.numberOfReauthTries = conf.getCisNumberOfReauthTries(); this.requestBulkLimit = conf.getRequestBulkLimit(); this.mapper = mapper; this.client = new CisClient(conf, mapper); // Log in at startup, and keep the token as long as possible login(); } @Override public Set<String> generate(final String namespace, final ComponentCategory category, final int quantity) { return ImmutableSet.copyOf(generateSctIds(namespace, category, quantity).keySet()); } @Override public Map<String, SctId> generateSctIds(String namespace, ComponentCategory category, int quantity) { checkNotNull(category, "Component category must not be null."); checkArgument(quantity > 0, "Number of requested IDs should be non-negative."); checkCategory(category); LOGGER.debug("Generating {} component IDs for category {}.", quantity, category.getDisplayName()); HttpPost generateRequest = null; HttpGet recordsRequest = null; try { if (quantity > 1) { LOGGER.debug("Sending {} ID bulk generation request.", category.getDisplayName()); generateRequest = httpPost(String.format("sct/bulk/generate?token=%s", getToken()), createBulkGenerationData(namespace, 
category, quantity)); final String response = execute(generateRequest); final String jobId = mapper.readValue(response, JsonNode.class).get("id").asText(); joinBulkJobPolling(jobId, quantity, getToken()); recordsRequest = httpGet(String.format("bulk/jobs/%s/records?token=%s", jobId, getToken())); final String recordsResponse = execute(recordsRequest); final JsonNode[] records = mapper.readValue(recordsResponse, JsonNode[].class); return readSctIds(getComponentIds(records)); } else { LOGGER.debug("Sending {} ID single generation request.", category.getDisplayName()); generateRequest = httpPost(String.format("sct/generate?token=%s", getToken()), createGenerationData(namespace, category)); final String response = execute(generateRequest); final SctId sctid = mapper.readValue(response, SctId.class); return readSctIds(Collections.singleton(sctid.getSctid())); } } catch (IOException e) { throw new SnowowlRuntimeException("Caught exception while generating IDs.", e); } finally { release(generateRequest); release(recordsRequest); } } @Override public Map<String, SctId> register(final Set<String> componentIds) { if (CompareUtils.isEmpty(componentIds)) { return Collections.emptyMap(); } LOGGER.debug("Registering {} component IDs.", componentIds.size()); final Map<String, SctId> sctIds = getSctIds(componentIds); final Map<String, SctId> availableOrReservedSctIds = ImmutableMap.copyOf(Maps.filterValues(sctIds, Predicates.or( SctId::isAvailable, SctId::isReserved))); if (availableOrReservedSctIds.isEmpty()) { return Collections.emptyMap(); } HttpPost registerRequest = null; String currentNamespace = null; try { if (availableOrReservedSctIds.size() > 1) { final Multimap<String, String> componentIdsByNamespace = toNamespaceMultimap(availableOrReservedSctIds.keySet()); for (final Entry<String, Collection<String>> entry : componentIdsByNamespace.asMap().entrySet()) { currentNamespace = entry.getKey(); for (final Collection<String> bulkIds : Iterables.partition(entry.getValue(), 
requestBulkLimit)) { LOGGER.debug("Sending bulk registration request for namespace {} with size {}.", currentNamespace, bulkIds.size()); registerRequest = httpPost(String.format("sct/bulk/register?token=%s", getToken()), createBulkRegistrationData(bulkIds)); execute(registerRequest); } } } else { final String componentId = Iterables.getOnlyElement(availableOrReservedSctIds.keySet()); currentNamespace = SnomedIdentifiers.getNamespace(componentId); registerRequest = httpPost(String.format("sct/register?token=%s", getToken()), createRegistrationData(componentId)); execute(registerRequest); } return ImmutableMap.copyOf(availableOrReservedSctIds); } catch (IOException e) { throw new SnowowlRuntimeException(String.format("Exception while reserving IDs for namespace %s.", currentNamespace), e); } finally { release(registerRequest); } } @Override public Set<String> reserve(String namespace, ComponentCategory category, int quantity) { return ImmutableSet.copyOf(reserveSctIds(namespace, category, quantity).keySet()); } @Override public Map<String, SctId> reserveSctIds(String namespace, ComponentCategory category, int quantity) { checkNotNull(category, "Component category must not be null."); checkArgument(quantity > 0, "Number of requested IDs should be non-negative."); checkCategory(category); LOGGER.debug("Reserving {} component IDs for category {}.", quantity, category.getDisplayName()); HttpPost reserveRequest = null; HttpGet recordsRequest = null; try { if (quantity > 1) { LOGGER.debug("Sending {} ID bulk reservation request.", category.getDisplayName()); reserveRequest = httpPost(String.format("sct/bulk/reserve?token=%s", getToken()), createBulkReservationData(namespace, category, quantity)); final String bulkResponse = execute(reserveRequest); final String jobId = mapper.readValue(bulkResponse, JsonNode.class).get("id").asText(); joinBulkJobPolling(jobId, quantity, getToken()); recordsRequest = httpGet(String.format("bulk/jobs/%s/records?token=%s", jobId, 
getToken())); final String recordsResponse = execute(recordsRequest); final JsonNode[] records = mapper.readValue(recordsResponse, JsonNode[].class); return readSctIds(getComponentIds(records)); } else { LOGGER.debug("Sending {} ID reservation request.", category.getDisplayName()); reserveRequest = httpPost(String.format("sct/reserve?token=%s", getToken()), createReservationData(namespace, category)); final String response = execute(reserveRequest); final SctId sctid = mapper.readValue(response, SctId.class); return readSctIds(Collections.singleton(sctid.getSctid())); } } catch (IOException e) { throw new SnowowlRuntimeException("Exception while bulk reserving IDs.", e); } finally { release(reserveRequest); release(recordsRequest); } } @Override public Map<String, SctId> release(final Set<String> componentIds) { LOGGER.debug("Releasing {} component IDs.", componentIds.size()); final Map<String, SctId> sctIds = getSctIds(componentIds); final Map<String, SctId> problemSctIds = ImmutableMap.copyOf(Maps.filterValues(sctIds, Predicates.<SctId>not(Predicates.or( SctId::isAssigned, SctId::isReserved, SctId::isAvailable)))); if (!problemSctIds.isEmpty()) { throw new SctIdStatusException("Cannot release %s component IDs because they are not assigned, reserved, or already available.", problemSctIds); } final Map<String, SctId> assignedOrReservedSctIds = ImmutableMap.copyOf(Maps.filterValues(sctIds, Predicates.or( SctId::isAssigned, SctId::isReserved))); // if there is no IDs to release, then just return the current sctIds set as a response if (assignedOrReservedSctIds.isEmpty()) { return sctIds; } HttpPut releaseRequest = null; String currentNamespace = null; try { if (assignedOrReservedSctIds.size() > 1) { final Multimap<String, String> componentIdsByNamespace = toNamespaceMultimap(assignedOrReservedSctIds.keySet()); for (final Entry<String, Collection<String>> entry : componentIdsByNamespace.asMap().entrySet()) { currentNamespace = entry.getKey(); for (final 
Collection<String> bulkIds : Iterables.partition(entry.getValue(), requestBulkLimit)) { LOGGER.debug("Sending bulk release request for namespace {} with size {}.", currentNamespace, bulkIds.size()); releaseRequest = httpPut(String.format("sct/bulk/release?token=%s", getToken()), createBulkReleaseData(currentNamespace, bulkIds)); execute(releaseRequest); } } } else { final String componentId = Iterables.getOnlyElement(assignedOrReservedSctIds.keySet()); currentNamespace = SnomedIdentifiers.getNamespace(componentId); releaseRequest = httpPut(String.format("sct/release?token=%s", getToken()), createReleaseData(componentId)); execute(releaseRequest); } return ImmutableMap.copyOf(assignedOrReservedSctIds); } catch (IOException e) { throw new SnowowlRuntimeException(String.format("Exception while releasing IDs for namespace %s.", currentNamespace), e); } finally { release(releaseRequest); } } @Override public Map<String, SctId> deprecate(final Set<String> componentIds) { LOGGER.debug("Deprecating {} component IDs.", componentIds.size()); final Map<String, SctId> sctIds = getSctIds(componentIds); final Map<String, SctId> problemSctIds = ImmutableMap.copyOf(Maps.filterValues(sctIds, Predicates.<SctId>not(Predicates.or( SctId::isAssigned, SctId::isPublished, SctId::isDeprecated)))); if (!problemSctIds.isEmpty()) { throw new SctIdStatusException("Cannot deprecate '%s' component IDs because they are not assigned, published, or already deprecated.", problemSctIds); } final Map<String, SctId> assignedOrPublishedSctIds = ImmutableMap.copyOf(Maps.filterValues(sctIds, Predicates.or( SctId::isAssigned, SctId::isPublished))); if (assignedOrPublishedSctIds.isEmpty()) { return Collections.emptyMap(); } HttpPut deprecateRequest = null; String currentNamespace = null; try { if (assignedOrPublishedSctIds.size() > 1) { final Multimap<String, String> componentIdsByNamespace = toNamespaceMultimap(assignedOrPublishedSctIds.keySet()); for (final Entry<String, Collection<String>> entry : 
componentIdsByNamespace.asMap().entrySet()) { currentNamespace = entry.getKey(); for (final Collection<String> bulkIds : Iterables.partition(entry.getValue(), requestBulkLimit)) { LOGGER.debug("Sending bulk deprecation request for namespace {} with size {}.", currentNamespace, bulkIds.size()); deprecateRequest = httpPut(String.format("sct/bulk/deprecate?token=%s", getToken()), createBulkDeprecationData(currentNamespace, bulkIds)); execute(deprecateRequest); } } } else { final String componentId = Iterables.getOnlyElement(assignedOrPublishedSctIds.keySet()); currentNamespace = SnomedIdentifiers.getNamespace(componentId); deprecateRequest = httpPut(String.format("sct/deprecate?token=%s", getToken()), createDeprecationData(componentId)); execute(deprecateRequest); } return ImmutableMap.copyOf(assignedOrPublishedSctIds); } catch (IOException e) { throw new SnowowlRuntimeException(String.format("Exception while deprecating IDs for namespace %s.", currentNamespace), e); } finally { release(deprecateRequest); } } @Override public Map<String, SctId> publish(final Set<String> componentIds) { LOGGER.debug("Publishing {} component IDs.", componentIds.size()); final Map<String, SctId> sctIds = getSctIds(componentIds); HttpPut publishRequest = null; String currentNamespace = null; try { final Map<String, SctId> sctIdsToPublish = ImmutableMap.copyOf(Maps.filterValues(sctIds, Predicates.not(SctId::isPublished))); if (!sctIdsToPublish.isEmpty()) { if (sctIdsToPublish.size() > 1) { final Multimap<String, String> componentIdsByNamespace = toNamespaceMultimap(sctIdsToPublish.keySet()); for (final Entry<String, Collection<String>> entry : componentIdsByNamespace.asMap().entrySet()) { currentNamespace = entry.getKey(); for (final Collection<String> bulkIds : Iterables.partition(entry.getValue(), requestBulkLimit)) { LOGGER.debug("Sending bulk publication request for namespace {} with size {}.", currentNamespace, bulkIds.size()); publishRequest = 
httpPut(String.format("sct/bulk/publish?token=%s", getToken()), createBulkPublishData(currentNamespace, bulkIds)); execute(publishRequest); } } } else { final String componentId = Iterables.getOnlyElement(sctIdsToPublish.keySet()); currentNamespace = SnomedIdentifiers.getNamespace(componentId); publishRequest = httpPut(String.format("sct/publish?token=%s", getToken()), createPublishData(componentId)); execute(publishRequest); } } return ImmutableMap.copyOf(sctIdsToPublish); } catch (IOException e) { throw new SnowowlRuntimeException(String.format("Exception while publishing IDs for namespace %s.", currentNamespace), e); } finally { release(publishRequest); } } @Override public Map<String, SctId> getSctIds(final Set<String> componentIds) { final Map<String, SctId> existingIdsMap = readSctIds(componentIds); if (existingIdsMap.size() == componentIds.size()) { return existingIdsMap; } else { final Set<String> knownComponentIds = existingIdsMap.keySet(); final Set<String> difference = ImmutableSet.copyOf(Sets.difference(componentIds, knownComponentIds)); final ImmutableMap.Builder<String, SctId> resultBuilder = ImmutableMap.builder(); resultBuilder.putAll(existingIdsMap); for (final String componentId : difference) { resultBuilder.put(componentId, buildSctId(componentId, IdentifierStatus.AVAILABLE)); } return resultBuilder.build(); } } private SctId buildSctId(final String componentId, final IdentifierStatus status) { final SctId sctId = new SctId(); sctId.setSctid(componentId); sctId.setStatus(status.getSerializedName()); sctId.setSequence(SnomedIdentifiers.getItemId(componentId)); sctId.setNamespace(SnomedIdentifiers.getNamespace(componentId)); sctId.setPartitionId(SnomedIdentifiers.getPartitionId(componentId)); sctId.setCheckDigit(SnomedIdentifiers.getCheckDigit(componentId)); // TODO: Other attributes of SctId could also be set here return sctId; } private Map<String, SctId> readSctIds(final Set<String> componentIds) { if (CompareUtils.isEmpty(componentIds)) { 
return Collections.emptyMap(); } HttpPost bulkRequest = null; HttpGet singleRequest = null; try { if (componentIds.size() > 1) { LOGGER.debug("Sending bulk component ID get request."); final ImmutableMap.Builder<String, SctId> resultBuilder = ImmutableMap.builder(); for (final Collection<String> ids : Iterables.partition(componentIds, requestBulkLimit)) { final String idsAsString = Joiner.on(',').join(ids); final ObjectNode idsAsJson = mapper.createObjectNode().put("sctids", idsAsString); bulkRequest = client.httpPost(String.format("sct/bulk/ids/?token=%s", getToken()), idsAsJson); final String response = execute(bulkRequest); final SctId[] sctIds = mapper.readValue(response, SctId[].class); final Map<String, SctId> sctIdMap = Maps.uniqueIndex(Arrays.asList(sctIds), SctId::getSctid); resultBuilder.putAll(sctIdMap); } return resultBuilder.build(); } else { final String componentId = Iterables.getOnlyElement(componentIds); LOGGER.debug("Sending component ID {} get request.", componentId); singleRequest = httpGet(String.format("sct/ids/%s?token=%s", componentId, getToken())); final String response = execute(singleRequest); final SctId sctId = mapper.readValue(response, SctId.class); return ImmutableMap.of(sctId.getSctid(), sctId); } } catch (IOException e) { throw new SnowowlRuntimeException("Exception while getting IDs.", e); } finally { release(bulkRequest); release(singleRequest); } } @Override public boolean importSupported() { return true; } private void login() { client.login(); } private HttpGet httpGet(final String suffix) { return client.httpGet(suffix); } private HttpPost httpPost(final String suffix, final RequestData data) throws IOException { return client.httpPost(suffix, data); } private HttpPut httpPut(final String suffix, final RequestData data) throws IOException { return client.httpPut(suffix, data); } private String execute(final HttpRequestBase request) throws IOException { CisClientException last = null; long remainingAttempts = 
numberOfReauthTries; do { try { return client.execute(request); } catch (CisClientException e) { if (e.getStatusCode() == HttpStatus.SC_UNAUTHORIZED || e.getStatusCode() == HttpStatus.SC_FORBIDDEN) { last = e; remainingAttempts--; LOGGER.warn("Unauthorized response from CIS, retrying request ({} attempt(s) left).", remainingAttempts); login(); // Update the corresponding query parameter in the request, then retry try { URI requestUri = request.getURI(); URI updatedUri = new URIBuilder(requestUri) .setParameter("token", getToken()) .build(); request.setURI(updatedUri); request.reset(); } catch (URISyntaxException se) { throw new IOException("Couldn't update authentication token.", se); } } else { throw new BadRequestException(e.getReasonPhrase(), e); } } } while (remainingAttempts > 0); // Re-throw the last captured exception otherwise throw new BadRequestException(last.getReasonPhrase()); } private void release(final HttpRequestBase request) { if (null != request) { client.release(request); } } private void joinBulkJobPolling(final String jobId, final int quantity, final String token) { HttpGet request = null; JobStatus status = JobStatus.PENDING; try { LOGGER.debug("Polling job status with ID {}.", jobId); request = httpGet(String.format("bulk/jobs/%s?token=%s", jobId, token)); for (long pollTry = numberOfPollTries; pollTry > 0; pollTry--) { final String response = execute(request); final JsonNode node = mapper.readValue(response, JsonNode.class); status = JobStatus.get(node.get("status").asInt()); if (JobStatus.FINISHED == status) { break; } else if (JobStatus.ERROR == status) { throw new SnowowlRuntimeException("Bulk request has ended in error."); } else { Thread.sleep(timeBetweenPollTries); } } } catch (Exception e) { throw new SnowowlRuntimeException("Exception while polling job status.", e); } finally { release(request); } if (JobStatus.FINISHED != status) { throw new SnowowlRuntimeException("Job didn't finish with expected status: " + status); } } private 
Set<String> getComponentIds(final JsonNode[] records) { return FluentIterable.from(Arrays.asList(records)) .transform(jsonNode -> jsonNode.get("sctid").asText()) .toSet(); } private Multimap<String, String> toNamespaceMultimap(final Set<String> componentIds) { return FluentIterable.from(componentIds).index(componentId -> getNamespace(componentId)); } private String getNamespace(final String componentId) { final String namespace = SnomedIdentifiers.getNamespace(componentId); if (Strings.isNullOrEmpty(namespace)) { return "0"; } else { return namespace; } } private RequestData createGenerationData(final String namespace, final ComponentCategory category) throws IOException { return new GenerationData(namespace, clientKey, category); } private RequestData createBulkGenerationData(final String namespace, final ComponentCategory category, final int quantity) throws IOException { return new BulkGenerationData(namespace, clientKey, category, quantity); } private RequestData createRegistrationData(final String componentId) throws IOException { return new RegistrationData(SnomedIdentifiers.getNamespace(componentId), clientKey, componentId, ""); } private RequestData createBulkRegistrationData(final Collection<String> componentIds) throws IOException { final Collection<Record> records = Lists.newArrayList(); for (final String componentId : componentIds) { records.add(new Record(componentId)); } return new BulkRegistrationData(SnomedIdentifiers.getNamespace(componentIds.iterator().next()), clientKey, records); } private RequestData createDeprecationData(final String componentId) throws IOException { return new DeprecationData(SnomedIdentifiers.getNamespace(componentId), clientKey, componentId); } private RequestData createBulkDeprecationData(final String namespace, final Collection<String> componentIds) throws IOException { return new BulkDeprecationData(namespace, clientKey, componentIds); } private RequestData createReservationData(final String namespace, final 
ComponentCategory category) throws IOException { return new ReservationData(namespace, clientKey, getExpirationDate(), category); } private RequestData createBulkReservationData(final String namespace, final ComponentCategory category, final int quantity) throws IOException { return new BulkReservationData(namespace, clientKey, getExpirationDate(), category, quantity); } private String getExpirationDate() { final Calendar calendar = Calendar.getInstance(); calendar.add(Calendar.DATE, 1); final Date expirationDate = calendar.getTime(); return Dates.formatByGmt(expirationDate, DateFormats.DEFAULT); } private RequestData createReleaseData(final String componentId) throws IOException { return new ReleaseData(SnomedIdentifiers.getNamespace(componentId), clientKey, componentId); } private RequestData createBulkReleaseData(final String namespace, final Collection<String> componentIds) throws IOException { return new BulkReleaseData(namespace, clientKey, componentIds); } private RequestData createPublishData(final String componentId) throws IOException { return new PublicationData(SnomedIdentifiers.getNamespace(componentId), clientKey, componentId); } private RequestData createBulkPublishData(final String namespace, final Collection<String> componentIds) throws IOException { return new BulkPublicationData(namespace, clientKey, componentIds); } @Override public void dispose() { if (null != client) { client.logout(); client.close(); client = null; } disposed = true; } @Override public boolean isDisposed() { return disposed; } public String getToken() { return client.getToken(); } }
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

package org.tensorflow;

import java.nio.charset.StandardCharsets;

/**
 * A builder for {@link Operation}s in a {@link Graph}.
 *
 * <p>Instances of an OperationBuilder are <b>not</b> thread-safe.
 *
 * <p>A builder for adding {@link Operation}s to a {@link Graph}. For example, the following uses
 * the builder to create an operation that produces the constant "3" as its output:
 *
 * <pre>{@code
 * // g is a Graph instance.
 * try (Tensor c1 = Tensor.create(3.0f)) {
 *   g.opBuilder("Constant", "MyConst")
 *       .setAttr("dtype", c1.dataType())
 *       .setAttr("value", c1)
 *       .build();
 * }
 * }</pre>
 */
public final class OperationBuilder {

  OperationBuilder(Graph graph, String type, String name) {
    this.graph = graph;
    // Hold a graph reference while touching the native handle so the graph
    // cannot be closed underneath the native call.
    try (Graph.Reference r = graph.ref()) {
      this.unsafeNativeHandle = allocate(r.nativeHandle(), type, name);
    }
  }

  /**
   * Add the {@link Operation} being built to the {@link Graph}.
   *
   * <p>The OperationBuilder is not usable after build() returns.
   */
  public Operation build() {
    try (Graph.Reference r = graph.ref()) {
      Operation op = new Operation(graph, finish(unsafeNativeHandle));
      // Zero the handle: finish() consumed it, so this builder must not touch it again.
      unsafeNativeHandle = 0;
      return op;
    }
  }

  /** Adds a single {@link Output} as the next input of the operation being built. */
  public OperationBuilder addInput(Output input) {
    try (Graph.Reference r = graph.ref()) {
      addInput(unsafeNativeHandle, input.op().getUnsafeNativeHandle(), input.index());
    }
    return this;
  }

  /** Adds a list of {@link Output}s as a single list-typed input of the operation being built. */
  public OperationBuilder addInputList(Output[] inputs) {
    try (Graph.Reference r = graph.ref()) {
      long[] opHandles = new long[inputs.length];
      int[] indices = new int[inputs.length];
      for (int i = 0; i < inputs.length; ++i) {
        opHandles[i] = inputs[i].op().getUnsafeNativeHandle();
        indices[i] = inputs[i].index();
      }
      addInputList(unsafeNativeHandle, opHandles, indices);
    }
    return this;
  }

  /** Requests that the operation be placed on the named device (e.g. {@code "/cpu:0"}). */
  public OperationBuilder setDevice(String device) {
    try (Graph.Reference r = graph.ref()) {
      setDevice(unsafeNativeHandle, device);
    }
    return this;
  }

  /** Sets a string-valued attribute; the value is passed to the C library as UTF-8 bytes. */
  public OperationBuilder setAttr(String name, String value) {
    // FIX: use the StandardCharsets constant instead of a per-call Charset.forName lookup.
    setAttr(name, value.getBytes(StandardCharsets.UTF_8));
    return this;
  }

  /** Sets a byte-array-valued ("string") attribute. */
  public OperationBuilder setAttr(String name, byte[] value) {
    try (Graph.Reference r = graph.ref()) {
      setAttrString(unsafeNativeHandle, name, value);
    }
    return this;
  }

  /** Sets an integer-valued attribute. */
  public OperationBuilder setAttr(String name, long value) {
    try (Graph.Reference r = graph.ref()) {
      setAttrInt(unsafeNativeHandle, name, value);
    }
    return this;
  }

  /** Sets an integer-list-valued attribute. */
  public OperationBuilder setAttr(String name, long[] value) {
    try (Graph.Reference r = graph.ref()) {
      setAttrIntList(unsafeNativeHandle, name, value);
    }
    return this;
  }

  /** Sets a float-valued attribute. */
  public OperationBuilder setAttr(String name, float value) {
    try (Graph.Reference r = graph.ref()) {
      setAttrFloat(unsafeNativeHandle, name, value);
    }
    return this;
  }

  /** Sets a float-list-valued attribute. */
  public OperationBuilder setAttr(String name, float[] value) {
    try (Graph.Reference r = graph.ref()) {
      setAttrFloatList(unsafeNativeHandle, name, value);
    }
    return this;
  }

  /** Sets a boolean-valued attribute. */
  public OperationBuilder setAttr(String name, boolean value) {
    try (Graph.Reference r = graph.ref()) {
      setAttrBool(unsafeNativeHandle, name, value);
    }
    return this;
  }

  /** Sets a boolean-list-valued attribute. */
  public OperationBuilder setAttr(String name, boolean[] value) {
    try (Graph.Reference r = graph.ref()) {
      setAttrBoolList(unsafeNativeHandle, name, value);
    }
    return this;
  }

  /** Sets a {@link DataType}-valued attribute. */
  public OperationBuilder setAttr(String name, DataType value) {
    try (Graph.Reference r = graph.ref()) {
      setAttrType(unsafeNativeHandle, name, value.c());
    }
    return this;
  }

  /** Sets a {@link DataType}-list-valued attribute. */
  public OperationBuilder setAttr(String name, DataType[] value) {
    int[] ctypes = new int[value.length];
    for (int i = 0; i < value.length; ++i) {
      ctypes[i] = value[i].c();
    }
    try (Graph.Reference r = graph.ref()) {
      setAttrTypeList(unsafeNativeHandle, name, ctypes);
    }
    return this;
  }

  /** Sets a {@link Tensor}-valued attribute. */
  public OperationBuilder setAttr(String name, Tensor value) {
    try (Graph.Reference r = graph.ref()) {
      setAttrTensor(unsafeNativeHandle, name, value.getNativeHandle());
    }
    return this;
  }

  /** Sets a {@link Tensor}-list-valued attribute. */
  public OperationBuilder setAttr(String name, Tensor[] value) {
    long[] handles = new long[value.length];
    int idx = 0;
    for (Tensor t : value) {
      handles[idx++] = t.getNativeHandle();
    }
    try (Graph.Reference r = graph.ref()) {
      setAttrTensorList(unsafeNativeHandle, name, handles);
    }
    return this;
  }

  // Native TF_OperationDescription handle; zeroed once build() consumes it.
  private long unsafeNativeHandle;
  // FIX: assigned only in the constructor, so it can (and should) be final.
  private final Graph graph;

  private static native long allocate(long graphHandle, String type, String name);

  private static native long finish(long handle);

  private static native void addInput(long handle, long opHandle, int index);

  private static native void addInputList(long handle, long[] opHandles, int[] indices);

  private static native void setDevice(long handle, String device);

  // The names of all the setAttr* family functions below correspond to the C library types, not the
  // Java library types. Roughly, setAttrFoo calls the TensorFlow C library function: TF_SetAttrFoo.
  //
  // TODO(ashankar):
  // - setAttrStringList: Which would take in an array of byte[] (java Strings will need to be UTF-8
  //   encoded?)
  // - setAttrShape and setAttrShapeList: Which would take in a long[] or long[][]?
  private static native void setAttrString(long handle, String name, byte[] value);

  private static native void setAttrInt(long handle, String name, long value);

  private static native void setAttrIntList(long handle, String name, long[] value);

  private static native void setAttrFloat(long handle, String name, float value);

  private static native void setAttrFloatList(long handle, String name, float[] value);

  private static native void setAttrBool(long handle, String name, boolean value);

  private static native void setAttrBoolList(long handle, String name, boolean[] value);

  private static native void setAttrType(long handle, String name, int type);

  private static native void setAttrTypeList(long handle, String name, int[] type);

  private static native void setAttrTensor(long handle, String name, long tensorHandle);

  private static native void setAttrTensorList(long handle, String name, long[] tensorHandle);
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.coyote.http11.filters;

import java.io.IOException;
import java.nio.charset.Charset;

import org.apache.coyote.InputBuffer;
import org.apache.coyote.Request;
import org.apache.coyote.http11.InputFilter;
import org.apache.tomcat.util.buf.ByteChunk;
import org.apache.tomcat.util.res.StringManager;

/**
 * Identity input filter: passes the request body through unchanged, limiting
 * reads to the declared Content-Length and swallowing (up to a configurable
 * limit) any unread remainder at end of request.
 *
 * @author Remy Maucherat
 */
public class IdentityInputFilter implements InputFilter {

    private static final StringManager sm = StringManager.getManager(
            IdentityInputFilter.class.getPackage().getName());


    // -------------------------------------------------------------- Constants


    protected static final String ENCODING_NAME = "identity";
    protected static final ByteChunk ENCODING = new ByteChunk();


    // ----------------------------------------------------- Static Initializer


    static {
        ENCODING.setBytes(ENCODING_NAME.getBytes(Charset.defaultCharset()),
                0, ENCODING_NAME.length());
    }


    // ----------------------------------------------------- Instance Variables


    /**
     * Content length. -1 until setRequest() has seen a request.
     */
    protected long contentLength = -1;


    /**
     * Remaining bytes. May go negative in doRead() if the underlying buffer
     * returns more bytes than the body declared; see the accounting there.
     */
    protected long remaining = 0;


    /**
     * Next buffer in the pipeline.
     */
    protected InputBuffer buffer;


    /**
     * Chunk used to read leftover bytes.
     */
    protected ByteChunk endChunk = new ByteChunk();


    /**
     * Maximum number of unread body bytes end() will swallow before giving up
     * and failing the connection; -1 means unlimited.
     */
    private final int maxSwallowSize;


    // ------------------------------------------------------------- Properties


    /**
     * Get content length.
     *
     * @deprecated Unused - will be removed in 8.0.x
     */
    @Deprecated
    public long getContentLength() {
        return contentLength;
    }


    /**
     * Get remaining bytes.
     *
     * @deprecated Unused - will be removed in 8.0.x
     */
    @Deprecated
    public long getRemaining() {
        return remaining;
    }


    // ------------------------------------------------------------ Constructor

    public IdentityInputFilter(int maxSwallowSize) {
        this.maxSwallowSize = maxSwallowSize;
    }


    // ---------------------------------------------------- InputBuffer Methods


    /**
     * Read bytes.
     *
     * @return If the filter does request length control, this value is
     * significant; it should be the number of bytes consumed from the buffer,
     * up until the end of the current request body, or the buffer length,
     * whichever is greater. If the filter does not do request body length
     * control, the returned value should be -1.
     */
    @Override
    public int doRead(ByteChunk chunk, Request req)
        throws IOException {

        int result = -1;

        // contentLength < 0 means no entity body was declared: report EOF (-1)
        if (contentLength >= 0) {
            if (remaining > 0) {
                int nRead = buffer.doRead(chunk, req);
                if (nRead > remaining) {
                    // The chunk is longer than the number of bytes remaining
                    // in the body; changing the chunk length to the number
                    // of bytes remaining
                    chunk.setBytes(chunk.getBytes(), chunk.getStart(),
                                   (int) remaining);
                    result = (int) remaining;
                } else {
                    result = nRead;
                }
                if (nRead > 0) {
                    // NOTE: on an over-read (nRead > remaining) this drives
                    // `remaining` negative, which the `remaining > 0` guard
                    // above then treats as "body fully consumed".
                    remaining = remaining - nRead;
                }
            } else {
                // No more bytes left to be read : return -1 and clear the
                // buffer
                chunk.recycle();
                result = -1;
            }
        }

        return result;

    }


    // ---------------------------------------------------- InputFilter Methods


    /**
     * Read the content length from the request.
     */
    @Override
    public void setRequest(Request request) {
        contentLength = request.getContentLengthLong();
        remaining = contentLength;
    }


    /**
     * End the current request: drain any unread body bytes so the connection
     * can be reused, up to maxSwallowSize.
     *
     * @return the number of extra bytes consumed past the body end, as a
     *         non-negative value (remaining is zero or negative here)
     */
    @Override
    public long end() throws IOException {

        // Decided up front from the bytes still outstanding; the per-read check
        // below only fires when this flag is set.
        final boolean maxSwallowSizeExceeded = (maxSwallowSize > -1 && remaining > maxSwallowSize);
        long swallowed = 0;

        // Consume extra bytes.
        while (remaining > 0) {
            int nread = buffer.doRead(endChunk, null);
            if (nread > 0 ) {
                swallowed += nread;
                remaining = remaining - nread;
                if (maxSwallowSizeExceeded && swallowed > maxSwallowSize) {
                    // Note: We do not fail early so the client has a chance to
                    // read the response before the connection is closed. See:
                    // http://httpd.apache.org/docs/2.0/misc/fin_wait_2.html#appendix
                    throw new IOException(sm.getString("inputFilter.maxSwallow"));
                }
            } else {
                // errors are handled higher up.
                remaining = 0;
            }
        }

        // If too many bytes were read, return the amount.
        return -remaining;

    }


    /**
     * Amount of bytes still available in a buffer.
     */
    @Override
    public int available() {
        return 0;
    }


    /**
     * Set the next buffer in the filter pipeline.
     */
    @Override
    public void setBuffer(InputBuffer buffer) {
        this.buffer = buffer;
    }


    /**
     * Make the filter ready to process the next request.
     */
    @Override
    public void recycle() {
        contentLength = -1;
        remaining = 0;
        endChunk.recycle();
    }


    /**
     * Return the name of the associated encoding; Here, the value is
     * "identity".
     */
    @Override
    public ByteChunk getEncodingName() {
        return ENCODING;
    }


}
/* * Copyright 2019 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.collector.receiver.grpc; import com.google.common.util.concurrent.Uninterruptibles; import com.google.protobuf.GeneratedMessageV3; import com.navercorp.pinpoint.common.profiler.concurrent.PinpointThreadFactory; import com.navercorp.pinpoint.common.trace.ServiceType; import com.navercorp.pinpoint.grpc.AgentHeaderFactory; import com.navercorp.pinpoint.grpc.client.ChannelFactory; import com.navercorp.pinpoint.grpc.client.ChannelFactoryBuilder; import com.navercorp.pinpoint.grpc.client.config.ClientOption; import com.navercorp.pinpoint.grpc.client.DefaultChannelFactoryBuilder; import com.navercorp.pinpoint.grpc.client.HeaderFactory; import com.navercorp.pinpoint.grpc.trace.MetadataGrpc; import com.navercorp.pinpoint.grpc.trace.PApiMetaData; import com.navercorp.pinpoint.grpc.trace.PResult; import com.navercorp.pinpoint.grpc.trace.PSqlMetaData; import com.navercorp.pinpoint.grpc.trace.PStringMetaData; import com.navercorp.pinpoint.profiler.sender.grpc.RetryResponseStreamObserver; import com.navercorp.pinpoint.profiler.sender.grpc.RetryScheduler; import io.grpc.ManagedChannel; import io.grpc.stub.StreamObserver; import io.netty.util.HashedWheelTimer; import io.netty.util.Timeout; import io.netty.util.Timer; import io.netty.util.TimerTask; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; import java.util.ArrayList; import java.util.List; 
import java.util.concurrent.Executors; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import static com.navercorp.pinpoint.grpc.MessageFormatUtils.debugLog; public class MetadataClientMock { private static final int MAX_TOTAL_ATTEMPTS = 3; private static final ScheduledExecutorService RECONNECT_SCHEDULER = Executors.newScheduledThreadPool(1, new PinpointThreadFactory("Pinpoint-reconnect-thread")); private final Logger logger = LogManager.getLogger(this.getClass()); private final ChannelFactory channelFactory; private final ManagedChannel channel; private final Timer retryTimer; private final RetryScheduler<GeneratedMessageV3, PResult> retryScheduler; private volatile MetadataGrpc.MetadataStub metadataStub; private final AtomicInteger requestCounter = new AtomicInteger(0); private final AtomicInteger responseCounter = new AtomicInteger(0); private final List<String> responseList = new ArrayList<>(); public MetadataClientMock(final String host, final int port, final boolean agentHeader) { this.retryTimer = newTimer(this.getClass().getName()); this.channelFactory = newChannelFactory(); this.channel = channelFactory.build(host, port); this.metadataStub = MetadataGrpc.newStub(channel); this.retryScheduler = new RetryScheduler<GeneratedMessageV3, PResult>() { @Override public boolean isSuccess(PResult response) { return response.getSuccess(); } @Override public void scheduleNextRetry(GeneratedMessageV3 request, int remainingRetryCount) { MetadataClientMock.this.scheduleNextRetry(request, remainingRetryCount); } }; } private ChannelFactory newChannelFactory() { HeaderFactory headerFactory = new AgentHeaderFactory("mockAgentId", "mockAgentName", "mockApplicationName", ServiceType.UNDEFINED.getCode(), System.currentTimeMillis()); ChannelFactoryBuilder channelFactoryBuilder = new 
DefaultChannelFactoryBuilder("MetadataClientMock"); channelFactoryBuilder.setHeaderFactory(headerFactory); channelFactoryBuilder.setClientOption(new ClientOption()); return channelFactoryBuilder.build(); } public void stop() throws InterruptedException { stop(5); } public void stop(long await) throws InterruptedException { channel.shutdown().awaitTermination(await, TimeUnit.SECONDS); channelFactory.close(); retryTimer.stop(); } private Timer newTimer(String name) { ThreadFactory threadFactory = new PinpointThreadFactory(PinpointThreadFactory.DEFAULT_THREAD_NAME_PREFIX + name, true); return new HashedWheelTimer(threadFactory, 100, TimeUnit.MILLISECONDS, 512, false, 100); } public void apiMetaData() { apiMetaData(1); } public void apiMetaData(final int count) { for (int i = 0; i < count; i++) { PApiMetaData request = PApiMetaData.newBuilder().setApiId(i).build(); request(request, MAX_TOTAL_ATTEMPTS); } } public void sqlMetaData() { sqlMetaData(1); } public void sqlMetaData(final int count) { for (int i = 0; i < count; i++) { PSqlMetaData request = PSqlMetaData.newBuilder().build(); request(request, MAX_TOTAL_ATTEMPTS); } } public void stringMetaData() { stringMetaData(1); } public void stringMetaData(final int count) { for (int i = 0; i < count; i++) { PStringMetaData request = PStringMetaData.newBuilder().build(); request(request, MAX_TOTAL_ATTEMPTS); } } public List<String> getResponseList() { return responseList; } private void request(GeneratedMessageV3 message, int retryCount) { if (retryCount <= 0) { logger.warn("Drop message {}", debugLog(message)); return; } if (message instanceof PSqlMetaData) { PSqlMetaData sqlMetaData = (PSqlMetaData) message; StreamObserver<PResult> responseObserver = newResponseObserver(message, retryCount); this.metadataStub.requestSqlMetaData(sqlMetaData, responseObserver); } else if (message instanceof PApiMetaData) { final PApiMetaData apiMetaData = (PApiMetaData) message; StreamObserver<PResult> responseObserver = 
newResponseObserver(message, retryCount); this.metadataStub.requestApiMetaData(apiMetaData, responseObserver); } else if (message instanceof PStringMetaData) { final PStringMetaData stringMetaData = (PStringMetaData) message; StreamObserver<PResult> responseObserver = newResponseObserver(message, retryCount); this.metadataStub.requestStringMetaData(stringMetaData, responseObserver); } else { logger.warn("Unsupported message {}", debugLog(message)); } int requestCount = requestCounter.getAndIncrement(); logger.info("Request {} {}", requestCount, message); Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); } private void scheduleNextRetry(GeneratedMessageV3 request, int remainingRetryCount) { final TimerTask timerTask = new TimerTask() { @Override public void run(Timeout timeout) { if (timeout.cancel()) { return; } logger.info("Retry {} {}", remainingRetryCount, request); request(request, remainingRetryCount - 1); } }; try { retryTimer.newTimeout(timerTask, 1000, TimeUnit.MILLISECONDS); } catch (RejectedExecutionException e) { logger.debug("retry fail {}", e.getCause(), e); } } private StreamObserver<PResult> newResponseObserver(GeneratedMessageV3 message, int retryCount) { return new RetryResponseStreamObserver<GeneratedMessageV3, PResult>(logger, this.retryScheduler, message, retryCount); } }
package org.apache.sling.coffee;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.InputStream;
import java.util.Dictionary;
import java.util.HashMap;
import java.util.Hashtable;

import javax.jcr.Binary;
import javax.jcr.Node;
import javax.jcr.Property;

import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ResourceResolverFactory;
import org.apache.sling.coffee.CoffeeScriptCompilerImpl;
import org.apache.sling.webresource.WebResourceInventoryManager;
import org.apache.sling.webresource.WebResourceScriptCache;
import org.apache.sling.webresource.WebResourceScriptRunnerFactory;
import org.apache.sling.webresource.impl.WebResourceInventoryManagerImpl;
import org.apache.sling.webresource.impl.WebResourceScriptCacheImpl;
import org.apache.sling.webresource.impl.WebResourceScriptRunnerFactoryImpl;
import org.junit.BeforeClass;
import org.junit.AfterClass;
import org.junit.Test;
import org.osgi.service.component.ComponentContext;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.FileUtils;

import static org.easymock.EasyMock.*;

import junit.framework.TestCase;

/**
 *
 * CoffeeScript compiler tests
 *
 * Based on the language demo at http://coffeescript.org
 *
 * Each test compiles a sample .coffee file and compares the output with the
 * expected JavaScript under /result-js. This class extends the JUnit 3
 * {@link TestCase}, so the lifecycle runs through setUp()/tearDown() — the
 * JUnit 4 annotations previously placed on instance methods (@BeforeClass /
 * @AfterClass) were ignored by the JUnit 3 runner, which meant the EasyMock
 * verification in {@link #after()} never executed. tearDown() now delegates
 * to it.
 *
 * @author bpaulin
 *
 */
public class CoffeeScriptCompilerImplTest extends TestCase {

    private CoffeeScriptCompilerImpl coffeeEx;
    private WebResourceScriptRunnerFactoryImpl webResourceScriptRunnerFactory;

    private ResourceResolverFactory mockResourceResolverFactory;
    private ComponentContext mockComponentContext;
    private ResourceResolver mockResourceResolver;
    private Resource mockResource;
    private Node mockNode;
    private Property mockProperty;
    private Binary mockBinary;

    /**
     * Wires the compiler with a script-runner factory and EasyMock mocks that
     * serve /coffee-script.js from the configured repository path, then
     * activates the component. Runs before every test (JUnit 3 lifecycle).
     */
    @Override
    public void setUp() throws Exception {
        super.setUp();
        coffeeEx = new CoffeeScriptCompilerImpl();
        webResourceScriptRunnerFactory = new WebResourceScriptRunnerFactoryImpl();

        WebResourceInventoryManager inventoryManagerMock = new WebResourceInventoryManagerImpl();
        WebResourceScriptCache scriptCacheMock = new WebResourceScriptCacheImpl();

        webResourceScriptRunnerFactory.setWebResourceInventoryManager(inventoryManagerMock);
        webResourceScriptRunnerFactory.setWebResourceScriptCache(scriptCacheMock);

        mockResourceResolverFactory = createMock(ResourceResolverFactory.class);
        coffeeEx.setResourceResolverFactory(mockResourceResolverFactory);
        coffeeEx.setWebResourceScriptRunnerFactory(webResourceScriptRunnerFactory);

        mockComponentContext = createMock(ComponentContext.class);
        mockResourceResolver = createMock(ResourceResolver.class);
        mockResource = createMock(Resource.class);
        mockNode = createMock(Node.class);
        mockProperty = createMock(Property.class);
        mockBinary = createMock(Binary.class);

        InputStream coffeeInputStream = getClass().getResourceAsStream("/coffee-script.js");

        // Component configuration consumed by activate().
        Hashtable bundleProperties = new Hashtable();
        bundleProperties.put("coffee.compiler.path", "/test/path");
        bundleProperties.put("coffee.cache.path", "/test/path2");

        expect(mockComponentContext.getProperties()).andReturn(bundleProperties);
        expect(mockResourceResolverFactory.getAdministrativeResourceResolver(null)).andReturn(mockResourceResolver);
        expect(mockResourceResolver.getResource("/test/path")).andReturn(mockResource);
        expect(mockResource.adaptTo(Node.class)).andReturn(mockNode);
        expect(mockNode.getNode(Property.JCR_CONTENT)).andReturn(mockNode);
        expect(mockNode.getProperty(Property.JCR_DATA)).andReturn(mockProperty);
        expect(mockProperty.getBinary()).andReturn(mockBinary);
        expect(mockBinary.getStream()).andReturn(coffeeInputStream);
        mockResourceResolver.close();

        replay(mockBinary);
        replay(mockProperty);
        replay(mockComponentContext);
        replay(mockNode);
        replay(mockResource);
        replay(mockResourceResolver);
        replay(mockResourceResolverFactory);

        coffeeEx.activate(mockComponentContext);
    }

    @Test
    public void testFunctionCompile() throws Exception {
        testFileCompile("functions.coffee");
    }

    @Test
    public void testDefaultFunctionCompile() throws Exception {
        testFileCompile("defaultFunctions.coffee");
    }

    @Test
    public void testObjectAndArrayCompile() throws Exception {
        testFileCompile("objectAndArrays.coffee");
    }

    @Test
    public void testJavaScriptReservedWordCompile() throws Exception {
        testFileCompile("reservedWords.coffee");
    }

    @Test
    public void testLexicalScopingSafetyCompile() throws Exception {
        testFileCompile("lexicalScopingSafety.coffee");
    }

    @Test
    public void testIfElseUnlessCompile() throws Exception {
        testFileCompile("ifElseUnless.coffee");
    }

    @Test
    public void testSplatsCompile() throws Exception {
        testFileCompile("splats.coffee");
    }

    @Test
    public void testLoopsCompile() throws Exception {
        testFileCompile("loops.coffee");
    }

    @Test
    public void testComprehensionCompile() throws Exception {
        testFileCompile("comprehension.coffee");
    }

    @Test
    public void testArraySlicingSplicingCompile() throws Exception {
        testFileCompile("arraySlicingSplicing.coffee");
    }

    @Test
    public void testExpressionsCompile() throws Exception {
        testFileCompile("expressions.coffee");
    }

    @Test
    public void testOperatorsAliasesCompile() throws Exception {
        testFileCompile("operatorsAliases.coffee");
    }

    @Test
    public void testClassesCompile() throws Exception {
        testFileCompile("classes.coffee");
    }

    @Test
    public void testAssignmentCompile() throws Exception {
        testFileCompile("assignment.coffee");
    }

    @Test
    public void testFunctionBindingCompile() throws Exception {
        testFileCompile("functionBinding.coffee");
    }

    @Test
    public void testEmbeddedJsCompile() throws Exception {
        testFileCompile("embeddedJs.coffee");
    }

    @Test
    public void testSwitchWhenElseCompile() throws Exception {
        testFileCompile("switchWhenElse.coffee");
    }

    @Test
    public void testTryCatchFinallyCompile() throws Exception {
        testFileCompile("tryCatchFinally.coffee");
    }

    @Test
    public void testChainedComparisionsCompile() throws Exception {
        testFileCompile("chainedComparisons.coffee");
    }

    @Test
    public void testStringInterpolationCommentsCompile() throws Exception {
        testFileCompile("stringInterpolationComments.coffee");
    }

    @Test
    public void testBlockRegularExpressionsCompile() throws Exception {
        testFileCompile("blockRegularExpressions.coffee");
    }

    /**
     * Compiles the named CoffeeScript fixture and asserts the result matches
     * the corresponding expected JavaScript file.
     */
    private void testFileCompile(String fileName) throws Exception {
        InputStream coffeeScriptStream = getTestCoffeeFile(fileName);
        InputStream result = coffeeEx.compile(coffeeScriptStream);
        assertEquals("File named: " + fileName + " should compile to proper JavaScript",
                IOUtils.toString(getResultJavaScriptFile(fileName), "UTF-8"),
                IOUtils.toString(result, "UTF-8"));
    }

    /** Opens a classpath resource as a stream. */
    private InputStream convertFileToStream(String filePath) throws Exception {
        return getClass().getResourceAsStream(filePath);
    }

    /** Returns the CoffeeScript fixture under /test-coffee. */
    private InputStream getTestCoffeeFile(String fileName) throws Exception {
        return convertFileToStream("/test-coffee/" + fileName);
    }

    /** Returns the expected JavaScript output under /result-js. */
    private InputStream getResultJavaScriptFile(String fileName) throws Exception {
        fileName = fileName.replaceFirst(".coffee", "");
        return convertFileToStream("/result-js/" + fileName + ".js");
    }

    /**
     * Verifies every mock interaction expected by {@link #setUp()}.
     * Previously annotated with the JUnit 4 {@code @AfterClass}, which the
     * JUnit 3 runner ignores, so this never executed; it is now invoked from
     * {@link #tearDown()}. Kept public for any external callers.
     */
    public void after() {
        verify(mockBinary);
        verify(mockProperty);
        verify(mockComponentContext);
        verify(mockNode);
        verify(mockResource);
        verify(mockResourceResolver);
        verify(mockResourceResolverFactory);
    }

    /** JUnit 3 lifecycle hook: runs the mock verification after every test. */
    @Override
    protected void tearDown() throws Exception {
        after();
        super.tearDown();
    }
}
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.externalSystem.service.project.autoimport; import com.intellij.ProjectTopics; import com.intellij.ide.file.BatchFileChangeListener; import com.intellij.notification.*; import com.intellij.openapi.application.*; import com.intellij.openapi.components.ServiceManager; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.EditorFactory; import com.intellij.openapi.editor.event.*; import com.intellij.openapi.externalSystem.ExternalSystemAutoImportAware; import com.intellij.openapi.externalSystem.ExternalSystemManager; import com.intellij.openapi.externalSystem.model.DataNode; import com.intellij.openapi.externalSystem.model.ExternalSystemDataKeys; import com.intellij.openapi.externalSystem.model.ProjectSystemId; import com.intellij.openapi.externalSystem.model.project.ProjectData; import com.intellij.openapi.externalSystem.model.task.*; import com.intellij.openapi.externalSystem.service.execution.ProgressExecutionMode; import com.intellij.openapi.externalSystem.service.internal.ExternalSystemProcessingManager; import com.intellij.openapi.externalSystem.service.notification.ExternalSystemProgressNotificationManager; import com.intellij.openapi.externalSystem.service.project.ExternalProjectRefreshCallback; import com.intellij.openapi.externalSystem.service.project.ProjectDataManager; import 
com.intellij.openapi.externalSystem.settings.ExternalProjectSettings; import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil; import com.intellij.openapi.externalSystem.util.ExternalSystemBundle; import com.intellij.openapi.externalSystem.util.ExternalSystemUtil; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.fileEditor.impl.FileDocumentManagerImpl; import com.intellij.openapi.module.Module; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ModuleRootEvent; import com.intellij.openapi.roots.ModuleRootListener; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.vfs.*; import com.intellij.openapi.vfs.newvfs.events.VFileContentChangeEvent; import com.intellij.openapi.vfs.newvfs.events.VFileEvent; import com.intellij.openapi.vfs.pointers.VirtualFilePointer; import com.intellij.openapi.vfs.pointers.VirtualFilePointerManager; import com.intellij.psi.*; import com.intellij.psi.impl.source.tree.LeafElement; import com.intellij.util.PathUtil; import com.intellij.util.SmartList; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.MultiMap; import com.intellij.util.messages.MessageBusConnection; import com.intellij.util.ui.update.MergingUpdateQueue; import com.intellij.util.ui.update.Update; import gnu.trove.THashSet; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.event.HyperlinkEvent; import java.io.File; import java.util.*; import java.util.stream.Collectors; import java.util.zip.CRC32; import static com.intellij.util.ui.update.MergingUpdateQueue.ANY_COMPONENT; /** * @author Vladislav.Soroka * @since 1/30/2017 */ public class ExternalSystemProjectsWatcherImpl extends ExternalSystemTaskNotificationListenerAdapter implements ExternalSystemProjectsWatcher { 
private static final Key<Long> CRC_WITHOUT_SPACES_CURRENT = Key.create("ExternalSystemProjectsWatcher.CRC_WITHOUT_SPACES_CURRENT"); private static final Key<Long> CRC_WITHOUT_SPACES_BEFORE_LAST_IMPORT = Key.create("ExternalSystemProjectsWatcher.CRC_WITHOUT_SPACES_BEFORE_LAST_IMPORT"); private static final int DOCUMENT_SAVE_DELAY = 1000; private final Project myProject; private final Set<Document> myChangedDocuments = new THashSet<>(); private final MergingUpdateQueue myChangedDocumentsQueue; private final List<ExternalSystemAutoImportAware> myImportAwareManagers; private final MergingUpdateQueue myUpdatesQueue; private final Map<ProjectSystemId, MyNotification> myNotificationMap; private final MultiMap<String/* project path */, String /* files paths */> myKnownAffectedFiles = MultiMap.createConcurrentSet(); private final MultiMap<VirtualFilePointer, String /* project path */> myFilesPointers = MultiMap.createConcurrentSet(); private final List<LocalFileSystem.WatchRequest> myWatchedRoots = new ArrayList<>(); private final MergingUpdateQueue myRefreshRequestsQueue; public ExternalSystemProjectsWatcherImpl(Project project) { myProject = project; myChangedDocumentsQueue = new MergingUpdateQueue("ExternalSystemProjectsWatcher: Document changes queue", DOCUMENT_SAVE_DELAY, false, ANY_COMPONENT, myProject); myRefreshRequestsQueue = new MergingUpdateQueue("ExternalSystemProjectsWatcher: Refresh requests queue", 0, false, ANY_COMPONENT, myProject, null, false); myImportAwareManagers = ContainerUtil.newArrayList(); for (ExternalSystemManager<?, ?, ?, ?, ?> manager : ExternalSystemApiUtil.getAllManagers()) { if (manager instanceof ExternalSystemAutoImportAware) { myImportAwareManagers.add((ExternalSystemAutoImportAware)manager); NotificationsConfiguration.getNotificationsConfiguration().register( manager.getSystemId().getReadableName() + " Import", NotificationDisplayType.STICKY_BALLOON, false); } } myUpdatesQueue = new MergingUpdateQueue("ExternalSystemProjectsWatcher: 
Notifier queue", 500, false, ANY_COMPONENT, myProject); myNotificationMap = ContainerUtil.newConcurrentMap(); ApplicationManager.getApplication().getMessageBus().connect(myProject).subscribe(BatchFileChangeListener.TOPIC, new BatchFileChangeListener() { @Override public void batchChangeStarted(Project project) { myRefreshRequestsQueue.suspend(); } @Override public void batchChangeCompleted(Project project) { myRefreshRequestsQueue.resume(); } }); } @Override public void markDirtyAllExternalProjects() { findLinkedProjectsSettings().forEach(this::scheduleUpdate); } @Override public void markDirty(Module module) { scheduleUpdate(ExternalSystemApiUtil.getExternalProjectPath(module)); } @Override public void markDirty(String projectPath) { scheduleUpdate(projectPath); } public synchronized void start() { if (ExternalSystemUtil.isNoBackgroundMode()) { return; } myUpdatesQueue.activate(); final MessageBusConnection myBusConnection = myProject.getMessageBus().connect(myChangedDocumentsQueue); myBusConnection.subscribe(VirtualFileManager.VFS_CHANGES, new MyFileChangeListener(this)); makeUserAware(myChangedDocumentsQueue, myProject); myChangedDocumentsQueue.activate(); myRefreshRequestsQueue.activate(); DocumentListener myDocumentListener = new DocumentListener() { @Override public void documentChanged(DocumentEvent event) { Document doc = event.getDocument(); VirtualFile file = FileDocumentManager.getInstance().getFile(doc); if (file == null) return; String externalProjectPath = getRelatedExternalProjectPath(file); if (externalProjectPath == null) return; synchronized (myChangedDocuments) { myChangedDocuments.add(doc); } myChangedDocumentsQueue.queue(new Update(ExternalSystemProjectsWatcherImpl.this) { @Override public void run() { final Document[] copy; synchronized (myChangedDocuments) { copy = myChangedDocuments.toArray(new Document[myChangedDocuments.size()]); myChangedDocuments.clear(); } ExternalSystemUtil.invokeLater(myProject, () -> new WriteAction() { @Override 
protected void run(@NotNull Result result) throws Throwable { for (Document each : copy) { PsiDocumentManager.getInstance(myProject).commitDocument(each); ((FileDocumentManagerImpl)FileDocumentManager.getInstance()).saveDocument(each, false); } } }.execute()); } }); } }; EditorFactory.getInstance().getEventMulticaster().addDocumentListener(myDocumentListener, myBusConnection); ServiceManager.getService(ExternalSystemProgressNotificationManager.class).addNotificationListener(this); updateWatchedRoots(true); Disposer.register(myChangedDocumentsQueue, () -> myFilesPointers.clear()); } public synchronized void stop() { Disposer.dispose(myChangedDocumentsQueue); Disposer.dispose(myUpdatesQueue); Disposer.dispose(myRefreshRequestsQueue); myNotificationMap.clear(); ServiceManager.getService(ExternalSystemProgressNotificationManager.class).removeNotificationListener(this); } @Override public void onStart(@NotNull ExternalSystemTaskId id, String workingDir) { if (id.getType() == ExternalSystemTaskType.RESOLVE_PROJECT) { final ProjectSystemId systemId = id.getProjectSystemId(); for (String filePath : ContainerUtil.newArrayList(myKnownAffectedFiles.get(workingDir))) { VirtualFile file = VfsUtil.findFileByIoFile(new File(filePath), false); if (file != null && !file.isDirectory()) { file.putUserData(CRC_WITHOUT_SPACES_BEFORE_LAST_IMPORT, file.getUserData(CRC_WITHOUT_SPACES_CURRENT)); } } myUpdatesQueue.queue(new Update(Pair.create(systemId, workingDir)) { @Override public void run() { doUpdateNotifications(true, systemId, workingDir); } }); } } @Override public void onSuccess(@NotNull ExternalSystemTaskId id) { if (id.getType() == ExternalSystemTaskType.RESOLVE_PROJECT) { updateWatchedRoots(false); } } private void scheduleUpdate(String projectPath) { Pair<ExternalSystemManager, ExternalProjectSettings> linkedProject = findLinkedProjectSettings(projectPath); if (linkedProject == null) return; scheduleUpdate(linkedProject); } private void scheduleUpdate(@NotNull 
Pair<ExternalSystemManager, ExternalProjectSettings> linkedProject) { ExternalSystemManager<?, ?, ?, ?, ?> manager = linkedProject.first; String projectPath = linkedProject.second.getExternalProjectPath(); ProjectSystemId systemId = manager.getSystemId(); boolean useAutoImport = linkedProject.second.isUseAutoImport(); if (useAutoImport) { final ExternalSystemTask resolveTask = ServiceManager.getService(ExternalSystemProcessingManager.class) .findTask(ExternalSystemTaskType.RESOLVE_PROJECT, systemId, projectPath); final ExternalSystemTaskState taskState = resolveTask == null ? null : resolveTask.getState(); if (taskState == null || taskState.isStopped()) { addToRefreshQueue(projectPath, systemId); } else if (taskState != ExternalSystemTaskState.NOT_STARTED) { // re-schedule to wait for the active project import task end final ExternalSystemProgressNotificationManager progressManager = ServiceManager.getService(ExternalSystemProgressNotificationManager.class); final ExternalSystemTaskNotificationListenerAdapter taskListener = new ExternalSystemTaskNotificationListenerAdapter() { @Override public void onEnd(@NotNull ExternalSystemTaskId id) { progressManager.removeNotificationListener(this); addToRefreshQueue(projectPath, systemId); } }; progressManager.addNotificationListener(resolveTask.getId(), taskListener); } } else { myUpdatesQueue.queue(new Update(Pair.create(systemId, projectPath)) { @Override public void run() { doUpdateNotifications(false, systemId, projectPath); } }); } } private void addToRefreshQueue(String projectPath, ProjectSystemId systemId) { myRefreshRequestsQueue.queue(new Update(Pair.create(systemId, projectPath)) { @Override public void run() { scheduleRefresh(myProject, projectPath, systemId, false); } }); } private void updateWatchedRoots(boolean isProjectOpen) { List<String> pathsToWatch = new SmartList<>(); myFilesPointers.clear(); LocalFileSystem.getInstance().removeWatchedRoots(myWatchedRoots); Map<String, VirtualFilePointer> pointerMap = 
ContainerUtil.newHashMap(); for (ExternalSystemManager<?, ?, ?, ?, ?> manager : ExternalSystemApiUtil.getAllManagers()) { if (!(manager instanceof ExternalSystemAutoImportAware)) continue; ExternalSystemAutoImportAware importAware = (ExternalSystemAutoImportAware)manager; for (ExternalProjectSettings settings : manager.getSettingsProvider().fun(myProject).getLinkedProjectsSettings()) { List<File> files = importAware.getAffectedExternalProjectFiles(settings.getExternalProjectPath(), myProject); long timeStamp = 0; for (File file : files) { timeStamp += file.lastModified(); } Map<String, Long> modificationStamps = manager.getLocalSettingsProvider().fun(myProject).getExternalConfigModificationStamps(); if (isProjectOpen && myProject.getUserData(ExternalSystemDataKeys.NEWLY_CREATED_PROJECT) != Boolean.TRUE) { Long affectedFilesTimestamp = modificationStamps.get(settings.getExternalProjectPath()); affectedFilesTimestamp = affectedFilesTimestamp == null ? -1L : affectedFilesTimestamp; if (timeStamp != affectedFilesTimestamp.longValue()) { scheduleUpdate(settings.getExternalProjectPath()); } } else { modificationStamps.put(settings.getExternalProjectPath(), timeStamp); } for (File file : files) { if (file == null) continue; String path = getNormalizedPath(file); if (path == null) continue; pathsToWatch.add(path); String url = VfsUtilCore.pathToUrl(path); VirtualFilePointer pointer = pointerMap.get(url); if (pointer == null) { pointer = VirtualFilePointerManager.getInstance().create(url, myChangedDocumentsQueue, null); pointerMap.put(url, pointer); // update timestamps based on file crc and local settings final VirtualFile virtualFile = pointer.getFile(); if (virtualFile != null) { Long crc = virtualFile.getUserData(CRC_WITHOUT_SPACES_BEFORE_LAST_IMPORT); if (crc != null) { modificationStamps.put(path, crc); } } } myFilesPointers.putValue(pointer, settings.getExternalProjectPath()); } } } myWatchedRoots.addAll(LocalFileSystem.getInstance().addRootsToWatch(pathsToWatch, 
false)); } @Nullable private String getRelatedExternalProjectPath(VirtualFile file) { String path = file.getPath(); return getRelatedExternalProjectPath(path); } @Nullable private String getRelatedExternalProjectPath(String path) { String externalProjectPath = null; for (ExternalSystemAutoImportAware importAware : myImportAwareManagers) { externalProjectPath = importAware.getAffectedExternalProjectPath(path, myProject); if (externalProjectPath != null) { break; } } if (externalProjectPath != null) { myKnownAffectedFiles.putValue(externalProjectPath, path); } return externalProjectPath; } private void doUpdateNotifications(boolean close, @NotNull ProjectSystemId systemId, @NotNull String projectPath) { MyNotification notification = myNotificationMap.get(systemId); if (close) { if (notification == null) return; notification.projectPaths.remove(projectPath); if (notification.projectPaths.isEmpty()) { notification.expire(); } } else { if (notification != null && !notification.isExpired()) { notification.projectPaths.add(projectPath); return; } notification = new MyNotification(myProject, myNotificationMap, systemId, projectPath); myNotificationMap.put(systemId, notification); Notifications.Bus.notify(notification, myProject); } } private static void scheduleRefresh(@NotNull final Project project, String projectPath, ProjectSystemId systemId, final boolean reportRefreshError) { ExternalSystemUtil.refreshProject( project, systemId, projectPath, new ExternalProjectRefreshCallback() { @Override public void onSuccess(@Nullable final DataNode<ProjectData> externalProject) { if (externalProject != null) { ServiceManager.getService(ProjectDataManager.class).importData(externalProject, project, true); } } @Override public void onFailure(@NotNull String errorMessage, @Nullable String errorDetails) { // Do nothing. 
} }, false, ProgressExecutionMode.IN_BACKGROUND_ASYNC, reportRefreshError); } private static void makeUserAware(final MergingUpdateQueue mergingUpdateQueue, final Project project) { AccessToken accessToken = ReadAction.start(); try { EditorEventMulticaster multicaster = EditorFactory.getInstance().getEventMulticaster(); multicaster.addCaretListener(new CaretListener() { @Override public void caretPositionChanged(CaretEvent e) { mergingUpdateQueue.restartTimer(); } }, mergingUpdateQueue); multicaster.addDocumentListener(new DocumentListener() { @Override public void documentChanged(DocumentEvent event) { mergingUpdateQueue.restartTimer(); } }, mergingUpdateQueue); project.getMessageBus().connect(mergingUpdateQueue).subscribe(ProjectTopics.PROJECT_ROOTS, new ModuleRootListener() { int beforeCalled; @Override public void beforeRootsChange(ModuleRootEvent event) { if (beforeCalled++ == 0) { mergingUpdateQueue.suspend(); } } @Override public void rootsChanged(ModuleRootEvent event) { if (beforeCalled == 0) { return; // This may occur if listener has been added between beforeRootsChange() and rootsChanged() calls. 
} if (--beforeCalled == 0) { mergingUpdateQueue.resume(); mergingUpdateQueue.restartTimer(); } } }); } finally { accessToken.finish(); } } private static class MyNotification extends Notification { private final ProjectSystemId mySystemId; private final Map<ProjectSystemId, MyNotification> myNotificationMap; private final Set<String> projectPaths; public MyNotification(Project project, Map<ProjectSystemId, MyNotification> notificationMap, ProjectSystemId systemId, String projectPath) { super(systemId.getReadableName() + " Import", ExternalSystemBundle.message("import.needed", systemId.getReadableName()), "<a href='reimport'>" + ExternalSystemBundle.message("import.importChanged") + "</a>" + " &nbsp;&nbsp;" + "<a href='autoImport'>" + ExternalSystemBundle.message("import.enableAutoImport") + "</a>", NotificationType.INFORMATION, null); mySystemId = systemId; myNotificationMap = notificationMap; projectPaths = ContainerUtil.newHashSet(projectPath); setListener(new NotificationListener.Adapter() { @Override protected void hyperlinkActivated(@NotNull Notification notification, @NotNull HyperlinkEvent event) { boolean isReimport = event.getDescription().equals("reimport"); boolean isAutoImport = event.getDescription().equals("autoImport"); projectPaths.stream() .map(path -> ExternalSystemApiUtil.getSettings(project, systemId).getLinkedProjectSettings(path)) .distinct() .filter(Objects::nonNull) .forEach(settings -> { if (isReimport) { scheduleRefresh(project, settings.getExternalProjectPath(), systemId, true); } if (isAutoImport) { settings.setUseAutoImport(true); scheduleRefresh(project, settings.getExternalProjectPath(), systemId, false); } }); notification.expire(); } }); } @Override public void expire() { super.expire(); projectPaths.clear(); myNotificationMap.remove(mySystemId); } } private class MyFileChangeListener extends FileChangeListenerBase { private final ExternalSystemProjectsWatcherImpl myWatcher; private MultiMap<String/* file path */, String /* project 
path */> myKnownFiles = MultiMap.createSet(); private List<VirtualFile> filesToUpdate; private List<VirtualFile> filesToRemove; public MyFileChangeListener(ExternalSystemProjectsWatcherImpl watcher) { myWatcher = watcher; } @Override protected boolean isRelevant(String path) { if (!myKnownFiles.get(path).isEmpty()) return true; for (VirtualFilePointer pointer : myFilesPointers.keySet()) { VirtualFile f = pointer.getFile(); if (f != null && FileUtil.pathsEqual(path, f.getPath())) { for (String projectPath : myFilesPointers.get(pointer)) { myKnownFiles.putValue(path, projectPath); myKnownAffectedFiles.putValue(projectPath, path); } return true; } } String affectedProjectPath = getRelatedExternalProjectPath(path); if (affectedProjectPath != null) { myKnownFiles.putValue(path, affectedProjectPath); } return affectedProjectPath != null; } @Override protected void updateFile(VirtualFile file, VFileEvent event) { doUpdateFile(file, event, false); } @Override protected void deleteFile(VirtualFile file, VFileEvent event) { doUpdateFile(file, event, true); } private void doUpdateFile(VirtualFile file, VFileEvent event, boolean remove) { init(); if (remove) { filesToRemove.add(file); } else { if (fileWasChanged(file, event)) { filesToUpdate.add(file); } else { for (String externalProjectPath : myKnownFiles.get(file.getPath())) { handleRevertedChanges(externalProjectPath); } } } } private void handleRevertedChanges(final String externalProjectPath) { for (String filePath : ContainerUtil.newArrayList(myKnownAffectedFiles.get(externalProjectPath))) { VirtualFile f = VfsUtil.findFileByIoFile(new File(filePath), false); if (f == null || !Objects.equals(f.getUserData(CRC_WITHOUT_SPACES_BEFORE_LAST_IMPORT), f.getUserData(CRC_WITHOUT_SPACES_CURRENT))) { return; } } ProjectSystemId systemId = null; for (ExternalSystemManager<?, ?, ?, ?, ?> manager : ExternalSystemApiUtil.getAllManagers()) { if (manager.getSettingsProvider().fun(myProject).getLinkedProjectSettings(externalProjectPath) 
!= null) { systemId = manager.getSystemId(); } } if (systemId != null) { ProjectSystemId finalSystemId = systemId; myUpdatesQueue.queue(new Update(Pair.create(finalSystemId, externalProjectPath)) { @Override public void run() { doUpdateNotifications(true, finalSystemId, externalProjectPath); } }); } } private boolean fileWasChanged(VirtualFile file, VFileEvent event) { if (!file.isValid() || !(event instanceof VFileContentChangeEvent)) return true; Long newCrc = calculateCrc(file); file.putUserData(CRC_WITHOUT_SPACES_CURRENT, newCrc); Long crc = file.getUserData(CRC_WITHOUT_SPACES_BEFORE_LAST_IMPORT); if (crc == null) { file.putUserData(CRC_WITHOUT_SPACES_BEFORE_LAST_IMPORT, newCrc); return true; } return !newCrc.equals(crc); } @Override protected void apply() { // the save may occur during project close. in this case the background task // can not be started since the window has already been closed. if (areFileSetsInitialised()) { filesToUpdate.removeAll(filesToRemove); scheduleUpdate(ContainerUtil.concat(filesToUpdate, filesToRemove)); } clear(); } private boolean areFileSetsInitialised() { return filesToUpdate != null; } private void scheduleUpdate(List<VirtualFile> filesToUpdate) { filesToUpdate.stream() .flatMap(f -> myKnownFiles.get(f.getPath()).stream()) .distinct() .forEach(path -> myWatcher.scheduleUpdate(path)); } private void init() { // Do not use before() method to initialize the lists // since the listener can be attached during the update // and before method can be skipped. // The better way to fix if, of course, is to do something with // subscription - add listener not during postStartupActivity // but on project initialization to avoid this situation. 
if (areFileSetsInitialised()) return; filesToUpdate = new ArrayList<>(); filesToRemove = new ArrayList<>(); } private void clear() { filesToUpdate = null; filesToRemove = null; myKnownFiles.clear(); } } @Nullable private Pair<ExternalSystemManager, ExternalProjectSettings> findLinkedProjectSettings(String projectPath) { final ExternalProjectSettings[] linkedProjectSettings = new ExternalProjectSettings[1]; Optional<ExternalSystemManager<?, ?, ?, ?, ?>> systemManager = ExternalSystemApiUtil.getAllManagers().stream() .filter(m -> { linkedProjectSettings[0] = m.getSettingsProvider().fun(myProject).getLinkedProjectSettings(projectPath); return linkedProjectSettings[0] != null; }).findAny(); if (!systemManager.isPresent()) return null; ExternalSystemManager<?, ?, ?, ?, ?> manager = systemManager.get(); return Pair.create(manager, linkedProjectSettings[0]); } @NotNull private List<Pair<ExternalSystemManager, ExternalProjectSettings>> findLinkedProjectsSettings() { return ExternalSystemApiUtil.getAllManagers().stream() .flatMap( manager -> manager.getSettingsProvider().fun(myProject).getLinkedProjectsSettings().stream() .map(settings -> Pair.create((ExternalSystemManager)manager, (ExternalProjectSettings)settings))) .collect(Collectors.toList()); } @Nullable private static String getNormalizedPath(@NotNull File file) { String canonized = PathUtil.getCanonicalPath(file.getAbsolutePath()); return canonized == null ? 
null : FileUtil.toSystemIndependentName(canonized); } @NotNull private Long calculateCrc(VirtualFile file) { Long newCrc; PsiFile psiFile = PsiManager.getInstance(myProject).findFile(file); if (psiFile != null) { final CRC32 crc32 = new CRC32(); ApplicationManager.getApplication().runReadAction(() -> psiFile.acceptChildren(new PsiRecursiveElementVisitor() { @Override public void visitElement(PsiElement element) { if (element instanceof LeafElement && !(element instanceof PsiWhiteSpace) && !(element instanceof PsiComment)) { String text = element.getText(); if (!text.trim().isEmpty()) { for (int i = 0, end = text.length(); i < end; i++) { crc32.update(text.charAt(i)); } } } super.visitElement(element); } })); newCrc = crc32.getValue(); } else { newCrc = file.getModificationStamp(); } return newCrc; } }
package net.eusashead.hateoas.hal.http.converter;

/*
 * #[license]
 * spring-halbuilder
 * %%
 * Copyright (C) 2013 Eusa's Head
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * %[license]
 */

import com.theoryinpractise.halbuilder.api.ReadableRepresentation;
import com.theoryinpractise.halbuilder.api.Representation;
import com.theoryinpractise.halbuilder.api.RepresentationFactory;
import net.eusashead.hateoas.hal.http.converter.module.HalHttpMessageConverterModule;
import org.springframework.http.HttpInputMessage;
import org.springframework.http.HttpOutputMessage;
import org.springframework.http.MediaType;
import org.springframework.http.converter.AbstractHttpMessageConverter;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.HttpMessageNotReadableException;
import org.springframework.http.converter.HttpMessageNotWritableException;

import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * {@link HttpMessageConverter} implementation
 * that handles HAL representations
 * (application/hal+json and application/hal+xml)
 * using the HALBuilder {@link Representation}.
 *
 * <p>Objects that are not themselves {@link ReadableRepresentation}s are
 * converted via registered {@link HalHttpMessageConverterModule}s.
 *
 * @author patrickvk
 */
public class HalHttpMessageConverter extends AbstractHttpMessageConverter<Object> {

    /** Character set used for both reading and writing HAL payloads. */
    public static final Charset CHARSET = Charset.forName("UTF-8");

    // Supported media types (declared with the UTF-8 charset above).
    public static final MediaType HAL_JSON = new MediaType("application", "hal+json", CHARSET);
    public static final MediaType HAL_XML = new MediaType("application", "hal+xml", CHARSET);

    /** RepresentationFactory used to read resources. */
    private final RepresentationFactory factory;

    /** Modules registered with this converter. */
    private final List<HalHttpMessageConverterModule> modules =
            new ArrayList<HalHttpMessageConverterModule>();

    public HalHttpMessageConverter(RepresentationFactory factory) {
        super(HAL_JSON, HAL_XML);
        this.factory = factory;
    }

    /* (non-Javadoc)
     * @see org.springframework.http.converter.AbstractHttpMessageConverter#canRead(java.lang.Class, org.springframework.http.MediaType)
     */
    @Override
    public boolean canRead(Class<?> type, MediaType mediaType) {
        if (!canRead(mediaType)) {
            return false;
        }
        // Readable either natively or through a registered reader module.
        return ReadableRepresentation.class.isAssignableFrom(type) || hasReaderModule(type);
    }

    /* (non-Javadoc)
     * @see org.springframework.http.converter.AbstractHttpMessageConverter#canWrite(java.lang.Class, org.springframework.http.MediaType)
     */
    @Override
    public boolean canWrite(Class<?> type, MediaType mediaType) {
        if (!canWrite(mediaType)) {
            return false;
        }
        // Writable either natively or through a registered writer module.
        return ReadableRepresentation.class.isAssignableFrom(type) || hasWriterModule(type);
    }

    /* (non-Javadoc)
     * @see org.springframework.http.converter.AbstractHttpMessageConverter#readInternal(java.lang.Class, org.springframework.http.HttpInputMessage)
     */
    @Override
    protected Object readInternal(Class<? extends Object> type, HttpInputMessage message)
            throws IOException, HttpMessageNotReadableException {
        if (message.getHeaders().getContentType() == null) {
            throw new IllegalArgumentException(
                    "Cannot read class without content-type set in input message.");
        }
        String contentType = message.getHeaders().getContentType().getType()
                + "/" + message.getHeaders().getContentType().getSubtype();
        // Decode with the declared charset; the no-charset InputStreamReader
        // constructor would use the platform default and corrupt UTF-8 payloads.
        ReadableRepresentation representation = factory.readRepresentation(
                contentType, new InputStreamReader(message.getBody(), CHARSET));
        if (ReadableRepresentation.class.isAssignableFrom(type)) {
            return representation;
        } else if (hasReaderModule(type)) {
            return getReaderModule(type).read(representation, type);
        }
        throw new IllegalArgumentException(String.format("Cannot read class %s", type));
    }

    /* (non-Javadoc)
     * @see org.springframework.http.converter.AbstractHttpMessageConverter#supports(java.lang.Class)
     */
    @Override
    protected boolean supports(Class<?> type) {
        // Never called: canRead/canWrite are overridden instead.
        throw new UnsupportedOperationException();
    }

    /* (non-Javadoc)
     * @see org.springframework.http.converter.AbstractHttpMessageConverter#writeInternal(java.lang.Object, org.springframework.http.HttpOutputMessage)
     */
    @Override
    protected void writeInternal(Object target, HttpOutputMessage outputMessage)
            throws IOException, HttpMessageNotWritableException {
        ReadableRepresentation rep = getRepresentation(target);
        // NOTE(review): contentType is assumed non-null here (Spring resolves a
        // default before invoking writeInternal) — confirm against caller.
        MediaType contentType = outputMessage.getHeaders().getContentType();
        String mediaType = contentType.getType() + "/" + contentType.getSubtype();
        // Encode with the declared charset and flush, so buffered bytes are
        // not lost when the writer is discarded without being closed.
        Writer writer = new OutputStreamWriter(outputMessage.getBody(), CHARSET);
        rep.toString(mediaType, writer);
        writer.flush();
    }

    /**
     * Retrieve the {@link ReadableRepresentation} for
     * the supplied target, converting via a writer module if necessary.
     * @param target object to represent
     * @return the representation
     * @throws IllegalArgumentException if no module can handle the target type
     */
    private ReadableRepresentation getRepresentation(Object target) {
        if (ReadableRepresentation.class.isAssignableFrom(target.getClass())) {
            return (ReadableRepresentation) target;
        } else if (hasWriterModule(target.getClass())) {
            return getWriterModule(target.getClass()).write(target, this.factory);
        }
        throw new IllegalArgumentException(String.format(
                "Unable to create Representation from object of type %s", target.getClass()));
    }

    /**
     * Is there a {@link HalHttpMessageConverterModule}
     * registered that can read this {@link Class}?
     * @param type type to read
     * @return true if there is, false otherwise
     */
    private boolean hasReaderModule(Class<?> type) {
        return getReaderModule(type) != null;
    }

    /**
     * Is there a {@link HalHttpMessageConverterModule}
     * registered that can write this {@link Class}?
     * @param type type to write
     * @return true if there is, false otherwise
     */
    private boolean hasWriterModule(Class<?> type) {
        return getWriterModule(type) != null;
    }

    /**
     * Get a {@link HalHttpMessageConverterModule}
     * that can read the supplied {@link Class} type, if one exists.
     * @param type {@link Class} to read
     * @return matching module or null
     */
    private HalHttpMessageConverterModule getReaderModule(Class<?> type) {
        for (HalHttpMessageConverterModule module : this.modules) {
            if (module.canRead(type)) {
                return module;
            }
        }
        return null;
    }

    /**
     * Get a {@link HalHttpMessageConverterModule}
     * that can write the supplied {@link Class}, or null.
     * @param type {@link Class} to write
     * @return matching module or null
     */
    private HalHttpMessageConverterModule getWriterModule(Class<?> type) {
        for (HalHttpMessageConverterModule module : this.modules) {
            if (module.canWrite(type)) {
                return module;
            }
        }
        return null;
    }

    /**
     * Add a {@link HalHttpMessageConverterModule}
     * to handle a conversion.
     * @param module module to register
     */
    public void addModule(HalHttpMessageConverterModule module) {
        this.modules.add(module);
    }

    /**
     * Get an unmodifiable view of the {@link HalHttpMessageConverterModule}
     * modules registered with this converter.
     * @return registered modules
     */
    public List<HalHttpMessageConverterModule> getModules() {
        return Collections.unmodifiableList(this.modules);
    }
}
/*******************************************************************************
 * Copyright [2014] [Joarder Kamal]
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/

package main.java.cluster;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import main.java.cluster.SSD.SSD_STATE;
import main.java.entry.Global;

/**
 * A physical server in the cluster: owns a fixed set of SSDs, hosts
 * partitions, and tracks load plus inter-server data flow statistics.
 */
public class Server implements Comparable<Server> {

	private int server_id;
	private String server_label;
	private Map<Integer, SSD> server_SSDs;
	private Set<Integer> server_partitions;
	// Load as a percentage of total SSD capacity (rounded to 2 decimals).
	private double server_load;
	private int server_inflow;
	private int server_outflow;
	private int server_total_data;
	private int server_last_assigned_ssd;

	public Server(int id) {
		this.setServer_id(id);
		this.setServer_label("S"+id);
		this.setServer_SSDs(new HashMap<Integer, SSD>());
		this.setServer_partitions(new HashSet<Integer>());
		this.setServer_load(0.0d);
		this.setServer_inflow(0);
		this.setServer_outflow(0);
		this.setServer_total_data(0);
		this.setServer_last_assigned_ssd(0);

		// SSD ids are 1-based; Global.serverSSD is the SSDs-per-server count.
		for(int i = 1; i <= Global.serverSSD; ++i)
			this.getServer_SSDs().put(i, new SSD(i, id));
	}

	public int getServer_id() {
		return server_id;
	}

	public void setServer_id(int server_id) {
		this.server_id = server_id;
	}

	public String getServer_label() {
		return server_label;
	}

	public void setServer_label(String server_label) {
		this.server_label = server_label;
	}

	public double getServer_load() {
		return server_load;
	}

	public void setServer_load(double server_load) {
		this.server_load = server_load;
	}

	public Map<Integer, SSD> getServer_SSDs() {
		return server_SSDs;
	}

	public void setServer_SSDs(Map<Integer, SSD> server_SSDs) {
		this.server_SSDs = server_SSDs;
	}

	public void setServer_partitions(Set<Integer> server_partitions) {
		this.server_partitions = server_partitions;
	}

	public Set<Integer> getServer_partitions() {
		return server_partitions;
	}

	public int getServer_inflow() {
		return server_inflow;
	}

	public void setServer_inflow(int server_inflow) {
		this.server_inflow = server_inflow;
	}

	public int getServer_outflow() {
		return server_outflow;
	}

	public void setServer_outflow(int server_outflow) {
		this.server_outflow = server_outflow;
	}

	public int getServer_total_data() {
		return server_total_data;
	}

	public void setServer_total_data(int server_total_data) {
		this.server_total_data = server_total_data;
	}

	public int getServer_last_assigned_ssd() {
		return server_last_assigned_ssd;
	}

	public void setServer_last_assigned_ssd(int server_last_assigned_ssd) {
		this.server_last_assigned_ssd = server_last_assigned_ssd;
	}

	public void incServer_totalData(int val){
		this.setServer_total_data((this.getServer_total_data() + val));
	}

	public void decServer_totalData(int val){
		this.setServer_total_data((this.getServer_total_data() - val));
	}

	public void incServer_totalData(){
		int val = this.getServer_total_data();
		this.setServer_total_data(++val);
	}

	public void decServer_totalData(){
		int val = this.getServer_total_data();
		this.setServer_total_data(--val);
	}

	public void incServer_inflow(int val){
		this.setServer_inflow((this.getServer_inflow() + val));
	}

	public void decServer_inflow(int val){
		this.setServer_inflow((this.getServer_inflow() - val));
	}

	public void incServer_inflow(){
		int val = this.getServer_inflow();
		this.setServer_inflow(++val);
	}

	public void decServer_inflow(){
		int val = this.getServer_inflow();
		this.setServer_inflow(--val);
	}

	public void incServer_outflow(int val){
		this.setServer_outflow((this.getServer_outflow() + val));
	}

	public void decServer_outflow(int val){
		this.setServer_outflow((this.getServer_outflow() - val));
	}

	public void incServer_outflow(){
		int val = this.getServer_outflow();
		this.setServer_outflow(++val);
	}

	public void decServer_outflow(){
		int val = this.getServer_outflow();
		this.setServer_outflow(--val);
	}

	/**
	 * Recomputes an SSD's load percentage and updates its state.
	 * The state thresholds are checked from most to least severe; the
	 * previous ordering tested {@code > 80} first, which made the
	 * Critical (>= 90) and Faulty (>= 100) branches unreachable.
	 */
	private void updateSSDLoad(Cluster cluster, SSD ssd) {
		double load = 0.0;

		for(int p_id : ssd.getSSD_partitions()) {
			Partition p = cluster.getPartition(p_id);
			load += (double) p.getPartition_dataSet().size();
		}

		load = Math.round((load/Global.serverSSDCapacity) * 100.0) / 100.0;
		ssd.setSSD_load(load);

		if(ssd.getSSD_load() >= 100.0)
			ssd.setSSD_state(SSD_STATE.Faulty);
		else if(ssd.getSSD_load() >= 90.0)
			ssd.setSSD_state(SSD_STATE.Critical);
		else if(ssd.getSSD_load() > 80.0)
			ssd.setSSD_state(SSD_STATE.Overloaded);
	}

	/** Refreshes every SSD's load, then the server-level load percentage. */
	public void updateServer_load(Cluster cluster) {
		// Updating individual SSD load
		for(Entry<Integer, SSD> ssd_entry : this.getServer_SSDs().entrySet())
			this.updateSSDLoad(cluster, ssd_entry.getValue());

		double load = Math.round((double) this.getServer_total_data()
				/(Global.serverSSD * Global.serverSSDCapacity) * 100.0) / 100.0;
		this.setServer_load(load);
	}

	/** Logs this server's SSDs and hosted partitions. */
	public void show(Cluster cluster) {
		for(Entry<Integer, SSD> ssd_entry : this.getServer_SSDs().entrySet())
			Global.LOGGER.info("\t----"+ssd_entry.getValue().toString());

		for(int p_id : this.getServer_partitions()) {
			Partition p = cluster.getPartition(p_id);
			Global.LOGGER.info("\t\t----"+p.toString());
			//p.show();
		}
	}

	@Override
	public String toString() {
		return (this.getServer_label()
				+" - Partitions["+this.getServer_partitions().size()+"] "
				+"| Data["+this.getServer_total_data()+"]"
				+"| Load("+this.getServer_load()+"%)");
	}

	@Override
	public boolean equals(Object object) {
		if (!(object instanceof Server))
			return false;

		Server server = (Server) object;
		// Identity is the label (derived from the id), consistent with hashCode.
		return (this.getServer_label().equals(server.getServer_label()));
	}

	@Override
	public int hashCode() {
		return (this.getServer_label().hashCode());
	}

	@Override
	public int compareTo(Server server) {
		// Integer.compare avoids the hand-rolled double-ternary comparison.
		return Integer.compare(this.server_id, server.server_id);
	}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.datatorrent.stram.engine;

import java.util.ArrayList;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.datatorrent.api.InputOperator;
import com.datatorrent.api.Operator.IdleTimeHandler;
import com.datatorrent.api.Operator.ProcessingMode;
import com.datatorrent.api.Operator.ShutdownException;
import com.datatorrent.api.Sink;

import com.datatorrent.netlet.util.DTThrowable;

import com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol.ContainerStats;
import com.datatorrent.stram.tuple.Tuple;

/**
 * <p>
 * InputNode class.</p>
 *
 * Node wrapper for an {@link InputOperator}: it drives the operator's
 * emitTuples() calls between BEGIN_WINDOW/END_WINDOW control tuples received
 * on its control reservoir, and manages checkpointing per window counts.
 *
 * @since 0.3.2
 */
public class InputNode extends Node<InputOperator>
{
  // Extra reservoirs connected to the control port beyond the first; drained
  // one at a time when the active reservoir ends (see END_STREAM handling).
  private final ArrayList<SweepableReservoir> deferredInputConnections = new ArrayList<SweepableReservoir>();
  // The currently-active control tuple reservoir.
  protected SweepableReservoir controlTuples;

  public InputNode(InputOperator operator, OperatorContext context)
  {
    super(operator, context);
  }

  /**
   * Accepts connections on the control input port only. The first connection
   * becomes the active reservoir; later ones are queued for use after the
   * active one delivers END_STREAM.
   */
  @Override
  public void connectInputPort(String port, SweepableReservoir reservoir)
  {
    if (Node.INPUT.equals(port)) {
      if (controlTuples == null) {
        controlTuples = reservoir;
      } else {
        deferredInputConnections.add(reservoir);
      }
    }
  }

  /**
   * Main event loop: sweeps control tuples, lets the operator emit tuples
   * while inside a streaming window, and handles window/checkpoint/stream
   * control tuples. Runs until END_STREAM, shutdown, or an error.
   *
   * NOTE(review): counters such as applicationWindowCount,
   * checkpointWindowCount, dagCheckpointOffsetCount and constants like
   * APPLICATION_WINDOW_COUNT / CHECKPOINT_WINDOW_COUNT / PROCESSING_MODE are
   * inherited from Node (not visible here) — presumed window bookkeeping.
   */
  @Override
  @SuppressWarnings(value = {"SleepWhileInLoop", "BroadCatchBlock", "TooBroadCatch"})
  public final void run()
  {
    long spinMillis = context.getValue(OperatorContext.SPIN_MILLIS);
    final boolean handleIdleTime = operator instanceof IdleTimeHandler;
    // True when a begin-window was delivered without a matching end-window yet
    // (may already be true if this node resumes mid application window).
    boolean insideApplicationWindow = applicationWindowCount != 0;
    boolean doCheckpoint = false;
    boolean insideStreamingWindow = false;
    calculateNextCheckpointWindow();
    try {
      while (alive) {
        Tuple t = controlTuples.sweep();
        if (t == null) {
          if (insideStreamingWindow) {
            // No control tuple pending: give the operator a chance to emit.
            // Measure sink counts before/after to detect whether it did.
            int generatedTuples = 0;
            for (Sink<Object> cs : sinks) {
              generatedTuples -= cs.getCount(false);
            }
            operator.emitTuples();
            for (Sink<Object> cs : sinks) {
              generatedTuples += cs.getCount(false);
            }
            if (generatedTuples == 0) {
              // Operator is idle: delegate to its idle handler or spin-sleep.
              if (handleIdleTime) {
                ((IdleTimeHandler)operator).handleIdleTime();
              } else {
                Thread.sleep(spinMillis);
              }
            }
          } else {
            // Between windows: just yield.
            Thread.sleep(0);
          }
        } else {
          controlTuples.remove();
          switch (t.getType()) {
            case BEGIN_WINDOW:
              // Forward the control tuple downstream before emitting data.
              for (int i = sinks.length; i-- > 0;) {
                sinks[i].put(t);
              }
              controlTupleCount++;
              currentWindowId = t.getWindowId();
              insideStreamingWindow = true;
              // A new application window starts on the first streaming window.
              if (applicationWindowCount == 0) {
                insideApplicationWindow = true;
                operator.beginWindow(currentWindowId);
              }
              operator.emitTuples(); /* give at least one chance to emit the tuples */
              break;

            case END_WINDOW:
              insideStreamingWindow = false;
              if (++applicationWindowCount == APPLICATION_WINDOW_COUNT) {
                insideApplicationWindow = false;
                operator.endWindow();
                applicationWindowCount = 0;
              }
              endWindowEmitTime = System.currentTimeMillis();
              for (int i = sinks.length; i-- > 0;) {
                sinks[i].put(t);
              }
              controlTupleCount++;
              if (doCheckpoint) {
                // Track how far past the CHECKPOINT marker we are.
                dagCheckpointOffsetCount = (dagCheckpointOffsetCount + 1) % DAG_CHECKPOINT_WINDOW_COUNT;
              }
              if (++checkpointWindowCount == CHECKPOINT_WINDOW_COUNT) {
                checkpointWindowCount = 0;
                if (doCheckpoint) {
                  // A deferred CHECKPOINT request is honored at the boundary.
                  checkpoint(currentWindowId);
                  doCheckpoint = false;
                } else if (PROCESSING_MODE == ProcessingMode.EXACTLY_ONCE) {
                  // EXACTLY_ONCE checkpoints at every checkpoint boundary.
                  checkpoint(currentWindowId);
                }
              }
              ContainerStats.OperatorStats stats = new ContainerStats.OperatorStats();
              reportStats(stats, currentWindowId);
              // Only collect custom metrics when the application window closed.
              if (!insideApplicationWindow) {
                stats.metrics = collectMetrics();
              }
              handleRequests(currentWindowId);
              break;

            case CHECKPOINT:
              dagCheckpointOffsetCount = 0;
              if (checkpointWindowCount == 0 && PROCESSING_MODE != ProcessingMode.EXACTLY_ONCE) {
                // Already at a window boundary: checkpoint immediately.
                checkpoint(currentWindowId);
              } else {
                // Mid-window: defer to the next END_WINDOW boundary.
                doCheckpoint = true;
              }
              for (int i = sinks.length; i-- > 0;) {
                sinks[i].put(t);
              }
              controlTupleCount++;
              break;

            case END_STREAM:
              if (deferredInputConnections.isEmpty()) {
                // Last control source is done: propagate and stop the node.
                for (int i = sinks.length; i-- > 0;) {
                  sinks[i].put(t);
                }
                controlTupleCount++;
                alive = false;
              } else {
                // Switch to the next queued control reservoir.
                controlTuples = deferredInputConnections.remove(0);
              }
              break;

            default:
              // Any other control tuple is forwarded untouched.
              for (int i = sinks.length; i-- > 0;) {
                sinks[i].put(t);
              }
              controlTupleCount++;
              break;
          }
        }
      }
    } catch (ShutdownException se) {
      logger.debug("Shutdown requested by the operator when alive = {}.", alive);
      alive = false;
    } catch (Throwable cause) {
      synchronized (this) {
        if (alive) {
          // Unexpected failure while running: propagate.
          DTThrowable.rethrow(cause);
        }
      }
      // After shutdown, only swallow InterruptedException-rooted failures.
      Throwable rootCause = cause;
      while (rootCause != null) {
        if (rootCause instanceof InterruptedException) {
          break;
        }
        rootCause = rootCause.getCause();
      }
      if (rootCause == null) {
        DTThrowable.rethrow(cause);
      } else {
        logger.debug("Ignoring InterruptedException after shutdown", cause);
      }
    }

    // If the loop exited mid application window, close the window cleanly and
    // report final stats so bookkeeping stays consistent.
    if (insideApplicationWindow) {
      operator.endWindow();
      endWindowEmitTime = System.currentTimeMillis();
      if (++applicationWindowCount == APPLICATION_WINDOW_COUNT) {
        applicationWindowCount = 0;
      }
      if (++checkpointWindowCount == CHECKPOINT_WINDOW_COUNT) {
        checkpointWindowCount = 0;
        if (doCheckpoint || PROCESSING_MODE == ProcessingMode.EXACTLY_ONCE) {
          checkpoint(currentWindowId);
        }
      }
      ContainerStats.OperatorStats stats = new ContainerStats.OperatorStats();
      reportStats(stats, currentWindowId);
      stats.metrics = collectMetrics();
      handleRequests(currentWindowId);
    }
  }

  @SuppressWarnings("unused")
  private static final Logger logger = LoggerFactory.getLogger(InputNode.class);
}
/**
 * Licensed to Apereo under one or more contributor license
 * agreements. See the NOTICE file distributed with this work
 * for additional information regarding copyright ownership.
 * Apereo licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License.  You may obtain a
 * copy of the License at the following location:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.jasig.portal.concurrency.caching;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.when;

import org.jasig.portal.url.IPortalRequestUtils;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

/**
 * Integration tests for request-scoped caching: each test method drives
 * CacheTestInterface with (key, returnNull, throwException) arguments and
 * asserts the underlying invocation count to prove whether the aspect
 * cached the result. A rising count means a cache miss; a flat count
 * means the cached value/null/exception was replayed.
 *
 * @author Eric Dalquist
 * @version $Revision$
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = "classpath:requestCacheAspectTestContext.xml")
public class RequestCacheAspectTest {
    @Autowired private IPortalRequestUtils portalRequestUtils;
    @Autowired private CacheTestInterface cacheTestInterface;

    @Before
    public void setup() {
        // Fresh mock expectations and a zeroed invocation counter per test.
        reset(portalRequestUtils);
        cacheTestInterface.reset();
    }

    // No caching annotation: every call must reach the implementation,
    // so the count increments on every invocation (1 through 7).
    @Test
    public void testNoRequestCache() {
        assertEquals(0, cacheTestInterface.testMethodNoCacheCount());

        String result = cacheTestInterface.testMethodNoCache("1", false, false);
        assertEquals("testMethodNoCache(1)", result);
        assertEquals(1, cacheTestInterface.testMethodNoCacheCount());

        result = cacheTestInterface.testMethodNoCache("1", true, false);
        assertNull(result);
        assertEquals(2, cacheTestInterface.testMethodNoCacheCount());

        result = cacheTestInterface.testMethodNoCache("1", true, false);
        assertNull(result);
        assertEquals(3, cacheTestInterface.testMethodNoCacheCount());

        try {
            result = cacheTestInterface.testMethodNoCache("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(4, cacheTestInterface.testMethodNoCacheCount());

        try {
            result = cacheTestInterface.testMethodNoCache("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(5, cacheTestInterface.testMethodNoCacheCount());

        result = cacheTestInterface.testMethodNoCache("1", false, false);
        assertEquals("testMethodNoCache(1)", result);
        assertEquals(6, cacheTestInterface.testMethodNoCacheCount());

        result = cacheTestInterface.testMethodNoCache("2", false, false);
        assertEquals("testMethodNoCache(2)", result);
        assertEquals(7, cacheTestInterface.testMethodNoCacheCount());
    }

    // Default caching, no arguments: second call is served from cache,
    // so the count stays at 1.
    @Test
    public void testMethodCacheDefaultNoArgs() {
        when(this.portalRequestUtils.getCurrentPortalRequest()).thenReturn(new MockHttpServletRequest());

        assertEquals(0, cacheTestInterface.testMethodNoCacheCountNoArgsCount());

        String result = cacheTestInterface.testMethodCacheDefaultNoArgs();
        assertEquals("testMethodCacheDefaultNoArgs()", result);
        assertEquals(1, cacheTestInterface.testMethodNoCacheCountNoArgsCount());

        result = cacheTestInterface.testMethodCacheDefaultNoArgs();
        assertEquals("testMethodCacheDefaultNoArgs()", result);
        assertEquals(1, cacheTestInterface.testMethodNoCacheCountNoArgsCount());
    }

    // Default caching: values are cached per argument key, but nulls and
    // exceptions are NOT cached (counts rise for those); the final repeat
    // of ("1", false, false) is a cache hit (count stays at 5).
    @Test
    public void testMethodCacheDefault() {
        when(this.portalRequestUtils.getCurrentPortalRequest()).thenReturn(new MockHttpServletRequest());

        assertEquals(0, cacheTestInterface.testMethodCacheDefaultCount());

        String result = cacheTestInterface.testMethodCacheDefault("1", false, false);
        assertEquals("testMethodCacheDefault(1)", result);
        assertEquals(1, cacheTestInterface.testMethodCacheDefaultCount());

        result = cacheTestInterface.testMethodCacheDefault("1", true, false);
        assertNull(result);
        assertEquals(2, cacheTestInterface.testMethodCacheDefaultCount());

        result = cacheTestInterface.testMethodCacheDefault("1", true, false);
        assertNull(result);
        assertEquals(3, cacheTestInterface.testMethodCacheDefaultCount());

        try {
            result = cacheTestInterface.testMethodCacheDefault("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(4, cacheTestInterface.testMethodCacheDefaultCount());

        try {
            result = cacheTestInterface.testMethodCacheDefault("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(5, cacheTestInterface.testMethodCacheDefaultCount());

        result = cacheTestInterface.testMethodCacheDefault("1", false, false);
        assertEquals("testMethodCacheDefault(1)", result);
        assertEquals(5, cacheTestInterface.testMethodCacheDefaultCount());

        result = cacheTestInterface.testMethodCacheDefault("2", false, false);
        assertEquals("testMethodCacheDefault(2)", result);
        assertEquals(6, cacheTestInterface.testMethodCacheDefaultCount());
    }

    // No portal request available (lookup throws): caching is disabled, so
    // every call reaches the implementation (counts 1 through 7).
    @Test
    public void testMethodCacheDefaultNoRequest() {
        when(this.portalRequestUtils.getCurrentPortalRequest()).thenThrow(new IllegalStateException());

        assertEquals(0, cacheTestInterface.testMethodCacheDefaultCount());

        String result = cacheTestInterface.testMethodCacheDefault("1", false, false);
        assertEquals("testMethodCacheDefault(1)", result);
        assertEquals(1, cacheTestInterface.testMethodCacheDefaultCount());

        result = cacheTestInterface.testMethodCacheDefault("1", true, false);
        assertNull(result);
        assertEquals(2, cacheTestInterface.testMethodCacheDefaultCount());

        result = cacheTestInterface.testMethodCacheDefault("1", true, false);
        assertNull(result);
        assertEquals(3, cacheTestInterface.testMethodCacheDefaultCount());

        try {
            result = cacheTestInterface.testMethodCacheDefault("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(4, cacheTestInterface.testMethodCacheDefaultCount());

        try {
            result = cacheTestInterface.testMethodCacheDefault("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(5, cacheTestInterface.testMethodCacheDefaultCount());

        result = cacheTestInterface.testMethodCacheDefault("1", false, false);
        assertEquals("testMethodCacheDefault(1)", result);
        assertEquals(6, cacheTestInterface.testMethodCacheDefaultCount());

        result = cacheTestInterface.testMethodCacheDefault("2", false, false);
        assertEquals("testMethodCacheDefault(2)", result);
        assertEquals(7, cacheTestInterface.testMethodCacheDefaultCount());
    }

    // Null-caching enabled: the second null result is replayed from cache
    // (count stays at 2), but exceptions are still not cached.
    @Test
    public void testMethodCacheNull() {
        when(this.portalRequestUtils.getCurrentPortalRequest()).thenReturn(new MockHttpServletRequest());

        assertEquals(0, cacheTestInterface.testMethodCacheNullCount());

        String result = cacheTestInterface.testMethodCacheNull("1", false, false);
        assertEquals("testMethodCacheNull(1)", result);
        assertEquals(1, cacheTestInterface.testMethodCacheNullCount());

        result = cacheTestInterface.testMethodCacheNull("1", true, false);
        assertNull(result);
        assertEquals(2, cacheTestInterface.testMethodCacheNullCount());

        result = cacheTestInterface.testMethodCacheNull("1", true, false);
        assertNull(result);
        assertEquals(2, cacheTestInterface.testMethodCacheNullCount());

        try {
            result = cacheTestInterface.testMethodCacheNull("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(3, cacheTestInterface.testMethodCacheNullCount());

        try {
            result = cacheTestInterface.testMethodCacheNull("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(4, cacheTestInterface.testMethodCacheNullCount());

        result = cacheTestInterface.testMethodCacheNull("1", false, false);
        assertEquals("testMethodCacheNull(1)", result);
        assertEquals(4, cacheTestInterface.testMethodCacheNullCount());

        result = cacheTestInterface.testMethodCacheNull("2", false, false);
        assertEquals("testMethodCacheNull(2)", result);
        assertEquals(5, cacheTestInterface.testMethodCacheNullCount());
    }

    // Exception-caching enabled: the second throwing call replays the cached
    // exception (count stays at 4), while nulls are still not cached.
    @Test
    public void testMethodCacheThrows() {
        when(this.portalRequestUtils.getCurrentPortalRequest()).thenReturn(new MockHttpServletRequest());

        assertEquals(0, cacheTestInterface.testMethodCacheThrowsCount());

        String result = cacheTestInterface.testMethodCacheThrows("1", false, false);
        assertEquals("testMethodCacheThrows(1)", result);
        assertEquals(1, cacheTestInterface.testMethodCacheThrowsCount());

        result = cacheTestInterface.testMethodCacheThrows("1", true, false);
        assertNull(result);
        assertEquals(2, cacheTestInterface.testMethodCacheThrowsCount());

        result = cacheTestInterface.testMethodCacheThrows("1", true, false);
        assertNull(result);
        assertEquals(3, cacheTestInterface.testMethodCacheThrowsCount());

        try {
            result = cacheTestInterface.testMethodCacheThrows("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(4, cacheTestInterface.testMethodCacheThrowsCount());

        try {
            result = cacheTestInterface.testMethodCacheThrows("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(4, cacheTestInterface.testMethodCacheThrowsCount());

        result = cacheTestInterface.testMethodCacheThrows("1", false, false);
        assertEquals("testMethodCacheThrows(1)", result);
        assertEquals(4, cacheTestInterface.testMethodCacheThrowsCount());

        result = cacheTestInterface.testMethodCacheThrows("2", false, false);
        assertEquals("testMethodCacheThrows(2)", result);
        assertEquals(5, cacheTestInterface.testMethodCacheThrowsCount());
    }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver15;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

/**
 * OpenFlow 1.5 wire codec for the masked MPLS_LABEL OXM match TLV.
 *
 * <p>The TLV header is the fixed constant {@code 0x80004508}: per the OpenFlow
 * OXM encoding this packs oxm_class (high 16 bits), oxm_field with the
 * has-mask bit set, and the payload length in the low byte — TODO confirm the
 * exact bit split against the OF 1.5 spec. The payload is two 4-byte words:
 * value then mask, in that order (see {@link Reader} / {@link Writer}).
 * Instances are immutable; both {@code value} and {@code mask} are non-null.
 */
class OFOxmMplsLabelMaskedVer15 implements OFOxmMplsLabelMasked {
    private static final Logger logger = LoggerFactory.getLogger(OFOxmMplsLabelMaskedVer15.class);
    // version: 1.5
    final static byte WIRE_VERSION = 6;
    // total on-wire size: 4-byte typeLen header + 4-byte value + 4-byte mask
    final static int LENGTH = 12;

    private final static U32 DEFAULT_VALUE = U32.ZERO;
    private final static U32 DEFAULT_VALUE_MASK = U32.ZERO;

    // OF message fields
    private final U32 value;
    private final U32 mask;
//
    // Immutable default instance
    final static OFOxmMplsLabelMaskedVer15 DEFAULT = new OFOxmMplsLabelMaskedVer15(
        DEFAULT_VALUE, DEFAULT_VALUE_MASK
    );

    // package private constructor - used by readers, builders, and factory
    OFOxmMplsLabelMaskedVer15(U32 value, U32 mask) {
        if(value == null) {
            throw new NullPointerException("OFOxmMplsLabelMaskedVer15: property value cannot be null");
        }
        if(mask == null) {
            throw new NullPointerException("OFOxmMplsLabelMaskedVer15: property mask cannot be null");
        }
        this.value = value;
        this.mask = mask;
    }

    // Accessors for OF message fields
    @Override
    public long getTypeLen() {
        return 0x80004508L;
    }

    @Override
    public U32 getValue() {
        return value;
    }

    @Override
    public U32 getMask() {
        return mask;
    }

    @Override
    public MatchField<U32> getMatchField() {
        return MatchField.MPLS_LABEL;
    }

    @Override
    public boolean isMasked() {
        // this class is always the masked variant; the unmasked one is OFOxmMplsLabelVer15
        return true;
    }

    /**
     * Canonicalizes this OXM: a mask of NO_MASK degenerates to the unmasked
     * form, a FULL_MASK matches nothing and canonicalizes to null; any other
     * mask is already canonical.
     */
    public OFOxm<U32> getCanonical() {
        if (U32.NO_MASK.equals(mask)) {
            return new OFOxmMplsLabelVer15(value);
        } else if(U32.FULL_MASK.equals(mask)) {
            return null;
        } else {
            return this;
        }
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_15;
    }

    public OFOxmMplsLabelMasked.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    /**
     * Builder seeded from an existing message; unset properties fall back to
     * the parent's values in {@link #build()}.
     */
    static class BuilderWithParent implements OFOxmMplsLabelMasked.Builder {
        final OFOxmMplsLabelMaskedVer15 parentMessage;

        // OF message fields
        private boolean valueSet;
        private U32 value;
        private boolean maskSet;
        private U32 mask;

        BuilderWithParent(OFOxmMplsLabelMaskedVer15 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public long getTypeLen() {
            return 0x80004508L;
        }

        @Override
        public U32 getValue() {
            return value;
        }

        @Override
        public OFOxmMplsLabelMasked.Builder setValue(U32 value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public U32 getMask() {
            return mask;
        }

        @Override
        public OFOxmMplsLabelMasked.Builder setMask(U32 mask) {
            this.mask = mask;
            this.maskSet = true;
            return this;
        }

        @Override
        public MatchField<U32> getMatchField() {
            return MatchField.MPLS_LABEL;
        }

        @Override
        public boolean isMasked() {
            return true;
        }

        @Override
        public OFOxm<U32> getCanonical()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property canonical not supported in version 1.5");
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_15;
        }

        @Override
        public OFOxmMplsLabelMasked build() {
            // each property: explicit setter wins, otherwise inherit from parent
            U32 value = this.valueSet ? this.value : parentMessage.value;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            U32 mask = this.maskSet ? this.mask : parentMessage.mask;
            if(mask == null)
                throw new NullPointerException("Property mask must not be null");

            //
            return new OFOxmMplsLabelMaskedVer15(
                    value,
                    mask
                );
        }
    }

    /** Builder with no parent; unset properties fall back to the class defaults. */
    static class Builder implements OFOxmMplsLabelMasked.Builder {
        // OF message fields
        private boolean valueSet;
        private U32 value;
        private boolean maskSet;
        private U32 mask;

        @Override
        public long getTypeLen() {
            return 0x80004508L;
        }

        @Override
        public U32 getValue() {
            return value;
        }

        @Override
        public OFOxmMplsLabelMasked.Builder setValue(U32 value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public U32 getMask() {
            return mask;
        }

        @Override
        public OFOxmMplsLabelMasked.Builder setMask(U32 mask) {
            this.mask = mask;
            this.maskSet = true;
            return this;
        }

        @Override
        public MatchField<U32> getMatchField() {
            return MatchField.MPLS_LABEL;
        }

        @Override
        public boolean isMasked() {
            return true;
        }

        @Override
        public OFOxm<U32> getCanonical()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property canonical not supported in version 1.5");
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_15;
        }

//
        @Override
        public OFOxmMplsLabelMasked build() {
            U32 value = this.valueSet ? this.value : DEFAULT_VALUE;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            U32 mask = this.maskSet ? this.mask : DEFAULT_VALUE_MASK;
            if(mask == null)
                throw new NullPointerException("Property mask must not be null");
            return new OFOxmMplsLabelMaskedVer15(
                    value,
                    mask
                );
        }
    }

    final static Reader READER = new Reader();

    /** Decodes the TLV from the buffer; fails fast if the header is not ours. */
    static class Reader implements OFMessageReader<OFOxmMplsLabelMasked> {
        @Override
        public OFOxmMplsLabelMasked readFrom(ByteBuf bb) throws OFParseError {
            // fixed value property typeLen == 0x80004508L
            int typeLen = bb.readInt();
            if(typeLen != (int) 0x80004508)
                throw new OFParseError("Wrong typeLen: Expected=0x80004508L(0x80004508L), got="+typeLen);
            // payload: value word then mask word — order must mirror Writer.write
            U32 value = U32.of(bb.readInt());
            U32 mask = U32.of(bb.readInt());

            OFOxmMplsLabelMaskedVer15 oxmMplsLabelMaskedVer15 = new OFOxmMplsLabelMaskedVer15(
                    value,
                      mask
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", oxmMplsLabelMaskedVer15);
            return oxmMplsLabelMaskedVer15;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFOxmMplsLabelMaskedVer15Funnel FUNNEL = new OFOxmMplsLabelMaskedVer15Funnel();

    /** Guava funnel: hashes the same bytes the wire encoding contains. */
    static class OFOxmMplsLabelMaskedVer15Funnel implements Funnel<OFOxmMplsLabelMaskedVer15> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFOxmMplsLabelMaskedVer15 message, PrimitiveSink sink) {
            // fixed value property typeLen = 0x80004508L
            sink.putInt((int) 0x80004508);
            message.value.putTo(sink);
            message.mask.putTo(sink);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    /** Encodes the TLV: fixed header, then value and mask words. */
    static class Writer implements OFMessageWriter<OFOxmMplsLabelMaskedVer15> {
        @Override
        public void write(ByteBuf bb, OFOxmMplsLabelMaskedVer15 message) {
            // fixed value property typeLen = 0x80004508L
            bb.writeInt((int) 0x80004508);
            bb.writeInt(message.value.getRaw());
            bb.writeInt(message.mask.getRaw());
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFOxmMplsLabelMaskedVer15(");
        b.append("value=").append(value);
        b.append(", ");
        b.append("mask=").append(mask);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFOxmMplsLabelMaskedVer15 other = (OFOxmMplsLabelMaskedVer15) obj;

        if (value == null) {
            if (other.value != null)
                return false;
        } else if (!value.equals(other.value))
            return false;
        if (mask == null) {
            if (other.mask != null)
                return false;
        } else if (!mask.equals(other.mask))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((value == null) ? 0 : value.hashCode());
        result = prime * result + ((mask == null) ? 0 : mask.hashCode());
        return result;
    }
}
/*
 * Copyright (C) 2009 Dimagi Inc., UNICEF
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *
 */

package org.rapidandroid;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Vector;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.rapidandroid.content.translation.MessageTranslator;
import org.rapidandroid.content.translation.ModelTranslator;
import org.rapidandroid.data.RapidSmsDBConstants;
import org.rapidandroid.data.RapidSmsDBConstants.FieldType;
import org.rapidsms.java.core.model.Field;
import org.rapidsms.java.core.model.Form;
import org.rapidsms.java.core.model.SimpleFieldType;
import org.rapidsms.java.core.parser.service.ParsingService.ParserType;

import android.annotation.SuppressLint;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.provider.BaseColumns;
import android.util.Log;

/**
 * One-shot application bootstrap: loads field types, fields and forms from
 * bundled asset JSON (plus optional overrides on the SD card), mirrors them
 * into the content provider, and creates per-form tables when missing.
 *
 * All state is static; {@link #InitApplicationDatabase(Context)} is the single
 * entry point and must be called before anything else here is used.
 *
 * @author Daniel Myung dmyung@dimagi.com
 * @created Jan 27, 2009 Summary:
 */
@SuppressLint("UseSparseArrays")
public class ModelBootstrap {

	private static SystemHealthTracking healthTracker = new SystemHealthTracking(ModelBootstrap.class);

	// Context captured by InitApplicationDatabase; all loaders read it.
	private static Context mContext;

	// form id -> fully assembled Form
	private static HashMap<Integer, Form> formIdCache = new HashMap<Integer, Form>();
	// form id -> fields parsed so far for that form (insertion order preserved)
	private static HashMap<Integer, Vector<Field>> fieldToFormHash = new HashMap<Integer, Vector<Field>>();
	// fieldtype pk -> parsed SimpleFieldType
	private static HashMap<Integer, SimpleFieldType> fieldTypeHash = new HashMap<Integer, SimpleFieldType>();

	/**
	 * Entry point: (re)loads all field types and forms and syncs them to the
	 * database, then refreshes the message monitor hash.
	 *
	 * @param context application context used for asset and content-provider access
	 */
	public static void InitApplicationDatabase(Context context) {
		mContext = context;
		// SAGES/pokuam1: deliberately forced on (was: isFieldTypeTableEmpty())
		// so that table/form existence is re-checked on every startup.
		if (true) {
			healthTracker.logInfo("Bootstrapping fieldtypes, fields, and forms.");
			applicationInitialFormFieldTypesBootstrap();
		}
		MessageTranslator.updateMonitorHash(context);
	}

	/**
	 * Reads an asset file fully into a String (platform default charset,
	 * matching historical behavior).
	 *
	 * @throws RuntimeException wrapping any IOException — assets are bundled,
	 *         so a failure here is a packaging error.
	 */
	private static String loadAssetFile(String filename) {
		InputStream is = null;
		try {
			is = mContext.getAssets().open(filename);
			return readFully(is);
		} catch (IOException e) {
			// Should never happen!
			throw new RuntimeException(e);
		} finally {
			closeQuietly(is);
		}
	}

	/**
	 * Reads a file on the SD card fully into a String.
	 *
	 * @return file contents, or null if the file does not exist (callers use
	 *         null to mean "no external override supplied")
	 */
	private static String loadSdCardFile(String filename) {
		File file = new File(filename);
		if (!file.exists()) {
			return null;
		}
		InputStream is = null;
		try {
			is = new BufferedInputStream(new FileInputStream(file));
			return readFully(is);
		} catch (IOException e) {
			// Should never happen!
			throw new RuntimeException(e);
		} finally {
			closeQuietly(is);
		}
	}

	/**
	 * Drains a stream into a String. Unlike a single read(buffer) call, this
	 * loops until the buffer is full or EOF, so partial reads cannot silently
	 * truncate the JSON. Sizing via available() assumes a local file/asset
	 * stream where available() reports the full length — true for the two
	 * callers above.
	 */
	private static String readFully(InputStream is) throws IOException {
		int size = is.available();
		byte[] buffer = new byte[size];
		int off = 0;
		while (off < size) {
			int n = is.read(buffer, off, size - off);
			if (n < 0) {
				break; // EOF before expected length
			}
			off += n;
		}
		return new String(buffer, 0, off);
	}

	/** Closes a stream, ignoring close-time errors (stream already consumed). */
	private static void closeQuietly(InputStream is) {
		if (is == null) {
			return;
		}
		try {
			is.close();
		} catch (IOException ignored) {
			// nothing useful to do at close time
		}
	}

	/**
	 * Initial app startup, ONLY SHOULD BE RUN ONCE!!! called when the existence
	 * of some data in the fieldtypes table is missing.
	 *
	 * external-custom-fieldtypes: place them here: /sdcard/rapidandroid/externalcustomfieldtypes.json
	 */
	private static void applicationInitialFormFieldTypesBootstrap() {
		healthTracker.logInfo("Loading field types and forms from assets.");
		loadFieldTypesFromAssets();
		insertFieldTypesIntoDBIfNecessary();
		loadInitialFormsFromAssets();
		checkIfFormTablesExistCreateIfNecessary();
	}

	/**
	 * Syncs every parsed SimpleFieldType into the content provider: inserts
	 * missing rows, and updates rows whose name or regex changed on disk.
	 */
	private static void insertFieldTypesIntoDBIfNecessary() {
		Iterator<Map.Entry<Integer, SimpleFieldType>> it = fieldTypeHash.entrySet().iterator();
		while (it.hasNext()) {
			Map.Entry<Integer, SimpleFieldType> pairs = it.next();
			SimpleFieldType thetype = pairs.getValue();

			// probe for an existing row with this fieldtype's pk
			Uri fieldtypeUri = Uri.parse(RapidSmsDBConstants.FieldType.CONTENT_URI_STRING + thetype.getId());
			Cursor typeCursor = mContext.getContentResolver().query(fieldtypeUri, null, null, null, null);
			if (typeCursor.getCount() == 0) {
				// not present: insert fresh
				ContentValues typecv = new ContentValues();
				typecv.put(BaseColumns._ID, thetype.getId());
				typecv.put(RapidSmsDBConstants.FieldType.DATATYPE, thetype.getDataType());
				typecv.put(RapidSmsDBConstants.FieldType.NAME, thetype.getReadableName());
				typecv.put(RapidSmsDBConstants.FieldType.REGEX, thetype.getRegex());

				Log.d("dimagi", "InsertFieldType: " + thetype.getId());
				Log.d("dimagi", "InsertFieldType: " + thetype.getDataType());
				Log.d("dimagi", "InsertFieldType: " + thetype.getReadableName());
				Log.d("dimagi", "InsertFieldType: " + thetype.getRegex());

				Uri insertedTypeUri = mContext.getContentResolver().insert(RapidSmsDBConstants.FieldType.CONTENT_URI,
						typecv);
				Log.d("dimagi", "********** Inserted SimpleFieldType into db: " + insertedTypeUri);
			} else if (typeCursor.getCount() == 1 && typeCursor.moveToFirst()) {
				// SAGES: update the fieldtype in the database -- the Name and/or Regex has changed
				int nameColIndx = typeCursor.getColumnIndex(FieldType.NAME);
				int regexColIndx = typeCursor.getColumnIndex(FieldType.REGEX);

				boolean isUpdatedFieldType = (!typeCursor.getString(nameColIndx).equals(thetype.getReadableName()));
				boolean isUpdatedRegex = (!typeCursor.getString(regexColIndx).equals(thetype.getRegex()));

				if (isUpdatedFieldType || isUpdatedRegex) {
					ContentValues typecv = new ContentValues();
					typecv.put(RapidSmsDBConstants.FieldType.DATATYPE, thetype.getDataType());
					typecv.put(RapidSmsDBConstants.FieldType.NAME, thetype.getReadableName());
					typecv.put(RapidSmsDBConstants.FieldType.REGEX, thetype.getRegex());

					Log.d("sages", "UpdateFieldType: " + thetype.getId());
					Log.d("sages", "UpdateFieldType: " + thetype.getDataType());
					Log.d("sages", "UpdateFieldType: " + thetype.getReadableName());
					Log.d("sages", "UpdateFieldType: " + thetype.getRegex());

					String whereClause = BaseColumns._ID + "= ?";
					String[] whereClauseArgs = { String.valueOf(thetype.getId()) };
					int numUpdatedType = mContext.getContentResolver()
							.update(RapidSmsDBConstants.FieldType.CONTENT_URI, typecv, whereClause, whereClauseArgs);
					Log.d("sages", "********** Updated SimpleFieldType into db: " + numUpdatedType);
				}
			}
			typeCursor.close();
		}
	}

	/**
	 * Loads field types from the two bundled asset files, then appends any
	 * external overrides found on the SD card. Later files win on pk collision
	 * because fieldTypeHash.put overwrites.
	 */
	private static void loadFieldTypesFromAssets() {
		parseFieldTypeJson(loadAssetFile("definitions/fieldtypes.json"), "dimagi");
		// SAGES: custom fieldtype file, always appended
		parseFieldTypeJson(loadAssetFile("definitions/customfieldtypes.json"), "sages");

		// SAGES/pokuam1: load EXTERNAL (on the sdcard) custom fieldtypes, if present
		String sdcard = RapidAndroidApplication.PATH_SDCARD;
		String externalcustomtypes = loadSdCardFile(sdcard + "/rapidandroid/externalcustomfieldtypes.json");
		if (externalcustomtypes != null) {
			parseFieldTypeJson(externalcustomtypes, "sages");
		}
	}

	/**
	 * Parses one fieldtype JSON array into fieldTypeHash. Shared by all three
	 * fieldtype sources (previously three copy-pasted loops).
	 *
	 * @param json   serialized JSON array of rapidandroid.fieldtype objects
	 * @param logTag log tag preserved from the original per-source loops
	 */
	private static void parseFieldTypeJson(String json, String logTag) {
		try {
			JSONArray typesarray = new JSONArray(json);
			int arrlength = typesarray.length();
			for (int i = 0; i < arrlength; i++) {
				try {
					JSONObject obj = typesarray.getJSONObject(i);
					Log.d(logTag, "type loop: " + i + " model: " + obj.getString("model"));
					if (!obj.getString("model").equals("rapidandroid.fieldtype")) {
						Log.d(logTag, "###" + obj.getString("model") + "###");
						throw new IllegalArgumentException("Error in parsing fieldtypes.json");
					}
					int pk = obj.getInt("pk");
					JSONObject jsonfields = obj.getJSONObject("fields");
					Log.d(logTag, "#### Regex from file: " + jsonfields.getString("name") + " ["
							+ jsonfields.getString("regex") + "]");
					SimpleFieldType newtype = new SimpleFieldType(pk, jsonfields.getString("datatype"),
							jsonfields.getString("regex"), jsonfields.getString("name"));
					fieldTypeHash.put(Integer.valueOf(pk), newtype);
				} catch (JSONException e) {
					// skip the malformed entry but keep processing the rest
					Log.d(logTag, "skipping malformed fieldtype entry " + i + ": " + e.getMessage());
				}
			}
		} catch (JSONException e) {
			// whole document unparseable: log instead of silently swallowing
			Log.d(logTag, "unable to parse fieldtype json: " + e.getMessage());
		}
	}

	/** Loads bundled fields/forms, then SD-card "loadable" overrides. */
	private static void loadInitialFormsFromAssets() {
		parseFieldsFromAssets();
		parseFormsFromAssets();
		parseFieldsFromLoadableAssets();
		parseFormsFromLoadableAssets();
	}

	/**
	 * Loads externally defined field types from /sdcard/rapidandroid/loadablefields.json file
	 */
	private static void parseFieldsFromLoadableAssets() {
		String sdcard = RapidAndroidApplication.PATH_SDCARD;
		String sdcardFields = loadSdCardFile(sdcard + "/rapidandroid/loadablefields.json");
		if (sdcardFields != null) {
			parseFieldJson(sdcardFields, "sages");
		}
	}

	/** Loads field definitions bundled in assets. */
	private static void parseFieldsFromAssets() {
		parseFieldJson(loadAssetFile("definitions/fields.json"), "dimagi");
	}

	/**
	 * Parses one fields JSON array into fieldToFormHash, grouping fields by
	 * their owning form id. Shared by the asset and SD-card sources.
	 */
	private static void parseFieldJson(String json, String logTag) {
		try {
			JSONArray fieldsarray = new JSONArray(json);
			int arrlength = fieldsarray.length();
			for (int i = 0; i < arrlength; i++) {
				try {
					JSONObject obj = fieldsarray.getJSONObject(i);
					// NOTE(review): entries with an unexpected "model" were never
					// rejected by the original code either; preserved as-is.
					int pk = obj.getInt("pk");
					JSONObject jsonfields = obj.getJSONObject("fields");
					int form_id = jsonfields.getInt("form");
					Field newfield = new Field(pk, jsonfields.getInt("sequence"), jsonfields.getString("name"),
							jsonfields.getString("prompt"),
							fieldTypeHash.get(Integer.valueOf(jsonfields.getInt("fieldtype"))));
					Integer formInt = Integer.valueOf(form_id);
					if (!fieldToFormHash.containsKey(formInt)) {
						fieldToFormHash.put(formInt, new Vector<Field>());
						Log.d(logTag, "### adding a key again?!" + formInt);
					}
					fieldToFormHash.get(formInt).add(newfield);
					Log.d(logTag, "#### Parsed field: " + newfield.getFieldId() + " [" + newfield.getName()
							+ "] newlength: " + fieldToFormHash.get(formInt).size());
				} catch (JSONException e) {
					Log.d(logTag, "skipping malformed field entry " + i + ": " + e.getMessage());
				}
			}
		} catch (JSONException e) {
			Log.d(logTag, "unable to parse field json: " + e.getMessage());
		}
	}

	/** Loads form definitions bundled in assets. */
	private static void parseFormsFromAssets() {
		parseFormJson(loadAssetFile("definitions/forms.json"), "dimagi", false);
	}

	/**
	 * Loads externally defined forms from /sdcard/rapidandroid/loadableforms.json file
	 */
	private static void parseFormsFromLoadableAssets() {
		String sdcard = RapidAndroidApplication.PATH_SDCARD;
		String sdcardForms = loadSdCardFile(sdcard + "/rapidandroid/loadableforms.json");
		if (sdcardForms != null) {
			parseFormJson(sdcardForms, "sages", true);
		}
	}

	/**
	 * Parses one forms JSON array into formIdCache, attaching the fields
	 * previously collected for each form id. Shared by the asset and SD-card
	 * sources (previously two copy-pasted loops).
	 *
	 * @param logNpe when true, log the offending pk before rethrowing an NPE
	 *               (an NPE here means the form references fields that were
	 *               never parsed, so fieldToFormHash.get(pkInt) is null)
	 */
	private static void parseFormJson(String json, String logTag, boolean logNpe) {
		try {
			JSONArray formarray = new JSONArray(json);
			int arrlength = formarray.length();
			Integer curPkInt = null;
			for (int i = 0; i < arrlength; i++) {
				try {
					JSONObject obj = formarray.getJSONObject(i);
					int pk = obj.getInt("pk");
					Integer pkInt = Integer.valueOf(pk);
					curPkInt = pkInt;
					JSONObject jsonfields = obj.getJSONObject("fields");

					Vector<Field> formFields = fieldToFormHash.get(pkInt);
					Field[] fieldarr = new Field[formFields.size()];
					for (int q = 0; q < fieldarr.length; q++) {
						fieldarr[q] = formFields.get(q);
					}

					Form newform = new Form(pk, jsonfields.getString("formname"), jsonfields.getString("prefix"),
							jsonfields.getString("description"), fieldarr,
							ParserType.getTypeFromConfig(jsonfields.getString("parsemethod")));
					formIdCache.put(pkInt, newform);
				} catch (JSONException e) {
					Log.d(logTag, "skipping malformed form entry " + i + ": " + e.getMessage());
				} catch (NullPointerException npe) {
					if (logNpe) {
						Log.e(logTag, "pkInt: " + curPkInt + "may have caused exception.");
					}
					throw npe;
				}
			}
		} catch (JSONException e) {
			Log.d(logTag, "unable to parse form json: " + e.getMessage());
		}
	}

	/**
	 * For every cached form, inserts it (and thereby its backing table) via
	 * ModelTranslator if no row for its form id exists yet.
	 */
	private static void checkIfFormTablesExistCreateIfNecessary() {
		Iterator<Map.Entry<Integer, Form>> it = formIdCache.entrySet().iterator();
		while (it.hasNext()) {
			Map.Entry<Integer, Form> pairs = it.next();
			Form f = pairs.getValue();
			Log.d("dimagi", "**** inserting form " + f.getFormName());

			// insert the form first if it is not already present
			Uri formUri = Uri.parse(RapidSmsDBConstants.Form.CONTENT_URI_STRING + f.getFormId());
			Cursor crform = mContext.getContentResolver().query(formUri, null, null, null, null);
			if (crform.getCount() == 0) {
				ModelTranslator.addFormToDatabase(f);
			}
			crform.close();
		}
	}
}
/*******************************************************************************
 * Copyright (c) 2001-2013 Mathew A. Nelson and Robocode contributors
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://robocode.sourceforge.net/license/epl-v10.html
 *******************************************************************************/
package net.sf.robocode.repository.items;

import net.sf.robocode.io.FileUtil;
import net.sf.robocode.io.Logger;
import net.sf.robocode.io.URLJarCollector;
import net.sf.robocode.repository.IRobotSpecItem;
import net.sf.robocode.repository.root.IRepositoryRoot;
import net.sf.robocode.core.Container;
import net.sf.robocode.version.IVersionManager;

import java.io.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;

/**
 * Repository item backed by a {@code .team} properties file describing a robot
 * team (members, version, author, description).
 *
 * State such as {@code itemUrl}, {@code properties}, {@code htmlURL},
 * {@code lastModified} and {@code isValid} is inherited from
 * {@link RobotSpecItem}.
 *
 * @author Pavel Savara (original)
 */
public class TeamItem extends RobotSpecItem implements IRobotSpecItem {
	private static final long serialVersionUID = 1L;

	// keys used inside the .team properties file
	private final static String TEAM_DESCRIPTION = "team.description";
	private final static String TEAM_AUTHOR_NAME = "team.author.name";
	// private final static String TEAM_AUTHOR_EMAIL = "team.author.email";
	private final static String TEAM_AUTHOR_WEBSITE = "team.author.website";
	private final static String TEAM_VERSION = "team.version";
	private final static String TEAM_WEBPAGE = "team.webpage";
	private final static String TEAM_MEMBERS = "team.members";
	private final static String ROBOCODE_VERSION = "robocode.version";

	// dotted team name derived from the item URL, without the " version" suffix
	private final String fullTeamName;

	/**
	 * Derives the team's dotted class-style name from the .team file URL
	 * relative to the repository root (path separators become dots, any
	 * trailing " &lt;version&gt;" segment is stripped), then loads the
	 * properties; the item is marked valid only if loading succeeds.
	 */
	public TeamItem(URL itemURL, IRepositoryRoot root) {
		super(itemURL, root);
		String tUrl = itemURL.toString();

		tUrl = tUrl.substring(0, tUrl.lastIndexOf(".team"));
		final int versionSeparator = tUrl.lastIndexOf(" ");
		final int rootLen = root.getURL().toString().length();

		if (versionSeparator != -1) {
			fullTeamName = tUrl.substring(rootLen, versionSeparator).replace('/', '.').replace('\\', '.');
		} else {
			fullTeamName = tUrl.substring(rootLen).replace('/', '.').replace('\\', '.');
		}
		if (loadProperties()) {
			isValid = true;
		}
	}

	/**
	 * Derives the companion .html URL from the .team URL and probes it; leaves
	 * htmlURL null when the page does not exist. NOTE(review): replaceAll
	 * rewrites every ".team" occurrence in the URL, not just the extension —
	 * presumably team URLs never contain ".team" elsewhere.
	 */
	private void htmlURLFromPropertiesURL() {
		try {
			htmlURL = new URL(itemUrl.toString().replaceAll("\\.team", ".html"));

			// test that html file exists
			final URLConnection conn = URLJarCollector.openConnection(htmlURL);

			conn.getInputStream().close();
		} catch (IOException ignored) {
			// doesn't exist
			htmlURL = null;
		}
	}

	/**
	 * Returns every alias string under which this team can be looked up:
	 * extensionless URL and path, plus the (unique) full class name with and
	 * without version. The "TESTING" system property switches to the
	 * non-unique name form.
	 */
	public Set<String> getFriendlyURLs() {
		Set<String> urls = new HashSet<String>();

		if (itemUrl != null) {
			String url = itemUrl.toString();
			String urlNoType = url.substring(0, url.lastIndexOf('.'));

			String path = itemUrl.getPath();
			String pathNoType = path.substring(0, path.lastIndexOf('.'));

			urls.add(urlNoType);
			urls.add(pathNoType);
		}
		if (System.getProperty("TESTING", "false").equals("true")) {
			urls.add(getFullClassName());
		} else {
			urls.add(getUniqueFullClassName());
		}
		urls.add(getUniqueFullClassNameWithVersion());
		return urls;
	}

	/**
	 * {@inheritDoc}
	 */
	public void update(long lastModified, boolean force) {
		// reload only when the file is newer than what we have, or when forced
		if (lastModified > this.lastModified || force) {
			this.lastModified = lastModified;
			loadProperties();
		}
	}

	/**
	 * Loads the .team properties from the item URL into the inherited
	 * {@code properties} object.
	 *
	 * @return true on success; false (after logging) on any I/O failure or
	 *         when there is no item URL
	 */
	private boolean loadProperties() {
		if (itemUrl != null) {
			InputStream ios = null;

			try {
				final URLConnection connection = URLJarCollector.openConnection(itemUrl);

				ios = connection.getInputStream();
				properties.load(ios);
				return true;
			} catch (IOException e) {
				Logger.logError(e);
			} finally {
				FileUtil.cleanupStream(ios);
			}
		}
		return false;
	}

	public URL getHtmlURL() {
		// lazy
		if (htmlURL == null) {
			htmlURLFromPropertiesURL();
		}
		return htmlURL;
	}

	public URL getPropertiesURL() {
		return itemUrl;
	}

	public boolean isTeam() {
		return true;
	}

	public String getFullClassName() {
		return fullTeamName;
	}

	/** Comma-separated member robot names, or null if the key is absent. */
	public String getMembers() {
		return properties.getProperty(TEAM_MEMBERS, null);
	}

	public String getVersion() {
		return properties.getProperty(TEAM_VERSION, null);
	}

	public String getDescription() {
		return properties.getProperty(TEAM_DESCRIPTION, null);
	}

	public String getAuthorName() {
		return properties.getProperty(TEAM_AUTHOR_NAME, null);
	}

	/**
	 * @return the author's website as a URL, or null when the property is
	 *         missing or malformed (new URL(null) also throws
	 *         MalformedURLException, so the absent case is covered too)
	 */
	public URL getWebpage() {
		try {
			return new URL(properties.getProperty(TEAM_AUTHOR_WEBSITE, null));
		} catch (MalformedURLException e) {
			return null;
		}
	}

	// teams never carry source code of their own
	public boolean getIncludeSource() {
		return false;
	}

	public boolean isSourceIncluded() {
		return false;
	}

	public String getRobocodeVersion() {
		return properties.getProperty(ROBOCODE_VERSION, null);
	}

	public String toString() {
		return itemUrl.toString();
	}

	/**
	 * Writes this team's properties to the given stream, overriding any
	 * non-null arguments first and stamping the current Robocode version.
	 * NOTE(review): the includeSources flag is accepted but unused here —
	 * presumably kept for signature parity with robot items.
	 */
	public void storeProperties(OutputStream os, boolean includeSources, String version, String desc, String author, URL web) throws IOException {
		if (version != null) {
			properties.setProperty(TEAM_VERSION, version);
		}
		if (desc != null) {
			properties.setProperty(TEAM_DESCRIPTION, desc);
		}
		if (author != null) {
			properties.setProperty(TEAM_AUTHOR_NAME, author);
		}
		if (web != null) {
			properties.setProperty(TEAM_WEBPAGE, web.toString());
		}
		properties.setProperty(ROBOCODE_VERSION, Container.getComponent(IVersionManager.class).getVersion());

		properties.store(os, "Robocode Robot Team");
	}

	/**
	 * Creates or updates a .team file on disk: loads existing properties from
	 * {@code target} (if any), overlays every non-null argument, and writes
	 * the file back.
	 *
	 * @throws IOException if the target file cannot be written
	 */
	public static void createOrUpdateTeam(File target, URL web, String desc, String author, String members, String teamVersion, String robocodeVersion) throws IOException {
		FileOutputStream os = null;

		try {
			Properties team = loadTeamProperties(target);

			if (robocodeVersion != null) {
				team.setProperty(ROBOCODE_VERSION, robocodeVersion);
			}
			if (web != null) {
				team.setProperty(TEAM_WEBPAGE, web.toString());
			}
			if (desc != null) {
				team.setProperty(TEAM_DESCRIPTION, desc);
			}
			if (author != null) {
				team.setProperty(TEAM_AUTHOR_NAME, author);
			}
			if (members != null) {
				team.setProperty(TEAM_MEMBERS, members);
			}
			if (teamVersion != null) {
				team.setProperty(TEAM_VERSION, teamVersion);
			}
			os = new FileOutputStream(target);
			team.store(os, "Robocode robot team");
		} finally {
			FileUtil.cleanupStream(os);
		}
	}

	/**
	 * Loads properties from an existing team file; returns an empty Properties
	 * when the file is absent or unreadable (errors are logged, not thrown).
	 */
	private static Properties loadTeamProperties(File target) {
		Properties team = new Properties();

		if (target.exists()) {
			FileInputStream fis = null;

			try {
				fis = new FileInputStream(target);
				team.load(fis);
			} catch (Exception e) {
				Logger.logError(e);
			} finally {
				FileUtil.cleanupStream(fis);
			}
		}
		return team;
	}
}
package sh.isaac.solor.rf2.exporters.refsets; import java.util.concurrent.Semaphore; import java.util.stream.IntStream; import sh.isaac.api.Get; import sh.isaac.api.chronicle.LatestVersion; import sh.isaac.api.component.semantic.version.ComponentNidVersion; import sh.isaac.api.component.semantic.version.DynamicVersion; import sh.isaac.api.component.semantic.version.LongVersion; import sh.isaac.api.component.semantic.version.SemanticVersion; import sh.isaac.api.component.semantic.version.StringVersion; import sh.isaac.api.component.semantic.version.brittle.*; import sh.isaac.api.component.semantic.version.dynamic.DynamicData; import sh.isaac.api.component.semantic.version.dynamic.types.DynamicNid; import sh.isaac.api.component.semantic.version.dynamic.types.DynamicUUID; import sh.isaac.api.statement.constraints.MeasureConstraints; import sh.isaac.solor.rf2.config.RF2Configuration; import sh.isaac.solor.rf2.exporters.RF2AbstractExporter; import sh.isaac.solor.rf2.utility.RF2ExportHelper; public class RF2RefsetExporter extends RF2AbstractExporter { private final RF2ExportHelper rf2ExportHelper; private final IntStream intStream; private final Semaphore readSemaphore; private final RF2Configuration rf2Configuration; public RF2RefsetExporter(RF2Configuration rf2Configuration, RF2ExportHelper rf2ExportHelper, IntStream intStream, Semaphore readSemaphore) { super(rf2Configuration); this.rf2ExportHelper = rf2ExportHelper; this.intStream = intStream; this.readSemaphore = readSemaphore; this.rf2Configuration = rf2Configuration; readSemaphore.acquireUninterruptibly(); Get.activeTasks().add(this); } @Override protected Void call() { try{ this.intStream.forEach(nid -> { super.clearLineOutput(); super.incrementProgressCount(); switch (this.rf2Configuration.getRf2ReleaseType()){ case FULL: Get.assemblageService().getSemanticChronology(nid).getVersionList().stream().forEach(version -> writeSemantic((SemanticVersion)version)); break; case SNAPSHOT: LatestVersion<SemanticVersion> lv 
= Get.assemblageService().getSemanticChronology(nid) .getLatestVersion(this.rf2ExportHelper.getManifoldCoordinate().getVertexStampFilter()); if (lv.isPresent()) { writeSemantic(lv.get()); } break; default : throw new RuntimeException("Unsupported case " + this.rf2Configuration.getRf2ReleaseType()); } super.writeToFile(); super.tryAndUpdateProgressTracker(); }); if(this.rf2Configuration.isDescriptorAssemblage()){ super.writeToFile(this.rf2Configuration.getRefsetDescriptorDefinitions()); } } finally { this.readSemaphore.release(); Get.activeTasks().remove(this); } return null; } private void writeSemantic(final SemanticVersion sv) { String refsetID = this.rf2ExportHelper.getIdString(sv.getAssemblageNid()); String referenceComponentID = this.rf2ExportHelper.getIdString(sv.getReferencedComponentNid()); super.outputToWrite .append(sv.getPrimordialUuid() + "\t") .append(this.rf2ExportHelper.getTimeString(sv) + "\t") .append(this.rf2ExportHelper.getActiveString(sv) + "\t") .append(this.rf2ExportHelper.getIdString(sv.getModuleNid()) + "\t") .append(refsetID + "\t") .append(referenceComponentID); switch (sv.getSemanticType()) { case MEMBER: break; case Nid1_Int2: super.outputToWrite .append("\t" + this.rf2ExportHelper.getIdString(((Nid1_Int2_Version)sv).getNid1()) + "\t") .append(((Nid1_Int2_Version)sv).getInt2()); break; case Nid1_Long2: super.outputToWrite .append("\t" + this.rf2ExportHelper.getIdString(((Nid1_Long2_Version)sv).getNid1()) + "\t") .append(((Nid1_Long2_Version)sv).getLong2()); break; case Nid1_Nid2: super.outputToWrite .append("\t" + this.rf2ExportHelper.getIdString(((Nid1_Nid2_Version)sv).getNid1()) + "\t") .append(this.rf2ExportHelper.getIdString(((Nid1_Nid2_Version)sv).getNid2())); break; case Nid1_Str2: super.outputToWrite .append("\t" + this.rf2ExportHelper.getIdString(((Nid1_Str2_Version)sv).getNid1()) + "\t") .append(((Nid1_Str2_Version)sv).getStr2()); break; case Str1_Str2: super.outputToWrite .append("\t" + ((Str1_Str2_Version)sv).getStr1() + "\t") 
.append(((Str1_Str2_Version)sv).getStr2()); break; case Nid1_Nid2_Str3: super.outputToWrite .append("\t" + this.rf2ExportHelper.getIdString(((Nid1_Nid2_Str3_Version)sv).getNid1()) + "\t") .append(this.rf2ExportHelper.getIdString(((Nid1_Nid2_Str3_Version)sv).getNid2()) + "\t") .append(((Nid1_Nid2_Str3_Version)sv).getStr3()); break; case Nid1_Nid2_Int3: super.outputToWrite .append("\t" + this.rf2ExportHelper.getIdString(((Nid1_Nid2_Int3_Version)sv).getNid1()) + "\t") .append(this.rf2ExportHelper.getIdString(((Nid1_Nid2_Int3_Version)sv).getNid2()) + "\t") .append(((Nid1_Nid2_Int3_Version)sv).getInt3()); break; case Str1_Nid2_Nid3_Nid4: super.outputToWrite .append("\t" + ((Str1_Nid2_Nid3_Nid4_Version)sv).getStr1() + "\t") .append(this.rf2ExportHelper.getIdString(((Str1_Nid2_Nid3_Nid4_Version)sv).getNid2()) + "\t") .append(this.rf2ExportHelper.getIdString(((Str1_Nid2_Nid3_Nid4_Version)sv).getNid3()) + "\t") .append(this.rf2ExportHelper.getIdString(((Str1_Nid2_Nid3_Nid4_Version)sv).getNid4())); break; case Str1_Str2_Nid3_Nid4: super.outputToWrite .append("\t" + ((Str1_Str2_Nid3_Nid4_Version)sv).getStr1() + "\t") .append(((Str1_Str2_Nid3_Nid4_Version)sv).getStr2() + "\t") .append(this.rf2ExportHelper.getIdString(((Str1_Str2_Nid3_Nid4_Version)sv).getNid3()) + "\t") .append(this.rf2ExportHelper.getIdString(((Str1_Str2_Nid3_Nid4_Version)sv).getNid4())); break; case Str1_Str2_Nid3_Nid4_Nid5: super.outputToWrite .append("\t" + ((Str1_Str2_Nid3_Nid4_Nid5_Version)sv).getStr1() + "\t") .append(((Str1_Str2_Nid3_Nid4_Nid5_Version)sv).getStr2() + "\t") .append(this.rf2ExportHelper.getIdString(((Str1_Str2_Nid3_Nid4_Nid5_Version)sv).getNid3()) + "\t") .append(this.rf2ExportHelper.getIdString(((Str1_Str2_Nid3_Nid4_Nid5_Version)sv).getNid4()) + "\t") .append(this.rf2ExportHelper.getIdString(((Str1_Str2_Nid3_Nid4_Nid5_Version)sv).getNid5())); break; case Nid1_Int2_Str3_Str4_Nid5_Nid6: super.outputToWrite .append("\t" + 
this.rf2ExportHelper.getIdString(((Nid1_Int2_Str3_Str4_Nid5_Nid6_Version)sv).getNid1()) + "\t") .append(((Nid1_Int2_Str3_Str4_Nid5_Nid6_Version)sv).getInt2() + "\t") .append(((Nid1_Int2_Str3_Str4_Nid5_Nid6_Version)sv).getStr3() + "\t") .append(((Nid1_Int2_Str3_Str4_Nid5_Nid6_Version)sv).getStr4() + "\t") .append(this.rf2ExportHelper.getIdString(((Nid1_Int2_Str3_Str4_Nid5_Nid6_Version)sv).getNid5()) + "\t") .append(this.rf2ExportHelper.getIdString(((Nid1_Int2_Str3_Str4_Nid5_Nid6_Version)sv).getNid6())); break; case Int1_Int2_Str3_Str4_Str5_Nid6_Nid7: super.outputToWrite .append("\t" + ((Int1_Int2_Str3_Str4_Str5_Nid6_Nid7_Version)sv).getInt1() + "\t") .append(((Int1_Int2_Str3_Str4_Str5_Nid6_Nid7_Version)sv).getInt2() + "\t") .append(((Int1_Int2_Str3_Str4_Str5_Nid6_Nid7_Version)sv).getStr3() + "\t") .append(((Int1_Int2_Str3_Str4_Str5_Nid6_Nid7_Version)sv).getStr4() + "\t") .append(((Int1_Int2_Str3_Str4_Str5_Nid6_Nid7_Version)sv).getStr5() + "\t") .append(this.rf2ExportHelper.getIdString(((Int1_Int2_Str3_Str4_Str5_Nid6_Nid7_Version)sv).getNid6()) + "\t") .append(this.rf2ExportHelper.getIdString(((Int1_Int2_Str3_Str4_Str5_Nid6_Nid7_Version)sv).getNid7())); break; case Str1_Str2_Str3_Str4_Str5_Str6_Str7: super.outputToWrite .append("\t" + ((Str1_Str2_Str3_Str4_Str5_Str6_Str7_Version)sv).getStr1() + "\t") .append(((Str1_Str2_Str3_Str4_Str5_Str6_Str7_Version)sv).getStr2() + "\t") .append(((Str1_Str2_Str3_Str4_Str5_Str6_Str7_Version)sv).getStr3() + "\t") .append(((Str1_Str2_Str3_Str4_Str5_Str6_Str7_Version)sv).getStr4() + "\t") .append(((Str1_Str2_Str3_Str4_Str5_Str6_Str7_Version)sv).getStr5() + "\t") .append(((Str1_Str2_Str3_Str4_Str5_Str6_Str7_Version)sv).getStr6() + "\t") .append(((Str1_Str2_Str3_Str4_Str5_Str6_Str7_Version)sv).getStr7()); break; case LONG: super.outputToWrite .append("\t" + ((LongVersion)sv).getLongValue()); break; case STRING: super.outputToWrite .append("\t" + ((StringVersion)sv).getString()); break; case DYNAMIC: DynamicData[] dd = 
((DynamicVersion)sv).getData(); for (DynamicData di : dd) { switch (di.getDynamicDataType()) { case BOOLEAN: case DOUBLE: case FLOAT: case INTEGER: case LONG: case STRING: super.outputToWrite.append("\t").append(di.dataToString()); break; case NID: super.outputToWrite.append("\t").append(this.rf2ExportHelper.getIdString(((DynamicNid)di).getDataNid())); break; case UUID: super.outputToWrite.append("\t").append(this.rf2ExportHelper.getIdString(Get.identifierService().getNidForUuids(((DynamicUUID)di).getDataUUID()))); break; case ARRAY: case BYTEARRAY: case POLYMORPHIC: case UNKNOWN: default : throw new RuntimeException("Unsupported type for export! " + di.getDynamicDataType()); } } break; case COMPONENT_NID: super.outputToWrite .append("\t" + this.rf2ExportHelper.getIdString(((ComponentNidVersion)sv).getComponentNid())); case MEASURE_CONSTRAINTS: super.outputToWrite .append("\t" + ((MeasureConstraints)sv).getConstraintDescription() + "\t") .append(((MeasureConstraints)sv).getConstraintDescription() + "\t") .append(((MeasureConstraints)sv).getInitialLowerBound() + "\t") .append(((MeasureConstraints)sv).getInitialUpperBound() + "\t") .append(((MeasureConstraints)sv).getInitialIncludeUpperBound() + "\t") .append(((MeasureConstraints)sv).getInitialIncludeLowerBound() + "\t") .append(((MeasureConstraints)sv).getMinimumValue() + "\t") .append(((MeasureConstraints)sv).getMaximumValue() + "\t") .append(((MeasureConstraints)sv).getMinimumGranularity() + "\t") .append(((MeasureConstraints)sv).getMaximumGranularity() + "\t") .append(((MeasureConstraints)sv).showRange() + "\t") .append(((MeasureConstraints)sv).showGranularity() + "\t") .append(((MeasureConstraints)sv).showIncludeBounds() + "\t") .append(this.rf2ExportHelper.getIdString(((MeasureConstraints)sv).getMeasureSemanticConstraintAssemblageNid())); break; case IMAGE: case DESCRIPTION: case LOGIC_GRAPH: case RF2_RELATIONSHIP: case CONCEPT: case UNKNOWN: default : throw new RuntimeException("Invalid / Unsupported data type 
passed into refset exporter: " + sv.getSemanticType()); } super.outputToWrite.append("\r\n"); } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.plugins;

import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.Version;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.hasToString;

/**
 * Tests for the {@code elasticsearch-plugin remove} command. Each test builds
 * a throwaway ES home directory (bin/, plugins/, config/) and drives
 * RemovePluginCommand against it, asserting on the resulting filesystem state
 * and the terminal output.
 */
@LuceneTestCase.SuppressFileSystems("*")
public class RemovePluginCommandTests extends ESTestCase {

    private Path home;
    private Environment env;

    /** Command variant whose environment is injected rather than derived from settings. */
    static class MockRemovePluginCommand extends RemovePluginCommand {

        final Environment env;

        private MockRemovePluginCommand(final Environment env) {
            this.env = env;
        }

        @Override
        protected Environment createEnv(Map<String, String> settings) throws UserException {
            return env;
        }

    }

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        // Minimal ES home layout: bin/elasticsearch plus an empty plugins dir.
        home = createTempDir();
        Files.createDirectories(home.resolve("bin"));
        Files.createFile(home.resolve("bin").resolve("elasticsearch"));
        Files.createDirectories(home.resolve("plugins"));
        Settings settings = Settings.builder()
                .put("path.home", home)
                .build();
        env = TestEnvironment.newEnvironment(settings);
    }

    void createPlugin(String name) throws Exception {
        createPlugin(env.pluginsFile(), name);
    }

    // Writes a syntactically valid plugin-descriptor under path/name.
    void createPlugin(Path path, String name) throws Exception {
        PluginTestUtil.writePluginProperties(
                path.resolve(name),
                "description", "dummy",
                "name", name,
                "version", "1.0",
                "elasticsearch.version", Version.CURRENT.toString(),
                "java.version", System.getProperty("java.specification.version"),
                "classname", "SomeClass");
    }

    // Meta plugin: a descriptor plus nested child plugins under its directory.
    void createMetaPlugin(String name, String... plugins) throws Exception {
        PluginTestUtil.writeMetaPluginProperties(
                env.pluginsFile().resolve(name),
                "description", "dummy",
                "name", name);
        for (String plugin : plugins) {
            createPlugin(env.pluginsFile().resolve(name), plugin);
        }
    }

    // Runs the remove command and returns the captured terminal for output assertions.
    static MockTerminal removePlugin(String name, Path home, boolean purge) throws Exception {
        Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", home).build());
        MockTerminal terminal = new MockTerminal();
        new MockRemovePluginCommand(env).execute(terminal, env, name, purge);
        return terminal;
    }

    // Asserts the command left no ".removing-*" staging directories behind.
    static void assertRemoveCleaned(Environment env) throws IOException {
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(env.pluginsFile())) {
            for (Path file : stream) {
                if (file.getFileName().toString().startsWith(".removing")) {
                    fail("Removal dir still exists, " + file);
                }
            }
        }
    }

    public void testMissing() throws Exception {
        UserException e = expectThrows(UserException.class, () -> removePlugin("dne", home, randomBoolean()));
        assertTrue(e.getMessage(), e.getMessage().contains("plugin [dne] not found"));
        assertRemoveCleaned(env);
    }

    public void testBasic() throws Exception {
        createPlugin("fake");
        Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar"));
        Files.createDirectory(env.pluginsFile().resolve("fake").resolve("subdir"));
        createPlugin("other");
        removePlugin("fake", home, randomBoolean());
        // Only the targeted plugin is removed; siblings survive.
        assertFalse(Files.exists(env.pluginsFile().resolve("fake")));
        assertTrue(Files.exists(env.pluginsFile().resolve("other")));
        assertRemoveCleaned(env);
    }

    public void testBasicMeta() throws Exception {
        createMetaPlugin("meta", "fake1");
        createPlugin("other");
        removePlugin("meta", home, randomBoolean());
        assertFalse(Files.exists(env.pluginsFile().resolve("meta")));
        assertTrue(Files.exists(env.pluginsFile().resolve("other")));
        assertRemoveCleaned(env);

        // A child of a removed meta plugin is gone too, and not individually addressable.
        UserException exc =
                expectThrows(UserException.class, () -> removePlugin("fake1", home, randomBoolean()));
        assertThat(exc.getMessage(), containsString("plugin [fake1] not found"));
    }

    public void testBin() throws Exception {
        createPlugin("fake");
        Path binDir = env.binFile().resolve("fake");
        Files.createDirectories(binDir);
        Files.createFile(binDir.resolve("somescript"));
        removePlugin("fake", home, randomBoolean());
        // The plugin's bin dir is removed, but ES's own launcher is untouched.
        assertFalse(Files.exists(env.pluginsFile().resolve("fake")));
        assertTrue(Files.exists(env.binFile().resolve("elasticsearch")));
        assertFalse(Files.exists(binDir));
        assertRemoveCleaned(env);
    }

    public void testBinNotDir() throws Exception {
        createPlugin("fake");
        Files.createFile(env.binFile().resolve("fake"));
        UserException e = expectThrows(UserException.class, () -> removePlugin("fake", home, randomBoolean()));
        assertTrue(e.getMessage(), e.getMessage().contains("not a directory"));
        assertTrue(Files.exists(env.pluginsFile().resolve("fake"))); // did not remove
        assertTrue(Files.exists(env.binFile().resolve("fake")));
        assertRemoveCleaned(env);
    }

    public void testConfigDirPreserved() throws Exception {
        createPlugin("fake");
        final Path configDir = env.configFile().resolve("fake");
        Files.createDirectories(configDir);
        Files.createFile(configDir.resolve("fake.yml"));
        // Without --purge, config is kept and the user is told so.
        final MockTerminal terminal = removePlugin("fake", home, false);
        assertTrue(Files.exists(env.configFile().resolve("fake")));
        assertThat(terminal.getOutput(), containsString(expectedConfigDirPreservedMessage(configDir)));
        assertRemoveCleaned(env);
    }

    public void testPurgePluginExists() throws Exception {
        createPlugin("fake");
        final Path configDir = env.configFile().resolve("fake");
        if (randomBoolean()) {
            Files.createDirectories(configDir);
            Files.createFile(configDir.resolve("fake.yml"));
        }
        // With --purge, config is deleted and no "preserving" message is printed.
        final MockTerminal terminal = removePlugin("fake", home, true);
        assertFalse(Files.exists(env.configFile().resolve("fake")));
        assertThat(terminal.getOutput(), not(containsString(expectedConfigDirPreservedMessage(configDir))));
        assertRemoveCleaned(env);
    }

    public void testPurgePluginDoesNotExist() throws Exception {
        // Purge succeeds even when only the config dir (no plugin dir) exists.
        final Path configDir = env.configFile().resolve("fake");
        Files.createDirectories(configDir);
        Files.createFile(configDir.resolve("fake.yml"));
        final MockTerminal terminal = removePlugin("fake", home, true);
        assertFalse(Files.exists(env.configFile().resolve("fake")));
        assertThat(terminal.getOutput(), not(containsString(expectedConfigDirPreservedMessage(configDir))));
        assertRemoveCleaned(env);
    }

    public void testPurgeNothingExists() throws Exception {
        final UserException e = expectThrows(UserException.class, () -> removePlugin("fake", home, true));
        assertThat(e, hasToString(containsString("plugin [fake] not found")));
    }

    public void testPurgeOnlyMarkerFileExists() throws Exception {
        // A stale ".removing-" marker from an interrupted removal is cleaned up.
        final Path configDir = env.configFile().resolve("fake");
        final Path removing = env.pluginsFile().resolve(".removing-fake");
        Files.createFile(removing);
        final MockTerminal terminal = removePlugin("fake", home, randomBoolean());
        assertFalse(Files.exists(removing));
        assertThat(terminal.getOutput(), not(containsString(expectedConfigDirPreservedMessage(configDir))));
    }

    public void testNoConfigDirPreserved() throws Exception {
        createPlugin("fake");
        final Path configDir = env.configFile().resolve("fake");
        // No config dir exists, so no "preserving" message should appear.
        final MockTerminal terminal = removePlugin("fake", home, randomBoolean());
        assertThat(terminal.getOutput(), not(containsString(expectedConfigDirPreservedMessage(configDir))));
    }

    public void testRemoveUninstalledPluginErrors() throws Exception {
        UserException e = expectThrows(UserException.class, () -> removePlugin("fake", home, randomBoolean()));
        assertEquals(ExitCodes.CONFIG, e.exitCode);
        assertEquals("plugin [fake] not found; run 'elasticsearch-plugin list' to get list of installed plugins",
                e.getMessage());

        // Driving via main() asserts the exact terminal transcript, line by line.
        MockTerminal terminal = new MockTerminal();
        new MockRemovePluginCommand(env) {
            protected boolean addShutdownHook() {
                return false;
            }
        }.main(new String[] { "-Epath.home=" + home, "fake" }, terminal);
        try (BufferedReader reader = new BufferedReader(new StringReader(terminal.getOutput()))) {
            assertEquals("-> removing [fake]...", reader.readLine());
            assertEquals("ERROR: plugin [fake] not found; run 'elasticsearch-plugin list' to get list of installed plugins",
                    reader.readLine());
            assertNull(reader.readLine());
        }
    }

    public void testMissingPluginName() throws Exception {
        UserException e = expectThrows(UserException.class, () -> removePlugin(null, home, randomBoolean()));
        assertEquals(ExitCodes.USAGE, e.exitCode);
        assertEquals("plugin name is required", e.getMessage());
    }

    public void testRemoveWhenRemovingMarker() throws Exception {
        // Removal succeeds even if a marker from a previous attempt already exists.
        createPlugin("fake");
        Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar"));
        Files.createFile(env.pluginsFile().resolve(".removing-fake"));
        removePlugin("fake", home, randomBoolean());
    }

    private String expectedConfigDirPreservedMessage(final Path configDir) {
        return "-> preserving plugin config files [" + configDir + "] in case of upgrade; use --purge if not needed";
    }

}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.logic.v2016_06_01.implementation;

import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.CloudException;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import java.io.IOException;
import okhttp3.ResponseBody;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.Path;
import retrofit2.http.Query;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;

/**
 * An instance of this class provides access to all the operations defined
 * in WorkflowRunActionScopedRepetitions.
 *
 * NOTE: AutoRest-generated client; do not hand-edit logic — regenerate instead.
 * Each operation follows the same layering: sync and callback variants wrap
 * the Observable variant, which wraps the *WithServiceResponseAsync core that
 * validates parameters and delegates to the Retrofit service.
 */
public class WorkflowRunActionScopedRepetitionsInner {
    /** The Retrofit service to perform REST calls. */
    private WorkflowRunActionScopedRepetitionsService service;
    /** The service client containing this operation class. */
    private LogicManagementClientImpl client;

    /**
     * Initializes an instance of WorkflowRunActionScopedRepetitionsInner.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public WorkflowRunActionScopedRepetitionsInner(Retrofit retrofit, LogicManagementClientImpl client) {
        this.service = retrofit.create(WorkflowRunActionScopedRepetitionsService.class);
        this.client = client;
    }

    /**
     * The interface defining all the services for WorkflowRunActionScopedRepetitions to be
     * used by Retrofit to perform actually REST calls.
     */
    interface WorkflowRunActionScopedRepetitionsService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.logic.v2016_06_01.WorkflowRunActionScopedRepetitions list" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/workflows/{workflowName}/runs/{runName}/actions/{actionName}/scopeRepetitions")
        Observable<Response<ResponseBody>> list(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("workflowName") String workflowName, @Path("runName") String runName, @Path("actionName") String actionName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.logic.v2016_06_01.WorkflowRunActionScopedRepetitions get" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/workflows/{workflowName}/runs/{runName}/actions/{actionName}/scopeRepetitions/{repetitionName}")
        Observable<Response<ResponseBody>> get(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("workflowName") String workflowName, @Path("runName") String runName, @Path("actionName") String actionName, @Path("repetitionName") String repetitionName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    }

    /**
     * List the workflow run action scoped repetitions.
     * Blocking variant: resolves the async call and unwraps the body.
     *
     * @param resourceGroupName The resource group name.
     * @param workflowName The workflow name.
     * @param runName The workflow run name.
     * @param actionName The workflow action name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the WorkflowRunActionRepetitionDefinitionCollectionInner object if successful.
     */
    public WorkflowRunActionRepetitionDefinitionCollectionInner list(String resourceGroupName, String workflowName, String runName, String actionName) {
        return listWithServiceResponseAsync(resourceGroupName, workflowName, runName, actionName).toBlocking().single().body();
    }

    /**
     * List the workflow run action scoped repetitions.
     * Callback variant: adapts the Observable to a ServiceFuture.
     *
     * @param resourceGroupName The resource group name.
     * @param workflowName The workflow name.
     * @param runName The workflow run name.
     * @param actionName The workflow action name.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<WorkflowRunActionRepetitionDefinitionCollectionInner> listAsync(String resourceGroupName, String workflowName, String runName, String actionName, final ServiceCallback<WorkflowRunActionRepetitionDefinitionCollectionInner> serviceCallback) {
        return ServiceFuture.fromResponse(listWithServiceResponseAsync(resourceGroupName, workflowName, runName, actionName), serviceCallback);
    }

    /**
     * List the workflow run action scoped repetitions.
     * Observable variant: strips the ServiceResponse wrapper, emitting only the body.
     *
     * @param resourceGroupName The resource group name.
     * @param workflowName The workflow name.
     * @param runName The workflow run name.
     * @param actionName The workflow action name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the WorkflowRunActionRepetitionDefinitionCollectionInner object
     */
    public Observable<WorkflowRunActionRepetitionDefinitionCollectionInner> listAsync(String resourceGroupName, String workflowName, String runName, String actionName) {
        return listWithServiceResponseAsync(resourceGroupName, workflowName, runName, actionName).map(new Func1<ServiceResponse<WorkflowRunActionRepetitionDefinitionCollectionInner>, WorkflowRunActionRepetitionDefinitionCollectionInner>() {
            @Override
            public WorkflowRunActionRepetitionDefinitionCollectionInner call(ServiceResponse<WorkflowRunActionRepetitionDefinitionCollectionInner> response) {
                return response.body();
            }
        });
    }

    /**
     * List the workflow run action scoped repetitions.
     * Core variant: validates required parameters, issues the REST call, and
     * deserializes the HTTP response via listDelegate.
     *
     * @param resourceGroupName The resource group name.
     * @param workflowName The workflow name.
     * @param runName The workflow run name.
     * @param actionName The workflow action name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the WorkflowRunActionRepetitionDefinitionCollectionInner object
     */
    public Observable<ServiceResponse<WorkflowRunActionRepetitionDefinitionCollectionInner>> listWithServiceResponseAsync(String resourceGroupName, String workflowName, String runName, String actionName) {
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (workflowName == null) {
            throw new IllegalArgumentException("Parameter workflowName is required and cannot be null.");
        }
        if (runName == null) {
            throw new IllegalArgumentException("Parameter runName is required and cannot be null.");
        }
        if (actionName == null) {
            throw new IllegalArgumentException("Parameter actionName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.list(this.client.subscriptionId(), resourceGroupName, workflowName, runName, actionName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<WorkflowRunActionRepetitionDefinitionCollectionInner>>>() {
                @Override
                public Observable<ServiceResponse<WorkflowRunActionRepetitionDefinitionCollectionInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<WorkflowRunActionRepetitionDefinitionCollectionInner> clientResponse = listDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Deserialization/parse failures surface as onError, not as a throw.
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps HTTP 200 to the collection type; any other status becomes a CloudException.
    private ServiceResponse<WorkflowRunActionRepetitionDefinitionCollectionInner> listDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<WorkflowRunActionRepetitionDefinitionCollectionInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<WorkflowRunActionRepetitionDefinitionCollectionInner>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Get a workflow run action scoped repetition.
     * Blocking variant: resolves the async call and unwraps the body.
     *
     * @param resourceGroupName The resource group name.
     * @param workflowName The workflow name.
     * @param runName The workflow run name.
     * @param actionName The workflow action name.
     * @param repetitionName The workflow repetition.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the WorkflowRunActionRepetitionDefinitionInner object if successful.
     */
    public WorkflowRunActionRepetitionDefinitionInner get(String resourceGroupName, String workflowName, String runName, String actionName, String repetitionName) {
        return getWithServiceResponseAsync(resourceGroupName, workflowName, runName, actionName, repetitionName).toBlocking().single().body();
    }

    /**
     * Get a workflow run action scoped repetition.
     * Callback variant: adapts the Observable to a ServiceFuture.
     *
     * @param resourceGroupName The resource group name.
     * @param workflowName The workflow name.
     * @param runName The workflow run name.
     * @param actionName The workflow action name.
     * @param repetitionName The workflow repetition.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<WorkflowRunActionRepetitionDefinitionInner> getAsync(String resourceGroupName, String workflowName, String runName, String actionName, String repetitionName, final ServiceCallback<WorkflowRunActionRepetitionDefinitionInner> serviceCallback) {
        return ServiceFuture.fromResponse(getWithServiceResponseAsync(resourceGroupName, workflowName, runName, actionName, repetitionName), serviceCallback);
    }

    /**
     * Get a workflow run action scoped repetition.
     * Observable variant: strips the ServiceResponse wrapper, emitting only the body.
     *
     * @param resourceGroupName The resource group name.
     * @param workflowName The workflow name.
     * @param runName The workflow run name.
     * @param actionName The workflow action name.
     * @param repetitionName The workflow repetition.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the WorkflowRunActionRepetitionDefinitionInner object
     */
    public Observable<WorkflowRunActionRepetitionDefinitionInner> getAsync(String resourceGroupName, String workflowName, String runName, String actionName, String repetitionName) {
        return getWithServiceResponseAsync(resourceGroupName, workflowName, runName, actionName, repetitionName).map(new Func1<ServiceResponse<WorkflowRunActionRepetitionDefinitionInner>, WorkflowRunActionRepetitionDefinitionInner>() {
            @Override
            public WorkflowRunActionRepetitionDefinitionInner call(ServiceResponse<WorkflowRunActionRepetitionDefinitionInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Get a workflow run action scoped repetition.
     * Core variant: validates required parameters, issues the REST call, and
     * deserializes the HTTP response via getDelegate.
     *
     * @param resourceGroupName The resource group name.
     * @param workflowName The workflow name.
     * @param runName The workflow run name.
     * @param actionName The workflow action name.
     * @param repetitionName The workflow repetition.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the WorkflowRunActionRepetitionDefinitionInner object
     */
    public Observable<ServiceResponse<WorkflowRunActionRepetitionDefinitionInner>> getWithServiceResponseAsync(String resourceGroupName, String workflowName, String runName, String actionName, String repetitionName) {
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (workflowName == null) {
            throw new IllegalArgumentException("Parameter workflowName is required and cannot be null.");
        }
        if (runName == null) {
            throw new IllegalArgumentException("Parameter runName is required and cannot be null.");
        }
        if (actionName == null) {
            throw new IllegalArgumentException("Parameter actionName is required and cannot be null.");
        }
        if (repetitionName == null) {
            throw new IllegalArgumentException("Parameter repetitionName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.get(this.client.subscriptionId(), resourceGroupName, workflowName, runName, actionName, repetitionName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<WorkflowRunActionRepetitionDefinitionInner>>>() {
                @Override
                public Observable<ServiceResponse<WorkflowRunActionRepetitionDefinitionInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<WorkflowRunActionRepetitionDefinitionInner> clientResponse = getDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Deserialization/parse failures surface as onError, not as a throw.
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps HTTP 200 to the repetition definition type; any other status becomes a CloudException.
    private ServiceResponse<WorkflowRunActionRepetitionDefinitionInner> getDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<WorkflowRunActionRepetitionDefinitionInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<WorkflowRunActionRepetitionDefinitionInner>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

}
/*************************GO-LICENSE-START*********************************
 * Copyright 2014 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.config;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.thoughtworks.go.domain.ConfigErrors;
import com.thoughtworks.go.domain.EnvironmentPipelineMatcher;
import com.thoughtworks.go.util.command.EnvironmentVariableContext;
import org.apache.commons.lang.builder.ToStringBuilder;

import static com.thoughtworks.go.util.command.EnvironmentVariableContext.GO_ENVIRONMENT_NAME;

/**
 * @understands the current persistent information related to a logical grouping of machines
 */
@ConfigTag("environment")
public class EnvironmentConfig implements ParamsAttributeAware, Validatable, EnvironmentVariableScope {
    // Field values are populated from XML config via the @ConfigAttribute /
    // @ConfigSubtag annotations; renaming fields would break deserialization.
    @ConfigAttribute(value = NAME_FIELD, optional = false)
    private CaseInsensitiveString name;

    @ConfigSubtag
    private EnvironmentVariablesConfig variables = new EnvironmentVariablesConfig();

    @ConfigSubtag
    private EnvironmentAgentsConfig agents = new EnvironmentAgentsConfig();

    @ConfigSubtag
    private EnvironmentPipelinesConfig pipelines = new EnvironmentPipelinesConfig();

    // Keys used both as config attribute names and as map keys in
    // setConfigAttributes(Object).
    static final String NAME_FIELD = "name";
    static final String PIPELINES_FIELD = "pipelines";
    static final String AGENTS_FIELD = "agents";
    static final String VARIABLES_FIELD = "variables";

    private final ConfigErrors configErrors = new ConfigErrors();

    public EnvironmentConfig() {
    }

    public EnvironmentConfig(final CaseInsensitiveString name) {
        this.name = name;
    }

    /** Intentionally empty: this config node performs no self-validation here. */
    public void validate(ValidationContext validationContext) {
    }

    public ConfigErrors errors() {
        return configErrors;
    }

    public void addError(String fieldName, String message) {
        configErrors.add(fieldName, message);
    }

    /** Builds a matcher over this environment's agent uuids and pipelines. */
    public EnvironmentPipelineMatcher createMatcher() {
        return new EnvironmentPipelineMatcher(name, agents.getUuids(), pipelines);
    }

    /** @return true if an agent with the given uuid belongs to this environment. */
    public boolean hasAgent(String uuid) {
        for (EnvironmentAgentConfig agent : agents) {
            if (agent.hasUuid(uuid)) {
                return true;
            }
        }
        return false;
    }

    /** Records an error on each referenced agent whose uuid is not in the given set. */
    public void validateContainsOnlyUuids(Set<String> uuids) {
        for (EnvironmentAgentConfig agent : agents) {
            agent.validateUuidPresent(name, uuids);
        }
    }

    public boolean containsPipeline(final CaseInsensitiveString pipelineName) {
        return pipelines.containsPipelineNamed(pipelineName);
    }

    public void addAgent(String uuid) {
        agents.add(new EnvironmentAgentConfig(uuid));
    }

    /** Adds the agent only if it is not already associated with this environment. */
    public void addAgentIfNew(String uuid) {
        EnvironmentAgentConfig agentConfig = new EnvironmentAgentConfig(uuid);
        if (!agents.contains(agentConfig)) {
            agents.add(agentConfig);
        }
    }

    public void removeAgent(String uuid) {
        agents.remove(new EnvironmentAgentConfig(uuid));
    }

    public boolean hasName(final CaseInsensitiveString environmentName) {
        return name.equals(environmentName);
    }

    public void addPipeline(final CaseInsensitiveString pipelineName) {
        pipelines.add(new EnvironmentPipelineConfig(pipelineName));
    }

    /** Case-insensitive check for a pipeline reference by plain string name. */
    public boolean contains(String pipelineName) {
        return pipelines.containsPipelineNamed(new CaseInsensitiveString(pipelineName));
    }

    public void validateContainsOnlyPipelines(List<CaseInsensitiveString> pipelineNames) {
        pipelines.validateContainsOnlyPipelines(name, pipelineNames);
    }

    // NOTE(review): despite the name, this returns true when ANY pipeline is
    // shared with the other environment (an overlap check), not when the two
    // pipeline sets are identical — confirm callers rely on overlap semantics.
    public boolean hasSamePipelinesAs(EnvironmentConfig other) {
        for (EnvironmentPipelineConfig pipeline : pipelines) {
            if (other.pipelines.containsPipelineNamed(pipeline.getName())) {
                return true;
            }
        }
        return false;
    }

    public CaseInsensitiveString name() {
        return name;
    }

    public EnvironmentAgentsConfig getAgents() {
        return agents;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        EnvironmentConfig that = (EnvironmentConfig) o;

        if (agents != null ? !agents.equals(that.agents) : that.agents != null) {
            return false;
        }
        if (name != null ? !name.equals(that.name) : that.name != null) {
            return false;
        }
        if (pipelines != null ? !pipelines.equals(that.pipelines) : that.pipelines != null) {
            return false;
        }
        if (variables != null ? !variables.equals(that.variables) : that.variables != null) {
            return false;
        }

        return true;
    }

    @Override
    public int hashCode() {
        // Consistent with equals(): built from name, agents, pipelines, variables.
        int result = (name != null ? name.hashCode() : 0);
        result = 31 * result + (agents != null ? agents.hashCode() : 0);
        result = 31 * result + (pipelines != null ? pipelines.hashCode() : 0);
        result = 31 * result + (variables != null ? variables.hashCode() : 0);
        return result;
    }

    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this);
    }

    /** Adds a variable; the name is trimmed, the value is stored verbatim. */
    public void addEnvironmentVariable(String name, String value) {
        variables.add(new EnvironmentVariableConfig(name.trim(), value));
    }

    /**
     * Builds the runtime environment-variable context: seeds GO_ENVIRONMENT_NAME
     * with this environment's name, then layers the configured variables on top.
     */
    public EnvironmentVariableContext createEnvironmentContext() {
        EnvironmentVariableContext context = new EnvironmentVariableContext(GO_ENVIRONMENT_NAME, CaseInsensitiveString.str(name));
        variables.addTo(context);
        return context;
    }

    public List<CaseInsensitiveString> getPipelineNames() {
        ArrayList<CaseInsensitiveString> pipelineNames = new ArrayList<CaseInsensitiveString>();
        for (EnvironmentPipelineConfig pipeline : pipelines) {
            pipelineNames.add(pipeline.getName());
        }
        return pipelineNames;
    }

    public EnvironmentPipelinesConfig getPipelines() {
        return pipelines;
    }

    public boolean hasVariable(String variableName) {
        return variables.hasVariable(variableName);
    }

    public EnvironmentVariablesConfig getVariables() {
        return variables;
    }

    /**
     * Applies UI-submitted attributes (a Map keyed by the *_FIELD constants).
     * Keys that are absent leave the corresponding section unchanged.
     */
    public void setConfigAttributes(Object attributes) {
        if (attributes == null) {
            return;
        }
        Map attributeMap = (Map) attributes;
        if (attributeMap.containsKey(NAME_FIELD)) {
            name = new CaseInsensitiveString((String) attributeMap.get(NAME_FIELD));
        }
        if (attributeMap.containsKey(PIPELINES_FIELD)) {
            pipelines.setConfigAttributes(attributeMap.get(PIPELINES_FIELD));
        }
        if (attributeMap.containsKey(AGENTS_FIELD)) {
            agents.setConfigAttributes(attributeMap.get(AGENTS_FIELD));
        }
        if (attributeMap.containsKey(VARIABLES_FIELD)) {
            variables.setConfigAttributes(attributeMap.get(VARIABLES_FIELD));
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.mustache.MustacheScriptEngineService;

import java.io.IOException;
import java.util.Collections;
import java.util.Map;

/**
 * A {@link Script} specialization representing a (mustache) search template.
 * In addition to the script fields it carries the optional {@link XContentType}
 * the template source was written in, so inline templates can be echoed back
 * as raw content of the same type.
 */
public class Template extends Script {

    /** Content type of the template source; null when unknown/not applicable. */
    private XContentType contentType;

    /** No-arg constructor for stream deserialization (see {@link #readTemplate}). */
    public Template() {
        super();
    }

    /**
     * Constructor for simple inline template. The template will have no lang,
     * content type or params set.
     *
     * @param template
     *            The inline template.
     */
    public Template(String template) {
        super(template, MustacheScriptEngineService.NAME);
    }

    /**
     * Constructor for Template.
     *
     * @param template
     *            The cache key of the template to be compiled/executed. For
     *            inline templates this is the actual templates source code. For
     *            indexed templates this is the id used in the request. For on
     *            file templates this is the file name.
     * @param type
     *            The type of template -- dynamic, indexed, or file.
     * @param lang
     *            The language of the template to be compiled/executed.
     * @param xContentType
     *            The {@link XContentType} of the template.
     * @param params
     *            The map of parameters the template will be executed with.
     */
    public Template(String template, ScriptType type, @Nullable String lang, @Nullable XContentType xContentType,
            @Nullable Map<String, Object> params) {
        // Default the language to mustache when the caller did not specify one.
        super(template, type, lang == null ? MustacheScriptEngineService.NAME : lang, params);
        this.contentType = xContentType;
    }

    /**
     * Method for getting the {@link XContentType} of the template.
     *
     * @return The {@link XContentType} of the template.
     */
    public XContentType getContentType() {
        return contentType;
    }

    @Override
    protected void doReadFrom(StreamInput in) throws IOException {
        // A boolean flag precedes the content type on the wire (see doWriteTo).
        if (in.readBoolean()) {
            this.contentType = XContentType.readFrom(in);
        }
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        boolean hasContentType = contentType != null;
        out.writeBoolean(hasContentType);
        if (hasContentType) {
            XContentType.writeTo(contentType, out);
        }
    }

    @Override
    protected XContentBuilder scriptFieldToXContent(String template, ScriptType type, XContentBuilder builder, Params builderParams)
            throws IOException {
        // Inline templates whose content type matches the builder's can be
        // emitted as raw bytes; everything else is written as a plain string.
        if (type == ScriptType.INLINE && contentType != null && builder.contentType() == contentType) {
            builder.rawField(type.getParseField().getPreferredName(), new BytesArray(template));
        } else {
            builder.field(type.getParseField().getPreferredName(), template);
        }
        return builder;
    }

    /** Reads a template from the stream (counterpart of {@code writeTo}). */
    public static Template readTemplate(StreamInput in) throws IOException {
        Template template = new Template();
        template.readFrom(in);
        return template;
    }

    /**
     * Parses a template from a config map, optionally removing matched entries.
     * Uses the mustache language and no additional template field names.
     */
    public static Script parse(Map<String, Object> config, boolean removeMatchedEntries, ParseFieldMatcher parseFieldMatcher) {
        // Collections.<String, ScriptType>emptyMap() replaces the raw-typed
        // Collections.EMPTY_MAP, eliminating the unchecked-conversion warning.
        return new TemplateParser(Collections.<String, ScriptType>emptyMap(), MustacheScriptEngineService.NAME)
                .parse(config, removeMatchedEntries, parseFieldMatcher);
    }

    /** Parses a template from an XContent parser using the mustache language. */
    public static Template parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
        return new TemplateParser(Collections.<String, ScriptType>emptyMap(), MustacheScriptEngineService.NAME)
                .parse(parser, parseFieldMatcher);
    }

    @Deprecated
    public static Template parse(XContentParser parser, Map<String, ScriptType> additionalTemplateFieldNames,
            ParseFieldMatcher parseFieldMatcher) throws IOException {
        return new TemplateParser(additionalTemplateFieldNames, MustacheScriptEngineService.NAME).parse(parser, parseFieldMatcher);
    }

    @Deprecated
    public static Template parse(XContentParser parser, Map<String, ScriptType> additionalTemplateFieldNames, String defaultLang,
            ParseFieldMatcher parseFieldMatcher) throws IOException {
        return new TemplateParser(additionalTemplateFieldNames, defaultLang).parse(parser, parseFieldMatcher);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = super.hashCode();
        result = prime * result + ((contentType == null) ? 0 : contentType.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!super.equals(obj))
            return false;
        if (getClass() != obj.getClass())
            return false;
        Template other = (Template) obj;
        // XContentType is an enum, so reference comparison is safe here.
        if (contentType != other.contentType)
            return false;
        return true;
    }

    /** Parser that builds {@link Template} instances from map/XContent input. */
    private static class TemplateParser extends AbstractScriptParser<Template> {

        // Captured while parsing an inline object template so the resulting
        // Template remembers the content type of its source.
        private XContentType contentType = null;
        private final Map<String, ScriptType> additionalTemplateFieldNames;
        private final String defaultLang;

        public TemplateParser(Map<String, ScriptType> additionalTemplateFieldNames, String defaultLang) {
            this.additionalTemplateFieldNames = additionalTemplateFieldNames;
            this.defaultLang = defaultLang;
        }

        @Override
        protected Template createSimpleScript(XContentParser parser) throws IOException {
            return new Template(String.valueOf(parser.objectText()), ScriptType.INLINE, MustacheScriptEngineService.NAME, contentType,
                    null);
        }

        @Override
        protected Template createScript(String script, ScriptType type, String lang, Map<String, Object> params) {
            return new Template(script, type, lang, contentType, params);
        }

        @Override
        protected String parseInlineScript(XContentParser parser) throws IOException {
            if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
                // Object-form template: remember its content type and re-serialize
                // the structure to a string for compilation.
                contentType = parser.contentType();
                XContentBuilder builder = XContentFactory.contentBuilder(contentType);
                return builder.copyCurrentStructure(parser).bytes().toUtf8();
            } else {
                return parser.text();
            }
        }

        @Override
        protected Map<String, ScriptType> getAdditionalScriptParameters() {
            return additionalTemplateFieldNames;
        }

        @Override
        protected String getDefaultScriptLang() {
            return defaultLang;
        }
    }
}
/*
 * Copyright 2015 Cask Data, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package co.cask.cdap.data2.metadata.store;

import co.cask.cdap.api.dataset.DatasetDefinition;
import co.cask.cdap.api.dataset.DatasetProperties;
import co.cask.cdap.common.conf.CConfiguration;
import co.cask.cdap.common.conf.Constants;
import co.cask.cdap.data.runtime.DataSetsModules;
import co.cask.cdap.data2.datafabric.dataset.DatasetsUtil;
import co.cask.cdap.data2.dataset2.DatasetFramework;
import co.cask.cdap.data2.dataset2.DatasetManagementException;
import co.cask.cdap.data2.metadata.dataset.MetadataDataset;
import co.cask.cdap.data2.metadata.dataset.MetadataEntry;
import co.cask.cdap.data2.metadata.dataset.MetadataHistoryEntry;
import co.cask.cdap.data2.metadata.publisher.MetadataChangePublisher;
import co.cask.cdap.data2.transaction.Transactions;
import co.cask.cdap.proto.Id;
import co.cask.cdap.proto.metadata.MetadataChangeRecord;
import co.cask.cdap.proto.metadata.MetadataRecord;
import co.cask.cdap.proto.metadata.MetadataScope;
import co.cask.cdap.proto.metadata.MetadataSearchResultRecord;
import co.cask.cdap.proto.metadata.MetadataSearchTargetType;
import co.cask.tephra.TransactionExecutor;
import co.cask.tephra.TransactionExecutorFactory;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.inject.Inject;
import com.google.inject.name.Named;

import java.io.IOException;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Implementation of {@link MetadataStore} used in distributed mode.
 *
 * Every mutation runs inside a transaction against the scope-specific
 * {@link MetadataDataset}. When change publishing is enabled (see
 * Constants.Metadata.UPDATES_PUBLISH_ENABLED) each mutator also captures the
 * pre-change record inside the transaction and publishes a change record
 * (previous state + additions + deletions) after the transaction completes.
 */
public class DefaultMetadataStore implements MetadataStore {
  // TODO: CDAP-4311 Needed only for Upgrade Tool for 3.3. Make private in 3.4.
  public static final Id.DatasetInstance BUSINESS_METADATA_INSTANCE_ID =
    Id.DatasetInstance.from(Id.Namespace.SYSTEM, "business.metadata");
  private static final Id.DatasetInstance SYSTEM_METADATA_INSTANCE_ID =
    Id.DatasetInstance.from(Id.Namespace.SYSTEM, "system.metadata");
  private static final Map<String, String> EMPTY_PROPERTIES = ImmutableMap.of();
  private static final Set<String> EMPTY_TAGS = ImmutableSet.of();

  private final CConfiguration cConf;
  private final TransactionExecutorFactory txExecutorFactory;
  private final DatasetFramework dsFramework;
  private final MetadataChangePublisher changePublisher;

  @Inject
  DefaultMetadataStore(TransactionExecutorFactory txExecutorFactory,
                       @Named(DataSetsModules.BASIC_DATASET_FRAMEWORK) DatasetFramework dsFramework,
                       CConfiguration cConf, MetadataChangePublisher changePublisher) {
    this.txExecutorFactory = txExecutorFactory;
    this.dsFramework = dsFramework;
    this.cConf = cConf;
    this.changePublisher = changePublisher;
  }

  /**
   * Adds/updates metadata for the specified {@link Id.NamespacedId}.
   */
  @Override
  public void setProperties(MetadataScope scope, final Id.NamespacedId entityId,
                            final Map<String, String> properties) {
    if (!cConf.getBoolean(Constants.Metadata.UPDATES_PUBLISH_ENABLED)) {
      setPropertiesNoPublish(scope, entityId, properties);
      return;
    }
    // Snapshot of the record before this update, captured inside the transaction.
    final AtomicReference<MetadataRecord> previousRef = new AtomicReference<>();
    execute(new TransactionExecutor.Procedure<MetadataDataset>() {
      @Override
      public void apply(MetadataDataset input) throws Exception {
        Map<String, String> existingProperties = input.getProperties(entityId);
        Set<String> existingTags = input.getTags(entityId);
        previousRef.set(new MetadataRecord(entityId, existingProperties, existingTags));
        for (Map.Entry<String, String> entry : properties.entrySet()) {
          input.setProperty(entityId, entry.getKey(), entry.getValue());
        }
      }
    }, scope);
    final ImmutableMap.Builder<String, String> propAdditions = ImmutableMap.builder();
    final ImmutableMap.Builder<String, String> propDeletions = ImmutableMap.builder();
    MetadataRecord previousRecord = previousRef.get();
    // Iterating over properties all over again, because we want to move the diff calculation outside the transaction.
    for (Map.Entry<String, String> entry : properties.entrySet()) {
      String existingValue = previousRecord.getProperties().get(entry.getKey());
      if (existingValue != null && existingValue.equals(entry.getValue())) {
        // Value already exists and is the same as the value being passed. No update necessary.
        continue;
      }
      // At this point, its either an update of an existing property (1 addition + 1 deletion) or a new property.
      // If it is an update, then mark a single deletion.
      if (existingValue != null) {
        propDeletions.put(entry.getKey(), existingValue);
      }
      // In both update or new cases, mark a single addition.
      propAdditions.put(entry.getKey(), entry.getValue());
    }
    publish(previousRecord, new MetadataRecord(entityId, propAdditions.build(), EMPTY_TAGS),
            new MetadataRecord(entityId, propDeletions.build(), EMPTY_TAGS));
  }

  // Transactional property update without capturing or publishing a change record.
  private void setPropertiesNoPublish(MetadataScope scope, final Id.NamespacedId entityId,
                                      final Map<String, String> properties) {
    execute(new TransactionExecutor.Procedure<MetadataDataset>() {
      @Override
      public void apply(MetadataDataset input) throws Exception {
        for (Map.Entry<String, String> entry : properties.entrySet()) {
          input.setProperty(entityId, entry.getKey(), entry.getValue());
        }
      }
    }, scope);
  }

  /**
   * Adds tags for the specified {@link Id.NamespacedId}.
   */
  @Override
  public void addTags(MetadataScope scope, final Id.NamespacedId entityId, final String... tagsToAdd) {
    if (!cConf.getBoolean(Constants.Metadata.UPDATES_PUBLISH_ENABLED)) {
      addTagsNoPublish(scope, entityId, tagsToAdd);
      return;
    }
    final AtomicReference<MetadataRecord> previousRef = new AtomicReference<>();
    execute(new TransactionExecutor.Procedure<MetadataDataset>() {
      @Override
      public void apply(MetadataDataset input) throws Exception {
        Map<String, String> existingProperties = input.getProperties(entityId);
        Set<String> existingTags = input.getTags(entityId);
        previousRef.set(new MetadataRecord(entityId, existingProperties, existingTags));
        input.addTags(entityId, tagsToAdd);
      }
    }, scope);
    // Additions = the new tags; deletions = empty.
    publish(previousRef.get(), new MetadataRecord(entityId, EMPTY_PROPERTIES, Sets.newHashSet(tagsToAdd)),
            new MetadataRecord(entityId));
  }

  private void addTagsNoPublish(MetadataScope scope, final Id.NamespacedId entityId, final String... tagsToAdd) {
    execute(new TransactionExecutor.Procedure<MetadataDataset>() {
      @Override
      public void apply(MetadataDataset input) throws Exception {
        input.addTags(entityId, tagsToAdd);
      }
    }, scope);
  }

  // Aggregates the USER and SYSTEM scope records for the entity.
  @Override
  public Set<MetadataRecord> getMetadata(Id.NamespacedId entityId) {
    return ImmutableSet.of(getMetadata(MetadataScope.USER, entityId), getMetadata(MetadataScope.SYSTEM, entityId));
  }

  @Override
  public MetadataRecord getMetadata(final MetadataScope scope, final Id.NamespacedId entityId) {
    return execute(new TransactionExecutor.Function<MetadataDataset, MetadataRecord>() {
      @Override
      public MetadataRecord apply(MetadataDataset input) throws Exception {
        Map<String, String> properties = input.getProperties(entityId);
        Set<String> tags = input.getTags(entityId);
        return new MetadataRecord(entityId, properties, tags);
      }
    }, scope);
  }

  /**
   * @return a set of {@link MetadataRecord}s representing all the metadata (including properties and tags)
   * for the specified set of {@link Id.NamespacedId}s.
   */
  @Override
  public Set<MetadataRecord> getMetadata(MetadataScope scope, final Set<Id.NamespacedId> entityIds) {
    return execute(new TransactionExecutor.Function<MetadataDataset, Set<MetadataRecord>>() {
      @Override
      public Set<MetadataRecord> apply(MetadataDataset input) throws Exception {
        Set<MetadataRecord> metadataRecords = new HashSet<>(entityIds.size());
        for (Id.NamespacedId entityId : entityIds) {
          Map<String, String> properties = input.getProperties(entityId);
          Set<String> tags = input.getTags(entityId);
          metadataRecords.add(new MetadataRecord(entityId, properties, tags));
        }
        return metadataRecords;
      }
    }, scope);
  }

  // Merges USER and SYSTEM scope properties into a single map.
  // NOTE(review): ImmutableMap.Builder.build() throws on duplicate keys, so this
  // presumes the two scopes never share a property key — confirm against callers.
  @Override
  public Map<String, String> getProperties(Id.NamespacedId entityId) {
    return ImmutableMap.<String, String>builder()
      .putAll(getProperties(MetadataScope.USER, entityId))
      .putAll(getProperties(MetadataScope.SYSTEM, entityId))
      .build();
  }

  /**
   * @return the metadata for the specified {@link Id.NamespacedId}
   */
  @Override
  public Map<String, String> getProperties(MetadataScope scope, final Id.NamespacedId entityId) {
    return execute(new TransactionExecutor.Function<MetadataDataset, Map<String, String>>() {
      @Override
      public Map<String, String> apply(MetadataDataset input) throws Exception {
        return input.getProperties(entityId);
      }
    }, scope);
  }

  // Union of USER and SYSTEM scope tags.
  @Override
  public Set<String> getTags(Id.NamespacedId entityId) {
    return ImmutableSet.<String>builder()
      .addAll(getTags(MetadataScope.USER, entityId))
      .addAll(getTags(MetadataScope.SYSTEM, entityId))
      .build();
  }

  /**
   * @return the tags for the specified {@link Id.NamespacedId}
   */
  @Override
  public Set<String> getTags(MetadataScope scope, final Id.NamespacedId entityId) {
    return execute(new TransactionExecutor.Function<MetadataDataset, Set<String>>() {
      @Override
      public Set<String> apply(MetadataDataset input) throws Exception {
        return input.getTags(entityId);
      }
    }, scope);
  }

  // Removes metadata in both scopes, one scope at a time.
  @Override
  public void removeMetadata(Id.NamespacedId entityId) {
    removeMetadata(MetadataScope.USER, entityId);
    removeMetadata(MetadataScope.SYSTEM, entityId);
  }

  /**
   * Removes all metadata (including properties and tags) for the specified {@link Id.NamespacedId}.
   */
  @Override
  public void removeMetadata(MetadataScope scope, final Id.NamespacedId entityId) {
    if (!cConf.getBoolean(Constants.Metadata.UPDATES_PUBLISH_ENABLED)) {
      removeMetadataNoPublish(scope, entityId);
      return;
    }
    final AtomicReference<MetadataRecord> previousRef = new AtomicReference<>();
    execute(new TransactionExecutor.Procedure<MetadataDataset>() {
      @Override
      public void apply(MetadataDataset input) throws Exception {
        previousRef.set(new MetadataRecord(entityId, input.getProperties(entityId), input.getTags(entityId)));
        input.removeProperties(entityId);
        input.removeTags(entityId);
      }
    }, scope);
    MetadataRecord previous = previousRef.get();
    // Everything that existed before is a deletion; there are no additions.
    publish(previous, new MetadataRecord(entityId), new MetadataRecord(previous));
  }

  private void removeMetadataNoPublish(MetadataScope scope, final Id.NamespacedId entityId) {
    execute(new TransactionExecutor.Procedure<MetadataDataset>() {
      @Override
      public void apply(MetadataDataset input) throws Exception {
        input.removeProperties(entityId);
        input.removeTags(entityId);
      }
    }, scope);
  }

  /**
   * Removes all properties for the specified {@link Id.NamespacedId}.
   */
  @Override
  public void removeProperties(MetadataScope scope, final Id.NamespacedId entityId) {
    if (!cConf.getBoolean(Constants.Metadata.UPDATES_PUBLISH_ENABLED)) {
      removePropertiesNoPublish(scope, entityId);
      return;
    }
    final AtomicReference<MetadataRecord> previousRef = new AtomicReference<>();
    execute(new TransactionExecutor.Procedure<MetadataDataset>() {
      @Override
      public void apply(MetadataDataset input) throws Exception {
        previousRef.set(new MetadataRecord(entityId, input.getProperties(entityId), input.getTags(entityId)));
        input.removeProperties(entityId);
      }
    }, scope);
    publish(previousRef.get(), new MetadataRecord(entityId),
            new MetadataRecord(entityId, previousRef.get().getProperties(), EMPTY_TAGS));
  }

  private void removePropertiesNoPublish(MetadataScope scope, final Id.NamespacedId entityId) {
    execute(new TransactionExecutor.Procedure<MetadataDataset>() {
      @Override
      public void apply(MetadataDataset input) throws Exception {
        input.removeProperties(entityId);
      }
    }, scope);
  }

  /**
   * Removes the specified properties of the {@link Id.NamespacedId}.
   */
  @Override
  public void removeProperties(MetadataScope scope, final Id.NamespacedId entityId, final String... keys) {
    if (!cConf.getBoolean(Constants.Metadata.UPDATES_PUBLISH_ENABLED)) {
      removePropertiesNoPublish(scope, entityId, keys);
      return;
    }
    final AtomicReference<MetadataRecord> previousRef = new AtomicReference<>();
    final ImmutableMap.Builder<String, String> deletesBuilder = ImmutableMap.builder();
    execute(new TransactionExecutor.Procedure<MetadataDataset>() {
      @Override
      public void apply(MetadataDataset input) throws Exception {
        previousRef.set(new MetadataRecord(entityId, input.getProperties(entityId), input.getTags(entityId)));
        // Record only keys that actually exist, so the published deletions are exact.
        for (String key : keys) {
          MetadataEntry record = input.getProperty(entityId, key);
          if (record == null) {
            continue;
          }
          deletesBuilder.put(record.getKey(), record.getValue());
        }
        input.removeProperties(entityId, keys);
      }
    }, scope);
    publish(previousRef.get(), new MetadataRecord(entityId),
            new MetadataRecord(entityId, deletesBuilder.build(), EMPTY_TAGS));
  }

  private void removePropertiesNoPublish(MetadataScope scope, final Id.NamespacedId entityId, final String... keys) {
    execute(new TransactionExecutor.Procedure<MetadataDataset>() {
      @Override
      public void apply(MetadataDataset input) throws Exception {
        input.removeProperties(entityId, keys);
      }
    }, scope);
  }

  /**
   * Removes all the tags from the {@link Id.NamespacedId}
   */
  @Override
  public void removeTags(MetadataScope scope, final Id.NamespacedId entityId) {
    if (!cConf.getBoolean(Constants.Metadata.UPDATES_PUBLISH_ENABLED)) {
      removeTagsNoPublish(scope, entityId);
      return;
    }
    final AtomicReference<MetadataRecord> previousRef = new AtomicReference<>();
    execute(new TransactionExecutor.Procedure<MetadataDataset>() {
      @Override
      public void apply(MetadataDataset input) throws Exception {
        previousRef.set(new MetadataRecord(entityId, input.getProperties(entityId), input.getTags(entityId)));
        input.removeTags(entityId);
      }
    }, scope);
    MetadataRecord previous = previousRef.get();
    publish(previous, new MetadataRecord(entityId),
            new MetadataRecord(entityId, EMPTY_PROPERTIES, previous.getTags()));
  }

  private void removeTagsNoPublish(MetadataScope scope, final Id.NamespacedId entityId) {
    execute(new TransactionExecutor.Procedure<MetadataDataset>() {
      @Override
      public void apply(MetadataDataset input) throws Exception {
        input.removeTags(entityId);
      }
    }, scope);
  }

  /**
   * Removes the specified tags from the {@link Id.NamespacedId}
   */
  @Override
  public void removeTags(MetadataScope scope, final Id.NamespacedId entityId, final String... tagsToRemove) {
    if (!cConf.getBoolean(Constants.Metadata.UPDATES_PUBLISH_ENABLED)) {
      removeTagsNoPublish(scope, entityId, tagsToRemove);
      return;
    }
    final AtomicReference<MetadataRecord> previousRef = new AtomicReference<>();
    execute(new TransactionExecutor.Procedure<MetadataDataset>() {
      @Override
      public void apply(MetadataDataset input) throws Exception {
        previousRef.set(new MetadataRecord(entityId, input.getProperties(entityId), input.getTags(entityId)));
        input.removeTags(entityId, tagsToRemove);
      }
    }, scope);
    publish(previousRef.get(), new MetadataRecord(entityId),
            new MetadataRecord(entityId, EMPTY_PROPERTIES, Sets.newHashSet(tagsToRemove)));
  }

  private void removeTagsNoPublish(MetadataScope scope, final Id.NamespacedId entityId, final String... tagsToRemove) {
    execute(new TransactionExecutor.Procedure<MetadataDataset>() {
      @Override
      public void apply(MetadataDataset input) throws Exception {
        input.removeTags(entityId, tagsToRemove);
      }
    }, scope);
  }

  // Cross-scope search: union of USER and SYSTEM results.
  @Override
  public Set<MetadataSearchResultRecord> searchMetadata(String namespaceId, String searchQuery) {
    return ImmutableSet.<MetadataSearchResultRecord>builder()
      .addAll(searchMetadata(MetadataScope.USER, namespaceId, searchQuery))
      .addAll(searchMetadata(MetadataScope.SYSTEM, namespaceId, searchQuery))
      .build();
  }

  @Override
  public Set<MetadataSearchResultRecord> searchMetadata(MetadataScope scope, String namespaceId, String searchQuery) {
    return searchMetadataOnType(scope, namespaceId, searchQuery, MetadataSearchTargetType.ALL);
  }

  @Override
  public Set<MetadataSearchResultRecord> searchMetadataOnType(String namespaceId, String searchQuery,
                                                              MetadataSearchTargetType type) {
    return ImmutableSet.<MetadataSearchResultRecord>builder()
      .addAll(searchMetadataOnType(MetadataScope.USER, namespaceId, searchQuery, type))
      .addAll(searchMetadataOnType(MetadataScope.SYSTEM, namespaceId, searchQuery, type))
      .build();
  }

  @Override
  public Set<MetadataSearchResultRecord> searchMetadataOnType(MetadataScope scope, final String namespaceId,
                                                              final String searchQuery,
                                                              final MetadataSearchTargetType type) {
    Iterable<MetadataEntry> metadataEntries =
      execute(new TransactionExecutor.Function<MetadataDataset, Iterable<MetadataEntry>>() {
        @Override
        public Iterable<MetadataEntry> apply(MetadataDataset input) throws Exception {
          // Currently we support two types of search formats: value and key:value.
          // Check for existence of separator char to make sure we did search in the right indexed column.
          if (searchQuery.contains(MetadataDataset.KEYVALUE_SEPARATOR)) {
            // key=value search
            return input.searchByKeyValue(namespaceId, searchQuery, type);
          }
          // value search
          return input.searchByValue(namespaceId, searchQuery, type);
        }
      }, scope);
    ImmutableSet.Builder<MetadataSearchResultRecord> builder = ImmutableSet.builder();
    for (MetadataEntry metadataEntry : metadataEntries) {
      builder.add(new MetadataSearchResultRecord(metadataEntry.getTargetId()));
    }
    return builder.build();
  }

  // Cross-scope snapshot: union of USER and SYSTEM snapshots.
  @Override
  public Set<MetadataRecord> getSnapshotBeforeTime(final Set<Id.NamespacedId> entityIds, final long timeMillis) {
    return ImmutableSet.<MetadataRecord>builder()
      .addAll(getSnapshotBeforeTime(MetadataScope.USER, entityIds, timeMillis))
      .addAll(getSnapshotBeforeTime(MetadataScope.SYSTEM, entityIds, timeMillis))
      .build();
  }

  @Override
  public Set<MetadataRecord> getSnapshotBeforeTime(MetadataScope scope, final Set<Id.NamespacedId> entityIds,
                                                   final long timeMillis) {
    Set<MetadataHistoryEntry> metadataHistoryEntries =
      execute(new TransactionExecutor.Function<MetadataDataset, Set<MetadataHistoryEntry>>() {
        @Override
        public Set<MetadataHistoryEntry> apply(MetadataDataset input) throws Exception {
          return input.getSnapshotBeforeTime(entityIds, timeMillis);
        }
      }, scope);
    ImmutableSet.Builder<MetadataRecord> builder = ImmutableSet.builder();
    for (MetadataHistoryEntry metadataHistoryEntry : metadataHistoryEntries) {
      builder.add(new MetadataRecord(metadataHistoryEntry.getEntityId(), scope,
                                     metadataHistoryEntry.getProperties(), metadataHistoryEntry.getTags()));
    }
    return builder.build();
  }

  // Publishes a change record built from the pre-change state plus the computed diff.
  private void publish(MetadataRecord previous, MetadataRecord additions, MetadataRecord deletions) {
    MetadataChangeRecord.MetadataDiffRecord diff = new MetadataChangeRecord.MetadataDiffRecord(additions, deletions);
    MetadataChangeRecord changeRecord = new MetadataChangeRecord(previous, diff, System.currentTimeMillis());
    changePublisher.publish(changeRecord);
  }

  // Runs a function transactionally against the dataset for the given scope.
  private <T> T execute(TransactionExecutor.Function<MetadataDataset, T> func, MetadataScope scope) {
    MetadataDataset metadataDataset = newMetadataDataset(scope);
    TransactionExecutor txExecutor = Transactions.createTransactionExecutor(txExecutorFactory, metadataDataset);
    return txExecutor.executeUnchecked(func, metadataDataset);
  }

  // Runs a procedure transactionally against the dataset for the given scope.
  private void execute(TransactionExecutor.Procedure<MetadataDataset> func, MetadataScope scope) {
    MetadataDataset metadataScope = newMetadataDataset(scope);
    TransactionExecutor txExecutor = Transactions.createTransactionExecutor(txExecutorFactory, metadataScope);
    txExecutor.executeUnchecked(func, metadataScope);
  }

  // Gets or lazily creates the scope-specific metadata dataset instance.
  private MetadataDataset newMetadataDataset(MetadataScope scope) {
    try {
      return DatasetsUtil.getOrCreateDataset(
        dsFramework, getMetadataDatasetInstance(scope), MetadataDataset.class.getName(),
        DatasetProperties.EMPTY, DatasetDefinition.NO_ARGUMENTS, null);
    } catch (Exception e) {
      throw Throwables.propagate(e);
    }
  }

  private Id.DatasetInstance getMetadataDatasetInstance(MetadataScope scope) {
    return MetadataScope.USER == scope ? BUSINESS_METADATA_INSTANCE_ID : SYSTEM_METADATA_INSTANCE_ID;
  }

  /**
   * Adds datasets and types to the given {@link DatasetFramework}. Used by the upgrade tool to upgrade Metadata
   * Datasets.
   *
   * @param framework Dataset framework to add types and datasets to
   */
  public static void setupDatasets(DatasetFramework framework) throws IOException, DatasetManagementException {
    framework.addInstance(MetadataDataset.class.getName(), BUSINESS_METADATA_INSTANCE_ID, DatasetProperties.EMPTY);
    framework.addInstance(MetadataDataset.class.getName(), SYSTEM_METADATA_INSTANCE_ID, DatasetProperties.EMPTY);
  }
}
/* * The MIT License (MIT) * * Copyright (c) 2007-2015 Broad Institute * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package org.broad.igv.data.seg;

//~--- non-JDK imports --------------------------------------------------------

import org.apache.log4j.Logger;
import org.broad.igv.Globals;
import org.broad.igv.exceptions.DataLoadException;
import org.broad.igv.exceptions.ParserException;
import org.broad.igv.feature.genome.Genome;
import org.broad.igv.track.TrackType;
import org.broad.igv.util.ParsingUtils;
import org.broad.igv.util.ResourceLocator;

import htsjdk.tribble.readers.AsciiLineReader;

/**
 * Parses tab-delimited segmented copy-number files into a {@link SegmentedAsciiDataSet}.
 * <p>
 * Example
 * CCLE_name    chrom    loc.start    loc.end    num.mark    seg.mean
 * A2780_OVARY    1    51598    88465    17    1.0491
 * A2780_OVARY    1    218569    55606710    30512    0.0248
 * A2780_OVARY    1    55606801    61331401    4399    -0.0431
 *
 * @author jrobinso
 */
public class SegmentFileParser implements SegFileParser {

    enum Type {
        SEG, BIRDSUITE, NEXUS
    }

    private static Logger log = Logger.getLogger(SegmentFileParser.class);

    // Column layout; defaults match the standard ".seg" format and are
    // re-assigned in loadSegments() once the header is known.
    boolean birdsuite = false;
    int sampleColumn = 0;
    int chrColumn = 1;
    int startColumn = 2;
    int endColumn = 3;
    //int snpCountColumn = 4;    // Default value
    int dataColumn = 5;          // Default value
    ResourceLocator locator;

    /**
     * @param locator locator of the file to parse; the birdsuite variant is
     *                inferred from the file-name suffix "birdseye_canary_calls"
     */
    public SegmentFileParser(ResourceLocator locator) {
        this.locator = locator;
        if (locator.getPath().toLowerCase().endsWith("birdseye_canary_calls")) {
            birdsuite = true;
        }
    }

    /**
     * Loads all segments from the file into a single {@link SegmentedAsciiDataSet}
     * keyed by sample/track id.
     *
     * @param locator resource to read (comments, header row, then data rows)
     * @param genome  optional genome used to canonicalize chromosome names; may be null
     * @return the populated, sorted dataset (empty if the file has no data rows)
     * @throws ParserException if a start/end column is not numeric
     */
    public SegmentedAsciiDataSet loadSegments(ResourceLocator locator, Genome genome) {
        SegmentedAsciiDataSet dataset = new SegmentedAsciiDataSet(genome);
        if (birdsuite) {
            dataset.setTrackType(TrackType.CNV);
        }

        AsciiLineReader reader = null;
        String nextLine = null;
        int lineNumber = 0;

        try {
            reader = ParsingUtils.openAsciiReader(locator);

            // Parse comments, if any. readLine() returns null at EOF, which the
            // previous version dereferenced directly (NPE on empty files).
            nextLine = reader.readLine();
            while (nextLine != null && (nextLine.startsWith("#") || (nextLine.trim().length() == 0))) {
                lineNumber++;
                if (nextLine.length() > 0) {
                    parseComment(nextLine, dataset);
                }
                nextLine = reader.readLine();
            }
            if (nextLine == null) {
                // Empty or comment-only file: nothing to load.
                return dataset;
            }

            // Read column headings
            String[] headings = nextLine.split("\t");
            lineNumber++;   // count the header row so reported line numbers match the file

            if (birdsuite) {
                //sample  sample_index  copy_number  chr  start  end  confidence
                sampleColumn = 0;
                dataColumn = 2;
                chrColumn = 3;
                startColumn = 4;
                endColumn = 5;
            } else {
                sampleColumn = 0;
                chrColumn = 1;
                startColumn = 2;
                endColumn = 3;
                dataColumn = headings.length - 1;
            }

            while ((nextLine = reader.readLine()) != null && (nextLine.trim().length() > 0)) {
                lineNumber++;

                String[] tokens = Globals.tabPattern.split(nextLine, -1);
                int nTokens = tokens.length;
                if (nTokens > 4) {
                    int start;
                    int end;
                    try {
                        start = ParsingUtils.parseInt(tokens[startColumn].trim());
                    } catch (NumberFormatException numberFormatException) {
                        throw new ParserException("Column " + (startColumn + 1) + " must contain a numeric value.",
                                lineNumber, nextLine);
                    }
                    try {
                        end = ParsingUtils.parseInt(tokens[endColumn].trim());
                    } catch (NumberFormatException numberFormatException) {
                        throw new ParserException("Column " + (endColumn + 1) + " must contain a numeric value.",
                                lineNumber, nextLine);
                    }

                    String chr = tokens[chrColumn].trim();
                    if (genome != null) {
                        chr = genome.getCanonicalChrName(chr);
                    }

                    // trim() already yields a fresh String; "new String(...)" was redundant.
                    String trackId = tokens[sampleColumn].trim();

                    // StringBuilder: local, single-threaded — no need for StringBuffer.
                    StringBuilder desc = null;
                    if (birdsuite) {
                        // Guard short rows: the confidence column may be missing.
                        if (nTokens > 6 && headings.length > 6) {
                            desc = new StringBuilder();
                            desc.append("<br>");
                            desc.append(headings[6]);
                            desc.append("=");
                            desc.append(tokens[6]);
                        }
                    } else {
                        if (tokens.length > 4) {
                            desc = new StringBuilder();
                            // Bound by tokens.length as well — a row may have fewer
                            // columns than the header when trailing fields are empty.
                            for (int i = 4; i < headings.length - 1 && i < tokens.length; i++) {
                                desc.append("<br>");
                                desc.append(headings[i]);
                                desc.append(": ");
                                desc.append(tokens[i]);
                            }
                        }
                    }

                    try {
                        float value = Float.parseFloat(tokens[dataColumn]);
                        String description = desc == null ? null : desc.toString();
                        dataset.addSegment(trackId, chr, start, end, value, description);
                    } catch (NumberFormatException numberFormatException) {
                        // Non-numeric data value (e.g. "NA") — skip the row silently.
                        // log.info("Skipping line: " + nextLine);
                    }
                }
            }

        } catch (DataLoadException pe) {
            throw pe;
        } catch (ParserException pe) {
            throw pe;
        } catch (Exception e) {
            // Wrap with line context when we know where the failure occurred.
            if (nextLine != null && lineNumber != 0) {
                throw new ParserException(e.getMessage(), e, lineNumber, nextLine);
            } else {
                throw new RuntimeException(e);
            }
        } finally {
            if (reader != null) {
                reader.close();
            }
        }

        dataset.sortLists();
        return dataset;
    }

    /**
     * Note: This is an exact copy of the method in ExpressionFileParser. Refactor to merge these
     * two parsers, or share a common base class.
     * <p>
     * Handles "#track ..." lines (track properties) and "#type=..." lines (track type).
     *
     * @param comment a line starting with '#'
     * @param dataset dataset whose properties/type are updated
     */
    private void parseComment(String comment, SegmentedAsciiDataSet dataset) {

        String tmp = comment.substring(1, comment.length());
        if (tmp.startsWith("track")) {
            ParsingUtils.parseTrackLine(tmp, dataset.getTrackProperties());
        } else {
            String[] tokens = tmp.split("=");
            String key = tokens[0].trim().toLowerCase();
            if (key.equals("type")) {
                if (tokens.length != 2) {
                    return;
                }
                try {
                    dataset.setTrackType(TrackType.valueOf(tokens[1].trim().toUpperCase()));
                } catch (Exception exception) {
                    log.error("Unknown track type: " + tokens[1].trim().toUpperCase());
                }
            }
        }
    }
}
/* * Copyright (c) 2014, WSO2 Inc. (http://wso2.com) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.developerstudio.codenvy.ui.integration.test.utils; import org.apache.commons.io.FileUtils; import org.apache.log4j.Logger; import org.openqa.selenium.support.events.AbstractWebDriverEventListener; import org.openqa.selenium.support.events.EventFiringWebDriver; import org.openqa.selenium.support.events.WebDriverEventListener; import org.openqa.selenium.support.ui.ExpectedConditions; import org.openqa.selenium.support.ui.WebDriverWait; import org.wso2.carbon.automation.engine.frameworkutils.FrameworkPathUtil; import org.wso2.carbon.automation.extensions.selenium.BrowserManager; import org.openqa.selenium.*; import java.io.File; import java.util.List; import java.util.Set; import java.util.Calendar; import java.text.SimpleDateFormat; /** * Using the modified customized version of seleium web driver from * https://github.com/wso2/product-es/blob/master/modules/integration/tests-ui-integration/tests-ui-extension/src/test/ * java/org.wso2.es.ui.integration.extension.util/ESWebDriver.java * * to enable snapshots on test failure with descriptive test reports */ public class DevSWebDriver implements org.openqa.selenium.WebDriver { protected static final Logger log = Logger.getLogger(DevSWebDriver.class); private static final String IMAGE_FILE_EXT = ".png"; private static final String SUREFIRE_REPORTS = "surefire-reports"; private static final String 
SCREEN_SHOT = "screen-shot"; private static final String DATE_TIME_FORMAT = "yyyy-MM-dd HH:mm:ss"; public static final String RGEX_TO_REPLACE_CHARACTERS = "\\p{Punct}"; public static final String REPLACEMENT = "_"; private final int maxWaitTime; private EventFiringWebDriver driver; private int errorCount = 0; private WebDriverEventListener errorListener = new AbstractWebDriverEventListener() { @Override public void onException(Throwable throwable, WebDriver driver) { errorCount++; String timeStamp = new SimpleDateFormat(DATE_TIME_FORMAT).format(Calendar.getInstance().getTime()); String snapshotName; if (null != throwable) {// because the throwable could be null. snapshotName = timeStamp + " : " + "Error SnapShot" + errorCount + " : " + throwable.getLocalizedMessage(); } else { snapshotName = timeStamp + " : " + "Error SnapShot" + errorCount; } captureScreenShot(snapshotName); } }; public DevSWebDriver() throws Exception { driver = new EventFiringWebDriver(BrowserManager.getWebDriver());//firefox web driver maxWaitTime = UITestConstants.WAITING_TIME_CONSTANT; driver.register(errorListener); } /** * This method takes a screen-shot of current web-driver instance * * * @param snapShotName String indicating name of the screen-shot */ public void captureScreenShot(String snapShotName) { try { String filename = snapShotName + IMAGE_FILE_EXT; String pathName = UITestConstants.SUREFIRE_IMAGE_SAVE_LOCATION + SUREFIRE_REPORTS + File.separator + SCREEN_SHOT; log.error("OnException - Saving Screen-shot : " + filename + " to location " + pathName); File screenShot = this.driver.getScreenshotAs(OutputType.FILE); FileUtils.copyFile(screenShot, new File(pathName + File.separator + filename)); } catch (Exception e) { log.error(e); } } /** * This method will keep refreshing/reloading the current url for a given number of poll-count * until a given element is available * * @param by Element that is expected to be present * @param pollCount Number of time page need to be reloaded into 
webdriver */ public void findElementPoll(By by, int pollCount) { int count = 0; while (!isElementPresent(by) && count < pollCount) { String url = driver.getCurrentUrl(); driver.get(url); count++; } } /** * This method checks whether a given element is present in the page * * @param by Element to be present in the page * @return true if element is present false otherwise */ private boolean isElementPresent(By by) { try { driver.findElement(by); return true; } catch (NoSuchElementException e) { return false; } } /** * This method will wait untill a given element is present in the page for a given amount of time * * @param by Element to be present in the current page * @param waitTimeSec Time to wait in seconds */ private void waitTillElementPresent(By by, int waitTimeSec) { WebDriverWait wait; wait = new WebDriverWait(driver, waitTimeSec); wait.until(ExpectedConditions.presenceOfElementLocated(by)); } @Override public void get(String s) { driver.get(s); } @Override public String getCurrentUrl() { return driver.getCurrentUrl(); } @Override public String getTitle() { return driver.getTitle(); } @Override public List<WebElement> findElements(By by) { return driver.findElements(by); } /** * This method has override the findElement method in a way it will wait for maximum of 30 seconds * * @param by By element for findElement method * @return return the result of default WebDriver.findElement(By by) subjected to 30sec of max wait time */ @Override public WebElement findElement(By by) { waitTillElementPresent(by, this.maxWaitTime); return driver.findElement(by); } @Override public String getPageSource() { return driver.getPageSource(); } @Override public void close() { driver.close(); } @Override public void quit() { driver.quit(); } @Override public Set<String> getWindowHandles() { return driver.getWindowHandles(); } @Override public String getWindowHandle() { return driver.getWindowHandle(); } @Override public TargetLocator switchTo() { return driver.switchTo(); } 
@Override public Navigation navigate() { return driver.navigate(); } @Override public Options manage() { return driver.manage(); } }
/* * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. Camunda licenses this file to you under the Apache License, * Version 2.0; you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.rest; import static io.restassured.RestAssured.given; import static io.restassured.path.json.JsonPath.from; import static org.camunda.bpm.engine.rest.helper.MockProvider.ANOTHER_EXAMPLE_ACTIVITY_ID; import static org.camunda.bpm.engine.rest.helper.MockProvider.ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID; import static org.camunda.bpm.engine.rest.helper.MockProvider.ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID; import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_BATCH_ID; import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_BATCH_JOBS_PER_SEED; import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_BATCH_JOB_DEFINITION_ID; import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_BATCH_TOTAL_JOBS; import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_BATCH_TYPE; import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_INVOCATIONS_PER_BATCH_JOB; import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_MONITOR_JOB_DEFINITION_ID; import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_SEED_JOB_DEFINITION_ID; import static 
org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_TENANT_ID; import static org.camunda.bpm.engine.rest.helper.MockProvider.NON_EXISTING_ACTIVITY_ID; import static org.camunda.bpm.engine.rest.helper.MockProvider.NON_EXISTING_PROCESS_DEFINITION_ID; import static org.camunda.bpm.engine.rest.helper.MockProvider.createMockBatch; import static org.camunda.bpm.engine.rest.helper.NoIntermediaryInvocation.immediatelyAfter; import static org.assertj.core.api.Assertions.assertThat; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyListOf; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; import static org.mockito.Matchers.isNull; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; import javax.ws.rs.core.Response.Status; import org.camunda.bpm.engine.BadUserRequestException; import org.camunda.bpm.engine.RuntimeService; import org.camunda.bpm.engine.batch.Batch; import org.camunda.bpm.engine.impl.ProcessInstanceQueryImpl; import org.camunda.bpm.engine.migration.MigratingActivityInstanceValidationReport; import org.camunda.bpm.engine.migration.MigratingProcessInstanceValidationException; import org.camunda.bpm.engine.migration.MigratingProcessInstanceValidationReport; import org.camunda.bpm.engine.migration.MigratingTransitionInstanceValidationReport; import org.camunda.bpm.engine.migration.MigrationInstruction; import org.camunda.bpm.engine.migration.MigrationInstructionValidationReport; import 
org.camunda.bpm.engine.migration.MigrationPlan;
import org.camunda.bpm.engine.migration.MigrationPlanBuilder;
import org.camunda.bpm.engine.migration.MigrationPlanExecutionBuilder;
import org.camunda.bpm.engine.migration.MigrationPlanValidationException;
import org.camunda.bpm.engine.migration.MigrationPlanValidationReport;
import org.camunda.bpm.engine.rest.dto.migration.MigrationInstructionDto;
import org.camunda.bpm.engine.rest.dto.runtime.ProcessInstanceQueryDto;
import org.camunda.bpm.engine.rest.helper.FluentAnswer;
import org.camunda.bpm.engine.rest.helper.MockMigrationPlanBuilder;
import org.camunda.bpm.engine.rest.helper.MockMigrationPlanBuilder.JoinedMigrationPlanBuilderMock;
import org.camunda.bpm.engine.rest.util.container.TestContainerRule;
import org.camunda.bpm.engine.rest.util.migration.MigrationExecutionDtoBuilder;
import org.camunda.bpm.engine.rest.util.migration.MigrationPlanDtoBuilder;
import org.camunda.bpm.engine.runtime.ProcessInstanceQuery;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.InOrder;
import org.mockito.Mockito;

import io.restassured.response.Response;
import java.util.List;
import org.camunda.bpm.engine.rest.util.migration.MigrationInstructionDtoBuilder;

/**
 * REST-layer tests for the /migration endpoints: plan generation, validation,
 * and synchronous/asynchronous execution. The engine's RuntimeService is fully
 * mocked; assertions verify both the HTTP responses and the interactions with
 * the mocked migration-plan builders.
 */
public class MigrationRestServiceInteractionTest extends AbstractRestServiceTest {

  @ClassRule
  public static TestContainerRule rule = new TestContainerRule();

  // Endpoint URLs under test.
  protected static final String MIGRATION_URL = TEST_RESOURCE_ROOT_PATH + "/migration";
  protected static final String GENERATE_MIGRATION_URL = MIGRATION_URL + "/generate";
  protected static final String VALIDATE_MIGRATION_URL = MIGRATION_URL + "/validate";
  protected static final String EXECUTE_MIGRATION_URL = MIGRATION_URL + "/execute";
  protected static final String EXECUTE_MIGRATION_ASYNC_URL = MIGRATION_URL + "/executeAsync";

  // Mocks shared by all test methods; (re)initialized in setUpRuntimeData().
  protected RuntimeService runtimeServiceMock;
  protected JoinedMigrationPlanBuilderMock migrationPlanBuilderMock;
  protected MigrationPlanExecutionBuilder migrationPlanExecutionBuilderMock;

  // Wires a mocked RuntimeService into the engine: a plan builder preloaded
  // with two instructions, and an execution builder returned for any plan.
  @Before
  public void setUpRuntimeData() {
    runtimeServiceMock = mock(RuntimeService.class);
    when(processEngine.getRuntimeService()).thenReturn(runtimeServiceMock);

    migrationPlanBuilderMock = new MockMigrationPlanBuilder()
      .sourceProcessDefinitionId(EXAMPLE_PROCESS_DEFINITION_ID)
      .targetProcessDefinitionId(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
      .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
      .builder();
    when(runtimeServiceMock.createMigrationPlan(eq(EXAMPLE_PROCESS_DEFINITION_ID), eq(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)))
      .thenReturn(migrationPlanBuilderMock);

    migrationPlanExecutionBuilderMock = mock(MigrationPlanExecutionBuilder.class);
    when(migrationPlanExecutionBuilderMock.processInstanceIds(anyListOf(String.class))).thenReturn(migrationPlanExecutionBuilderMock);
    when(runtimeServiceMock.newMigration(any(MigrationPlan.class))).thenReturn(migrationPlanExecutionBuilderMock);
  }

  // An explicit empty instruction list in the request is accepted for generation.
  @Test
  public void generateMigrationPlanWithInitialEmptyInstructions() {
    Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instructions(Collections.<Map<String, Object>>emptyList())
      .build();

    Response response = given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(initialMigrationPlan)
      .then().expect()
      .statusCode(Status.OK.getStatusCode())
      .when()
      .post(GENERATE_MIGRATION_URL);

    verifyGenerateMigrationPlanInteraction(migrationPlanBuilderMock, initialMigrationPlan);
    verifyGenerateMigrationPlanResponse(response);
  }

  // A null instruction list in the request is likewise accepted.
  @Test
  public void generateMigrationPlanWithInitialNullInstructions() {
    Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instructions(null)
      .build();

    Response response = given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(initialMigrationPlan)
      .then().expect()
      .statusCode(Status.OK.getStatusCode())
      .when()
      .post(GENERATE_MIGRATION_URL);

    verifyGenerateMigrationPlanInteraction(migrationPlanBuilderMock, initialMigrationPlan);
    verifyGenerateMigrationPlanResponse(response);
  }

  // Omitting the instructions property entirely is also accepted.
  @Test
  public void generateMigrationPlanWithNoInitialInstructions() {
    Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .build();

    Response response = given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(initialMigrationPlan)
      .then()
      .statusCode(Status.OK.getStatusCode())
      .when()
      .post(GENERATE_MIGRATION_URL);

    verifyGenerateMigrationPlanInteraction(migrationPlanBuilderMock, initialMigrationPlan);
    verifyGenerateMigrationPlanResponse(response);
  }

  // Instructions supplied in the generation request are ignored — the plan is
  // generated from equal activities only.
  @Test
  public void generateMigrationPlanIgnoringInitialInstructions() {
    Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction("ignored", "ignored")
      .build();

    Response response = given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(initialMigrationPlan)
      .then().expect()
      .statusCode(Status.OK.getStatusCode())
      .when()
      .post(GENERATE_MIGRATION_URL);

    verifyGenerateMigrationPlanInteraction(migrationPlanBuilderMock, initialMigrationPlan);
    verifyGenerateMigrationPlanResponse(response);
  }

  // Engine rejection (null source definition id) maps to HTTP 400 with the engine message.
  @Test
  public void generateMigrationPlanWithNullSourceProcessDefinition() {
    String message = "source process definition id is null";
    MigrationPlanBuilder planBuilder = mock(MigrationPlanBuilder.class, Mockito.RETURNS_DEEP_STUBS);
    when(runtimeServiceMock.createMigrationPlan(isNull(String.class), anyString()))
      .thenReturn(planBuilder);
    when(planBuilder.mapEqualActivities().build())
      .thenThrow(new BadUserRequestException(message));

    Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(null, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID).build();

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(initialMigrationPlan)
      .then().expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("message", is(message))
      .when()
      .post(GENERATE_MIGRATION_URL);
  }

  // Unknown source definition id also maps to HTTP 400.
  @Test
  public void generateMigrationPlanWithNonExistingSourceProcessDefinition() {
    String message = "source process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist";
    MigrationPlanBuilder migrationPlanBuilder = mock(MigrationPlanBuilder.class, Mockito.RETURNS_DEEP_STUBS);
    when(runtimeServiceMock.createMigrationPlan(eq(NON_EXISTING_PROCESS_DEFINITION_ID), anyString()))
      .thenReturn(migrationPlanBuilder);
    when(
      migrationPlanBuilder
        .mapEqualActivities()
        .build())
      .thenThrow(new BadUserRequestException(message));

    Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(NON_EXISTING_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID).build();

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(initialMigrationPlan)
      .then().expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("message", is(message))
      .when()
      .post(GENERATE_MIGRATION_URL);
  }

  // Null target definition id maps to HTTP 400.
  @Test
  public void generateMigrationPlanWithNullTargetProcessDefinition() {
    String message = "target process definition id is null";
    MigrationPlanBuilder migrationPlanBuilder = mock(MigrationPlanBuilder.class, Mockito.RETURNS_DEEP_STUBS);
    when(runtimeServiceMock.createMigrationPlan(anyString(), isNull(String.class)))
      .thenReturn(migrationPlanBuilder);
    when(
      migrationPlanBuilder
        .mapEqualActivities()
        .build())
      .thenThrow(new BadUserRequestException(message));

    Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, null).build();

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(initialMigrationPlan)
      .then().expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("message", is(message))
      .when()
      .post(GENERATE_MIGRATION_URL);
  }

  // Unknown target definition id maps to HTTP 400.
  @Test
  public void generateMigrationPlanWithNonExistingTargetProcessDefinition() {
    String message = "target process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist";
    MigrationPlanBuilder migrationPlanBuilder = mock(MigrationPlanBuilder.class, Mockito.RETURNS_DEEP_STUBS);
    when(runtimeServiceMock.createMigrationPlan(anyString(), eq(NON_EXISTING_PROCESS_DEFINITION_ID)))
      .thenReturn(migrationPlanBuilder);
    when(
      migrationPlanBuilder
        .mapEqualActivities()
        .build())
      .thenThrow(new BadUserRequestException(message));

    Map<String, Object> initialMigrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, NON_EXISTING_PROCESS_DEFINITION_ID).build();

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(initialMigrationPlan)
      .then().expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("message", is(message))
      .when()
      .post(GENERATE_MIGRATION_URL);
  }

  // With updateEventTriggers=true, updateEventTriggers() must be invoked
  // directly after mapEqualActivities(), and no explicit mapping added.
  @Test
  public void generatePlanUpdateEventTriggers() {
    migrationPlanBuilderMock = new MockMigrationPlanBuilder()
      .sourceProcessDefinitionId(EXAMPLE_PROCESS_DEFINITION_ID)
      .targetProcessDefinitionId(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID, true)
      .builder();
    Map<String, Object> generationRequest = new HashMap<String, Object>();
    generationRequest.put("sourceProcessDefinitionId", EXAMPLE_PROCESS_DEFINITION_ID);
    generationRequest.put("targetProcessDefinitionId", ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID);
    generationRequest.put("updateEventTriggers", true);

    when(runtimeServiceMock.createMigrationPlan(anyString(), anyString()))
      .thenReturn(migrationPlanBuilderMock);

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(generationRequest)
      .then().expect()
      .statusCode(Status.OK.getStatusCode())
      .when()
      .post(GENERATE_MIGRATION_URL);

    verify(runtimeServiceMock).createMigrationPlan(eq(EXAMPLE_PROCESS_DEFINITION_ID), eq(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID));

    InOrder inOrder = Mockito.inOrder(migrationPlanBuilderMock);

    // the map equal activities method should be called
    inOrder.verify(migrationPlanBuilderMock).mapEqualActivities();
    inOrder.verify(migrationPlanBuilderMock, immediatelyAfter()).updateEventTriggers();
    verify(migrationPlanBuilderMock, never()).mapActivities(anyString(), anyString());
  }

  // The generated response must echo the instruction including its
  // updateEventTrigger flag.
  @Test
  public void generatePlanUpdateEventTriggerResponse() {
    migrationPlanBuilderMock = new MockMigrationPlanBuilder()
      .sourceProcessDefinitionId(EXAMPLE_PROCESS_DEFINITION_ID)
      .targetProcessDefinitionId(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
      .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID, true)
      .builder();
    when(runtimeServiceMock.createMigrationPlan(anyString(), anyString()))
      .thenReturn(migrationPlanBuilderMock);

    Map<String, Object> generationRequest = new HashMap<String, Object>();
    generationRequest.put("sourceProcessDefinitionId", EXAMPLE_PROCESS_DEFINITION_ID);
    generationRequest.put("targetProcessDefinitionId", ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID);

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(generationRequest)
      .then().expect()
      .statusCode(Status.OK.getStatusCode())
      .body("instructions[0].sourceActivityIds[0]", equalTo(EXAMPLE_ACTIVITY_ID))
      .body("instructions[0].targetActivityIds[0]", equalTo(ANOTHER_EXAMPLE_ACTIVITY_ID))
      .body("instructions[0].updateEventTrigger", equalTo(true))
      .when()
      .post(GENERATE_MIGRATION_URL);
  }

  // Happy-path synchronous execution: plan built, then executed for the listed
  // process instances; responds 204 No Content.
  @Test
  public void executeMigrationPlan() {
    Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
        .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
      .build();

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(migrationExecution)
      .then().expect()
      .statusCode(Status.NO_CONTENT.getStatusCode())
      .when()
      .post(EXECUTE_MIGRATION_URL);

    verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, (Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN));
    verifyMigrationPlanExecutionInteraction(migrationExecution);
  }

  @Test
  public void
  executeMigrationPlanWithProcessInstanceQuery() {
    // Instances can be selected by query instead of an explicit id list.
    when(runtimeServiceMock.createProcessInstanceQuery())
      .thenReturn(new ProcessInstanceQueryImpl());

    ProcessInstanceQueryDto processInstanceQuery = new ProcessInstanceQueryDto();
    processInstanceQuery.setProcessDefinitionId(EXAMPLE_PROCESS_DEFINITION_ID);

    Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
        .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
        .done()
      .processInstanceQuery(processInstanceQuery)
      .build();

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(migrationExecution)
      .then().expect()
      .statusCode(Status.NO_CONTENT.getStatusCode())
      .when()
      .post(EXECUTE_MIGRATION_URL);

    verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, (Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN));
    verifyMigrationPlanExecutionInteraction(migrationExecution);
  }

  // skipCustomListeners=true must be forwarded to the execution builder.
  @Test
  public void executeMigrationPlanSkipListeners() {
    Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID)
      .skipCustomListeners(true)
      .build();

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(migrationExecution)
      .then().expect()
      .statusCode(Status.NO_CONTENT.getStatusCode())
      .when()
      .post(EXECUTE_MIGRATION_URL);

    verifyMigrationPlanExecutionInteraction(migrationExecution);
  }

  // skipIoMappings=true must be forwarded to the execution builder.
  @Test
  public void executeMigrationPlanSkipIoMappings() {
    Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID)
      .skipIoMappings(true)
      .build();

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(migrationExecution)
      .then().expect()
      .statusCode(Status.NO_CONTENT.getStatusCode())
      .when()
      .post(EXECUTE_MIGRATION_URL);

    verifyMigrationPlanExecutionInteraction(migrationExecution);
  }

  // A plan-validation failure (here with a null instructions list) maps to
  // HTTP 400 carrying the serialized validation report.
  @Test
  public void executeMigrationPlanWithNullInstructions() {
    MigrationInstructionValidationReport instructionReport = mock(MigrationInstructionValidationReport.class);
    when(instructionReport.getMigrationInstruction()).thenReturn(null);
    when(instructionReport.getFailures()).thenReturn(Collections.singletonList("failure"));

    MigrationPlanValidationReport validationReport = mock(MigrationPlanValidationReport.class);
    when(validationReport.getInstructionReports()).thenReturn(Collections.singletonList(instructionReport));

    when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport));

    Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
      .build();

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(migrationExecution)
      .then().expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("type", equalTo(MigrationPlanValidationException.class.getSimpleName()))
      .body("message", is("fooo"))
      .body("validationReport.instructionReports", hasSize(1))
      .body("validationReport.instructionReports[0].instruction", nullValue())
      .body("validationReport.instructionReports[0].failures", hasSize(1))
      .body("validationReport.instructionReports[0].failures[0]", is("failure"))
      .when()
      .post(EXECUTE_MIGRATION_URL);
  }

  // Same validation failure with an explicitly empty instructions list.
  @Test
  public void executeMigrationPlanWithEmptyInstructions() {
    MigrationInstructionValidationReport instructionReport = mock(MigrationInstructionValidationReport.class);
    when(instructionReport.getMigrationInstruction()).thenReturn(null);
    when(instructionReport.getFailures()).thenReturn(Collections.singletonList("failure"));

    MigrationPlanValidationReport validationReport = mock(MigrationPlanValidationReport.class);
    when(validationReport.getInstructionReports()).thenReturn(Collections.singletonList(instructionReport));

    when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport));

    Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
      .build();

    ((Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN))
      .put(MigrationPlanDtoBuilder.PROP_INSTRUCTIONS, Collections.<MigrationInstructionDto>emptyList());

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(migrationExecution)
      .then().expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("type", equalTo(MigrationPlanValidationException.class.getSimpleName()))
      .body("message", is("fooo"))
      .body("validationReport.instructionReports", hasSize(1))
      .body("validationReport.instructionReports[0].instruction", nullValue())
      .body("validationReport.instructionReports[0].failures", hasSize(1))
      .body("validationReport.instructionReports[0].failures[0]", is("failure"))
      .when()
      .post(EXECUTE_MIGRATION_URL);
  }

  // Incomplete at chunk boundary: body continues past the visible source.
  @Test
  public void executeMigrationPlanWithNullSourceProcessInstanceId() {
    String message = "source process definition id is null";
    JoinedMigrationPlanBuilderMock migrationPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer());
    when(runtimeServiceMock.createMigrationPlan(isNull(String.class), anyString()))
      .thenReturn(migrationPlanBuilder);
    when(migrationPlanBuilder.build()).thenThrow(new BadUserRequestException(message));

    Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
      .migrationPlan(null, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
.instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("message", is(message)) .when() .post(EXECUTE_MIGRATION_URL); } @Test public void executeMigrationPlanWithNonExistingSourceProcessInstanceId() { String message = "source process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist"; JoinedMigrationPlanBuilderMock migrationPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer()); when(runtimeServiceMock.createMigrationPlan(eq(NON_EXISTING_PROCESS_DEFINITION_ID), anyString())) .thenReturn(migrationPlanBuilder); when(migrationPlanBuilder.build()).thenThrow(new BadUserRequestException(message)); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(NON_EXISTING_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("message", is(message)) .when() .post(EXECUTE_MIGRATION_URL); } @Test public void executeMigrationPlanWithNullTargetProcessInstanceId() { String message = "target process definition id is null"; JoinedMigrationPlanBuilderMock migrationPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer()); when(runtimeServiceMock.createMigrationPlan(anyString(), isNull(String.class))) .thenReturn(migrationPlanBuilder); when(migrationPlanBuilder.build()).thenThrow(new 
BadUserRequestException(message)); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, null) .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("message", is(message)) .when() .post(EXECUTE_MIGRATION_URL); } @Test public void executeMigrationPlanWithNonExistingTargetProcessInstanceId() { String message = "target process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist"; JoinedMigrationPlanBuilderMock migrationPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer()); when(runtimeServiceMock.createMigrationPlan(anyString(), eq(NON_EXISTING_PROCESS_DEFINITION_ID))) .thenReturn(migrationPlanBuilder); when(migrationPlanBuilder.build()).thenThrow(new BadUserRequestException(message)); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, NON_EXISTING_PROCESS_DEFINITION_ID) .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("message", is(message)) .when() .post(EXECUTE_MIGRATION_URL); } @Test public void executeMigrationPlanWithNullSourceActivityId() { String message = "sourceActivityId is null"; when(migrationPlanBuilderMock.mapActivities(isNull(String.class), anyString())) .thenThrow(new BadUserRequestException(message)); Map<String, Object> migrationExecution = 
new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(null, ANOTHER_EXAMPLE_ACTIVITY_ID) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("message", is(message)) .when() .post(EXECUTE_MIGRATION_URL); } @Test public void executeMigrationPlanWithNonExistingSourceActivityId() { String message = "sourceActivity is null"; when(migrationPlanBuilderMock.mapActivities(eq(NON_EXISTING_ACTIVITY_ID), anyString())) .thenThrow(new BadUserRequestException(message)); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(NON_EXISTING_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("message", is(message)) .when() .post(EXECUTE_MIGRATION_URL); } @Test public void executeMigrationPlanWithNullTargetActivityId() { String message = "targetActivityId is null"; when(migrationPlanBuilderMock.mapActivities(anyString(), isNull(String.class))) .thenThrow(new BadUserRequestException(message)); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(EXAMPLE_ACTIVITY_ID, null) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) 
.build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("message", is(message)) .when() .post(EXECUTE_MIGRATION_URL); } @Test public void executeMigrationPlanWithNonExistingTargetActivityId() { String message = "targetActivity is null"; when(migrationPlanBuilderMock.mapActivities(anyString(), eq(NON_EXISTING_ACTIVITY_ID))) .thenThrow(new BadUserRequestException(message)); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(EXAMPLE_ACTIVITY_ID, NON_EXISTING_ACTIVITY_ID) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("message", is(message)) .when() .post(EXECUTE_MIGRATION_URL); } @Test public void executeMigrationPlanValidationException() { MigrationInstruction migrationInstruction = mock(MigrationInstruction.class); when(migrationInstruction.getSourceActivityId()).thenReturn(EXAMPLE_ACTIVITY_ID); when(migrationInstruction.getTargetActivityId()).thenReturn(ANOTHER_EXAMPLE_ACTIVITY_ID); MigrationInstructionValidationReport instructionReport1 = mock(MigrationInstructionValidationReport.class); when(instructionReport1.getMigrationInstruction()).thenReturn(migrationInstruction); when(instructionReport1.getFailures()).thenReturn(Arrays.asList("failure1", "failure2")); MigrationInstructionValidationReport instructionReport2 = mock(MigrationInstructionValidationReport.class); when(instructionReport2.getMigrationInstruction()).thenReturn(migrationInstruction); when(instructionReport2.getFailures()).thenReturn(Arrays.asList("failure1", "failure2")); MigrationPlanValidationReport validationReport = 
mock(MigrationPlanValidationReport.class); when(validationReport.getInstructionReports()).thenReturn(Arrays.asList(instructionReport1, instructionReport2)); when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport)); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("type", equalTo(MigrationPlanValidationException.class.getSimpleName())) .body("message", is("fooo")) .body("validationReport.instructionReports", hasSize(2)) .body("validationReport.instructionReports[0].instruction.sourceActivityIds", hasSize(1)) .body("validationReport.instructionReports[0].instruction.sourceActivityIds[0]", is(EXAMPLE_ACTIVITY_ID)) .body("validationReport.instructionReports[0].instruction.targetActivityIds", hasSize(1)) .body("validationReport.instructionReports[0].instruction.targetActivityIds[0]", is(ANOTHER_EXAMPLE_ACTIVITY_ID)) .body("validationReport.instructionReports[0].failures", hasSize(2)) .body("validationReport.instructionReports[0].failures[0]", is("failure1")) .body("validationReport.instructionReports[0].failures[1]", is("failure2")) .when() .post(EXECUTE_MIGRATION_URL); } @Test public void executeMigratingProcessInstanceValidationException() { MigrationInstruction migrationInstruction = mock(MigrationInstruction.class); when(migrationInstruction.getSourceActivityId()).thenReturn(EXAMPLE_ACTIVITY_ID); when(migrationInstruction.getTargetActivityId()).thenReturn(ANOTHER_EXAMPLE_ACTIVITY_ID); MigratingActivityInstanceValidationReport instanceReport1 = mock(MigratingActivityInstanceValidationReport.class); 
when(instanceReport1.getActivityInstanceId()).thenReturn(EXAMPLE_ACTIVITY_INSTANCE_ID); when(instanceReport1.getMigrationInstruction()).thenReturn(migrationInstruction); when(instanceReport1.getSourceScopeId()).thenReturn(EXAMPLE_ACTIVITY_ID); when(instanceReport1.getFailures()).thenReturn(Arrays.asList("failure1", "failure2")); MigratingTransitionInstanceValidationReport instanceReport2 = mock(MigratingTransitionInstanceValidationReport.class); when(instanceReport2.getTransitionInstanceId()).thenReturn("transitionInstanceId"); when(instanceReport2.getMigrationInstruction()).thenReturn(migrationInstruction); when(instanceReport2.getSourceScopeId()).thenReturn(EXAMPLE_ACTIVITY_ID); when(instanceReport2.getFailures()).thenReturn(Arrays.asList("failure1", "failure2")); MigratingProcessInstanceValidationReport processInstanceReport = mock(MigratingProcessInstanceValidationReport.class); when(processInstanceReport.getProcessInstanceId()).thenReturn(EXAMPLE_PROCESS_INSTANCE_ID); when(processInstanceReport.getFailures()).thenReturn(Arrays.asList("failure1", "failure2")); when(processInstanceReport.getActivityInstanceReports()).thenReturn(Arrays.asList(instanceReport1)); when(processInstanceReport.getTransitionInstanceReports()).thenReturn(Arrays.asList(instanceReport2)); doThrow(new MigratingProcessInstanceValidationException("fooo", processInstanceReport)) .when(migrationPlanExecutionBuilderMock).execute(); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("type", equalTo(MigratingProcessInstanceValidationException.class.getSimpleName())) .body("message", 
is("fooo")) .body("validationReport.processInstanceId", is(EXAMPLE_PROCESS_INSTANCE_ID)) .body("validationReport.failures", hasSize(2)) .body("validationReport.failures[0]", is("failure1")) .body("validationReport.failures[1]", is("failure2")) .body("validationReport.activityInstanceValidationReports", hasSize(1)) .body("validationReport.activityInstanceValidationReports[0].migrationInstruction.sourceActivityIds", hasSize(1)) .body("validationReport.activityInstanceValidationReports[0].migrationInstruction.sourceActivityIds[0]", is(EXAMPLE_ACTIVITY_ID)) .body("validationReport.activityInstanceValidationReports[0].migrationInstruction.targetActivityIds", hasSize(1)) .body("validationReport.activityInstanceValidationReports[0].migrationInstruction.targetActivityIds[0]", is(ANOTHER_EXAMPLE_ACTIVITY_ID)) .body("validationReport.activityInstanceValidationReports[0].activityInstanceId", is(EXAMPLE_ACTIVITY_INSTANCE_ID)) .body("validationReport.activityInstanceValidationReports[0].sourceScopeId", is(EXAMPLE_ACTIVITY_ID)) .body("validationReport.activityInstanceValidationReports[0].failures", hasSize(2)) .body("validationReport.activityInstanceValidationReports[0].failures[0]", is("failure1")) .body("validationReport.activityInstanceValidationReports[0].failures[1]", is("failure2")) .body("validationReport.transitionInstanceValidationReports", hasSize(1)) .body("validationReport.transitionInstanceValidationReports[0].migrationInstruction.sourceActivityIds", hasSize(1)) .body("validationReport.transitionInstanceValidationReports[0].migrationInstruction.sourceActivityIds[0]", is(EXAMPLE_ACTIVITY_ID)) .body("validationReport.transitionInstanceValidationReports[0].migrationInstruction.targetActivityIds", hasSize(1)) .body("validationReport.transitionInstanceValidationReports[0].migrationInstruction.targetActivityIds[0]", is(ANOTHER_EXAMPLE_ACTIVITY_ID)) .body("validationReport.transitionInstanceValidationReports[0].transitionInstanceId", is("transitionInstanceId")) 
.body("validationReport.transitionInstanceValidationReports[0].sourceScopeId", is(EXAMPLE_ACTIVITY_ID)) .body("validationReport.transitionInstanceValidationReports[0].failures", hasSize(2)) .body("validationReport.transitionInstanceValidationReports[0].failures[0]", is("failure1")) .body("validationReport.transitionInstanceValidationReports[0].failures[1]", is("failure2")) .when() .post(EXECUTE_MIGRATION_URL); } @Test public void executeMigrationPlanAsync() { Batch batchMock = createMockBatch(); when(migrationPlanExecutionBuilderMock.executeAsync()).thenReturn(batchMock); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.OK.getStatusCode()) .body("id", is(EXAMPLE_BATCH_ID)) .body("type", is(EXAMPLE_BATCH_TYPE)) .body("totalJobs", is(EXAMPLE_BATCH_TOTAL_JOBS)) .body("batchJobsPerSeed", is(EXAMPLE_BATCH_JOBS_PER_SEED)) .body("invocationsPerBatchJob", is(EXAMPLE_INVOCATIONS_PER_BATCH_JOB)) .body("seedJobDefinitionId", is(EXAMPLE_SEED_JOB_DEFINITION_ID)) .body("monitorJobDefinitionId", is(EXAMPLE_MONITOR_JOB_DEFINITION_ID)) .body("batchJobDefinitionId", is(EXAMPLE_BATCH_JOB_DEFINITION_ID)) .body("tenantId", is(EXAMPLE_TENANT_ID)) .when() .post(EXECUTE_MIGRATION_ASYNC_URL); verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, (Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN)); verifyMigrationPlanAsyncExecutionInteraction(migrationExecution); } @Test public void executeMigrationPlanAsyncWithProcessInstanceQuery() { when(runtimeServiceMock.createProcessInstanceQuery()) 
.thenReturn(new ProcessInstanceQueryImpl()); ProcessInstanceQueryDto processInstanceQuery = new ProcessInstanceQueryDto(); processInstanceQuery.setProcessDefinitionId(EXAMPLE_PROCESS_DEFINITION_ID); Batch batchMock = createMockBatch(); when(migrationPlanExecutionBuilderMock.executeAsync()).thenReturn(batchMock); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID) .done() .processInstanceQuery(processInstanceQuery) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.OK.getStatusCode()) .body("id", is(EXAMPLE_BATCH_ID)) .body("type", is(EXAMPLE_BATCH_TYPE)) .body("totalJobs", is(EXAMPLE_BATCH_TOTAL_JOBS)) .body("batchJobsPerSeed", is(EXAMPLE_BATCH_JOBS_PER_SEED)) .body("invocationsPerBatchJob", is(EXAMPLE_INVOCATIONS_PER_BATCH_JOB)) .body("seedJobDefinitionId", is(EXAMPLE_SEED_JOB_DEFINITION_ID)) .body("monitorJobDefinitionId", is(EXAMPLE_MONITOR_JOB_DEFINITION_ID)) .body("batchJobDefinitionId", is(EXAMPLE_BATCH_JOB_DEFINITION_ID)) .body("tenantId", is(EXAMPLE_TENANT_ID)) .when() .post(EXECUTE_MIGRATION_ASYNC_URL); verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, (Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN)); verifyMigrationPlanAsyncExecutionInteraction(migrationExecution); } @Test public void executeMigrationPlanAsyncSkipListeners() { Batch batchMock = createMockBatch(); when(migrationPlanExecutionBuilderMock.executeAsync()).thenReturn(batchMock); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .done() 
.processInstances(EXAMPLE_PROCESS_INSTANCE_ID)
      .skipCustomListeners(true)
      .build();

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(migrationExecution)
    .then().expect()
      .statusCode(Status.OK.getStatusCode())
    .when()
      .post(EXECUTE_MIGRATION_ASYNC_URL);

    // Verify the execution builder chain received exactly the options from the payload.
    verifyMigrationPlanAsyncExecutionInteraction(migrationExecution);
  }

  @Test
  public void executeMigrationPlanAsyncSkipIoMappings() {
    // Async execution returns a batch resource (HTTP 200 with batch JSON), unlike the
    // synchronous endpoint, which answers 204 No Content.
    Batch batchMock = createMockBatch();
    when(migrationPlanExecutionBuilderMock.executeAsync()).thenReturn(batchMock);

    // Request payload: a minimal migration plan plus the skipIoMappings flag.
    Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID)
      .skipIoMappings(true)
      .build();

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(migrationExecution)
    .then().expect()
      .statusCode(Status.OK.getStatusCode())
    .when()
      .post(EXECUTE_MIGRATION_ASYNC_URL);

    verifyMigrationPlanAsyncExecutionInteraction(migrationExecution);
  }

  @Test
  public void executeMigrationPlanAsyncWithNullInstructions() {
    // Simulate an engine-side validation failure for a plan without instructions:
    // the instruction report carries no instruction (null) and a single failure message.
    MigrationInstructionValidationReport instructionReport = mock(MigrationInstructionValidationReport.class);
    when(instructionReport.getMigrationInstruction()).thenReturn(null);
    when(instructionReport.getFailures()).thenReturn(Collections.singletonList("failure"));

    MigrationPlanValidationReport validationReport = mock(MigrationPlanValidationReport.class);
    when(validationReport.getInstructionReports()).thenReturn(Collections.singletonList(instructionReport));

    when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport));

    // NOTE: no .instruction(...) calls here -- the plan's instruction list stays null.
    Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
      .build();

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
.body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("type", equalTo(MigrationPlanValidationException.class.getSimpleName())) .body("message", is("fooo")) .body("validationReport.instructionReports", hasSize(1)) .body("validationReport.instructionReports[0].instruction", nullValue()) .body("validationReport.instructionReports[0].failures", hasSize(1)) .body("validationReport.instructionReports[0].failures[0]", is("failure")) .when() .post(EXECUTE_MIGRATION_ASYNC_URL); } @Test public void executeMigrationPlanAsyncWithEmptyInstructions() { MigrationInstructionValidationReport instructionReport = mock(MigrationInstructionValidationReport.class); when(instructionReport.getMigrationInstruction()).thenReturn(null); when(instructionReport.getFailures()).thenReturn(Collections.singletonList("failure")); MigrationPlanValidationReport validationReport = mock(MigrationPlanValidationReport.class); when(validationReport.getInstructionReports()).thenReturn(Collections.singletonList(instructionReport)); when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport)); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); ((Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN)) .put(MigrationPlanDtoBuilder.PROP_INSTRUCTIONS, Collections.<MigrationInstructionDto>emptyList()); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("type", equalTo(MigrationPlanValidationException.class.getSimpleName())) .body("message", is("fooo")) .body("validationReport.instructionReports", hasSize(1)) .body("validationReport.instructionReports[0].instruction", nullValue()) 
.body("validationReport.instructionReports[0].failures", hasSize(1)) .body("validationReport.instructionReports[0].failures[0]", is("failure")) .when() .post(EXECUTE_MIGRATION_ASYNC_URL); } @Test public void executeMigrationPlanAsyncWithNullSourceProcessDefinitionId() { String message = "source process definition id is null"; JoinedMigrationPlanBuilderMock migrationPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer()); when(runtimeServiceMock.createMigrationPlan(isNull(String.class), anyString())) .thenReturn(migrationPlanBuilder); when(migrationPlanBuilder.build()).thenThrow(new BadUserRequestException(message)); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(null, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("message", is(message)) .when() .post(EXECUTE_MIGRATION_ASYNC_URL); } @Test public void executeMigrationPlanAsyncWithNonExistingSourceProcessDefinitionId() { String message = "source process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist"; JoinedMigrationPlanBuilderMock migrationPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer()); when(runtimeServiceMock.createMigrationPlan(eq(NON_EXISTING_PROCESS_DEFINITION_ID), anyString())) .thenReturn(migrationPlanBuilder); when(migrationPlanBuilder.build()).thenThrow(new BadUserRequestException(message)); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(NON_EXISTING_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) 
.instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("message", is(message)) .when() .post(EXECUTE_MIGRATION_ASYNC_URL); } @Test public void executeMigrationPlanAsyncWithNullTargetProcessDefinitionId() { String message = "target process definition id is null"; JoinedMigrationPlanBuilderMock migrationPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer()); when(runtimeServiceMock.createMigrationPlan(anyString(), isNull(String.class))) .thenReturn(migrationPlanBuilder); when(migrationPlanBuilder.build()).thenThrow(new BadUserRequestException(message)); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, null) .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("message", is(message)) .when() .post(EXECUTE_MIGRATION_ASYNC_URL); } @Test public void executeMigrationPlanAsyncWithNonExistingTargetProcessDefinitionId() { String message = "target process definition with id " + NON_EXISTING_PROCESS_DEFINITION_ID + " does not exist"; JoinedMigrationPlanBuilderMock migrationPlanBuilder = mock(JoinedMigrationPlanBuilderMock.class, new FluentAnswer()); when(runtimeServiceMock.createMigrationPlan(anyString(), eq(NON_EXISTING_PROCESS_DEFINITION_ID))) .thenReturn(migrationPlanBuilder); when(migrationPlanBuilder.build()).thenThrow(new BadUserRequestException(message)); Map<String, Object> migrationExecution = new 
MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, NON_EXISTING_PROCESS_DEFINITION_ID) .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("message", is(message)) .when() .post(EXECUTE_MIGRATION_ASYNC_URL); } @Test public void executeMigrationPlanAsyncWithNullSourceActivityId() { String message = "sourceActivityId is null"; when(migrationPlanBuilderMock.mapActivities(isNull(String.class), anyString())) .thenThrow(new BadUserRequestException(message)); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(null, ANOTHER_EXAMPLE_ACTIVITY_ID) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("message", is(message)) .when() .post(EXECUTE_MIGRATION_ASYNC_URL); } @Test public void executeMigrationPlanAsyncWithNonExistingSourceActivityId() { String message = "sourceActivity is null"; when(migrationPlanBuilderMock.mapActivities(eq(NON_EXISTING_ACTIVITY_ID), anyString())) .thenThrow(new BadUserRequestException(message)); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(NON_EXISTING_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, 
ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
      .build();

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(migrationExecution)
    .then().expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("message", is(message))
    .when()
      .post(EXECUTE_MIGRATION_ASYNC_URL);
  }

  @Test
  public void executeMigrationPlanAsyncWithNullTargetActivityId() {
    // A null target activity id in an instruction is rejected by the engine; the REST
    // API must translate the resulting BadUserRequestException into a 400 response.
    String message = "targetActivityId is null";
    when(migrationPlanBuilderMock.mapActivities(anyString(), isNull(String.class)))
      .thenThrow(new BadUserRequestException(message));

    Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, null)
        .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
      .build();

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(migrationExecution)
    .then().expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("message", is(message))
    .when()
      .post(EXECUTE_MIGRATION_ASYNC_URL);
  }

  @Test
  public void executeMigrationPlanAsyncWithNonExistingTargetActivityId() {
    // An unknown target activity id is rejected by the engine during plan creation;
    // expect a 400 response carrying the engine's error message.
    String message = "targetActivity is null";
    when(migrationPlanBuilderMock.mapActivities(anyString(), eq(NON_EXISTING_ACTIVITY_ID)))
      .thenThrow(new BadUserRequestException(message));

    Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder()
      .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID)
        .instruction(EXAMPLE_ACTIVITY_ID, NON_EXISTING_ACTIVITY_ID)
        .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID)
        .done()
      .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID)
      .build();

    given()
      .contentType(POST_JSON_CONTENT_TYPE)
      .body(migrationExecution)
    .then().expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("message", is(message))
    .when()
      // FIX: this async test previously posted to the synchronous EXECUTE_MIGRATION_URL
      // (copy-paste from the sync variant); it must target the async endpoint so the
      // test actually exercises the resource it is named after, like its siblings.
      .post(EXECUTE_MIGRATION_ASYNC_URL);
  }

  @Test
  public void
executeMigrationPlanAsyncValidationException() { MigrationInstruction migrationInstruction = mock(MigrationInstruction.class); when(migrationInstruction.getSourceActivityId()).thenReturn(EXAMPLE_ACTIVITY_ID); when(migrationInstruction.getTargetActivityId()).thenReturn(ANOTHER_EXAMPLE_ACTIVITY_ID); MigrationInstructionValidationReport instructionReport1 = mock(MigrationInstructionValidationReport.class); when(instructionReport1.getMigrationInstruction()).thenReturn(migrationInstruction); when(instructionReport1.getFailures()).thenReturn(Arrays.asList("failure1", "failure2")); MigrationInstructionValidationReport instructionReport2 = mock(MigrationInstructionValidationReport.class); when(instructionReport2.getMigrationInstruction()).thenReturn(migrationInstruction); when(instructionReport2.getFailures()).thenReturn(Arrays.asList("failure1", "failure2")); MigrationPlanValidationReport validationReport = mock(MigrationPlanValidationReport.class); when(validationReport.getInstructionReports()).thenReturn(Arrays.asList(instructionReport1, instructionReport2)); when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport)); Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.BAD_REQUEST.getStatusCode()) .body("type", equalTo(MigrationPlanValidationException.class.getSimpleName())) .body("message", is("fooo")) .body("validationReport.instructionReports", hasSize(2)) .body("validationReport.instructionReports[0].instruction.sourceActivityIds", hasSize(1)) .body("validationReport.instructionReports[0].instruction.sourceActivityIds[0]", 
is(EXAMPLE_ACTIVITY_ID)) .body("validationReport.instructionReports[0].instruction.targetActivityIds", hasSize(1)) .body("validationReport.instructionReports[0].instruction.targetActivityIds[0]", is(ANOTHER_EXAMPLE_ACTIVITY_ID)) .body("validationReport.instructionReports[0].failures", hasSize(2)) .body("validationReport.instructionReports[0].failures[0]", is("failure1")) .body("validationReport.instructionReports[0].failures[1]", is("failure2")) .when() .post(EXECUTE_MIGRATION_ASYNC_URL); } @Test public void executeMigrationPlanUpdateEventTrigger() { Map<String, Object> migrationExecution = new MigrationExecutionDtoBuilder() .migrationPlan(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID, true) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID, false) .done() .processInstances(EXAMPLE_PROCESS_INSTANCE_ID, ANOTHER_EXAMPLE_PROCESS_INSTANCE_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationExecution) .then().expect() .statusCode(Status.NO_CONTENT.getStatusCode()) .when() .post(EXECUTE_MIGRATION_URL); verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, (Map<String, Object>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_MIGRATION_PLAN)); verifyMigrationPlanExecutionInteraction(migrationExecution); } @Test public void validateMigrationPlan() { Map<String, Object> migrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .instruction(ANOTHER_EXAMPLE_ACTIVITY_ID, EXAMPLE_ACTIVITY_ID, true) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationPlan) .then().expect() .statusCode(Status.OK.getStatusCode()) .body("instructionReports", hasSize(0)) .when() .post(VALIDATE_MIGRATION_URL); verifyCreateMigrationPlanInteraction(migrationPlanBuilderMock, migrationPlan); } @Test public void 
validateMigrationPlanValidationException() { MigrationInstruction migrationInstruction = mock(MigrationInstruction.class); when(migrationInstruction.getSourceActivityId()).thenReturn(EXAMPLE_ACTIVITY_ID); when(migrationInstruction.getTargetActivityId()).thenReturn(ANOTHER_EXAMPLE_ACTIVITY_ID); MigrationInstructionValidationReport instructionReport1 = mock(MigrationInstructionValidationReport.class); when(instructionReport1.getMigrationInstruction()).thenReturn(migrationInstruction); when(instructionReport1.getFailures()).thenReturn(Arrays.asList("failure1", "failure2")); MigrationInstructionValidationReport instructionReport2 = mock(MigrationInstructionValidationReport.class); when(instructionReport2.getMigrationInstruction()).thenReturn(migrationInstruction); when(instructionReport2.getFailures()).thenReturn(Arrays.asList("failure1", "failure2")); MigrationPlanValidationReport validationReport = mock(MigrationPlanValidationReport.class); when(validationReport.getInstructionReports()).thenReturn(Arrays.asList(instructionReport1, instructionReport2)); when(migrationPlanBuilderMock.build()).thenThrow(new MigrationPlanValidationException("fooo", validationReport)); Map<String, Object> migrationPlan = new MigrationPlanDtoBuilder(EXAMPLE_PROCESS_DEFINITION_ID, ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID) .instruction(EXAMPLE_ACTIVITY_ID, ANOTHER_EXAMPLE_ACTIVITY_ID) .build(); given() .contentType(POST_JSON_CONTENT_TYPE) .body(migrationPlan) .then().expect() .statusCode(Status.OK.getStatusCode()) .body("instructionReports", hasSize(2)) .body("instructionReports[0].instruction.sourceActivityIds", hasSize(1)) .body("instructionReports[0].instruction.sourceActivityIds[0]", is(EXAMPLE_ACTIVITY_ID)) .body("instructionReports[0].instruction.targetActivityIds", hasSize(1)) .body("instructionReports[0].instruction.targetActivityIds[0]", is(ANOTHER_EXAMPLE_ACTIVITY_ID)) .body("instructionReports[0].failures", hasSize(2)) .body("instructionReports[0].failures[0]", is("failure1")) 
.body("instructionReports[0].failures[1]", is("failure2")) .when() .post(VALIDATE_MIGRATION_URL); } protected void verifyGenerateMigrationPlanResponse(Response response) { String responseContent = response.asString(); String sourceProcessDefinitionId = from(responseContent).getString("sourceProcessDefinitionId"); String targetProcessDefinitionId = from(responseContent).getString("targetProcessDefinitionId"); List<Map<String, Object>> instructions = from(responseContent).getList("instructions"); assertThat(sourceProcessDefinitionId).isEqualTo(EXAMPLE_PROCESS_DEFINITION_ID); assertThat(targetProcessDefinitionId).isEqualTo(ANOTHER_EXAMPLE_PROCESS_DEFINITION_ID); assertThat(instructions).hasSize(2); assertThat(instructions.get(0)) .containsEntry("sourceActivityIds", Collections.singletonList(EXAMPLE_ACTIVITY_ID)) .containsEntry("targetActivityIds", Collections.singletonList(ANOTHER_EXAMPLE_ACTIVITY_ID)) .containsEntry("updateEventTrigger", false); assertThat(instructions.get(1)) .containsEntry("sourceActivityIds", Collections.singletonList(ANOTHER_EXAMPLE_ACTIVITY_ID)) .containsEntry("targetActivityIds", Collections.singletonList(EXAMPLE_ACTIVITY_ID)) .containsEntry("updateEventTrigger", false); } protected void verifyGenerateMigrationPlanInteraction(MigrationPlanBuilder migrationPlanBuilderMock, Map<String, Object> initialMigrationPlan) { verify(runtimeServiceMock).createMigrationPlan(eq(initialMigrationPlan.get(MigrationPlanDtoBuilder.PROP_SOURCE_PROCESS_DEFINITION_ID).toString()), eq(initialMigrationPlan.get(MigrationPlanDtoBuilder.PROP_TARGET_PROCESS_DEFINITION_ID).toString())); // the map equal activities method should be called verify(migrationPlanBuilderMock).mapEqualActivities(); // other instructions are ignored verify(migrationPlanBuilderMock, never()).mapActivities(anyString(), anyString()); } protected void verifyCreateMigrationPlanInteraction(JoinedMigrationPlanBuilderMock migrationPlanBuilderMock, Map<String, Object> migrationPlan) { 
verify(runtimeServiceMock).createMigrationPlan(migrationPlan.get(MigrationPlanDtoBuilder.PROP_SOURCE_PROCESS_DEFINITION_ID).toString(), migrationPlan.get(MigrationPlanDtoBuilder.PROP_TARGET_PROCESS_DEFINITION_ID).toString()); // the map equal activities method should not be called verify(migrationPlanBuilderMock, never()).mapEqualActivities(); // all instructions are added List<Map<String, Object>> instructions = (List<Map<String, Object>>) migrationPlan.get(MigrationPlanDtoBuilder.PROP_INSTRUCTIONS); if (instructions != null) { for (Map<String, Object> migrationInstructionDto : instructions) { InOrder inOrder = Mockito.inOrder(migrationPlanBuilderMock); String sourceActivityId = ((List<String>) migrationInstructionDto.get(MigrationInstructionDtoBuilder.PROP_SOURCE_ACTIVITY_IDS)).get(0); String targetActivityId = ((List<String>) migrationInstructionDto.get(MigrationInstructionDtoBuilder.PROP_TARGET_ACTIVITY_IDS)).get(0); inOrder.verify(migrationPlanBuilderMock).mapActivities(eq(sourceActivityId), eq(targetActivityId)); Boolean updateEventTrigger = (Boolean) migrationInstructionDto.get(MigrationInstructionDtoBuilder.PROP_UPDATE_EVENT_TRIGGER); if (Boolean.TRUE.equals(updateEventTrigger)) { inOrder.verify(migrationPlanBuilderMock, immediatelyAfter()).updateEventTrigger(); } } } } protected void verifyMigrationPlanExecutionInteraction(Map<String, Object> migrationExecution) { InOrder inOrder = inOrder(runtimeServiceMock, migrationPlanExecutionBuilderMock); inOrder.verify(runtimeServiceMock).newMigration(any(MigrationPlan.class)); verifyMigrationExecutionBuilderInteraction(inOrder, migrationExecution); inOrder.verify(migrationPlanExecutionBuilderMock).execute(); inOrder.verifyNoMoreInteractions(); } protected void verifyMigrationPlanAsyncExecutionInteraction(Map<String, Object> migrationExecution) { InOrder inOrder = inOrder(runtimeServiceMock, migrationPlanExecutionBuilderMock); inOrder.verify(runtimeServiceMock).newMigration(any(MigrationPlan.class)); 
verifyMigrationExecutionBuilderInteraction(inOrder, migrationExecution); inOrder.verify(migrationPlanExecutionBuilderMock).executeAsync(); Mockito.verifyNoMoreInteractions(migrationPlanExecutionBuilderMock); } protected void verifyMigrationExecutionBuilderInteraction(InOrder inOrder, Map<String, Object> migrationExecution) { List<String> processInstanceIds = ((List<String>) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_PROCESS_INSTANCE_IDS)); inOrder.verify(migrationPlanExecutionBuilderMock).processInstanceIds(eq(processInstanceIds)); ProcessInstanceQueryDto processInstanceQuery = (ProcessInstanceQueryDto) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_PROCESS_INSTANCE_QUERY); if (processInstanceQuery != null) { verifyMigrationPlanExecutionProcessInstanceQuery(inOrder); } Boolean skipCustomListeners = (Boolean) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_SKIP_CUSTOM_LISTENERS); if (Boolean.TRUE.equals(skipCustomListeners)) { inOrder.verify(migrationPlanExecutionBuilderMock).skipCustomListeners(); } Boolean skipIoMappings = (Boolean) migrationExecution.get(MigrationExecutionDtoBuilder.PROP_SKIP_IO_MAPPINGS); if (Boolean.TRUE.equals(skipIoMappings)) { inOrder.verify(migrationPlanExecutionBuilderMock).skipIoMappings(); } } protected void verifyMigrationPlanExecutionProcessInstanceQuery(InOrder inOrder) { ArgumentCaptor<ProcessInstanceQuery> queryCapture = ArgumentCaptor.forClass(ProcessInstanceQuery.class); inOrder.verify(migrationPlanExecutionBuilderMock).processInstanceQuery(queryCapture.capture()); ProcessInstanceQueryImpl actualQuery = (ProcessInstanceQueryImpl) queryCapture.getValue(); assertThat(actualQuery).isNotNull(); assertThat(actualQuery.getProcessDefinitionId()).isEqualTo(EXAMPLE_PROCESS_DEFINITION_ID); } }
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/
package org.camunda.bpm.engine.test.api.runtime;

import static org.camunda.bpm.engine.test.api.runtime.TestOrderingUtil.executionByProcessDefinitionId;
import static org.camunda.bpm.engine.test.api.runtime.TestOrderingUtil.executionByProcessDefinitionKey;
import static org.camunda.bpm.engine.test.api.runtime.TestOrderingUtil.executionByProcessInstanceId;
import static org.camunda.bpm.engine.test.api.runtime.TestOrderingUtil.hierarchical;
import static org.camunda.bpm.engine.test.api.runtime.TestOrderingUtil.inverted;
import static org.camunda.bpm.engine.test.api.runtime.TestOrderingUtil.verifySorting;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.camunda.bpm.engine.ProcessEngineException;
import org.camunda.bpm.engine.impl.persistence.entity.ExecutionEntity;
import org.camunda.bpm.engine.impl.test.PluggableProcessEngineTestCase;
import org.camunda.bpm.engine.repository.ProcessDefinitionQuery;
import org.camunda.bpm.engine.runtime.EventSubscription;
import org.camunda.bpm.engine.runtime.Execution;
import org.camunda.bpm.engine.runtime.ExecutionQuery;
import org.camunda.bpm.engine.runtime.Incident;
import org.camunda.bpm.engine.runtime.ProcessInstance;
import org.camunda.bpm.engine.test.Deployment;
import org.camunda.bpm.engine.variable.Variables;
import org.junit.Assert;

/**
 * Tests for {@link ExecutionQuery}: filtering, paging, sorting and
 * variable-value comparisons over executions.
 *
 * @author Joram Barrez
 * @author Frederik Heremans
 */
public class ExecutionQueryTest extends PluggableProcessEngineTestCase {

  // key of the process with concurrent paths (3 executions per instance, per the assertions below)
  private static String CONCURRENT_PROCESS_KEY = "concurrent";
  // key of the single-task process (1 execution per instance)
  private static String SEQUENTIAL_PROCESS_KEY = "oneTaskProcess";

  private List<String> concurrentProcessInstanceIds;
  private List<String> sequentialProcessInstanceIds;

  // Deploys both processes and starts 4 concurrent instances (business keys
  // BUSINESS-KEY-0..3) plus 1 sequential instance => 13 executions overall.
  protected void setUp() throws Exception {
    super.setUp();
    repositoryService.createDeployment()
      .addClasspathResource("org/camunda/bpm/engine/test/api/runtime/oneTaskProcess.bpmn20.xml")
      .addClasspathResource("org/camunda/bpm/engine/test/api/runtime/concurrentExecution.bpmn20.xml")
      .deploy();

    concurrentProcessInstanceIds = new ArrayList<String>();
    sequentialProcessInstanceIds = new ArrayList<String>();

    for (int i = 0; i < 4; i++) {
      concurrentProcessInstanceIds.add(runtimeService.startProcessInstanceByKey(CONCURRENT_PROCESS_KEY, "BUSINESS-KEY-" + i).getId());
    }
    sequentialProcessInstanceIds.add(runtimeService.startProcessInstanceByKey(SEQUENTIAL_PROCESS_KEY).getId());
  }

  // Removes all deployments (cascading into instances) so each test starts clean.
  protected void tearDown() throws Exception {
    for (org.camunda.bpm.engine.repository.Deployment deployment : repositoryService.createDeploymentQuery().list()) {
      repositoryService.deleteDeployment(deployment.getId(), true);
    }
    super.tearDown();
  }

  public void testQueryByProcessDefinitionKey() {
    // Concurrent process with 3 executions for each process instance
    assertEquals(12, runtimeService.createExecutionQuery().processDefinitionKey(CONCURRENT_PROCESS_KEY).list().size());
    assertEquals(1, runtimeService.createExecutionQuery().processDefinitionKey(SEQUENTIAL_PROCESS_KEY).list().size());
  }

  public void testQueryByInvalidProcessDefinitionKey() {
    ExecutionQuery query = runtimeService.createExecutionQuery().processDefinitionKey("invalid");
    assertNull(query.singleResult());
    assertEquals(0, query.list().size());
    assertEquals(0, query.count());
  }

  public void testQueryByProcessInstanceId() {
    for (String processInstanceId : concurrentProcessInstanceIds) {
      ExecutionQuery query = runtimeService.createExecutionQuery().processInstanceId(processInstanceId);
      assertEquals(3, query.list().size());
      assertEquals(3, query.count());
    }
    assertEquals(1, runtimeService.createExecutionQuery().processInstanceId(sequentialProcessInstanceIds.get(0)).list().size());
  }

  public void testQueryByInvalidProcessInstanceId() {
    ExecutionQuery query = runtimeService.createExecutionQuery().processInstanceId("invalid");
    assertNull(query.singleResult());
    assertEquals(0, query.list().size());
    assertEquals(0, query.count());
  }

  public void testQueryExecutionId() {
    Execution execution = runtimeService.createExecutionQuery().processDefinitionKey(SEQUENTIAL_PROCESS_KEY).singleResult();
    // NOTE(review): this asserts the query builder object, which is never null; it
    // presumably intended to assert .singleResult() — confirm before changing.
    assertNotNull(runtimeService.createExecutionQuery().executionId(execution.getId()));
  }

  public void testQueryByInvalidExecutionId() {
    ExecutionQuery query = runtimeService.createExecutionQuery().executionId("invalid");
    assertNull(query.singleResult());
    assertEquals(0, query.list().size());
    assertEquals(0, query.count());
  }

  public void testQueryByActivityId() {
    ExecutionQuery query = runtimeService.createExecutionQuery().activityId("receivePayment");
    assertEquals(4, query.list().size());
    assertEquals(4, query.count());

    try {
      // 4 matches, so singleResult() must throw
      assertNull(query.singleResult());
      fail();
    } catch (ProcessEngineException e) { }
  }

  public void testQueryByInvalidActivityId() {
    ExecutionQuery query = runtimeService.createExecutionQuery().activityId("invalid");
    assertNull(query.singleResult());
    assertEquals(0, query.list().size());
    assertEquals(0, query.count());
  }

  // Paging over the 12 concurrent-process executions (13 executions in total).
  public void testQueryPaging() {
    assertEquals(13, runtimeService.createExecutionQuery().count());
    assertEquals(4, runtimeService.createExecutionQuery().processDefinitionKey(CONCURRENT_PROCESS_KEY).listPage(0, 4).size());
    assertEquals(1, runtimeService.createExecutionQuery().processDefinitionKey(CONCURRENT_PROCESS_KEY).listPage(2, 1).size());
    assertEquals(10, runtimeService.createExecutionQuery().processDefinitionKey(CONCURRENT_PROCESS_KEY).listPage(1, 10).size());
    assertEquals(12, runtimeService.createExecutionQuery().processDefinitionKey(CONCURRENT_PROCESS_KEY).listPage(0, 20).size());
  }

  @SuppressWarnings("unchecked")
  public void testQuerySorting() {
    // 13 executions: 3 for each concurrent, 1 for the sequential
    List<Execution> executions = runtimeService.createExecutionQuery().orderByProcessInstanceId().asc().list();
    assertEquals(13, executions.size());
    verifySorting(executions, executionByProcessInstanceId());

    executions = runtimeService.createExecutionQuery().orderByProcessDefinitionId().asc().list();
    assertEquals(13, executions.size());
    verifySorting(executions, executionByProcessDefinitionId(processEngine));

    executions = runtimeService.createExecutionQuery().orderByProcessDefinitionKey().asc().list();
    assertEquals(13, executions.size());
    verifySorting(executions, executionByProcessDefinitionKey(processEngine));

    executions = runtimeService.createExecutionQuery().orderByProcessInstanceId().desc().list();
    assertEquals(13, executions.size());
    verifySorting(executions, inverted(executionByProcessInstanceId()));

    executions = runtimeService.createExecutionQuery().orderByProcessDefinitionId().desc().list();
    assertEquals(13, executions.size());
    verifySorting(executions, inverted(executionByProcessDefinitionId(processEngine)));

    executions = runtimeService.createExecutionQuery().orderByProcessDefinitionKey().desc().list();
    assertEquals(13, executions.size());
    verifySorting(executions, inverted(executionByProcessDefinitionKey(processEngine)));

    executions = runtimeService.createExecutionQuery().processDefinitionKey(CONCURRENT_PROCESS_KEY).orderByProcessDefinitionId().asc().list();
    assertEquals(12, executions.size());
    verifySorting(executions, executionByProcessDefinitionId(processEngine));

    // NOTE(review): this desc ordering is verified with the non-inverted comparator;
    // all 12 executions share one definition id, so either direction passes — confirm intent.
    executions = runtimeService.createExecutionQuery().processDefinitionKey(CONCURRENT_PROCESS_KEY).orderByProcessDefinitionId().desc().list();
    assertEquals(12, executions.size());
    verifySorting(executions, executionByProcessDefinitionId(processEngine));

    executions = runtimeService.createExecutionQuery().processDefinitionKey(CONCURRENT_PROCESS_KEY).orderByProcessDefinitionKey().asc()
        .orderByProcessInstanceId().desc().list();
    assertEquals(12, executions.size());
    verifySorting(executions, hierarchical(executionByProcessDefinitionKey(processEngine), inverted(executionByProcessInstanceId())));
  }

  // Ordering without a direction (asc/desc) must be rejected.
  public void testQueryInvalidSorting() {
    try {
      runtimeService.createExecutionQuery().orderByProcessDefinitionKey().list();
      fail();
    } catch (ProcessEngineException e) { }
  }

  public void testQueryByBusinessKey() {
    assertEquals(3, runtimeService.createExecutionQuery().processDefinitionKey(CONCURRENT_PROCESS_KEY).processInstanceBusinessKey("BUSINESS-KEY-1").list().size());
    assertEquals(3, runtimeService.createExecutionQuery().processDefinitionKey(CONCURRENT_PROCESS_KEY).processInstanceBusinessKey("BUSINESS-KEY-2").list().size());
    assertEquals(0, runtimeService.createExecutionQuery().processDefinitionKey(CONCURRENT_PROCESS_KEY).processInstanceBusinessKey("NON-EXISTING").list().size());
  }

  @Deployment(resources={
    "org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml"})
  public void testQueryStringVariable() {
    Map<String, Object> vars = new HashMap<String, Object>();
    vars.put("stringVar", "abcdef");
    ProcessInstance processInstance1 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars);

    vars = new HashMap<String, Object>();
    vars.put("stringVar", "abcdef");
    vars.put("stringVar2", "ghijkl");
    ProcessInstance processInstance2 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars);

    vars = new HashMap<String, Object>();
    vars.put("stringVar", "azerty");
    ProcessInstance processInstance3 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars);

    // Test EQUAL on single string variable, should result in 2 matches
    ExecutionQuery query = runtimeService.createExecutionQuery().variableValueEquals("stringVar", "abcdef");
    List<Execution> executions = query.list();
    Assert.assertNotNull(executions);
    Assert.assertEquals(2, executions.size());

    // Test EQUAL on two string variables, should result in single match
    query = runtimeService.createExecutionQuery().variableValueEquals("stringVar", "abcdef").variableValueEquals("stringVar2", "ghijkl");
    Execution execution = query.singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance2.getId(), execution.getId());

    // Test NOT_EQUAL, should return only 1 execution
    execution = runtimeService.createExecutionQuery().variableValueNotEquals("stringVar", "abcdef").singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance3.getId(), execution.getId());

    // Test GREATER_THAN, should return only matching 'azerty'
    execution = runtimeService.createExecutionQuery().variableValueGreaterThan("stringVar", "abcdef").singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance3.getId(), execution.getId());

    execution = runtimeService.createExecutionQuery().variableValueGreaterThan("stringVar", "z").singleResult();
    Assert.assertNull(execution);

    // Test GREATER_THAN_OR_EQUAL, should return 3 results
    assertEquals(3, runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("stringVar", "abcdef").count());
    assertEquals(0, runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("stringVar", "z").count());

    // Test LESS_THAN, should return 2 results
    executions = runtimeService.createExecutionQuery().variableValueLessThan("stringVar", "abcdeg").list();
    Assert.assertEquals(2, executions.size());
    List<String> expecedIds = Arrays.asList(processInstance1.getId(), processInstance2.getId());
    List<String> ids = new ArrayList<String>(Arrays.asList(executions.get(0).getId(), executions.get(1).getId()));
    ids.removeAll(expecedIds);
    assertTrue(ids.isEmpty());

    assertEquals(0, runtimeService.createExecutionQuery().variableValueLessThan("stringVar", "abcdef").count());
    assertEquals(3, runtimeService.createExecutionQuery().variableValueLessThanOrEqual("stringVar", "z").count());

    // Test LESS_THAN_OR_EQUAL
    executions = runtimeService.createExecutionQuery().variableValueLessThanOrEqual("stringVar", "abcdef").list();
    Assert.assertEquals(2, executions.size());
    expecedIds = Arrays.asList(processInstance1.getId(), processInstance2.getId());
    ids = new ArrayList<String>(Arrays.asList(executions.get(0).getId(), executions.get(1).getId()));
    ids.removeAll(expecedIds);
    assertTrue(ids.isEmpty());

    assertEquals(3, runtimeService.createExecutionQuery().variableValueLessThanOrEqual("stringVar", "z").count());
    assertEquals(0, runtimeService.createExecutionQuery().variableValueLessThanOrEqual("stringVar", "aa").count());

    // Test LIKE
    execution = runtimeService.createExecutionQuery().variableValueLike("stringVar", "azert%").singleResult();
    assertNotNull(execution);
    assertEquals(processInstance3.getId(), execution.getId());

    execution = runtimeService.createExecutionQuery().variableValueLike("stringVar", "%y").singleResult();
    assertNotNull(execution);
    assertEquals(processInstance3.getId(), execution.getId());

    execution = runtimeService.createExecutionQuery().variableValueLike("stringVar", "%zer%").singleResult();
    assertNotNull(execution);
    assertEquals(processInstance3.getId(), execution.getId());

    assertEquals(3, runtimeService.createExecutionQuery().variableValueLike("stringVar", "a%").count());
    assertEquals(0, runtimeService.createExecutionQuery().variableValueLike("stringVar", "%x%").count());

    runtimeService.deleteProcessInstance(processInstance1.getId(), "test");
    runtimeService.deleteProcessInstance(processInstance2.getId(), "test");
    runtimeService.deleteProcessInstance(processInstance3.getId(), "test");
  }

  @Deployment(resources={
    "org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml"})
  public void testQueryLongVariable() {
    Map<String, Object> vars = new HashMap<String, Object>();
    vars.put("longVar", 12345L);
    ProcessInstance processInstance1 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars);

    vars = new HashMap<String, Object>();
    vars.put("longVar", 12345L);
    vars.put("longVar2", 67890L);
    ProcessInstance processInstance2 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars);

    vars = new HashMap<String, Object>();
    vars.put("longVar", 55555L);
    ProcessInstance processInstance3 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars);

    // Query on single long variable, should result in 2 matches
    ExecutionQuery query = runtimeService.createExecutionQuery().variableValueEquals("longVar", 12345L);
    List<Execution> executions = query.list();
    Assert.assertNotNull(executions);
    Assert.assertEquals(2, executions.size());

    // Query on two long variables, should result in single match
    query = runtimeService.createExecutionQuery().variableValueEquals("longVar", 12345L).variableValueEquals("longVar2", 67890L);
    Execution execution = query.singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance2.getId(), execution.getId());

    // Query with unexisting variable value
    execution = runtimeService.createExecutionQuery().variableValueEquals("longVar", 999L).singleResult();
    Assert.assertNull(execution);

    // Test NOT_EQUALS
    execution = runtimeService.createExecutionQuery().variableValueNotEquals("longVar", 12345L).singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance3.getId(), execution.getId());

    // Test GREATER_THAN
    execution = runtimeService.createExecutionQuery().variableValueGreaterThan("longVar", 44444L).singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance3.getId(), execution.getId());
    Assert.assertEquals(0, runtimeService.createExecutionQuery().variableValueGreaterThan("longVar", 55555L).count());
    Assert.assertEquals(3, runtimeService.createExecutionQuery().variableValueGreaterThan("longVar",1L).count());

    // Test GREATER_THAN_OR_EQUAL
    execution = runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("longVar", 44444L).singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance3.getId(), execution.getId());
    execution = runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("longVar", 55555L).singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance3.getId(), execution.getId());
    Assert.assertEquals(3, runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("longVar",1L).count());

    // Test LESS_THAN
    executions = runtimeService.createExecutionQuery().variableValueLessThan("longVar", 55555L).list();
    Assert.assertEquals(2, executions.size());
    List<String> expecedIds = Arrays.asList(processInstance1.getId(), processInstance2.getId());
    List<String> ids = new ArrayList<String>(Arrays.asList(executions.get(0).getId(), executions.get(1).getId()));
    ids.removeAll(expecedIds);
    assertTrue(ids.isEmpty());
    Assert.assertEquals(0, runtimeService.createExecutionQuery().variableValueLessThan("longVar", 12345L).count());
    Assert.assertEquals(3, runtimeService.createExecutionQuery().variableValueLessThan("longVar",66666L).count());

    // Test LESS_THAN_OR_EQUAL
    executions = runtimeService.createExecutionQuery().variableValueLessThanOrEqual("longVar", 55555L).list();
    Assert.assertEquals(3, executions.size());
    Assert.assertEquals(0, runtimeService.createExecutionQuery().variableValueLessThanOrEqual("longVar", 12344L).count());

    runtimeService.deleteProcessInstance(processInstance1.getId(), "test");
    runtimeService.deleteProcessInstance(processInstance2.getId(), "test");
    runtimeService.deleteProcessInstance(processInstance3.getId(), "test");
  }

  @Deployment(resources={
    "org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml"})
  public void testQueryDoubleVariable() {
    Map<String, Object> vars = new HashMap<String, Object>();
    vars.put("doubleVar", 12345.6789);
    ProcessInstance processInstance1 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars);

    vars = new HashMap<String, Object>();
    vars.put("doubleVar", 12345.6789);
    vars.put("doubleVar2", 9876.54321);
    ProcessInstance processInstance2 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars);

    vars = new HashMap<String, Object>();
    vars.put("doubleVar", 55555.5555);
    ProcessInstance processInstance3 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars);

    // Query on single double variable, should result in 2 matches
    ExecutionQuery query = runtimeService.createExecutionQuery().variableValueEquals("doubleVar", 12345.6789);
    List<Execution> executions = query.list();
    Assert.assertNotNull(executions);
    Assert.assertEquals(2, executions.size());

    // Query on two double variables, should result in single value
    query = runtimeService.createExecutionQuery().variableValueEquals("doubleVar", 12345.6789).variableValueEquals("doubleVar2", 9876.54321);
    Execution execution = query.singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance2.getId(), execution.getId());

    // Query with unexisting variable value
    execution = runtimeService.createExecutionQuery().variableValueEquals("doubleVar", 9999.99).singleResult();
    Assert.assertNull(execution);

    // Test NOT_EQUALS
    execution = runtimeService.createExecutionQuery().variableValueNotEquals("doubleVar", 12345.6789).singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance3.getId(), execution.getId());

    // Test GREATER_THAN
    execution = runtimeService.createExecutionQuery().variableValueGreaterThan("doubleVar", 44444.4444).singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance3.getId(), execution.getId());
    Assert.assertEquals(0, runtimeService.createExecutionQuery().variableValueGreaterThan("doubleVar", 55555.5555).count());
    Assert.assertEquals(3, runtimeService.createExecutionQuery().variableValueGreaterThan("doubleVar",1.234).count());

    // Test GREATER_THAN_OR_EQUAL
    execution = runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("doubleVar", 44444.4444).singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance3.getId(), execution.getId());
    execution = runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("doubleVar", 55555.5555).singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance3.getId(), execution.getId());
    Assert.assertEquals(3, runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("doubleVar",1.234).count());

    // Test LESS_THAN
    executions = runtimeService.createExecutionQuery().variableValueLessThan("doubleVar", 55555.5555).list();
    Assert.assertEquals(2, executions.size());
    List<String> expecedIds = Arrays.asList(processInstance1.getId(), processInstance2.getId());
    List<String> ids = new ArrayList<String>(Arrays.asList(executions.get(0).getId(), executions.get(1).getId()));
    ids.removeAll(expecedIds);
    assertTrue(ids.isEmpty());
    Assert.assertEquals(0, runtimeService.createExecutionQuery().variableValueLessThan("doubleVar", 12345.6789).count());
    Assert.assertEquals(3, runtimeService.createExecutionQuery().variableValueLessThan("doubleVar",66666.6666).count());

    // Test LESS_THAN_OR_EQUAL
    executions = runtimeService.createExecutionQuery().variableValueLessThanOrEqual("doubleVar", 55555.5555).list();
    Assert.assertEquals(3, executions.size());
    Assert.assertEquals(0, runtimeService.createExecutionQuery().variableValueLessThanOrEqual("doubleVar", 12344.6789).count());

    runtimeService.deleteProcessInstance(processInstance1.getId(), "test");
    runtimeService.deleteProcessInstance(processInstance2.getId(), "test");
    runtimeService.deleteProcessInstance(processInstance3.getId(), "test");
  }

  @Deployment(resources={
    "org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml"})
  public void testQueryIntegerVariable() {
    Map<String, Object> vars = new HashMap<String, Object>();
    vars.put("integerVar", 12345);
    ProcessInstance processInstance1 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars);

    vars = new HashMap<String, Object>();
    vars.put("integerVar", 12345);
    vars.put("integerVar2", 67890);
    ProcessInstance processInstance2 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars);

    vars = new HashMap<String, Object>();
    vars.put("integerVar", 55555);
    ProcessInstance processInstance3 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars);

    // Query on single integer variable, should result in 2 matches
    ExecutionQuery query = runtimeService.createExecutionQuery().variableValueEquals("integerVar", 12345);
    List<Execution> executions = query.list();
    Assert.assertNotNull(executions);
    Assert.assertEquals(2, executions.size());

    // Query on two integer variables, should result in single value
    query = runtimeService.createExecutionQuery().variableValueEquals("integerVar", 12345).variableValueEquals("integerVar2", 67890);
    Execution execution = query.singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance2.getId(), execution.getId());

    // Query with unexisting variable value
    execution = runtimeService.createExecutionQuery().variableValueEquals("integerVar", 9999).singleResult();
    Assert.assertNull(execution);

    // Test NOT_EQUALS
    execution = runtimeService.createExecutionQuery().variableValueNotEquals("integerVar", 12345).singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance3.getId(), execution.getId());

    // Test GREATER_THAN
    execution = runtimeService.createExecutionQuery().variableValueGreaterThan("integerVar", 44444).singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance3.getId(), execution.getId());
    Assert.assertEquals(0, runtimeService.createExecutionQuery().variableValueGreaterThan("integerVar", 55555).count());
    Assert.assertEquals(3, runtimeService.createExecutionQuery().variableValueGreaterThan("integerVar",1).count());

    // Test GREATER_THAN_OR_EQUAL
    execution = runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("integerVar", 44444).singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance3.getId(), execution.getId());
    execution = runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("integerVar", 55555).singleResult();
    Assert.assertNotNull(execution);
    Assert.assertEquals(processInstance3.getId(), execution.getId());
    Assert.assertEquals(3, runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("integerVar",1).count());

    // Test LESS_THAN
    executions = runtimeService.createExecutionQuery().variableValueLessThan("integerVar", 55555).list();
    Assert.assertEquals(2, executions.size());
    List<String> expecedIds = Arrays.asList(processInstance1.getId(), processInstance2.getId());
    List<String> ids = new ArrayList<String>(Arrays.asList(executions.get(0).getId(), executions.get(1).getId()));
    ids.removeAll(expecedIds);
    assertTrue(ids.isEmpty());
    Assert.assertEquals(0, runtimeService.createExecutionQuery().variableValueLessThan("integerVar", 12345).count());
    Assert.assertEquals(3, runtimeService.createExecutionQuery().variableValueLessThan("integerVar",66666).count());

    // Test LESS_THAN_OR_EQUAL
    executions = runtimeService.createExecutionQuery().variableValueLessThanOrEqual("integerVar", 55555).list();
    Assert.assertEquals(3, executions.size());
    Assert.assertEquals(0, runtimeService.createExecutionQuery().variableValueLessThanOrEqual("integerVar", 12344).count());

    runtimeService.deleteProcessInstance(processInstance1.getId(), "test");
    runtimeService.deleteProcessInstance(processInstance2.getId(), "test");
    runtimeService.deleteProcessInstance(processInstance3.getId(), "test");
  }

  @Deployment(resources={
    "org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml"})
  public void testQueryShortVariable() {
    Map<String, Object> vars = new HashMap<String, Object>();
    short shortVar = 1234;
    vars.put("shortVar", shortVar);
    ProcessInstance processInstance1 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars);

    short shortVar2 = 6789;
    vars = new HashMap<String, Object>();
    vars.put("shortVar", shortVar);
    vars.put("shortVar2", shortVar2);
    ProcessInstance processInstance2 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars);

    vars = new HashMap<String, Object>();
    vars.put("shortVar", (short)5555);
    // (method continues beyond this chunk)
    ProcessInstance processInstance3 = runtimeService.startProcessInstanceByKey("oneTaskProcess",
vars); // Query on single short variable, should result in 2 matches ExecutionQuery query = runtimeService.createExecutionQuery().variableValueEquals("shortVar", shortVar); List<Execution> executions = query.list(); Assert.assertNotNull(executions); Assert.assertEquals(2, executions.size()); // Query on two short variables, should result in single value query = runtimeService.createExecutionQuery().variableValueEquals("shortVar", shortVar).variableValueEquals("shortVar2", shortVar2); Execution execution = query.singleResult(); Assert.assertNotNull(execution); Assert.assertEquals(processInstance2.getId(), execution.getId()); // Query with unexisting variable value short unexistingValue = (short)9999; execution = runtimeService.createExecutionQuery().variableValueEquals("shortVar", unexistingValue).singleResult(); Assert.assertNull(execution); // Test NOT_EQUALS execution = runtimeService.createExecutionQuery().variableValueNotEquals("shortVar", (short)1234).singleResult(); Assert.assertNotNull(execution); Assert.assertEquals(processInstance3.getId(), execution.getId()); // Test GREATER_THAN execution = runtimeService.createExecutionQuery().variableValueGreaterThan("shortVar", (short)4444).singleResult(); Assert.assertNotNull(execution); Assert.assertEquals(processInstance3.getId(), execution.getId()); Assert.assertEquals(0, runtimeService.createExecutionQuery().variableValueGreaterThan("shortVar", (short)5555).count()); Assert.assertEquals(3, runtimeService.createExecutionQuery().variableValueGreaterThan("shortVar",(short)1).count()); // Test GREATER_THAN_OR_EQUAL execution = runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("shortVar", (short)4444).singleResult(); Assert.assertNotNull(execution); Assert.assertEquals(processInstance3.getId(), execution.getId()); execution = runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("shortVar", (short)5555).singleResult(); Assert.assertNotNull(execution); 
Assert.assertEquals(processInstance3.getId(), execution.getId()); Assert.assertEquals(3, runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("shortVar",(short)1).count()); // Test LESS_THAN executions = runtimeService.createExecutionQuery().variableValueLessThan("shortVar", (short)5555).list(); Assert.assertEquals(2, executions.size()); List<String> expecedIds = Arrays.asList(processInstance1.getId(), processInstance2.getId()); List<String> ids = new ArrayList<String>(Arrays.asList(executions.get(0).getId(), executions.get(1).getId())); ids.removeAll(expecedIds); assertTrue(ids.isEmpty()); Assert.assertEquals(0, runtimeService.createExecutionQuery().variableValueLessThan("shortVar", (short)1234).count()); Assert.assertEquals(3, runtimeService.createExecutionQuery().variableValueLessThan("shortVar",(short)6666).count()); // Test LESS_THAN_OR_EQUAL executions = runtimeService.createExecutionQuery().variableValueLessThanOrEqual("shortVar", (short)5555).list(); Assert.assertEquals(3, executions.size()); Assert.assertEquals(0, runtimeService.createExecutionQuery().variableValueLessThanOrEqual("shortVar", (short)1233).count()); runtimeService.deleteProcessInstance(processInstance1.getId(), "test"); runtimeService.deleteProcessInstance(processInstance2.getId(), "test"); runtimeService.deleteProcessInstance(processInstance3.getId(), "test"); } @Deployment(resources={ "org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml"}) public void testQueryDateVariable() throws Exception { Map<String, Object> vars = new HashMap<String, Object>(); Date date1 = Calendar.getInstance().getTime(); vars.put("dateVar", date1); ProcessInstance processInstance1 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars); Date date2 = Calendar.getInstance().getTime(); vars = new HashMap<String, Object>(); vars.put("dateVar", date1); vars.put("dateVar2", date2); ProcessInstance processInstance2 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars); 
Calendar nextYear = Calendar.getInstance(); nextYear.add(Calendar.YEAR, 1); vars = new HashMap<String, Object>(); vars.put("dateVar",nextYear.getTime()); ProcessInstance processInstance3 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars); Calendar nextMonth = Calendar.getInstance(); nextMonth.add(Calendar.MONTH, 1); Calendar twoYearsLater = Calendar.getInstance(); twoYearsLater.add(Calendar.YEAR, 2); Calendar oneYearAgo = Calendar.getInstance(); oneYearAgo.add(Calendar.YEAR, -1); // Query on single short variable, should result in 2 matches ExecutionQuery query = runtimeService.createExecutionQuery().variableValueEquals("dateVar", date1); List<Execution> executions = query.list(); Assert.assertNotNull(executions); Assert.assertEquals(2, executions.size()); // Query on two short variables, should result in single value query = runtimeService.createExecutionQuery().variableValueEquals("dateVar", date1).variableValueEquals("dateVar2", date2); Execution execution = query.singleResult(); Assert.assertNotNull(execution); Assert.assertEquals(processInstance2.getId(), execution.getId()); // Query with unexisting variable value Date unexistingDate = new SimpleDateFormat("dd/MM/yyyy hh:mm:ss").parse("01/01/1989 12:00:00"); execution = runtimeService.createExecutionQuery().variableValueEquals("dateVar", unexistingDate).singleResult(); Assert.assertNull(execution); // Test NOT_EQUALS execution = runtimeService.createExecutionQuery().variableValueNotEquals("dateVar", date1).singleResult(); Assert.assertNotNull(execution); Assert.assertEquals(processInstance3.getId(), execution.getId()); // Test GREATER_THAN execution = runtimeService.createExecutionQuery().variableValueGreaterThan("dateVar", nextMonth.getTime()).singleResult(); Assert.assertNotNull(execution); Assert.assertEquals(processInstance3.getId(), execution.getId()); Assert.assertEquals(0, runtimeService.createExecutionQuery().variableValueGreaterThan("dateVar", nextYear.getTime()).count()); 
Assert.assertEquals(3, runtimeService.createExecutionQuery().variableValueGreaterThan("dateVar", oneYearAgo.getTime()).count()); // Test GREATER_THAN_OR_EQUAL execution = runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("dateVar", nextMonth.getTime()).singleResult(); Assert.assertNotNull(execution); Assert.assertEquals(processInstance3.getId(), execution.getId()); execution = runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("dateVar", nextYear.getTime()).singleResult(); Assert.assertNotNull(execution); Assert.assertEquals(processInstance3.getId(), execution.getId()); Assert.assertEquals(3, runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("dateVar",oneYearAgo.getTime()).count()); // Test LESS_THAN executions = runtimeService.createExecutionQuery().variableValueLessThan("dateVar", nextYear.getTime()).list(); Assert.assertEquals(2, executions.size()); List<String> expecedIds = Arrays.asList(processInstance1.getId(), processInstance2.getId()); List<String> ids = new ArrayList<String>(Arrays.asList(executions.get(0).getId(), executions.get(1).getId())); ids.removeAll(expecedIds); assertTrue(ids.isEmpty()); Assert.assertEquals(0, runtimeService.createExecutionQuery().variableValueLessThan("dateVar", date1).count()); Assert.assertEquals(3, runtimeService.createExecutionQuery().variableValueLessThan("dateVar", twoYearsLater.getTime()).count()); // Test LESS_THAN_OR_EQUAL executions = runtimeService.createExecutionQuery().variableValueLessThanOrEqual("dateVar", nextYear.getTime()).list(); Assert.assertEquals(3, executions.size()); Assert.assertEquals(0, runtimeService.createExecutionQuery().variableValueLessThanOrEqual("dateVar", oneYearAgo.getTime()).count()); runtimeService.deleteProcessInstance(processInstance1.getId(), "test"); runtimeService.deleteProcessInstance(processInstance2.getId(), "test"); runtimeService.deleteProcessInstance(processInstance3.getId(), "test"); } @Deployment(resources={ 
"org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml"}) public void testBooleanVariable() throws Exception { // TEST EQUALS HashMap<String, Object> vars = new HashMap<String, Object>(); vars.put("booleanVar", true); ProcessInstance processInstance1 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars); vars = new HashMap<String, Object>(); vars.put("booleanVar", false); ProcessInstance processInstance2 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars); List<ProcessInstance> instances = runtimeService.createProcessInstanceQuery().variableValueEquals("booleanVar", true).list(); assertNotNull(instances); assertEquals(1, instances.size()); assertEquals(processInstance1.getId(), instances.get(0).getId()); instances = runtimeService.createProcessInstanceQuery().variableValueEquals("booleanVar", false).list(); assertNotNull(instances); assertEquals(1, instances.size()); assertEquals(processInstance2.getId(), instances.get(0).getId()); // TEST NOT_EQUALS instances = runtimeService.createProcessInstanceQuery().variableValueNotEquals("booleanVar", true).list(); assertNotNull(instances); assertEquals(1, instances.size()); assertEquals(processInstance2.getId(), instances.get(0).getId()); instances = runtimeService.createProcessInstanceQuery().variableValueNotEquals("booleanVar", false).list(); assertNotNull(instances); assertEquals(1, instances.size()); assertEquals(processInstance1.getId(), instances.get(0).getId()); // Test unsupported operations try { runtimeService.createProcessInstanceQuery().variableValueGreaterThan("booleanVar", true); fail("Excetion expected"); } catch(ProcessEngineException ae) { assertTextPresent("Booleans and null cannot be used in 'greater than' condition", ae.getMessage()); } try { runtimeService.createProcessInstanceQuery().variableValueGreaterThanOrEqual("booleanVar", true); fail("Excetion expected"); } catch(ProcessEngineException ae) { assertTextPresent("Booleans and null cannot be used in 'greater than or 
equal' condition", ae.getMessage()); } try { runtimeService.createProcessInstanceQuery().variableValueLessThan("booleanVar", true); fail("Excetion expected"); } catch(ProcessEngineException ae) { assertTextPresent("Booleans and null cannot be used in 'less than' condition", ae.getMessage()); } try { runtimeService.createProcessInstanceQuery().variableValueLessThanOrEqual("booleanVar", true); fail("Excetion expected"); } catch(ProcessEngineException ae) { assertTextPresent("Booleans and null cannot be used in 'less than or equal' condition", ae.getMessage()); } runtimeService.deleteProcessInstance(processInstance1.getId(), "test"); runtimeService.deleteProcessInstance(processInstance2.getId(), "test"); } @Deployment(resources={ "org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml"}) public void testQueryVariablesUpdatedToNullValue() { // Start process instance with different types of variables Map<String, Object> variables = new HashMap<String, Object>(); variables.put("longVar", 928374L); variables.put("shortVar", (short) 123); variables.put("integerVar", 1234); variables.put("stringVar", "coca-cola"); variables.put("booleanVar", true); variables.put("dateVar", new Date()); variables.put("nullVar", null); ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", variables); ExecutionQuery query = runtimeService.createExecutionQuery() .variableValueEquals("longVar", null) .variableValueEquals("shortVar", null) .variableValueEquals("integerVar", null) .variableValueEquals("stringVar", null) .variableValueEquals("booleanVar", null) .variableValueEquals("dateVar", null); ExecutionQuery notQuery = runtimeService.createExecutionQuery() .variableValueNotEquals("longVar", null) .variableValueNotEquals("shortVar", null) .variableValueNotEquals("integerVar", null) .variableValueNotEquals("stringVar", null) .variableValueNotEquals("booleanVar", null) .variableValueNotEquals("dateVar", null); assertNull(query.singleResult()); 
assertNotNull(notQuery.singleResult()); // Set all existing variables values to null runtimeService.setVariable(processInstance.getId(), "longVar", null); runtimeService.setVariable(processInstance.getId(), "shortVar", null); runtimeService.setVariable(processInstance.getId(), "integerVar", null); runtimeService.setVariable(processInstance.getId(), "stringVar", null); runtimeService.setVariable(processInstance.getId(), "booleanVar", null); runtimeService.setVariable(processInstance.getId(), "dateVar", null); runtimeService.setVariable(processInstance.getId(), "nullVar", null); Execution queryResult = query.singleResult(); assertNotNull(queryResult); assertEquals(processInstance.getId(), queryResult.getId()); assertNull(notQuery.singleResult()); } @Deployment(resources={ "org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml"}) public void testQueryNullVariable() throws Exception { Map<String, Object> vars = new HashMap<String, Object>(); vars.put("nullVar", null); ProcessInstance processInstance1 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars); vars = new HashMap<String, Object>(); vars.put("nullVar", "notnull"); ProcessInstance processInstance2 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars); vars = new HashMap<String, Object>(); vars.put("nullVarLong", "notnull"); ProcessInstance processInstance3 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars); vars = new HashMap<String, Object>(); vars.put("nullVarDouble", "notnull"); ProcessInstance processInstance4 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars); vars = new HashMap<String, Object>(); vars.put("nullVarByte", "testbytes".getBytes()); ProcessInstance processInstance5 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars); // Query on null value, should return one value ExecutionQuery query = runtimeService.createExecutionQuery().variableValueEquals("nullVar", null); List<Execution> executions = query.list(); 
Assert.assertNotNull(executions); Assert.assertEquals(1, executions.size()); Assert.assertEquals(processInstance1.getId(), executions.get(0).getId()); // Test NOT_EQUALS null Assert.assertEquals(1, runtimeService.createExecutionQuery().variableValueNotEquals("nullVar", null).count()); Assert.assertEquals(1, runtimeService.createExecutionQuery().variableValueNotEquals("nullVarLong", null).count()); Assert.assertEquals(1, runtimeService.createExecutionQuery().variableValueNotEquals("nullVarDouble", null).count()); // When a byte-array refrence is present, the variable is not considered null Assert.assertEquals(1, runtimeService.createExecutionQuery().variableValueNotEquals("nullVarByte", null).count()); // All other variable queries with null should throw exception try { runtimeService.createExecutionQuery().variableValueGreaterThan("nullVar", null); fail("Excetion expected"); } catch(ProcessEngineException ae) { assertTextPresent("Booleans and null cannot be used in 'greater than' condition", ae.getMessage()); } try { runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual("nullVar", null); fail("Excetion expected"); } catch(ProcessEngineException ae) { assertTextPresent("Booleans and null cannot be used in 'greater than or equal' condition", ae.getMessage()); } try { runtimeService.createExecutionQuery().variableValueLessThan("nullVar", null); fail("Excetion expected"); } catch(ProcessEngineException ae) { assertTextPresent("Booleans and null cannot be used in 'less than' condition", ae.getMessage()); } try { runtimeService.createExecutionQuery().variableValueLessThanOrEqual("nullVar", null); fail("Excetion expected"); } catch(ProcessEngineException ae) { assertTextPresent("Booleans and null cannot be used in 'less than or equal' condition", ae.getMessage()); } try { runtimeService.createExecutionQuery().variableValueLike("nullVar", null); fail("Excetion expected"); } catch(ProcessEngineException ae) { assertTextPresent("Booleans and null cannot be 
used in 'like' condition", ae.getMessage()); } runtimeService.deleteProcessInstance(processInstance1.getId(), "test"); runtimeService.deleteProcessInstance(processInstance2.getId(), "test"); runtimeService.deleteProcessInstance(processInstance3.getId(), "test"); runtimeService.deleteProcessInstance(processInstance4.getId(), "test"); runtimeService.deleteProcessInstance(processInstance5.getId(), "test"); } @Deployment(resources={ "org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml"}) public void testQueryInvalidTypes() throws Exception { Map<String, Object> vars = new HashMap<String, Object>(); vars.put("bytesVar", "test".getBytes()); vars.put("serializableVar",new DummySerializable()); ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars); try { runtimeService.createExecutionQuery() .variableValueEquals("bytesVar", "test".getBytes()) .list(); fail("Expected exception"); } catch(ProcessEngineException ae) { assertTextPresent("Variables of type ByteArray cannot be used to query", ae.getMessage()); } try { runtimeService.createExecutionQuery() .variableValueEquals("serializableVar", new DummySerializable()) .list(); fail("Expected exception"); } catch(ProcessEngineException ae) { assertTextPresent("Object values cannot be used to query", ae.getMessage()); } runtimeService.deleteProcessInstance(processInstance.getId(), "test"); } public void testQueryVariablesNullNameArgument() { try { runtimeService.createExecutionQuery().variableValueEquals(null, "value"); fail("Expected exception"); } catch(ProcessEngineException ae) { assertTextPresent("name is null", ae.getMessage()); } try { runtimeService.createExecutionQuery().variableValueNotEquals(null, "value"); fail("Expected exception"); } catch(ProcessEngineException ae) { assertTextPresent("name is null", ae.getMessage()); } try { runtimeService.createExecutionQuery().variableValueGreaterThan(null, "value"); fail("Expected exception"); } catch(ProcessEngineException ae) 
{ assertTextPresent("name is null", ae.getMessage()); } try { runtimeService.createExecutionQuery().variableValueGreaterThanOrEqual(null, "value"); fail("Expected exception"); } catch(ProcessEngineException ae) { assertTextPresent("name is null", ae.getMessage()); } try { runtimeService.createExecutionQuery().variableValueLessThan(null, "value"); fail("Expected exception"); } catch(ProcessEngineException ae) { assertTextPresent("name is null", ae.getMessage()); } try { runtimeService.createExecutionQuery().variableValueLessThanOrEqual(null, "value"); fail("Expected exception"); } catch(ProcessEngineException ae) { assertTextPresent("name is null", ae.getMessage()); } try { runtimeService.createExecutionQuery().variableValueLike(null, "value"); fail("Expected exception"); } catch(ProcessEngineException ae) { assertTextPresent("name is null", ae.getMessage()); } } @Deployment(resources={ "org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml"}) public void testQueryAllVariableTypes() throws Exception { Map<String, Object> vars = new HashMap<String, Object>(); vars.put("nullVar", null); vars.put("stringVar", "string"); vars.put("longVar", 10L); vars.put("doubleVar", 1.2); vars.put("integerVar", 1234); vars.put("booleanVar", true); vars.put("shortVar", (short) 123); ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars); ExecutionQuery query = runtimeService.createExecutionQuery() .variableValueEquals("nullVar", null) .variableValueEquals("stringVar", "string") .variableValueEquals("longVar", 10L) .variableValueEquals("doubleVar", 1.2) .variableValueEquals("integerVar", 1234) .variableValueEquals("booleanVar", true) .variableValueEquals("shortVar", (short) 123); List<Execution> executions = query.list(); Assert.assertNotNull(executions); Assert.assertEquals(1, executions.size()); Assert.assertEquals(processInstance.getId(), executions.get(0).getId()); runtimeService.deleteProcessInstance(processInstance.getId(), "test"); 
} @Deployment(resources={ "org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml"}) public void testClashingValues() throws Exception { Map<String, Object> vars = new HashMap<String, Object>(); vars.put("var", 1234L); ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars); Map<String, Object> vars2 = new HashMap<String, Object>(); vars2.put("var", 1234); ProcessInstance processInstance2 = runtimeService.startProcessInstanceByKey("oneTaskProcess", vars2); List<Execution> executions = runtimeService.createExecutionQuery() .processDefinitionKey("oneTaskProcess") .variableValueEquals("var", 1234L) .list(); assertEquals(1, executions.size()); assertEquals(processInstance.getId(), executions.get(0).getProcessInstanceId()); runtimeService.deleteProcessInstance(processInstance.getId(), "test"); runtimeService.deleteProcessInstance(processInstance2.getId(), "test"); } @Deployment public void testQueryBySignalSubscriptionName() { runtimeService.startProcessInstanceByKey("catchSignal"); // it finds subscribed instances Execution execution = runtimeService.createExecutionQuery() .signalEventSubscription("alert") .singleResult(); assertNotNull(execution); // test query for nonexisting subscription execution = runtimeService.createExecutionQuery() .signalEventSubscription("nonExisitng") .singleResult(); assertNull(execution); // it finds more than one runtimeService.startProcessInstanceByKey("catchSignal"); assertEquals(2, runtimeService.createExecutionQuery().signalEventSubscription("alert").count()); } @Deployment public void testQueryBySignalSubscriptionNameBoundary() { runtimeService.startProcessInstanceByKey("signalProces"); // it finds subscribed instances Execution execution = runtimeService.createExecutionQuery() .signalEventSubscription("Test signal") .singleResult(); assertNotNull(execution); // test query for nonexisting subscription execution = runtimeService.createExecutionQuery() 
.signalEventSubscription("nonExisitng") .singleResult(); assertNull(execution); // it finds more than one runtimeService.startProcessInstanceByKey("signalProces"); assertEquals(2, runtimeService.createExecutionQuery().signalEventSubscription("Test signal").count()); } public void testNativeQuery() { // just test that the query will be constructed and executed, details are tested in the TaskQueryTest assertEquals("ACT_RU_EXECUTION", managementService.getTableName(Execution.class)); long executionCount = runtimeService.createExecutionQuery().count(); assertEquals(executionCount, runtimeService.createNativeExecutionQuery().sql("SELECT * FROM " + managementService.getTableName(Execution.class)).list().size()); assertEquals(executionCount, runtimeService.createNativeExecutionQuery().sql("SELECT count(*) FROM " + managementService.getTableName(Execution.class)).count()); } public void testNativeQueryPaging() { assertEquals(5, runtimeService.createNativeExecutionQuery().sql("SELECT * FROM " + managementService.getTableName(Execution.class)).listPage(1, 5).size()); assertEquals(1, runtimeService.createNativeExecutionQuery().sql("SELECT * FROM " + managementService.getTableName(Execution.class)).listPage(2, 1).size()); } @Deployment(resources={"org/camunda/bpm/engine/test/api/runtime/concurrentExecution.bpmn20.xml"}) public void testExecutionQueryWithProcessVariable() { Map<String, Object> variables = new HashMap<String, Object>(); variables.put("x", "parent"); ProcessInstance pi = runtimeService.startProcessInstanceByKey("concurrent", variables); List<Execution> concurrentExecutions = runtimeService.createExecutionQuery().processInstanceId(pi.getId()).list(); assertEquals(3, concurrentExecutions.size()); for (Execution execution : concurrentExecutions) { if (!((ExecutionEntity)execution).isProcessInstanceExecution()) { // only the concurrent executions, not the root one, would be cooler to query that directly, see http://jira.codehaus.org/browse/ACT-1373 
runtimeService.setVariableLocal(execution.getId(), "x", "child"); } } assertEquals(2, runtimeService.createExecutionQuery().processInstanceId(pi.getId()).variableValueEquals("x", "child").count()); assertEquals(1, runtimeService.createExecutionQuery().processInstanceId(pi.getId()).variableValueEquals("x", "parent").count()); assertEquals(3, runtimeService.createExecutionQuery().processInstanceId(pi.getId()).processVariableValueEquals("x", "parent").count()); assertEquals(3, runtimeService.createExecutionQuery().processInstanceId(pi.getId()).processVariableValueNotEquals("x", "xxx").count()); } @Deployment(resources={"org/camunda/bpm/engine/test/api/runtime/concurrentExecution.bpmn20.xml"}) public void testExecutionQueryForSuspendedExecutions() { List<Execution> suspendedExecutions = runtimeService.createExecutionQuery().suspended().list(); assertEquals(suspendedExecutions.size(), 0); for (String instanceId : concurrentProcessInstanceIds) { runtimeService.suspendProcessInstanceById(instanceId); } suspendedExecutions = runtimeService.createExecutionQuery().suspended().list(); assertEquals(12, suspendedExecutions.size()); List<Execution> activeExecutions = runtimeService.createExecutionQuery().active().list(); assertEquals(1, activeExecutions.size()); for (Execution activeExecution : activeExecutions) { assertEquals(activeExecution.getProcessInstanceId(), sequentialProcessInstanceIds.get(0)); } } @Deployment(resources={"org/camunda/bpm/engine/test/api/runtime/failingProcessCreateOneIncident.bpmn20.xml"}) public void testQueryByIncidentId() { ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("failingProcess"); executeAvailableJobs(); List<Incident> incidentList = runtimeService.createIncidentQuery().list(); assertEquals(1, incidentList.size()); Incident incident = runtimeService.createIncidentQuery().processInstanceId(processInstance.getId()).singleResult(); List<Execution> executionList = runtimeService .createExecutionQuery() 
.incidentId(incident.getId()).list(); assertEquals(1, executionList.size()); } public void testQueryByInvalidIncidentId() { ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery(); assertEquals(0, query.incidentId("invalid").count()); try { query.incidentId(null); fail(); } catch (ProcessEngineException e) {} } @Deployment(resources={"org/camunda/bpm/engine/test/api/runtime/failingProcessCreateOneIncident.bpmn20.xml"}) public void testQueryByIncidentType() { ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("failingProcess"); executeAvailableJobs(); List<Incident> incidentList = runtimeService.createIncidentQuery().list(); assertEquals(1, incidentList.size()); Incident incident = runtimeService.createIncidentQuery().processInstanceId(processInstance.getId()).singleResult(); List<Execution> executionList = runtimeService .createExecutionQuery() .incidentType(incident.getIncidentType()).list(); assertEquals(1, executionList.size()); } public void testQueryByInvalidIncidentType() { ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery(); assertEquals(0, query.incidentType("invalid").count()); try { query.incidentType(null); fail(); } catch (ProcessEngineException e) {} } @Deployment(resources={"org/camunda/bpm/engine/test/api/runtime/failingProcessCreateOneIncident.bpmn20.xml"}) public void testQueryByIncidentMessage() { ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("failingProcess"); executeAvailableJobs(); List<Incident> incidentList = runtimeService.createIncidentQuery().list(); assertEquals(1, incidentList.size()); Incident incident = runtimeService.createIncidentQuery().processInstanceId(processInstance.getId()).singleResult(); List<Execution> executionList = runtimeService .createExecutionQuery() .incidentMessage(incident.getIncidentMessage()).list(); assertEquals(1, executionList.size()); } public void testQueryByInvalidIncidentMessage() { 
// NOTE(review): this chunk begins inside testQueryByInvalidIncidentMessage(); the method
// signature sits above the visible region, so only the body's tail appears here.
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery();
// No definition has an incident with this message, so the filtered count must be 0.
assertEquals(0, query.incidentMessage("invalid").count());
try {
  // A null incident message is an illegal filter argument and must be rejected.
  query.incidentMessage(null);
  fail();
} catch (ProcessEngineException e) {}
}

/** Executions with a matching incident message (LIKE pattern) should be returned. */
@Deployment(resources={"org/camunda/bpm/engine/test/api/runtime/failingProcessCreateOneIncident.bpmn20.xml"})
public void testQueryByIncidentMessageLike() {
  runtimeService.startProcessInstanceByKey("failingProcess");
  // Run the failing job so the engine raises exactly one incident.
  executeAvailableJobs();
  List<Incident> incidentList = runtimeService.createIncidentQuery().list();
  assertEquals(1, incidentList.size());
  List<Execution> executionList = runtimeService
      .createExecutionQuery()
      .incidentMessageLike("%exception%").list();
  assertEquals(1, executionList.size());
}

/** incidentMessageLike() must reject null and return nothing for non-matching patterns. */
public void testQueryByInvalidIncidentMessageLike() {
  ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery();
  assertEquals(0, query.incidentMessageLike("invalid").count());
  try {
    query.incidentMessageLike(null);
    fail();
  } catch (ProcessEngineException e) {}
}

/** Incident raised inside a subprocess: the query must return the subprocess execution. */
@Deployment(resources={"org/camunda/bpm/engine/test/api/runtime/failingSubProcessCreateOneIncident.bpmn20.xml"})
public void testQueryByIncidentIdSubProcess() {
  ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("failingSubProcess");
  executeAvailableJobs();
  List<Incident> incidentList = runtimeService.createIncidentQuery().list();
  assertEquals(1, incidentList.size());
  Incident incident = runtimeService.createIncidentQuery().processInstanceId(processInstance.getId()).singleResult();
  List<Execution> executionList = runtimeService
      .createExecutionQuery()
      .incidentId(incident.getId()).list();
  assertEquals(1, executionList.size());
  // execution id of subprocess != process instance id
  assertNotSame(processInstance.getId(), executionList.get(0).getId());
}

/** Same as above but filtering by incident type instead of incident id. */
@Deployment(resources={"org/camunda/bpm/engine/test/api/runtime/failingSubProcessCreateOneIncident.bpmn20.xml"})
public void testQueryByIncidentTypeInSubprocess() {
  ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("failingSubProcess");
  executeAvailableJobs();
  List<Incident> incidentList = runtimeService.createIncidentQuery().list();
  assertEquals(1, incidentList.size());
  Incident incident = runtimeService.createIncidentQuery().processInstanceId(processInstance.getId()).singleResult();
  List<Execution> executionList = runtimeService
      .createExecutionQuery()
      .incidentType(incident.getIncidentType()).list();
  assertEquals(1, executionList.size());
  // execution id of subprocess != process instance id
  assertNotSame(processInstance.getId(), executionList.get(0).getId());
}

/** Same as above but filtering by the full incident message. */
@Deployment(resources={"org/camunda/bpm/engine/test/api/runtime/failingSubProcessCreateOneIncident.bpmn20.xml"})
public void testQueryByIncidentMessageInSubProcess() {
  ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("failingSubProcess");
  executeAvailableJobs();
  List<Incident> incidentList = runtimeService.createIncidentQuery().list();
  assertEquals(1, incidentList.size());
  Incident incident = runtimeService.createIncidentQuery().processInstanceId(processInstance.getId()).singleResult();
  List<Execution> executionList = runtimeService
      .createExecutionQuery()
      .incidentMessage(incident.getIncidentMessage()).list();
  assertEquals(1, executionList.size());
  // execution id of subprocess != process instance id
  assertNotSame(processInstance.getId(), executionList.get(0).getId());
}

/** Same as above but filtering with a LIKE pattern on the incident message. */
@Deployment(resources={"org/camunda/bpm/engine/test/api/runtime/failingSubProcessCreateOneIncident.bpmn20.xml"})
public void testQueryByIncidentMessageLikeSubProcess() {
  ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("failingSubProcess");
  executeAvailableJobs();
  List<Incident> incidentList = runtimeService.createIncidentQuery().list();
  assertEquals(1, incidentList.size());
  runtimeService.createIncidentQuery().processInstanceId(processInstance.getId()).singleResult();
  List<Execution> executionList = runtimeService
      .createExecutionQuery()
      .incidentMessageLike("%exception%").list();
  assertEquals(1, executionList.size());
  // execution id of subprocess != process instance id
  assertNotSame(processInstance.getId(), executionList.get(0).getId());
}

/**
 * Only executions waiting on a message event subscription are returned; the two
 * oneTaskProcess instances (no subscription) must be filtered out.
 */
@Deployment(resources={"org/camunda/bpm/engine/test/api/runtime/oneTaskProcess.bpmn20.xml",
    "org/camunda/bpm/engine/test/api/runtime/oneMessageCatchProcess.bpmn20.xml"})
public void testQueryForExecutionsWithMessageEventSubscriptions() {
  runtimeService.startProcessInstanceByKey("oneTaskProcess");
  runtimeService.startProcessInstanceByKey("oneTaskProcess");
  ProcessInstance instance1 = runtimeService.startProcessInstanceByKey("oneMessageCatchProcess");
  ProcessInstance instance2 = runtimeService.startProcessInstanceByKey("oneMessageCatchProcess");
  List<Execution> executions = runtimeService.createExecutionQuery()
      .messageEventSubscription().orderByProcessInstanceId().asc().list();
  assertEquals(2, executions.size());
  // Generated instance ids are not guaranteed ordered, so accept either sort outcome.
  if (instance1.getId().compareTo(instance2.getId()) < 0) {
    assertEquals(instance1.getId(), executions.get(0).getProcessInstanceId());
    assertEquals(instance2.getId(), executions.get(1).getProcessInstanceId());
  } else {
    assertEquals(instance2.getId(), executions.get(0).getProcessInstanceId());
    assertEquals(instance1.getId(), executions.get(1).getProcessInstanceId());
  }
}

/** Combining the named and the generic subscription filter must behave consistently in both orders. */
@Deployment(resources="org/camunda/bpm/engine/test/api/runtime/oneMessageCatchProcess.bpmn20.xml")
public void testQueryForExecutionsWithMessageEventSubscriptionsOverlappingFilters() {
  ProcessInstance instance = runtimeService.startProcessInstanceByKey("oneMessageCatchProcess");
  Execution execution = runtimeService
      .createExecutionQuery()
      .messageEventSubscriptionName("newInvoiceMessage")
      .messageEventSubscription()
      .singleResult();
  assertNotNull(execution);
  assertEquals(instance.getId(), execution.getProcessInstanceId());
  // Same filters, reversed order of application.
  runtimeService
      .createExecutionQuery()
      .messageEventSubscription()
      .messageEventSubscriptionName("newInvoiceMessage")
      .list();
  assertNotNull(execution);
  assertEquals(instance.getId(), execution.getProcessInstanceId());
}

/** An execution with several subscriptions must be returned once, never duplicated. */
@Deployment(resources = "org/camunda/bpm/engine/test/api/runtime/twoBoundaryEventSubscriptions.bpmn20.xml")
public void testQueryForExecutionsWithMultipleSubscriptions() {
  // given two message event subscriptions
  ProcessInstance instance = runtimeService.startProcessInstanceByKey("process");
  List<EventSubscription> subscriptions = runtimeService.createEventSubscriptionQuery().processInstanceId(instance.getId()).list();
  assertEquals(2, subscriptions.size());
  // Both subscriptions hang off the same execution.
  assertEquals(subscriptions.get(0).getExecutionId(), subscriptions.get(1).getExecutionId());
  // should return the execution once (not twice)
  Execution execution = runtimeService
      .createExecutionQuery()
      .messageEventSubscription()
      .singleResult();
  assertNotNull(execution);
  assertEquals(instance.getId(), execution.getProcessInstanceId());
  // should return the execution once
  execution = runtimeService
      .createExecutionQuery()
      .messageEventSubscriptionName("messageName_1")
      .singleResult();
  assertNotNull(execution);
  assertEquals(instance.getId(), execution.getProcessInstanceId());
  // should return the execution once
  execution = runtimeService
      .createExecutionQuery()
      .messageEventSubscriptionName("messageName_2")
      .singleResult();
  assertNotNull(execution);
  assertEquals(instance.getId(), execution.getProcessInstanceId());
  // should return the execution once
  execution = runtimeService
      .createExecutionQuery()
      .messageEventSubscriptionName("messageName_1")
      .messageEventSubscriptionName("messageName_2")
      .singleResult();
  assertNotNull(execution);
  assertEquals(instance.getId(), execution.getProcessInstanceId());
  // should not return the execution
  execution = runtimeService
      .createExecutionQuery()
      .messageEventSubscriptionName("messageName_1")
      .messageEventSubscriptionName("messageName_2")
      .messageEventSubscriptionName("another")
      .singleResult();
  assertNull(execution);
}

/**
 * Variable equality with Variables.numberValue() must match across numeric types
 * (long/short/double/int all equal 123) but not nulls or the string "123".
 */
@Deployment(resources = "org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml")
public void testProcessVariableValueEqualsNumber() throws Exception {
  // long
  runtimeService.startProcessInstanceByKey("oneTaskProcess",
      Collections.<String, Object>singletonMap("var", 123L));
  // non-matching long
  runtimeService.startProcessInstanceByKey("oneTaskProcess",
      Collections.<String, Object>singletonMap("var", 12345L));
  // short
  runtimeService.startProcessInstanceByKey("oneTaskProcess",
      Collections.<String, Object>singletonMap("var", (short) 123));
  // double
  runtimeService.startProcessInstanceByKey("oneTaskProcess",
      Collections.<String, Object>singletonMap("var", 123.0d));
  // integer
  runtimeService.startProcessInstanceByKey("oneTaskProcess",
      Collections.<String, Object>singletonMap("var", 123));
  // untyped null (should not match)
  runtimeService.startProcessInstanceByKey("oneTaskProcess",
      Collections.<String, Object>singletonMap("var", null));
  // typed null (should not match)
  runtimeService.startProcessInstanceByKey("oneTaskProcess",
      Collections.<String, Object>singletonMap("var", Variables.longValue(null)));
  // string "123" (should not match a number value)
  runtimeService.startProcessInstanceByKey("oneTaskProcess",
      Collections.<String, Object>singletonMap("var", "123"));
  // 4 matches each time: the long, short, double and integer 123 variables.
  assertEquals(4, runtimeService.createExecutionQuery().processVariableValueEquals("var", Variables.numberValue(123)).count());
  assertEquals(4, runtimeService.createExecutionQuery().processVariableValueEquals("var", Variables.numberValue(123L)).count());
  assertEquals(4, runtimeService.createExecutionQuery().processVariableValueEquals("var", Variables.numberValue(123.0d)).count());
  assertEquals(4, runtimeService.createExecutionQuery().processVariableValueEquals("var", Variables.numberValue((short) 123)).count());
  // Only the typed null matches numberValue(null).
  assertEquals(1, runtimeService.createExecutionQuery().processVariableValueEquals("var", Variables.numberValue(null)).count());
  // Same expectations via the local-variable filter.
  assertEquals(4, runtimeService.createExecutionQuery().variableValueEquals("var", Variables.numberValue(123)).count());
  assertEquals(4, runtimeService.createExecutionQuery().variableValueEquals("var", Variables.numberValue(123L)).count());
  assertEquals(4, runtimeService.createExecutionQuery().variableValueEquals("var", Variables.numberValue(123.0d)).count());
  assertEquals(4, runtimeService.createExecutionQuery().variableValueEquals("var", Variables.numberValue((short) 123)).count());
  assertEquals(1, runtimeService.createExecutionQuery().variableValueEquals("var", Variables.numberValue(null)).count());
}

/** Not-equals comparison on numbers: the non-matching long, null and typed-null instances qualify. */
@Deployment(resources = "org/camunda/bpm/engine/test/api/oneTaskProcess.bpmn20.xml")
public void testProcessVariableValueNumberComparison() throws Exception {
  // long
  runtimeService.startProcessInstanceByKey("oneTaskProcess",
      Collections.<String, Object>singletonMap("var", 123L));
  // non-matching long
  runtimeService.startProcessInstanceByKey("oneTaskProcess",
      Collections.<String, Object>singletonMap("var", 12345L));
  // short
  runtimeService.startProcessInstanceByKey("oneTaskProcess",
      Collections.<String, Object>singletonMap("var", (short) 123));
  // double
  runtimeService.startProcessInstanceByKey("oneTaskProcess",
      Collections.<String, Object>singletonMap("var", 123.0d));
  // integer
  runtimeService.startProcessInstanceByKey("oneTaskProcess",
      Collections.<String, Object>singletonMap("var", 123));
  // untyped null
  runtimeService.startProcessInstanceByKey("oneTaskProcess",
      Collections.<String, Object>singletonMap("var", null));
  // typed null
  runtimeService.startProcessInstanceByKey("oneTaskProcess",
      Collections.<String, Object>singletonMap("var", Variables.longValue(null)));
  runtimeService.startProcessInstanceByKey("oneTaskProcess",
      Collections.<String, Object>singletonMap("var", "123"));
  assertEquals(3, runtimeService.createExecutionQuery().processVariableValueNotEquals("var", Variables.numberValue(123)).count());
}
}
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.tab;

import org.chromium.base.VisibleForTesting;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.ChromeActivity;
import org.chromium.chrome.browser.ChromeApplication;
import org.chromium.chrome.browser.datausage.DataUseTabUIManager;
import org.chromium.chrome.browser.externalnav.ExternalNavigationHandler;
import org.chromium.chrome.browser.externalnav.ExternalNavigationHandler.OverrideUrlLoadingResult;
import org.chromium.chrome.browser.externalnav.ExternalNavigationParams;
import org.chromium.chrome.browser.tabmodel.TabModel.TabLaunchType;
import org.chromium.components.navigation_interception.InterceptNavigationDelegate;
import org.chromium.components.navigation_interception.NavigationParams;
import org.chromium.content_public.browser.NavigationController;
import org.chromium.content_public.browser.WebContents;
import org.chromium.content_public.common.ConsoleMessageLevel;

/**
 * Class that controls navigations and allows to intercept them. It is used on Android to 'convert'
 * certain navigations to Intents to 3rd party applications and to "pause" navigations when data use
 * tracking has ended.
 */
public class InterceptNavigationDelegateImpl implements InterceptNavigationDelegate {
    private final ChromeActivity mActivity;
    private final Tab mTab;
    // Decides whether a navigation should leave Chrome as an external Intent.
    private final ExternalNavigationHandler mExternalNavHandler;
    // Optional interceptor supplied by the application; may be null.
    private final AuthenticatorNavigationInterceptor mAuthenticatorHelper;
    // Result of the most recent shouldIgnoreNavigation() decision, exposed for tests.
    private ExternalNavigationHandler.OverrideUrlLoadingResult mLastOverrideUrlLoadingResult =
            ExternalNavigationHandler.OverrideUrlLoadingResult.NO_OVERRIDE;

    /**
     * Whether forward history should be cleared after navigation is committed.
     */
    private boolean mClearAllForwardHistoryRequired;
    private boolean mShouldClearRedirectHistoryForTabClobbering;

    /**
     * Default constructor of {@link InterceptNavigationDelegateImpl}.
     */
    public InterceptNavigationDelegateImpl(ChromeActivity activity, Tab tab) {
        this(new ExternalNavigationHandler(activity), activity, tab);
    }

    /**
     * Constructs a new instance of {@link InterceptNavigationDelegateImpl} with the given
     * {@link ExternalNavigationHandler}.
     */
    public InterceptNavigationDelegateImpl(ExternalNavigationHandler externalNavHandler,
            ChromeActivity activity, Tab tab) {
        mActivity = activity;
        mTab = tab;
        mExternalNavHandler = externalNavHandler;
        mAuthenticatorHelper = ((ChromeApplication) mTab.getApplicationContext())
                .createAuthenticatorNavigationInterceptor(mTab);
    }

    /**
     * Returns true when a navigation that would open a new tab should be swallowed, either
     * because the authenticator interceptor handled the URL or because the external-navigation
     * handler overrides the load.
     */
    public boolean shouldIgnoreNewTab(String url, boolean incognito) {
        if (mAuthenticatorHelper != null && mAuthenticatorHelper.handleAuthenticatorUrl(url)) {
            return true;
        }
        ExternalNavigationParams params = new ExternalNavigationParams.Builder(url, incognito)
                .setTab(mTab)
                .setOpenInNewTab(true)
                .build();
        return mExternalNavHandler.shouldOverrideUrlLoading(params)
                != ExternalNavigationHandler.OverrideUrlLoadingResult.NO_OVERRIDE;
    }

    @VisibleForTesting
    public OverrideUrlLoadingResult getLastOverrideUrlLoadingResultForTests() {
        return mLastOverrideUrlLoadingResult;
    }

    @Override
    public boolean shouldIgnoreNavigation(NavigationParams navigationParams) {
        String url = navigationParams.url;
        if (mAuthenticatorHelper != null && mAuthenticatorHelper.handleAuthenticatorUrl(url)) {
            return true;
        }
        // Record this load with the redirect handler BEFORE computing the close decision;
        // the handler tracks the entry index saved before this navigation started.
        TabRedirectHandler tabRedirectHandler = mTab.getTabRedirectHandler();
        tabRedirectHandler.updateNewUrlLoading(navigationParams.pageTransitionType,
                navigationParams.isRedirect,
                navigationParams.hasUserGesture || navigationParams.hasUserGestureCarryover,
                mActivity.getLastUserInteractionTime(), getLastCommittedEntryIndex());
        boolean shouldCloseTab = shouldCloseContentsOnOverrideUrlLoadingAndLaunchIntent();
        boolean isInitialTabLaunchInBackground =
                mTab.getLaunchType() == TabLaunchType.FROM_LONGPRESS_BACKGROUND && shouldCloseTab;
        // http://crbug.com/448977: If a new tab is closed by this overriding, we should open an
        // Intent in a new tab when Chrome receives it again.
        ExternalNavigationParams params = new ExternalNavigationParams.Builder(
                url, mTab.isIncognito(), navigationParams.referrer,
                navigationParams.pageTransitionType,
                navigationParams.isRedirect)
                .setTab(mTab)
                .setApplicationMustBeInForeground(true)
                .setRedirectHandler(tabRedirectHandler)
                .setOpenInNewTab(shouldCloseTab)
                .setIsBackgroundTabNavigation(mTab.isHidden() && !isInitialTabLaunchInBackground)
                .setIsMainFrame(navigationParams.isMainFrame)
                .setHasUserGesture(navigationParams.hasUserGesture)
                .setShouldCloseContentsOnOverrideUrlLoadingAndLaunchIntent(shouldCloseTab
                        && navigationParams.isMainFrame)
                .build();
        OverrideUrlLoadingResult result = mExternalNavHandler.shouldOverrideUrlLoading(params);
        mLastOverrideUrlLoadingResult = result;
        switch (result) {
            case OVERRIDE_WITH_EXTERNAL_INTENT:
                assert mExternalNavHandler.canExternalAppHandleUrl(url);
                // Only main-frame overrides trigger the tab-close/history-cleanup path.
                if (navigationParams.isMainFrame) {
                    onOverrideUrlLoadingAndLaunchIntent();
                }
                return true;
            case OVERRIDE_WITH_CLOBBERING_TAB:
                mShouldClearRedirectHistoryForTabClobbering = true;
                return true;
            case OVERRIDE_WITH_ASYNC_ACTION:
                if (!shouldCloseTab && navigationParams.isMainFrame) {
                    onOverrideUrlLoadingAndLaunchIntent();
                }
                return true;
            case NO_OVERRIDE:
            default:
                // External protocols that nothing can handle are blocked and logged.
                if (navigationParams.isExternalProtocol) {
                    logBlockedNavigationToDevToolsConsole(url);
                    return true;
                }
                return DataUseTabUIManager.shouldOverrideUrlLoading(mActivity, mTab, url,
                        navigationParams.pageTransitionType, navigationParams.referrer);
        }
    }

    /**
     * Updates navigation history if navigation is canceled due to intent handler. We go back to the
     * last committed entry index which was saved before the navigation, and remove the empty
     * entries from the navigation history. See crbug.com/426679
     */
    public void maybeUpdateNavigationHistory() {
        WebContents webContents = mTab.getWebContents();
        if (mClearAllForwardHistoryRequired && webContents != null) {
            NavigationController navigationController = webContents.getNavigationController();
            int lastCommittedEntryIndex = getLastCommittedEntryIndex();
            // Drop every forward entry created by the canceled navigation.
            while (navigationController.canGoForward()) {
                boolean ret = navigationController.removeEntryAtIndex(
                        lastCommittedEntryIndex + 1);
                assert ret;
            }
        } else if (mShouldClearRedirectHistoryForTabClobbering && webContents != null) {
            // http://crbug/479056: Even if we clobber the current tab, we want to remove
            // redirect history to be consistent.
            NavigationController navigationController = webContents.getNavigationController();
            int indexBeforeRedirection = mTab.getTabRedirectHandler()
                    .getLastCommittedEntryIndexBeforeStartingNavigation();
            int lastCommittedEntryIndex = getLastCommittedEntryIndex();
            // Remove the intermediate redirect entries, keeping the current one.
            for (int i = lastCommittedEntryIndex - 1; i > indexBeforeRedirection; --i) {
                boolean ret = navigationController.removeEntryAtIndex(i);
                assert ret;
            }
        }
        mClearAllForwardHistoryRequired = false;
        mShouldClearRedirectHistoryForTabClobbering = false;
    }

    AuthenticatorNavigationInterceptor getAuthenticatorNavigationInterceptor() {
        return mAuthenticatorHelper;
    }

    // Returns -1 when the tab has no WebContents.
    private int getLastCommittedEntryIndex() {
        if (mTab.getWebContents() == null) return -1;
        return mTab.getWebContents().getNavigationController().getLastCommittedEntryIndex();
    }

    private boolean shouldCloseContentsOnOverrideUrlLoadingAndLaunchIntent() {
        if (mTab.getWebContents() == null) return false;
        if (!mTab.getWebContents().getNavigationController().canGoToOffset(0)) return true;
        // http://crbug/415948 : if the last committed entry index which was saved before this
        // navigation is invalid, it means that this navigation is the first one since this tab was
        // created.
        // In such case, we would like to close this tab.
        if (mTab.getTabRedirectHandler().isOnNavigation()) {
            return mTab.getTabRedirectHandler().getLastCommittedEntryIndexBeforeStartingNavigation()
                    == TabRedirectHandler.INVALID_ENTRY_INDEX;
        }
        return false;
    }

    /**
     * Called when Chrome decides to override URL loading and show an intent picker.
     */
    private void onOverrideUrlLoadingAndLaunchIntent() {
        if (mTab.getWebContents() == null) return;

        // Before leaving Chrome, close the empty child tab.
        // If a new tab is created through JavaScript open to load this
        // url, we would like to close it as we will load this url in a
        // different Activity.
        if (shouldCloseContentsOnOverrideUrlLoadingAndLaunchIntent()) {
            if (mTab.getLaunchType() == TabLaunchType.FROM_EXTERNAL_APP) {
                // Moving task back before closing the tab allows back button to function better
                // when Chrome was an intermediate link redirector between two apps.
                // crbug.com/487938.
                mActivity.moveTaskToBack(true);
            }
            mActivity.getTabModelSelector().closeTab(mTab);
        } else if (mTab.getTabRedirectHandler().isOnNavigation()) {
            int lastCommittedEntryIndexBeforeNavigation = mTab.getTabRedirectHandler()
                    .getLastCommittedEntryIndexBeforeStartingNavigation();
            if (getLastCommittedEntryIndex() > lastCommittedEntryIndexBeforeNavigation) {
                // http://crbug/426679 : we want to go back to the last committed entry index which
                // was saved before this navigation, and remove the empty entries from the
                // navigation history.
                mClearAllForwardHistoryRequired = true;
                mTab.getWebContents().getNavigationController().goToNavigationIndex(
                        lastCommittedEntryIndexBeforeNavigation);
            }
        }
    }

    // Warns in the DevTools console why an external-protocol navigation was blocked.
    private void logBlockedNavigationToDevToolsConsole(String url) {
        int resId = mExternalNavHandler.canExternalAppHandleUrl(url)
                ? R.string.blocked_navigation_warning
                : R.string.unreachable_navigation_warning;
        mTab.getWebContents().addMessageToDevToolsConsole(
                ConsoleMessageLevel.WARNING, mTab.getApplicationContext().getString(resId, url));
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.streams.state.internals; import org.apache.kafka.common.metrics.Metrics; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.utils.Utils; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.errors.ProcessorStateException; import org.apache.kafka.streams.processor.StateRestoreListener; import org.apache.kafka.streams.processor.internals.MockStreamsMetrics; import org.apache.kafka.streams.processor.internals.RecordCollector; import org.apache.kafka.streams.state.KeyValueIterator; import org.apache.kafka.streams.state.RocksDBConfigSetter; import org.apache.kafka.test.MockProcessorContext; import org.apache.kafka.test.NoOpRecordCollector; import org.apache.kafka.test.TestUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.rocksdb.Options; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import static org.hamcrest.CoreMatchers.equalTo; import 
static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public class RocksDBStoreTest { private final File tempDir = TestUtils.tempDirectory(); private RocksDBStore<String, String> subject; private MockProcessorContext context; private File dir; @Before public void setUp() throws Exception { subject = new RocksDBStore<>("test", Serdes.String(), Serdes.String()); dir = TestUtils.tempDirectory(); context = new MockProcessorContext(dir, Serdes.String(), Serdes.String(), new NoOpRecordCollector(), new ThreadCache("testCache", 0, new MockStreamsMetrics(new Metrics()))); } @After public void tearDown() throws Exception { subject.close(); } @Test public void canSpecifyConfigSetterAsClass() throws Exception { final Map<String, Object> configs = new HashMap<>(); configs.put(StreamsConfig.ROCKSDB_CONFIG_SETTER_CLASS_CONFIG, MockRocksDbConfigSetter.class); MockRocksDbConfigSetter.called = false; subject.openDB(new ConfigurableProcessorContext(tempDir, Serdes.String(), Serdes.String(), null, null, configs)); assertTrue(MockRocksDbConfigSetter.called); } @Test public void canSpecifyConfigSetterAsString() throws Exception { final Map<String, Object> configs = new HashMap<>(); configs.put(StreamsConfig.ROCKSDB_CONFIG_SETTER_CLASS_CONFIG, MockRocksDbConfigSetter.class.getName()); MockRocksDbConfigSetter.called = false; subject.openDB(new ConfigurableProcessorContext(tempDir, Serdes.String(), Serdes.String(), null, null, configs)); assertTrue(MockRocksDbConfigSetter.called); } @Test(expected = ProcessorStateException.class) public void shouldThrowProcessorStateExceptionOnOpeningReadOnlyDir() throws IOException { final File tmpDir = TestUtils.tempDirectory(); MockProcessorContext tmpContext = new MockProcessorContext(tmpDir, Serdes.String(), Serdes.Long(), new NoOpRecordCollector(), new ThreadCache("testCache", 0, new 
MockStreamsMetrics(new Metrics()))); tmpDir.setReadOnly(); subject.openDB(tmpContext); } @Test public void shouldPutAll() { List<KeyValue<String, String>> entries = new ArrayList<>(); entries.add(new KeyValue<>("1", "a")); entries.add(new KeyValue<>("2", "b")); entries.add(new KeyValue<>("3", "c")); subject.init(context, subject); subject.putAll(entries); subject.flush(); assertEquals(subject.get("1"), "a"); assertEquals(subject.get("2"), "b"); assertEquals(subject.get("3"), "c"); } @Test public void shouldTogglePrepareForBulkloadSetting() { subject.init(context, subject); StateRestoreListener restoreListener = (StateRestoreListener) subject.batchingStateRestoreCallback; restoreListener.onRestoreStart(null, null, 0, 0); assertTrue("Should have set bulk loading to true", subject.isPrepareForBulkload()); restoreListener.onRestoreEnd(null, null, 0); assertFalse("Should have set bulk loading to false", subject.isPrepareForBulkload()); } @Test public void shouldRestoreAll() throws Exception { final List<KeyValue<byte[], byte[]>> entries = new ArrayList<>(); entries.add(new KeyValue<>("1".getBytes("UTF-8"), "a".getBytes("UTF-8"))); entries.add(new KeyValue<>("2".getBytes("UTF-8"), "b".getBytes("UTF-8"))); entries.add(new KeyValue<>("3".getBytes("UTF-8"), "c".getBytes("UTF-8"))); subject.init(context, subject); context.restore(subject.name(), entries); assertEquals(subject.get("1"), "a"); assertEquals(subject.get("2"), "b"); assertEquals(subject.get("3"), "c"); } @Test public void shouldHandleDeletesOnRestoreAll() throws Exception { final List<KeyValue<byte[], byte[]>> entries = new ArrayList<>(); entries.add(new KeyValue<>("1".getBytes("UTF-8"), "a".getBytes("UTF-8"))); entries.add(new KeyValue<>("2".getBytes("UTF-8"), "b".getBytes("UTF-8"))); entries.add(new KeyValue<>("3".getBytes("UTF-8"), "c".getBytes("UTF-8"))); entries.add(new KeyValue<>("1".getBytes("UTF-8"), (byte[]) null)); subject.init(context, subject); context.restore(subject.name(), entries); final 
KeyValueIterator<String, String> iterator = subject.all(); final Set<String> keys = new HashSet<>(); while (iterator.hasNext()) { keys.add(iterator.next().key); } assertThat(keys, equalTo(Utils.mkSet("2", "3"))); } @Test public void shouldHandleDeletesAndPutbackOnRestoreAll() throws Exception { final List<KeyValue<byte[], byte[]>> entries = new ArrayList<>(); entries.add(new KeyValue<>("1".getBytes("UTF-8"), "a".getBytes("UTF-8"))); entries.add(new KeyValue<>("2".getBytes("UTF-8"), "b".getBytes("UTF-8"))); // this will be deleted entries.add(new KeyValue<>("1".getBytes("UTF-8"), (byte[]) null)); entries.add(new KeyValue<>("3".getBytes("UTF-8"), "c".getBytes("UTF-8"))); // this will restore key "1" as WriteBatch applies updates in order entries.add(new KeyValue<>("1".getBytes("UTF-8"), "restored".getBytes("UTF-8"))); subject.init(context, subject); context.restore(subject.name(), entries); final KeyValueIterator<String, String> iterator = subject.all(); final Set<String> keys = new HashSet<>(); while (iterator.hasNext()) { keys.add(iterator.next().key); } assertThat(keys, equalTo(Utils.mkSet("1", "2", "3"))); assertEquals(subject.get("1"), "restored"); assertEquals(subject.get("2"), "b"); assertEquals(subject.get("3"), "c"); } @Test public void shouldRestoreThenDeleteOnRestoreAll() throws Exception { final List<KeyValue<byte[], byte[]>> entries = new ArrayList<>(); entries.add(new KeyValue<>("1".getBytes("UTF-8"), "a".getBytes("UTF-8"))); entries.add(new KeyValue<>("2".getBytes("UTF-8"), "b".getBytes("UTF-8"))); entries.add(new KeyValue<>("3".getBytes("UTF-8"), "c".getBytes("UTF-8"))); subject.init(context, subject); context.restore(subject.name(), entries); assertEquals(subject.get("1"), "a"); assertEquals(subject.get("2"), "b"); assertEquals(subject.get("3"), "c"); entries.clear(); entries.add(new KeyValue<>("2".getBytes("UTF-8"), "b".getBytes("UTF-8"))); entries.add(new KeyValue<>("3".getBytes("UTF-8"), "c".getBytes("UTF-8"))); entries.add(new 
KeyValue<>("1".getBytes("UTF-8"), (byte[]) null)); context.restore(subject.name(), entries); final KeyValueIterator<String, String> iterator = subject.all(); final Set<String> keys = new HashSet<>(); while (iterator.hasNext()) { keys.add(iterator.next().key); } assertThat(keys, equalTo(Utils.mkSet("2", "3"))); } @Test public void shouldThrowNullPointerExceptionOnNullPut() { subject.init(context, subject); try { subject.put(null, "someVal"); fail("Should have thrown NullPointerException on null put()"); } catch (NullPointerException e) { } } @Test public void shouldThrowNullPointerExceptionOnNullPutAll() { subject.init(context, subject); try { subject.put(null, "someVal"); fail("Should have thrown NullPointerException on null put()"); } catch (NullPointerException e) { } } @Test public void shouldThrowNullPointerExceptionOnNullGet() { subject.init(context, subject); try { subject.get(null); fail("Should have thrown NullPointerException on null get()"); } catch (NullPointerException e) { } } @Test public void shouldThrowNullPointerExceptionOnDelete() { subject.init(context, subject); try { subject.delete(null); fail("Should have thrown NullPointerException on deleting null key"); } catch (NullPointerException e) { } } @Test public void shouldThrowNullPointerExceptionOnRange() { subject.init(context, subject); try { subject.range(null, "2"); fail("Should have thrown NullPointerException on deleting null key"); } catch (NullPointerException e) { } } @Test(expected = ProcessorStateException.class) public void shouldThrowProcessorStateExeptionOnPutDeletedDir() throws IOException { subject.init(context, subject); Utils.delete(dir); subject.put("anyKey", "anyValue"); subject.flush(); } public static class MockRocksDbConfigSetter implements RocksDBConfigSetter { static boolean called; @Override public void setConfig(final String storeName, final Options options, final Map<String, Object> configs) { called = true; } } private static class ConfigurableProcessorContext extends 
MockProcessorContext { final Map<String, Object> configs; ConfigurableProcessorContext(final File stateDir, final Serde<?> keySerde, final Serde<?> valSerde, final RecordCollector collector, final ThreadCache cache, final Map<String, Object> configs) { super(stateDir, keySerde, valSerde, collector, cache); this.configs = configs; } @Override public Map<String, Object> appConfigs() { return configs; } } }
package com.fasterxml.jackson.databind.convert;

import java.math.BigDecimal;
import java.util.*;

import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.util.StdConverter;

/**
 * Tests for converter-based deserialization: converters attached either
 * via a class-level {@code @JsonDeserialize(converter=...)} annotation or
 * per-property, including {@code contentConverter} for array, List and
 * Map contents.
 */
public class TestConvertingDeserializer
    extends com.fasterxml.jackson.databind.BaseMapTest
{
    // Bean whose JSON representation is a two-element int array,
    // materialized through ConvertingBeanConverter below.
    @JsonDeserialize(converter=ConvertingBeanConverter.class)
    static class ConvertingBean {
        protected int x, y;

        protected ConvertingBean(int a, int b) {
            x = a;
            y = b;
        }
    }

    static class Point {
        protected int x, y;

        public Point(int a, int b) {
            x = a;
            y = b;
        }
    }

    static class ConvertingBeanContainer {
        public List<ConvertingBean> values;

        public ConvertingBeanContainer() { }

        public ConvertingBeanContainer(ConvertingBean... beans) {
            values = Arrays.asList(beans);
        }
    }

    // int[2] -> ConvertingBean
    static class ConvertingBeanConverter extends StdConverter<int[],ConvertingBean> {
        @Override
        public ConvertingBean convert(int[] input) {
            return new ConvertingBean(input[0], input[1]);
        }
    }

    // int[2] -> Point
    private static class PointConverter extends StdConverter<int[], Point> {
        @Override
        public Point convert(int[] input) {
            return new Point(input[0], input[1]);
        }
    }

    static class PointWrapper {
        @JsonDeserialize(converter=PointConverter.class)
        public Point value;

        protected PointWrapper() { }

        public PointWrapper(int x, int y) {
            value = new Point(x, y);
        }
    }

    static class PointListWrapperArray {
        @JsonDeserialize(contentConverter=PointConverter.class)
        public Point[] values;
    }

    static class PointListWrapperList {
        @JsonDeserialize(contentConverter=PointConverter.class)
        public List<Point> values;
    }

    static class PointListWrapperMap {
        @JsonDeserialize(contentConverter=PointConverter.class)
        public Map<String,Point> values;
    }

    // Simple String-to-String converter for lower-casing tests
    static class LowerCaser extends StdConverter<String, String> {
        @Override
        public String convert(String input) {
            return input.toLowerCase();
        }
    }

    static class LowerCaseText {
        @JsonDeserialize(converter=LowerCaser.class)
        public String text;
    }

    static class LowerCaseTextArray {
        @JsonDeserialize(contentConverter=LowerCaser.class)
        public String[] texts;
    }

    // for [databind#795]: converter whose declared target is an abstract type
    static class ToNumberConverter extends StdConverter<String,Number> {
        @Override
        public Number convert(String input) {
            return new BigDecimal(input);
        }
    }

    static class Issue795Bean {
        @JsonDeserialize(converter=ToNumberConverter.class)
        public Number value;
    }

    /*
    /**********************************************************
    /* Test methods
    /**********************************************************
     */

    // Class-level converter: root value read from an int array
    public void testClassAnnotationSimple() throws Exception
    {
        ConvertingBean result = objectReader(ConvertingBean.class).readValue("[1,2]");
        assertNotNull(result);
        assertEquals(1, result.x);
        assertEquals(2, result.y);
    }

    // Class-level converter applied to each element of a List property
    public void testClassAnnotationForLists() throws Exception
    {
        ConvertingBeanContainer holder = objectReader(ConvertingBeanContainer.class)
                .readValue("{\"values\":[[1,2],[3,4]]}");
        assertNotNull(holder);
        assertNotNull(holder.values);
        assertEquals(2, holder.values.size());
        assertEquals(4, holder.values.get(1).y);
    }

    // Property-level converter on a scalar POJO field
    public void testPropertyAnnotationSimple() throws Exception
    {
        PointWrapper w = objectReader(PointWrapper.class).readValue("{\"value\":[3,4]}");
        assertNotNull(w);
        assertNotNull(w.value);
        assertEquals(3, w.value.x);
        assertEquals(4, w.value.y);
    }

    public void testPropertyAnnotationLowerCasing() throws Exception
    {
        LowerCaseText result = objectReader(LowerCaseText.class).readValue("{\"text\":\"Yay!\"}");
        assertNotNull(result);
        assertNotNull(result.text);
        assertEquals("yay!", result.text);
    }

    // contentConverter applied per element of a String[]
    public void testPropertyAnnotationArrayLC() throws Exception
    {
        LowerCaseTextArray result = objectReader(LowerCaseTextArray.class).readValue("{\"texts\":[\"ABC\"]}");
        assertNotNull(result);
        assertNotNull(result.texts);
        assertEquals(1, result.texts.length);
        assertEquals("abc", result.texts[0]);
    }

    public void testPropertyAnnotationForArrays() throws Exception
    {
        PointListWrapperArray wrapper = objectReader(PointListWrapperArray.class)
                .readValue("{\"values\":[[4,5],[5,4]]}");
        assertNotNull(wrapper);
        assertNotNull(wrapper.values);
        assertEquals(2, wrapper.values.length);
        assertEquals(5, wrapper.values[1].x);
    }

    public void testPropertyAnnotationForLists() throws Exception
    {
        PointListWrapperList wrapper = objectReader(PointListWrapperList.class)
                .readValue("{\"values\":[[7,8],[8,7]]}");
        assertNotNull(wrapper);
        assertNotNull(wrapper.values);
        assertEquals(2, wrapper.values.size());
        assertEquals(7, wrapper.values.get(0).x);
    }

    public void testPropertyAnnotationForMaps() throws Exception
    {
        PointListWrapperMap wrapper = objectReader(PointListWrapperMap.class)
                .readValue("{\"values\":{\"a\":[1,2]}}");
        assertNotNull(wrapper);
        assertNotNull(wrapper.values);
        assertEquals(1, wrapper.values.size());
        Point pt = wrapper.values.get("a");
        assertNotNull(pt);
        assertEquals(1, pt.x);
        assertEquals(2, pt.y);
    }

    // [databind#795]: converter may target an abstract type (Number)
    public void testConvertToAbstract() throws Exception
    {
        Issue795Bean result = objectReader(Issue795Bean.class)
                .readValue("{\"value\":\"1.25\"}");
        assertNotNull(result.value);
        assertTrue("Type not BigDecimal but "+result.value.getClass(), result.value instanceof BigDecimal);
        assertEquals(new BigDecimal("1.25"), result.value);
    }
}
/* * Copyright 2002-2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.mock.jndi; import java.util.Hashtable; import javax.naming.Context; import javax.naming.NamingException; import javax.naming.spi.InitialContextFactory; import javax.naming.spi.InitialContextFactoryBuilder; import javax.naming.spi.NamingManager; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import org.springframework.util.ReflectionUtils; /** * Simple implementation of a JNDI naming context builder. * * <p>Mainly targeted at test environments, where each test case can * configure JNDI appropriately, so that {@code new InitialContext()} * will expose the required objects. Also usable for standalone applications, * e.g. for binding a JDBC DataSource to a well-known JNDI location, to be * able to use traditional Java EE data access code outside of a Java EE * container. 
* * <p>There are various choices for DataSource implementations: * <ul> * <li>{@code SingleConnectionDataSource} (using the same Connection for all getConnection calls) * <li>{@code DriverManagerDataSource} (creating a new Connection on each getConnection call) * <li>Apache's Commons DBCP offers {@code org.apache.commons.dbcp.BasicDataSource} (a real pool) * </ul> * * <p>Typical usage in bootstrap code: * * <pre class="code"> * SimpleNamingContextBuilder builder = new SimpleNamingContextBuilder(); * DataSource ds = new DriverManagerDataSource(...); * builder.bind("java:comp/env/jdbc/myds", ds); * builder.activate();</pre> * * Note that it's impossible to activate multiple builders within the same JVM, * due to JNDI restrictions. Thus to configure a fresh builder repeatedly, use * the following code to get a reference to either an already activated builder * or a newly activated one: * * <pre class="code"> * SimpleNamingContextBuilder builder = SimpleNamingContextBuilder.emptyActivatedContextBuilder(); * DataSource ds = new DriverManagerDataSource(...); * builder.bind("java:comp/env/jdbc/myds", ds);</pre> * * Note that you <i>should not</i> call {@code activate()} on a builder from * this factory method, as there will already be an activated one in any case. * * <p>An instance of this class is only necessary at setup time. * An application does not need to keep a reference to it after activation. 
 *
 * @author Juergen Hoeller
 * @author Rod Johnson
 * @see #emptyActivatedContextBuilder()
 * @see #bind(String, Object)
 * @see #activate()
 * @see SimpleNamingContext
 * @see org.springframework.jdbc.datasource.SingleConnectionDataSource
 * @see org.springframework.jdbc.datasource.DriverManagerDataSource
 */
public class SimpleNamingContextBuilder implements InitialContextFactoryBuilder {

	/**
	 * The builder currently exposing its bound objects via JNDI, or
	 * {@code null} when deactivated. JVM-global by design: JNDI allows
	 * only one InitialContextFactoryBuilder per JVM. Volatile so that
	 * activate/deactivate from one thread is visible to lookups in others.
	 */
	private static volatile SimpleNamingContextBuilder activated;

	// Whether this class has been registered with the JNDI NamingManager.
	// Registration is irreversible (JNDI offers no reset), hence done at most once.
	private static boolean initialized = false;

	// Guards the one-time NamingManager registration above.
	private static final Object initializationLock = new Object();


	/**
	 * Checks if a SimpleNamingContextBuilder is active.
	 * @return the current SimpleNamingContextBuilder instance,
	 * or {@code null} if none
	 */
	public static SimpleNamingContextBuilder getCurrentContextBuilder() {
		return activated;
	}

	/**
	 * If no SimpleNamingContextBuilder is already configuring JNDI,
	 * create and activate one. Otherwise take the existing activated
	 * SimpleNamingContextBuilder, clear it and return it.
	 * <p>This is mainly intended for test suites that want to
	 * reinitialize JNDI bindings from scratch repeatedly.
	 * @return an empty SimpleNamingContextBuilder that can be used
	 * to control JNDI bindings
	 */
	public static SimpleNamingContextBuilder emptyActivatedContextBuilder() throws NamingException {
		if (activated != null) {
			// Clear already activated context builder.
			activated.clear();
		}
		else {
			// Create and activate new context builder.
			SimpleNamingContextBuilder builder = new SimpleNamingContextBuilder();
			// The activate() call will cause an assignment to the activated variable.
			builder.activate();
		}
		return activated;
	}


	private final Log logger = LogFactory.getLog(getClass());

	// Objects exposed by the contexts this builder creates, keyed by JNDI name.
	// Hashtable (not HashMap) so it can be handed to JNDI APIs and used concurrently.
	private final Hashtable<String,Object> boundObjects = new Hashtable<>();


	/**
	 * Register the context builder by registering it with the JNDI NamingManager.
	 * Note that once this has been done, {@code new InitialContext()} will always
	 * return a context from this factory. Use the {@code emptyActivatedContextBuilder()}
	 * static method to get an empty context (for example, in test methods).
	 * @throws IllegalStateException if there's already a naming context builder
	 * registered with the JNDI NamingManager
	 */
	public void activate() throws IllegalStateException, NamingException {
		logger.info("Activating simple JNDI environment");
		synchronized (initializationLock) {
			if (!initialized) {
				// Register with JNDI exactly once per JVM; NamingManager rejects
				// a second registration, so fail fast with a descriptive message.
				Assert.state(!NamingManager.hasInitialContextFactoryBuilder(),
						"Cannot activate SimpleNamingContextBuilder: there is already a JNDI provider registered. " +
						"Note that JNDI is a JVM-wide service, shared at the JVM system class loader level, " +
						"with no reset option. As a consequence, a JNDI provider must only be registered once per JVM.");
				NamingManager.setInitialContextFactoryBuilder(this);
				initialized = true;
			}
		}
		// Expose this builder's bindings; assigned after registration so that
		// createInitialContextFactory sees a fully registered builder.
		activated = this;
	}

	/**
	 * Temporarily deactivate this context builder. It will remain registered with
	 * the JNDI NamingManager but will delegate to the standard JNDI InitialContextFactory
	 * (if configured) instead of exposing its own bound objects.
	 * <p>Call {@code activate()} again in order to expose this context builder's own
	 * bound objects again. Such activate/deactivate sequences can be applied any number
	 * of times (e.g. within a larger integration test suite running in the same VM).
	 * @see #activate()
	 */
	public void deactivate() {
		logger.info("Deactivating simple JNDI environment");
		activated = null;
	}

	/**
	 * Clear all bindings in this context builder, while keeping it active.
	 */
	public void clear() {
		this.boundObjects.clear();
	}

	/**
	 * Bind the given object under the given name, for all naming contexts
	 * that this context builder will generate.
	 * @param name the JNDI name of the object (e.g. "java:comp/env/jdbc/myds")
	 * @param obj the object to bind (e.g. a DataSource implementation)
	 */
	public void bind(String name, Object obj) {
		if (logger.isInfoEnabled()) {
			logger.info("Static JNDI binding: [" + name + "] = [" + obj + "]");
		}
		this.boundObjects.put(name, obj);
	}


	/**
	 * Simple InitialContextFactoryBuilder implementation,
	 * creating a new SimpleNamingContext instance.
	 * <p>When this builder is deactivated, delegates to the InitialContextFactory
	 * named in the environment (if any) instead of exposing its own bindings.
	 * @see SimpleNamingContext
	 */
	@Override
	public InitialContextFactory createInitialContextFactory(Hashtable<?,?> environment) {
		if (activated == null && environment != null) {
			// Deactivated: honor a standard JNDI InitialContextFactory, if specified.
			Object icf = environment.get(Context.INITIAL_CONTEXT_FACTORY);
			if (icf != null) {
				Class<?> icfClass;
				if (icf instanceof Class) {
					icfClass = (Class<?>) icf;
				}
				else if (icf instanceof String) {
					icfClass = ClassUtils.resolveClassName((String) icf, getClass().getClassLoader());
				}
				else {
					throw new IllegalArgumentException("Invalid value type for environment key [" +
							Context.INITIAL_CONTEXT_FACTORY + "]: " + icf.getClass().getName());
				}
				if (!InitialContextFactory.class.isAssignableFrom(icfClass)) {
					throw new IllegalArgumentException(
							"Specified class does not implement [" + InitialContextFactory.class.getName() + "]: " + icf);
				}
				try {
					// Instantiate via the (made-accessible) default constructor.
					return (InitialContextFactory) ReflectionUtils.accessibleConstructor(icfClass).newInstance();
				}
				catch (Throwable ex) {
					throw new IllegalStateException("Unable to instantiate specified InitialContextFactory: " + icf, ex);
				}
			}
		}

		// Default case: expose this builder's bound objects through a fresh context.
		return new InitialContextFactory() {
			@Override
			@SuppressWarnings("unchecked")
			public Context getInitialContext(Hashtable<?,?> environment) {
				return new SimpleNamingContext("", boundObjects, (Hashtable<String, Object>) environment);
			}
		};
	}

}
/* * Copyright 2010 Ning, Inc. * * Ning licenses this file to you under the Apache License, version 2.0 * (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.asynchttpclient.async; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNull; import static org.testng.Assert.assertTrue; import org.asynchttpclient.FluentStringsMap; import org.testng.annotations.Test; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.Map; public class FluentStringsMapTest { @Test public void emptyTest() { FluentStringsMap map = new FluentStringsMap(); assertTrue(map.keySet().isEmpty()); } @Test public void normalTest() { FluentStringsMap map = new FluentStringsMap(); map.add("fOO", "bAr"); map.add("Baz", Arrays.asList("fOo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("fOO", "Baz"))); assertEquals(map.getFirstValue("fOO"), "bAr"); assertEquals(map.getJoinedValue("fOO", ", "), "bAr"); assertEquals(map.get("fOO"), Arrays.asList("bAr")); assertNull(map.getFirstValue("foo")); assertNull(map.getJoinedValue("foo", ", ")); assertNull(map.get("foo")); assertEquals(map.getFirstValue("Baz"), "fOo"); assertEquals(map.getJoinedValue("Baz", ", "), "fOo, bar"); assertEquals(map.get("Baz"), Arrays.asList("fOo", "bar")); assertNull(map.getFirstValue("baz")); assertNull(map.getJoinedValue("baz", ", ")); assertNull(map.get("baz")); } @Test public void addNullTest() { FluentStringsMap map = 
new FluentStringsMap(); map.add("fOO", "bAr"); map.add(null, Arrays.asList("fOo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("fOO"))); assertEquals(map.getFirstValue("fOO"), "bAr"); assertEquals(map.getJoinedValue("fOO", ", "), "bAr"); assertEquals(map.get("fOO"), Arrays.asList("bAr")); assertNull(map.getFirstValue("foo")); assertNull(map.getJoinedValue("foo", ", ")); assertNull(map.get("foo")); assertNull(map.getFirstValue(null)); assertNull(map.getJoinedValue("Baz", ", ")); assertNull(map.get(null)); } @Test public void sameKeyMultipleTimesTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "baz,foo"); map.add("foo", Arrays.asList("bar")); map.add("foo", "bla", "blubb"); map.add("fOO", "duh"); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "fOO"))); assertEquals(map.getFirstValue("foo"), "baz,foo"); assertEquals(map.getJoinedValue("foo", ", "), "baz,foo, bar, bla, blubb"); assertEquals(map.get("foo"), Arrays.asList("baz,foo", "bar", "bla", "blubb")); assertEquals(map.getFirstValue("fOO"), "duh"); assertEquals(map.getJoinedValue("fOO", ", "), "duh"); assertEquals(map.get("fOO"), Arrays.asList("duh")); } @Test public void emptyValueTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", ""); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo"))); assertEquals(map.getFirstValue("foo"), ""); assertEquals(map.getJoinedValue("foo", ", "), ""); assertEquals(map.get("foo"), Arrays.asList("")); } @Test public void nullValueTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", (String) null); assertEquals(map.getFirstValue("foo"), null); assertEquals(map.getJoinedValue("foo", ", "), null); assertEquals(map.get("foo").size(), 1); } @Test public void mapConstructorTest() { Map<String, Collection<String>> headerMap = new LinkedHashMap<>(); headerMap.put("foo", Arrays.asList("baz,foo")); headerMap.put("baz", Arrays.asList("bar")); headerMap.put("bar", 
Arrays.asList("bla", "blubb")); FluentStringsMap map = new FluentStringsMap(headerMap); headerMap.remove("foo"); headerMap.remove("bar"); headerMap.remove("baz"); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz", "bar"))); assertEquals(map.getFirstValue("foo"), "baz,foo"); assertEquals(map.getJoinedValue("foo", ", "), "baz,foo"); assertEquals(map.get("foo"), Arrays.asList("baz,foo")); assertEquals(map.getFirstValue("baz"), "bar"); assertEquals(map.getJoinedValue("baz", ", "), "bar"); assertEquals(map.get("baz"), Arrays.asList("bar")); assertEquals(map.getFirstValue("bar"), "bla"); assertEquals(map.getJoinedValue("bar", ", "), "bla, blubb"); assertEquals(map.get("bar"), Arrays.asList("bla", "blubb")); } @Test public void mapConstructorNullTest() { FluentStringsMap map = new FluentStringsMap((Map<String, Collection<String>>) null); assertEquals(map.keySet().size(), 0); } @Test public void copyConstructorTest() { FluentStringsMap srcHeaders = new FluentStringsMap(); srcHeaders.add("foo", "baz,foo"); srcHeaders.add("baz", Arrays.asList("bar")); srcHeaders.add("bar", "bla", "blubb"); FluentStringsMap map = new FluentStringsMap(srcHeaders); srcHeaders.delete("foo"); srcHeaders.delete("bar"); srcHeaders.delete("baz"); assertTrue(srcHeaders.keySet().isEmpty()); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz", "bar"))); assertEquals(map.getFirstValue("foo"), "baz,foo"); assertEquals(map.getJoinedValue("foo", ", "), "baz,foo"); assertEquals(map.get("foo"), Arrays.asList("baz,foo")); assertEquals(map.getFirstValue("baz"), "bar"); assertEquals(map.getJoinedValue("baz", ", "), "bar"); assertEquals(map.get("baz"), Arrays.asList("bar")); assertEquals(map.getFirstValue("bar"), "bla"); assertEquals(map.getJoinedValue("bar", ", "), "bla, blubb"); assertEquals(map.get("bar"), Arrays.asList("bla", "blubb")); } @Test public void copyConstructorNullTest() { FluentStringsMap map = new FluentStringsMap((FluentStringsMap) null); 
assertEquals(map.keySet().size(), 0); } @Test public void deleteTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.delete("baz"); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertNull(map.getFirstValue("baz")); assertNull(map.getJoinedValue("baz", ", ")); assertNull(map.get("baz")); } @Test public void deleteTestDifferentCase() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.delete("bAz"); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); } @Test public void deleteUndefinedKeyTest() { FluentStringsMap map = new 
FluentStringsMap(); map.add("foo", "bar"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.delete("bar"); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); } @Test public void deleteNullTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.delete(null); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); } @Test public void deleteAllArrayTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("baz", 
Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.deleteAll("baz", "Boo"); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertNull(map.getFirstValue("baz")); assertNull(map.getJoinedValue("baz", ", ")); assertNull(map.get("baz")); } @Test public void deleteAllArrayDifferentCaseTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.deleteAll("Foo", "baz"); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertNull(map.getFirstValue("baz")); assertNull(map.getJoinedValue("baz", ", ")); assertNull(map.get("baz")); } @Test public void deleteAllCollectionTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); 
assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.deleteAll(Arrays.asList("baz", "foo")); assertEquals(map.keySet(), Collections.<String> emptyList()); assertNull(map.getFirstValue("foo")); assertNull(map.getJoinedValue("foo", ", ")); assertNull(map.get("foo")); assertNull(map.getFirstValue("baz")); assertNull(map.getJoinedValue("baz", ", ")); assertNull(map.get("baz")); } @Test public void deleteAllCollectionDifferentCaseTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.deleteAll(Arrays.asList("bAz", "fOO")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); } @Test public void deleteAllNullArrayTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); 
assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.deleteAll((String[]) null); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); } @Test public void deleteAllNullCollectionTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.deleteAll((Collection<String>) null); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); } @Test public void replaceArrayTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); 
assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.replaceWith("foo", "blub", "bla"); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "blub"); assertEquals(map.getJoinedValue("foo", ", "), "blub, bla"); assertEquals(map.get("foo"), Arrays.asList("blub", "bla")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); } @Test public void replaceCollectionTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.replaceWith("foo", Arrays.asList("blub", "bla")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "blub"); assertEquals(map.getJoinedValue("foo", ", "), "blub, bla"); assertEquals(map.get("foo"), Arrays.asList("blub", "bla")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); } @Test public void replaceDifferentCaseTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); 
assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.replaceWith("Foo", Arrays.asList("blub", "bla")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz", "Foo"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); assertEquals(map.getFirstValue("Foo"), "blub"); assertEquals(map.getJoinedValue("Foo", ", "), "blub, bla"); assertEquals(map.get("Foo"), Arrays.asList("blub", "bla")); } @Test public void replaceUndefinedTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.replaceWith("bar", Arrays.asList("blub")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz", "bar"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); assertEquals(map.getFirstValue("bar"), "blub"); assertEquals(map.getJoinedValue("bar", ", "), 
"blub"); assertEquals(map.get("bar"), Arrays.asList("blub")); } @Test public void replaceNullTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.replaceWith(null, Arrays.asList("blub")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); } @Test public void replaceValueWithNullTest() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.replaceWith("baz", (Collection<String>) null); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertNull(map.getFirstValue("baz")); assertNull(map.getJoinedValue("baz", ", ")); 
assertNull(map.get("baz")); } @Test public void replaceAllMapTest1() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("bar", "foo, bar", "baz"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "bar", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("bar"), "foo, bar"); assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz"); assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.replaceAll(new FluentStringsMap().add("bar", "baz").add("Foo", "blub", "bla")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "bar", "baz", "Foo"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("bar"), "baz"); assertEquals(map.getJoinedValue("bar", ", "), "baz"); assertEquals(map.get("bar"), Arrays.asList("baz")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); assertEquals(map.getFirstValue("Foo"), "blub"); assertEquals(map.getJoinedValue("Foo", ", "), "blub, bla"); assertEquals(map.get("Foo"), Arrays.asList("blub", "bla")); } @Test public void replaceAllTest2() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("bar", "foo, bar", "baz"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "bar", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); 
assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("bar"), "foo, bar"); assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz"); assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); LinkedHashMap<String, Collection<String>> newValues = new LinkedHashMap<>(); newValues.put("bar", Arrays.asList("baz")); newValues.put("foo", null); map.replaceAll(newValues); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("bar", "baz"))); assertNull(map.getFirstValue("foo")); assertNull(map.getJoinedValue("foo", ", ")); assertNull(map.get("foo")); assertEquals(map.getFirstValue("bar"), "baz"); assertEquals(map.getJoinedValue("bar", ", "), "baz"); assertEquals(map.get("bar"), Arrays.asList("baz")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); } @Test public void replaceAllNullTest1() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("bar", "foo, bar", "baz"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "bar", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("bar"), "foo, bar"); assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz"); assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.replaceAll((FluentStringsMap) null); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "bar", "baz"))); 
assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("bar"), "foo, bar"); assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz"); assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); } @Test public void replaceAllNullTest2() { FluentStringsMap map = new FluentStringsMap(); map.add("foo", "bar"); map.add("bar", "foo, bar", "baz"); map.add("baz", Arrays.asList("foo", "bar")); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "bar", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("bar"), "foo, bar"); assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz"); assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); map.replaceAll((Map<String, Collection<String>>) null); assertEquals(map.keySet(), new LinkedHashSet<>(Arrays.asList("foo", "bar", "baz"))); assertEquals(map.getFirstValue("foo"), "bar"); assertEquals(map.getJoinedValue("foo", ", "), "bar"); assertEquals(map.get("foo"), Arrays.asList("bar")); assertEquals(map.getFirstValue("bar"), "foo, bar"); assertEquals(map.getJoinedValue("bar", ", "), "foo, bar, baz"); assertEquals(map.get("bar"), Arrays.asList("foo, bar", "baz")); assertEquals(map.getFirstValue("baz"), "foo"); assertEquals(map.getJoinedValue("baz", ", "), "foo, bar"); assertEquals(map.get("baz"), Arrays.asList("foo", "bar")); } }
/* * Copyright (C) 2006 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.util; import com.android.internal.util.ArrayUtils; /** * SparseBooleanArrays map integers to booleans. * Unlike a normal array of booleans * there can be gaps in the indices. It is intended to be more efficient * than using a HashMap to map Integers to Booleans. */ public class SparseBooleanArray implements Cloneable { /** * Creates a new SparseBooleanArray containing no mappings. */ public SparseBooleanArray() { this(10); } /** * Creates a new SparseBooleanArray containing no mappings that will not * require any additional memory allocation to store the specified * number of mappings. */ public SparseBooleanArray(int initialCapacity) { initialCapacity = ArrayUtils.idealIntArraySize(initialCapacity); mKeys = new int[initialCapacity]; mValues = new boolean[initialCapacity]; mSize = 0; } @Override public SparseBooleanArray clone() { SparseBooleanArray clone = null; try { clone = (SparseBooleanArray) super.clone(); clone.mKeys = mKeys.clone(); clone.mValues = mValues.clone(); } catch (CloneNotSupportedException cnse) { /* ignore */ } return clone; } /** * Gets the boolean mapped from the specified key, or <code>false</code> * if no such mapping has been made. */ public boolean get(int key) { return get(key, false); } /** * Gets the boolean mapped from the specified key, or the specified value * if no such mapping has been made. 
*/ public boolean get(int key, boolean valueIfKeyNotFound) { int i = binarySearch(mKeys, 0, mSize, key); if (i < 0) { return valueIfKeyNotFound; } else { return mValues[i]; } } /** * Removes the mapping from the specified key, if there was any. */ public void delete(int key) { int i = binarySearch(mKeys, 0, mSize, key); if (i >= 0) { System.arraycopy(mKeys, i + 1, mKeys, i, mSize - (i + 1)); System.arraycopy(mValues, i + 1, mValues, i, mSize - (i + 1)); mSize--; } } /** * Adds a mapping from the specified key to the specified value, * replacing the previous mapping from the specified key if there * was one. */ public void put(int key, boolean value) { int i = binarySearch(mKeys, 0, mSize, key); if (i >= 0) { mValues[i] = value; } else { i = ~i; if (mSize >= mKeys.length) { int n = ArrayUtils.idealIntArraySize(mSize + 1); int[] nkeys = new int[n]; boolean[] nvalues = new boolean[n]; // Log.e("SparseBooleanArray", "grow " + mKeys.length + " to " + n); System.arraycopy(mKeys, 0, nkeys, 0, mKeys.length); System.arraycopy(mValues, 0, nvalues, 0, mValues.length); mKeys = nkeys; mValues = nvalues; } if (mSize - i != 0) { // Log.e("SparseBooleanArray", "move " + (mSize - i)); System.arraycopy(mKeys, i, mKeys, i + 1, mSize - i); System.arraycopy(mValues, i, mValues, i + 1, mSize - i); } mKeys[i] = key; mValues[i] = value; mSize++; } } /** * Returns the number of key-value mappings that this SparseBooleanArray * currently stores. */ public int size() { return mSize; } /** * Given an index in the range <code>0...size()-1</code>, returns * the key from the <code>index</code>th key-value mapping that this * SparseBooleanArray stores. */ public int keyAt(int index) { return mKeys[index]; } /** * Given an index in the range <code>0...size()-1</code>, returns * the value from the <code>index</code>th key-value mapping that this * SparseBooleanArray stores. 
*/ public boolean valueAt(int index) { return mValues[index]; } /** * Returns the index for which {@link #keyAt} would return the * specified key, or a negative number if the specified * key is not mapped. */ public int indexOfKey(int key) { return binarySearch(mKeys, 0, mSize, key); } /** * Returns an index for which {@link #valueAt} would return the * specified key, or a negative number if no keys map to the * specified value. * Beware that this is a linear search, unlike lookups by key, * and that multiple keys can map to the same value and this will * find only one of them. */ public int indexOfValue(boolean value) { for (int i = 0; i < mSize; i++) if (mValues[i] == value) return i; return -1; } /** * Removes all key-value mappings from this SparseBooleanArray. */ public void clear() { mSize = 0; } /** * Puts a key/value pair into the array, optimizing for the case where * the key is greater than all existing keys in the array. */ public void append(int key, boolean value) { if (mSize != 0 && key <= mKeys[mSize - 1]) { put(key, value); return; } int pos = mSize; if (pos >= mKeys.length) { int n = ArrayUtils.idealIntArraySize(pos + 1); int[] nkeys = new int[n]; boolean[] nvalues = new boolean[n]; // Log.e("SparseBooleanArray", "grow " + mKeys.length + " to " + n); System.arraycopy(mKeys, 0, nkeys, 0, mKeys.length); System.arraycopy(mValues, 0, nvalues, 0, mValues.length); mKeys = nkeys; mValues = nvalues; } mKeys[pos] = key; mValues[pos] = value; mSize = pos + 1; } private static int binarySearch(int[] a, int start, int len, int key) { int high = start + len, low = start - 1, guess; while (high - low > 1) { guess = (high + low) / 2; if (a[guess] < key) low = guess; else high = guess; } if (high == start + len) return ~(start + len); else if (a[high] == key) return high; else return ~high; } private int[] mKeys; private boolean[] mValues; private int mSize; }
/*
 * Copyright DataStax, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.datastax.oss.driver.examples.datatypes;

import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.cql.BoundStatement;
import com.datastax.oss.driver.api.core.cql.PreparedStatement;
import com.datastax.oss.driver.api.core.cql.Row;
import com.datastax.oss.driver.api.core.cql.SimpleStatement;
import com.datastax.oss.driver.api.core.data.UdtValue;
import com.datastax.oss.driver.api.core.type.UserDefinedType;
import com.datastax.oss.driver.api.core.type.codec.MappingCodec;
import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry;
import com.datastax.oss.driver.api.core.type.reflect.GenericType;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.Objects;

/**
 * Inserts and retrieves values in columns of user-defined types.
 *
 * <p>By default, the Java driver maps user-defined types to {@link UdtValue}. This example goes
 * beyond that and shows how to map user-defined types to arbitrary Java types, leveraging the
 * special {@link MappingCodec}.
 *
 * <p>A simpler example of usage of user-defined types can be found in {@link
 * UserDefinedTypesSimple}.
 *
 * <p>Preconditions:
 *
 * <ul>
 *   <li>An Apache Cassandra(R) cluster is running and accessible through the contacts points
 *       identified by basic.contact-points (see application.conf).
 * </ul>
 *
 * <p>Side effects:
 *
 * <ul>
 *   <li>creates a new keyspace "examples" in the cluster. If a keyspace with this name already
 *       exists, it will be reused;
 *   <li>creates a table "examples.udts". If it already exists, it will be reused;
 *   <li>inserts data in the table.
 * </ul>
 *
 * @see UserDefinedTypesSimple
 * @see MappingCodec
 * @see <a
 *     href="https://docs.datastax.com/en/developer/java-driver/latest/manual/core/custom_codecs/">driver
 *     documentation on custom codecs</a>
 */
public class UserDefinedTypesMapped {

  /** The Java Pojo that will be mapped to the user-defined type "coordinates". */
  public static class Coordinates {

    private final int x;
    private final int y;

    public Coordinates(int x, int y) {
      this.x = x;
      this.y = y;
    }

    @Override
    public boolean equals(Object o) {
      // Guard clauses instead of an if/else-if chain; same contract.
      if (o == this) {
        return true;
      }
      if (!(o instanceof Coordinates)) {
        return false;
      }
      Coordinates other = (Coordinates) o;
      return x == other.x && y == other.y;
    }

    @Override
    public int hashCode() {
      return Objects.hash(x, y);
    }

    @Override
    public String toString() {
      return "(" + x + ',' + y + ')';
    }
  }

  /** The custom codec that will convert to and from {@link Coordinates}. */
  public static class CoordinatesCodec extends MappingCodec<UdtValue, Coordinates> {

    public CoordinatesCodec(@NonNull TypeCodec<UdtValue> innerCodec) {
      super(innerCodec, GenericType.of(Coordinates.class));
    }

    @NonNull
    @Override
    public UserDefinedType getCqlType() {
      // The inner codec's CQL type is the "coordinates" user-defined type.
      return (UserDefinedType) super.getCqlType();
    }

    @Nullable
    @Override
    protected Coordinates innerToOuter(@Nullable UdtValue value) {
      if (value == null) {
        return null;
      }
      return new Coordinates(value.getInt("x"), value.getInt("y"));
    }

    @Nullable
    @Override
    protected UdtValue outerToInner(@Nullable Coordinates value) {
      if (value == null) {
        return null;
      }
      return getCqlType().newValue().setInt("x", value.x).setInt("y", value.y);
    }
  }

  public static void main(String[] args) {
    try (CqlSession session = CqlSession.builder().build()) {
      createSchema(session);
      registerCoordinatesCodec(session);
      insertData(session);
      retrieveData(session);
    }
  }

  /** Creates the keyspace, the "coordinates" UDT and the "udts" table (all idempotent). */
  private static void createSchema(CqlSession session) {
    session.execute(
        "CREATE KEYSPACE IF NOT EXISTS examples "
            + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}");
    session.execute("CREATE TYPE IF NOT EXISTS examples.coordinates(x int, y int)");
    session.execute("CREATE TABLE IF NOT EXISTS examples.udts(k int PRIMARY KEY, c coordinates)");
  }

  /** Wraps the driver's built-in UdtValue codec in a CoordinatesCodec and registers it. */
  private static void registerCoordinatesCodec(CqlSession session) {
    // the session's registry is mutable, so new codecs can be added after startup
    MutableCodecRegistry registry =
        (MutableCodecRegistry) session.getContext().getCodecRegistry();
    // the user-defined type metadata, fetched from the cluster schema
    UserDefinedType coordinatesType = retrieveCoordinatesType(session);
    // the driver's built-in codec for the "coordinates" user-defined type
    TypeCodec<UdtValue> udtCodec = registry.codecFor(coordinatesType);
    // register a custom codec that maps the UDT to the Coordinates class
    registry.register(new CoordinatesCodec(udtCodec));
  }

  /** Inserts two rows, demonstrating both ways of binding a mapped UDT value. */
  private static void insertData(CqlSession session) {
    PreparedStatement prepared =
        session.prepare("INSERT INTO examples.udts (k, c) VALUES (?, ?)");

    // first row: bind all parameters in one pass
    Coordinates first = new Coordinates(12, 34);
    session.execute(prepared.bind(1, first));

    // second row: bind the parameters one by one
    Coordinates second = new Coordinates(56, 78);
    BoundStatement bound = prepared.bind().setInt("k", 2).set("c", second, Coordinates.class);
    session.execute(bound);
  }

  /** Reads both rows back and prints the mapped Coordinates values. */
  private static void retrieveData(CqlSession session) {
    for (int k = 1; k <= 2; k++) {
      SimpleStatement query =
          SimpleStatement.newInstance("SELECT c FROM examples.udts WHERE k = ?", k);
      Row row = session.execute(query).one();
      assert row != null;

      // the registered codec converts column c straight to Coordinates
      Coordinates coordinates = row.get("c", Coordinates.class);
      assert coordinates != null;

      System.out.println("found coordinate: " + coordinates);
    }
  }

  /** Looks up the "coordinates" UDT metadata in the "examples" keyspace. */
  private static UserDefinedType retrieveCoordinatesType(CqlSession session) {
    return session
        .getMetadata()
        .getKeyspace("examples")
        .flatMap(ks -> ks.getUserDefinedType("coordinates"))
        .orElseThrow(IllegalStateException::new);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.logging;

import static java.lang.System.lineSeparator;
import static org.apache.geode.logging.internal.spi.LogWriterLevel.ALL;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.Date;
import java.util.StringTokenizer;

import org.apache.geode.LogWriter;
import org.apache.geode.internal.ExitCode;

/**
 * Parses a log file written by a {@link LogWriter} into {@link LogFileParser.LogEntry}s. It
 * behaves sort of like an {@link StringTokenizer}: callers repeatedly invoke
 * {@link #getNextEntry()} while {@link #hasMoreEntries()} is {@code true}. The parser is
 * stateful and single-use per reader; it is not thread-safe.
 *
 * @since GemFire 3.0
 */
public class LogFileParser {

  // Behavior toggles read from system properties (all default to false).
  /** When set, strip the date and time-zone portions out of entry headers. */
  private static final boolean TRIM_TIMESTAMPS = Boolean.getBoolean("mergelogs.TRIM_TIMESTAMPS");
  /** When set, break the line after the "[... tid=...]" header of each entry. */
  private static final boolean NEWLINE_AFTER_HEADER =
      Boolean.getBoolean("mergelogs.NEWLINE_AFTER_HEADER");
  /** When set (with TRIM_TIMESTAMPS), also trim member names out of headers. */
  private static final boolean TRIM_NAMES = Boolean.getBoolean("mergelogs.TRIM_NAMES");

  /** Text that signifies the start of a JRockit-style thread dump */
  private static final String FULL_THREAD_DUMP = "===== FULL THREAD DUMP ===============";

  /** The name of the log file being parsed */
  private final String logFileName;

  /** The name of the log file plus a colon and space */
  private final String extLogFileName;

  /** The buffer to read the log file from */
  private final BufferedReader br;

  /** Are there more entries to parser? */
  private boolean hasMoreEntries;

  /** The timestamp of the entry being parsed */
  private String timestamp;

  /** StringBuffer containing the text of the entry we're parsing */
  private StringBuffer sb;

  /** whether we're still reading the first line of the first entry */
  private boolean firstEntry = true;

  /**
   * StringBuffer containing white space that is the same length as logFileName plus ": ", in a
   * monospace font when tabs are 8 chars long
   */
  private final StringBuffer whiteFileName;

  /** whether to suppress blank lines in output */
  private final boolean suppressBlanks;

  /**
   * Creates a new {@code LogFileParser} that reads a log from a given
   * {@code BufferedReader}. Blanks are not suppressed, and non-timestamped lines are emitted
   * as-is.
   *
   * @param logFileName The name of the log file being parsed. This is appended to the entry. If
   *        {@code logFileName} is {@code null} nothing will be appended.
   * @param br Where to read the log from
   */
  public LogFileParser(final String logFileName, final BufferedReader br) {
    this(logFileName, br, false, false);
  }

  /**
   * Creates a new {@code LogFileParser} that reads a log from a given
   * {@code BufferedReader}.
   *
   * @param logFileName The name of the log file being parsed. This is appended to the entry. If
   *        {@code logFileName} is {@code null} nothing will be appended.
   * @param br Where to read the log from
   * @param tabOut Whether to add white-space to non-timestamped lines to align them with lines
   *        containing file names.
   * @param suppressBlanks whether to suppress blank lines
   */
  public LogFileParser(final String logFileName, final BufferedReader br, final boolean tabOut,
      final boolean suppressBlanks) {
    this.logFileName = logFileName;
    this.br = br;
    hasMoreEntries = true;
    timestamp = null;
    sb = new StringBuffer();
    this.suppressBlanks = suppressBlanks;
    whiteFileName = new StringBuffer();
    if (tabOut) {
      // Build padding the same display width as "logFileName: ", using tabs
      // (assumed 8 columns wide) followed by spaces for the remainder.
      int numTabs = (logFileName.length() + 2) / 8;
      for (int i = 0; i < numTabs; i++) {
        whiteFileName.append('\t');
      }
      for (int i = (logFileName.length() + 2) % 8; i > 0; i--) {
        whiteFileName.append(' ');
      }
    }
    if (this.logFileName != null) {
      extLogFileName = this.logFileName + ": ";
    } else {
      extLogFileName = null;
    }
  }

  /**
   * Returns whether or not there are any more entries in the file to be parser.
   */
  public boolean hasMoreEntries() {
    return hasMoreEntries;
  }

  /**
   * copy the timestamp out of a log entry, if there is one, and return it. if there isn't a
   * timestamp, return null
   */
  private String getTimestamp(final String line) {
    int llen = line.length();
    String result = null;
    if (llen > 10) {
      // first see if the start of the line is a timestamp, as in a thread-dump's stamp
      // (matches "20??-??-??..." and normalizes '-' separators to '/')
      if (line.charAt(0) == '2' && line.charAt(1) == '0' && line.charAt(4) == '-'
          && line.charAt(7) == '-') {
        return line.substring(0, 19).replace('-', '/');
      }
      // now look for gemfire's log format: "[<level> <timestamp> ...".
      // The char-by-char comparisons match the first 3 letters of the level names
      // info/fine(st)/warning/debug/trace/severe/config/error, plus "secur(ity)".
      if (line.charAt(0) == '[') {
        if (line.charAt(1) == 'i' && line.charAt(2) == 'n' && line.charAt(3) == 'f'
            || line.charAt(1) == 'f' && line.charAt(2) == 'i' && line.charAt(3) == 'n'
            || line.charAt(1) == 'w' && line.charAt(2) == 'a' && line.charAt(3) == 'r'
            || line.charAt(1) == 'd' && line.charAt(2) == 'e' && line.charAt(3) == 'b'
            || line.charAt(1) == 't' && line.charAt(2) == 'r' && line.charAt(3) == 'a'
            || line.charAt(1) == 's' && line.charAt(2) == 'e' && line.charAt(3) == 'v'
            || line.charAt(1) == 'c' && line.charAt(2) == 'o' && line.charAt(3) == 'n'
            || line.charAt(1) == 'e' && line.charAt(2) == 'r' && line.charAt(3) == 'r'
            || line.charAt(1) == 's' && line.charAt(2) == 'e' && line.charAt(3) == 'c'
                && line.charAt(4) == 'u' && line.charAt(5) == 'r') {
          // skip past the level name to the first space
          int sidx = 4;
          while (sidx < llen && line.charAt(sidx) != ' ') {
            sidx++;
          }
          // the timestamp is taken as the 24 characters after that space
          int endIdx = sidx + 24;
          if (endIdx < llen) {
            result = line.substring(sidx + 1, endIdx + 1);
          }
        }
      }
    }
    return result;
  }

  /**
   * Returns the next entry in the log file. The last entry will be an instance of
   * {@link LogFileParser.LastLogEntry}.
   *
   * <p>Lines are accumulated into {@code sb} until the next timestamped line (or JRockit
   * thread-dump header) is seen, at which point the accumulated text is returned as a
   * {@code LogEntry} and accumulation restarts. State ({@code timestamp}, {@code sb},
   * {@code firstEntry}) carries over between calls.
   */
  public LogEntry getNextEntry() throws IOException {
    LogEntry entry = null;

    while (br.ready()) {
      String lineStr = br.readLine();
      if (lineStr == null) {
        break;
      }
      int llen = lineStr.length();
      int lend = llen;
      if (suppressBlanks || firstEntry) {
        // trim the end of the line
        // NOTE(review): the bound is lend > 1, so a line consisting of a single
        // whitespace char is not trimmed to empty and is NOT skipped below —
        // presumably intentional (or long-standing); confirm before changing.
        while (lend > 1 && Character.isWhitespace(lineStr.charAt(lend - 1))) {
          lend--;
        }
        if (lend == 0) {
          continue;
        }
      }
      StringBuilder line = new StringBuilder(lineStr);
      if (lend != llen) {
        line.setLength(lend);
        llen = lend; // llen is not read again after this point in the loop
      }

      // Matcher matcher = pattern.matcher(line);
      String nextTimestamp = getTimestamp(lineStr);

      // See if we've found the beginning of a new log entry. If so, bundle
      // up the current string buffer and return it in a LogEntry representing
      // the currently parsed text
      if (nextTimestamp != null) {
        if (timestamp != null && TRIM_TIMESTAMPS) {
          int tsl = timestamp.length();
          if (tsl > 0) {
            // find where the year/mo/dy starts and delete it and the time zone.
            // The probe positions correspond to the differing level-name lengths
            // (info/fine vs finer/error vs finest/severe/config vs warning).
            int start = 5;
            if (line.charAt(start) != ' ') // info & fine
            {
              if (line.charAt(++start) != ' ') // finer & error
              {
                if (line.charAt(++start) != ' ') // finest, severe, config
                {
                  if (line.charAt(++start) != ' ') // warning
                  {
                    start = 0; // unrecognized level; leave the header untouched
                  }
                }
              }
            }
            if (start > 0) {
              line.delete(start + 25, start + 29); // time zone
              line.delete(start, start + 11); // date
              if (TRIM_NAMES) {
                // also drop the member name up to the "<...>" thread marker
                int idx2 = line.indexOf("<", +12);
                if (idx2 > start + 13) {
                  line.delete(start + 13, idx2 - 1);
                }
              }
            }
          }
          if (NEWLINE_AFTER_HEADER) {
            // break the line after the closing "]" that follows "tid="
            int idx = line.indexOf("tid=");
            if (idx > 0) {
              idx = line.indexOf("]", idx + 4);
              if (idx + 1 < line.length()) {
                line.insert(idx + 1, lineSeparator() + "  ");
              }
            }
          }
        }
        if (timestamp != null) {
          // We've accumulated a complete previous entry; hand it back after
          // this line has been appended to the fresh buffer below.
          entry = new LogEntry(timestamp, sb.toString(), suppressBlanks);
        }
        timestamp = nextTimestamp;
        if (!firstEntry) {
          sb = new StringBuffer(500);
        } else {
          firstEntry = false;
        }
        if (extLogFileName != null) {
          sb.append(extLogFileName);
        }

      } else if (line.indexOf(FULL_THREAD_DUMP) != -1) {
        // JRockit-style thread dumps have time stamps!
        // The dump header line is followed by a date line such as
        // "Mon Jan 01 12:00:00 2001".
        String dump = lineStr;
        lineStr = br.readLine();
        if (lineStr == null) {
          break;
        }

        DateFormat df = DateFormatter.createDateFormat("E MMM d HH:mm:ss yyyy");
        df.setLenient(true);
        try {
          Date date = df.parse(lineStr);

          if (timestamp != null) {
            // We've found the end of a log entry
            entry = new LogEntry(timestamp, sb.toString());
          }

          df = DateFormatter.createDateFormat();
          timestamp = df.format(date);
          lineStr = dump;

          sb = new StringBuffer();
          if (extLogFileName != null) {
            sb.append(extLogFileName);
          }
          sb.append("[dump ");
          sb.append(timestamp);
          sb.append("]").append(lineSeparator()).append(lineSeparator());
        } catch (ParseException ex) {
          // Oh well... not a parsable date line: keep the dump header as
          // ordinary entry text and fall through.
          sb.append(dump);
        }

      } else {
        // continuation line: pad to align under the "file: " prefix (no-op
        // unless tabOut was requested in the constructor)
        sb.append(whiteFileName);
      }

      sb.append(line);
      sb.append(lineSeparator());

      if (entry != null) {
        return entry;
      }
    }

    if (timestamp == null) {
      // The file didn't contain any log entries. Just use the
      // current time
      DateFormat df = DateFormatter.createDateFormat();
      // Date now = new Date();
      timestamp = df.format(new Date());

      StringWriter sw = new StringWriter();
      PrintWriter pw = new PrintWriter(sw, true);
      LocalLogWriter tempLogger = new LocalLogWriter(ALL.intLevel(), pw);
      tempLogger.info("MISSING TIME STAMP");
      pw.flush();

      // NOTE(review): extLogFileName may be null here (null logFileName ctor
      // arg), in which case insert(0, (String) null) prepends "null" — confirm
      // callers always pass a file name.
      sb.insert(0, lineSeparator() + lineSeparator());
      sb.insert(0, sw.toString().trim());
      sb.insert(0, extLogFileName);
    }

    // Place the final log entry
    entry = new LastLogEntry(timestamp, sb.toString());
    sb = null;
    hasMoreEntries = false;
    return entry;
  }

  /**
   * Main program that simply parses a log file and prints out the entries. It is used for testing
   * purposes.
   */
  public static void main(final String[] args) throws Exception {
    if (args.length < 1) {
      System.err.println("** Missing log file name");
      ExitCode.FATAL.doSystemExit();
    }

    String logFileName = args[0];

    try (FileReader fileReader = new FileReader(logFileName);
        BufferedReader br = new BufferedReader(fileReader)) {
      LogFileParser parser = new LogFileParser(logFileName, br, false, false);
      PrintWriter pw = new PrintWriter(System.out);

      while (parser.hasMoreEntries()) {
        LogEntry entry = parser.getNextEntry();
        entry.writeTo(pw);
      }
    }
  }

  /**
   * A parsed entry in a log file. Note that we maintain the entry's timestamp as a
   * {@code String}. {@link DateFormat#parse(String) Parsing} it was too
   * expensive.
   */
  static class LogEntry {
    /** Timestamp of the log entry */
    private final String timestamp;

    /** The contents of the log entry */
    private final String contents;

    /** whether extraneous blank lines are being suppressed */
    private boolean suppressBlanks;

    /**
     * Creates a new log entry with the given timestamp and contents
     */
    public LogEntry(final String timestamp, final String contents) {
      this.timestamp = timestamp;
      this.contents = contents;
    }

    /**
     * Creates a new log entry with the given timestamp and contents.
     * Unlike the two-arg constructor, this one trims the contents.
     */
    public LogEntry(final String timestamp, final String contents, final boolean suppressBlanks) {
      this.timestamp = timestamp;
      this.contents = contents.trim();
      this.suppressBlanks = suppressBlanks;
    }

    /**
     * Returns the timestamp of this log entry
     */
    public String getTimestamp() {
      return timestamp;
    }

    /**
     * Returns the contents of this log entry
     *
     * @see #writeTo
     */
    String getContents() {
      return contents;
    }

    /**
     * Writes the contents of this log entry to a {@code PrintWriter},
     * followed by a blank line unless blanks are suppressed.
     */
    public void writeTo(final PrintWriter pw) {
      pw.println(contents);
      if (!suppressBlanks) {
        pw.println("");
      }
      pw.flush();
    }

    /**
     * Is this entry the last log entry?
     */
    public boolean isLast() {
      return false;
    }
  }

  /**
   * The last log entry read from a log file. We use a separate class to avoid the overhead of an
   * extra {@code boolean} field in each {@link LogFileParser.LogEntry}.
   */
  static class LastLogEntry extends LogEntry {
    public LastLogEntry(final String timestamp, final String contents) {
      super(timestamp, contents);
    }

    @Override
    public boolean isLast() {
      return true;
    }
  }
}
package com.sequenceiq.cloudbreak.cloud.azure;

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;

import com.microsoft.azure.management.resources.Deployment;
import com.microsoft.azure.management.resources.DeploymentExportResult;
import com.sequenceiq.cloudbreak.cloud.azure.client.AzureClient;
import com.sequenceiq.cloudbreak.cloud.azure.connector.resource.AzureComputeResourceService;
import com.sequenceiq.cloudbreak.cloud.azure.image.marketplace.AzureMarketplaceImageProviderService;
import com.sequenceiq.cloudbreak.cloud.azure.validator.AzureImageFormatValidator;
import com.sequenceiq.cloudbreak.cloud.azure.view.AzureStackView;
import com.sequenceiq.cloudbreak.cloud.context.AuthenticatedContext;
import com.sequenceiq.cloudbreak.cloud.context.CloudContext;
import com.sequenceiq.cloudbreak.cloud.model.CloudCredential;
import com.sequenceiq.cloudbreak.cloud.model.CloudResource;
import com.sequenceiq.cloudbreak.cloud.model.CloudResourceStatus;
import com.sequenceiq.cloudbreak.cloud.model.CloudStack;
import com.sequenceiq.cloudbreak.cloud.model.Group;
import com.sequenceiq.cloudbreak.cloud.model.Image;
import com.sequenceiq.cloudbreak.cloud.model.Network;
import com.sequenceiq.cloudbreak.cloud.model.ResourceStatus;
import com.sequenceiq.cloudbreak.cloud.model.Subnet;
import com.sequenceiq.cloudbreak.cloud.notification.PersistenceNotifier;
import com.sequenceiq.cloudbreak.service.Retry;
import com.sequenceiq.common.api.adjustment.AdjustmentTypeWithThreshold;
import com.sequenceiq.common.api.type.AdjustmentType;

/**
 * Unit tests for {@code AzureResourceConnector}: template-deployment launch
 * paths (fresh, in-progress, finished), marketplace-image handling, load
 * balancer launch, and termination.
 */
@RunWith(MockitoJUnitRunner.class)
public class AzureResourceConnectorTest {

    private static final AdjustmentType ADJUSTMENT_TYPE = AdjustmentType.EXACT;

    private static final long THRESHOLD = 1;

    private static final String STACK_NAME = "someStackNameValue";

    private static final String RESOURCE_GROUP_NAME = "resourceGroupName";

    private static final String IMAGE_NAME = "image-name";

    @Mock
    private AuthenticatedContext ac;

    @Mock
    private CloudStack stack;

    @Mock
    private PersistenceNotifier notifier;

    @Mock
    private AzureClient client;

    @Mock
    private CloudContext cloudContext;

    @Mock
    private Deployment deployment;

    @Mock
    private AzureStackViewProvider azureStackViewProvider;

    @InjectMocks
    private AzureResourceConnector underTest;

    @Mock
    private AzureTemplateBuilder azureTemplateBuilder;

    @Mock
    private AzureUtils azureUtils;

    @Mock
    private AzureStorage azureStorage;

    @Mock
    private Retry retryService;

    @Mock
    private AzureResourceGroupMetadataProvider azureResourceGroupMetadataProvider;

    @Mock
    private AzureComputeResourceService azureComputeResourceService;

    @Mock
    private AzureCloudResourceService azureCloudResourceService;

    @Mock
    private AzureMarketplaceImageProviderService azureMarketplaceImageProviderService;

    @Mock
    private AzureImageFormatValidator azureImageFormatValidator;

    @Mock
    private AzureTerminationHelperService azureTerminationHelperService;

    private List<CloudResource> instances;

    private List<Group> groups;

    private Network network;

    private Image imageModel;

    /**
     * Common stubbing shared by all tests: a single-group stack with one cloud
     * resource, a custom image, and resource-group / stack-name lookups.
     */
    @Before
    public void setUp() {
        DeploymentExportResult deploymentExportResult = mock(DeploymentExportResult.class);
        Group group = mock(Group.class);
        AzureStackView azureStackView = mock(AzureStackView.class);
        groups = List.of(group);
        CloudResource cloudResource = mock(CloudResource.class);
        instances = List.of(cloudResource);
        network = new Network(new Subnet("0.0.0.0/16"));
        AzureImage image = new AzureImage("id", "name", true);
        imageModel = new Image(IMAGE_NAME, new HashMap<>(), "centos7", "redhat7", "", "default", "default-id", new HashMap<>());

        when(stack.getGroups()).thenReturn(groups);
        when(stack.getNetwork()).thenReturn(network);
        when(stack.getImage()).thenReturn(imageModel);
        when(ac.getCloudContext()).thenReturn(cloudContext);
        when(ac.getParameter(AzureClient.class)).thenReturn(client);
        when(ac.getCloudCredential()).thenReturn(new CloudCredential("aCredentialId", "aCredentialName", "account"));
        when(azureUtils.getStackName(cloudContext)).thenReturn(STACK_NAME);
        when(azureStorage.getCustomImage(any(), any(), any())).thenReturn(image);
        when(deployment.exportTemplate()).thenReturn(deploymentExportResult);
        when(azureResourceGroupMetadataProvider.getResourceGroupName(cloudContext, stack)).thenReturn(RESOURCE_GROUP_NAME);
        when(azureCloudResourceService.getDeploymentCloudResources(deployment)).thenReturn(instances);
        when(azureCloudResourceService.getInstanceCloudResources(STACK_NAME, instances, groups, RESOURCE_GROUP_NAME)).thenReturn(instances);
        when(azureStackViewProvider.getAzureStack(any(), eq(stack), eq(client), eq(ac))).thenReturn(azureStackView);
    }

    @Test
    public void testWhenTemplateDeploymentDoesNotExistThenComputeResourceServiceBuildsTheResources() {
        // No pre-existing deployment: a new template deployment must be created.
        when(client.templateDeploymentExists(RESOURCE_GROUP_NAME, STACK_NAME)).thenReturn(false);
        when(client.createTemplateDeployment(any(), any(), any(), any())).thenReturn(deployment);
        when(azureImageFormatValidator.isMarketplaceImageFormat(any())).thenReturn(false);
        AdjustmentTypeWithThreshold adjustmentTypeWithThreshold = new AdjustmentTypeWithThreshold(ADJUSTMENT_TYPE, THRESHOLD);

        underTest.launch(ac, stack, notifier, adjustmentTypeWithThreshold);

        verify(azureComputeResourceService, times(1)).buildComputeResourcesForLaunch(eq(ac), eq(stack),
                eq(adjustmentTypeWithThreshold), eq(instances), any());
        verify(azureCloudResourceService, times(1)).getInstanceCloudResources(STACK_NAME, instances, groups,
                RESOURCE_GROUP_NAME);
        verify(azureUtils, times(1)).getCustomNetworkId(network);
        verify(azureUtils, times(1)).getCustomSubnetIds(network);
        verify(azureMarketplaceImageProviderService, never()).get(imageModel);
    }

    @Test
    public void testWhenTemplateDeploymentExistsAndInProgressThenComputeResourceServiceBuildsTheResources() {
        // An in-progress deployment is reused; no new deployment may be created.
        when(client.templateDeploymentExists(RESOURCE_GROUP_NAME, STACK_NAME)).thenReturn(true);
        when(client.getTemplateDeployment(RESOURCE_GROUP_NAME, STACK_NAME)).thenReturn(deployment);
        when(client.getTemplateDeploymentStatus(RESOURCE_GROUP_NAME, STACK_NAME)).thenReturn(ResourceStatus.IN_PROGRESS);
        when(azureImageFormatValidator.isMarketplaceImageFormat(imageModel)).thenReturn(false);
        AdjustmentTypeWithThreshold adjustmentTypeWithThreshold = new AdjustmentTypeWithThreshold(ADJUSTMENT_TYPE, THRESHOLD);

        underTest.launch(ac, stack, notifier, adjustmentTypeWithThreshold);

        verify(azureComputeResourceService, times(1)).buildComputeResourcesForLaunch(any(AuthenticatedContext.class),
                any(CloudStack.class), eq(adjustmentTypeWithThreshold), any(), any());
        verify(azureCloudResourceService, times(1)).getInstanceCloudResources(STACK_NAME, instances, groups,
                RESOURCE_GROUP_NAME);
        verify(azureUtils, times(1)).getCustomNetworkId(network);
        verify(client, never()).createTemplateDeployment(any(), any(), any(), any());
        verify(client, times(2)).getTemplateDeployment(any(), any());
        verify(azureMarketplaceImageProviderService, never()).get(imageModel);
    }

    @Test
    public void testWhenTemplateDeploymentExistsAndFinishedThenComputeResourceServiceBuildsTheResources() {
        // A finished (CREATED) deployment triggers a fresh create on relaunch.
        when(client.templateDeploymentExists(RESOURCE_GROUP_NAME, STACK_NAME)).thenReturn(true);
        when(client.createTemplateDeployment(any(), any(), any(), any())).thenReturn(deployment);
        when(client.getTemplateDeploymentStatus(RESOURCE_GROUP_NAME, STACK_NAME)).thenReturn(ResourceStatus.CREATED);
        when(azureImageFormatValidator.isMarketplaceImageFormat(imageModel)).thenReturn(false);
        AdjustmentTypeWithThreshold adjustmentTypeWithThreshold = new AdjustmentTypeWithThreshold(ADJUSTMENT_TYPE, THRESHOLD);

        underTest.launch(ac, stack, notifier, adjustmentTypeWithThreshold);

        verify(azureComputeResourceService, times(1)).buildComputeResourcesForLaunch(any(AuthenticatedContext.class),
                any(CloudStack.class), eq(adjustmentTypeWithThreshold), any(), any());
        verify(azureCloudResourceService, times(1)).getInstanceCloudResources(STACK_NAME, instances, groups,
                RESOURCE_GROUP_NAME);
        verify(azureUtils, times(1)).getCustomNetworkId(network);
        verify(client, times(1)).createTemplateDeployment(any(), any(), any(), any());
        verify(client, times(1)).getTemplateDeployment(any(), any());
        verify(azureMarketplaceImageProviderService, never()).get(imageModel);
    }

    @Test
    public void testWhenMarketplaceImageThenTemplateBuilderUsesMarketplaceImage() {
        when(client.templateDeploymentExists(RESOURCE_GROUP_NAME, STACK_NAME)).thenReturn(false);
        when(client.createTemplateDeployment(any(), any(), any(), any())).thenReturn(deployment);
        when(azureImageFormatValidator.isMarketplaceImageFormat(any())).thenReturn(true);
        AdjustmentTypeWithThreshold adjustmentTypeWithThreshold = new AdjustmentTypeWithThreshold(ADJUSTMENT_TYPE, THRESHOLD);

        underTest.launch(ac, stack, notifier, adjustmentTypeWithThreshold);

        verify(azureMarketplaceImageProviderService, times(1)).get(imageModel);
    }

    @Test
    public void testLaunchLoadBalancerHandlesGracefully() {
        List<CloudResourceStatus> cloudResourceStatuses = underTest.launchLoadBalancers(ac, stack, notifier);

        Assert.assertEquals(0, cloudResourceStatuses.size());
    }

    @Test
    public void testTerminate() {
        when(azureTerminationHelperService.handleTransientDeployment(any(), any(), any())).thenReturn(List.of());
        when(azureTerminationHelperService.terminate(any(), any(), any()))
                .thenReturn(List.of(new CloudResourceStatus(instances.get(0), ResourceStatus.DELETED)));

        List<CloudResourceStatus> statuses = underTest.terminate(ac, stack, new ArrayList<>(instances));

        for (CloudResourceStatus status : statuses) {
            Assert.assertEquals(ResourceStatus.DELETED, status.getStatus());
        }
    }
}